# File: omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiobotocore/waiter.py
import asyncio
# WaiterModel is required for client.py import
from botocore.exceptions import ClientError
from botocore.waiter import WaiterModel # noqa: F401, lgtm[py/unused-import]
from botocore.waiter import Waiter, xform_name, logger, WaiterError, \
NormalizedOperationMethod as _NormalizedOperationMethod
from botocore.docs.docstring import WaiterDocstring
from botocore.utils import get_service_module_name
class NormalizedOperationMethod(_NormalizedOperationMethod):
async def __call__(self, **kwargs):
try:
return await self._client_method(**kwargs)
except ClientError as e:
return e.response
class AIOWaiter(Waiter):
async def wait(self, **kwargs):
acceptors = list(self.config.acceptors)
current_state = 'waiting'
# pop the invocation specific config
config = kwargs.pop('WaiterConfig', {})
sleep_amount = config.get('Delay', self.config.delay)
max_attempts = config.get('MaxAttempts', self.config.max_attempts)
last_matched_acceptor = None
num_attempts = 0
while True:
response = await self._operation_method(**kwargs)
num_attempts += 1
for acceptor in acceptors:
if acceptor.matcher_func(response):
last_matched_acceptor = acceptor
current_state = acceptor.state
break
else:
# If none of the acceptors matched, we should
# transition to the failure state if an error
# response was received.
if 'Error' in response:
# Transition to a failure state, which we
# can just handle here by raising an exception.
raise WaiterError(
name=self.name,
reason='An error occurred (%s): %s' % (
response['Error'].get('Code', 'Unknown'),
response['Error'].get('Message', 'Unknown'),
),
last_response=response,
)
if current_state == 'success':
logger.debug("Waiting complete, waiter matched the "
"success state.")
return
if current_state == 'failure':
reason = 'Waiter encountered a terminal failure state: %s' % (
acceptor.explanation
)
raise WaiterError(
name=self.name,
reason=reason,
last_response=response,
)
if num_attempts >= max_attempts:
if last_matched_acceptor is None:
reason = 'Max attempts exceeded'
else:
reason = 'Max attempts exceeded. Previously accepted state: %s' % (
last_matched_acceptor.explanation  # the acceptor that actually matched
)
raise WaiterError(
name=self.name,
reason=reason,
last_response=response,
)
await asyncio.sleep(sleep_amount)
def create_waiter_with_client(waiter_name, waiter_model, client):
"""
:type waiter_name: str
:param waiter_name: The name of the waiter. The name should match
the name (including the casing) of the key name in the waiter
model file (typically this is CamelCase).
:type waiter_model: botocore.waiter.WaiterModel
:param waiter_model: The model for the waiter configuration.
:type client: botocore.client.BaseClient
:param client: The botocore client associated with the service.
:rtype: botocore.waiter.Waiter
:return: The waiter object.
"""
single_waiter_config = waiter_model.get_waiter(waiter_name)
operation_name = xform_name(single_waiter_config.operation)
operation_method = NormalizedOperationMethod(
getattr(client, operation_name))
# Create a new wait method that will serve as a proxy to the underlying
# Waiter.wait method. This is needed to attach a docstring to the
# method.
async def wait(self, **kwargs):
await AIOWaiter.wait(self, **kwargs)
wait.__doc__ = WaiterDocstring(
waiter_name=waiter_name,
event_emitter=client.meta.events,
service_model=client.meta.service_model,
service_waiter_model=waiter_model,
include_signature=False
)
# Rename the waiter class based on the type of waiter.
waiter_class_name = str('%s.AIOWaiter.%s' % (
get_service_module_name(client.meta.service_model),
waiter_name))
# Create the new waiter class
documented_waiter_cls = type(
waiter_class_name, (AIOWaiter,), {'wait': wait})
# Return an instance of the new waiter class.
return documented_waiter_cls(
waiter_name, single_waiter_config, operation_method
)
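# --- Usage sketch (editor's addition, not part of the original module) ---
# In practice waiters are obtained from an aiobotocore client rather than by
# calling create_waiter_with_client() directly. A minimal sketch, assuming an
# S3 bucket name that is purely hypothetical:
async def _example_wait_for_bucket():
    from aiobotocore.session import get_session
    session = get_session()
    async with session.create_client('s3', region_name='us-east-1') as client:
        waiter = client.get_waiter('bucket_exists')
        # WaiterConfig overrides the waiter model's Delay/MaxAttempts
        # (see AIOWaiter.wait above).
        await waiter.wait(Bucket='hypothetical-bucket',
                          WaiterConfig={'Delay': 5, 'MaxAttempts': 20})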
# File: omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiobotocore/endpoint.py
import aiohttp
import asyncio
import io
import ssl
import aiohttp.http_exceptions
from aiohttp.client import URL
from botocore.endpoint import EndpointCreator, Endpoint, DEFAULT_TIMEOUT, \
MAX_POOL_CONNECTIONS, logger, history_recorder, create_request_object
from botocore.exceptions import ConnectionClosedError
from botocore.hooks import first_non_none_response
from botocore.utils import is_valid_endpoint_url
from multidict import MultiDict
from urllib.parse import urlparse
from urllib3.response import HTTPHeaderDict
from aiobotocore.response import StreamingBody
from aiobotocore._endpoint_helpers import _text, _IOBaseWrapper, \
ClientResponseProxy
async def convert_to_response_dict(http_response, operation_model):
"""Convert an HTTP response object to a request dict.
This converts the requests library's HTTP response object to
a dictionary.
:type http_response: botocore.vendored.requests.model.Response
:param http_response: The HTTP response from an AWS service request.
:rtype: dict
:return: A response dictionary which will contain the following keys:
* headers (dict)
* status_code (int)
* body (string or file-like object)
"""
response_dict = {
# botocore converts keys to str, so make sure that they are in
# the expected case. See detailed discussion here:
# https://github.com/aio-libs/aiobotocore/pull/116
# aiohttp's CIMultiDict camel cases the headers :(
'headers': HTTPHeaderDict(
{k.decode('utf-8').lower(): v.decode('utf-8')
for k, v in http_response.raw_headers}),
'status_code': http_response.status_code,
'context': {
'operation_name': operation_model.name,
}
}
if response_dict['status_code'] >= 300:
response_dict['body'] = await http_response.read()
elif operation_model.has_event_stream_output:
response_dict['body'] = http_response.raw
elif operation_model.has_streaming_output:
length = response_dict['headers'].get('content-length')
response_dict['body'] = StreamingBody(http_response.raw, length)
else:
response_dict['body'] = await http_response.read()
return response_dict
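# Illustrative shape of the dict returned above for a non-streaming JSON
# response (editor's addition; values are hypothetical):
#
#     {'headers': {'content-type': 'application/x-amz-json-1.1', ...},
#      'status_code': 200,
#      'context': {'operation_name': 'ListTables'},
#      'body': b'{"TableNames": []}'}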
class AioEndpoint(Endpoint):
def __init__(self, *args, proxies=None, **kwargs):
super().__init__(*args, **kwargs)
self.proxies = proxies or {}
async def create_request(self, params, operation_model=None):
request = create_request_object(params)
if operation_model:
request.stream_output = any([
operation_model.has_streaming_output,
operation_model.has_event_stream_output
])
service_id = operation_model.service_model.service_id.hyphenize()
event_name = 'request-created.{service_id}.{op_name}'.format(
service_id=service_id,
op_name=operation_model.name)
await self._event_emitter.emit(event_name, request=request,
operation_name=operation_model.name)
prepared_request = self.prepare_request(request)
return prepared_request
async def _send_request(self, request_dict, operation_model):
attempts = 1
request = await self.create_request(request_dict, operation_model)
context = request_dict['context']
success_response, exception = await self._get_response(
request, operation_model, context)
while await self._needs_retry(attempts, operation_model,
request_dict, success_response,
exception):
attempts += 1
# If there is a stream associated with the request, we need
# to reset it before attempting to send the request again.
# This will ensure that we resend the entire contents of the
# body.
request.reset_stream()
# Create a new request when retried (including a new signature).
request = await self.create_request(
request_dict, operation_model)
success_response, exception = await self._get_response(
request, operation_model, context)
if success_response is not None and \
'ResponseMetadata' in success_response[1]:
# We want to share num retries, not num attempts.
total_retries = attempts - 1
success_response[1]['ResponseMetadata']['RetryAttempts'] = \
total_retries
if exception is not None:
raise exception
else:
return success_response
async def _get_response(self, request, operation_model, context):
# This will return a tuple of (success_response, exception)
# and success_response is itself a tuple of
# (http_response, parsed_dict).
# If an exception occurs then the success_response is None.
# If no exception occurs then exception is None.
success_response, exception = await self._do_get_response(
request, operation_model)
kwargs_to_emit = {
'response_dict': None,
'parsed_response': None,
'context': context,
'exception': exception,
}
if success_response is not None:
http_response, parsed_response = success_response
kwargs_to_emit['parsed_response'] = parsed_response
kwargs_to_emit['response_dict'] = await convert_to_response_dict(
http_response, operation_model)
service_id = operation_model.service_model.service_id.hyphenize()
await self._event_emitter.emit(
'response-received.%s.%s' % (
service_id, operation_model.name), **kwargs_to_emit)
return success_response, exception
async def _do_get_response(self, request, operation_model):
try:
logger.debug("Sending http request: %s", request)
history_recorder.record('HTTP_REQUEST', {
'method': request.method,
'headers': request.headers,
'streaming': operation_model.has_streaming_input,
'url': request.url,
'body': request.body
})
service_id = operation_model.service_model.service_id.hyphenize()
event_name = 'before-send.%s.%s' % (
service_id, operation_model.name)
responses = await self._event_emitter.emit(event_name,
request=request)
http_response = first_non_none_response(responses)
if http_response is None:
http_response = await self._send(request)
except aiohttp.ClientConnectionError as e:
e.request = request # botocore expects the request property
return None, e
except aiohttp.http_exceptions.BadStatusLine:
better_exception = ConnectionClosedError(
endpoint_url=request.url, request=request)
return None, better_exception
except Exception as e:
logger.debug("Exception received when sending HTTP request.",
exc_info=True)
return None, e
# This returns the http_response and the parsed_data.
response_dict = await convert_to_response_dict(http_response,
operation_model)
http_response_record_dict = response_dict.copy()
http_response_record_dict['streaming'] = \
operation_model.has_streaming_output
history_recorder.record('HTTP_RESPONSE', http_response_record_dict)
protocol = operation_model.metadata['protocol']
parser = self._response_parser_factory.create_parser(protocol)
parsed_response = parser.parse(
response_dict, operation_model.output_shape)
if http_response.status_code >= 300:
self._add_modeled_error_fields(
response_dict, parsed_response,
operation_model, parser,
)
history_recorder.record('PARSED_RESPONSE', parsed_response)
return (http_response, parsed_response), None
# NOTE: The only change here relative to botocore is time.sleep -> asyncio.sleep
async def _needs_retry(self, attempts, operation_model, request_dict,
response=None, caught_exception=None):
service_id = operation_model.service_model.service_id.hyphenize()
event_name = 'needs-retry.%s.%s' % (
service_id,
operation_model.name)
responses = await self._event_emitter.emit(
event_name, response=response, endpoint=self,
operation=operation_model, attempts=attempts,
caught_exception=caught_exception, request_dict=request_dict)
handler_response = first_non_none_response(responses)
if handler_response is None:
return False
else:
# Request needs to be retried, and we need to sleep
# for the specified number of seconds.
logger.debug("Response received to retry, sleeping for "
"%s seconds", handler_response)
await asyncio.sleep(handler_response)
return True
async def _send(self, request):
# Note: When using aiobotocore with dynamodb, requests fail on crc32
# checksum computation as soon as the response data reaches ~5KB.
# When AWS response is gzip compressed:
# 1. aiohttp is automatically decompressing the data
# (http://aiohttp.readthedocs.io/en/stable/client.html#binary-response-content)
# 2. botocore computes crc32 on the uncompressed data bytes and fails
# because the crc32 was computed on the compressed data
# The following line forces AWS not to use gzip compression.
# If there is a way to configure aiohttp not to perform decompression,
# we can remove this line and take advantage of
# AWS gzip compression.
# https://github.com/boto/botocore/issues/1255
url = request.url
headers = request.headers
data = request.body
headers['Accept-Encoding'] = 'identity'
headers_ = MultiDict(
(z[0], _text(z[1], encoding='utf-8')) for z in headers.items())
# botocore does this during the request so we do this here as well
# TODO: this should be part of the ClientSession, perhaps make wrapper
proxy = self.proxies.get(urlparse(url.lower()).scheme)
if isinstance(data, io.IOBase):
data = _IOBaseWrapper(data)
url = URL(url, encoded=True)
resp = await self.http_session.request(
request.method, url=url, headers=headers_, data=data, proxy=proxy)
# If we're not streaming, read the content so we can retry any timeout
# errors, see:
# https://github.com/boto/botocore/blob/develop/botocore/vendored/requests/sessions.py#L604
if not request.stream_output:
await resp.read()
return resp
class AioEndpointCreator(EndpointCreator):
# TODO: handle socket_options
def create_endpoint(self, service_model, region_name, endpoint_url,
verify=None, response_parser_factory=None,
timeout=DEFAULT_TIMEOUT,
max_pool_connections=MAX_POOL_CONNECTIONS,
http_session_cls=aiohttp.ClientSession,
proxies=None,
socket_options=None,
client_cert=None,
connector_args=None):
if not is_valid_endpoint_url(endpoint_url):
raise ValueError("Invalid endpoint: %s" % endpoint_url)
if proxies is None:
proxies = self._get_proxies(endpoint_url)
endpoint_prefix = service_model.endpoint_prefix
logger.debug('Setting %s timeout as %s', endpoint_prefix, timeout)
if isinstance(timeout, (list, tuple)):
conn_timeout, read_timeout = timeout
else:
conn_timeout = read_timeout = timeout
if connector_args is None:
# AWS has a 20 second idle timeout:
# https://forums.aws.amazon.com/message.jspa?messageID=215367
# aiohttp default timeout is 30s so set something reasonable here
connector_args = dict(keepalive_timeout=12)
timeout = aiohttp.ClientTimeout(
sock_connect=conn_timeout,
sock_read=read_timeout
)
ssl_context = None
if client_cert:
if isinstance(client_cert, str):
key_file = None
cert_file = client_cert
elif isinstance(client_cert, tuple):
cert_file, key_file = client_cert
else:
raise TypeError('client_cert must be str or tuple, not %s'
% type(client_cert).__name__)
ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
ssl_context.load_cert_chain(cert_file, key_file)
connector = aiohttp.TCPConnector(
limit=max_pool_connections,
verify_ssl=self._get_verify_value(verify),
ssl_context=ssl_context,
**connector_args)
aio_session = http_session_cls(
connector=connector,
timeout=timeout,
skip_auto_headers={'CONTENT-TYPE'},
response_class=ClientResponseProxy,
auto_decompress=False)
return AioEndpoint(
endpoint_url,
endpoint_prefix=endpoint_prefix,
event_emitter=self._event_emitter,
response_parser_factory=response_parser_factory,
http_session=aio_session,
proxies=proxies)
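# --- Usage sketch (editor's addition, not part of the original module) ---
# AioEndpointCreator is normally driven by the client machinery; invoking it
# by hand looks roughly like this, assuming `event_emitter` and
# `service_model` objects obtained elsewhere:
def _example_create_endpoint(event_emitter, service_model):
    creator = AioEndpointCreator(event_emitter)
    return creator.create_endpoint(
        service_model,
        region_name='us-east-1',
        endpoint_url='https://dynamodb.us-east-1.amazonaws.com',
        timeout=(5, 60))  # (connect, read) tuple form handled above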
# File: omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiobotocore/utils.py
import asyncio
import logging
import json
import aiohttp
import aiohttp.client_exceptions
from botocore.utils import ContainerMetadataFetcher, InstanceMetadataFetcher, \
IMDSFetcher, get_environ_proxies, BadIMDSRequestError, S3RegionRedirector, \
ClientError
from botocore.exceptions import (
InvalidIMDSEndpointError, MetadataRetrievalError,
)
import botocore.awsrequest
logger = logging.getLogger(__name__)
RETRYABLE_HTTP_ERRORS = (aiohttp.client_exceptions.ClientError, asyncio.TimeoutError)
class AioIMDSFetcher(IMDSFetcher):
class Response(object):
def __init__(self, status_code, text, url):
self.status_code = status_code
self.url = url
self.text = text
self.content = text
def __init__(self, *args, session=None, **kwargs):
super(AioIMDSFetcher, self).__init__(*args, **kwargs)
self._trust_env = bool(get_environ_proxies(self._base_url))
self._session = session or aiohttp.ClientSession
async def _fetch_metadata_token(self):
self._assert_enabled()
url = self._base_url + self._TOKEN_PATH
headers = {
'x-aws-ec2-metadata-token-ttl-seconds': self._TOKEN_TTL,
}
self._add_user_agent(headers)
request = botocore.awsrequest.AWSRequest(
method='PUT', url=url, headers=headers)
timeout = aiohttp.ClientTimeout(total=self._timeout)
async with self._session(timeout=timeout,
trust_env=self._trust_env) as session:
for i in range(self._num_attempts):
try:
async with session.put(url, headers=headers) as resp:
text = await resp.text()
if resp.status == 200:
return text
elif resp.status in (404, 403, 405):
return None
elif resp.status in (400,):
raise BadIMDSRequestError(request)
except asyncio.TimeoutError:
return None
except RETRYABLE_HTTP_ERRORS as e:
logger.debug(
"Caught retryable HTTP exception while making metadata "
"service request to %s: %s", url, e, exc_info=True)
except aiohttp.client_exceptions.ClientConnectorError as e:
if getattr(e, 'errno', None) == 8 or \
str(getattr(e, 'os_error', None)) == \
'Domain name not found': # threaded vs async resolver
raise InvalidIMDSEndpointError(endpoint=url, error=e)
else:
raise
return None
async def _get_request(self, url_path, retry_func, token=None):
self._assert_enabled()
if retry_func is None:
retry_func = self._default_retry
url = self._base_url + url_path
headers = {}
if token is not None:
headers['x-aws-ec2-metadata-token'] = token
self._add_user_agent(headers)
timeout = aiohttp.ClientTimeout(total=self._timeout)
async with self._session(timeout=timeout,
trust_env=self._trust_env) as session:
for i in range(self._num_attempts):
try:
async with session.get(url, headers=headers) as resp:
text = await resp.text()
response = self.Response(resp.status, text, resp.url)
if not retry_func(response):
return response
except RETRYABLE_HTTP_ERRORS as e:
logger.debug(
"Caught retryable HTTP exception while making metadata "
"service request to %s: %s", url, e, exc_info=True)
raise self._RETRIES_EXCEEDED_ERROR_CLS()
class AioInstanceMetadataFetcher(AioIMDSFetcher, InstanceMetadataFetcher):
async def retrieve_iam_role_credentials(self):
try:
token = await self._fetch_metadata_token()
role_name = await self._get_iam_role(token)
credentials = await self._get_credentials(role_name, token)
if self._contains_all_credential_fields(credentials):
return {
'role_name': role_name,
'access_key': credentials['AccessKeyId'],
'secret_key': credentials['SecretAccessKey'],
'token': credentials['Token'],
'expiry_time': credentials['Expiration'],
}
else:
if 'Code' in credentials and 'Message' in credentials:
logger.debug('Error response received when retrieving '
'credentials: %s.', credentials)
return {}
except self._RETRIES_EXCEEDED_ERROR_CLS:
logger.debug("Max number of attempts exceeded (%s) when "
"attempting to retrieve data from metadata service.",
self._num_attempts)
except BadIMDSRequestError as e:
logger.debug("Bad IMDS request: %s", e.request)
return {}
async def _get_iam_role(self, token=None):
r = await self._get_request(
url_path=self._URL_PATH,
retry_func=self._needs_retry_for_role_name,
token=token
)
return r.text
async def _get_credentials(self, role_name, token=None):
r = await self._get_request(
url_path=self._URL_PATH + role_name,
retry_func=self._needs_retry_for_credentials,
token=token
)
return json.loads(r.text)
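# --- Usage sketch (editor's addition, not part of the original module) ---
# Fetching IAM role credentials from the EC2 instance metadata service; this
# only returns data when run on an EC2 instance, and the timeout/attempt
# values here are arbitrary:
async def _example_imds_credentials():
    fetcher = AioInstanceMetadataFetcher(timeout=1.0, num_attempts=2)
    creds = await fetcher.retrieve_iam_role_credentials()
    # An empty dict means the metadata service was unreachable or no role
    # credentials were found.
    return creds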
class AioS3RegionRedirector(S3RegionRedirector):
async def redirect_from_error(self, request_dict, response, operation, **kwargs):
if response is None:
# This could be none if there was a ConnectionError or other
# transport error.
return
if self._is_s3_accesspoint(request_dict.get('context', {})):
logger.debug(
'S3 request was previously to an accesspoint, not redirecting.'
)
return
if request_dict.get('context', {}).get('s3_redirected'):
logger.debug(
'S3 request was previously redirected, not redirecting.')
return
error = response[1].get('Error', {})
error_code = error.get('Code')
response_metadata = response[1].get('ResponseMetadata', {})
# We have to account for 400 responses because
# if we sign a Head* request with the wrong region,
# we'll get a 400 Bad Request but we won't get a
# body saying it's an "AuthorizationHeaderMalformed".
is_special_head_object = (
error_code in ['301', '400'] and
operation.name == 'HeadObject'
)
is_special_head_bucket = (
error_code in ['301', '400'] and
operation.name == 'HeadBucket' and
'x-amz-bucket-region' in response_metadata.get('HTTPHeaders', {})
)
is_wrong_signing_region = (
error_code == 'AuthorizationHeaderMalformed' and
'Region' in error
)
is_redirect_status = response[0] is not None and \
response[0].status_code in [301, 302, 307]
is_permanent_redirect = error_code == 'PermanentRedirect'
if not any([is_special_head_object, is_wrong_signing_region,
is_permanent_redirect, is_special_head_bucket,
is_redirect_status]):
return
bucket = request_dict['context']['signing']['bucket']
client_region = request_dict['context'].get('client_region')
new_region = await self.get_bucket_region(bucket, response)
if new_region is None:
logger.debug(
"S3 client configured for region %s but the bucket %s is not "
"in that region and the proper region could not be "
"automatically determined." % (client_region, bucket))
return
logger.debug(
"S3 client configured for region %s but the bucket %s is in region"
" %s; Please configure the proper region to avoid multiple "
"unnecessary redirects and signing attempts." % (
client_region, bucket, new_region))
endpoint = self._endpoint_resolver.resolve('s3', new_region)
endpoint = endpoint['endpoint_url']
signing_context = {
'region': new_region,
'bucket': bucket,
'endpoint': endpoint
}
request_dict['context']['signing'] = signing_context
self._cache[bucket] = signing_context
self.set_request_url(request_dict, request_dict['context'])
request_dict['context']['s3_redirected'] = True
# Return 0 so it doesn't wait to retry
return 0
async def get_bucket_region(self, bucket, response):
# First try to source the region from the headers.
service_response = response[1]
response_headers = service_response['ResponseMetadata']['HTTPHeaders']
if 'x-amz-bucket-region' in response_headers:
return response_headers['x-amz-bucket-region']
# Next, check the error body
region = service_response.get('Error', {}).get('Region', None)
if region is not None:
return region
# Finally, HEAD the bucket. No other choice sadly.
try:
response = await self._client.head_bucket(Bucket=bucket)
headers = response['ResponseMetadata']['HTTPHeaders']
except ClientError as e:
headers = e.response['ResponseMetadata']['HTTPHeaders']
region = headers.get('x-amz-bucket-region', None)
return region
class AioContainerMetadataFetcher(ContainerMetadataFetcher):
def __init__(self, session=None, sleep=asyncio.sleep):
if session is None:
session = aiohttp.ClientSession
super(AioContainerMetadataFetcher, self).__init__(session, sleep)
async def retrieve_full_uri(self, full_url, headers=None):
self._validate_allowed_url(full_url)
return await self._retrieve_credentials(full_url, headers)
async def retrieve_uri(self, relative_uri):
"""Retrieve JSON metadata from ECS metadata.
:type relative_uri: str
:param relative_uri: A relative URI, e.g "/foo/bar?id=123"
:return: The parsed JSON response.
"""
full_url = self.full_url(relative_uri)
return await self._retrieve_credentials(full_url)
async def _retrieve_credentials(self, full_url, extra_headers=None):
headers = {'Accept': 'application/json'}
if extra_headers is not None:
headers.update(extra_headers)
attempts = 0
while True:
try:
return await self._get_response(
full_url, headers, self.TIMEOUT_SECONDS)
except MetadataRetrievalError as e:
logger.debug("Received error when attempting to retrieve "
"container metadata: %s", e, exc_info=True)
await self._sleep(self.SLEEP_TIME)
attempts += 1
if attempts >= self.RETRY_ATTEMPTS:
raise
async def _get_response(self, full_url, headers, timeout):
try:
timeout = aiohttp.ClientTimeout(total=self.TIMEOUT_SECONDS)
async with self._session(timeout=timeout) as session:
async with session.get(full_url, headers=headers) as resp:
if resp.status != 200:
text = await resp.text()
raise MetadataRetrievalError(
error_msg=(
"Received non 200 response (%d) "
"from ECS metadata: %s"
) % (resp.status, text))
try:
return await resp.json()
except ValueError:
text = await resp.text()
error_msg = (
"Unable to parse JSON returned from ECS metadata services"
)
logger.debug('%s:%s', error_msg, text)
raise MetadataRetrievalError(error_msg=error_msg)
except RETRYABLE_HTTP_ERRORS as e:
error_msg = ("Received error when attempting to retrieve "
"ECS metadata: %s" % e)
raise MetadataRetrievalError(error_msg=error_msg)
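# --- Usage sketch (editor's addition, not part of the original module) ---
# Retrieving ECS task credentials through the relative-URI form. The URI
# normally comes from the AWS_CONTAINER_CREDENTIALS_RELATIVE_URI environment
# variable; the value below is hypothetical:
async def _example_ecs_credentials():
    fetcher = AioContainerMetadataFetcher()
    return await fetcher.retrieve_uri('/v2/credentials/hypothetical-id')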
# File: omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiobotocore/credentials.py
import asyncio
import datetime
import logging
import subprocess
import json
from copy import deepcopy
from typing import Optional
from hashlib import sha1
from dateutil.tz import tzutc
from botocore import UNSIGNED
from botocore.config import Config
import botocore.compat
from botocore.credentials import EnvProvider, Credentials, RefreshableCredentials, \
ReadOnlyCredentials, ContainerProvider, ContainerMetadataFetcher, \
_parse_if_needed, InstanceMetadataProvider, _get_client_creator, \
ProfileProviderBuilder, ConfigProvider, SharedCredentialProvider, \
ProcessProvider, AssumeRoleWithWebIdentityProvider, _local_now, \
CachedCredentialFetcher, _serialize_if_needed, BaseAssumeRoleCredentialFetcher, \
AssumeRoleProvider, AssumeRoleCredentialFetcher, CredentialResolver, \
CanonicalNameCredentialSourcer, BotoProvider, OriginalEC2Provider, \
SSOProvider
from botocore.exceptions import UnauthorizedSSOTokenError
from botocore.exceptions import MetadataRetrievalError, CredentialRetrievalError, \
InvalidConfigError, PartialCredentialsError, RefreshWithMFAUnsupportedError, \
UnknownCredentialError
from botocore.compat import compat_shell_split
from botocore.utils import SSOTokenLoader
from aiobotocore.utils import AioContainerMetadataFetcher, AioInstanceMetadataFetcher
from aiobotocore.config import AioConfig
logger = logging.getLogger(__name__)
def create_credential_resolver(session, cache=None, region_name=None):
"""Create a default credential resolver.
This creates a pre-configured credential resolver
that includes the default lookup chain for
credentials.
"""
profile_name = session.get_config_variable('profile') or 'default'
metadata_timeout = session.get_config_variable('metadata_service_timeout')
num_attempts = session.get_config_variable('metadata_service_num_attempts')
disable_env_vars = session.instance_variables().get('profile') is not None
imds_config = {
'ec2_metadata_service_endpoint': session.get_config_variable(
'ec2_metadata_service_endpoint'),
'imds_use_ipv6': session.get_config_variable('imds_use_ipv6')
}
if cache is None:
cache = {}
env_provider = AioEnvProvider()
container_provider = AioContainerProvider()
instance_metadata_provider = AioInstanceMetadataProvider(
iam_role_fetcher=AioInstanceMetadataFetcher(
timeout=metadata_timeout,
num_attempts=num_attempts,
user_agent=session.user_agent(),
config=imds_config)
)
profile_provider_builder = AioProfileProviderBuilder(
session, cache=cache, region_name=region_name)
assume_role_provider = AioAssumeRoleProvider(
load_config=lambda: session.full_config,
client_creator=_get_client_creator(session, region_name),
cache=cache,
profile_name=profile_name,
credential_sourcer=AioCanonicalNameCredentialSourcer([
env_provider, container_provider, instance_metadata_provider
]),
profile_provider_builder=profile_provider_builder,
)
pre_profile = [
env_provider,
assume_role_provider,
]
profile_providers = profile_provider_builder.providers(
profile_name=profile_name,
disable_env_vars=disable_env_vars,
)
post_profile = [
AioOriginalEC2Provider(),
AioBotoProvider(),
container_provider,
instance_metadata_provider,
]
providers = pre_profile + profile_providers + post_profile
if disable_env_vars:
# An explicitly provided profile will negate an EnvProvider.
# We will defer to providers that understand the "profile"
# concept to retrieve credentials.
# The one edge case is if all three values are provided via
# env vars:
# export AWS_ACCESS_KEY_ID=foo
# export AWS_SECRET_ACCESS_KEY=bar
# export AWS_PROFILE=baz
# Then, just like our client() calls, the explicit credentials
# will take precedence.
#
# This precedence is enforced by leaving the EnvProvider in the chain.
# This means that the only way a "profile" would win is if the
# EnvProvider does not return credentials, which is what we want
# in this scenario.
providers.remove(env_provider)
logger.debug('Skipping environment variable credential check'
' because profile name was explicitly set.')
resolver = AioCredentialResolver(providers=providers)
return resolver
class AioProfileProviderBuilder(ProfileProviderBuilder):
def _create_process_provider(self, profile_name):
return AioProcessProvider(
profile_name=profile_name,
load_config=lambda: self._session.full_config,
)
def _create_shared_credential_provider(self, profile_name):
credential_file = self._session.get_config_variable('credentials_file')
return AioSharedCredentialProvider(
profile_name=profile_name,
creds_filename=credential_file,
)
def _create_config_provider(self, profile_name):
config_file = self._session.get_config_variable('config_file')
return AioConfigProvider(
profile_name=profile_name,
config_filename=config_file,
)
def _create_web_identity_provider(self, profile_name, disable_env_vars):
return AioAssumeRoleWithWebIdentityProvider(
load_config=lambda: self._session.full_config,
client_creator=_get_client_creator(
self._session, self._region_name),
cache=self._cache,
profile_name=profile_name,
disable_env_vars=disable_env_vars,
)
def _create_sso_provider(self, profile_name):
return AioSSOProvider(
load_config=lambda: self._session.full_config,
client_creator=self._session.create_client,
profile_name=profile_name,
cache=self._cache,
token_cache=self._sso_token_cache,
)
async def get_credentials(session):
resolver = create_credential_resolver(session)
return await resolver.load_credentials()
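# --- Usage sketch (editor's addition, not part of the original module) ---
# Resolving credentials through the full default chain for an aiobotocore
# session:
async def _example_resolve_credentials():
    from aiobotocore.session import AioSession
    creds = await get_credentials(AioSession())
    if creds is not None:
        frozen = await creds.get_frozen_credentials()
        return frozen.access_key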
def create_assume_role_refresher(client, params):
async def refresh():
async with client as sts:
response = await sts.assume_role(**params)
credentials = response['Credentials']
# We need to normalize the credential names to
# the values expected by the refresh creds.
return {
'access_key': credentials['AccessKeyId'],
'secret_key': credentials['SecretAccessKey'],
'token': credentials['SessionToken'],
'expiry_time': _serialize_if_needed(credentials['Expiration']),
}
return refresh
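# --- Usage sketch (editor's addition, not part of the original module) ---
# Wiring the refresher above into deferred refreshable credentials. The STS
# client and role ARN are hypothetical; AioDeferredRefreshableCredentials is
# defined later in this module and resolved at call time:
def _example_assume_role_credentials(sts_client):
    refresh = create_assume_role_refresher(
        sts_client,
        {'RoleArn': 'arn:aws:iam::123456789012:role/Example',
         'RoleSessionName': 'example-session'})
    return AioDeferredRefreshableCredentials(
        refresh_using=refresh, method='sts-assume-role')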
def create_aio_mfa_serial_refresher(actual_refresh):
class _Refresher(object):
def __init__(self, refresh):
self._refresh = refresh
self._has_been_called = False
async def call(self):
if self._has_been_called:
# We can explore an option in the future to support
# reprompting for MFA, but for now we just error out
# when the temp creds expire.
raise RefreshWithMFAUnsupportedError()
self._has_been_called = True
return await self._refresh()
return _Refresher(actual_refresh).call
class AioCredentials(Credentials):
async def get_frozen_credentials(self):
return ReadOnlyCredentials(self.access_key,
self.secret_key,
self.token)
@classmethod
def from_credentials(cls, obj: Optional[Credentials]):
if obj is None:
return None
return cls(
obj.access_key, obj.secret_key,
obj.token, obj.method)
class AioRefreshableCredentials(RefreshableCredentials):
def __init__(self, *args, **kwargs):
super(AioRefreshableCredentials, self).__init__(*args, **kwargs)
self._refresh_lock = asyncio.Lock()
@classmethod
def from_refreshable_credentials(cls, obj: Optional[RefreshableCredentials]):
if obj is None:
return None
return cls( # Using interval values here to skip property calling .refresh()
obj._access_key, obj._secret_key,
obj._token, obj._expiry_time,
obj._refresh_using, obj.method,
obj._time_fetcher
)
# Redeclaring the properties so they don't call refresh()
# We have to redeclare the setters as we're overriding the getters
@property
def access_key(self):
# TODO: this needs to be resolved
raise NotImplementedError("missing call to self._refresh. "
"Use get_frozen_credentials instead")
return self._access_key
@access_key.setter
def access_key(self, value):
self._access_key = value
@property
def secret_key(self):
# TODO: this needs to be resolved
raise NotImplementedError("missing call to self._refresh. "
"Use get_frozen_credentials instead")
return self._secret_key
@secret_key.setter
def secret_key(self, value):
self._secret_key = value
@property
def token(self):
# TODO: this needs to be resolved
raise NotImplementedError("missing call to self._refresh. "
"Use get_frozen_credentials instead")
return self._token
@token.setter
def token(self, value):
self._token = value
async def _refresh(self):
if not self.refresh_needed(self._advisory_refresh_timeout):
return
# By this point we need a refresh but it's not critical
if not self._refresh_lock.locked():
async with self._refresh_lock:
if not self.refresh_needed(self._advisory_refresh_timeout):
return
is_mandatory_refresh = self.refresh_needed(
self._mandatory_refresh_timeout)
await self._protected_refresh(is_mandatory=is_mandatory_refresh)
return
elif self.refresh_needed(self._mandatory_refresh_timeout):
# If we're here, we absolutely need a refresh and the
# lock is held so wait for it
async with self._refresh_lock:
# Might have refreshed by now
if not self.refresh_needed(self._mandatory_refresh_timeout):
return
await self._protected_refresh(is_mandatory=True)
async def _protected_refresh(self, is_mandatory):
try:
metadata = await self._refresh_using()
except Exception:
period_name = 'mandatory' if is_mandatory else 'advisory'
logger.warning("Refreshing temporary credentials failed "
"during %s refresh period.",
period_name, exc_info=True)
if is_mandatory:
# If this is a mandatory refresh, then
# all errors that occur when we attempt to refresh
# credentials are propagated back to the user.
raise
# Otherwise we'll just return.
# The end result will be that we'll use the current
# set of temporary credentials we have.
return
self._set_from_data(metadata)
self._frozen_credentials = ReadOnlyCredentials(
self._access_key, self._secret_key, self._token)
if self._is_expired():
msg = ("Credentials were refreshed, but the "
"refreshed credentials are still expired.")
logger.warning(msg)
raise RuntimeError(msg)
async def get_frozen_credentials(self):
await self._refresh()
return self._frozen_credentials
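# Editor's note (illustrative, not part of the original module): because the
# access_key/secret_key/token properties above deliberately raise, callers
# must snapshot credentials asynchronously, e.g.:
#
#     frozen = await refreshable_credentials.get_frozen_credentials()
#     use(frozen.access_key, frozen.secret_key, frozen.token)
#
# where `refreshable_credentials` and `use` are hypothetical placeholders.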
class AioDeferredRefreshableCredentials(AioRefreshableCredentials):
def __init__(self, refresh_using, method, time_fetcher=_local_now):
self._refresh_using = refresh_using
self._access_key = None
self._secret_key = None
self._token = None
self._expiry_time = None
self._time_fetcher = time_fetcher
self._refresh_lock = asyncio.Lock()
self.method = method
self._frozen_credentials = None
def refresh_needed(self, refresh_in=None):
if self._frozen_credentials is None:
return True
return super(AioDeferredRefreshableCredentials, self).refresh_needed(
refresh_in
)
class AioCachedCredentialFetcher(CachedCredentialFetcher):
async def _get_credentials(self):
raise NotImplementedError('_get_credentials()')
async def fetch_credentials(self):
return await self._get_cached_credentials()
async def _get_cached_credentials(self):
"""Get up-to-date credentials.
This will check the cache for up-to-date credentials, calling assume
role if none are available.
"""
response = self._load_from_cache()
if response is None:
response = await self._get_credentials()
self._write_to_cache(response)
else:
logger.debug("Credentials for role retrieved from cache.")
creds = response['Credentials']
expiration = _serialize_if_needed(creds['Expiration'], iso=True)
return {
'access_key': creds['AccessKeyId'],
'secret_key': creds['SecretAccessKey'],
'token': creds['SessionToken'],
'expiry_time': expiration,
}
class AioBaseAssumeRoleCredentialFetcher(BaseAssumeRoleCredentialFetcher,
AioCachedCredentialFetcher):
pass
class AioAssumeRoleCredentialFetcher(AssumeRoleCredentialFetcher,
AioBaseAssumeRoleCredentialFetcher):
async def _get_credentials(self):
"""Get credentials by calling assume role."""
kwargs = self._assume_role_kwargs()
client = await self._create_client()
async with client as sts:
return await sts.assume_role(**kwargs)
async def _create_client(self):
"""Create an STS client using the source credentials."""
frozen_credentials = await self._source_credentials.get_frozen_credentials()
return self._client_creator(
'sts',
aws_access_key_id=frozen_credentials.access_key,
aws_secret_access_key=frozen_credentials.secret_key,
aws_session_token=frozen_credentials.token,
)
class AioAssumeRoleWithWebIdentityCredentialFetcher(
AioBaseAssumeRoleCredentialFetcher
):
def __init__(self, client_creator, web_identity_token_loader, role_arn,
extra_args=None, cache=None, expiry_window_seconds=None):
self._web_identity_token_loader = web_identity_token_loader
super(AioAssumeRoleWithWebIdentityCredentialFetcher, self).__init__(
client_creator, role_arn, extra_args=extra_args,
cache=cache, expiry_window_seconds=expiry_window_seconds
)
async def _get_credentials(self):
"""Get credentials by calling assume role."""
kwargs = self._assume_role_kwargs()
# Assume role with web identity does not require credentials other than
# the token, so explicitly configure the client not to sign requests.
config = AioConfig(signature_version=UNSIGNED)
async with self._client_creator('sts', config=config) as client:
return await client.assume_role_with_web_identity(**kwargs)
def _assume_role_kwargs(self):
"""Get the arguments for assume role based on current configuration."""
assume_role_kwargs = deepcopy(self._assume_kwargs)
identity_token = self._web_identity_token_loader()
assume_role_kwargs['WebIdentityToken'] = identity_token
return assume_role_kwargs
class AioProcessProvider(ProcessProvider):
def __init__(self, *args, popen=asyncio.create_subprocess_exec, **kwargs):
super(AioProcessProvider, self).__init__(*args, **kwargs, popen=popen)
async def load(self):
credential_process = self._credential_process
if credential_process is None:
return
creds_dict = await self._retrieve_credentials_using(credential_process)
if creds_dict.get('expiry_time') is not None:
return AioRefreshableCredentials.create_from_metadata(
creds_dict,
lambda: self._retrieve_credentials_using(credential_process),
self.METHOD
)
return AioCredentials(
access_key=creds_dict['access_key'],
secret_key=creds_dict['secret_key'],
token=creds_dict.get('token'),
method=self.METHOD
)
async def _retrieve_credentials_using(self, credential_process):
# We're not using shell=True, so we need to pass the
# command and all arguments as a list.
process_list = compat_shell_split(credential_process)
p = await self._popen(*process_list,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = await p.communicate()
if p.returncode != 0:
raise CredentialRetrievalError(
provider=self.METHOD, error_msg=stderr.decode('utf-8'))
parsed = botocore.compat.json.loads(stdout.decode('utf-8'))
version = parsed.get('Version', '<Version key not provided>')
if version != 1:
raise CredentialRetrievalError(
provider=self.METHOD,
error_msg=("Unsupported version '%s' for credential process "
"provider, supported versions: 1" % version))
try:
return {
'access_key': parsed['AccessKeyId'],
'secret_key': parsed['SecretAccessKey'],
'token': parsed.get('SessionToken'),
'expiry_time': parsed.get('Expiration'),
}
except KeyError as e:
raise CredentialRetrievalError(
provider=self.METHOD,
error_msg="Missing required key in response: %s" % e
)
class AioInstanceMetadataProvider(InstanceMetadataProvider):
async def load(self):
fetcher = self._role_fetcher
metadata = await fetcher.retrieve_iam_role_credentials()
if not metadata:
return None
logger.debug('Found credentials from IAM Role: %s',
metadata['role_name'])
creds = AioRefreshableCredentials.create_from_metadata(
metadata,
method=self.METHOD,
refresh_using=fetcher.retrieve_iam_role_credentials,
)
return creds
class AioEnvProvider(EnvProvider):
async def load(self):
# It gets credentials from an env var,
# so just convert the response to Aio variants
result = super().load()
if isinstance(result, RefreshableCredentials):
return AioRefreshableCredentials.\
from_refreshable_credentials(result)
elif isinstance(result, Credentials):
return AioCredentials.from_credentials(result)
return None
class AioOriginalEC2Provider(OriginalEC2Provider):
async def load(self):
result = super(AioOriginalEC2Provider, self).load()
if isinstance(result, Credentials):
result = AioCredentials.from_credentials(result)
return result
class AioSharedCredentialProvider(SharedCredentialProvider):
async def load(self):
result = super(AioSharedCredentialProvider, self).load()
if isinstance(result, Credentials):
result = AioCredentials.from_credentials(result)
return result
class AioConfigProvider(ConfigProvider):
async def load(self):
result = super(AioConfigProvider, self).load()
if isinstance(result, Credentials):
result = AioCredentials.from_credentials(result)
return result
class AioBotoProvider(BotoProvider):
async def load(self):
result = super(AioBotoProvider, self).load()
if isinstance(result, Credentials):
result = AioCredentials.from_credentials(result)
return result
class AioAssumeRoleProvider(AssumeRoleProvider):
async def load(self):
self._loaded_config = self._load_config()
profiles = self._loaded_config.get('profiles', {})
profile = profiles.get(self._profile_name, {})
if self._has_assume_role_config_vars(profile):
return await self._load_creds_via_assume_role(self._profile_name)
async def _load_creds_via_assume_role(self, profile_name):
role_config = self._get_role_config(profile_name)
source_credentials = await self._resolve_source_credentials(
role_config, profile_name
)
extra_args = {}
role_session_name = role_config.get('role_session_name')
if role_session_name is not None:
extra_args['RoleSessionName'] = role_session_name
external_id = role_config.get('external_id')
if external_id is not None:
extra_args['ExternalId'] = external_id
mfa_serial = role_config.get('mfa_serial')
if mfa_serial is not None:
extra_args['SerialNumber'] = mfa_serial
duration_seconds = role_config.get('duration_seconds')
if duration_seconds is not None:
extra_args['DurationSeconds'] = duration_seconds
fetcher = AioAssumeRoleCredentialFetcher(
client_creator=self._client_creator,
source_credentials=source_credentials,
role_arn=role_config['role_arn'],
extra_args=extra_args,
mfa_prompter=self._prompter,
cache=self.cache,
)
refresher = fetcher.fetch_credentials
if mfa_serial is not None:
refresher = create_aio_mfa_serial_refresher(refresher)
# The initial credentials are empty and the expiration time is set
# to now so that we can delay the call to assume role until it is
# strictly needed.
return AioDeferredRefreshableCredentials(
method=self.METHOD,
refresh_using=refresher,
time_fetcher=_local_now
)
async def _resolve_source_credentials(self, role_config, profile_name):
credential_source = role_config.get('credential_source')
if credential_source is not None:
return await self._resolve_credentials_from_source(
credential_source, profile_name
)
source_profile = role_config['source_profile']
self._visited_profiles.append(source_profile)
return await self._resolve_credentials_from_profile(source_profile)
async def _resolve_credentials_from_profile(self, profile_name):
profiles = self._loaded_config.get('profiles', {})
profile = profiles[profile_name]
if self._has_static_credentials(profile) and \
not self._profile_provider_builder:
return self._resolve_static_credentials_from_profile(profile)
elif self._has_static_credentials(profile) or \
not self._has_assume_role_config_vars(profile):
profile_providers = self._profile_provider_builder.providers(
profile_name=profile_name,
disable_env_vars=True,
)
profile_chain = AioCredentialResolver(profile_providers)
credentials = await profile_chain.load_credentials()
if credentials is None:
error_message = (
'The source profile "%s" must have credentials.'
)
raise InvalidConfigError(
error_msg=error_message % profile_name,
)
return credentials
return self._load_creds_via_assume_role(profile_name)
def _resolve_static_credentials_from_profile(self, profile):
try:
return AioCredentials(
access_key=profile['aws_access_key_id'],
secret_key=profile['aws_secret_access_key'],
token=profile.get('aws_session_token')
)
except KeyError as e:
raise PartialCredentialsError(
provider=self.METHOD, cred_var=str(e))
async def _resolve_credentials_from_source(self, credential_source,
profile_name):
credentials = await self._credential_sourcer.source_credentials(
credential_source)
if credentials is None:
raise CredentialRetrievalError(
provider=credential_source,
error_msg=(
'No credentials found in credential_source referenced '
'in profile %s' % profile_name
)
)
return credentials
class AioAssumeRoleWithWebIdentityProvider(AssumeRoleWithWebIdentityProvider):
async def load(self):
return await self._assume_role_with_web_identity()
async def _assume_role_with_web_identity(self):
token_path = self._get_config('web_identity_token_file')
if not token_path:
return None
token_loader = self._token_loader_cls(token_path)
role_arn = self._get_config('role_arn')
if not role_arn:
error_msg = (
'The provided profile or the current environment is '
'configured to assume role with web identity but has no '
'role ARN configured. Ensure that the profile has the role_arn '
'configuration set or the AWS_ROLE_ARN env var is set.'
)
raise InvalidConfigError(error_msg=error_msg)
extra_args = {}
role_session_name = self._get_config('role_session_name')
if role_session_name is not None:
extra_args['RoleSessionName'] = role_session_name
fetcher = AioAssumeRoleWithWebIdentityCredentialFetcher(
client_creator=self._client_creator,
web_identity_token_loader=token_loader,
role_arn=role_arn,
extra_args=extra_args,
cache=self.cache,
)
# The initial credentials are empty and the expiration time is set
# to now so that we can delay the call to assume role until it is
# strictly needed.
return AioDeferredRefreshableCredentials(
method=self.METHOD,
refresh_using=fetcher.fetch_credentials,
)
class AioCanonicalNameCredentialSourcer(CanonicalNameCredentialSourcer):
async def source_credentials(self, source_name):
"""Loads source credentials based on the provided configuration.
:type source_name: str
:param source_name: The value of credential_source in the config
file. This is the canonical name of the credential provider.
:rtype: Credentials
"""
source = self._get_provider(source_name)
if isinstance(source, AioCredentialResolver):
return await source.load_credentials()
return await source.load()
def _get_provider(self, canonical_name):
"""Return a credential provider by its canonical name.
:type canonical_name: str
:param canonical_name: The canonical name of the provider.
:raises UnknownCredentialError: Raised if no
credential provider by the provided name
is found.
"""
provider = self._get_provider_by_canonical_name(canonical_name)
# The AssumeRole provider should really be part of the SharedConfig
# provider rather than being its own thing, but it is not. It is
# effectively part of both the SharedConfig provider and the
# SharedCredentials provider now due to the way it behaves.
# Therefore if we want either of those providers we should return
# the AssumeRole provider with it.
if canonical_name.lower() in ['sharedconfig', 'sharedcredentials']:
assume_role_provider = self._get_provider_by_method('assume-role')
if assume_role_provider is not None:
# The SharedConfig or SharedCredentials provider may not be
# present if it was removed for some reason, but the
# AssumeRole provider could still be present. In that case,
# return the assume role provider by itself.
if provider is None:
return assume_role_provider
# If both are present, return them both as a
# CredentialResolver so that calling code can treat them as
# a single entity.
return AioCredentialResolver([assume_role_provider, provider])
if provider is None:
raise UnknownCredentialError(name=canonical_name)
return provider
class AioContainerProvider(ContainerProvider):
def __init__(self, *args, **kwargs):
super(AioContainerProvider, self).__init__(*args, **kwargs)
# This will always run if no fetcher arg is provided
if isinstance(self._fetcher, ContainerMetadataFetcher):
self._fetcher = AioContainerMetadataFetcher()
async def load(self):
if self.ENV_VAR in self._environ or self.ENV_VAR_FULL in self._environ:
return await self._retrieve_or_fail()
async def _retrieve_or_fail(self):
if self._provided_relative_uri():
full_uri = self._fetcher.full_url(self._environ[self.ENV_VAR])
else:
full_uri = self._environ[self.ENV_VAR_FULL]
headers = self._build_headers()
fetcher = self._create_fetcher(full_uri, headers)
creds = await fetcher()
return AioRefreshableCredentials(
access_key=creds['access_key'],
secret_key=creds['secret_key'],
token=creds['token'],
method=self.METHOD,
expiry_time=_parse_if_needed(creds['expiry_time']),
refresh_using=fetcher,
)
def _create_fetcher(self, full_uri, headers):
async def fetch_creds():
try:
response = await self._fetcher.retrieve_full_uri(
full_uri, headers=headers)
except MetadataRetrievalError as e:
logger.debug("Error retrieving container metadata: %s", e,
exc_info=True)
raise CredentialRetrievalError(provider=self.METHOD,
error_msg=str(e))
return {
'access_key': response['AccessKeyId'],
'secret_key': response['SecretAccessKey'],
'token': response['Token'],
'expiry_time': response['Expiration'],
}
return fetch_creds
class AioCredentialResolver(CredentialResolver):
async def load_credentials(self):
"""
Goes through the credentials chain, returning the first ``Credentials``
that could be loaded.
"""
# First provider to return a non-None response wins.
for provider in self.providers:
logger.debug("Looking for credentials via: %s", provider.METHOD)
creds = await provider.load()
if creds is not None:
return creds
# If we got here, no credentials could be found.
# This feels like it should be an exception, but historically, ``None``
# is returned.
#
# +1
# -js
return None
class AioSSOCredentialFetcher(AioCachedCredentialFetcher):
_UTC_DATE_FORMAT = '%Y-%m-%dT%H:%M:%SZ'
def __init__(self, start_url, sso_region, role_name, account_id,
client_creator, token_loader=None, cache=None,
expiry_window_seconds=None):
self._client_creator = client_creator
self._sso_region = sso_region
self._role_name = role_name
self._account_id = account_id
self._start_url = start_url
self._token_loader = token_loader
super(AioSSOCredentialFetcher, self).__init__(
cache, expiry_window_seconds
)
def _create_cache_key(self):
args = {
'startUrl': self._start_url,
'roleName': self._role_name,
'accountId': self._account_id,
}
args = json.dumps(args, sort_keys=True, separators=(',', ':'))
argument_hash = sha1(args.encode('utf-8')).hexdigest()
return self._make_file_safe(argument_hash)
def _parse_timestamp(self, timestamp_ms):
# fromtimestamp expects seconds so: milliseconds / 1000 = seconds
timestamp_seconds = timestamp_ms / 1000.0
timestamp = datetime.datetime.fromtimestamp(timestamp_seconds, tzutc())
return timestamp.strftime(self._UTC_DATE_FORMAT)
async def _get_credentials(self):
"""Get credentials by calling SSO get role credentials."""
config = Config(
signature_version=UNSIGNED,
region_name=self._sso_region,
)
async with self._client_creator('sso', config=config) as client:
kwargs = {
'roleName': self._role_name,
'accountId': self._account_id,
'accessToken': self._token_loader(self._start_url),
}
try:
response = await client.get_role_credentials(**kwargs)
except client.exceptions.UnauthorizedException:
raise UnauthorizedSSOTokenError()
credentials = response['roleCredentials']
credentials = {
'ProviderType': 'sso',
'Credentials': {
'AccessKeyId': credentials['accessKeyId'],
'SecretAccessKey': credentials['secretAccessKey'],
'SessionToken': credentials['sessionToken'],
'Expiration': self._parse_timestamp(credentials['expiration']),
}
}
return credentials
class AioSSOProvider(SSOProvider):
async def load(self):
sso_config = self._load_sso_config()
if not sso_config:
return None
sso_fetcher = AioSSOCredentialFetcher(
sso_config['sso_start_url'],
sso_config['sso_region'],
sso_config['sso_role_name'],
sso_config['sso_account_id'],
self._client_creator,
token_loader=SSOTokenLoader(cache=self._token_cache),
cache=self.cache,
)
return AioDeferredRefreshableCredentials(
method=self.METHOD,
refresh_using=sso_fetcher.fetch_credentials,
)
# File: omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiobotocore/paginate.py
from botocore.exceptions import PaginationError
from botocore.paginate import Paginator, PageIterator
from botocore.utils import set_value_from_jmespath, merge_dicts
from botocore.compat import six
import jmespath
import aioitertools
class AioPageIterator(PageIterator):
def __aiter__(self):
return self.__anext__()
async def __anext__(self):
current_kwargs = self._op_kwargs
previous_next_token = None
next_token = dict((key, None) for key in self._input_token)
if self._starting_token is not None:
# If the starting token exists, populate the next_token with the
# values inside it. This ensures that we have the service's
# pagination token on hand if we need to truncate after the
# first response.
next_token = self._parse_starting_token()[0]
# The number of items from result_key we've seen so far.
total_items = 0
first_request = True
primary_result_key = self.result_keys[0]
starting_truncation = 0
self._inject_starting_params(current_kwargs)
while True:
response = await self._make_request(current_kwargs)
parsed = self._extract_parsed_response(response)
if first_request:
# The first request is handled differently. We could
# possibly have a resume/starting token that tells us where
# to index into the retrieved page.
if self._starting_token is not None:
starting_truncation = self._handle_first_request(
parsed, primary_result_key, starting_truncation)
first_request = False
self._record_non_aggregate_key_values(parsed)
else:
# If this isn't the first request, we have already sliced into
# the first request and had to make additional requests after.
# We no longer need to add this to truncation.
starting_truncation = 0
current_response = primary_result_key.search(parsed)
if current_response is None:
current_response = []
num_current_response = len(current_response)
truncate_amount = 0
if self._max_items is not None:
truncate_amount = (total_items + num_current_response) \
- self._max_items
if truncate_amount > 0:
self._truncate_response(parsed, primary_result_key,
truncate_amount, starting_truncation,
next_token)
yield response
break
else:
yield response
total_items += num_current_response
next_token = self._get_next_token(parsed)
if all(t is None for t in next_token.values()):
break
if self._max_items is not None and \
total_items == self._max_items:
# We're on a page boundary so we can set the current
# next token to be the resume token.
self.resume_token = next_token
break
if previous_next_token is not None and \
previous_next_token == next_token:
message = ("The same next token was received "
"twice: %s" % next_token)
raise PaginationError(message=message)
self._inject_token_into_kwargs(current_kwargs, next_token)
previous_next_token = next_token
def result_key_iters(self):
teed_results = aioitertools.tee(self, len(self.result_keys))
return [ResultKeyIterator(i, result_key) for i, result_key
in zip(teed_results, self.result_keys)]
async def build_full_result(self):
complete_result = {}
async for response in self:
page = response
# We want to try to catch operation object pagination
# and format correctly for those. They come in the form
# of a tuple of two elements: (http_response, parsed_response).
# We want the parsed_response as that is what the page iterator
# uses. We can remove it though once operation objects are removed.
if isinstance(response, tuple) and len(response) == 2:
page = response[1]
# We're incrementally building the full response page
# by page. For each page in the response we need to
# inject the necessary components from the page
# into the complete_result.
for result_expression in self.result_keys:
# In order to incrementally update a result key
# we need to search the existing value from complete_result,
# then we need to search the _current_ page for the
# current result key value. Then we append the current
# value onto the existing value, and re-set that value
# as the new value.
result_value = result_expression.search(page)
if result_value is None:
continue
existing_value = result_expression.search(complete_result)
if existing_value is None:
# Set the initial result
set_value_from_jmespath(
complete_result, result_expression.expression,
result_value)
continue
# Now both result_value and existing_value contain something
if isinstance(result_value, list):
existing_value.extend(result_value)
elif isinstance(result_value, (int, float, six.string_types)):
# Modify the existing result with the sum or concatenation
set_value_from_jmespath(
complete_result, result_expression.expression,
existing_value + result_value)
merge_dicts(complete_result, self.non_aggregate_part)
if self.resume_token is not None:
complete_result['NextToken'] = self.resume_token
return complete_result
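    # Usage sketch (operation and bucket name are assumptions):
    # ``build_full_result`` is the single-shot alternative to iterating page
    # by page; it merges the result keys of every page into one dict:
    #
    #     result = await paginator.paginate(Bucket="b").build_full_result()
    #     keys = [obj["Key"] for obj in result.get("Contents", [])]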
async def search(self, expression):
compiled = jmespath.compile(expression)
async for page in self:
results = compiled.search(page)
if isinstance(results, list):
for element in results:
yield element
else:
yield results
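    # JMESPath sketch (illustrative; expression assumes an S3 listing):
    # ``search`` flattens matching elements across all pages into a single
    # async stream:
    #
    #     async for key in paginator.paginate(Bucket="b").search("Contents[].Key"):
    #         print(key)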
class AioPaginator(Paginator):
PAGE_ITERATOR_CLS = AioPageIterator
class ResultKeyIterator:
"""Iterates over the results of paginated responses.
Each iterator is associated with a single result key.
Iterating over this object will give you each element in
the result key list.
:param pages_iterator: An iterator that will give you
pages of results (a ``PageIterator`` class).
:param result_key: The JMESPath expression representing
the result key.
"""
def __init__(self, pages_iterator, result_key):
self._pages_iterator = pages_iterator
self.result_key = result_key
def __aiter__(self):
return self.__anext__()
async def __anext__(self):
async for page in self._pages_iterator:
results = self.result_key.search(page)
if results is None:
results = []
for result in results:
yield result
| 7,699 | Python | 42.75 | 79 | 0.572022 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiobotocore/eventstream.py | from botocore.eventstream import EventStream, EventStreamBuffer
class AioEventStream(EventStream):
async def _create_raw_event_generator(self):
event_stream_buffer = EventStreamBuffer()
async for chunk, _ in self._raw_stream.iter_chunks():
event_stream_buffer.add_data(chunk)
for event in event_stream_buffer:
yield event
def __iter__(self):
raise NotImplementedError('Use async-for instead')
def __aiter__(self):
return self.__anext__()
async def __anext__(self):
async for event in self._event_generator:
parsed_event = self._parse_event(event)
if parsed_event:
yield parsed_event
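# Consumption sketch (illustrative; the operation and payload key are
# assumptions based on typical streaming APIs such as S3 select): the parsed
# response exposes an AioEventStream that is drained with async-for:
#
#     resp = await client.select_object_content(...)  # parameters elided
#     async for event in resp["Payload"]:
#         if "Records" in event:
#             chunk = event["Records"]["Payload"]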
| 724 | Python | 30.521738 | 63 | 0.620166 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiobotocore/client.py | from botocore.awsrequest import prepare_request_dict
from botocore.client import logger, PaginatorDocstring, ClientCreator, \
BaseClient, ClientEndpointBridge, S3ArnParamHandler, S3EndpointSetter
from botocore.exceptions import OperationNotPageableError
from botocore.history import get_global_history_recorder
from botocore.utils import get_service_module_name
from botocore.waiter import xform_name
from botocore.hooks import first_non_none_response
from .paginate import AioPaginator
from .args import AioClientArgsCreator
from .utils import AioS3RegionRedirector
from . import waiter
history_recorder = get_global_history_recorder()
class AioClientCreator(ClientCreator):
async def create_client(self, service_name, region_name, is_secure=True,
endpoint_url=None, verify=None,
credentials=None, scoped_config=None,
api_version=None,
client_config=None):
responses = await self._event_emitter.emit(
'choose-service-name', service_name=service_name)
service_name = first_non_none_response(responses, default=service_name)
service_model = self._load_service_model(service_name, api_version)
cls = await self._create_client_class(service_name, service_model)
endpoint_bridge = ClientEndpointBridge(
self._endpoint_resolver, scoped_config, client_config,
service_signing_name=service_model.metadata.get('signingName'))
client_args = self._get_client_args(
service_model, region_name, is_secure, endpoint_url,
verify, credentials, scoped_config, client_config, endpoint_bridge)
service_client = cls(**client_args)
self._register_retries(service_client)
self._register_s3_events(
service_client, endpoint_bridge, endpoint_url, client_config,
scoped_config)
self._register_endpoint_discovery(
service_client, endpoint_url, client_config
)
return service_client
async def _create_client_class(self, service_name, service_model):
class_attributes = self._create_methods(service_model)
py_name_to_operation_name = self._create_name_mapping(service_model)
class_attributes['_PY_TO_OP_NAME'] = py_name_to_operation_name
bases = [AioBaseClient]
service_id = service_model.service_id.hyphenize()
await self._event_emitter.emit(
'creating-client-class.%s' % service_id,
class_attributes=class_attributes,
base_classes=bases)
class_name = get_service_module_name(service_model)
cls = type(str(class_name), tuple(bases), class_attributes)
return cls
def _register_s3_events(self, client, endpoint_bridge, endpoint_url,
client_config, scoped_config):
if client.meta.service_model.service_name != 's3':
return
AioS3RegionRedirector(endpoint_bridge, client).register()
S3ArnParamHandler().register(client.meta.events)
S3EndpointSetter(
endpoint_resolver=self._endpoint_resolver,
region=client.meta.region_name,
s3_config=client.meta.config.s3,
endpoint_url=endpoint_url,
partition=client.meta.partition
).register(client.meta.events)
self._set_s3_presign_signature_version(
client.meta, client_config, scoped_config)
def _get_client_args(self, service_model, region_name, is_secure,
endpoint_url, verify, credentials,
scoped_config, client_config, endpoint_bridge):
# This is a near copy of ClientCreator. What's replaced
# is ClientArgsCreator->AioClientArgsCreator
args_creator = AioClientArgsCreator(
self._event_emitter, self._user_agent,
self._response_parser_factory, self._loader,
self._exceptions_factory, config_store=self._config_store)
return args_creator.get_client_args(
service_model, region_name, is_secure, endpoint_url,
verify, credentials, scoped_config, client_config, endpoint_bridge)
class AioBaseClient(BaseClient):
async def _async_getattr(self, item):
event_name = 'getattr.%s.%s' % (
self._service_model.service_id.hyphenize(), item
)
handler, event_response = await self.meta.events.emit_until_response(
event_name, client=self)
return event_response
def __getattr__(self, item):
        # NOTE: we cannot reliably support this because if we were to make this a
        # deferred attrgetter (see #803), it would result in hasattr always
        # returning True. This ends up breaking ddtrace, for example, when it
        # tries to set a pin.
raise AttributeError(
"'%s' object has no attribute '%s'" % (self.__class__.__name__, item))
async def _make_api_call(self, operation_name, api_params):
operation_model = self._service_model.operation_model(operation_name)
service_name = self._service_model.service_name
history_recorder.record('API_CALL', {
'service': service_name,
'operation': operation_name,
'params': api_params,
})
if operation_model.deprecated:
logger.debug('Warning: %s.%s() is deprecated',
service_name, operation_name)
request_context = {
'client_region': self.meta.region_name,
'client_config': self.meta.config,
'has_streaming_input': operation_model.has_streaming_input,
'auth_type': operation_model.auth_type,
}
request_dict = await self._convert_to_request_dict(
api_params, operation_model, context=request_context)
service_id = self._service_model.service_id.hyphenize()
handler, event_response = await self.meta.events.emit_until_response(
'before-call.{service_id}.{operation_name}'.format(
service_id=service_id,
operation_name=operation_name),
model=operation_model, params=request_dict,
request_signer=self._request_signer, context=request_context)
if event_response is not None:
http, parsed_response = event_response
else:
http, parsed_response = await self._make_request(
operation_model, request_dict, request_context)
await self.meta.events.emit(
'after-call.{service_id}.{operation_name}'.format(
service_id=service_id,
operation_name=operation_name),
http_response=http, parsed=parsed_response,
model=operation_model, context=request_context
)
if http.status_code >= 300:
error_code = parsed_response.get("Error", {}).get("Code")
error_class = self.exceptions.from_code(error_code)
raise error_class(parsed_response, operation_name)
else:
return parsed_response
async def _make_request(self, operation_model, request_dict, request_context):
try:
return await self._endpoint.make_request(operation_model, request_dict)
except Exception as e:
await self.meta.events.emit(
'after-call-error.{service_id}.{operation_name}'.format(
service_id=self._service_model.service_id.hyphenize(),
operation_name=operation_model.name),
exception=e, context=request_context
)
raise
async def _convert_to_request_dict(self, api_params, operation_model,
context=None):
api_params = await self._emit_api_params(
api_params, operation_model, context)
request_dict = self._serializer.serialize_to_request(
api_params, operation_model)
if not self._client_config.inject_host_prefix:
request_dict.pop('host_prefix', None)
prepare_request_dict(request_dict, endpoint_url=self._endpoint.host,
user_agent=self._client_config.user_agent,
context=context)
return request_dict
async def _emit_api_params(self, api_params, operation_model, context):
# Given the API params provided by the user and the operation_model
# we can serialize the request to a request_dict.
operation_name = operation_model.name
# Emit an event that allows users to modify the parameters at the
# beginning of the method. It allows handlers to modify existing
# parameters or return a new set of parameters to use.
service_id = self._service_model.service_id.hyphenize()
responses = await self.meta.events.emit(
'provide-client-params.{service_id}.{operation_name}'.format(
service_id=service_id,
operation_name=operation_name),
params=api_params, model=operation_model, context=context)
api_params = first_non_none_response(responses, default=api_params)
event_name = (
'before-parameter-build.{service_id}.{operation_name}')
await self.meta.events.emit(
event_name.format(
service_id=service_id,
operation_name=operation_name),
params=api_params, model=operation_model, context=context)
return api_params
def get_paginator(self, operation_name):
"""Create a paginator for an operation.
:type operation_name: string
:param operation_name: The operation name. This is the same name
as the method name on the client. For example, if the
method name is ``create_foo``, and you'd normally invoke the
operation as ``client.create_foo(**kwargs)``, if the
``create_foo`` operation can be paginated, you can use the
call ``client.get_paginator("create_foo")``.
:raise OperationNotPageableError: Raised if the operation is not
pageable. You can use the ``client.can_paginate`` method to
check if an operation is pageable.
:rtype: L{botocore.paginate.Paginator}
:return: A paginator object.
"""
if not self.can_paginate(operation_name):
raise OperationNotPageableError(operation_name=operation_name)
else:
actual_operation_name = self._PY_TO_OP_NAME[operation_name]
# Create a new paginate method that will serve as a proxy to
# the underlying Paginator.paginate method. This is needed to
# attach a docstring to the method.
def paginate(self, **kwargs):
return AioPaginator.paginate(self, **kwargs)
paginator_config = self._cache['page_config'][
actual_operation_name]
# Add the docstring for the paginate method.
paginate.__doc__ = PaginatorDocstring(
paginator_name=actual_operation_name,
event_emitter=self.meta.events,
service_model=self.meta.service_model,
paginator_config=paginator_config,
include_signature=False
)
# Rename the paginator class based on the type of paginator.
paginator_class_name = str('%s.Paginator.%s' % (
get_service_module_name(self.meta.service_model),
actual_operation_name))
# Create the new paginator class
documented_paginator_cls = type(
paginator_class_name, (AioPaginator,), {'paginate': paginate})
operation_model = self._service_model.operation_model(actual_operation_name)
paginator = documented_paginator_cls(
getattr(self, operation_name),
paginator_config,
operation_model)
return paginator
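    # Example (mirrors the docstring above; the operation name is
    # illustrative), noting that pages are consumed with async-for rather
    # than a plain for loop:
    #
    #     paginator = client.get_paginator("describe_instances")
    #     async for page in paginator.paginate():
    #         ...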
    # NOTE: this method does not differ from botocore; however, it's important
    # to keep it here because the "waiter" name resolves to our own asyncio
    # waiter module
def get_waiter(self, waiter_name):
"""Returns an object that can wait for some condition.
:type waiter_name: str
:param waiter_name: The name of the waiter to get. See the waiters
section of the service docs for a list of available waiters.
:returns: The specified waiter object.
:rtype: botocore.waiter.Waiter
"""
config = self._get_waiter_config()
if not config:
raise ValueError("Waiter does not exist: %s" % waiter_name)
model = waiter.WaiterModel(config)
mapping = {}
for name in model.waiter_names:
mapping[xform_name(name)] = name
if waiter_name not in mapping:
raise ValueError("Waiter does not exist: %s" % waiter_name)
return waiter.create_waiter_with_client(
mapping[waiter_name], model, self)
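    # Waiter sketch (waiter and parameter names are assumptions): unlike
    # botocore, ``wait`` here is a coroutine and must be awaited:
    #
    #     waiter = client.get_waiter("bucket_exists")
    #     await waiter.wait(Bucket="my-bucket")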
async def __aenter__(self):
await self._endpoint.http_session.__aenter__()
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self._endpoint.http_session.__aexit__(exc_type, exc_val, exc_tb)
async def close(self):
"""Close all http connections."""
return await self._endpoint.http_session.close()
| 13,609 | Python | 44.366667 | 88 | 0.623117 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiobotocore/session.py | from botocore.session import Session, EVENT_ALIASES, ServiceModel, UnknownServiceError
from botocore import UNSIGNED
from botocore import retryhandler, translate
from botocore.exceptions import PartialCredentialsError
from .client import AioClientCreator, AioBaseClient
from .hooks import AioHierarchicalEmitter
from .parsers import AioResponseParserFactory
from .signers import add_generate_presigned_url, add_generate_presigned_post, \
add_generate_db_auth_token
from .credentials import create_credential_resolver, AioCredentials
class ClientCreatorContext:
def __init__(self, coro):
self._coro = coro
self._client = None
async def __aenter__(self) -> AioBaseClient:
self._client = await self._coro
return await self._client.__aenter__()
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self._client.__aexit__(exc_type, exc_val, exc_tb)
class AioSession(Session):
# noinspection PyMissingConstructor
def __init__(self, session_vars=None, event_hooks=None,
include_builtin_handlers=True, profile=None):
if event_hooks is None:
event_hooks = AioHierarchicalEmitter()
super().__init__(session_vars, event_hooks, include_builtin_handlers, profile)
# Register our own handlers. These normally happen via
# `botocore.handlers.BUILTIN_HANDLERS`
self.register('creating-client-class', add_generate_presigned_url)
self.register('creating-client-class.s3', add_generate_presigned_post)
        self.register('creating-client-class.rds', add_generate_db_auth_token)
def _register_response_parser_factory(self):
self._components.register_component('response_parser_factory',
AioResponseParserFactory())
def create_client(self, *args, **kwargs):
return ClientCreatorContext(self._create_client(*args, **kwargs))
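    # Because ``create_client`` returns a ClientCreatorContext rather than a
    # client, the intended call pattern is an async context manager (the
    # service and region are illustrative):
    #
    #     async with session.create_client("s3", region_name="us-east-1") as client:
    #         await client.list_buckets()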
async def _create_client(self, service_name, region_name=None,
api_version=None,
use_ssl=True, verify=None, endpoint_url=None,
aws_access_key_id=None, aws_secret_access_key=None,
aws_session_token=None, config=None):
default_client_config = self.get_default_client_config()
# If a config is provided and a default config is set, then
# use the config resulting from merging the two.
if config is not None and default_client_config is not None:
config = default_client_config.merge(config)
# If a config was not provided then use the default
# client config from the session
elif default_client_config is not None:
config = default_client_config
region_name = self._resolve_region_name(region_name, config)
# Figure out the verify value base on the various
# configuration options.
if verify is None:
verify = self.get_config_variable('ca_bundle')
if api_version is None:
api_version = self.get_config_variable('api_versions').get(
service_name, None)
loader = self.get_component('data_loader')
event_emitter = self.get_component('event_emitter')
response_parser_factory = self.get_component(
'response_parser_factory')
if config is not None and config.signature_version is UNSIGNED:
credentials = None
elif aws_access_key_id is not None and \
aws_secret_access_key is not None:
credentials = AioCredentials(
access_key=aws_access_key_id,
secret_key=aws_secret_access_key,
token=aws_session_token)
elif self._missing_cred_vars(aws_access_key_id,
aws_secret_access_key):
raise PartialCredentialsError(
provider='explicit',
cred_var=self._missing_cred_vars(aws_access_key_id,
aws_secret_access_key))
else:
credentials = await self.get_credentials()
endpoint_resolver = self._get_internal_component('endpoint_resolver')
exceptions_factory = self._get_internal_component('exceptions_factory')
config_store = self.get_component('config_store')
client_creator = AioClientCreator(
loader, endpoint_resolver, self.user_agent(), event_emitter,
retryhandler, translate, response_parser_factory,
exceptions_factory, config_store)
client = await client_creator.create_client(
service_name=service_name, region_name=region_name,
is_secure=use_ssl, endpoint_url=endpoint_url, verify=verify,
credentials=credentials, scoped_config=self.get_scoped_config(),
client_config=config, api_version=api_version)
monitor = self._get_internal_component('monitor')
if monitor is not None:
monitor.register(client.meta.events)
return client
def _create_credential_resolver(self):
return create_credential_resolver(
self, region_name=self._last_client_region_used)
async def get_credentials(self):
if self._credentials is None:
self._credentials = await (self._components.get_component(
'credential_provider').load_credentials())
return self._credentials
def set_credentials(self, access_key, secret_key, token=None):
self._credentials = AioCredentials(access_key, secret_key, token)
async def get_service_model(self, service_name, api_version=None):
service_description = await self.get_service_data(service_name, api_version)
return ServiceModel(service_description, service_name=service_name)
async def get_service_data(self, service_name, api_version=None):
"""
Retrieve the fully merged data associated with a service.
"""
data_path = service_name
service_data = self.get_component('data_loader').load_service_model(
data_path,
type_name='service-2',
api_version=api_version
)
service_id = EVENT_ALIASES.get(service_name, service_name)
await self._events.emit('service-data-loaded.%s' % service_id,
service_data=service_data,
service_name=service_name, session=self)
return service_data
async def get_available_regions(self, service_name, partition_name='aws',
allow_non_regional=False):
resolver = self._get_internal_component('endpoint_resolver')
results = []
try:
service_data = await self.get_service_data(service_name)
endpoint_prefix = service_data['metadata'].get(
'endpointPrefix', service_name)
results = resolver.get_available_endpoints(
endpoint_prefix, partition_name, allow_non_regional)
except UnknownServiceError:
pass
return results
def get_session(env_vars=None):
"""
Return a new session object.
"""
return AioSession(env_vars)
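# Entry-point sketch (illustrative): ``get_session`` is the asyncio analogue
# of ``botocore.session.get_session``:
#
#     session = get_session()
#     async with session.create_client("sqs") as client:
#         ...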
| 7,266 | Python | 42.51497 | 86 | 0.630333 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiobotocore/parsers.py | from botocore.parsers import ResponseParserFactory, RestXMLParser, \
RestJSONParser, JSONParser, QueryParser, EC2QueryParser
from .eventstream import AioEventStream
class AioRestXMLParser(RestXMLParser):
def _create_event_stream(self, response, shape):
parser = self._event_stream_parser
name = response['context'].get('operation_name')
return AioEventStream(response['body'], shape, parser, name)
class AioEC2QueryParser(EC2QueryParser):
def _create_event_stream(self, response, shape):
parser = self._event_stream_parser
name = response['context'].get('operation_name')
return AioEventStream(response['body'], shape, parser, name)
class AioQueryParser(QueryParser):
def _create_event_stream(self, response, shape):
parser = self._event_stream_parser
name = response['context'].get('operation_name')
return AioEventStream(response['body'], shape, parser, name)
class AioJSONParser(JSONParser):
def _create_event_stream(self, response, shape):
parser = self._event_stream_parser
name = response['context'].get('operation_name')
return AioEventStream(response['body'], shape, parser, name)
class AioRestJSONParser(RestJSONParser):
def _create_event_stream(self, response, shape):
parser = self._event_stream_parser
name = response['context'].get('operation_name')
return AioEventStream(response['body'], shape, parser, name)
PROTOCOL_PARSERS = {
'ec2': AioEC2QueryParser,
'query': AioQueryParser,
'json': AioJSONParser,
'rest-json': AioRestJSONParser,
'rest-xml': AioRestXMLParser,
}
class AioResponseParserFactory(ResponseParserFactory):
def create_parser(self, protocol_name):
parser_cls = PROTOCOL_PARSERS[protocol_name]
return parser_cls(**self._defaults)
| 1,857 | Python | 33.407407 | 68 | 0.700054 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/certifi/__init__.py | from .core import contents, where
__all__ = ["contents", "where"]
__version__ = "2023.05.07"
| 94 | Python | 17.999996 | 33 | 0.617021 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/certifi/core.py | """
certifi.py
~~~~~~~~~~
This module returns the installation location of cacert.pem or its contents.
"""
import sys
if sys.version_info >= (3, 11):
from importlib.resources import as_file, files
_CACERT_CTX = None
_CACERT_PATH = None
def where() -> str:
# This is slightly terrible, but we want to delay extracting the file
# in cases where we're inside of a zipimport situation until someone
# actually calls where(), but we don't want to re-extract the file
# on every call of where(), so we'll do it once then store it in a
# global variable.
global _CACERT_CTX
global _CACERT_PATH
if _CACERT_PATH is None:
# This is slightly janky, the importlib.resources API wants you to
# manage the cleanup of this file, so it doesn't actually return a
# path, it returns a context manager that will give you the path
# when you enter it and will do any cleanup when you leave it. In
# the common case of not needing a temporary file, it will just
# return the file system location and the __exit__() is a no-op.
#
# We also have to hold onto the actual context manager, because
# it will do the cleanup whenever it gets garbage collected, so
# we will also store that at the global level as well.
_CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
_CACERT_PATH = str(_CACERT_CTX.__enter__())
return _CACERT_PATH
def contents() -> str:
return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")
elif sys.version_info >= (3, 7):
from importlib.resources import path as get_path, read_text
_CACERT_CTX = None
_CACERT_PATH = None
def where() -> str:
# This is slightly terrible, but we want to delay extracting the
# file in cases where we're inside of a zipimport situation until
# someone actually calls where(), but we don't want to re-extract
# the file on every call of where(), so we'll do it once then store
# it in a global variable.
global _CACERT_CTX
global _CACERT_PATH
if _CACERT_PATH is None:
# This is slightly janky, the importlib.resources API wants you
# to manage the cleanup of this file, so it doesn't actually
# return a path, it returns a context manager that will give
# you the path when you enter it and will do any cleanup when
# you leave it. In the common case of not needing a temporary
# file, it will just return the file system location and the
# __exit__() is a no-op.
#
# We also have to hold onto the actual context manager, because
# it will do the cleanup whenever it gets garbage collected, so
# we will also store that at the global level as well.
_CACERT_CTX = get_path("certifi", "cacert.pem")
_CACERT_PATH = str(_CACERT_CTX.__enter__())
return _CACERT_PATH
def contents() -> str:
return read_text("certifi", "cacert.pem", encoding="ascii")
else:
import os
import types
from typing import Union
Package = Union[types.ModuleType, str]
Resource = Union[str, "os.PathLike"]
# This fallback will work for Python versions prior to 3.7 that lack the
# importlib.resources module but relies on the existing `where` function
# so won't address issues with environments like PyOxidizer that don't set
# __file__ on modules.
def read_text(
package: Package,
resource: Resource,
encoding: str = 'utf-8',
errors: str = 'strict'
) -> str:
with open(where(), encoding=encoding) as data:
return data.read()
# If we don't have importlib.resources, then we will just do the old logic
# of assuming we're on the filesystem and munge the path directly.
def where() -> str:
f = os.path.dirname(__file__)
return os.path.join(f, "cacert.pem")
def contents() -> str:
return read_text("certifi", "cacert.pem", encoding="ascii")
| 4,219 | Python | 37.715596 | 82 | 0.620289 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/certifi/__main__.py | import argparse
from certifi import contents, where
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--contents", action="store_true")
args = parser.parse_args()
if args.contents:
print(contents())
else:
print(where())
| 243 | Python | 17.769229 | 60 | 0.711934 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/validators.py | """
Creation and extension of validators, with implementations for existing drafts.
"""
from __future__ import division
from warnings import warn
import contextlib
import json
import numbers
from six import add_metaclass
from jsonschema import (
_legacy_validators,
_types,
_utils,
_validators,
exceptions,
)
from jsonschema.compat import (
Sequence,
int_types,
iteritems,
lru_cache,
str_types,
unquote,
urldefrag,
urljoin,
urlopen,
urlsplit,
)
# Sigh. https://gitlab.com/pycqa/flake8/issues/280
# https://github.com/pyga/ebb-lint/issues/7
# Imported for backwards compatibility.
from jsonschema.exceptions import ErrorTree
ErrorTree
class _DontDoThat(Exception):
"""
Raised when a Validators with non-default type checker is misused.
Asking one for DEFAULT_TYPES doesn't make sense, since type checkers
exist for the unrepresentable cases where DEFAULT_TYPES can't
represent the type relationship.
"""
def __str__(self):
return "DEFAULT_TYPES cannot be used on Validators using TypeCheckers"
validators = {}
meta_schemas = _utils.URIDict()
def _generate_legacy_type_checks(types=()):
"""
Generate newer-style type checks out of JSON-type-name-to-type mappings.
Arguments:
types (dict):
A mapping of type names to their Python types
Returns:
A dictionary of definitions to pass to `TypeChecker`
"""
types = dict(types)
def gen_type_check(pytypes):
pytypes = _utils.flatten(pytypes)
def type_check(checker, instance):
if isinstance(instance, bool):
if bool not in pytypes:
return False
return isinstance(instance, pytypes)
return type_check
definitions = {}
for typename, pytypes in iteritems(types):
definitions[typename] = gen_type_check(pytypes)
return definitions
_DEPRECATED_DEFAULT_TYPES = {
u"array": list,
u"boolean": bool,
u"integer": int_types,
u"null": type(None),
u"number": numbers.Number,
u"object": dict,
u"string": str_types,
}
_TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES = _types.TypeChecker(
type_checkers=_generate_legacy_type_checks(_DEPRECATED_DEFAULT_TYPES),
)
def validates(version):
"""
Register the decorated validator for a ``version`` of the specification.
Registered validators and their meta schemas will be considered when
parsing ``$schema`` properties' URIs.
Arguments:
version (str):
An identifier to use as the version's name
Returns:
collections.Callable:
a class decorator to decorate the validator with the version
"""
def _validates(cls):
validators[version] = cls
meta_schema_id = cls.ID_OF(cls.META_SCHEMA)
if meta_schema_id:
meta_schemas[meta_schema_id] = cls
return cls
return _validates
def _DEFAULT_TYPES(self):
if self._CREATED_WITH_DEFAULT_TYPES is None:
raise _DontDoThat()
warn(
(
"The DEFAULT_TYPES attribute is deprecated. "
"See the type checker attached to this validator instead."
),
DeprecationWarning,
stacklevel=2,
)
return self._DEFAULT_TYPES
class _DefaultTypesDeprecatingMetaClass(type):
DEFAULT_TYPES = property(_DEFAULT_TYPES)
def _id_of(schema):
if schema is True or schema is False:
return u""
return schema.get(u"$id", u"")
def create(
meta_schema,
validators=(),
version=None,
default_types=None,
type_checker=None,
id_of=_id_of,
):
"""
Create a new validator class.
Arguments:
meta_schema (collections.Mapping):
the meta schema for the new validator class
validators (collections.Mapping):
a mapping from names to callables, where each callable will
validate the schema property with the given name.
Each callable should take 4 arguments:
1. a validator instance,
2. the value of the property being validated within the
instance
3. the instance
4. the schema
version (str):
an identifier for the version that this validator class will
validate. If provided, the returned validator class will
have its ``__name__`` set to include the version, and also
will have `jsonschema.validators.validates` automatically
called for the given version.
type_checker (jsonschema.TypeChecker):
a type checker, used when applying the :validator:`type` validator.
If unprovided, a `jsonschema.TypeChecker` will be created
with a set of default types typical of JSON Schema drafts.
default_types (collections.Mapping):
.. deprecated:: 3.0.0
Please use the type_checker argument instead.
If set, it provides mappings of JSON types to Python types
that will be converted to functions and redefined in this
object's `jsonschema.TypeChecker`.
id_of (collections.Callable):
A function that given a schema, returns its ID.
Returns:
a new `jsonschema.IValidator` class
"""
if default_types is not None:
if type_checker is not None:
raise TypeError(
"Do not specify default_types when providing a type checker.",
)
_created_with_default_types = True
warn(
(
"The default_types argument is deprecated. "
"Use the type_checker argument instead."
),
DeprecationWarning,
stacklevel=2,
)
type_checker = _types.TypeChecker(
type_checkers=_generate_legacy_type_checks(default_types),
)
else:
default_types = _DEPRECATED_DEFAULT_TYPES
if type_checker is None:
_created_with_default_types = False
type_checker = _TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES
elif type_checker is _TYPE_CHECKER_FOR_DEPRECATED_DEFAULT_TYPES:
_created_with_default_types = False
else:
_created_with_default_types = None
@add_metaclass(_DefaultTypesDeprecatingMetaClass)
class Validator(object):
VALIDATORS = dict(validators)
META_SCHEMA = dict(meta_schema)
TYPE_CHECKER = type_checker
ID_OF = staticmethod(id_of)
DEFAULT_TYPES = property(_DEFAULT_TYPES)
_DEFAULT_TYPES = dict(default_types)
_CREATED_WITH_DEFAULT_TYPES = _created_with_default_types
def __init__(
self,
schema,
types=(),
resolver=None,
format_checker=None,
):
if types:
warn(
(
"The types argument is deprecated. Provide "
"a type_checker to jsonschema.validators.extend "
"instead."
),
DeprecationWarning,
stacklevel=2,
)
self.TYPE_CHECKER = self.TYPE_CHECKER.redefine_many(
_generate_legacy_type_checks(types),
)
if resolver is None:
resolver = RefResolver.from_schema(schema, id_of=id_of)
self.resolver = resolver
self.format_checker = format_checker
self.schema = schema
@classmethod
def check_schema(cls, schema):
for error in cls(cls.META_SCHEMA).iter_errors(schema):
raise exceptions.SchemaError.create_from(error)
def iter_errors(self, instance, _schema=None):
if _schema is None:
_schema = self.schema
if _schema is True:
return
elif _schema is False:
yield exceptions.ValidationError(
"False schema does not allow %r" % (instance,),
validator=None,
validator_value=None,
instance=instance,
schema=_schema,
)
return
scope = id_of(_schema)
if scope:
self.resolver.push_scope(scope)
try:
ref = _schema.get(u"$ref")
if ref is not None:
validators = [(u"$ref", ref)]
else:
validators = iteritems(_schema)
for k, v in validators:
validator = self.VALIDATORS.get(k)
if validator is None:
continue
errors = validator(self, v, instance, _schema) or ()
for error in errors:
# set details if not already set by the called fn
error._set(
validator=k,
validator_value=v,
instance=instance,
schema=_schema,
)
if k != u"$ref":
error.schema_path.appendleft(k)
yield error
finally:
if scope:
self.resolver.pop_scope()
def descend(self, instance, schema, path=None, schema_path=None):
for error in self.iter_errors(instance, schema):
if path is not None:
error.path.appendleft(path)
if schema_path is not None:
error.schema_path.appendleft(schema_path)
yield error
def validate(self, *args, **kwargs):
for error in self.iter_errors(*args, **kwargs):
raise error
def is_type(self, instance, type):
try:
return self.TYPE_CHECKER.is_type(instance, type)
except exceptions.UndefinedTypeCheck:
raise exceptions.UnknownType(type, instance, self.schema)
def is_valid(self, instance, _schema=None):
error = next(self.iter_errors(instance, _schema), None)
return error is None
if version is not None:
Validator = validates(version)(Validator)
Validator.__name__ = version.title().replace(" ", "") + "Validator"
return Validator
def extend(validator, validators=(), version=None, type_checker=None):
"""
Create a new validator class by extending an existing one.
Arguments:
validator (jsonschema.IValidator):
an existing validator class
validators (collections.Mapping):
a mapping of new validator callables to extend with, whose
structure is as in `create`.
.. note::
Any validator callables with the same name as an
existing one will (silently) replace the old validator
callable entirely, effectively overriding any validation
done in the "parent" validator class.
If you wish to instead extend the behavior of a parent's
validator callable, delegate and call it directly in
the new validator function by retrieving it using
``OldValidator.VALIDATORS["validator_name"]``.
version (str):
a version for the new validator class
type_checker (jsonschema.TypeChecker):
a type checker, used when applying the :validator:`type` validator.
If unprovided, the type checker of the extended
            `jsonschema.IValidator` will be carried along.
Returns:
a new `jsonschema.IValidator` class extending the one provided
.. note:: Meta Schemas
The new validator class will have its parent's meta schema.
If you wish to change or extend the meta schema in the new
validator class, modify ``META_SCHEMA`` directly on the returned
class. Note that no implicit copying is done, so a copy should
likely be made before modifying it, in order to not affect the
old validator.
"""
all_validators = dict(validator.VALIDATORS)
all_validators.update(validators)
if type_checker is None:
type_checker = validator.TYPE_CHECKER
elif validator._CREATED_WITH_DEFAULT_TYPES:
raise TypeError(
"Cannot extend a validator created with default_types "
"with a type_checker. Update the validator to use a "
"type_checker when created."
)
return create(
meta_schema=validator.META_SCHEMA,
validators=all_validators,
version=version,
type_checker=type_checker,
id_of=validator.ID_OF,
)
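# Extension sketch (the validator callable and schema keyword are
# illustrative, following the callable signature documented in ``create``):
#
#     def is_positive(validator, value, instance, schema):
#         if value and isinstance(instance, (int, float)) and instance <= 0:
#             yield exceptions.ValidationError("%r is not positive" % (instance,))
#
#     PositiveValidator = extend(Draft7Validator, {"isPositive": is_positive})
#     PositiveValidator({"isPositive": True}).validate(5)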
Draft3Validator = create(
meta_schema=_utils.load_schema("draft3"),
validators={
u"$ref": _validators.ref,
u"additionalItems": _validators.additionalItems,
u"additionalProperties": _validators.additionalProperties,
u"dependencies": _legacy_validators.dependencies_draft3,
u"disallow": _legacy_validators.disallow_draft3,
u"divisibleBy": _validators.multipleOf,
u"enum": _validators.enum,
u"extends": _legacy_validators.extends_draft3,
u"format": _validators.format,
u"items": _legacy_validators.items_draft3_draft4,
u"maxItems": _validators.maxItems,
u"maxLength": _validators.maxLength,
u"maximum": _legacy_validators.maximum_draft3_draft4,
u"minItems": _validators.minItems,
u"minLength": _validators.minLength,
u"minimum": _legacy_validators.minimum_draft3_draft4,
u"pattern": _validators.pattern,
u"patternProperties": _validators.patternProperties,
u"properties": _legacy_validators.properties_draft3,
u"type": _legacy_validators.type_draft3,
u"uniqueItems": _validators.uniqueItems,
},
type_checker=_types.draft3_type_checker,
version="draft3",
id_of=lambda schema: schema.get(u"id", ""),
)
Draft4Validator = create(
meta_schema=_utils.load_schema("draft4"),
validators={
u"$ref": _validators.ref,
u"additionalItems": _validators.additionalItems,
u"additionalProperties": _validators.additionalProperties,
u"allOf": _validators.allOf,
u"anyOf": _validators.anyOf,
u"dependencies": _validators.dependencies,
u"enum": _validators.enum,
u"format": _validators.format,
u"items": _legacy_validators.items_draft3_draft4,
u"maxItems": _validators.maxItems,
u"maxLength": _validators.maxLength,
u"maxProperties": _validators.maxProperties,
u"maximum": _legacy_validators.maximum_draft3_draft4,
u"minItems": _validators.minItems,
u"minLength": _validators.minLength,
u"minProperties": _validators.minProperties,
u"minimum": _legacy_validators.minimum_draft3_draft4,
u"multipleOf": _validators.multipleOf,
u"not": _validators.not_,
u"oneOf": _validators.oneOf,
u"pattern": _validators.pattern,
u"patternProperties": _validators.patternProperties,
u"properties": _validators.properties,
u"required": _validators.required,
u"type": _validators.type,
u"uniqueItems": _validators.uniqueItems,
},
type_checker=_types.draft4_type_checker,
version="draft4",
id_of=lambda schema: schema.get(u"id", ""),
)
Draft6Validator = create(
meta_schema=_utils.load_schema("draft6"),
validators={
u"$ref": _validators.ref,
u"additionalItems": _validators.additionalItems,
u"additionalProperties": _validators.additionalProperties,
u"allOf": _validators.allOf,
u"anyOf": _validators.anyOf,
u"const": _validators.const,
u"contains": _validators.contains,
u"dependencies": _validators.dependencies,
u"enum": _validators.enum,
u"exclusiveMaximum": _validators.exclusiveMaximum,
u"exclusiveMinimum": _validators.exclusiveMinimum,
u"format": _validators.format,
u"items": _validators.items,
u"maxItems": _validators.maxItems,
u"maxLength": _validators.maxLength,
u"maxProperties": _validators.maxProperties,
u"maximum": _validators.maximum,
u"minItems": _validators.minItems,
u"minLength": _validators.minLength,
u"minProperties": _validators.minProperties,
u"minimum": _validators.minimum,
u"multipleOf": _validators.multipleOf,
u"not": _validators.not_,
u"oneOf": _validators.oneOf,
u"pattern": _validators.pattern,
u"patternProperties": _validators.patternProperties,
u"properties": _validators.properties,
u"propertyNames": _validators.propertyNames,
u"required": _validators.required,
u"type": _validators.type,
u"uniqueItems": _validators.uniqueItems,
},
type_checker=_types.draft6_type_checker,
version="draft6",
)
Draft7Validator = create(
meta_schema=_utils.load_schema("draft7"),
validators={
u"$ref": _validators.ref,
u"additionalItems": _validators.additionalItems,
u"additionalProperties": _validators.additionalProperties,
u"allOf": _validators.allOf,
u"anyOf": _validators.anyOf,
u"const": _validators.const,
u"contains": _validators.contains,
u"dependencies": _validators.dependencies,
u"enum": _validators.enum,
u"exclusiveMaximum": _validators.exclusiveMaximum,
u"exclusiveMinimum": _validators.exclusiveMinimum,
u"format": _validators.format,
u"if": _validators.if_,
u"items": _validators.items,
u"maxItems": _validators.maxItems,
u"maxLength": _validators.maxLength,
u"maxProperties": _validators.maxProperties,
u"maximum": _validators.maximum,
u"minItems": _validators.minItems,
u"minLength": _validators.minLength,
u"minProperties": _validators.minProperties,
u"minimum": _validators.minimum,
u"multipleOf": _validators.multipleOf,
u"oneOf": _validators.oneOf,
u"not": _validators.not_,
u"pattern": _validators.pattern,
u"patternProperties": _validators.patternProperties,
u"properties": _validators.properties,
u"propertyNames": _validators.propertyNames,
u"required": _validators.required,
u"type": _validators.type,
u"uniqueItems": _validators.uniqueItems,
},
type_checker=_types.draft7_type_checker,
version="draft7",
)
_LATEST_VERSION = Draft7Validator
class RefResolver(object):
"""
Resolve JSON References.
Arguments:
base_uri (str):
The URI of the referring document
referrer:
The actual referring document
store (dict):
A mapping from URIs to documents to cache
cache_remote (bool):
Whether remote refs should be cached after first resolution
handlers (dict):
A mapping from URI schemes to functions that should be used
to retrieve them
urljoin_cache (:func:`functools.lru_cache`):
A cache that will be used for caching the results of joining
the resolution scope to subscopes.
remote_cache (:func:`functools.lru_cache`):
A cache that will be used for caching the results of
resolved remote URLs.
Attributes:
cache_remote (bool):
Whether remote refs should be cached after first resolution
"""
def __init__(
self,
base_uri,
referrer,
store=(),
cache_remote=True,
handlers=(),
urljoin_cache=None,
remote_cache=None,
):
if urljoin_cache is None:
urljoin_cache = lru_cache(1024)(urljoin)
if remote_cache is None:
remote_cache = lru_cache(1024)(self.resolve_from_url)
self.referrer = referrer
self.cache_remote = cache_remote
self.handlers = dict(handlers)
self._scopes_stack = [base_uri]
self.store = _utils.URIDict(
(id, validator.META_SCHEMA)
for id, validator in iteritems(meta_schemas)
)
self.store.update(store)
self.store[base_uri] = referrer
self._urljoin_cache = urljoin_cache
self._remote_cache = remote_cache
@classmethod
def from_schema(cls, schema, id_of=_id_of, *args, **kwargs):
"""
Construct a resolver from a JSON schema object.
Arguments:
schema:
the referring schema
Returns:
`RefResolver`
"""
return cls(base_uri=id_of(schema), referrer=schema, *args, **kwargs)
def push_scope(self, scope):
"""
Enter a given sub-scope.
Treats further dereferences as being performed underneath the
given scope.
"""
self._scopes_stack.append(
self._urljoin_cache(self.resolution_scope, scope),
)
def pop_scope(self):
"""
Exit the most recent entered scope.
Treats further dereferences as being performed underneath the
original scope.
Don't call this method more times than `push_scope` has been
called.
"""
try:
self._scopes_stack.pop()
except IndexError:
raise exceptions.RefResolutionError(
"Failed to pop the scope from an empty stack. "
"`pop_scope()` should only be called once for every "
"`push_scope()`"
)
@property
def resolution_scope(self):
"""
Retrieve the current resolution scope.
"""
return self._scopes_stack[-1]
@property
def base_uri(self):
"""
Retrieve the current base URI, not including any fragment.
"""
uri, _ = urldefrag(self.resolution_scope)
return uri
@contextlib.contextmanager
def in_scope(self, scope):
"""
Temporarily enter the given scope for the duration of the context.
"""
self.push_scope(scope)
try:
yield
finally:
self.pop_scope()
@contextlib.contextmanager
def resolving(self, ref):
"""
Resolve the given ``ref`` and enter its resolution scope.
Exits the scope on exit of this context manager.
Arguments:
ref (str):
The reference to resolve
"""
url, resolved = self.resolve(ref)
self.push_scope(url)
try:
yield resolved
finally:
self.pop_scope()
def resolve(self, ref):
"""
Resolve the given reference.
"""
url = self._urljoin_cache(self.resolution_scope, ref)
return url, self._remote_cache(url)
def resolve_from_url(self, url):
"""
Resolve the given remote URL.
"""
url, fragment = urldefrag(url)
try:
document = self.store[url]
except KeyError:
try:
document = self.resolve_remote(url)
except Exception as exc:
raise exceptions.RefResolutionError(exc)
return self.resolve_fragment(document, fragment)
def resolve_fragment(self, document, fragment):
"""
Resolve a ``fragment`` within the referenced ``document``.
Arguments:
document:
The referent document
fragment (str):
a URI fragment to resolve within it
"""
fragment = fragment.lstrip(u"/")
parts = unquote(fragment).split(u"/") if fragment else []
for part in parts:
part = part.replace(u"~1", u"/").replace(u"~0", u"~")
if isinstance(document, Sequence):
# Array indexes should be turned into integers
try:
part = int(part)
except ValueError:
pass
try:
document = document[part]
except (TypeError, LookupError):
raise exceptions.RefResolutionError(
"Unresolvable JSON pointer: %r" % fragment
)
return document
def resolve_remote(self, uri):
"""
Resolve a remote ``uri``.
If called directly, does not check the store first, but after
retrieving the document at the specified URI it will be saved in
the store if :attr:`cache_remote` is True.
.. note::
If the requests_ library is present, ``jsonschema`` will use it to
request the remote ``uri``, so that the correct encoding is
detected and used.
If it isn't, or if the scheme of the ``uri`` is not ``http`` or
``https``, UTF-8 is assumed.
Arguments:
uri (str):
The URI to resolve
Returns:
The retrieved document
.. _requests: https://pypi.org/project/requests/
"""
try:
import requests
except ImportError:
requests = None
scheme = urlsplit(uri).scheme
if scheme in self.handlers:
result = self.handlers[scheme](uri)
elif scheme in [u"http", u"https"] and requests:
# Requests has support for detecting the correct encoding of
# json over http
result = requests.get(uri).json()
else:
# Otherwise, pass off to urllib and assume utf-8
with urlopen(uri) as url:
result = json.loads(url.read().decode("utf-8"))
if self.cache_remote:
self.store[uri] = result
return result
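    # Handler sketch (the scheme and loader are assumptions): custom URI
    # schemes can be resolved without network access by passing ``handlers``,
    # each of which takes the URI and returns the document:
    #
    #     resolver = RefResolver(
    #         base_uri="", referrer=schema,
    #         handlers={"file": lambda uri: json.load(open(uri[len("file://"):]))},
    #     )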
def validate(instance, schema, cls=None, *args, **kwargs):
"""
Validate an instance under the given schema.
>>> validate([2, 3, 4], {"maxItems": 2})
Traceback (most recent call last):
...
ValidationError: [2, 3, 4] is too long
:func:`validate` will first verify that the provided schema is
itself valid, since not doing so can lead to less obvious error
messages and fail in less obvious or consistent ways.
If you know you have a valid schema already, especially if you
intend to validate multiple instances with the same schema, you
likely would prefer using the `IValidator.validate` method directly
on a specific validator (e.g. ``Draft7Validator.validate``).
Arguments:
instance:
The instance to validate
schema:
The schema to validate with
cls (IValidator):
The class that will be used to validate the instance.
If the ``cls`` argument is not provided, two things will happen
in accordance with the specification. First, if the schema has a
:validator:`$schema` property containing a known meta-schema [#]_
then the proper validator will be used. The specification recommends
that all schemas contain :validator:`$schema` properties for this
reason. If no :validator:`$schema` property is found, the default
validator class is the latest released draft.
Any other provided positional and keyword arguments will be passed
on when instantiating the ``cls``.
Raises:
`jsonschema.exceptions.ValidationError` if the instance
is invalid
`jsonschema.exceptions.SchemaError` if the schema itself
is invalid
.. rubric:: Footnotes
.. [#] known by a validator registered with
`jsonschema.validators.validates`
"""
if cls is None:
cls = validator_for(schema)
cls.check_schema(schema)
validator = cls(schema, *args, **kwargs)
error = exceptions.best_match(validator.iter_errors(instance))
if error is not None:
raise error
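# Per the docstring above, repeated validation against one schema is better
# served by instantiating a validator directly (illustrative):
#
#     schema = {"type": "integer"}
#     Draft7Validator.check_schema(schema)
#     validator = Draft7Validator(schema)
#     validator.validate(12)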
def validator_for(schema, default=_LATEST_VERSION):
"""
Retrieve the validator class appropriate for validating the given schema.
Uses the :validator:`$schema` property that should be present in the
given schema to look up the appropriate validator class.
Arguments:
schema (collections.Mapping or bool):
the schema to look at
default:
the default to return if the appropriate validator class
cannot be determined.
If unprovided, the default is to return the latest supported
draft.
"""
if schema is True or schema is False or u"$schema" not in schema:
return default
if schema[u"$schema"] not in meta_schemas:
warn(
(
"The metaschema specified by $schema was not found. "
"Using the latest draft to validate, but this will raise "
"an error in the future."
),
DeprecationWarning,
stacklevel=2,
)
return meta_schemas.get(schema[u"$schema"], _LATEST_VERSION)
| 29,400 | Python | 29.279094 | 79 | 0.59085 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/_format.py | import datetime
import re
import socket
import struct
from jsonschema.compat import str_types
from jsonschema.exceptions import FormatError
class FormatChecker(object):
"""
A ``format`` property checker.
JSON Schema does not mandate that the ``format`` property actually do any
validation. If validation is desired however, instances of this class can
be hooked into validators to enable format validation.
`FormatChecker` objects always return ``True`` when asked about
formats that they do not know how to validate.
To check a custom format using a function that takes an instance and
returns a ``bool``, use the `FormatChecker.checks` or
`FormatChecker.cls_checks` decorators.
Arguments:
formats (~collections.Iterable):
The known formats to validate. This argument can be used to
limit which formats will be used during validation.
"""
checkers = {}
def __init__(self, formats=None):
if formats is None:
self.checkers = self.checkers.copy()
else:
self.checkers = dict((k, self.checkers[k]) for k in formats)
def __repr__(self):
return "<FormatChecker checkers={}>".format(sorted(self.checkers))
def checks(self, format, raises=()):
"""
Register a decorated function as validating a new format.
Arguments:
format (str):
The format that the decorated function will check.
raises (Exception):
The exception(s) raised by the decorated function when an
invalid instance is found.
The exception object will be accessible as the
`jsonschema.exceptions.ValidationError.cause` attribute of the
resulting validation error.
"""
def _checks(func):
self.checkers[format] = (func, raises)
return func
return _checks
cls_checks = classmethod(checks)
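    # Registration sketch (the format name and predicate are illustrative):
    #
    #     checker = FormatChecker()
    #
    #     @checker.checks("even", raises=TypeError)
    #     def is_even(value):
    #         return not isinstance(value, int) or value % 2 == 0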
def check(self, instance, format):
"""
Check whether the instance conforms to the given format.
Arguments:
instance (*any primitive type*, i.e. str, number, bool):
The instance to check
format (str):
The format that instance should conform to
Raises:
FormatError: if the instance does not conform to ``format``
"""
if format not in self.checkers:
return
func, raises = self.checkers[format]
result, cause = None, None
try:
result = func(instance)
except raises as e:
cause = e
if not result:
raise FormatError(
"%r is not a %r" % (instance, format), cause=cause,
)
def conforms(self, instance, format):
"""
Check whether the instance conforms to the given format.
Arguments:
instance (*any primitive type*, i.e. str, number, bool):
The instance to check
format (str):
The format that instance should conform to
Returns:
bool: whether it conformed
"""
try:
self.check(instance, format)
except FormatError:
return False
else:
return True
draft3_format_checker = FormatChecker()
draft4_format_checker = FormatChecker()
draft6_format_checker = FormatChecker()
draft7_format_checker = FormatChecker()
_draft_checkers = dict(
draft3=draft3_format_checker,
draft4=draft4_format_checker,
draft6=draft6_format_checker,
draft7=draft7_format_checker,
)
def _checks_drafts(
name=None,
draft3=None,
draft4=None,
draft6=None,
draft7=None,
raises=(),
):
draft3 = draft3 or name
draft4 = draft4 or name
draft6 = draft6 or name
draft7 = draft7 or name
def wrap(func):
if draft3:
func = _draft_checkers["draft3"].checks(draft3, raises)(func)
if draft4:
func = _draft_checkers["draft4"].checks(draft4, raises)(func)
if draft6:
func = _draft_checkers["draft6"].checks(draft6, raises)(func)
if draft7:
func = _draft_checkers["draft7"].checks(draft7, raises)(func)
# Oy. This is bad global state, but relied upon for now, until
# deprecation. See https://github.com/Julian/jsonschema/issues/519
# and test_format_checkers_come_with_defaults
FormatChecker.cls_checks(draft7 or draft6 or draft4 or draft3, raises)(
func,
)
return func
return wrap
@_checks_drafts(name="idn-email")
@_checks_drafts(name="email")
def is_email(instance):
if not isinstance(instance, str_types):
return True
return "@" in instance
_ipv4_re = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
@_checks_drafts(
draft3="ip-address", draft4="ipv4", draft6="ipv4", draft7="ipv4",
)
def is_ipv4(instance):
if not isinstance(instance, str_types):
return True
if not _ipv4_re.match(instance):
return False
return all(0 <= int(component) <= 255 for component in instance.split("."))
if hasattr(socket, "inet_pton"):
# FIXME: Really this only should raise struct.error, but see the sadness
# that is https://twistedmatrix.com/trac/ticket/9409
@_checks_drafts(
name="ipv6", raises=(socket.error, struct.error, ValueError),
)
def is_ipv6(instance):
if not isinstance(instance, str_types):
return True
return socket.inet_pton(socket.AF_INET6, instance)
_host_name_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9\.\-]{1,255}$")
@_checks_drafts(
draft3="host-name",
draft4="hostname",
draft6="hostname",
draft7="hostname",
)
def is_host_name(instance):
if not isinstance(instance, str_types):
return True
if not _host_name_re.match(instance):
return False
components = instance.split(".")
for component in components:
if len(component) > 63:
return False
return True
try:
    # The built-in `idna` codec only implements RFC 3490, so we go elsewhere.
import idna
except ImportError:
pass
else:
@_checks_drafts(draft7="idn-hostname", raises=idna.IDNAError)
def is_idn_host_name(instance):
if not isinstance(instance, str_types):
return True
idna.encode(instance)
return True
try:
import rfc3987
except ImportError:
try:
from rfc3986_validator import validate_rfc3986
except ImportError:
pass
else:
@_checks_drafts(name="uri")
def is_uri(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3986(instance, rule="URI")
@_checks_drafts(
draft6="uri-reference",
draft7="uri-reference",
raises=ValueError,
)
def is_uri_reference(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3986(instance, rule="URI_reference")
else:
@_checks_drafts(draft7="iri", raises=ValueError)
def is_iri(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="IRI")
@_checks_drafts(draft7="iri-reference", raises=ValueError)
def is_iri_reference(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="IRI_reference")
@_checks_drafts(name="uri", raises=ValueError)
def is_uri(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="URI")
@_checks_drafts(
draft6="uri-reference",
draft7="uri-reference",
raises=ValueError,
)
def is_uri_reference(instance):
if not isinstance(instance, str_types):
return True
return rfc3987.parse(instance, rule="URI_reference")
try:
from strict_rfc3339 import validate_rfc3339
except ImportError:
try:
from rfc3339_validator import validate_rfc3339
except ImportError:
validate_rfc3339 = None
if validate_rfc3339:
@_checks_drafts(name="date-time")
def is_datetime(instance):
if not isinstance(instance, str_types):
return True
return validate_rfc3339(instance)
@_checks_drafts(draft7="time")
def is_time(instance):
if not isinstance(instance, str_types):
return True
return is_datetime("1970-01-01T" + instance)
@_checks_drafts(name="regex", raises=re.error)
def is_regex(instance):
if not isinstance(instance, str_types):
return True
return re.compile(instance)
@_checks_drafts(draft3="date", draft7="date", raises=ValueError)
def is_date(instance):
if not isinstance(instance, str_types):
return True
return datetime.datetime.strptime(instance, "%Y-%m-%d")
@_checks_drafts(draft3="time", raises=ValueError)
def is_draft3_time(instance):
if not isinstance(instance, str_types):
return True
return datetime.datetime.strptime(instance, "%H:%M:%S")
try:
import webcolors
except ImportError:
pass
else:
def is_css_color_code(instance):
return webcolors.normalize_hex(instance)
@_checks_drafts(draft3="color", raises=(ValueError, TypeError))
def is_css21_color(instance):
if (
not isinstance(instance, str_types) or
instance.lower() in webcolors.css21_names_to_hex
):
return True
return is_css_color_code(instance)
def is_css3_color(instance):
if instance.lower() in webcolors.css3_names_to_hex:
return True
return is_css_color_code(instance)
try:
import jsonpointer
except ImportError:
pass
else:
@_checks_drafts(
draft6="json-pointer",
draft7="json-pointer",
raises=jsonpointer.JsonPointerException,
)
def is_json_pointer(instance):
if not isinstance(instance, str_types):
return True
return jsonpointer.JsonPointer(instance)
# TODO: I don't want to maintain this, so it
# needs to go either into jsonpointer (pending
# https://github.com/stefankoegl/python-json-pointer/issues/34) or
# into a new external library.
@_checks_drafts(
draft7="relative-json-pointer",
raises=jsonpointer.JsonPointerException,
)
def is_relative_json_pointer(instance):
# Definition taken from:
# https://tools.ietf.org/html/draft-handrews-relative-json-pointer-01#section-3
if not isinstance(instance, str_types):
return True
non_negative_integer, rest = [], ""
for i, character in enumerate(instance):
if character.isdigit():
non_negative_integer.append(character)
continue
if not non_negative_integer:
return False
rest = instance[i:]
break
return (rest == "#") or jsonpointer.JsonPointer(rest)
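# Illustrative sketch (not part of the original module): given the parsing
# above, a relative JSON pointer is a run of digits followed by either "#"
# or an ordinary JSON pointer.
#
#     is_relative_json_pointer("0#")      # truthy: refers to the key itself
#     is_relative_json_pointer("1/foo")   # truthy: one level up, then /foo
#     is_relative_json_pointer("/foo")    # False: no leading integer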
try:
import uritemplate.exceptions
except ImportError:
pass
else:
@_checks_drafts(
draft6="uri-template",
draft7="uri-template",
raises=uritemplate.exceptions.InvalidTemplate,
)
def is_uri_template(
instance,
template_validator=uritemplate.Validator().force_balanced_braces(),
):
template = uritemplate.URITemplate(instance)
return template_validator.validate(template)
| 11,691 | Python | 26.446009 | 87 | 0.616029 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/exceptions.py | """
Validation errors, and some surrounding helpers.
"""
from collections import defaultdict, deque
import itertools
import pprint
import textwrap
import attr
from jsonschema import _utils
from jsonschema.compat import PY3, iteritems
WEAK_MATCHES = frozenset(["anyOf", "oneOf"])
STRONG_MATCHES = frozenset()
_unset = _utils.Unset()
class _Error(Exception):
def __init__(
self,
message,
validator=_unset,
path=(),
cause=None,
context=(),
validator_value=_unset,
instance=_unset,
schema=_unset,
schema_path=(),
parent=None,
):
super(_Error, self).__init__(
message,
validator,
path,
cause,
context,
validator_value,
instance,
schema,
schema_path,
parent,
)
self.message = message
self.path = self.relative_path = deque(path)
self.schema_path = self.relative_schema_path = deque(schema_path)
self.context = list(context)
self.cause = self.__cause__ = cause
self.validator = validator
self.validator_value = validator_value
self.instance = instance
self.schema = schema
self.parent = parent
for error in context:
error.parent = self
def __repr__(self):
return "<%s: %r>" % (self.__class__.__name__, self.message)
def __unicode__(self):
essential_for_verbose = (
self.validator, self.validator_value, self.instance, self.schema,
)
if any(m is _unset for m in essential_for_verbose):
return self.message
pschema = pprint.pformat(self.schema, width=72)
pinstance = pprint.pformat(self.instance, width=72)
return self.message + textwrap.dedent("""
Failed validating %r in %s%s:
%s
On %s%s:
%s
""".rstrip()
) % (
self.validator,
self._word_for_schema_in_error_message,
_utils.format_as_index(list(self.relative_schema_path)[:-1]),
_utils.indent(pschema),
self._word_for_instance_in_error_message,
_utils.format_as_index(self.relative_path),
_utils.indent(pinstance),
)
if PY3:
__str__ = __unicode__
else:
def __str__(self):
return unicode(self).encode("utf-8")
@classmethod
def create_from(cls, other):
return cls(**other._contents())
@property
def absolute_path(self):
parent = self.parent
if parent is None:
return self.relative_path
path = deque(self.relative_path)
path.extendleft(reversed(parent.absolute_path))
return path
@property
def absolute_schema_path(self):
parent = self.parent
if parent is None:
return self.relative_schema_path
path = deque(self.relative_schema_path)
path.extendleft(reversed(parent.absolute_schema_path))
return path
def _set(self, **kwargs):
for k, v in iteritems(kwargs):
if getattr(self, k) is _unset:
setattr(self, k, v)
def _contents(self):
attrs = (
"message", "cause", "context", "validator", "validator_value",
"path", "schema_path", "instance", "schema", "parent",
)
return dict((attr, getattr(self, attr)) for attr in attrs)
class ValidationError(_Error):
"""
An instance was invalid under a provided schema.
"""
_word_for_schema_in_error_message = "schema"
_word_for_instance_in_error_message = "instance"
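# Illustrative sketch (not part of the original module): the attributes set in
# _Error.__init__ are what calling code usually inspects, e.g.
#
#     from jsonschema.validators import Draft4Validator
#     error = next(Draft4Validator({"type": "string"}).iter_errors(12))
#     assert error.validator == "type"
#     assert error.instance == 12
#     assert list(error.absolute_path) == []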
class SchemaError(_Error):
"""
A schema was invalid under its corresponding metaschema.
"""
_word_for_schema_in_error_message = "metaschema"
_word_for_instance_in_error_message = "schema"
@attr.s(hash=True)
class RefResolutionError(Exception):
"""
A ref could not be resolved.
"""
_cause = attr.ib()
def __str__(self):
return str(self._cause)
class UndefinedTypeCheck(Exception):
"""
A type checker was asked to check a type it did not have registered.
"""
def __init__(self, type):
self.type = type
def __unicode__(self):
return "Type %r is unknown to this type checker" % self.type
if PY3:
__str__ = __unicode__
else:
def __str__(self):
return unicode(self).encode("utf-8")
class UnknownType(Exception):
"""
A validator was asked to validate an instance against an unknown type.
"""
def __init__(self, type, instance, schema):
self.type = type
self.instance = instance
self.schema = schema
def __unicode__(self):
pschema = pprint.pformat(self.schema, width=72)
pinstance = pprint.pformat(self.instance, width=72)
return textwrap.dedent("""
Unknown type %r for validator with schema:
%s
While checking instance:
%s
""".rstrip()
) % (self.type, _utils.indent(pschema), _utils.indent(pinstance))
if PY3:
__str__ = __unicode__
else:
def __str__(self):
return unicode(self).encode("utf-8")
class FormatError(Exception):
"""
Validating a format failed.
"""
def __init__(self, message, cause=None):
super(FormatError, self).__init__(message, cause)
self.message = message
self.cause = self.__cause__ = cause
def __unicode__(self):
return self.message
if PY3:
__str__ = __unicode__
else:
def __str__(self):
return self.message.encode("utf-8")
class ErrorTree(object):
"""
ErrorTrees make it easier to check which validations failed.
"""
_instance = _unset
def __init__(self, errors=()):
self.errors = {}
self._contents = defaultdict(self.__class__)
for error in errors:
container = self
for element in error.path:
container = container[element]
container.errors[error.validator] = error
container._instance = error.instance
def __contains__(self, index):
"""
Check whether ``instance[index]`` has any errors.
"""
return index in self._contents
def __getitem__(self, index):
"""
Retrieve the child tree one level down at the given ``index``.
If the index is not in the instance that this tree corresponds to and
is not known by this tree, whatever error would be raised by
``instance.__getitem__`` will be propagated (usually this is some
        subclass of `exceptions.LookupError`).
"""
if self._instance is not _unset and index not in self:
self._instance[index]
return self._contents[index]
def __setitem__(self, index, value):
"""
Add an error to the tree at the given ``index``.
"""
self._contents[index] = value
def __iter__(self):
"""
Iterate (non-recursively) over the indices in the instance with errors.
"""
return iter(self._contents)
def __len__(self):
"""
Return the `total_errors`.
"""
return self.total_errors
def __repr__(self):
return "<%s (%s total errors)>" % (self.__class__.__name__, len(self))
@property
def total_errors(self):
"""
The total number of errors in the entire tree, including children.
"""
child_errors = sum(len(tree) for _, tree in iteritems(self._contents))
return len(self.errors) + child_errors
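# Illustrative usage sketch (not part of the original module), assuming the
# Draft4Validator defined in jsonschema.validators:
#
#     from jsonschema.validators import Draft4Validator
#     validator = Draft4Validator({"items": {"type": "number"}})
#     tree = ErrorTree(validator.iter_errors([1, "spam", 2]))
#     assert 1 in tree                   # index 1 of the instance failed
#     assert "type" in tree[1].errors    # errors are keyed by validator name
#     assert tree.total_errors == 1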
def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES):
"""
Create a key function that can be used to sort errors by relevance.
Arguments:
weak (set):
a collection of validator names to consider to be "weak".
If there are two errors at the same level of the instance
and one is in the set of weak validator names, the other
error will take priority. By default, :validator:`anyOf` and
:validator:`oneOf` are considered weak validators and will
be superseded by other same-level validation errors.
strong (set):
a collection of validator names to consider to be "strong"
"""
def relevance(error):
validator = error.validator
return -len(error.path), validator not in weak, validator in strong
return relevance
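# Illustrative sketch (not part of the original module): a key that also
# demotes "type" errors relative to their same-level siblings.
#
#     custom_relevance = by_relevance(weak=WEAK_MATCHES | {"type"})
#     # best = max(validator.iter_errors(instance), key=custom_relevance)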
relevance = by_relevance()
def best_match(errors, key=relevance):
"""
Try to find an error that appears to be the best match among given errors.
In general, errors that are higher up in the instance (i.e. for which
`ValidationError.path` is shorter) are considered better matches,
since they indicate "more" is wrong with the instance.
If the resulting match is either :validator:`oneOf` or :validator:`anyOf`,
the *opposite* assumption is made -- i.e. the deepest error is picked,
since these validators only need to match once, and any other errors may
not be relevant.
Arguments:
errors (collections.Iterable):
the errors to select from. Do not provide a mixture of
errors from different validation attempts (i.e. from
different instances or schemas), since it won't produce
            sensible output.
key (collections.Callable):
the key to use when sorting errors. See `relevance` and
transitively `by_relevance` for more details (the default is
to sort with the defaults of that function). Changing the
default is only useful if you want to change the function
that rates errors but still want the error context descent
done by this function.
Returns:
the best matching error, or ``None`` if the iterable was empty
.. note::
This function is a heuristic. Its return value may change for a given
set of inputs from version to version if better heuristics are added.
"""
errors = iter(errors)
best = next(errors, None)
if best is None:
return
best = max(itertools.chain([best], errors), key=key)
while best.context:
best = min(best.context, key=key)
return best
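# Illustrative usage sketch (not part of the original module):
#
#     from jsonschema.validators import Draft4Validator
#     schema = {"anyOf": [{"type": "string"}, {"minimum": 20}]}
#     error = best_match(Draft4Validator(schema).iter_errors(5))
#     # Since anyOf is a weak match, best_match descends into its context
#     # and returns one of the branch errors instead of the anyOf error.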
| 10,450 | Python | 26.869333 | 79 | 0.585359 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/_types.py | import numbers
from pyrsistent import pmap
import attr
from jsonschema.compat import int_types, str_types
from jsonschema.exceptions import UndefinedTypeCheck
def is_array(checker, instance):
return isinstance(instance, list)
def is_bool(checker, instance):
return isinstance(instance, bool)
def is_integer(checker, instance):
# bool inherits from int, so ensure bools aren't reported as ints
if isinstance(instance, bool):
return False
return isinstance(instance, int_types)
def is_null(checker, instance):
return instance is None
def is_number(checker, instance):
# bool inherits from int, so ensure bools aren't reported as ints
if isinstance(instance, bool):
return False
return isinstance(instance, numbers.Number)
def is_object(checker, instance):
return isinstance(instance, dict)
def is_string(checker, instance):
return isinstance(instance, str_types)
def is_any(checker, instance):
return True
@attr.s(frozen=True)
class TypeChecker(object):
"""
A ``type`` property checker.
A `TypeChecker` performs type checking for an `IValidator`. Type
checks to perform are updated using `TypeChecker.redefine` or
`TypeChecker.redefine_many` and removed via `TypeChecker.remove`.
Each of these return a new `TypeChecker` object.
Arguments:
type_checkers (dict):
The initial mapping of types to their checking functions.
"""
_type_checkers = attr.ib(default=pmap(), converter=pmap)
def is_type(self, instance, type):
"""
Check if the instance is of the appropriate type.
Arguments:
instance (object):
The instance to check
type (str):
The name of the type that is expected.
Returns:
bool: Whether it conformed.
Raises:
`jsonschema.exceptions.UndefinedTypeCheck`:
if type is unknown to this object.
"""
try:
fn = self._type_checkers[type]
except KeyError:
raise UndefinedTypeCheck(type)
return fn(self, instance)
def redefine(self, type, fn):
"""
Produce a new checker with the given type redefined.
Arguments:
type (str):
The name of the type to check.
fn (collections.Callable):
A function taking exactly two parameters - the type
checker calling the function and the instance to check.
The function should return true if instance is of this
type and false otherwise.
Returns:
A new `TypeChecker` instance.
"""
return self.redefine_many({type: fn})
def redefine_many(self, definitions=()):
"""
Produce a new checker with the given types redefined.
Arguments:
definitions (dict):
A dictionary mapping types to their checking functions.
Returns:
A new `TypeChecker` instance.
"""
return attr.evolve(
self, type_checkers=self._type_checkers.update(definitions),
)
def remove(self, *types):
"""
Produce a new checker with the given types forgotten.
Arguments:
types (~collections.Iterable):
the names of the types to remove.
Returns:
A new `TypeChecker` instance
Raises:
`jsonschema.exceptions.UndefinedTypeCheck`:
if any given type is unknown to this object
"""
checkers = self._type_checkers
for each in types:
try:
checkers = checkers.remove(each)
except KeyError:
raise UndefinedTypeCheck(each)
return attr.evolve(self, type_checkers=checkers)
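# Illustrative sketch (not part of the original module): derive a checker
# (from any of those defined below) that also treats tuples as arrays.
#
#     def is_array_or_tuple(checker, instance):
#         return is_array(checker, instance) or isinstance(instance, tuple)
#     tuple_checker = draft7_type_checker.redefine(u"array", is_array_or_tuple)
#     assert tuple_checker.is_type((1, 2), u"array")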
draft3_type_checker = TypeChecker(
{
u"any": is_any,
u"array": is_array,
u"boolean": is_bool,
u"integer": is_integer,
u"object": is_object,
u"null": is_null,
u"number": is_number,
u"string": is_string,
},
)
draft4_type_checker = draft3_type_checker.remove(u"any")
draft6_type_checker = draft4_type_checker.redefine(
u"integer",
lambda checker, instance: (
is_integer(checker, instance) or
isinstance(instance, float) and instance.is_integer()
),
)
draft7_type_checker = draft6_type_checker
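# Illustrative sketch (not part of the original module): draft 6 widened the
# "integer" check, so a float with a zero fractional part now qualifies.
#
#     assert draft6_type_checker.is_type(1.0, u"integer")
#     assert not draft4_type_checker.is_type(1.0, u"integer")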
| 4,490 | Python | 22.761905 | 72 | 0.607572 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/_reflect.py | # -*- test-case-name: twisted.test.test_reflect -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Standardized versions of various cool and/or strange things that you can do
with Python's reflection capabilities.
"""
import sys
from jsonschema.compat import PY3
class _NoModuleFound(Exception):
"""
No module was found because none exists.
"""
class InvalidName(ValueError):
"""
The given name is not a dot-separated list of Python objects.
"""
class ModuleNotFound(InvalidName):
"""
The module associated with the given name doesn't exist and it can't be
imported.
"""
class ObjectNotFound(InvalidName):
"""
The object associated with the given name doesn't exist and it can't be
imported.
"""
if PY3:
def reraise(exception, traceback):
raise exception.with_traceback(traceback)
else:
exec("""def reraise(exception, traceback):
raise exception.__class__, exception, traceback""")
reraise.__doc__ = """
Re-raise an exception, with an optional traceback, in a way that is compatible
with both Python 2 and Python 3.
Note that on Python 3, re-raised exceptions will be mutated, with their
C{__traceback__} attribute being set.
@param exception: The exception instance.
@param traceback: The traceback to use, or C{None} indicating a new traceback.
"""
def _importAndCheckStack(importName):
"""
Import the given name as a module, then walk the stack to determine whether
the failure was the module not existing, or some code in the module (for
example a dependent import) failing. This can be helpful to determine
    whether any actual application code was run. For example, to distinguish
    administrative error (entering the wrong module name) from programmer
error (writing buggy code in a module that fails to import).
@param importName: The name of the module to import.
@type importName: C{str}
@raise Exception: if something bad happens. This can be any type of
exception, since nobody knows what loading some arbitrary code might
do.
@raise _NoModuleFound: if no module was found.
"""
try:
return __import__(importName)
except ImportError:
excType, excValue, excTraceback = sys.exc_info()
while excTraceback:
execName = excTraceback.tb_frame.f_globals["__name__"]
# in Python 2 execName is None when an ImportError is encountered,
            # whereas in Python 3 execName is equal to the importName.
if execName is None or execName == importName:
reraise(excValue, excTraceback)
excTraceback = excTraceback.tb_next
raise _NoModuleFound()
def namedAny(name):
"""
Retrieve a Python object by its fully qualified name from the global Python
module namespace. The first part of the name, that describes a module,
will be discovered and imported. Each subsequent part of the name is
treated as the name of an attribute of the object specified by all of the
name which came before it. For example, the fully-qualified name of this
object is 'twisted.python.reflect.namedAny'.
@type name: L{str}
@param name: The name of the object to return.
@raise InvalidName: If the name is an empty string, starts or ends with
a '.', or is otherwise syntactically incorrect.
@raise ModuleNotFound: If the name is syntactically correct but the
module it specifies cannot be imported because it does not appear to
exist.
@raise ObjectNotFound: If the name is syntactically correct, includes at
least one '.', but the module it specifies cannot be imported because
it does not appear to exist.
@raise AttributeError: If an attribute of an object along the way cannot be
accessed, or a module along the way is not found.
@return: the Python object identified by 'name'.
"""
if not name:
raise InvalidName('Empty module name')
names = name.split('.')
# if the name starts or ends with a '.' or contains '..', the __import__
# will raise an 'Empty module name' error. This will provide a better error
# message.
if '' in names:
raise InvalidName(
"name must be a string giving a '.'-separated list of Python "
"identifiers, not %r" % (name,))
topLevelPackage = None
moduleNames = names[:]
while not topLevelPackage:
if moduleNames:
trialname = '.'.join(moduleNames)
try:
topLevelPackage = _importAndCheckStack(trialname)
except _NoModuleFound:
moduleNames.pop()
else:
if len(names) == 1:
raise ModuleNotFound("No module named %r" % (name,))
else:
raise ObjectNotFound('%r does not name an object' % (name,))
obj = topLevelPackage
for n in names[1:]:
obj = getattr(obj, n)
return obj
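# Illustrative usage sketch (not part of the original module):
#
#     cls = namedAny("jsonschema.exceptions.ValidationError")
#     # cls is the ValidationError class object itself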
| 5,023 | Python | 31.205128 | 79 | 0.66514 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/_validators.py | import re
from jsonschema._utils import (
ensure_list,
equal,
extras_msg,
find_additional_properties,
types_msg,
unbool,
uniq,
)
from jsonschema.exceptions import FormatError, ValidationError
from jsonschema.compat import iteritems
def patternProperties(validator, patternProperties, instance, schema):
if not validator.is_type(instance, "object"):
return
for pattern, subschema in iteritems(patternProperties):
for k, v in iteritems(instance):
if re.search(pattern, k):
for error in validator.descend(
v, subschema, path=k, schema_path=pattern,
):
yield error
def propertyNames(validator, propertyNames, instance, schema):
if not validator.is_type(instance, "object"):
return
for property in instance:
for error in validator.descend(
instance=property,
schema=propertyNames,
):
yield error
def additionalProperties(validator, aP, instance, schema):
if not validator.is_type(instance, "object"):
return
extras = set(find_additional_properties(instance, schema))
if validator.is_type(aP, "object"):
for extra in extras:
for error in validator.descend(instance[extra], aP, path=extra):
yield error
elif not aP and extras:
if "patternProperties" in schema:
patterns = sorted(schema["patternProperties"])
if len(extras) == 1:
verb = "does"
else:
verb = "do"
error = "%s %s not match any of the regexes: %s" % (
", ".join(map(repr, sorted(extras))),
verb,
", ".join(map(repr, patterns)),
)
yield ValidationError(error)
else:
error = "Additional properties are not allowed (%s %s unexpected)"
yield ValidationError(error % extras_msg(extras))
def items(validator, items, instance, schema):
if not validator.is_type(instance, "array"):
return
if validator.is_type(items, "array"):
for (index, item), subschema in zip(enumerate(instance), items):
for error in validator.descend(
item, subschema, path=index, schema_path=index,
):
yield error
else:
for index, item in enumerate(instance):
for error in validator.descend(item, items, path=index):
yield error
def additionalItems(validator, aI, instance, schema):
if (
not validator.is_type(instance, "array") or
validator.is_type(schema.get("items", {}), "object")
):
return
len_items = len(schema.get("items", []))
if validator.is_type(aI, "object"):
for index, item in enumerate(instance[len_items:], start=len_items):
for error in validator.descend(item, aI, path=index):
yield error
elif not aI and len(instance) > len(schema.get("items", [])):
error = "Additional items are not allowed (%s %s unexpected)"
yield ValidationError(
error %
extras_msg(instance[len(schema.get("items", [])):])
)
def const(validator, const, instance, schema):
if not equal(instance, const):
yield ValidationError("%r was expected" % (const,))
def contains(validator, contains, instance, schema):
if not validator.is_type(instance, "array"):
return
if not any(validator.is_valid(element, contains) for element in instance):
yield ValidationError(
"None of %r are valid under the given schema" % (instance,)
)
def exclusiveMinimum(validator, minimum, instance, schema):
if not validator.is_type(instance, "number"):
return
if instance <= minimum:
yield ValidationError(
"%r is less than or equal to the minimum of %r" % (
instance, minimum,
),
)
def exclusiveMaximum(validator, maximum, instance, schema):
if not validator.is_type(instance, "number"):
return
if instance >= maximum:
yield ValidationError(
"%r is greater than or equal to the maximum of %r" % (
instance, maximum,
),
)
def minimum(validator, minimum, instance, schema):
if not validator.is_type(instance, "number"):
return
if instance < minimum:
yield ValidationError(
"%r is less than the minimum of %r" % (instance, minimum)
)
def maximum(validator, maximum, instance, schema):
if not validator.is_type(instance, "number"):
return
if instance > maximum:
yield ValidationError(
"%r is greater than the maximum of %r" % (instance, maximum)
)
def multipleOf(validator, dB, instance, schema):
if not validator.is_type(instance, "number"):
return
if isinstance(dB, float):
quotient = instance / dB
failed = int(quotient) != quotient
else:
failed = instance % dB
if failed:
yield ValidationError("%r is not a multiple of %r" % (instance, dB))
def minItems(validator, mI, instance, schema):
if validator.is_type(instance, "array") and len(instance) < mI:
yield ValidationError("%r is too short" % (instance,))
def maxItems(validator, mI, instance, schema):
if validator.is_type(instance, "array") and len(instance) > mI:
yield ValidationError("%r is too long" % (instance,))
def uniqueItems(validator, uI, instance, schema):
if (
uI and
validator.is_type(instance, "array") and
not uniq(instance)
):
yield ValidationError("%r has non-unique elements" % (instance,))
def pattern(validator, patrn, instance, schema):
if (
validator.is_type(instance, "string") and
not re.search(patrn, instance)
):
yield ValidationError("%r does not match %r" % (instance, patrn))
def format(validator, format, instance, schema):
if validator.format_checker is not None:
try:
validator.format_checker.check(instance, format)
except FormatError as error:
yield ValidationError(error.message, cause=error.cause)
def minLength(validator, mL, instance, schema):
if validator.is_type(instance, "string") and len(instance) < mL:
yield ValidationError("%r is too short" % (instance,))
def maxLength(validator, mL, instance, schema):
if validator.is_type(instance, "string") and len(instance) > mL:
yield ValidationError("%r is too long" % (instance,))
def dependencies(validator, dependencies, instance, schema):
if not validator.is_type(instance, "object"):
return
for property, dependency in iteritems(dependencies):
if property not in instance:
continue
if validator.is_type(dependency, "array"):
for each in dependency:
if each not in instance:
message = "%r is a dependency of %r"
yield ValidationError(message % (each, property))
else:
for error in validator.descend(
instance, dependency, schema_path=property,
):
yield error
def enum(validator, enums, instance, schema):
if instance == 0 or instance == 1:
unbooled = unbool(instance)
if all(unbooled != unbool(each) for each in enums):
yield ValidationError("%r is not one of %r" % (instance, enums))
elif instance not in enums:
yield ValidationError("%r is not one of %r" % (instance, enums))
def ref(validator, ref, instance, schema):
resolve = getattr(validator.resolver, "resolve", None)
if resolve is None:
with validator.resolver.resolving(ref) as resolved:
for error in validator.descend(instance, resolved):
yield error
else:
scope, resolved = validator.resolver.resolve(ref)
validator.resolver.push_scope(scope)
try:
for error in validator.descend(instance, resolved):
yield error
finally:
validator.resolver.pop_scope()
def type(validator, types, instance, schema):
types = ensure_list(types)
if not any(validator.is_type(instance, type) for type in types):
yield ValidationError(types_msg(instance, types))
def properties(validator, properties, instance, schema):
if not validator.is_type(instance, "object"):
return
for property, subschema in iteritems(properties):
if property in instance:
for error in validator.descend(
instance[property],
subschema,
path=property,
schema_path=property,
):
yield error
def required(validator, required, instance, schema):
if not validator.is_type(instance, "object"):
return
for property in required:
if property not in instance:
yield ValidationError("%r is a required property" % property)
def minProperties(validator, mP, instance, schema):
if validator.is_type(instance, "object") and len(instance) < mP:
yield ValidationError(
"%r does not have enough properties" % (instance,)
)
def maxProperties(validator, mP, instance, schema):
if not validator.is_type(instance, "object"):
return
if validator.is_type(instance, "object") and len(instance) > mP:
yield ValidationError("%r has too many properties" % (instance,))
def allOf(validator, allOf, instance, schema):
for index, subschema in enumerate(allOf):
for error in validator.descend(instance, subschema, schema_path=index):
yield error
def anyOf(validator, anyOf, instance, schema):
all_errors = []
for index, subschema in enumerate(anyOf):
errs = list(validator.descend(instance, subschema, schema_path=index))
if not errs:
break
all_errors.extend(errs)
else:
yield ValidationError(
"%r is not valid under any of the given schemas" % (instance,),
context=all_errors,
)
def oneOf(validator, oneOf, instance, schema):
subschemas = enumerate(oneOf)
all_errors = []
for index, subschema in subschemas:
errs = list(validator.descend(instance, subschema, schema_path=index))
if not errs:
first_valid = subschema
break
all_errors.extend(errs)
else:
yield ValidationError(
"%r is not valid under any of the given schemas" % (instance,),
context=all_errors,
)
more_valid = [s for i, s in subschemas if validator.is_valid(instance, s)]
if more_valid:
more_valid.append(first_valid)
reprs = ", ".join(repr(schema) for schema in more_valid)
yield ValidationError(
"%r is valid under each of %s" % (instance, reprs)
)
def not_(validator, not_schema, instance, schema):
if validator.is_valid(instance, not_schema):
yield ValidationError(
"%r is not allowed for %r" % (not_schema, instance)
)
def if_(validator, if_schema, instance, schema):
if validator.is_valid(instance, if_schema):
if u"then" in schema:
then = schema[u"then"]
for error in validator.descend(instance, then, schema_path="then"):
yield error
elif u"else" in schema:
else_ = schema[u"else"]
for error in validator.descend(instance, else_, schema_path="else"):
yield error
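# Illustrative sketch (not part of the original module): a draft 7 schema
# exercising the conditional keywords handled above.
#
#     {"if": {"properties": {"kind": {"const": "circle"}}},
#      "then": {"required": ["radius"]},
#      "else": {"required": ["width", "height"]}}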
| 11,703 | Python | 30.294118 | 79 | 0.607366 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/__init__.py | """
An implementation of JSON Schema for Python
The main functionality is provided by the validator classes for each of the
supported JSON Schema versions.
Most commonly, `validate` is the quickest way to simply validate a given
instance under a schema, and will create a validator for you.
"""
from jsonschema.exceptions import (
ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
)
from jsonschema._format import (
FormatChecker,
draft3_format_checker,
draft4_format_checker,
draft6_format_checker,
draft7_format_checker,
)
from jsonschema._types import TypeChecker
from jsonschema.validators import (
Draft3Validator,
Draft4Validator,
Draft6Validator,
Draft7Validator,
RefResolver,
validate,
)
try:
from importlib import metadata
except ImportError: # for Python<3.8
import importlib_metadata as metadata
__version__ = metadata.version("jsonschema")
| 934 | Python | 25.714285 | 76 | 0.761242 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/_legacy_validators.py | from jsonschema import _utils
from jsonschema.compat import iteritems
from jsonschema.exceptions import ValidationError
def dependencies_draft3(validator, dependencies, instance, schema):
if not validator.is_type(instance, "object"):
return
for property, dependency in iteritems(dependencies):
if property not in instance:
continue
if validator.is_type(dependency, "object"):
for error in validator.descend(
instance, dependency, schema_path=property,
):
yield error
elif validator.is_type(dependency, "string"):
if dependency not in instance:
yield ValidationError(
"%r is a dependency of %r" % (dependency, property)
)
else:
for each in dependency:
if each not in instance:
message = "%r is a dependency of %r"
yield ValidationError(message % (each, property))
def disallow_draft3(validator, disallow, instance, schema):
for disallowed in _utils.ensure_list(disallow):
if validator.is_valid(instance, {"type": [disallowed]}):
yield ValidationError(
"%r is disallowed for %r" % (disallowed, instance)
)
def extends_draft3(validator, extends, instance, schema):
if validator.is_type(extends, "object"):
for error in validator.descend(instance, extends):
yield error
return
for index, subschema in enumerate(extends):
for error in validator.descend(instance, subschema, schema_path=index):
yield error
def items_draft3_draft4(validator, items, instance, schema):
if not validator.is_type(instance, "array"):
return
if validator.is_type(items, "object"):
for index, item in enumerate(instance):
for error in validator.descend(item, items, path=index):
yield error
else:
for (index, item), subschema in zip(enumerate(instance), items):
for error in validator.descend(
item, subschema, path=index, schema_path=index,
):
yield error
def minimum_draft3_draft4(validator, minimum, instance, schema):
if not validator.is_type(instance, "number"):
return
if schema.get("exclusiveMinimum", False):
failed = instance <= minimum
cmp = "less than or equal to"
else:
failed = instance < minimum
cmp = "less than"
if failed:
yield ValidationError(
"%r is %s the minimum of %r" % (instance, cmp, minimum)
)
def maximum_draft3_draft4(validator, maximum, instance, schema):
if not validator.is_type(instance, "number"):
return
if schema.get("exclusiveMaximum", False):
failed = instance >= maximum
cmp = "greater than or equal to"
else:
failed = instance > maximum
cmp = "greater than"
if failed:
yield ValidationError(
"%r is %s the maximum of %r" % (instance, cmp, maximum)
)
def properties_draft3(validator, properties, instance, schema):
if not validator.is_type(instance, "object"):
return
for property, subschema in iteritems(properties):
if property in instance:
for error in validator.descend(
instance[property],
subschema,
path=property,
schema_path=property,
):
yield error
elif subschema.get("required", False):
error = ValidationError("%r is a required property" % property)
error._set(
validator="required",
validator_value=subschema["required"],
instance=instance,
schema=schema,
)
error.path.appendleft(property)
error.schema_path.extend([property, "required"])
yield error
def type_draft3(validator, types, instance, schema):
types = _utils.ensure_list(types)
all_errors = []
for index, type in enumerate(types):
if validator.is_type(type, "object"):
errors = list(validator.descend(instance, type, schema_path=index))
if not errors:
return
all_errors.extend(errors)
else:
if validator.is_type(instance, type):
return
else:
yield ValidationError(
_utils.types_msg(instance, types), context=all_errors,
)
| 4,584 | Python | 31.288732 | 79 | 0.585079 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/_utils.py | import itertools
import json
import pkgutil
import re
from jsonschema.compat import MutableMapping, str_types, urlsplit
class URIDict(MutableMapping):
"""
Dictionary which uses normalized URIs as keys.
"""
def normalize(self, uri):
return urlsplit(uri).geturl()
def __init__(self, *args, **kwargs):
self.store = dict()
self.store.update(*args, **kwargs)
def __getitem__(self, uri):
return self.store[self.normalize(uri)]
def __setitem__(self, uri, value):
self.store[self.normalize(uri)] = value
def __delitem__(self, uri):
del self.store[self.normalize(uri)]
def __iter__(self):
return iter(self.store)
def __len__(self):
return len(self.store)
def __repr__(self):
return repr(self.store)
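# Illustrative sketch (not part of the original module): keys that normalize
# to the same URI share a single entry, e.g. a trailing empty fragment is
# dropped by urlsplit(...).geturl().
#
#     store = URIDict()
#     store["http://example.com/schema#"] = {"type": "object"}
#     assert store["http://example.com/schema"] == {"type": "object"}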
class Unset(object):
"""
An as-of-yet unset attribute or unprovided default parameter.
"""
def __repr__(self):
return "<unset>"
def load_schema(name):
"""
Load a schema from ./schemas/``name``.json and return it.
"""
data = pkgutil.get_data("jsonschema", "schemas/{0}.json".format(name))
return json.loads(data.decode("utf-8"))
def indent(string, times=1):
"""
A dumb version of `textwrap.indent` from Python 3.3.
"""
return "\n".join(" " * (4 * times) + line for line in string.splitlines())
def format_as_index(indices):
"""
Construct a single string containing indexing operations for the indices.
For example, [1, 2, "foo"] -> [1][2]["foo"]
Arguments:
indices (sequence):
The indices to format.
"""
if not indices:
return ""
return "[%s]" % "][".join(repr(index) for index in indices)
def find_additional_properties(instance, schema):
"""
Return the set of additional properties for the given ``instance``.
Weeds out properties that should have been validated by ``properties`` and
/ or ``patternProperties``.
Assumes ``instance`` is dict-like already.
"""
properties = schema.get("properties", {})
patterns = "|".join(schema.get("patternProperties", {}))
for property in instance:
if property not in properties:
if patterns and re.search(patterns, property):
continue
yield property
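# Illustrative sketch (not part of the original module):
#
#     schema = {"properties": {"a": {}}, "patternProperties": {"^b": {}}}
#     instance = {"a": 1, "ba": 2, "c": 3}
#     assert list(find_additional_properties(instance, schema)) == ["c"]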
def extras_msg(extras):
"""
Create an error message for extra items or properties.
"""
if len(extras) == 1:
verb = "was"
else:
verb = "were"
return ", ".join(repr(extra) for extra in extras), verb
def types_msg(instance, types):
"""
Create an error message for a failure to match the given types.
If the ``instance`` is an object and contains a ``name`` property, it will
be considered to be a description of that object and used as its type.
Otherwise the message is simply the reprs of the given ``types``.
"""
reprs = []
for type in types:
try:
reprs.append(repr(type["name"]))
except Exception:
reprs.append(repr(type))
return "%r is not of type %s" % (instance, ", ".join(reprs))
def flatten(suitable_for_isinstance):
"""
isinstance() can accept a bunch of really annoying different types:
* a single type
* a tuple of types
* an arbitrary nested tree of tuples
Return a flattened tuple of the given argument.
"""
types = set()
if not isinstance(suitable_for_isinstance, tuple):
suitable_for_isinstance = (suitable_for_isinstance,)
for thing in suitable_for_isinstance:
if isinstance(thing, tuple):
types.update(flatten(thing))
else:
types.add(thing)
return tuple(types)
def ensure_list(thing):
"""
Wrap ``thing`` in a list if it's a single str.
Otherwise, return it unchanged.
"""
if isinstance(thing, str_types):
return [thing]
return thing
def equal(one, two):
"""
Check if two things are equal, but evade booleans and ints being equal.
"""
return unbool(one) == unbool(two)
def unbool(element, true=object(), false=object()):
"""
A hack to make True and 1 and False and 0 unique for ``uniq``.
"""
if element is True:
return true
elif element is False:
return false
return element
def uniq(container):
"""
Check if all of a container's elements are unique.
    First tries to rely on the elements being hashable, then falls back on
    them being sortable, and finally falls back on brute force.
"""
try:
return len(set(unbool(i) for i in container)) == len(container)
except TypeError:
try:
sort = sorted(unbool(i) for i in container)
sliced = itertools.islice(sort, 1, None)
for i, j in zip(sort, sliced):
if i == j:
return False
except (NotImplementedError, TypeError):
seen = []
for e in container:
e = unbool(e)
if e in seen:
return False
seen.append(e)
return True
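# Illustrative sketch (not part of the original module):
#
#     assert uniq([True, 1])     # unbool() keeps True distinct from 1
#     assert not uniq([{"a": 1}, {"a": 1}])   # unhashable and unsortable,
#                                             # so the brute-force path runs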
| 5,168 | Python | 23.267606 | 78 | 0.595395 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/compat.py | """
Python 2/3 compatibility helpers.
Note: This module is *not* public API.
"""
import contextlib
import operator
import sys
try:
from collections.abc import MutableMapping, Sequence # noqa
except ImportError:
from collections import MutableMapping, Sequence # noqa
PY3 = sys.version_info[0] >= 3
if PY3:
zip = zip
from functools import lru_cache
from io import StringIO as NativeIO
from urllib.parse import (
unquote, urljoin, urlunsplit, SplitResult, urlsplit
)
from urllib.request import pathname2url, urlopen
str_types = str,
int_types = int,
iteritems = operator.methodcaller("items")
else:
from itertools import izip as zip # noqa
from io import BytesIO as NativeIO
from urlparse import urljoin, urlunsplit, SplitResult, urlsplit
from urllib import pathname2url, unquote # noqa
import urllib2 # noqa
def urlopen(*args, **kwargs):
return contextlib.closing(urllib2.urlopen(*args, **kwargs))
str_types = basestring
int_types = int, long
iteritems = operator.methodcaller("iteritems")
from functools32 import lru_cache
def urldefrag(url):
if "#" in url:
s, n, p, q, frag = urlsplit(url)
defrag = urlunsplit((s, n, p, q, ""))
else:
defrag = url
frag = ""
return defrag, frag
# flake8: noqa
| 1,353 | Python | 23.178571 | 67 | 0.670362 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/__main__.py | from jsonschema.cli import main
main()
| 39 | Python | 12.333329 | 31 | 0.794872 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/cli.py | """
The ``jsonschema`` command line.
"""
from __future__ import absolute_import
import argparse
import json
import sys
from jsonschema import __version__
from jsonschema._reflect import namedAny
from jsonschema.validators import validator_for
def _namedAnyWithDefault(name):
if "." not in name:
name = "jsonschema." + name
return namedAny(name)
def _json_file(path):
with open(path) as file:
return json.load(file)
parser = argparse.ArgumentParser(
description="JSON Schema Validation CLI",
)
parser.add_argument(
"-i", "--instance",
action="append",
dest="instances",
type=_json_file,
help=(
"a path to a JSON instance (i.e. filename.json) "
"to validate (may be specified multiple times)"
),
)
parser.add_argument(
"-F", "--error-format",
default="{error.instance}: {error.message}\n",
help=(
"the format to use for each error output message, specified in "
"a form suitable for passing to str.format, which will be called "
"with 'error' for each error"
),
)
parser.add_argument(
"-V", "--validator",
type=_namedAnyWithDefault,
help=(
"the fully qualified object name of a validator to use, or, for "
"validators that are registered with jsonschema, simply the name "
"of the class."
),
)
parser.add_argument(
"--version",
action="version",
version=__version__,
)
parser.add_argument(
"schema",
help="the JSON Schema to validate with (i.e. schema.json)",
type=_json_file,
)
def parse_args(args):
arguments = vars(parser.parse_args(args=args or ["--help"]))
if arguments["validator"] is None:
arguments["validator"] = validator_for(arguments["schema"])
return arguments
def main(args=sys.argv[1:]):
sys.exit(run(arguments=parse_args(args=args)))
def run(arguments, stdout=sys.stdout, stderr=sys.stderr):
error_format = arguments["error_format"]
validator = arguments["validator"](schema=arguments["schema"])
validator.check_schema(arguments["schema"])
errored = False
for instance in arguments["instances"] or ():
for error in validator.iter_errors(instance):
stderr.write(error_format.format(error=error))
errored = True
return errored
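# Illustrative invocation sketch (not part of the original module):
#
#     python -m jsonschema -i sample.json schema.json
#     python -m jsonschema -i sample.json -V Draft4Validator schema.json
#
# main() exits non-zero when any instance fails validation.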
| 2,310 | Python | 24.395604 | 74 | 0.649784 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/tests/test_jsonschema_test_suite.py | """
Test runner for the JSON Schema official test suite
Tests comprehensive correctness of each draft's validator.
See https://github.com/json-schema-org/JSON-Schema-Test-Suite for details.
"""
import sys
import warnings
from jsonschema import (
Draft3Validator,
Draft4Validator,
Draft6Validator,
Draft7Validator,
draft3_format_checker,
draft4_format_checker,
draft6_format_checker,
draft7_format_checker,
)
from jsonschema.tests._helpers import bug
from jsonschema.tests._suite import Suite
from jsonschema.validators import _DEPRECATED_DEFAULT_TYPES, create
SUITE = Suite()
DRAFT3 = SUITE.version(name="draft3")
DRAFT4 = SUITE.version(name="draft4")
DRAFT6 = SUITE.version(name="draft6")
DRAFT7 = SUITE.version(name="draft7")
def skip(message, **kwargs):
def skipper(test):
if all(value == getattr(test, attr) for attr, value in kwargs.items()):
return message
return skipper
def missing_format(checker):
def missing_format(test):
schema = test.schema
if schema is True or schema is False or "format" not in schema:
return
if schema["format"] not in checker.checkers:
return "Format checker {0!r} not found.".format(schema["format"])
return missing_format
is_narrow_build = sys.maxunicode == 2 ** 16 - 1
if is_narrow_build: # pragma: no cover
message = "Not running surrogate Unicode case, this Python is narrow."
def narrow_unicode_build(test): # pragma: no cover
return skip(
message=message,
description="one supplementary Unicode code point is not long enough",
)(test) or skip(
message=message,
description="two supplementary Unicode code points is long enough",
)(test)
else:
def narrow_unicode_build(test): # pragma: no cover
return
TestDraft3 = DRAFT3.to_unittest_testcase(
DRAFT3.tests(),
DRAFT3.optional_tests_of(name="bignum"),
DRAFT3.optional_tests_of(name="format"),
DRAFT3.optional_tests_of(name="zeroTerminatedFloats"),
Validator=Draft3Validator,
format_checker=draft3_format_checker,
skip=lambda test: (
narrow_unicode_build(test)
or missing_format(draft3_format_checker)(test)
or skip(
message="Upstream bug in strict_rfc3339",
subject="format",
description="case-insensitive T and Z",
)(test)
),
)
TestDraft4 = DRAFT4.to_unittest_testcase(
DRAFT4.tests(),
DRAFT4.optional_tests_of(name="bignum"),
DRAFT4.optional_tests_of(name="format"),
DRAFT4.optional_tests_of(name="zeroTerminatedFloats"),
Validator=Draft4Validator,
format_checker=draft4_format_checker,
skip=lambda test: (
narrow_unicode_build(test)
or missing_format(draft4_format_checker)(test)
or skip(
message=bug(),
subject="ref",
case_description="Recursive references between schemas",
)(test)
or skip(
message=bug(371),
subject="ref",
case_description="Location-independent identifier",
)(test)
or skip(
message=bug(371),
subject="ref",
case_description=(
"Location-independent identifier with absolute URI"
),
)(test)
or skip(
message=bug(371),
subject="ref",
case_description=(
"Location-independent identifier with base URI change in subschema"
),
)(test)
or skip(
message=bug(),
subject="refRemote",
case_description="base URI change - change folder in subschema",
)(test)
or skip(
message="Upstream bug in strict_rfc3339",
subject="format",
description="case-insensitive T and Z",
)(test)
),
)
TestDraft6 = DRAFT6.to_unittest_testcase(
DRAFT6.tests(),
DRAFT6.optional_tests_of(name="bignum"),
DRAFT6.optional_tests_of(name="format"),
DRAFT6.optional_tests_of(name="zeroTerminatedFloats"),
Validator=Draft6Validator,
format_checker=draft6_format_checker,
skip=lambda test: (
narrow_unicode_build(test)
or missing_format(draft6_format_checker)(test)
or skip(
message=bug(),
subject="ref",
case_description="Recursive references between schemas",
)(test)
or skip(
message=bug(371),
subject="ref",
case_description="Location-independent identifier",
)(test)
or skip(
message=bug(371),
subject="ref",
case_description=(
"Location-independent identifier with absolute URI"
),
)(test)
or skip(
message=bug(371),
subject="ref",
case_description=(
"Location-independent identifier with base URI change in subschema"
),
)(test)
or skip(
message=bug(),
subject="refRemote",
case_description="base URI change - change folder in subschema",
)(test)
or skip(
message="Upstream bug in strict_rfc3339",
subject="format",
description="case-insensitive T and Z",
)(test)
),
)
TestDraft7 = DRAFT7.to_unittest_testcase(
DRAFT7.tests(),
DRAFT7.format_tests(),
DRAFT7.optional_tests_of(name="bignum"),
DRAFT7.optional_tests_of(name="content"),
DRAFT7.optional_tests_of(name="zeroTerminatedFloats"),
Validator=Draft7Validator,
format_checker=draft7_format_checker,
skip=lambda test: (
narrow_unicode_build(test)
or missing_format(draft7_format_checker)(test)
or skip(
message=bug(),
subject="ref",
case_description="Recursive references between schemas",
)(test)
or skip(
message=bug(371),
subject="ref",
case_description="Location-independent identifier",
)(test)
or skip(
message=bug(371),
subject="ref",
case_description=(
"Location-independent identifier with absolute URI"
),
)(test)
or skip(
message=bug(371),
subject="ref",
case_description=(
"Location-independent identifier with base URI change in subschema"
),
)(test)
or skip(
message=bug(),
subject="refRemote",
case_description="base URI change - change folder in subschema",
)(test)
or skip(
message="Upstream bug in strict_rfc3339",
subject="date-time",
description="case-insensitive T and Z",
)(test)
or skip(
message=bug(593),
subject="content",
case_description=(
"validation of string-encoded content based on media type"
),
)(test)
or skip(
message=bug(593),
subject="content",
case_description="validation of binary string-encoding",
)(test)
or skip(
message=bug(593),
subject="content",
case_description=(
"validation of binary-encoded media type documents"
),
)(test)
),
)
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
TestDraft3LegacyTypeCheck = DRAFT3.to_unittest_testcase(
# Interestingly the any part couldn't really be done w/the old API.
(
(test for test in each if test.schema != {"type": "any"})
for each in DRAFT3.tests_of(name="type")
),
name="TestDraft3LegacyTypeCheck",
Validator=create(
meta_schema=Draft3Validator.META_SCHEMA,
validators=Draft3Validator.VALIDATORS,
default_types=_DEPRECATED_DEFAULT_TYPES,
),
)
TestDraft4LegacyTypeCheck = DRAFT4.to_unittest_testcase(
DRAFT4.tests_of(name="type"),
name="TestDraft4LegacyTypeCheck",
Validator=create(
meta_schema=Draft4Validator.META_SCHEMA,
validators=Draft4Validator.VALIDATORS,
default_types=_DEPRECATED_DEFAULT_TYPES,
),
)
| 8,464 | Python | 29.44964 | 83 | 0.586366 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/tests/test_cli.py | from unittest import TestCase
import json
import subprocess
import sys
from jsonschema import Draft4Validator, ValidationError, cli, __version__
from jsonschema.compat import NativeIO
from jsonschema.exceptions import SchemaError
def fake_validator(*errors):
errors = list(reversed(errors))
class FakeValidator(object):
def __init__(self, *args, **kwargs):
pass
def iter_errors(self, instance):
if errors:
return errors.pop()
return []
def check_schema(self, schema):
pass
return FakeValidator
class TestParser(TestCase):
FakeValidator = fake_validator()
instance_file = "foo.json"
schema_file = "schema.json"
def setUp(self):
cli.open = self.fake_open
self.addCleanup(delattr, cli, "open")
def fake_open(self, path):
if path == self.instance_file:
contents = ""
elif path == self.schema_file:
contents = {}
else: # pragma: no cover
self.fail("What is {!r}".format(path))
return NativeIO(json.dumps(contents))
def test_find_validator_by_fully_qualified_object_name(self):
arguments = cli.parse_args(
[
"--validator",
"jsonschema.tests.test_cli.TestParser.FakeValidator",
"--instance", self.instance_file,
self.schema_file,
]
)
self.assertIs(arguments["validator"], self.FakeValidator)
def test_find_validator_in_jsonschema(self):
arguments = cli.parse_args(
[
"--validator", "Draft4Validator",
"--instance", self.instance_file,
self.schema_file,
]
)
self.assertIs(arguments["validator"], Draft4Validator)
class TestCLI(TestCase):
def test_draft3_schema_draft4_validator(self):
stdout, stderr = NativeIO(), NativeIO()
with self.assertRaises(SchemaError):
cli.run(
{
"validator": Draft4Validator,
"schema": {
"anyOf": [
{"minimum": 20},
{"type": "string"},
{"required": True},
],
},
"instances": [1],
"error_format": "{error.message}",
},
stdout=stdout,
stderr=stderr,
)
def test_successful_validation(self):
stdout, stderr = NativeIO(), NativeIO()
exit_code = cli.run(
{
"validator": fake_validator(),
"schema": {},
"instances": [1],
"error_format": "{error.message}",
},
stdout=stdout,
stderr=stderr,
)
self.assertFalse(stdout.getvalue())
self.assertFalse(stderr.getvalue())
self.assertEqual(exit_code, 0)
def test_unsuccessful_validation(self):
error = ValidationError("I am an error!", instance=1)
stdout, stderr = NativeIO(), NativeIO()
exit_code = cli.run(
{
"validator": fake_validator([error]),
"schema": {},
"instances": [1],
"error_format": "{error.instance} - {error.message}",
},
stdout=stdout,
stderr=stderr,
)
self.assertFalse(stdout.getvalue())
self.assertEqual(stderr.getvalue(), "1 - I am an error!")
self.assertEqual(exit_code, 1)
def test_unsuccessful_validation_multiple_instances(self):
first_errors = [
ValidationError("9", instance=1),
ValidationError("8", instance=1),
]
second_errors = [ValidationError("7", instance=2)]
stdout, stderr = NativeIO(), NativeIO()
exit_code = cli.run(
{
"validator": fake_validator(first_errors, second_errors),
"schema": {},
"instances": [1, 2],
"error_format": "{error.instance} - {error.message}\t",
},
stdout=stdout,
stderr=stderr,
)
self.assertFalse(stdout.getvalue())
self.assertEqual(stderr.getvalue(), "1 - 9\t1 - 8\t2 - 7\t")
self.assertEqual(exit_code, 1)
def test_version(self):
version = subprocess.check_output(
[sys.executable, "-m", "jsonschema", "--version"],
stderr=subprocess.STDOUT,
)
version = version.decode("utf-8").strip()
self.assertEqual(version, __version__)
| 4,727 | Python | 30.105263 | 73 | 0.512376 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/tests/_suite.py | """
Python representations of the JSON Schema Test Suite tests.
"""
from functools import partial
import json
import os
import re
import subprocess
import sys
import unittest
from twisted.python.filepath import FilePath
import attr
from jsonschema.compat import PY3
from jsonschema.validators import validators
import jsonschema
def _find_suite():
root = os.environ.get("JSON_SCHEMA_TEST_SUITE")
if root is not None:
return FilePath(root)
root = FilePath(jsonschema.__file__).parent().sibling("json")
if not root.isdir(): # pragma: no cover
raise ValueError(
(
"Can't find the JSON-Schema-Test-Suite directory. "
"Set the 'JSON_SCHEMA_TEST_SUITE' environment "
"variable or run the tests from alongside a checkout "
"of the suite."
),
)
return root
@attr.s(hash=True)
class Suite(object):
_root = attr.ib(default=attr.Factory(_find_suite))
def _remotes(self):
jsonschema_suite = self._root.descendant(["bin", "jsonschema_suite"])
remotes = subprocess.check_output(
[sys.executable, jsonschema_suite.path, "remotes"],
)
return {
"http://localhost:1234/" + name: schema
for name, schema in json.loads(remotes.decode("utf-8")).items()
}
def benchmark(self, runner): # pragma: no cover
for name in validators:
self.version(name=name).benchmark(runner=runner)
def version(self, name):
return Version(
name=name,
path=self._root.descendant(["tests", name]),
remotes=self._remotes(),
)
@attr.s(hash=True)
class Version(object):
_path = attr.ib()
_remotes = attr.ib()
name = attr.ib()
def benchmark(self, runner, **kwargs): # pragma: no cover
for suite in self.tests():
for test in suite:
runner.bench_func(
test.fully_qualified_name,
partial(test.validate_ignoring_errors, **kwargs),
)
def tests(self):
return (
test
for child in self._path.globChildren("*.json")
for test in self._tests_in(
subject=child.basename()[:-5],
path=child,
)
)
def format_tests(self):
path = self._path.descendant(["optional", "format"])
return (
test
for child in path.globChildren("*.json")
for test in self._tests_in(
subject=child.basename()[:-5],
path=child,
)
)
def tests_of(self, name):
return self._tests_in(
subject=name,
path=self._path.child(name + ".json"),
)
def optional_tests_of(self, name):
return self._tests_in(
subject=name,
path=self._path.descendant(["optional", name + ".json"]),
)
def to_unittest_testcase(self, *suites, **kwargs):
name = kwargs.pop("name", "Test" + self.name.title())
methods = {
test.method_name: test.to_unittest_method(**kwargs)
for suite in suites
for tests in suite
for test in tests
}
cls = type(name, (unittest.TestCase,), methods)
try:
cls.__module__ = _someone_save_us_the_module_of_the_caller()
except Exception: # pragma: no cover
# We're doing crazy things, so if they go wrong, like a function
# behaving differently on some other interpreter, just make them
# not happen.
pass
return cls
def _tests_in(self, subject, path):
for each in json.loads(path.getContent().decode("utf-8")):
yield (
_Test(
version=self,
subject=subject,
case_description=each["description"],
schema=each["schema"],
remotes=self._remotes,
**test
) for test in each["tests"]
)
@attr.s(hash=True, repr=False)
class _Test(object):
version = attr.ib()
subject = attr.ib()
case_description = attr.ib()
description = attr.ib()
data = attr.ib()
schema = attr.ib(repr=False)
valid = attr.ib()
_remotes = attr.ib()
def __repr__(self): # pragma: no cover
return "<Test {}>".format(self.fully_qualified_name)
@property
def fully_qualified_name(self): # pragma: no cover
return " > ".join(
[
self.version.name,
self.subject,
self.case_description,
self.description,
]
)
@property
def method_name(self):
delimiters = r"[\W\- ]+"
name = "test_%s_%s_%s" % (
re.sub(delimiters, "_", self.subject),
re.sub(delimiters, "_", self.case_description),
re.sub(delimiters, "_", self.description),
)
if not PY3: # pragma: no cover
name = name.encode("utf-8")
return name
def to_unittest_method(self, skip=lambda test: None, **kwargs):
if self.valid:
def fn(this):
self.validate(**kwargs)
else:
def fn(this):
with this.assertRaises(jsonschema.ValidationError):
self.validate(**kwargs)
fn.__name__ = self.method_name
reason = skip(self)
return unittest.skipIf(reason is not None, reason)(fn)
def validate(self, Validator, **kwargs):
resolver = jsonschema.RefResolver.from_schema(
schema=self.schema,
store=self._remotes,
id_of=Validator.ID_OF,
)
jsonschema.validate(
instance=self.data,
schema=self.schema,
cls=Validator,
resolver=resolver,
**kwargs
)
def validate_ignoring_errors(self, Validator): # pragma: no cover
try:
self.validate(Validator=Validator)
except jsonschema.ValidationError:
pass
def _someone_save_us_the_module_of_the_caller():
"""
    The FQON of the module two stack frames up from here.
    This is intended to allow us to dynamically return test case classes that
are indistinguishable from being defined in the module that wants them.
Otherwise, trial will mis-print the FQON, and copy pasting it won't re-run
the class that really is running.
Save us all, this is all so so so so so terrible.
"""
return sys._getframe(2).f_globals["__name__"]
| 6,728 | Python | 27.0375 | 78 | 0.546225 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/tests/test_exceptions.py | from unittest import TestCase
import textwrap
from jsonschema import Draft4Validator, exceptions
from jsonschema.compat import PY3
class TestBestMatch(TestCase):
def best_match(self, errors):
errors = list(errors)
best = exceptions.best_match(errors)
reversed_best = exceptions.best_match(reversed(errors))
msg = "Didn't return a consistent best match!\nGot: {0}\n\nThen: {1}"
self.assertEqual(
best._contents(), reversed_best._contents(),
msg=msg.format(best, reversed_best),
)
return best
def test_shallower_errors_are_better_matches(self):
validator = Draft4Validator(
{
"properties": {
"foo": {
"minProperties": 2,
"properties": {"bar": {"type": "object"}},
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": []}}))
self.assertEqual(best.validator, "minProperties")
def test_oneOf_and_anyOf_are_weak_matches(self):
"""
A property you *must* match is probably better than one you have to
match a part of.
"""
validator = Draft4Validator(
{
"minProperties": 2,
"anyOf": [{"type": "string"}, {"type": "number"}],
"oneOf": [{"type": "string"}, {"type": "number"}],
}
)
best = self.best_match(validator.iter_errors({}))
self.assertEqual(best.validator, "minProperties")
def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self):
"""
If the most relevant error is an anyOf, then we traverse its context
and select the otherwise *least* relevant error, since in this case
that means the most specific, deep, error inside the instance.
I.e. since only one of the schemas must match, we look for the most
relevant one.
"""
validator = Draft4Validator(
{
"properties": {
"foo": {
"anyOf": [
{"type": "string"},
{"properties": {"bar": {"type": "array"}}},
],
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
self.assertEqual(best.validator_value, "array")
def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self):
"""
If the most relevant error is an oneOf, then we traverse its context
and select the otherwise *least* relevant error, since in this case
that means the most specific, deep, error inside the instance.
I.e. since only one of the schemas must match, we look for the most
relevant one.
"""
validator = Draft4Validator(
{
"properties": {
"foo": {
"oneOf": [
{"type": "string"},
{"properties": {"bar": {"type": "array"}}},
],
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
self.assertEqual(best.validator_value, "array")
def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self):
"""
Now, if the error is allOf, we traverse but select the *most* relevant
        error from the context, because all schemas here must match anyway.
"""
validator = Draft4Validator(
{
"properties": {
"foo": {
"allOf": [
{"type": "string"},
{"properties": {"bar": {"type": "array"}}},
],
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
self.assertEqual(best.validator_value, "string")
def test_nested_context_for_oneOf(self):
validator = Draft4Validator(
{
"properties": {
"foo": {
"oneOf": [
{"type": "string"},
{
"oneOf": [
{"type": "string"},
{
"properties": {
"bar": {"type": "array"},
},
},
],
},
],
},
},
},
)
best = self.best_match(validator.iter_errors({"foo": {"bar": 12}}))
self.assertEqual(best.validator_value, "array")
def test_one_error(self):
validator = Draft4Validator({"minProperties": 2})
error, = validator.iter_errors({})
self.assertEqual(
exceptions.best_match(validator.iter_errors({})).validator,
"minProperties",
)
def test_no_errors(self):
validator = Draft4Validator({})
self.assertIsNone(exceptions.best_match(validator.iter_errors({})))
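def _demo_best_match():  # pragma: no cover
    """
    Illustrative sketch, not part of the original suite: the minimal happy
    path behind the assertions above, using only the public API imported at
    the top of this module.
    """
    validator = Draft4Validator({"minProperties": 2})
    error = exceptions.best_match(validator.iter_errors({}))
    assert error.validator == "minProperties"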
class TestByRelevance(TestCase):
def test_short_paths_are_better_matches(self):
shallow = exceptions.ValidationError("Oh no!", path=["baz"])
deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"])
match = max([shallow, deep], key=exceptions.relevance)
self.assertIs(match, shallow)
match = max([deep, shallow], key=exceptions.relevance)
self.assertIs(match, shallow)
def test_global_errors_are_even_better_matches(self):
shallow = exceptions.ValidationError("Oh no!", path=[])
deep = exceptions.ValidationError("Oh yes!", path=["foo"])
errors = sorted([shallow, deep], key=exceptions.relevance)
self.assertEqual(
[list(error.path) for error in errors],
[["foo"], []],
)
errors = sorted([deep, shallow], key=exceptions.relevance)
self.assertEqual(
[list(error.path) for error in errors],
[["foo"], []],
)
def test_weak_validators_are_lower_priority(self):
weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
best_match = exceptions.by_relevance(weak="a")
match = max([weak, normal], key=best_match)
self.assertIs(match, normal)
match = max([normal, weak], key=best_match)
self.assertIs(match, normal)
def test_strong_validators_are_higher_priority(self):
weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
strong = exceptions.ValidationError("Oh fine!", path=[], validator="c")
best_match = exceptions.by_relevance(weak="a", strong="c")
match = max([weak, normal, strong], key=best_match)
self.assertIs(match, strong)
match = max([strong, normal, weak], key=best_match)
self.assertIs(match, strong)
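def _demo_by_relevance():  # pragma: no cover
    """
    Illustrative sketch, not part of the original suite: validators named in
    ``weak`` lose ties, and ones named in ``strong`` win them. The validator
    names used here are arbitrary examples.
    """
    weak = exceptions.ValidationError("Oh no!", path=[], validator="anyOf")
    strong = exceptions.ValidationError("Oh yes!", path=[], validator="type")
    relevance = exceptions.by_relevance(weak="anyOf", strong="type")
    assert max([weak, strong], key=relevance) is strong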
class TestErrorTree(TestCase):
def test_it_knows_how_many_total_errors_it_contains(self):
# FIXME: https://github.com/Julian/jsonschema/issues/442
errors = [
exceptions.ValidationError("Something", validator=i)
for i in range(8)
]
tree = exceptions.ErrorTree(errors)
self.assertEqual(tree.total_errors, 8)
def test_it_contains_an_item_if_the_item_had_an_error(self):
errors = [exceptions.ValidationError("a message", path=["bar"])]
tree = exceptions.ErrorTree(errors)
self.assertIn("bar", tree)
def test_it_does_not_contain_an_item_if_the_item_had_no_error(self):
errors = [exceptions.ValidationError("a message", path=["bar"])]
tree = exceptions.ErrorTree(errors)
self.assertNotIn("foo", tree)
def test_validators_that_failed_appear_in_errors_dict(self):
error = exceptions.ValidationError("a message", validator="foo")
tree = exceptions.ErrorTree([error])
self.assertEqual(tree.errors, {"foo": error})
def test_it_creates_a_child_tree_for_each_nested_path(self):
errors = [
exceptions.ValidationError("a bar message", path=["bar"]),
exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]),
]
tree = exceptions.ErrorTree(errors)
self.assertIn(0, tree["bar"])
self.assertNotIn(1, tree["bar"])
def test_children_have_their_errors_dicts_built(self):
e1, e2 = (
exceptions.ValidationError("1", validator="foo", path=["bar", 0]),
exceptions.ValidationError("2", validator="quux", path=["bar", 0]),
)
tree = exceptions.ErrorTree([e1, e2])
self.assertEqual(tree["bar"][0].errors, {"foo": e1, "quux": e2})
def test_multiple_errors_with_instance(self):
e1, e2 = (
exceptions.ValidationError(
"1",
validator="foo",
path=["bar", "bar2"],
instance="i1"),
exceptions.ValidationError(
"2",
validator="quux",
path=["foobar", 2],
instance="i2"),
)
exceptions.ErrorTree([e1, e2])
def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self):
error = exceptions.ValidationError("123", validator="foo", instance=[])
tree = exceptions.ErrorTree([error])
with self.assertRaises(IndexError):
tree[0]
def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self):
"""
If a validator is dumb (like :validator:`required` in draft 3) and
refers to a path that isn't in the instance, the tree still properly
returns a subtree for that path.
"""
error = exceptions.ValidationError(
"a message", validator="foo", instance={}, path=["foo"],
)
tree = exceptions.ErrorTree([error])
self.assertIsInstance(tree["foo"], exceptions.ErrorTree)
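def _demo_error_tree():  # pragma: no cover
    """
    Illustrative sketch, not part of the original suite: subtrees are keyed
    by the instance path, and each node's ``errors`` dict maps validator
    names to the errors raised there.
    """
    error = exceptions.ValidationError(
        "a message", validator="type", path=["foo"],
    )
    tree = exceptions.ErrorTree([error])
    assert "foo" in tree
    assert tree["foo"].errors["type"] is error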
class TestErrorInitReprStr(TestCase):
def make_error(self, **kwargs):
defaults = dict(
message=u"hello",
validator=u"type",
validator_value=u"string",
instance=5,
schema={u"type": u"string"},
)
defaults.update(kwargs)
return exceptions.ValidationError(**defaults)
def assertShows(self, expected, **kwargs):
if PY3: # pragma: no cover
expected = expected.replace("u'", "'")
expected = textwrap.dedent(expected).rstrip("\n")
error = self.make_error(**kwargs)
message_line, _, rest = str(error).partition("\n")
self.assertEqual(message_line, error.message)
self.assertEqual(rest, expected)
def test_it_calls_super_and_sets_args(self):
error = self.make_error()
self.assertGreater(len(error.args), 1)
def test_repr(self):
self.assertEqual(
repr(exceptions.ValidationError(message="Hello!")),
"<ValidationError: %r>" % "Hello!",
)
def test_unset_error(self):
error = exceptions.ValidationError("message")
self.assertEqual(str(error), "message")
kwargs = {
"validator": "type",
"validator_value": "string",
"instance": 5,
"schema": {"type": "string"},
}
# Just the message should show if any of the attributes are unset
for attr in kwargs:
k = dict(kwargs)
del k[attr]
error = exceptions.ValidationError("message", **k)
self.assertEqual(str(error), "message")
def test_empty_paths(self):
self.assertShows(
"""
Failed validating u'type' in schema:
{u'type': u'string'}
On instance:
5
""",
path=[],
schema_path=[],
)
def test_one_item_paths(self):
self.assertShows(
"""
Failed validating u'type' in schema:
{u'type': u'string'}
On instance[0]:
5
""",
path=[0],
schema_path=["items"],
)
def test_multiple_item_paths(self):
self.assertShows(
"""
Failed validating u'type' in schema[u'items'][0]:
{u'type': u'string'}
On instance[0][u'a']:
5
""",
path=[0, u"a"],
schema_path=[u"items", 0, 1],
)
def test_uses_pprint(self):
self.assertShows(
"""
Failed validating u'maxLength' in schema:
{0: 0,
1: 1,
2: 2,
3: 3,
4: 4,
5: 5,
6: 6,
7: 7,
8: 8,
9: 9,
10: 10,
11: 11,
12: 12,
13: 13,
14: 14,
15: 15,
16: 16,
17: 17,
18: 18,
19: 19}
On instance:
[0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24]
""",
instance=list(range(25)),
schema=dict(zip(range(20), range(20))),
validator=u"maxLength",
)
    def test_str_works_with_instances_having_overridden_eq_operator(self):
"""
Check for https://github.com/Julian/jsonschema/issues/164 which
rendered exceptions unusable when a `ValidationError` involved
instances with an `__eq__` method that returned truthy values.
"""
class DontEQMeBro(object):
def __eq__(this, other): # pragma: no cover
self.fail("Don't!")
def __ne__(this, other): # pragma: no cover
self.fail("Don't!")
instance = DontEQMeBro()
error = exceptions.ValidationError(
"a message",
validator="foo",
instance=instance,
validator_value="some",
schema="schema",
)
self.assertIn(repr(instance), str(error))
class TestHashable(TestCase):
def test_hashable(self):
set([exceptions.ValidationError("")])
set([exceptions.SchemaError("")])
| 15,348 | Python | 32.151188 | 79 | 0.49179 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/tests/_helpers.py | def bug(issue=None):
message = "A known bug."
if issue is not None:
message += " See issue #{issue}.".format(issue=issue)
return message
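# Minimal usage sketch (assumes the standard-library unittest module, as
# used elsewhere in this suite; the issue number is illustrative):
#
#     import unittest
#
#     @unittest.skip(bug(issue=442))
#     def test_known_to_fail(self):
#         ...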
| 157 | Python | 25.333329 | 61 | 0.611465 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/tests/test_format.py | """
Tests for the parts of jsonschema related to the :validator:`format` property.
"""
from unittest import TestCase
from jsonschema import FormatError, ValidationError, FormatChecker
from jsonschema.validators import Draft4Validator
BOOM = ValueError("Boom!")
BANG = ZeroDivisionError("Bang!")
def boom(thing):
if thing == "bang":
raise BANG
raise BOOM
class TestFormatChecker(TestCase):
def test_it_can_validate_no_formats(self):
checker = FormatChecker(formats=())
self.assertFalse(checker.checkers)
def test_it_raises_a_key_error_for_unknown_formats(self):
with self.assertRaises(KeyError):
FormatChecker(formats=["o noes"])
def test_it_can_register_cls_checkers(self):
original = dict(FormatChecker.checkers)
self.addCleanup(FormatChecker.checkers.pop, "boom")
FormatChecker.cls_checks("boom")(boom)
self.assertEqual(
FormatChecker.checkers,
dict(original, boom=(boom, ())),
)
def test_it_can_register_checkers(self):
checker = FormatChecker()
checker.checks("boom")(boom)
self.assertEqual(
checker.checkers,
dict(FormatChecker.checkers, boom=(boom, ()))
)
def test_it_catches_registered_errors(self):
checker = FormatChecker()
checker.checks("boom", raises=type(BOOM))(boom)
with self.assertRaises(FormatError) as cm:
checker.check(instance=12, format="boom")
self.assertIs(cm.exception.cause, BOOM)
self.assertIs(cm.exception.__cause__, BOOM)
# Unregistered errors should not be caught
with self.assertRaises(type(BANG)):
checker.check(instance="bang", format="boom")
def test_format_error_causes_become_validation_error_causes(self):
checker = FormatChecker()
checker.checks("boom", raises=ValueError)(boom)
validator = Draft4Validator({"format": "boom"}, format_checker=checker)
with self.assertRaises(ValidationError) as cm:
validator.validate("BOOM")
self.assertIs(cm.exception.cause, BOOM)
self.assertIs(cm.exception.__cause__, BOOM)
def test_format_checkers_come_with_defaults(self):
# This is bad :/ but relied upon.
        # The docs for quite a while recommended people do things like
# validate(..., format_checker=FormatChecker())
# We should change that, but we can't without deprecation...
checker = FormatChecker()
with self.assertRaises(FormatError):
checker.check(instance="not-an-ipv4", format="ipv4")
def test_repr(self):
checker = FormatChecker(formats=())
checker.checks("foo")(lambda thing: True)
checker.checks("bar")(lambda thing: True)
checker.checks("baz")(lambda thing: True)
self.assertEqual(
repr(checker),
"<FormatChecker checkers=['bar', 'baz', 'foo']>",
)
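def _demo_format_checker():  # pragma: no cover
    """
    Illustrative sketch, not part of the original suite: register a checker
    and wire it into a validator. The "even" format is made up here.
    """
    checker = FormatChecker(formats=())

    @checker.checks("even", raises=ValueError)
    def is_even(value):
        return value % 2 == 0

    validator = Draft4Validator({"format": "even"}, format_checker=checker)
    validator.validate(2)
    assert not validator.is_valid(3)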
| 2,982 | Python | 32.144444 | 79 | 0.640174 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/tests/test_types.py | """
Tests on the new type interface. The actual correctness of the type checking
is handled in test_jsonschema_test_suite; these tests check that TypeChecker
functions correctly and can facilitate extensions to type checking
"""
from collections import namedtuple
from unittest import TestCase
from jsonschema import ValidationError, _validators
from jsonschema._types import TypeChecker
from jsonschema.exceptions import UndefinedTypeCheck
from jsonschema.validators import Draft4Validator, extend
def equals_2(checker, instance):
return instance == 2
def is_namedtuple(instance):
return isinstance(instance, tuple) and getattr(instance, "_fields", None)
def is_object_or_named_tuple(checker, instance):
if Draft4Validator.TYPE_CHECKER.is_type(instance, "object"):
return True
return is_namedtuple(instance)
def coerce_named_tuple(fn):
def coerced(validator, value, instance, schema):
if is_namedtuple(instance):
instance = instance._asdict()
return fn(validator, value, instance, schema)
return coerced
required = coerce_named_tuple(_validators.required)
properties = coerce_named_tuple(_validators.properties)
class TestTypeChecker(TestCase):
def test_is_type(self):
checker = TypeChecker({"two": equals_2})
self.assertEqual(
(
checker.is_type(instance=2, type="two"),
checker.is_type(instance="bar", type="two"),
),
(True, False),
)
def test_is_unknown_type(self):
with self.assertRaises(UndefinedTypeCheck) as context:
TypeChecker().is_type(4, "foobar")
self.assertIn("foobar", str(context.exception))
def test_checks_can_be_added_at_init(self):
checker = TypeChecker({"two": equals_2})
self.assertEqual(checker, TypeChecker().redefine("two", equals_2))
def test_redefine_existing_type(self):
self.assertEqual(
TypeChecker().redefine("two", object()).redefine("two", equals_2),
TypeChecker().redefine("two", equals_2),
)
def test_remove(self):
self.assertEqual(
TypeChecker({"two": equals_2}).remove("two"),
TypeChecker(),
)
def test_remove_unknown_type(self):
with self.assertRaises(UndefinedTypeCheck) as context:
TypeChecker().remove("foobar")
self.assertIn("foobar", str(context.exception))
def test_redefine_many(self):
self.assertEqual(
TypeChecker().redefine_many({"foo": int, "bar": str}),
TypeChecker().redefine("foo", int).redefine("bar", str),
)
def test_remove_multiple(self):
self.assertEqual(
TypeChecker({"foo": int, "bar": str}).remove("foo", "bar"),
TypeChecker(),
)
def test_type_check_can_raise_key_error(self):
"""
Make sure no one writes:
try:
self._type_checkers[type](...)
except KeyError:
ignoring the fact that the function itself can raise that.
"""
error = KeyError("Stuff")
def raises_keyerror(checker, instance):
raise error
with self.assertRaises(KeyError) as context:
TypeChecker({"foo": raises_keyerror}).is_type(4, "foo")
self.assertIs(context.exception, error)
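def _demo_type_checker():  # pragma: no cover
    """
    Illustrative sketch, not part of the original suite: type names are
    arbitrary labels mapped to predicates of (checker, instance). The "even"
    type is made up here.
    """
    checker = TypeChecker().redefine(
        "even", lambda checker, instance: instance % 2 == 0,
    )
    assert checker.is_type(2, "even")
    assert not checker.is_type(3, "even")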
class TestCustomTypes(TestCase):
def test_simple_type_can_be_extended(self):
def int_or_str_int(checker, instance):
if not isinstance(instance, (int, str)):
return False
try:
int(instance)
except ValueError:
return False
return True
CustomValidator = extend(
Draft4Validator,
type_checker=Draft4Validator.TYPE_CHECKER.redefine(
"integer", int_or_str_int,
),
)
validator = CustomValidator({"type": "integer"})
validator.validate(4)
validator.validate("4")
with self.assertRaises(ValidationError):
validator.validate(4.4)
def test_object_can_be_extended(self):
schema = {"type": "object"}
Point = namedtuple("Point", ["x", "y"])
type_checker = Draft4Validator.TYPE_CHECKER.redefine(
u"object", is_object_or_named_tuple,
)
CustomValidator = extend(Draft4Validator, type_checker=type_checker)
validator = CustomValidator(schema)
validator.validate(Point(x=4, y=5))
def test_object_extensions_require_custom_validators(self):
schema = {"type": "object", "required": ["x"]}
type_checker = Draft4Validator.TYPE_CHECKER.redefine(
u"object", is_object_or_named_tuple,
)
CustomValidator = extend(Draft4Validator, type_checker=type_checker)
validator = CustomValidator(schema)
Point = namedtuple("Point", ["x", "y"])
# Cannot handle required
with self.assertRaises(ValidationError):
validator.validate(Point(x=4, y=5))
def test_object_extensions_can_handle_custom_validators(self):
schema = {
"type": "object",
"required": ["x"],
"properties": {"x": {"type": "integer"}},
}
type_checker = Draft4Validator.TYPE_CHECKER.redefine(
u"object", is_object_or_named_tuple,
)
CustomValidator = extend(
Draft4Validator,
type_checker=type_checker,
validators={"required": required, "properties": properties},
)
validator = CustomValidator(schema)
Point = namedtuple("Point", ["x", "y"])
# Can now process required and properties
validator.validate(Point(x=4, y=5))
with self.assertRaises(ValidationError):
validator.validate(Point(x="not an integer", y=5))
| 5,902 | Python | 29.905759 | 78 | 0.609454 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/tests/test_validators.py | from collections import deque
from contextlib import contextmanager
from decimal import Decimal
from io import BytesIO
from unittest import TestCase
import json
import os
import sys
import tempfile
import unittest
from twisted.trial.unittest import SynchronousTestCase
import attr
from jsonschema import FormatChecker, TypeChecker, exceptions, validators
from jsonschema.compat import PY3, pathname2url
from jsonschema.tests._helpers import bug
def startswith(validator, startswith, instance, schema):
if not instance.startswith(startswith):
yield exceptions.ValidationError(u"Whoops!")
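# ``startswith`` above follows the validator-callable contract that
# ``validators.create`` assumes: it is invoked as (validator, value,
# instance, schema) and *yields* ValidationErrors rather than raising them.
# A minimal wiring sketch, mirroring the setUp below (the meta-schema URI is
# arbitrary):
#
#     Validator = validators.create(
#         meta_schema={u"$id": "some://meta/schema"},
#         validators={u"startswith": startswith},
#     )
#     Validator({u"startswith": u"hel"}).validate(u"hello")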
class TestCreateAndExtend(SynchronousTestCase):
def setUp(self):
self.addCleanup(
self.assertEqual,
validators.meta_schemas,
dict(validators.meta_schemas),
)
self.meta_schema = {u"$id": "some://meta/schema"}
self.validators = {u"startswith": startswith}
self.type_checker = TypeChecker()
self.Validator = validators.create(
meta_schema=self.meta_schema,
validators=self.validators,
type_checker=self.type_checker,
)
def test_attrs(self):
self.assertEqual(
(
self.Validator.VALIDATORS,
self.Validator.META_SCHEMA,
self.Validator.TYPE_CHECKER,
), (
self.validators,
self.meta_schema,
self.type_checker,
),
)
def test_init(self):
schema = {u"startswith": u"foo"}
self.assertEqual(self.Validator(schema).schema, schema)
def test_iter_errors(self):
schema = {u"startswith": u"hel"}
iter_errors = self.Validator(schema).iter_errors
errors = list(iter_errors(u"hello"))
self.assertEqual(errors, [])
expected_error = exceptions.ValidationError(
u"Whoops!",
instance=u"goodbye",
schema=schema,
validator=u"startswith",
validator_value=u"hel",
schema_path=deque([u"startswith"]),
)
errors = list(iter_errors(u"goodbye"))
self.assertEqual(len(errors), 1)
self.assertEqual(errors[0]._contents(), expected_error._contents())
def test_if_a_version_is_provided_it_is_registered(self):
Validator = validators.create(
meta_schema={u"$id": "something"},
version="my version",
)
self.addCleanup(validators.meta_schemas.pop, "something")
self.assertEqual(Validator.__name__, "MyVersionValidator")
def test_if_a_version_is_not_provided_it_is_not_registered(self):
original = dict(validators.meta_schemas)
validators.create(meta_schema={u"id": "id"})
self.assertEqual(validators.meta_schemas, original)
def test_validates_registers_meta_schema_id(self):
meta_schema_key = "meta schema id"
my_meta_schema = {u"id": meta_schema_key}
validators.create(
meta_schema=my_meta_schema,
version="my version",
id_of=lambda s: s.get("id", ""),
)
self.addCleanup(validators.meta_schemas.pop, meta_schema_key)
self.assertIn(meta_schema_key, validators.meta_schemas)
def test_validates_registers_meta_schema_draft6_id(self):
meta_schema_key = "meta schema $id"
my_meta_schema = {u"$id": meta_schema_key}
validators.create(
meta_schema=my_meta_schema,
version="my version",
)
self.addCleanup(validators.meta_schemas.pop, meta_schema_key)
self.assertIn(meta_schema_key, validators.meta_schemas)
def test_create_default_types(self):
Validator = validators.create(meta_schema={}, validators=())
self.assertTrue(
all(
Validator({}).is_type(instance=instance, type=type)
for type, instance in [
(u"array", []),
(u"boolean", True),
(u"integer", 12),
(u"null", None),
(u"number", 12.0),
(u"object", {}),
(u"string", u"foo"),
]
),
)
def test_extend(self):
original = dict(self.Validator.VALIDATORS)
new = object()
Extended = validators.extend(
self.Validator,
validators={u"new": new},
)
self.assertEqual(
(
Extended.VALIDATORS,
Extended.META_SCHEMA,
Extended.TYPE_CHECKER,
self.Validator.VALIDATORS,
), (
dict(original, new=new),
self.Validator.META_SCHEMA,
self.Validator.TYPE_CHECKER,
original,
),
)
def test_extend_idof(self):
"""
Extending a validator preserves its notion of schema IDs.
"""
def id_of(schema):
return schema.get(u"__test__", self.Validator.ID_OF(schema))
correct_id = "the://correct/id/"
meta_schema = {
u"$id": "the://wrong/id/",
u"__test__": correct_id,
}
Original = validators.create(
meta_schema=meta_schema,
validators=self.validators,
type_checker=self.type_checker,
id_of=id_of,
)
self.assertEqual(Original.ID_OF(Original.META_SCHEMA), correct_id)
Derived = validators.extend(Original)
self.assertEqual(Derived.ID_OF(Derived.META_SCHEMA), correct_id)
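def _demo_extend():  # pragma: no cover
    """
    Illustrative sketch, not part of the original suite: graft one extra
    validator onto a stock draft class without mutating the original.
    """
    Extended = validators.extend(
        validators.Draft7Validator,
        validators={u"startswith": startswith},
    )
    Extended({u"startswith": u"hel"}).validate(u"hello")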
class TestLegacyTypeChecking(SynchronousTestCase):
def test_create_default_types(self):
Validator = validators.create(meta_schema={}, validators=())
self.assertEqual(
set(Validator.DEFAULT_TYPES), {
u"array",
u"boolean",
u"integer",
u"null",
u"number",
u"object", u"string",
},
)
self.flushWarnings()
def test_extend(self):
Validator = validators.create(meta_schema={}, validators=())
original = dict(Validator.VALIDATORS)
new = object()
Extended = validators.extend(
Validator,
validators={u"new": new},
)
self.assertEqual(
(
Extended.VALIDATORS,
Extended.META_SCHEMA,
Extended.TYPE_CHECKER,
Validator.VALIDATORS,
Extended.DEFAULT_TYPES,
Extended({}).DEFAULT_TYPES,
self.flushWarnings()[0]["message"],
), (
dict(original, new=new),
Validator.META_SCHEMA,
Validator.TYPE_CHECKER,
original,
Validator.DEFAULT_TYPES,
Validator.DEFAULT_TYPES,
self.flushWarnings()[0]["message"],
),
)
def test_types_redefines_the_validators_type_checker(self):
schema = {"type": "string"}
self.assertFalse(validators.Draft7Validator(schema).is_valid(12))
validator = validators.Draft7Validator(
schema,
types={"string": (str, int)},
)
self.assertTrue(validator.is_valid(12))
self.flushWarnings()
def test_providing_default_types_warns(self):
self.assertWarns(
category=DeprecationWarning,
message=(
"The default_types argument is deprecated. "
"Use the type_checker argument instead."
),
# https://tm.tl/9363 :'(
filename=sys.modules[self.assertWarns.__module__].__file__,
f=validators.create,
meta_schema={},
validators={},
default_types={"foo": object},
)
def test_cannot_ask_for_default_types_with_non_default_type_checker(self):
"""
We raise an error when you ask a validator with non-default
type checker for its DEFAULT_TYPES.
The type checker argument is new, so no one but this library
itself should be trying to use it, and doing so while then
asking for DEFAULT_TYPES makes no sense (not to mention is
deprecated), since type checkers are not strictly about Python
type.
"""
Validator = validators.create(
meta_schema={},
validators={},
type_checker=TypeChecker(),
)
with self.assertRaises(validators._DontDoThat) as e:
Validator.DEFAULT_TYPES
self.assertIn(
"DEFAULT_TYPES cannot be used on Validators using TypeCheckers",
str(e.exception),
)
with self.assertRaises(validators._DontDoThat):
Validator({}).DEFAULT_TYPES
self.assertFalse(self.flushWarnings())
def test_providing_explicit_type_checker_does_not_warn(self):
Validator = validators.create(
meta_schema={},
validators={},
type_checker=TypeChecker(),
)
self.assertFalse(self.flushWarnings())
Validator({})
self.assertFalse(self.flushWarnings())
def test_providing_neither_does_not_warn(self):
Validator = validators.create(meta_schema={}, validators={})
self.assertFalse(self.flushWarnings())
Validator({})
self.assertFalse(self.flushWarnings())
def test_providing_default_types_with_type_checker_errors(self):
with self.assertRaises(TypeError) as e:
validators.create(
meta_schema={},
validators={},
default_types={"foo": object},
type_checker=TypeChecker(),
)
self.assertIn(
"Do not specify default_types when providing a type checker",
str(e.exception),
)
self.assertFalse(self.flushWarnings())
def test_extending_a_legacy_validator_with_a_type_checker_errors(self):
Validator = validators.create(
meta_schema={},
validators={},
default_types={u"array": list}
)
with self.assertRaises(TypeError) as e:
validators.extend(
Validator,
validators={},
type_checker=TypeChecker(),
)
self.assertIn(
(
"Cannot extend a validator created with default_types "
"with a type_checker. Update the validator to use a "
"type_checker when created."
),
str(e.exception),
)
self.flushWarnings()
def test_extending_a_legacy_validator_does_not_rewarn(self):
Validator = validators.create(meta_schema={}, default_types={})
self.assertTrue(self.flushWarnings())
validators.extend(Validator)
self.assertFalse(self.flushWarnings())
def test_accessing_default_types_warns(self):
Validator = validators.create(meta_schema={}, validators={})
self.assertFalse(self.flushWarnings())
self.assertWarns(
DeprecationWarning,
(
"The DEFAULT_TYPES attribute is deprecated. "
"See the type checker attached to this validator instead."
),
# https://tm.tl/9363 :'(
sys.modules[self.assertWarns.__module__].__file__,
getattr,
Validator,
"DEFAULT_TYPES",
)
def test_accessing_default_types_on_the_instance_warns(self):
Validator = validators.create(meta_schema={}, validators={})
self.assertFalse(self.flushWarnings())
self.assertWarns(
DeprecationWarning,
(
"The DEFAULT_TYPES attribute is deprecated. "
"See the type checker attached to this validator instead."
),
# https://tm.tl/9363 :'(
sys.modules[self.assertWarns.__module__].__file__,
getattr,
Validator({}),
"DEFAULT_TYPES",
)
def test_providing_types_to_init_warns(self):
Validator = validators.create(meta_schema={}, validators={})
self.assertFalse(self.flushWarnings())
self.assertWarns(
category=DeprecationWarning,
message=(
"The types argument is deprecated. "
"Provide a type_checker to jsonschema.validators.extend "
"instead."
),
# https://tm.tl/9363 :'(
filename=sys.modules[self.assertWarns.__module__].__file__,
f=Validator,
schema={},
types={"bar": object},
)
class TestIterErrors(TestCase):
def setUp(self):
self.validator = validators.Draft3Validator({})
def test_iter_errors(self):
instance = [1, 2]
schema = {
u"disallow": u"array",
u"enum": [["a", "b", "c"], ["d", "e", "f"]],
u"minItems": 3,
}
got = (e.message for e in self.validator.iter_errors(instance, schema))
expected = [
"%r is disallowed for [1, 2]" % (schema["disallow"],),
"[1, 2] is too short",
"[1, 2] is not one of %r" % (schema["enum"],),
]
self.assertEqual(sorted(got), sorted(expected))
def test_iter_errors_multiple_failures_one_validator(self):
instance = {"foo": 2, "bar": [1], "baz": 15, "quux": "spam"}
schema = {
u"properties": {
"foo": {u"type": "string"},
"bar": {u"minItems": 2},
"baz": {u"maximum": 10, u"enum": [2, 4, 6, 8]},
},
}
errors = list(self.validator.iter_errors(instance, schema))
self.assertEqual(len(errors), 4)
class TestValidationErrorMessages(TestCase):
def message_for(self, instance, schema, *args, **kwargs):
kwargs.setdefault("cls", validators.Draft3Validator)
with self.assertRaises(exceptions.ValidationError) as e:
validators.validate(instance, schema, *args, **kwargs)
return e.exception.message
def test_single_type_failure(self):
message = self.message_for(instance=1, schema={u"type": u"string"})
self.assertEqual(message, "1 is not of type %r" % u"string")
def test_single_type_list_failure(self):
message = self.message_for(instance=1, schema={u"type": [u"string"]})
self.assertEqual(message, "1 is not of type %r" % u"string")
def test_multiple_type_failure(self):
types = u"string", u"object"
message = self.message_for(instance=1, schema={u"type": list(types)})
self.assertEqual(message, "1 is not of type %r, %r" % types)
def test_object_without_title_type_failure(self):
type = {u"type": [{u"minimum": 3}]}
message = self.message_for(instance=1, schema={u"type": [type]})
self.assertEqual(message, "1 is less than the minimum of 3")
def test_object_with_named_type_failure(self):
schema = {u"type": [{u"name": "Foo", u"minimum": 3}]}
message = self.message_for(instance=1, schema=schema)
self.assertEqual(message, "1 is less than the minimum of 3")
def test_minimum(self):
message = self.message_for(instance=1, schema={"minimum": 2})
self.assertEqual(message, "1 is less than the minimum of 2")
def test_maximum(self):
message = self.message_for(instance=1, schema={"maximum": 0})
self.assertEqual(message, "1 is greater than the maximum of 0")
def test_dependencies_single_element(self):
depend, on = "bar", "foo"
schema = {u"dependencies": {depend: on}}
message = self.message_for(
instance={"bar": 2},
schema=schema,
cls=validators.Draft3Validator,
)
self.assertEqual(message, "%r is a dependency of %r" % (on, depend))
def test_dependencies_list_draft3(self):
depend, on = "bar", "foo"
schema = {u"dependencies": {depend: [on]}}
message = self.message_for(
instance={"bar": 2},
schema=schema,
cls=validators.Draft3Validator,
)
self.assertEqual(message, "%r is a dependency of %r" % (on, depend))
def test_dependencies_list_draft7(self):
depend, on = "bar", "foo"
schema = {u"dependencies": {depend: [on]}}
message = self.message_for(
instance={"bar": 2},
schema=schema,
cls=validators.Draft7Validator,
)
self.assertEqual(message, "%r is a dependency of %r" % (on, depend))
def test_additionalItems_single_failure(self):
message = self.message_for(
instance=[2],
schema={u"items": [], u"additionalItems": False},
)
self.assertIn("(2 was unexpected)", message)
def test_additionalItems_multiple_failures(self):
message = self.message_for(
instance=[1, 2, 3],
schema={u"items": [], u"additionalItems": False}
)
self.assertIn("(1, 2, 3 were unexpected)", message)
def test_additionalProperties_single_failure(self):
additional = "foo"
schema = {u"additionalProperties": False}
message = self.message_for(instance={additional: 2}, schema=schema)
self.assertIn("(%r was unexpected)" % (additional,), message)
def test_additionalProperties_multiple_failures(self):
schema = {u"additionalProperties": False}
message = self.message_for(
instance=dict.fromkeys(["foo", "bar"]),
schema=schema,
)
self.assertIn(repr("foo"), message)
self.assertIn(repr("bar"), message)
self.assertIn("were unexpected)", message)
def test_const(self):
schema = {u"const": 12}
message = self.message_for(
instance={"foo": "bar"},
schema=schema,
cls=validators.Draft6Validator,
)
self.assertIn("12 was expected", message)
def test_contains(self):
schema = {u"contains": {u"const": 12}}
message = self.message_for(
instance=[2, {}, []],
schema=schema,
cls=validators.Draft6Validator,
)
self.assertIn(
"None of [2, {}, []] are valid under the given schema",
message,
)
def test_invalid_format_default_message(self):
checker = FormatChecker(formats=())
checker.checks(u"thing")(lambda value: False)
schema = {u"format": u"thing"}
message = self.message_for(
instance="bla",
schema=schema,
format_checker=checker,
)
self.assertIn(repr("bla"), message)
self.assertIn(repr("thing"), message)
self.assertIn("is not a", message)
def test_additionalProperties_false_patternProperties(self):
schema = {u"type": u"object",
u"additionalProperties": False,
u"patternProperties": {
u"^abc$": {u"type": u"string"},
u"^def$": {u"type": u"string"},
}}
message = self.message_for(
instance={u"zebra": 123},
schema=schema,
cls=validators.Draft4Validator,
)
self.assertEqual(
message,
"{} does not match any of the regexes: {}, {}".format(
repr(u"zebra"), repr(u"^abc$"), repr(u"^def$"),
),
)
message = self.message_for(
instance={u"zebra": 123, u"fish": 456},
schema=schema,
cls=validators.Draft4Validator,
)
self.assertEqual(
message,
"{}, {} do not match any of the regexes: {}, {}".format(
repr(u"fish"), repr(u"zebra"), repr(u"^abc$"), repr(u"^def$")
),
)
def test_False_schema(self):
message = self.message_for(
instance="something",
schema=False,
cls=validators.Draft7Validator,
)
self.assertIn("False schema does not allow 'something'", message)
class TestValidationErrorDetails(TestCase):
# TODO: These really need unit tests for each individual validator, rather
# than just these higher level tests.
def test_anyOf(self):
instance = 5
schema = {
"anyOf": [
{"minimum": 20},
{"type": "string"},
],
}
validator = validators.Draft4Validator(schema)
errors = list(validator.iter_errors(instance))
self.assertEqual(len(errors), 1)
e = errors[0]
self.assertEqual(e.validator, "anyOf")
self.assertEqual(e.validator_value, schema["anyOf"])
self.assertEqual(e.instance, instance)
self.assertEqual(e.schema, schema)
self.assertIsNone(e.parent)
self.assertEqual(e.path, deque([]))
self.assertEqual(e.relative_path, deque([]))
self.assertEqual(e.absolute_path, deque([]))
self.assertEqual(e.schema_path, deque(["anyOf"]))
self.assertEqual(e.relative_schema_path, deque(["anyOf"]))
self.assertEqual(e.absolute_schema_path, deque(["anyOf"]))
self.assertEqual(len(e.context), 2)
e1, e2 = sorted_errors(e.context)
self.assertEqual(e1.validator, "minimum")
self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"])
self.assertEqual(e1.instance, instance)
self.assertEqual(e1.schema, schema["anyOf"][0])
self.assertIs(e1.parent, e)
self.assertEqual(e1.path, deque([]))
self.assertEqual(e1.absolute_path, deque([]))
self.assertEqual(e1.relative_path, deque([]))
self.assertEqual(e1.schema_path, deque([0, "minimum"]))
self.assertEqual(e1.relative_schema_path, deque([0, "minimum"]))
self.assertEqual(
e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]),
)
self.assertFalse(e1.context)
self.assertEqual(e2.validator, "type")
self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"])
self.assertEqual(e2.instance, instance)
self.assertEqual(e2.schema, schema["anyOf"][1])
self.assertIs(e2.parent, e)
self.assertEqual(e2.path, deque([]))
self.assertEqual(e2.relative_path, deque([]))
self.assertEqual(e2.absolute_path, deque([]))
self.assertEqual(e2.schema_path, deque([1, "type"]))
self.assertEqual(e2.relative_schema_path, deque([1, "type"]))
self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"]))
self.assertEqual(len(e2.context), 0)
def test_type(self):
instance = {"foo": 1}
schema = {
"type": [
{"type": "integer"},
{
"type": "object",
"properties": {"foo": {"enum": [2]}},
},
],
}
validator = validators.Draft3Validator(schema)
errors = list(validator.iter_errors(instance))
self.assertEqual(len(errors), 1)
e = errors[0]
self.assertEqual(e.validator, "type")
self.assertEqual(e.validator_value, schema["type"])
self.assertEqual(e.instance, instance)
self.assertEqual(e.schema, schema)
self.assertIsNone(e.parent)
self.assertEqual(e.path, deque([]))
self.assertEqual(e.relative_path, deque([]))
self.assertEqual(e.absolute_path, deque([]))
self.assertEqual(e.schema_path, deque(["type"]))
self.assertEqual(e.relative_schema_path, deque(["type"]))
self.assertEqual(e.absolute_schema_path, deque(["type"]))
self.assertEqual(len(e.context), 2)
e1, e2 = sorted_errors(e.context)
self.assertEqual(e1.validator, "type")
self.assertEqual(e1.validator_value, schema["type"][0]["type"])
self.assertEqual(e1.instance, instance)
self.assertEqual(e1.schema, schema["type"][0])
self.assertIs(e1.parent, e)
self.assertEqual(e1.path, deque([]))
self.assertEqual(e1.relative_path, deque([]))
self.assertEqual(e1.absolute_path, deque([]))
self.assertEqual(e1.schema_path, deque([0, "type"]))
self.assertEqual(e1.relative_schema_path, deque([0, "type"]))
self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"]))
self.assertFalse(e1.context)
self.assertEqual(e2.validator, "enum")
self.assertEqual(e2.validator_value, [2])
self.assertEqual(e2.instance, 1)
self.assertEqual(e2.schema, {u"enum": [2]})
self.assertIs(e2.parent, e)
self.assertEqual(e2.path, deque(["foo"]))
self.assertEqual(e2.relative_path, deque(["foo"]))
self.assertEqual(e2.absolute_path, deque(["foo"]))
self.assertEqual(
e2.schema_path, deque([1, "properties", "foo", "enum"]),
)
self.assertEqual(
e2.relative_schema_path, deque([1, "properties", "foo", "enum"]),
)
self.assertEqual(
e2.absolute_schema_path,
deque(["type", 1, "properties", "foo", "enum"]),
)
self.assertFalse(e2.context)
def test_single_nesting(self):
instance = {"foo": 2, "bar": [1], "baz": 15, "quux": "spam"}
schema = {
"properties": {
"foo": {"type": "string"},
"bar": {"minItems": 2},
"baz": {"maximum": 10, "enum": [2, 4, 6, 8]},
},
}
validator = validators.Draft3Validator(schema)
errors = validator.iter_errors(instance)
e1, e2, e3, e4 = sorted_errors(errors)
self.assertEqual(e1.path, deque(["bar"]))
self.assertEqual(e2.path, deque(["baz"]))
self.assertEqual(e3.path, deque(["baz"]))
self.assertEqual(e4.path, deque(["foo"]))
self.assertEqual(e1.relative_path, deque(["bar"]))
self.assertEqual(e2.relative_path, deque(["baz"]))
self.assertEqual(e3.relative_path, deque(["baz"]))
self.assertEqual(e4.relative_path, deque(["foo"]))
self.assertEqual(e1.absolute_path, deque(["bar"]))
self.assertEqual(e2.absolute_path, deque(["baz"]))
self.assertEqual(e3.absolute_path, deque(["baz"]))
self.assertEqual(e4.absolute_path, deque(["foo"]))
self.assertEqual(e1.validator, "minItems")
self.assertEqual(e2.validator, "enum")
self.assertEqual(e3.validator, "maximum")
self.assertEqual(e4.validator, "type")
def test_multiple_nesting(self):
instance = [1, {"foo": 2, "bar": {"baz": [1]}}, "quux"]
schema = {
"type": "string",
"items": {
"type": ["string", "object"],
"properties": {
"foo": {"enum": [1, 3]},
"bar": {
"type": "array",
"properties": {
"bar": {"required": True},
"baz": {"minItems": 2},
},
},
},
},
}
validator = validators.Draft3Validator(schema)
errors = validator.iter_errors(instance)
e1, e2, e3, e4, e5, e6 = sorted_errors(errors)
self.assertEqual(e1.path, deque([]))
self.assertEqual(e2.path, deque([0]))
self.assertEqual(e3.path, deque([1, "bar"]))
self.assertEqual(e4.path, deque([1, "bar", "bar"]))
self.assertEqual(e5.path, deque([1, "bar", "baz"]))
self.assertEqual(e6.path, deque([1, "foo"]))
self.assertEqual(e1.schema_path, deque(["type"]))
self.assertEqual(e2.schema_path, deque(["items", "type"]))
self.assertEqual(
list(e3.schema_path), ["items", "properties", "bar", "type"],
)
self.assertEqual(
list(e4.schema_path),
["items", "properties", "bar", "properties", "bar", "required"],
)
self.assertEqual(
list(e5.schema_path),
["items", "properties", "bar", "properties", "baz", "minItems"]
)
self.assertEqual(
list(e6.schema_path), ["items", "properties", "foo", "enum"],
)
self.assertEqual(e1.validator, "type")
self.assertEqual(e2.validator, "type")
self.assertEqual(e3.validator, "type")
self.assertEqual(e4.validator, "required")
self.assertEqual(e5.validator, "minItems")
self.assertEqual(e6.validator, "enum")
def test_recursive(self):
schema = {
"definitions": {
"node": {
"anyOf": [{
"type": "object",
"required": ["name", "children"],
"properties": {
"name": {
"type": "string",
},
"children": {
"type": "object",
"patternProperties": {
"^.*$": {
"$ref": "#/definitions/node",
},
},
},
},
}],
},
},
"type": "object",
"required": ["root"],
"properties": {"root": {"$ref": "#/definitions/node"}},
}
instance = {
"root": {
"name": "root",
"children": {
"a": {
"name": "a",
"children": {
"ab": {
"name": "ab",
# missing "children"
},
},
},
},
},
}
validator = validators.Draft4Validator(schema)
e, = validator.iter_errors(instance)
self.assertEqual(e.absolute_path, deque(["root"]))
self.assertEqual(
e.absolute_schema_path, deque(["properties", "root", "anyOf"]),
)
e1, = e.context
self.assertEqual(e1.absolute_path, deque(["root", "children", "a"]))
self.assertEqual(
e1.absolute_schema_path, deque(
[
"properties",
"root",
"anyOf",
0,
"properties",
"children",
"patternProperties",
"^.*$",
"anyOf",
],
),
)
e2, = e1.context
self.assertEqual(
e2.absolute_path, deque(
["root", "children", "a", "children", "ab"],
),
)
self.assertEqual(
e2.absolute_schema_path, deque(
[
"properties",
"root",
"anyOf",
0,
"properties",
"children",
"patternProperties",
"^.*$",
"anyOf",
0,
"properties",
"children",
"patternProperties",
"^.*$",
"anyOf",
],
),
)
def test_additionalProperties(self):
instance = {"bar": "bar", "foo": 2}
schema = {"additionalProperties": {"type": "integer", "minimum": 5}}
validator = validators.Draft3Validator(schema)
errors = validator.iter_errors(instance)
e1, e2 = sorted_errors(errors)
self.assertEqual(e1.path, deque(["bar"]))
self.assertEqual(e2.path, deque(["foo"]))
self.assertEqual(e1.validator, "type")
self.assertEqual(e2.validator, "minimum")
def test_patternProperties(self):
instance = {"bar": 1, "foo": 2}
schema = {
"patternProperties": {
"bar": {"type": "string"},
"foo": {"minimum": 5},
},
}
validator = validators.Draft3Validator(schema)
errors = validator.iter_errors(instance)
e1, e2 = sorted_errors(errors)
self.assertEqual(e1.path, deque(["bar"]))
self.assertEqual(e2.path, deque(["foo"]))
self.assertEqual(e1.validator, "type")
self.assertEqual(e2.validator, "minimum")
def test_additionalItems(self):
instance = ["foo", 1]
schema = {
"items": [],
"additionalItems": {"type": "integer", "minimum": 5},
}
validator = validators.Draft3Validator(schema)
errors = validator.iter_errors(instance)
e1, e2 = sorted_errors(errors)
self.assertEqual(e1.path, deque([0]))
self.assertEqual(e2.path, deque([1]))
self.assertEqual(e1.validator, "type")
self.assertEqual(e2.validator, "minimum")
def test_additionalItems_with_items(self):
instance = ["foo", "bar", 1]
schema = {
"items": [{}],
"additionalItems": {"type": "integer", "minimum": 5},
}
validator = validators.Draft3Validator(schema)
errors = validator.iter_errors(instance)
e1, e2 = sorted_errors(errors)
self.assertEqual(e1.path, deque([1]))
self.assertEqual(e2.path, deque([2]))
self.assertEqual(e1.validator, "type")
self.assertEqual(e2.validator, "minimum")
def test_propertyNames(self):
instance = {"foo": 12}
schema = {"propertyNames": {"not": {"const": "foo"}}}
validator = validators.Draft7Validator(schema)
error, = validator.iter_errors(instance)
self.assertEqual(error.validator, "not")
self.assertEqual(
error.message,
"%r is not allowed for %r" % ({"const": "foo"}, "foo"),
)
self.assertEqual(error.path, deque([]))
self.assertEqual(error.schema_path, deque(["propertyNames", "not"]))
def test_if_then(self):
schema = {
"if": {"const": 12},
"then": {"const": 13},
}
validator = validators.Draft7Validator(schema)
error, = validator.iter_errors(12)
self.assertEqual(error.validator, "const")
self.assertEqual(error.message, "13 was expected")
self.assertEqual(error.path, deque([]))
self.assertEqual(error.schema_path, deque(["if", "then", "const"]))
def test_if_else(self):
schema = {
"if": {"const": 12},
"else": {"const": 13},
}
validator = validators.Draft7Validator(schema)
error, = validator.iter_errors(15)
self.assertEqual(error.validator, "const")
self.assertEqual(error.message, "13 was expected")
self.assertEqual(error.path, deque([]))
self.assertEqual(error.schema_path, deque(["if", "else", "const"]))
def test_boolean_schema_False(self):
validator = validators.Draft7Validator(False)
error, = validator.iter_errors(12)
self.assertEqual(
(
error.message,
error.validator,
error.validator_value,
error.instance,
error.schema,
error.schema_path,
),
(
"False schema does not allow 12",
None,
None,
12,
False,
deque([]),
),
)
def test_ref(self):
ref, schema = "someRef", {"additionalProperties": {"type": "integer"}}
validator = validators.Draft7Validator(
{"$ref": ref},
resolver=validators.RefResolver("", {}, store={ref: schema}),
)
error, = validator.iter_errors({"foo": "notAnInteger"})
self.assertEqual(
(
error.message,
error.validator,
error.validator_value,
error.instance,
error.absolute_path,
error.schema,
error.schema_path,
),
(
"'notAnInteger' is not of type 'integer'",
"type",
"integer",
"notAnInteger",
deque(["foo"]),
{"type": "integer"},
deque(["additionalProperties", "type"]),
),
)
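def _demo_error_paths():  # pragma: no cover
    """
    Illustrative sketch, not part of the original suite, of the
    path/schema_path bookkeeping asserted above.
    """
    validator = validators.Draft7Validator(
        {"properties": {"foo": {"type": "integer"}}},
    )
    error, = validator.iter_errors({"foo": "not an integer"})
    assert list(error.absolute_path) == ["foo"]
    assert list(error.absolute_schema_path) == ["properties", "foo", "type"]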
class MetaSchemaTestsMixin(object):
# TODO: These all belong upstream
def test_invalid_properties(self):
with self.assertRaises(exceptions.SchemaError):
self.Validator.check_schema({"properties": {"test": object()}})
def test_minItems_invalid_string(self):
with self.assertRaises(exceptions.SchemaError):
# needs to be an integer
self.Validator.check_schema({"minItems": "1"})
def test_enum_allows_empty_arrays(self):
"""
Technically, all the spec says is they SHOULD have elements, not MUST.
See https://github.com/Julian/jsonschema/issues/529.
"""
self.Validator.check_schema({"enum": []})
def test_enum_allows_non_unique_items(self):
"""
Technically, all the spec says is they SHOULD be unique, not MUST.
See https://github.com/Julian/jsonschema/issues/529.
"""
self.Validator.check_schema({"enum": [12, 12]})
class ValidatorTestMixin(MetaSchemaTestsMixin, object):
def test_valid_instances_are_valid(self):
schema, instance = self.valid
self.assertTrue(self.Validator(schema).is_valid(instance))
def test_invalid_instances_are_not_valid(self):
schema, instance = self.invalid
self.assertFalse(self.Validator(schema).is_valid(instance))
def test_non_existent_properties_are_ignored(self):
self.Validator({object(): object()}).validate(instance=object())
def test_it_creates_a_ref_resolver_if_not_provided(self):
self.assertIsInstance(
self.Validator({}).resolver,
validators.RefResolver,
)
def test_it_delegates_to_a_ref_resolver(self):
ref, schema = "someCoolRef", {"type": "integer"}
resolver = validators.RefResolver("", {}, store={ref: schema})
validator = self.Validator({"$ref": ref}, resolver=resolver)
with self.assertRaises(exceptions.ValidationError):
validator.validate(None)
def test_it_delegates_to_a_legacy_ref_resolver(self):
"""
Legacy RefResolvers support only the context manager form of
resolution.
"""
class LegacyRefResolver(object):
@contextmanager
def resolving(this, ref):
self.assertEqual(ref, "the ref")
yield {"type": "integer"}
resolver = LegacyRefResolver()
schema = {"$ref": "the ref"}
with self.assertRaises(exceptions.ValidationError):
self.Validator(schema, resolver=resolver).validate(None)
def test_is_type_is_true_for_valid_type(self):
self.assertTrue(self.Validator({}).is_type("foo", "string"))
def test_is_type_is_false_for_invalid_type(self):
self.assertFalse(self.Validator({}).is_type("foo", "array"))
def test_is_type_evades_bool_inheriting_from_int(self):
self.assertFalse(self.Validator({}).is_type(True, "integer"))
self.assertFalse(self.Validator({}).is_type(True, "number"))
@unittest.skipIf(PY3, "In Python 3 json.load always produces unicode")
def test_string_a_bytestring_is_a_string(self):
self.Validator({"type": "string"}).validate(b"foo")
def test_patterns_can_be_native_strings(self):
"""
See https://github.com/Julian/jsonschema/issues/611.
"""
self.Validator({"pattern": "foo"}).validate("foo")
def test_it_can_validate_with_decimals(self):
schema = {"items": {"type": "number"}}
Validator = validators.extend(
self.Validator,
type_checker=self.Validator.TYPE_CHECKER.redefine(
"number",
lambda checker, thing: isinstance(
thing, (int, float, Decimal),
) and not isinstance(thing, bool),
)
)
validator = Validator(schema)
validator.validate([1, 1.1, Decimal(1) / Decimal(8)])
invalid = ["foo", {}, [], True, None]
self.assertEqual(
[error.instance for error in validator.iter_errors(invalid)],
invalid,
)
def test_it_returns_true_for_formats_it_does_not_know_about(self):
validator = self.Validator(
{"format": "carrot"}, format_checker=FormatChecker(),
)
validator.validate("bugs")
def test_it_does_not_validate_formats_by_default(self):
validator = self.Validator({})
self.assertIsNone(validator.format_checker)
def test_it_validates_formats_if_a_checker_is_provided(self):
checker = FormatChecker()
bad = ValueError("Bad!")
@checker.checks("foo", raises=ValueError)
def check(value):
if value == "good":
return True
elif value == "bad":
raise bad
else: # pragma: no cover
self.fail("What is {}? [Baby Don't Hurt Me]".format(value))
validator = self.Validator(
{"format": "foo"}, format_checker=checker,
)
validator.validate("good")
with self.assertRaises(exceptions.ValidationError) as cm:
validator.validate("bad")
# Make sure original cause is attached
self.assertIs(cm.exception.cause, bad)
def test_non_string_custom_type(self):
non_string_type = object()
schema = {"type": [non_string_type]}
Crazy = validators.extend(
self.Validator,
type_checker=self.Validator.TYPE_CHECKER.redefine(
non_string_type,
lambda checker, thing: isinstance(thing, int),
)
)
Crazy(schema).validate(15)
def test_it_properly_formats_tuples_in_errors(self):
"""
A tuple instance properly formats validation errors for uniqueItems.
See https://github.com/Julian/jsonschema/pull/224
"""
TupleValidator = validators.extend(
self.Validator,
type_checker=self.Validator.TYPE_CHECKER.redefine(
"array",
lambda checker, thing: isinstance(thing, tuple),
)
)
with self.assertRaises(exceptions.ValidationError) as e:
TupleValidator({"uniqueItems": True}).validate((1, 1))
self.assertIn("(1, 1) has non-unique elements", str(e.exception))
class AntiDraft6LeakMixin(object):
"""
Make sure functionality from draft 6 doesn't leak backwards in time.
"""
def test_True_is_not_a_schema(self):
with self.assertRaises(exceptions.SchemaError) as e:
self.Validator.check_schema(True)
self.assertIn("True is not of type", str(e.exception))
def test_False_is_not_a_schema(self):
with self.assertRaises(exceptions.SchemaError) as e:
self.Validator.check_schema(False)
self.assertIn("False is not of type", str(e.exception))
@unittest.skip(bug(523))
def test_True_is_not_a_schema_even_if_you_forget_to_check(self):
resolver = validators.RefResolver("", {})
with self.assertRaises(Exception) as e:
self.Validator(True, resolver=resolver).validate(12)
self.assertNotIsInstance(e.exception, exceptions.ValidationError)
@unittest.skip(bug(523))
def test_False_is_not_a_schema_even_if_you_forget_to_check(self):
resolver = validators.RefResolver("", {})
with self.assertRaises(Exception) as e:
self.Validator(False, resolver=resolver).validate(12)
self.assertNotIsInstance(e.exception, exceptions.ValidationError)
class TestDraft3Validator(AntiDraft6LeakMixin, ValidatorTestMixin, TestCase):
Validator = validators.Draft3Validator
valid = {}, {}
invalid = {"type": "integer"}, "foo"
def test_any_type_is_valid_for_type_any(self):
validator = self.Validator({"type": "any"})
validator.validate(object())
def test_any_type_is_redefinable(self):
"""
Sigh, because why not.
"""
Crazy = validators.extend(
self.Validator,
type_checker=self.Validator.TYPE_CHECKER.redefine(
"any", lambda checker, thing: isinstance(thing, int),
)
)
validator = Crazy({"type": "any"})
validator.validate(12)
with self.assertRaises(exceptions.ValidationError):
validator.validate("foo")
def test_is_type_is_true_for_any_type(self):
self.assertTrue(self.Validator({}).is_valid(object(), {"type": "any"}))
def test_is_type_does_not_evade_bool_if_it_is_being_tested(self):
self.assertTrue(self.Validator({}).is_type(True, "boolean"))
self.assertTrue(self.Validator({}).is_valid(True, {"type": "any"}))
class TestDraft4Validator(AntiDraft6LeakMixin, ValidatorTestMixin, TestCase):
Validator = validators.Draft4Validator
valid = {}, {}
invalid = {"type": "integer"}, "foo"
class TestDraft6Validator(ValidatorTestMixin, TestCase):
Validator = validators.Draft6Validator
valid = {}, {}
invalid = {"type": "integer"}, "foo"
class TestDraft7Validator(ValidatorTestMixin, TestCase):
Validator = validators.Draft7Validator
valid = {}, {}
invalid = {"type": "integer"}, "foo"
class TestValidatorFor(SynchronousTestCase):
def test_draft_3(self):
schema = {"$schema": "http://json-schema.org/draft-03/schema"}
self.assertIs(
validators.validator_for(schema),
validators.Draft3Validator,
)
schema = {"$schema": "http://json-schema.org/draft-03/schema#"}
self.assertIs(
validators.validator_for(schema),
validators.Draft3Validator,
)
def test_draft_4(self):
schema = {"$schema": "http://json-schema.org/draft-04/schema"}
self.assertIs(
validators.validator_for(schema),
validators.Draft4Validator,
)
schema = {"$schema": "http://json-schema.org/draft-04/schema#"}
self.assertIs(
validators.validator_for(schema),
validators.Draft4Validator,
)
def test_draft_6(self):
schema = {"$schema": "http://json-schema.org/draft-06/schema"}
self.assertIs(
validators.validator_for(schema),
validators.Draft6Validator,
)
schema = {"$schema": "http://json-schema.org/draft-06/schema#"}
self.assertIs(
validators.validator_for(schema),
validators.Draft6Validator,
)
def test_draft_7(self):
schema = {"$schema": "http://json-schema.org/draft-07/schema"}
self.assertIs(
validators.validator_for(schema),
validators.Draft7Validator,
)
schema = {"$schema": "http://json-schema.org/draft-07/schema#"}
self.assertIs(
validators.validator_for(schema),
validators.Draft7Validator,
)
def test_True(self):
self.assertIs(
validators.validator_for(True),
validators._LATEST_VERSION,
)
def test_False(self):
self.assertIs(
validators.validator_for(False),
validators._LATEST_VERSION,
)
def test_custom_validator(self):
Validator = validators.create(
meta_schema={"id": "meta schema id"},
version="12",
id_of=lambda s: s.get("id", ""),
)
schema = {"$schema": "meta schema id"}
self.assertIs(
validators.validator_for(schema),
Validator,
)
def test_custom_validator_draft6(self):
Validator = validators.create(
meta_schema={"$id": "meta schema $id"},
version="13",
)
schema = {"$schema": "meta schema $id"}
self.assertIs(
validators.validator_for(schema),
Validator,
)
def test_validator_for_jsonschema_default(self):
self.assertIs(validators.validator_for({}), validators._LATEST_VERSION)
def test_validator_for_custom_default(self):
self.assertIs(validators.validator_for({}, default=None), None)
def test_warns_if_meta_schema_specified_was_not_found(self):
self.assertWarns(
category=DeprecationWarning,
message=(
"The metaschema specified by $schema was not found. "
"Using the latest draft to validate, but this will raise "
"an error in the future."
),
# https://tm.tl/9363 :'(
filename=sys.modules[self.assertWarns.__module__].__file__,
f=validators.validator_for,
schema={u"$schema": "unknownSchema"},
default={},
)
def test_does_not_warn_if_meta_schema_is_unspecified(self):
        validators.validator_for(schema={}, default={})
self.assertFalse(self.flushWarnings())
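def _demo_validator_for():  # pragma: no cover
    """
    Illustrative sketch, not part of the original suite: the $schema URI
    selects which draft class will validate the instance.
    """
    schema = {"$schema": "http://json-schema.org/draft-04/schema#"}
    assert validators.validator_for(schema) is validators.Draft4Validator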
class TestValidate(SynchronousTestCase):
def assertUses(self, schema, Validator):
result = []
self.patch(Validator, "check_schema", result.append)
validators.validate({}, schema)
self.assertEqual(result, [schema])
def test_draft3_validator_is_chosen(self):
self.assertUses(
schema={"$schema": "http://json-schema.org/draft-03/schema#"},
Validator=validators.Draft3Validator,
)
# Make sure it works without the empty fragment
self.assertUses(
schema={"$schema": "http://json-schema.org/draft-03/schema"},
Validator=validators.Draft3Validator,
)
def test_draft4_validator_is_chosen(self):
self.assertUses(
schema={"$schema": "http://json-schema.org/draft-04/schema#"},
Validator=validators.Draft4Validator,
)
# Make sure it works without the empty fragment
self.assertUses(
schema={"$schema": "http://json-schema.org/draft-04/schema"},
Validator=validators.Draft4Validator,
)
def test_draft6_validator_is_chosen(self):
self.assertUses(
schema={"$schema": "http://json-schema.org/draft-06/schema#"},
Validator=validators.Draft6Validator,
)
# Make sure it works without the empty fragment
self.assertUses(
schema={"$schema": "http://json-schema.org/draft-06/schema"},
Validator=validators.Draft6Validator,
)
def test_draft7_validator_is_chosen(self):
self.assertUses(
schema={"$schema": "http://json-schema.org/draft-07/schema#"},
Validator=validators.Draft7Validator,
)
# Make sure it works without the empty fragment
self.assertUses(
schema={"$schema": "http://json-schema.org/draft-07/schema"},
Validator=validators.Draft7Validator,
)
def test_draft7_validator_is_the_default(self):
self.assertUses(schema={}, Validator=validators.Draft7Validator)
def test_validation_error_message(self):
with self.assertRaises(exceptions.ValidationError) as e:
validators.validate(12, {"type": "string"})
self.assertRegexpMatches(
str(e.exception),
"(?s)Failed validating u?'.*' in schema.*On instance",
)
def test_schema_error_message(self):
with self.assertRaises(exceptions.SchemaError) as e:
validators.validate(12, {"type": 12})
self.assertRegexpMatches(
str(e.exception),
"(?s)Failed validating u?'.*' in metaschema.*On schema",
)
def test_it_uses_best_match(self):
# This is a schema that best_match will recurse into
schema = {"oneOf": [{"type": "string"}, {"type": "array"}]}
with self.assertRaises(exceptions.ValidationError) as e:
validators.validate(12, schema)
self.assertIn("12 is not of type", str(e.exception))
class TestRefResolver(SynchronousTestCase):
base_uri = ""
stored_uri = "foo://stored"
stored_schema = {"stored": "schema"}
def setUp(self):
self.referrer = {}
self.store = {self.stored_uri: self.stored_schema}
self.resolver = validators.RefResolver(
self.base_uri, self.referrer, self.store,
)
def test_it_does_not_retrieve_schema_urls_from_the_network(self):
ref = validators.Draft3Validator.META_SCHEMA["id"]
self.patch(
self.resolver,
"resolve_remote",
lambda *args, **kwargs: self.fail("Should not have been called!"),
)
with self.resolver.resolving(ref) as resolved:
pass
self.assertEqual(resolved, validators.Draft3Validator.META_SCHEMA)
def test_it_resolves_local_refs(self):
ref = "#/properties/foo"
self.referrer["properties"] = {"foo": object()}
with self.resolver.resolving(ref) as resolved:
self.assertEqual(resolved, self.referrer["properties"]["foo"])
def test_it_resolves_local_refs_with_id(self):
schema = {"id": "http://bar/schema#", "a": {"foo": "bar"}}
resolver = validators.RefResolver.from_schema(
schema,
id_of=lambda schema: schema.get(u"id", u""),
)
with resolver.resolving("#/a") as resolved:
self.assertEqual(resolved, schema["a"])
with resolver.resolving("http://bar/schema#/a") as resolved:
self.assertEqual(resolved, schema["a"])
def test_it_retrieves_stored_refs(self):
with self.resolver.resolving(self.stored_uri) as resolved:
self.assertIs(resolved, self.stored_schema)
self.resolver.store["cached_ref"] = {"foo": 12}
with self.resolver.resolving("cached_ref#/foo") as resolved:
self.assertEqual(resolved, 12)
def test_it_retrieves_unstored_refs_via_requests(self):
ref = "http://bar#baz"
schema = {"baz": 12}
if "requests" in sys.modules:
self.addCleanup(
sys.modules.__setitem__, "requests", sys.modules["requests"],
)
sys.modules["requests"] = ReallyFakeRequests({"http://bar": schema})
with self.resolver.resolving(ref) as resolved:
self.assertEqual(resolved, 12)
def test_it_retrieves_unstored_refs_via_urlopen(self):
ref = "http://bar#baz"
schema = {"baz": 12}
if "requests" in sys.modules:
self.addCleanup(
sys.modules.__setitem__, "requests", sys.modules["requests"],
)
sys.modules["requests"] = None
@contextmanager
def fake_urlopen(url):
self.assertEqual(url, "http://bar")
yield BytesIO(json.dumps(schema).encode("utf8"))
self.addCleanup(setattr, validators, "urlopen", validators.urlopen)
validators.urlopen = fake_urlopen
with self.resolver.resolving(ref) as resolved:
pass
self.assertEqual(resolved, 12)
def test_it_retrieves_local_refs_via_urlopen(self):
with tempfile.NamedTemporaryFile(delete=False, mode="wt") as tempf:
self.addCleanup(os.remove, tempf.name)
json.dump({"foo": "bar"}, tempf)
ref = "file://{}#foo".format(pathname2url(tempf.name))
with self.resolver.resolving(ref) as resolved:
self.assertEqual(resolved, "bar")
def test_it_can_construct_a_base_uri_from_a_schema(self):
schema = {"id": "foo"}
resolver = validators.RefResolver.from_schema(
schema,
id_of=lambda schema: schema.get(u"id", u""),
)
self.assertEqual(resolver.base_uri, "foo")
self.assertEqual(resolver.resolution_scope, "foo")
with resolver.resolving("") as resolved:
self.assertEqual(resolved, schema)
with resolver.resolving("#") as resolved:
self.assertEqual(resolved, schema)
with resolver.resolving("foo") as resolved:
self.assertEqual(resolved, schema)
with resolver.resolving("foo#") as resolved:
self.assertEqual(resolved, schema)
def test_it_can_construct_a_base_uri_from_a_schema_without_id(self):
schema = {}
resolver = validators.RefResolver.from_schema(schema)
self.assertEqual(resolver.base_uri, "")
self.assertEqual(resolver.resolution_scope, "")
with resolver.resolving("") as resolved:
self.assertEqual(resolved, schema)
with resolver.resolving("#") as resolved:
self.assertEqual(resolved, schema)
def test_custom_uri_scheme_handlers(self):
def handler(url):
self.assertEqual(url, ref)
return schema
schema = {"foo": "bar"}
ref = "foo://bar"
resolver = validators.RefResolver("", {}, handlers={"foo": handler})
with resolver.resolving(ref) as resolved:
self.assertEqual(resolved, schema)
def test_cache_remote_on(self):
response = [object()]
def handler(url):
try:
return response.pop()
except IndexError: # pragma: no cover
self.fail("Response must not have been cached!")
ref = "foo://bar"
resolver = validators.RefResolver(
"", {}, cache_remote=True, handlers={"foo": handler},
)
with resolver.resolving(ref):
pass
with resolver.resolving(ref):
pass
def test_cache_remote_off(self):
response = [object()]
def handler(url):
try:
return response.pop()
except IndexError: # pragma: no cover
self.fail("Handler called twice!")
ref = "foo://bar"
resolver = validators.RefResolver(
"", {}, cache_remote=False, handlers={"foo": handler},
)
with resolver.resolving(ref):
pass
def test_if_you_give_it_junk_you_get_a_resolution_error(self):
error = ValueError("Oh no! What's this?")
def handler(url):
raise error
ref = "foo://bar"
resolver = validators.RefResolver("", {}, handlers={"foo": handler})
with self.assertRaises(exceptions.RefResolutionError) as err:
with resolver.resolving(ref):
self.fail("Shouldn't get this far!") # pragma: no cover
self.assertEqual(err.exception, exceptions.RefResolutionError(error))
def test_helpful_error_message_on_failed_pop_scope(self):
resolver = validators.RefResolver("", {})
resolver.pop_scope()
with self.assertRaises(exceptions.RefResolutionError) as exc:
resolver.pop_scope()
self.assertIn("Failed to pop the scope", str(exc.exception))
def sorted_errors(errors):
def key(error):
return (
[str(e) for e in error.path],
[str(e) for e in error.schema_path],
)
return sorted(errors, key=key)
@attr.s
class ReallyFakeRequests(object):
_responses = attr.ib()
def get(self, url):
response = self._responses.get(url)
        if response is None:  # pragma: no cover
raise ValueError("Unknown URL: " + repr(url))
return _ReallyFakeJSONResponse(json.dumps(response))
@attr.s
class _ReallyFakeJSONResponse(object):
_response = attr.ib()
def json(self):
return json.loads(self._response)
| 60,394 | Python | 33.256948 | 79 | 0.554078 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/benchmarks/issue232.py | #!/usr/bin/env python
"""
A performance benchmark using the example from issue #232.
See https://github.com/Julian/jsonschema/pull/232.
"""
from twisted.python.filepath import FilePath
from pyperf import Runner
from pyrsistent import m
from jsonschema.tests._suite import Version
import jsonschema
issue232 = Version(
path=FilePath(__file__).sibling("issue232"),
remotes=m(),
name="issue232",
)
if __name__ == "__main__":
issue232.benchmark(
runner=Runner(),
Validator=jsonschema.Draft4Validator,
)
| 541 | Python | 19.074073 | 58 | 0.696858 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/benchmarks/__init__.py | """
Benchmarks for validation.
This package is *not* public API.
"""
| 70 | Python | 10.833332 | 33 | 0.685714 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/jsonschema/benchmarks/json_schema_test_suite.py | #!/usr/bin/env python
"""
A performance benchmark using the official test suite.
This benchmarks jsonschema using every valid example in the
JSON-Schema-Test-Suite. It will take some time to complete.
"""
from pyperf import Runner
from jsonschema.tests._suite import Suite
if __name__ == "__main__":
Suite().benchmark(runner=Runner())
| 343 | Python | 21.933332 | 59 | 0.734694 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/_helpers.pyi | from typing import Any
class reify:
def __init__(self, wrapped: Any) -> None: ...
def __get__(self, inst: Any, owner: Any) -> Any: ...
def __set__(self, inst: Any, value: Any) -> None: ...
| 202 | unknown | 27.999996 | 57 | 0.549505 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/connector.py | import asyncio
import functools
import random
import sys
import traceback
import warnings
from collections import defaultdict, deque
from contextlib import suppress
from http.cookies import SimpleCookie
from itertools import cycle, islice
from time import monotonic
from types import TracebackType
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
DefaultDict,
Dict,
Iterator,
List,
Optional,
Set,
Tuple,
Type,
Union,
cast,
)
import attr
from . import hdrs, helpers
from .abc import AbstractResolver
from .client_exceptions import (
ClientConnectionError,
ClientConnectorCertificateError,
ClientConnectorError,
ClientConnectorSSLError,
ClientHttpProxyError,
ClientProxyConnectionError,
ServerFingerprintMismatch,
UnixClientConnectorError,
cert_errors,
ssl_errors,
)
from .client_proto import ResponseHandler
from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
from .helpers import (
PY_36,
ceil_timeout,
get_running_loop,
is_ip_address,
noop,
sentinel,
)
from .http import RESPONSES
from .locks import EventResultOrError
from .resolver import DefaultResolver
try:
import ssl
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[misc,assignment]
__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
if TYPE_CHECKING: # pragma: no cover
from .client import ClientTimeout
from .client_reqrep import ConnectionKey
from .tracing import Trace
class _DeprecationWaiter:
__slots__ = ("_awaitable", "_awaited")
def __init__(self, awaitable: Awaitable[Any]) -> None:
self._awaitable = awaitable
self._awaited = False
def __await__(self) -> Any:
self._awaited = True
return self._awaitable.__await__()
def __del__(self) -> None:
if not self._awaited:
warnings.warn(
"Connector.close() is a coroutine, "
"please use await connector.close()",
DeprecationWarning,
)
class Connection:
_source_traceback = None
_transport = None
def __init__(
self,
connector: "BaseConnector",
key: "ConnectionKey",
protocol: ResponseHandler,
loop: asyncio.AbstractEventLoop,
) -> None:
self._key = key
self._connector = connector
self._loop = loop
self._protocol: Optional[ResponseHandler] = protocol
self._callbacks: List[Callable[[], None]] = []
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
def __repr__(self) -> str:
return f"Connection<{self._key}>"
def __del__(self, _warnings: Any = warnings) -> None:
if self._protocol is not None:
if PY_36:
kwargs = {"source": self}
else:
kwargs = {}
_warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
if self._loop.is_closed():
return
self._connector._release(self._key, self._protocol, should_close=True)
context = {"client_connection": self, "message": "Unclosed connection"}
if self._source_traceback is not None:
context["source_traceback"] = self._source_traceback
self._loop.call_exception_handler(context)
@property
def loop(self) -> asyncio.AbstractEventLoop:
warnings.warn(
"connector.loop property is deprecated", DeprecationWarning, stacklevel=2
)
return self._loop
@property
def transport(self) -> Optional[asyncio.Transport]:
if self._protocol is None:
return None
return self._protocol.transport
@property
def protocol(self) -> Optional[ResponseHandler]:
return self._protocol
def add_callback(self, callback: Callable[[], None]) -> None:
if callback is not None:
self._callbacks.append(callback)
def _notify_release(self) -> None:
callbacks, self._callbacks = self._callbacks[:], []
for cb in callbacks:
with suppress(Exception):
cb()
def close(self) -> None:
self._notify_release()
if self._protocol is not None:
self._connector._release(self._key, self._protocol, should_close=True)
self._protocol = None
def release(self) -> None:
self._notify_release()
if self._protocol is not None:
self._connector._release(
self._key, self._protocol, should_close=self._protocol.should_close
)
self._protocol = None
@property
def closed(self) -> bool:
return self._protocol is None or not self._protocol.is_connected()
class _TransportPlaceholder:
"""placeholder for BaseConnector.connect function"""
def close(self) -> None:
pass
class BaseConnector:
"""Base connector class.
keepalive_timeout - (optional) Keep-alive timeout.
force_close - Set to True to force close and do reconnect
after each request (and between redirects).
limit - The total number of simultaneous connections.
limit_per_host - Number of simultaneous connections to one host.
enable_cleanup_closed - Enables clean-up closed ssl transports.
Disabled by default.
loop - Optional event loop.
"""
_closed = True # prevent AttributeError in __del__ if ctor was failed
_source_traceback = None
# abort transport after 2 seconds (cleanup broken connections)
_cleanup_closed_period = 2.0
def __init__(
self,
*,
keepalive_timeout: Union[object, None, float] = sentinel,
force_close: bool = False,
limit: int = 100,
limit_per_host: int = 0,
enable_cleanup_closed: bool = False,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
if force_close:
if keepalive_timeout is not None and keepalive_timeout is not sentinel:
raise ValueError(
"keepalive_timeout cannot " "be set if force_close is True"
)
else:
if keepalive_timeout is sentinel:
keepalive_timeout = 15.0
loop = get_running_loop(loop)
self._closed = False
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {}
self._limit = limit
self._limit_per_host = limit_per_host
self._acquired: Set[ResponseHandler] = set()
self._acquired_per_host: DefaultDict[
ConnectionKey, Set[ResponseHandler]
] = defaultdict(set)
self._keepalive_timeout = cast(float, keepalive_timeout)
self._force_close = force_close
# {host_key: FIFO list of waiters}
self._waiters = defaultdict(deque) # type: ignore[var-annotated]
self._loop = loop
self._factory = functools.partial(ResponseHandler, loop=loop)
self.cookies: SimpleCookie[str] = SimpleCookie()
# start keep-alive connection cleanup task
self._cleanup_handle: Optional[asyncio.TimerHandle] = None
# start cleanup closed transports task
self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
self._cleanup_closed_disabled = not enable_cleanup_closed
self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = []
self._cleanup_closed()
def __del__(self, _warnings: Any = warnings) -> None:
if self._closed:
return
if not self._conns:
return
conns = [repr(c) for c in self._conns.values()]
self._close()
if PY_36:
kwargs = {"source": self}
else:
kwargs = {}
_warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
context = {
"connector": self,
"connections": conns,
"message": "Unclosed connector",
}
if self._source_traceback is not None:
context["source_traceback"] = self._source_traceback
self._loop.call_exception_handler(context)
def __enter__(self) -> "BaseConnector":
warnings.warn(
'"with Connector():" is deprecated, '
'use "async with Connector():" instead',
DeprecationWarning,
)
return self
def __exit__(self, *exc: Any) -> None:
self._close()
async def __aenter__(self) -> "BaseConnector":
return self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]] = None,
exc_value: Optional[BaseException] = None,
exc_traceback: Optional[TracebackType] = None,
) -> None:
await self.close()
@property
def force_close(self) -> bool:
"""Ultimately close connection on releasing if True."""
return self._force_close
@property
def limit(self) -> int:
"""The total number for simultaneous connections.
If limit is 0 the connector has no limit.
The default limit size is 100.
"""
return self._limit
@property
def limit_per_host(self) -> int:
"""The limit for simultaneous connections to the same endpoint.
        Endpoints are the same if they have an equal
        (host, port, is_ssl) triple.
"""
return self._limit_per_host
def _cleanup(self) -> None:
"""Cleanup unused transports."""
if self._cleanup_handle:
self._cleanup_handle.cancel()
# _cleanup_handle should be unset, otherwise _release() will not
# recreate it ever!
self._cleanup_handle = None
now = self._loop.time()
timeout = self._keepalive_timeout
if self._conns:
connections = {}
deadline = now - timeout
for key, conns in self._conns.items():
alive = []
for proto, use_time in conns:
if proto.is_connected():
if use_time - deadline < 0:
transport = proto.transport
proto.close()
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
else:
alive.append((proto, use_time))
else:
transport = proto.transport
proto.close()
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
if alive:
connections[key] = alive
self._conns = connections
if self._conns:
self._cleanup_handle = helpers.weakref_handle(
self, "_cleanup", timeout, self._loop
)
def _drop_acquired_per_host(
self, key: "ConnectionKey", val: ResponseHandler
) -> None:
acquired_per_host = self._acquired_per_host
if key not in acquired_per_host:
return
conns = acquired_per_host[key]
conns.remove(val)
if not conns:
del self._acquired_per_host[key]
def _cleanup_closed(self) -> None:
"""Double confirmation for transport close.
        Some broken ssl servers may leave the socket open without a proper close.
"""
if self._cleanup_closed_handle:
self._cleanup_closed_handle.cancel()
for transport in self._cleanup_closed_transports:
if transport is not None:
transport.abort()
self._cleanup_closed_transports = []
if not self._cleanup_closed_disabled:
self._cleanup_closed_handle = helpers.weakref_handle(
self, "_cleanup_closed", self._cleanup_closed_period, self._loop
)
def close(self) -> Awaitable[None]:
"""Close all opened transports."""
self._close()
return _DeprecationWaiter(noop())
def _close(self) -> None:
if self._closed:
return
self._closed = True
try:
if self._loop.is_closed():
return
# cancel cleanup task
if self._cleanup_handle:
self._cleanup_handle.cancel()
# cancel cleanup close task
if self._cleanup_closed_handle:
self._cleanup_closed_handle.cancel()
for data in self._conns.values():
for proto, t0 in data:
proto.close()
for proto in self._acquired:
proto.close()
for transport in self._cleanup_closed_transports:
if transport is not None:
transport.abort()
finally:
self._conns.clear()
self._acquired.clear()
self._waiters.clear()
self._cleanup_handle = None
self._cleanup_closed_transports.clear()
self._cleanup_closed_handle = None
@property
def closed(self) -> bool:
"""Is connector closed.
A readonly property.
"""
return self._closed
def _available_connections(self, key: "ConnectionKey") -> int:
"""
Return number of available connections.
The limit, limit_per_host and the connection key are taken into account.
        A return value of less than 1 means that there are no connections
        available.
"""
if self._limit:
# total calc available connections
available = self._limit - len(self._acquired)
# check limit per host
if (
self._limit_per_host
and available > 0
and key in self._acquired_per_host
):
acquired = self._acquired_per_host.get(key)
assert acquired is not None
available = self._limit_per_host - len(acquired)
elif self._limit_per_host and key in self._acquired_per_host:
# check limit per host
acquired = self._acquired_per_host.get(key)
assert acquired is not None
available = self._limit_per_host - len(acquired)
else:
available = 1
return available
async def connect(
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> Connection:
"""Get from pool or create new connection."""
key = req.connection_key
available = self._available_connections(key)
# Wait if there are no available connections or if there are/were
# waiters (i.e. don't steal connection from a waiter about to wake up)
if available <= 0 or key in self._waiters:
fut = self._loop.create_future()
# This connection will now count towards the limit.
self._waiters[key].append(fut)
if traces:
for trace in traces:
await trace.send_connection_queued_start()
try:
await fut
except BaseException as e:
if key in self._waiters:
# remove a waiter even if it was cancelled, normally it's
# removed when it's notified
try:
self._waiters[key].remove(fut)
except ValueError: # fut may no longer be in list
pass
raise e
finally:
if key in self._waiters and not self._waiters[key]:
del self._waiters[key]
if traces:
for trace in traces:
await trace.send_connection_queued_end()
proto = self._get(key)
if proto is None:
placeholder = cast(ResponseHandler, _TransportPlaceholder())
self._acquired.add(placeholder)
self._acquired_per_host[key].add(placeholder)
if traces:
for trace in traces:
await trace.send_connection_create_start()
try:
proto = await self._create_connection(req, traces, timeout)
if self._closed:
proto.close()
raise ClientConnectionError("Connector is closed.")
except BaseException:
if not self._closed:
self._acquired.remove(placeholder)
self._drop_acquired_per_host(key, placeholder)
self._release_waiter()
raise
else:
if not self._closed:
self._acquired.remove(placeholder)
self._drop_acquired_per_host(key, placeholder)
if traces:
for trace in traces:
await trace.send_connection_create_end()
else:
if traces:
# Acquire the connection to prevent race conditions with limits
placeholder = cast(ResponseHandler, _TransportPlaceholder())
self._acquired.add(placeholder)
self._acquired_per_host[key].add(placeholder)
for trace in traces:
await trace.send_connection_reuseconn()
self._acquired.remove(placeholder)
self._drop_acquired_per_host(key, placeholder)
self._acquired.add(proto)
self._acquired_per_host[key].add(proto)
return Connection(self, key, proto, self._loop)
def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
try:
conns = self._conns[key]
except KeyError:
return None
t1 = self._loop.time()
while conns:
proto, t0 = conns.pop()
if proto.is_connected():
if t1 - t0 > self._keepalive_timeout:
transport = proto.transport
proto.close()
# only for SSL transports
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
else:
if not conns:
# The very last connection was reclaimed: drop the key
del self._conns[key]
return proto
else:
transport = proto.transport
proto.close()
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
# No more connections: drop the key
del self._conns[key]
return None
def _release_waiter(self) -> None:
"""
Iterates over all waiters until one to be released is found.
        The one to be released is not finished and
belongs to a host that has available connections.
"""
if not self._waiters:
return
        # Shuffle the keys so that we do not iterate
        # in the same order on every call.
queues = list(self._waiters.keys())
random.shuffle(queues)
for key in queues:
if self._available_connections(key) < 1:
continue
waiters = self._waiters[key]
while waiters:
waiter = waiters.popleft()
if not waiter.done():
waiter.set_result(None)
return
def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
if self._closed:
# acquired connection is already released on connector closing
return
try:
self._acquired.remove(proto)
self._drop_acquired_per_host(key, proto)
except KeyError: # pragma: no cover
            # this may be a result of the non-deterministic order of object
            # finalization due to garbage collection.
pass
else:
self._release_waiter()
def _release(
self,
key: "ConnectionKey",
protocol: ResponseHandler,
*,
should_close: bool = False,
) -> None:
if self._closed:
# acquired connection is already released on connector closing
return
self._release_acquired(key, protocol)
if self._force_close:
should_close = True
if should_close or protocol.should_close:
transport = protocol.transport
protocol.close()
if key.is_ssl and not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transport)
else:
conns = self._conns.get(key)
if conns is None:
conns = self._conns[key] = []
conns.append((protocol, self._loop.time()))
if self._cleanup_handle is None:
self._cleanup_handle = helpers.weakref_handle(
self, "_cleanup", self._keepalive_timeout, self._loop
)
async def _create_connection(
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
raise NotImplementedError()
class _DNSCacheTable:
def __init__(self, ttl: Optional[float] = None) -> None:
self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {}
self._timestamps: Dict[Tuple[str, int], float] = {}
self._ttl = ttl
def __contains__(self, host: object) -> bool:
return host in self._addrs_rr
def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None:
self._addrs_rr[key] = (cycle(addrs), len(addrs))
if self._ttl:
self._timestamps[key] = monotonic()
def remove(self, key: Tuple[str, int]) -> None:
self._addrs_rr.pop(key, None)
if self._ttl:
self._timestamps.pop(key, None)
def clear(self) -> None:
self._addrs_rr.clear()
self._timestamps.clear()
def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]:
loop, length = self._addrs_rr[key]
addrs = list(islice(loop, length))
# Consume one more element to shift internal state of `cycle`
next(loop)
return addrs
def expired(self, key: Tuple[str, int]) -> bool:
if self._ttl is None:
return False
return self._timestamps[key] + self._ttl < monotonic()
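# Hedged sketch (illustrative only, not part of aiohttp's API): how the
# round-robin cache above rotates resolved addresses between calls. The
# hostname and addresses below are hypothetical.
def _example_dns_cache_rotation() -> None:
    table = _DNSCacheTable(ttl=10)
    key = ("example.com", 80)
    table.add(key, [{"host": "10.0.0.1"}, {"host": "10.0.0.2"}])
    first = table.next_addrs(key)   # [{'host': '10.0.0.1'}, {'host': '10.0.0.2'}]
    second = table.next_addrs(key)  # rotated: [{'host': '10.0.0.2'}, {'host': '10.0.0.1'}]
    assert first != second          # the extra next() call shifted the cycle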
class TCPConnector(BaseConnector):
"""TCP connector.
    verify_ssl - Set to True to check ssl certificates.
fingerprint - Pass the binary sha256
digest of the expected certificate in DER format to verify
that the certificate the server presents matches. See also
https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning
resolver - Enable DNS lookups and use this
resolver
use_dns_cache - Use memory cache for DNS lookups.
    ttl_dns_cache - Max seconds to cache a resolved DNS entry; None means forever.
family - socket address family
local_addr - local tuple of (host, port) to bind socket to
keepalive_timeout - (optional) Keep-alive timeout.
force_close - Set to True to force close and do reconnect
after each request (and between redirects).
limit - The total number of simultaneous connections.
limit_per_host - Number of simultaneous connections to one host.
enable_cleanup_closed - Enables clean-up closed ssl transports.
Disabled by default.
loop - Optional event loop.
"""
def __init__(
self,
*,
verify_ssl: bool = True,
fingerprint: Optional[bytes] = None,
use_dns_cache: bool = True,
ttl_dns_cache: Optional[int] = 10,
family: int = 0,
ssl_context: Optional[SSLContext] = None,
ssl: Union[None, bool, Fingerprint, SSLContext] = None,
local_addr: Optional[Tuple[str, int]] = None,
resolver: Optional[AbstractResolver] = None,
keepalive_timeout: Union[None, float, object] = sentinel,
force_close: bool = False,
limit: int = 100,
limit_per_host: int = 0,
enable_cleanup_closed: bool = False,
loop: Optional[asyncio.AbstractEventLoop] = None,
):
super().__init__(
keepalive_timeout=keepalive_timeout,
force_close=force_close,
limit=limit,
limit_per_host=limit_per_host,
enable_cleanup_closed=enable_cleanup_closed,
loop=loop,
)
self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
if resolver is None:
resolver = DefaultResolver(loop=self._loop)
self._resolver = resolver
self._use_dns_cache = use_dns_cache
self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {}
self._family = family
self._local_addr = local_addr
def close(self) -> Awaitable[None]:
"""Close all ongoing DNS calls."""
for ev in self._throttle_dns_events.values():
ev.cancel()
return super().close()
@property
def family(self) -> int:
"""Socket family like AF_INET."""
return self._family
@property
def use_dns_cache(self) -> bool:
"""True if local DNS caching is enabled."""
return self._use_dns_cache
def clear_dns_cache(
self, host: Optional[str] = None, port: Optional[int] = None
) -> None:
"""Remove specified host/port or clear all dns local cache."""
if host is not None and port is not None:
self._cached_hosts.remove((host, port))
elif host is not None or port is not None:
raise ValueError("either both host and port " "or none of them are allowed")
else:
self._cached_hosts.clear()
async def _resolve_host(
self, host: str, port: int, traces: Optional[List["Trace"]] = None
) -> List[Dict[str, Any]]:
if is_ip_address(host):
return [
{
"hostname": host,
"host": host,
"port": port,
"family": self._family,
"proto": 0,
"flags": 0,
}
]
if not self._use_dns_cache:
if traces:
for trace in traces:
await trace.send_dns_resolvehost_start(host)
res = await self._resolver.resolve(host, port, family=self._family)
if traces:
for trace in traces:
await trace.send_dns_resolvehost_end(host)
return res
key = (host, port)
if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)):
# get result early, before any await (#4014)
result = self._cached_hosts.next_addrs(key)
if traces:
for trace in traces:
await trace.send_dns_cache_hit(host)
return result
if key in self._throttle_dns_events:
# get event early, before any await (#4014)
event = self._throttle_dns_events[key]
if traces:
for trace in traces:
await trace.send_dns_cache_hit(host)
await event.wait()
else:
# update dict early, before any await (#4014)
self._throttle_dns_events[key] = EventResultOrError(self._loop)
if traces:
for trace in traces:
await trace.send_dns_cache_miss(host)
try:
if traces:
for trace in traces:
await trace.send_dns_resolvehost_start(host)
addrs = await self._resolver.resolve(host, port, family=self._family)
if traces:
for trace in traces:
await trace.send_dns_resolvehost_end(host)
self._cached_hosts.add(key, addrs)
self._throttle_dns_events[key].set()
except BaseException as e:
# any DNS exception, independently of the implementation
# is set for the waiters to raise the same exception.
self._throttle_dns_events[key].set(exc=e)
raise
finally:
self._throttle_dns_events.pop(key)
return self._cached_hosts.next_addrs(key)
async def _create_connection(
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
"""Create connection.
Has same keyword arguments as BaseEventLoop.create_connection.
"""
if req.proxy:
_, proto = await self._create_proxy_connection(req, traces, timeout)
else:
_, proto = await self._create_direct_connection(req, traces, timeout)
return proto
@staticmethod
@functools.lru_cache(None)
def _make_ssl_context(verified: bool) -> SSLContext:
if verified:
return ssl.create_default_context()
else:
sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
sslcontext.options |= ssl.OP_NO_SSLv2
sslcontext.options |= ssl.OP_NO_SSLv3
sslcontext.check_hostname = False
sslcontext.verify_mode = ssl.CERT_NONE
try:
sslcontext.options |= ssl.OP_NO_COMPRESSION
except AttributeError as attr_err:
warnings.warn(
"{!s}: The Python interpreter is compiled "
"against OpenSSL < 1.0.0. Ref: "
"https://docs.python.org/3/library/ssl.html"
"#ssl.OP_NO_COMPRESSION".format(attr_err),
)
sslcontext.set_default_verify_paths()
return sslcontext
def _get_ssl_context(self, req: "ClientRequest") -> Optional[SSLContext]:
"""Logic to get the correct SSL context
0. if req.ssl is false, return None
1. if ssl_context is specified in req, use it
2. if _ssl_context is specified in self, use it
3. otherwise:
1. if verify_ssl is not specified in req, use self.ssl_context
(will generate a default context according to self.verify_ssl)
2. if verify_ssl is True in req, generate a default SSL context
3. if verify_ssl is False in req, generate a SSL context that
won't verify
"""
if req.is_ssl():
if ssl is None: # pragma: no cover
raise RuntimeError("SSL is not supported.")
sslcontext = req.ssl
if isinstance(sslcontext, ssl.SSLContext):
return sslcontext
if sslcontext is not None:
# not verified or fingerprinted
return self._make_ssl_context(False)
sslcontext = self._ssl
if isinstance(sslcontext, ssl.SSLContext):
return sslcontext
if sslcontext is not None:
# not verified or fingerprinted
return self._make_ssl_context(False)
return self._make_ssl_context(True)
else:
return None
def _get_fingerprint(self, req: "ClientRequest") -> Optional["Fingerprint"]:
ret = req.ssl
if isinstance(ret, Fingerprint):
return ret
ret = self._ssl
if isinstance(ret, Fingerprint):
return ret
return None
async def _wrap_create_connection(
self,
*args: Any,
req: "ClientRequest",
timeout: "ClientTimeout",
client_error: Type[Exception] = ClientConnectorError,
**kwargs: Any,
) -> Tuple[asyncio.Transport, ResponseHandler]:
try:
async with ceil_timeout(timeout.sock_connect):
return await self._loop.create_connection(*args, **kwargs) # type: ignore[return-value] # noqa
except cert_errors as exc:
raise ClientConnectorCertificateError(req.connection_key, exc) from exc
except ssl_errors as exc:
raise ClientConnectorSSLError(req.connection_key, exc) from exc
except OSError as exc:
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
raise
raise client_error(req.connection_key, exc) from exc
def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
"""Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
One case is that :py:meth:`asyncio.loop.start_tls` is not yet
implemented under Python 3.6. It is necessary for TLS-in-TLS so
that it is possible to send HTTPS queries through HTTPS proxies.
This doesn't affect regular HTTP requests, though.
"""
if not req.is_ssl():
return
proxy_url = req.proxy
assert proxy_url is not None
if proxy_url.scheme != "https":
return
self._check_loop_for_start_tls()
def _check_loop_for_start_tls(self) -> None:
try:
self._loop.start_tls
except AttributeError as attr_exc:
raise RuntimeError(
"An HTTPS request is being sent through an HTTPS proxy. "
"This needs support for TLS in TLS but it is not implemented "
"in your runtime for the stdlib asyncio.\n\n"
"Please upgrade to Python 3.7 or higher. For more details, "
"please see:\n"
"* https://bugs.python.org/issue37179\n"
"* https://github.com/python/cpython/pull/28073\n"
"* https://docs.aiohttp.org/en/stable/"
"client_advanced.html#proxy-support\n"
"* https://github.com/aio-libs/aiohttp/discussions/6044\n",
) from attr_exc
def _loop_supports_start_tls(self) -> bool:
try:
self._check_loop_for_start_tls()
except RuntimeError:
return False
else:
return True
def _warn_about_tls_in_tls(
self,
underlying_transport: asyncio.Transport,
req: "ClientRequest",
) -> None:
"""Issue a warning if the requested URL has HTTPS scheme."""
if req.request_info.url.scheme != "https":
return
asyncio_supports_tls_in_tls = getattr(
underlying_transport,
"_start_tls_compatible",
False,
)
if asyncio_supports_tls_in_tls:
return
warnings.warn(
"An HTTPS request is being sent through an HTTPS proxy. "
"This support for TLS in TLS is known to be disabled "
"in the stdlib asyncio. This is why you'll probably see "
"an error in the log below.\n\n"
"It is possible to enable it via monkeypatching under "
"Python 3.7 or higher. For more details, see:\n"
"* https://bugs.python.org/issue37179\n"
"* https://github.com/python/cpython/pull/28073\n\n"
"You can temporarily patch this as follows:\n"
"* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
"* https://github.com/aio-libs/aiohttp/discussions/6044\n",
RuntimeWarning,
source=self,
# Why `4`? At least 3 of the calls in the stack originate
# from the methods in this class.
stacklevel=3,
)
async def _start_tls_connection(
self,
underlying_transport: asyncio.Transport,
req: "ClientRequest",
timeout: "ClientTimeout",
client_error: Type[Exception] = ClientConnectorError,
) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
"""Wrap the raw TCP transport with TLS."""
tls_proto = self._factory() # Create a brand new proto for TLS
# Safety of the `cast()` call here is based on the fact that
# internally `_get_ssl_context()` only returns `None` when
# `req.is_ssl()` evaluates to `False` which is never gonna happen
# in this code path. Of course, it's rather fragile
# maintainability-wise but this is to be solved separately.
sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req))
try:
async with ceil_timeout(timeout.sock_connect):
try:
tls_transport = await self._loop.start_tls(
underlying_transport,
tls_proto,
sslcontext,
server_hostname=req.host,
ssl_handshake_timeout=timeout.total,
)
except BaseException:
# We need to close the underlying transport since
# `start_tls()` probably failed before it had a
# chance to do this:
underlying_transport.close()
raise
except cert_errors as exc:
raise ClientConnectorCertificateError(req.connection_key, exc) from exc
except ssl_errors as exc:
raise ClientConnectorSSLError(req.connection_key, exc) from exc
except OSError as exc:
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
raise
raise client_error(req.connection_key, exc) from exc
except TypeError as type_err:
# Example cause looks like this:
# TypeError: transport <asyncio.sslproto._SSLProtocolTransport
# object at 0x7f760615e460> is not supported by start_tls()
raise ClientConnectionError(
"Cannot initialize a TLS-in-TLS connection to host "
f"{req.host!s}:{req.port:d} through an underlying connection "
f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
f"[{type_err!s}]"
) from type_err
else:
tls_proto.connection_made(
tls_transport
) # Kick the state machine of the new TLS protocol
return tls_transport, tls_proto
async def _create_direct_connection(
self,
req: "ClientRequest",
traces: List["Trace"],
timeout: "ClientTimeout",
*,
client_error: Type[Exception] = ClientConnectorError,
) -> Tuple[asyncio.Transport, ResponseHandler]:
sslcontext = self._get_ssl_context(req)
fingerprint = self._get_fingerprint(req)
host = req.url.raw_host
assert host is not None
port = req.port
assert port is not None
host_resolved = asyncio.ensure_future(
self._resolve_host(host, port, traces=traces), loop=self._loop
)
try:
# Cancelling this lookup should not cancel the underlying lookup
# or else the cancel event will get broadcast to all the waiters
# across all connections.
hosts = await asyncio.shield(host_resolved)
except asyncio.CancelledError:
def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None:
with suppress(Exception, asyncio.CancelledError):
fut.result()
host_resolved.add_done_callback(drop_exception)
raise
except OSError as exc:
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
raise
# in case of proxy it is not ClientProxyConnectionError
# it is problem of resolving proxy ip itself
raise ClientConnectorError(req.connection_key, exc) from exc
last_exc: Optional[Exception] = None
for hinfo in hosts:
host = hinfo["host"]
port = hinfo["port"]
try:
transp, proto = await self._wrap_create_connection(
self._factory,
host,
port,
timeout=timeout,
ssl=sslcontext,
family=hinfo["family"],
proto=hinfo["proto"],
flags=hinfo["flags"],
server_hostname=hinfo["hostname"] if sslcontext else None,
local_addr=self._local_addr,
req=req,
client_error=client_error,
)
except ClientConnectorError as exc:
last_exc = exc
continue
if req.is_ssl() and fingerprint:
try:
fingerprint.check(transp)
except ServerFingerprintMismatch as exc:
transp.close()
if not self._cleanup_closed_disabled:
self._cleanup_closed_transports.append(transp)
last_exc = exc
continue
return transp, proto
else:
assert last_exc is not None
raise last_exc
async def _create_proxy_connection(
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
self._fail_on_no_start_tls(req)
runtime_has_start_tls = self._loop_supports_start_tls()
headers: Dict[str, str] = {}
if req.proxy_headers is not None:
headers = req.proxy_headers # type: ignore[assignment]
headers[hdrs.HOST] = req.headers[hdrs.HOST]
url = req.proxy
assert url is not None
proxy_req = ClientRequest(
hdrs.METH_GET,
url,
headers=headers,
auth=req.proxy_auth,
loop=self._loop,
ssl=req.ssl,
)
# create connection to proxy server
transport, proto = await self._create_direct_connection(
proxy_req, [], timeout, client_error=ClientProxyConnectionError
)
        # Many HTTP proxies have buggy keepalive support. Let's not
        # reuse the connection but close it after processing every
        # response.
proto.force_close()
auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
if auth is not None:
if not req.is_ssl():
req.headers[hdrs.PROXY_AUTHORIZATION] = auth
else:
proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth
if req.is_ssl():
if runtime_has_start_tls:
self._warn_about_tls_in_tls(transport, req)
# For HTTPS requests over HTTP proxy
# we must notify proxy to tunnel connection
# so we send CONNECT command:
# CONNECT www.python.org:443 HTTP/1.1
# Host: www.python.org
#
# next we must do TLS handshake and so on
# to do this we must wrap raw socket into secure one
# asyncio handles this perfectly
proxy_req.method = hdrs.METH_CONNECT
proxy_req.url = req.url
key = attr.evolve(
req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None
)
conn = Connection(self, key, proto, self._loop)
proxy_resp = await proxy_req.send(conn)
try:
protocol = conn._protocol
assert protocol is not None
# read_until_eof=True will ensure the connection isn't closed
# once the response is received and processed allowing
# START_TLS to work on the connection below.
protocol.set_response_params(read_until_eof=runtime_has_start_tls)
resp = await proxy_resp.start(conn)
except BaseException:
proxy_resp.close()
conn.close()
raise
else:
conn._protocol = None
conn._transport = None
try:
if resp.status != 200:
message = resp.reason
if message is None:
message = RESPONSES[resp.status][0]
raise ClientHttpProxyError(
proxy_resp.request_info,
resp.history,
status=resp.status,
message=message,
headers=resp.headers,
)
if not runtime_has_start_tls:
rawsock = transport.get_extra_info("socket", default=None)
if rawsock is None:
raise RuntimeError(
"Transport does not expose socket instance"
)
# Duplicate the socket, so now we can close proxy transport
rawsock = rawsock.dup()
except BaseException:
# It shouldn't be closed in `finally` because it's fed to
# `loop.start_tls()` and the docs say not to touch it after
# passing there.
transport.close()
raise
finally:
if not runtime_has_start_tls:
transport.close()
if not runtime_has_start_tls:
# HTTP proxy with support for upgrade to HTTPS
sslcontext = self._get_ssl_context(req)
return await self._wrap_create_connection(
self._factory,
timeout=timeout,
ssl=sslcontext,
sock=rawsock,
server_hostname=req.host,
req=req,
)
return await self._start_tls_connection(
# Access the old transport for the last time before it's
# closed and forgotten forever:
transport,
req=req,
timeout=timeout,
)
finally:
proxy_resp.close()
return transport, proto
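# Hedged sketch (illustrative only, not library code): a typical TCPConnector
# configuration exercising the limit and DNS-cache options documented above.
async def _example_tcp_connector_config() -> TCPConnector:
    return TCPConnector(
        limit=50,              # at most 50 simultaneous connections in total
        limit_per_host=10,     # and at most 10 per (host, port, is_ssl) triple
        ttl_dns_cache=300,     # cache DNS answers for five minutes
        enable_cleanup_closed=True,  # abort transports broken servers leave open
    )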
class UnixConnector(BaseConnector):
"""Unix socket connector.
path - Unix socket path.
keepalive_timeout - (optional) Keep-alive timeout.
force_close - Set to True to force close and do reconnect
after each request (and between redirects).
limit - The total number of simultaneous connections.
limit_per_host - Number of simultaneous connections to one host.
loop - Optional event loop.
"""
def __init__(
self,
path: str,
force_close: bool = False,
keepalive_timeout: Union[object, float, None] = sentinel,
limit: int = 100,
limit_per_host: int = 0,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
super().__init__(
force_close=force_close,
keepalive_timeout=keepalive_timeout,
limit=limit,
limit_per_host=limit_per_host,
loop=loop,
)
self._path = path
@property
def path(self) -> str:
"""Path to unix socket."""
return self._path
async def _create_connection(
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
try:
async with ceil_timeout(timeout.sock_connect):
_, proto = await self._loop.create_unix_connection(
self._factory, self._path
)
except OSError as exc:
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
raise
raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc
return cast(ResponseHandler, proto)
class NamedPipeConnector(BaseConnector):
"""Named pipe connector.
Only supported by the proactor event loop.
See also: https://docs.python.org/3.7/library/asyncio-eventloop.html
path - Windows named pipe path.
keepalive_timeout - (optional) Keep-alive timeout.
force_close - Set to True to force close and do reconnect
after each request (and between redirects).
limit - The total number of simultaneous connections.
limit_per_host - Number of simultaneous connections to one host.
loop - Optional event loop.
"""
def __init__(
self,
path: str,
force_close: bool = False,
keepalive_timeout: Union[object, float, None] = sentinel,
limit: int = 100,
limit_per_host: int = 0,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
super().__init__(
force_close=force_close,
keepalive_timeout=keepalive_timeout,
limit=limit,
limit_per_host=limit_per_host,
loop=loop,
)
if not isinstance(
self._loop, asyncio.ProactorEventLoop # type: ignore[attr-defined]
):
raise RuntimeError(
"Named Pipes only available in proactor " "loop under windows"
)
self._path = path
@property
def path(self) -> str:
"""Path to the named pipe."""
return self._path
async def _create_connection(
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
try:
async with ceil_timeout(timeout.sock_connect):
_, proto = await self._loop.create_pipe_connection( # type: ignore[attr-defined] # noqa: E501
self._factory, self._path
)
                # the drain is required so that connection_made is called
                # and the transport is set; otherwise it is not set before
                # the `assert conn.transport is not None`
                # in client.py's _request method
await asyncio.sleep(0)
# other option is to manually set transport like
# `proto.transport = trans`
except OSError as exc:
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
raise
raise ClientConnectorError(req.connection_key, exc) from exc
return cast(ResponseHandler, proto)
| 51,177 | Python | 34.198074 | 112 | 0.55566 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_middlewares.py | import re
from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar
from .typedefs import Handler
from .web_exceptions import HTTPPermanentRedirect, _HTTPMove
from .web_request import Request
from .web_response import StreamResponse
from .web_urldispatcher import SystemRoute
__all__ = (
"middleware",
"normalize_path_middleware",
)
if TYPE_CHECKING: # pragma: no cover
from .web_app import Application
_Func = TypeVar("_Func")
async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
alt_request = request.clone(rel_url=path)
match_info = await request.app.router.resolve(alt_request)
alt_request._match_info = match_info
if match_info.http_exception is None:
return True, alt_request
return False, request
def middleware(f: _Func) -> _Func:
f.__middleware_version__ = 1 # type: ignore[attr-defined]
return f
_Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]]
def normalize_path_middleware(
*,
append_slash: bool = True,
remove_slash: bool = False,
merge_slashes: bool = True,
redirect_class: Type[_HTTPMove] = HTTPPermanentRedirect,
) -> _Middleware:
"""Factory for producing a middleware that normalizes the path of a request.
Normalizing means:
- Add or remove a trailing slash to the path.
- Double slashes are replaced by one.
The middleware returns as soon as it finds a path that resolves
correctly. The order if both merge and append/remove are enabled is
1) merge slashes
2) append/remove slash
3) both merge slashes and append/remove slash.
If the path resolves with at least one of those conditions, it will
redirect to the new path.
    Only one of `append_slash` and `remove_slash` can be enabled. If both
    are `True` the factory will raise an assertion error.
If `append_slash` is `True` the middleware will append a slash when
needed. If a resource is defined with trailing slash and the request
comes without it, it will append it automatically.
    If `remove_slash` is `True`, `append_slash` must be `False`. When enabled,
    the middleware will remove trailing slashes and redirect if the resource
    is defined.
If merge_slashes is True, merge multiple consecutive slashes in the
path into one.
"""
correct_configuration = not (append_slash and remove_slash)
assert correct_configuration, "Cannot both remove and append slash"
@middleware
async def impl(request: Request, handler: Handler) -> StreamResponse:
if isinstance(request.match_info.route, SystemRoute):
paths_to_check = []
if "?" in request.raw_path:
path, query = request.raw_path.split("?", 1)
query = "?" + query
else:
query = ""
path = request.raw_path
if merge_slashes:
paths_to_check.append(re.sub("//+", "/", path))
if append_slash and not request.path.endswith("/"):
paths_to_check.append(path + "/")
if remove_slash and request.path.endswith("/"):
paths_to_check.append(path[:-1])
if merge_slashes and append_slash:
paths_to_check.append(re.sub("//+", "/", path + "/"))
if merge_slashes and remove_slash:
merged_slashes = re.sub("//+", "/", path)
paths_to_check.append(merged_slashes[:-1])
for path in paths_to_check:
path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg
resolves, request = await _check_request_resolves(request, path)
if resolves:
raise redirect_class(request.raw_path + query)
return await handler(request)
return impl
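def _example_normalize_usage() -> "Application":
    # Hedged sketch (not used by the library): an application whose router
    # defines "/hello/", so a request for "/hello" is redirected by the
    # middleware above. The handler is hypothetical; imports are local to
    # avoid import cycles.
    from .web_app import Application
    from .web_response import Response
    async def hello(request: Request) -> StreamResponse:
        return Response(text="hello")
    app = Application(middlewares=[normalize_path_middleware(append_slash=True)])
    app.router.add_get("/hello/", hello)
    return app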
def _fix_request_current_app(app: "Application") -> _Middleware:
@middleware
async def impl(request: Request, handler: Handler) -> StreamResponse:
with request.match_info.set_current_app(app):
return await handler(request)
return impl
| 4,137 | Python | 33.483333 | 87 | 0.641528 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/tcp_helpers.py | """Helper methods to tune a TCP connection"""
import asyncio
import socket
from contextlib import suppress
from typing import Optional # noqa
__all__ = ("tcp_keepalive", "tcp_nodelay")
if hasattr(socket, "SO_KEEPALIVE"):
def tcp_keepalive(transport: asyncio.Transport) -> None:
sock = transport.get_extra_info("socket")
if sock is not None:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
else:
def tcp_keepalive(transport: asyncio.Transport) -> None: # pragma: no cover
pass
def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
sock = transport.get_extra_info("socket")
if sock is None:
return
if sock.family not in (socket.AF_INET, socket.AF_INET6):
return
value = bool(value)
# socket may be closed already, on windows OSError get raised
with suppress(OSError):
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, value)
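# Hedged sketch (not part of this module): both helpers are meant to be
# applied to an already-connected transport, typically from a protocol's
# connection_made callback. The protocol below is illustrative only.
class _ExampleTuningProtocol(asyncio.Protocol):
    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        assert isinstance(transport, asyncio.Transport)
        tcp_keepalive(transport)      # enable SO_KEEPALIVE where supported
        tcp_nodelay(transport, True)  # disable Nagle's algorithm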
| 961 | Python | 24.315789 | 80 | 0.674298 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/formdata.py | import io
from typing import Any, Iterable, List, Optional
from urllib.parse import urlencode
from multidict import MultiDict, MultiDictProxy
from . import hdrs, multipart, payload
from .helpers import guess_filename
from .payload import Payload
__all__ = ("FormData",)
class FormData:
"""Helper class for form body generation.
Supports multipart/form-data and application/x-www-form-urlencoded.
"""
def __init__(
self,
fields: Iterable[Any] = (),
quote_fields: bool = True,
charset: Optional[str] = None,
) -> None:
self._writer = multipart.MultipartWriter("form-data")
self._fields: List[Any] = []
self._is_multipart = False
self._is_processed = False
self._quote_fields = quote_fields
self._charset = charset
if isinstance(fields, dict):
fields = list(fields.items())
elif not isinstance(fields, (list, tuple)):
fields = (fields,)
self.add_fields(*fields)
@property
def is_multipart(self) -> bool:
return self._is_multipart
def add_field(
self,
name: str,
value: Any,
*,
content_type: Optional[str] = None,
filename: Optional[str] = None,
content_transfer_encoding: Optional[str] = None,
) -> None:
if isinstance(value, io.IOBase):
self._is_multipart = True
elif isinstance(value, (bytes, bytearray, memoryview)):
if filename is None and content_transfer_encoding is None:
filename = name
type_options: MultiDict[str] = MultiDict({"name": name})
if filename is not None and not isinstance(filename, str):
raise TypeError(
"filename must be an instance of str. " "Got: %s" % filename
)
if filename is None and isinstance(value, io.IOBase):
filename = guess_filename(value, name)
if filename is not None:
type_options["filename"] = filename
self._is_multipart = True
headers = {}
if content_type is not None:
if not isinstance(content_type, str):
raise TypeError(
"content_type must be an instance of str. " "Got: %s" % content_type
)
headers[hdrs.CONTENT_TYPE] = content_type
self._is_multipart = True
if content_transfer_encoding is not None:
if not isinstance(content_transfer_encoding, str):
raise TypeError(
"content_transfer_encoding must be an instance"
" of str. Got: %s" % content_transfer_encoding
)
headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
self._is_multipart = True
self._fields.append((type_options, headers, value))
def add_fields(self, *fields: Any) -> None:
to_add = list(fields)
while to_add:
rec = to_add.pop(0)
if isinstance(rec, io.IOBase):
k = guess_filename(rec, "unknown")
self.add_field(k, rec) # type: ignore[arg-type]
elif isinstance(rec, (MultiDictProxy, MultiDict)):
to_add.extend(rec.items())
elif isinstance(rec, (list, tuple)) and len(rec) == 2:
k, fp = rec
self.add_field(k, fp) # type: ignore[arg-type]
else:
raise TypeError(
"Only io.IOBase, multidict and (name, file) "
"pairs allowed, use .add_field() for passing "
"more complex parameters, got {!r}".format(rec)
)
def _gen_form_urlencoded(self) -> payload.BytesPayload:
# form data (x-www-form-urlencoded)
data = []
for type_options, _, value in self._fields:
data.append((type_options["name"], value))
charset = self._charset if self._charset is not None else "utf-8"
if charset == "utf-8":
content_type = "application/x-www-form-urlencoded"
else:
content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset
return payload.BytesPayload(
urlencode(data, doseq=True, encoding=charset).encode(),
content_type=content_type,
)
def _gen_form_data(self) -> multipart.MultipartWriter:
"""Encode a list of fields using the multipart/form-data MIME format"""
if self._is_processed:
raise RuntimeError("Form data has been processed already")
for dispparams, headers, value in self._fields:
try:
if hdrs.CONTENT_TYPE in headers:
part = payload.get_payload(
value,
content_type=headers[hdrs.CONTENT_TYPE],
headers=headers,
encoding=self._charset,
)
else:
part = payload.get_payload(
value, headers=headers, encoding=self._charset
)
except Exception as exc:
raise TypeError(
"Can not serialize value type: %r\n "
"headers: %r\n value: %r" % (type(value), headers, value)
) from exc
if dispparams:
part.set_content_disposition(
"form-data", quote_fields=self._quote_fields, **dispparams
)
            # FIXME cgi.FieldStorage doesn't like body parts with
            # Content-Length which were sent via chunked transfer encoding
assert part.headers is not None
part.headers.popall(hdrs.CONTENT_LENGTH, None)
self._writer.append_payload(part)
self._is_processed = True
return self._writer
def __call__(self) -> Payload:
if self._is_multipart:
return self._gen_form_data()
else:
return self._gen_form_urlencoded()
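# Hedged usage sketch (field names and file contents are hypothetical):
# mixing a plain field with a file-like field switches the form to
# multipart/form-data, as the add_field logic above shows.
def _example_form() -> FormData:
    form = FormData()
    form.add_field("user", "alice")
    form.add_field(
        "attachment",
        io.BytesIO(b"a,b\n1,2\n"),
        filename="report.csv",
        content_type="text/csv",
    )
    assert form.is_multipart  # the file-like value forced multipart mode
    return form               # calling form() now yields a MultipartWriter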
| 6,106 | Python | 34.300578 | 88 | 0.546348 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_routedef.py | import abc
import os # noqa
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
Iterator,
List,
Optional,
Sequence,
Type,
Union,
overload,
)
import attr
from . import hdrs
from .abc import AbstractView
from .typedefs import Handler, PathLike
if TYPE_CHECKING: # pragma: no cover
from .web_request import Request
from .web_response import StreamResponse
from .web_urldispatcher import AbstractRoute, UrlDispatcher
else:
Request = StreamResponse = UrlDispatcher = AbstractRoute = None
__all__ = (
"AbstractRouteDef",
"RouteDef",
"StaticDef",
"RouteTableDef",
"head",
"options",
"get",
"post",
"patch",
"put",
"delete",
"route",
"view",
"static",
)
class AbstractRouteDef(abc.ABC):
@abc.abstractmethod
def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
pass # pragma: no cover
_HandlerType = Union[Type[AbstractView], Handler]
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class RouteDef(AbstractRouteDef):
method: str
path: str
handler: _HandlerType
kwargs: Dict[str, Any]
def __repr__(self) -> str:
info = []
for name, value in sorted(self.kwargs.items()):
info.append(f", {name}={value!r}")
return "<RouteDef {method} {path} -> {handler.__name__!r}" "{info}>".format(
method=self.method, path=self.path, handler=self.handler, info="".join(info)
)
def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
if self.method in hdrs.METH_ALL:
reg = getattr(router, "add_" + self.method.lower())
return [reg(self.path, self.handler, **self.kwargs)]
else:
return [
router.add_route(self.method, self.path, self.handler, **self.kwargs)
]
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class StaticDef(AbstractRouteDef):
prefix: str
path: PathLike
kwargs: Dict[str, Any]
def __repr__(self) -> str:
info = []
for name, value in sorted(self.kwargs.items()):
info.append(f", {name}={value!r}")
return "<StaticDef {prefix} -> {path}" "{info}>".format(
prefix=self.prefix, path=self.path, info="".join(info)
)
def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
resource = router.add_static(self.prefix, self.path, **self.kwargs)
routes = resource.get_info().get("routes", {})
return list(routes.values())
def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
return RouteDef(method, path, handler, kwargs)
def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
return route(hdrs.METH_HEAD, path, handler, **kwargs)
def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
return route(hdrs.METH_OPTIONS, path, handler, **kwargs)
def get(
path: str,
handler: _HandlerType,
*,
name: Optional[str] = None,
allow_head: bool = True,
**kwargs: Any,
) -> RouteDef:
return route(
hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
)
def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
return route(hdrs.METH_POST, path, handler, **kwargs)
def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
return route(hdrs.METH_PUT, path, handler, **kwargs)
def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
return route(hdrs.METH_PATCH, path, handler, **kwargs)
def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
return route(hdrs.METH_DELETE, path, handler, **kwargs)
def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
return route(hdrs.METH_ANY, path, handler, **kwargs)
def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
return StaticDef(prefix, path, kwargs)
_Deco = Callable[[_HandlerType], _HandlerType]
class RouteTableDef(Sequence[AbstractRouteDef]):
"""Route definition table"""
def __init__(self) -> None:
self._items: List[AbstractRouteDef] = []
def __repr__(self) -> str:
return f"<RouteTableDef count={len(self._items)}>"
@overload
def __getitem__(self, index: int) -> AbstractRouteDef:
...
@overload
def __getitem__(self, index: slice) -> List[AbstractRouteDef]:
...
def __getitem__(self, index): # type: ignore[no-untyped-def]
return self._items[index]
def __iter__(self) -> Iterator[AbstractRouteDef]:
return iter(self._items)
def __len__(self) -> int:
return len(self._items)
def __contains__(self, item: object) -> bool:
return item in self._items
def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
def inner(handler: _HandlerType) -> _HandlerType:
self._items.append(RouteDef(method, path, handler, kwargs))
return handler
return inner
def head(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_HEAD, path, **kwargs)
def get(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_GET, path, **kwargs)
def post(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_POST, path, **kwargs)
def put(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_PUT, path, **kwargs)
def patch(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_PATCH, path, **kwargs)
def delete(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_DELETE, path, **kwargs)
def options(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_OPTIONS, path, **kwargs)
def view(self, path: str, **kwargs: Any) -> _Deco:
return self.route(hdrs.METH_ANY, path, **kwargs)
def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
self._items.append(StaticDef(prefix, path, kwargs))
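def _example_route_table() -> RouteTableDef:
    # A minimal usage sketch, not part of aiohttp's public API (names here
    # are hypothetical): collect handlers with decorators, then register the
    # whole table via ``app.router.add_routes(_example_route_table())``.
    from .web_response import Response  # deferred import avoids a cycle
    table = RouteTableDef()
    @table.get("/ping")
    async def ping(request: "Request") -> "StreamResponse":
        return Response(text="pong")
    return table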
| 6,152 | Python | 27.354839 | 88 | 0.617685 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/http.py | import http.server
import sys
from typing import Mapping, Tuple
from . import __version__
from .http_exceptions import HttpProcessingError as HttpProcessingError
from .http_parser import (
HeadersParser as HeadersParser,
HttpParser as HttpParser,
HttpRequestParser as HttpRequestParser,
HttpResponseParser as HttpResponseParser,
RawRequestMessage as RawRequestMessage,
RawResponseMessage as RawResponseMessage,
)
from .http_websocket import (
WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
WS_KEY as WS_KEY,
WebSocketError as WebSocketError,
WebSocketReader as WebSocketReader,
WebSocketWriter as WebSocketWriter,
WSCloseCode as WSCloseCode,
WSMessage as WSMessage,
WSMsgType as WSMsgType,
ws_ext_gen as ws_ext_gen,
ws_ext_parse as ws_ext_parse,
)
from .http_writer import (
HttpVersion as HttpVersion,
HttpVersion10 as HttpVersion10,
HttpVersion11 as HttpVersion11,
StreamWriter as StreamWriter,
)
__all__ = (
"HttpProcessingError",
"RESPONSES",
"SERVER_SOFTWARE",
# .http_writer
"StreamWriter",
"HttpVersion",
"HttpVersion10",
"HttpVersion11",
# .http_parser
"HeadersParser",
"HttpParser",
"HttpRequestParser",
"HttpResponseParser",
"RawRequestMessage",
"RawResponseMessage",
# .http_websocket
"WS_CLOSED_MESSAGE",
"WS_CLOSING_MESSAGE",
"WS_KEY",
"WebSocketReader",
"WebSocketWriter",
"ws_ext_gen",
"ws_ext_parse",
"WSMessage",
"WebSocketError",
"WSMsgType",
"WSCloseCode",
)
SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
sys.version_info, __version__
)
RESPONSES: Mapping[int, Tuple[str, str]] = http.server.BaseHTTPRequestHandler.responses
| 1,800 | Python | 24.366197 | 87 | 0.698333 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/helpers.py | """Various helper functions"""
import asyncio
import base64
import binascii
import datetime
import functools
import inspect
import netrc
import os
import platform
import re
import sys
import time
import warnings
import weakref
from collections import namedtuple
from contextlib import suppress
from email.parser import HeaderParser
from email.utils import parsedate
from math import ceil
from pathlib import Path
from types import TracebackType
from typing import (
Any,
Callable,
ContextManager,
Dict,
Generator,
Generic,
Iterable,
Iterator,
List,
Mapping,
Optional,
Pattern,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from urllib.parse import quote
from urllib.request import getproxies, proxy_bypass
import async_timeout
import attr
from multidict import MultiDict, MultiDictProxy
from yarl import URL
from . import hdrs
from .log import client_logger, internal_logger
from .typedefs import PathLike, Protocol # noqa
__all__ = ("BasicAuth", "ChainMapProxy", "ETag")
IS_MACOS = platform.system() == "Darwin"
IS_WINDOWS = platform.system() == "Windows"
PY_36 = sys.version_info >= (3, 6)
PY_37 = sys.version_info >= (3, 7)
PY_38 = sys.version_info >= (3, 8)
PY_310 = sys.version_info >= (3, 10)
if sys.version_info < (3, 7):
import idna_ssl
idna_ssl.patch_match_hostname()
def all_tasks(
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> Set["asyncio.Task[Any]"]:
tasks = list(asyncio.Task.all_tasks(loop))
return {t for t in tasks if not t.done()}
else:
all_tasks = asyncio.all_tasks
_T = TypeVar("_T")
_S = TypeVar("_S")
sentinel: Any = object()
NO_EXTENSIONS: bool = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))
# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
# for compatibility with older versions
DEBUG: bool = getattr(sys.flags, "dev_mode", False) or (
not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
)
CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
chr(127),
}
SEPARATORS = {
"(",
")",
"<",
">",
"@",
",",
";",
":",
"\\",
'"',
"/",
"[",
"]",
"?",
"=",
"{",
"}",
" ",
chr(9),
}
TOKEN = CHAR ^ CTL ^ SEPARATORS
class noop:
def __await__(self) -> Generator[None, None, None]:
yield
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
"""Http basic authentication helper."""
def __new__(
cls, login: str, password: str = "", encoding: str = "latin1"
) -> "BasicAuth":
if login is None:
raise ValueError("None is not allowed as login value")
if password is None:
raise ValueError("None is not allowed as password value")
if ":" in login:
raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
return super().__new__(cls, login, password, encoding)
@classmethod
def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
"""Create a BasicAuth object from an Authorization HTTP header."""
try:
auth_type, encoded_credentials = auth_header.split(" ", 1)
except ValueError:
raise ValueError("Could not parse authorization header.")
if auth_type.lower() != "basic":
raise ValueError("Unknown authorization method %s" % auth_type)
try:
decoded = base64.b64decode(
encoded_credentials.encode("ascii"), validate=True
).decode(encoding)
except binascii.Error:
raise ValueError("Invalid base64 encoding.")
try:
# RFC 2617 HTTP Authentication
# https://www.ietf.org/rfc/rfc2617.txt
# the colon must be present, but the username and password may be
# otherwise blank.
username, password = decoded.split(":", 1)
except ValueError:
raise ValueError("Invalid credentials.")
return cls(username, password, encoding=encoding)
@classmethod
def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
"""Create BasicAuth from url."""
if not isinstance(url, URL):
raise TypeError("url should be yarl.URL instance")
if url.user is None:
return None
return cls(url.user, url.password or "", encoding=encoding)
def encode(self) -> str:
"""Encode credentials."""
creds = (f"{self.login}:{self.password}").encode(self.encoding)
return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
auth = BasicAuth.from_url(url)
if auth is None:
return url, None
else:
return url.with_user(None), auth
def netrc_from_env() -> Optional[netrc.netrc]:
"""Load netrc from file.
Attempt to load it from the path specified by the env-var
NETRC or in the default location in the user's home directory.
Returns None if it couldn't be found or fails to parse.
"""
netrc_env = os.environ.get("NETRC")
if netrc_env is not None:
netrc_path = Path(netrc_env)
else:
try:
home_dir = Path.home()
except RuntimeError as e: # pragma: no cover
# if pathlib can't resolve home, it may raise a RuntimeError
client_logger.debug(
"Could not resolve home directory when "
"trying to look for .netrc file: %s",
e,
)
return None
netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")
try:
return netrc.netrc(str(netrc_path))
except netrc.NetrcParseError as e:
client_logger.warning("Could not parse .netrc file: %s", e)
except OSError as e:
# we couldn't read the file (doesn't exist, permissions, etc.)
if netrc_env or netrc_path.is_file():
# only warn if the environment wanted us to load it,
# or it appears like the default file does actually exist
client_logger.warning("Could not read .netrc file: %s", e)
return None
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
proxy: URL
proxy_auth: Optional[BasicAuth]
def proxies_from_env() -> Dict[str, ProxyInfo]:
proxy_urls = {
k: URL(v)
for k, v in getproxies().items()
if k in ("http", "https", "ws", "wss")
}
netrc_obj = netrc_from_env()
stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
ret = {}
for proto, val in stripped.items():
proxy, auth = val
if proxy.scheme in ("https", "wss"):
client_logger.warning(
"%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
)
continue
if netrc_obj and auth is None:
auth_from_netrc = None
if proxy.host is not None:
auth_from_netrc = netrc_obj.authenticators(proxy.host)
if auth_from_netrc is not None:
# auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # either `user` or `account` may hold the username;
                # if `user` is empty, fall back to `account`
*logins, password = auth_from_netrc
login = logins[0] if logins[0] else logins[-1]
auth = BasicAuth(cast(str, login), cast(str, password))
ret[proto] = ProxyInfo(proxy, auth)
return ret
def current_task(
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
if sys.version_info >= (3, 7):
return asyncio.current_task(loop=loop)
else:
return asyncio.Task.current_task(loop=loop)
def get_running_loop(
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
if loop is None:
loop = asyncio.get_event_loop()
if not loop.is_running():
warnings.warn(
"The object should be created within an async function",
DeprecationWarning,
stacklevel=3,
)
if loop.get_debug():
internal_logger.warning(
"The object should be created within an async function", stack_info=True
)
return loop
def isasyncgenfunction(obj: Any) -> bool:
func = getattr(inspect, "isasyncgenfunction", None)
if func is not None:
return func(obj) # type: ignore[no-any-return]
else:
return False
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
"""Get a permitted proxy for the given URL from the env."""
if url.host is not None and proxy_bypass(url.host):
raise LookupError(f"Proxying is disallowed for `{url.host!r}`")
proxies_in_env = proxies_from_env()
try:
proxy_info = proxies_in_env[url.scheme]
except KeyError:
raise LookupError(f"No proxies found for `{url!s}` in the env")
else:
return proxy_info.proxy, proxy_info.proxy_auth
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
type: str
subtype: str
suffix: str
parameters: "MultiDictProxy[str]"
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
"""Parses a MIME type into its components.
mimetype is a MIME type string.
Returns a MimeType object.
Example:
>>> parse_mimetype('text/html; charset=utf-8')
MimeType(type='text', subtype='html', suffix='',
parameters={'charset': 'utf-8'})
"""
if not mimetype:
return MimeType(
type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
)
parts = mimetype.split(";")
params: MultiDict[str] = MultiDict()
for item in parts[1:]:
if not item:
continue
key, value = cast(
Tuple[str, str], item.split("=", 1) if "=" in item else (item, "")
)
params.add(key.lower().strip(), value.strip(' "'))
fulltype = parts[0].strip().lower()
if fulltype == "*":
fulltype = "*/*"
mtype, stype = (
cast(Tuple[str, str], fulltype.split("/", 1))
if "/" in fulltype
else (fulltype, "")
)
stype, suffix = (
cast(Tuple[str, str], stype.split("+", 1)) if "+" in stype else (stype, "")
)
return MimeType(
type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
)
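def _example_parse_mimetype() -> MimeType:
    # A small illustrative sketch: for ``application/hal+json`` the subtype
    # is ``hal`` and the structured-syntax suffix is ``json``.
    return parse_mimetype("application/hal+json; charset=utf-8")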
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
name = getattr(obj, "name", None)
if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
return Path(name).name
return default
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
def quoted_string(content: str) -> str:
"""Return 7-bit content as quoted-string.
Format content into a quoted-string as defined in RFC5322 for
Internet Message Format. Notice that this is not the 8-bit HTTP
format, but the 7-bit email format. Content must be in usascii or
a ValueError is raised.
"""
if not (QCONTENT > set(content)):
raise ValueError(f"bad content for quoted-string {content!r}")
return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
def content_disposition_header(
disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
"""Sets ``Content-Disposition`` header for MIME.
    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7578 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.
    disptype is a disposition type: inline, attachment, form-data.
    Should be a valid extension token (see RFC 2183).
    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set quote_fields to False if the recipient
    can take 8-bit file names and field values.
_charset specifies the charset to use when quote_fields is True.
params is a dict with disposition params.
"""
if not disptype or not (TOKEN > set(disptype)):
raise ValueError("bad content disposition type {!r}" "".format(disptype))
value = disptype
if params:
lparams = []
for key, val in params.items():
if not key or not (TOKEN > set(key)):
raise ValueError(
"bad content disposition parameter" " {!r}={!r}".format(key, val)
)
if quote_fields:
if key.lower() == "filename":
qval = quote(val, "", encoding=_charset)
lparams.append((key, '"%s"' % qval))
else:
try:
qval = quoted_string(val)
except ValueError:
qval = "".join(
(_charset, "''", quote(val, "", encoding=_charset))
)
lparams.append((key + "*", qval))
else:
lparams.append((key, '"%s"' % qval))
else:
qval = val.replace("\\", "\\\\").replace('"', '\\"')
lparams.append((key, '"%s"' % qval))
sparams = "; ".join("=".join(pair) for pair in lparams)
value = "; ".join((value, sparams))
return value
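def _example_content_disposition() -> str:
    # A minimal sketch (field names are hypothetical): build the MIME
    # Content-Disposition value for a multipart form-data file field.
    # Result: 'form-data; name="upload"; filename="report.txt"'
    return content_disposition_header("form-data", name="upload", filename="report.txt")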
class _TSelf(Protocol, Generic[_T]):
_cache: Dict[str, _T]
class reify(Generic[_T]):
"""Use as a class method decorator.
It operates almost exactly like
the Python `@property` decorator, but it puts the result of the
method it decorates into the instance dict after the first call,
effectively replacing the function it decorates with an instance
variable. It is, in Python parlance, a data descriptor.
"""
def __init__(self, wrapped: Callable[..., _T]) -> None:
self.wrapped = wrapped
self.__doc__ = wrapped.__doc__
self.name = wrapped.__name__
def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
try:
try:
return inst._cache[self.name]
except KeyError:
val = self.wrapped(inst)
inst._cache[self.name] = val
return val
except AttributeError:
if inst is None:
return self
raise
def __set__(self, inst: _TSelf[_T], value: _T) -> None:
raise AttributeError("reified property is read-only")
reify_py = reify
try:
from ._helpers import reify as reify_c
if not NO_EXTENSIONS:
reify = reify_c # type: ignore[misc,assignment]
except ImportError:
pass
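class _ExampleReified:
    # A minimal sketch, not part of aiohttp's API: classes using ``reify``
    # must provide a ``_cache`` dict; the decorated method runs once per
    # instance and later reads are served from the cache.
    def __init__(self) -> None:
        self._cache: Dict[str, Any] = {}
    @reify
    def answer(self) -> int:
        return 2 + 2  # computed on first access only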
_ipv4_pattern = (
r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
_ipv6_pattern = (
r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
r":|:(:[A-F0-9]{1,4}){7})$"
)
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)
def _is_ip_address(
regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
) -> bool:
if host is None:
return False
if isinstance(host, str):
return bool(regex.match(host))
elif isinstance(host, (bytes, bytearray, memoryview)):
return bool(regexb.match(host))
else:
raise TypeError(f"{host} [{type(host)}] is not a str or bytes")
is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)
def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
return is_ipv4_address(host) or is_ipv6_address(host)
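def _example_ip_checks() -> Tuple[bool, bool, bool]:
    # A small illustrative sketch: the helpers accept str or bytes hosts.
    return (
        is_ipv4_address("127.0.0.1"),  # True
        is_ipv6_address(b"::1"),  # True
        is_ip_address("example.com"),  # False
    )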
def next_whole_second() -> datetime.datetime:
"""Return current time rounded up to the next whole second."""
return datetime.datetime.now(datetime.timezone.utc).replace(
microsecond=0
) + datetime.timedelta(seconds=0)
_cached_current_datetime: Optional[int] = None
_cached_formatted_datetime = ""
def rfc822_formatted_time() -> str:
global _cached_current_datetime
global _cached_formatted_datetime
now = int(time.time())
if now != _cached_current_datetime:
# Weekday and month names for HTTP date/time formatting;
# always English!
# Tuples are constants stored in codeobject!
_weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
_monthname = (
"", # Dummy so we can use 1-based month numbers
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec",
)
year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
_cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
_weekdayname[wd],
day,
_monthname[month],
year,
hh,
mm,
ss,
)
_cached_current_datetime = now
return _cached_formatted_datetime
def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
ref, name = info
ob = ref()
if ob is not None:
with suppress(Exception):
getattr(ob, name)()
def weakref_handle(
ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
if timeout is not None and timeout > 0:
when = loop.time() + timeout
if timeout >= 5:
when = ceil(when)
return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
return None
def call_later(
cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
if timeout is not None and timeout > 0:
when = loop.time() + timeout
if timeout > 5:
when = ceil(when)
return loop.call_at(when, cb)
return None
class TimeoutHandle:
"""Timeout handle"""
def __init__(
self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
) -> None:
self._timeout = timeout
self._loop = loop
self._callbacks: List[
Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
] = []
def register(
self, callback: Callable[..., None], *args: Any, **kwargs: Any
) -> None:
self._callbacks.append((callback, args, kwargs))
def close(self) -> None:
self._callbacks.clear()
def start(self) -> Optional[asyncio.Handle]:
timeout = self._timeout
if timeout is not None and timeout > 0:
when = self._loop.time() + timeout
if timeout >= 5:
when = ceil(when)
return self._loop.call_at(when, self.__call__)
else:
return None
def timer(self) -> "BaseTimerContext":
if self._timeout is not None and self._timeout > 0:
timer = TimerContext(self._loop)
self.register(timer.timeout)
return timer
else:
return TimerNoop()
def __call__(self) -> None:
for cb, args, kwargs in self._callbacks:
with suppress(Exception):
cb(*args, **kwargs)
self._callbacks.clear()
class BaseTimerContext(ContextManager["BaseTimerContext"]):
pass
class TimerNoop(BaseTimerContext):
def __enter__(self) -> BaseTimerContext:
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
return
class TimerContext(BaseTimerContext):
"""Low resolution timeout context manager"""
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
self._tasks: List[asyncio.Task[Any]] = []
self._cancelled = False
def __enter__(self) -> BaseTimerContext:
task = current_task(loop=self._loop)
if task is None:
raise RuntimeError(
"Timeout context manager should be used " "inside a task"
)
if self._cancelled:
raise asyncio.TimeoutError from None
self._tasks.append(task)
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
if self._tasks:
self._tasks.pop()
if exc_type is asyncio.CancelledError and self._cancelled:
raise asyncio.TimeoutError from None
return None
def timeout(self) -> None:
if not self._cancelled:
for task in set(self._tasks):
task.cancel()
self._cancelled = True
def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout:
if delay is None or delay <= 0:
return async_timeout.timeout(None)
loop = get_running_loop()
now = loop.time()
when = now + delay
if delay > 5:
when = ceil(when)
return async_timeout.timeout_at(when)
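async def _example_ceil_timeout() -> None:
    # A minimal sketch, not part of aiohttp's API: wrap an await with a
    # coarse deadline; delays above five seconds get ceiled to a whole
    # loop-time second (see ``ceil_timeout`` above).
    async with ceil_timeout(10):
        await asyncio.sleep(0)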
class HeadersMixin:
ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])
_content_type: Optional[str] = None
_content_dict: Optional[Dict[str, str]] = None
_stored_content_type = sentinel
def _parse_content_type(self, raw: str) -> None:
self._stored_content_type = raw
if raw is None:
# default value according to RFC 2616
self._content_type = "application/octet-stream"
self._content_dict = {}
else:
msg = HeaderParser().parsestr("Content-Type: " + raw)
self._content_type = msg.get_content_type()
params = msg.get_params()
self._content_dict = dict(params[1:]) # First element is content type again
@property
def content_type(self) -> str:
"""The value of content part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_type # type: ignore[return-value]
@property
def charset(self) -> Optional[str]:
"""The value of charset part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_dict.get("charset") # type: ignore[union-attr]
@property
def content_length(self) -> Optional[int]:
"""The value of Content-Length HTTP header."""
content_length = self._headers.get( # type: ignore[attr-defined]
hdrs.CONTENT_LENGTH
)
if content_length is not None:
return int(content_length)
else:
return None
def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
if not fut.done():
fut.set_result(result)
def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
if not fut.done():
fut.set_exception(exc)
class ChainMapProxy(Mapping[str, Any]):
__slots__ = ("_maps",)
def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
self._maps = tuple(maps)
def __init_subclass__(cls) -> None:
raise TypeError(
"Inheritance class {} from ChainMapProxy "
"is forbidden".format(cls.__name__)
)
def __getitem__(self, key: str) -> Any:
for mapping in self._maps:
try:
return mapping[key]
except KeyError:
pass
raise KeyError(key)
def get(self, key: str, default: Any = None) -> Any:
return self[key] if key in self else default
def __len__(self) -> int:
# reuses stored hash values if possible
return len(set().union(*self._maps)) # type: ignore[arg-type]
def __iter__(self) -> Iterator[str]:
d: Dict[str, Any] = {}
for mapping in reversed(self._maps):
# reuses stored hash values if possible
d.update(mapping)
return iter(d)
def __contains__(self, key: object) -> bool:
return any(key in m for m in self._maps)
def __bool__(self) -> bool:
return any(self._maps)
def __repr__(self) -> str:
content = ", ".join(map(repr, self._maps))
return f"ChainMapProxy({content})"
# https://tools.ietf.org/html/rfc7232#section-2.3
_ETAGC = r"[!#-}\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")
ETAG_ANY = "*"
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
value: str
is_weak: bool = False
def validate_etag_value(value: str) -> None:
if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value):
raise ValueError(
f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
)
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
"""Process a date string, return a datetime object"""
if date_str is not None:
timetuple = parsedate(date_str)
if timetuple is not None:
with suppress(ValueError):
return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
return None
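def _example_parse_http_date() -> Optional[datetime.datetime]:
    # A minimal sketch: RFC 822/1123 dates parse to aware UTC datetimes;
    # unparseable input yields None.
    return parse_http_date("Mon, 01 Jan 2024 00:00:00 GMT")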
| 26,361 | Python | 29.025057 | 88 | 0.579113 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_response.py | import asyncio
import collections.abc
import datetime
import enum
import json
import math
import time
import warnings
import zlib
from concurrent.futures import Executor
from http.cookies import Morsel, SimpleCookie
from typing import (
TYPE_CHECKING,
Any,
Dict,
Iterator,
Mapping,
MutableMapping,
Optional,
Tuple,
Union,
cast,
)
from multidict import CIMultiDict, istr
from . import hdrs, payload
from .abc import AbstractStreamWriter
from .helpers import (
ETAG_ANY,
PY_38,
QUOTED_ETAG_RE,
ETag,
HeadersMixin,
parse_http_date,
rfc822_formatted_time,
sentinel,
validate_etag_value,
)
from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11
from .payload import Payload
from .typedefs import JSONEncoder, LooseHeaders
__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
if TYPE_CHECKING: # pragma: no cover
from .web_request import BaseRequest
BaseClass = MutableMapping[str, Any]
else:
BaseClass = collections.abc.MutableMapping
if not PY_38:
# allow samesite to be used in python < 3.8
# already permitted in python 3.8, see https://bugs.python.org/issue29613
Morsel._reserved["samesite"] = "SameSite" # type: ignore[attr-defined]
class ContentCoding(enum.Enum):
# The content codings that we have support for.
#
# Additional registered codings are listed at:
# https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
deflate = "deflate"
gzip = "gzip"
identity = "identity"
############################################################
# HTTP Response classes
############################################################
class StreamResponse(BaseClass, HeadersMixin):
_length_check = True
def __init__(
self,
*,
status: int = 200,
reason: Optional[str] = None,
headers: Optional[LooseHeaders] = None,
) -> None:
self._body = None
self._keep_alive: Optional[bool] = None
self._chunked = False
self._compression = False
self._compression_force: Optional[ContentCoding] = None
self._cookies: SimpleCookie[str] = SimpleCookie()
self._req: Optional[BaseRequest] = None
self._payload_writer: Optional[AbstractStreamWriter] = None
self._eof_sent = False
self._body_length = 0
self._state: Dict[str, Any] = {}
if headers is not None:
self._headers: CIMultiDict[str] = CIMultiDict(headers)
else:
self._headers = CIMultiDict()
self.set_status(status, reason)
@property
def prepared(self) -> bool:
return self._payload_writer is not None
@property
def task(self) -> "Optional[asyncio.Task[None]]":
if self._req:
return self._req.task
else:
return None
@property
def status(self) -> int:
return self._status
@property
def chunked(self) -> bool:
return self._chunked
@property
def compression(self) -> bool:
return self._compression
@property
def reason(self) -> str:
return self._reason
def set_status(
self,
status: int,
reason: Optional[str] = None,
_RESPONSES: Mapping[int, Tuple[str, str]] = RESPONSES,
) -> None:
assert not self.prepared, (
"Cannot change the response status code after " "the headers have been sent"
)
self._status = int(status)
if reason is None:
try:
reason = _RESPONSES[self._status][0]
except Exception:
reason = ""
self._reason = reason
@property
def keep_alive(self) -> Optional[bool]:
return self._keep_alive
def force_close(self) -> None:
self._keep_alive = False
@property
def body_length(self) -> int:
return self._body_length
@property
def output_length(self) -> int:
warnings.warn("output_length is deprecated", DeprecationWarning)
assert self._payload_writer
return self._payload_writer.buffer_size
def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
"""Enables automatic chunked transfer encoding."""
self._chunked = True
if hdrs.CONTENT_LENGTH in self._headers:
raise RuntimeError(
"You can't enable chunked encoding when " "a content length is set"
)
if chunk_size is not None:
warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
def enable_compression(
self, force: Optional[Union[bool, ContentCoding]] = None
) -> None:
"""Enables response compression encoding."""
# Backwards compatibility for when force was a bool <0.17.
if type(force) == bool:
force = ContentCoding.deflate if force else ContentCoding.identity
warnings.warn(
"Using boolean for force is deprecated #3318", DeprecationWarning
)
elif force is not None:
assert isinstance(force, ContentCoding), (
"force should one of " "None, bool or " "ContentEncoding"
)
self._compression = True
self._compression_force = force
@property
def headers(self) -> "CIMultiDict[str]":
return self._headers
@property
def cookies(self) -> "SimpleCookie[str]":
return self._cookies
def set_cookie(
self,
name: str,
value: str,
*,
expires: Optional[str] = None,
domain: Optional[str] = None,
max_age: Optional[Union[int, str]] = None,
path: str = "/",
secure: Optional[bool] = None,
httponly: Optional[bool] = None,
version: Optional[str] = None,
samesite: Optional[str] = None,
) -> None:
"""Set or update response cookie.
        Sets a new cookie or updates an existing one with the new value.
        Only params that are not None are updated.
"""
old = self._cookies.get(name)
if old is not None and old.coded_value == "":
# deleted cookie
self._cookies.pop(name, None)
self._cookies[name] = value
c = self._cookies[name]
if expires is not None:
c["expires"] = expires
elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
del c["expires"]
if domain is not None:
c["domain"] = domain
if max_age is not None:
c["max-age"] = str(max_age)
elif "max-age" in c:
del c["max-age"]
c["path"] = path
if secure is not None:
c["secure"] = secure
if httponly is not None:
c["httponly"] = httponly
if version is not None:
c["version"] = version
if samesite is not None:
c["samesite"] = samesite
def del_cookie(
self, name: str, *, domain: Optional[str] = None, path: str = "/"
) -> None:
"""Delete cookie.
        Creates a new empty, already-expired cookie.
"""
# TODO: do we need domain/path here?
self._cookies.pop(name, None)
self.set_cookie(
name,
"",
max_age=0,
expires="Thu, 01 Jan 1970 00:00:00 GMT",
domain=domain,
path=path,
)
@property
def content_length(self) -> Optional[int]:
# Just a placeholder for adding setter
return super().content_length
@content_length.setter
def content_length(self, value: Optional[int]) -> None:
if value is not None:
value = int(value)
if self._chunked:
raise RuntimeError(
"You can't set content length when " "chunked encoding is enable"
)
self._headers[hdrs.CONTENT_LENGTH] = str(value)
else:
self._headers.pop(hdrs.CONTENT_LENGTH, None)
@property
def content_type(self) -> str:
# Just a placeholder for adding setter
return super().content_type
@content_type.setter
def content_type(self, value: str) -> None:
self.content_type # read header values if needed
self._content_type = str(value)
self._generate_content_type_header()
@property
def charset(self) -> Optional[str]:
# Just a placeholder for adding setter
return super().charset
@charset.setter
def charset(self, value: Optional[str]) -> None:
ctype = self.content_type # read header values if needed
if ctype == "application/octet-stream":
raise RuntimeError(
"Setting charset for application/octet-stream "
"doesn't make sense, setup content_type first"
)
assert self._content_dict is not None
if value is None:
self._content_dict.pop("charset", None)
else:
self._content_dict["charset"] = str(value).lower()
self._generate_content_type_header()
@property
def last_modified(self) -> Optional[datetime.datetime]:
"""The value of Last-Modified HTTP header, or None.
This header is represented as a `datetime` object.
"""
return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))
@last_modified.setter
def last_modified(
self, value: Optional[Union[int, float, datetime.datetime, str]]
) -> None:
if value is None:
self._headers.pop(hdrs.LAST_MODIFIED, None)
elif isinstance(value, (int, float)):
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
)
elif isinstance(value, datetime.datetime):
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
)
elif isinstance(value, str):
self._headers[hdrs.LAST_MODIFIED] = value
@property
def etag(self) -> Optional[ETag]:
quoted_value = self._headers.get(hdrs.ETAG)
if not quoted_value:
return None
elif quoted_value == ETAG_ANY:
return ETag(value=ETAG_ANY)
match = QUOTED_ETAG_RE.fullmatch(quoted_value)
if not match:
return None
is_weak, value = match.group(1, 2)
return ETag(
is_weak=bool(is_weak),
value=value,
)
@etag.setter
def etag(self, value: Optional[Union[ETag, str]]) -> None:
if value is None:
self._headers.pop(hdrs.ETAG, None)
elif (isinstance(value, str) and value == ETAG_ANY) or (
isinstance(value, ETag) and value.value == ETAG_ANY
):
self._headers[hdrs.ETAG] = ETAG_ANY
elif isinstance(value, str):
validate_etag_value(value)
self._headers[hdrs.ETAG] = f'"{value}"'
elif isinstance(value, ETag) and isinstance(value.value, str):
validate_etag_value(value.value)
hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
self._headers[hdrs.ETAG] = hdr_value
else:
raise ValueError(
f"Unsupported etag type: {type(value)}. "
f"etag must be str, ETag or None"
)
def _generate_content_type_header(
self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
) -> None:
assert self._content_dict is not None
assert self._content_type is not None
params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
if params:
ctype = self._content_type + "; " + params
else:
ctype = self._content_type
self._headers[CONTENT_TYPE] = ctype
async def _do_start_compression(self, coding: ContentCoding) -> None:
if coding != ContentCoding.identity:
assert self._payload_writer is not None
self._headers[hdrs.CONTENT_ENCODING] = coding.value
self._payload_writer.enable_compression(coding.value)
# Compressed payload may have different content length,
# remove the header
self._headers.popall(hdrs.CONTENT_LENGTH, None)
async def _start_compression(self, request: "BaseRequest") -> None:
if self._compression_force:
await self._do_start_compression(self._compression_force)
else:
accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
for coding in ContentCoding:
if coding.value in accept_encoding:
await self._do_start_compression(coding)
return
async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
if self._eof_sent:
return None
if self._payload_writer is not None:
return self._payload_writer
return await self._start(request)
async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
self._req = request
writer = self._payload_writer = request._payload_writer
await self._prepare_headers()
await request._prepare_hook(self)
await self._write_headers()
return writer
async def _prepare_headers(self) -> None:
request = self._req
assert request is not None
writer = self._payload_writer
assert writer is not None
keep_alive = self._keep_alive
if keep_alive is None:
keep_alive = request.keep_alive
self._keep_alive = keep_alive
version = request.version
headers = self._headers
for cookie in self._cookies.values():
value = cookie.output(header="")[1:]
headers.add(hdrs.SET_COOKIE, value)
if self._compression:
await self._start_compression(request)
if self._chunked:
if version != HttpVersion11:
raise RuntimeError(
"Using chunked encoding is forbidden "
"for HTTP/{0.major}.{0.minor}".format(request.version)
)
writer.enable_chunking()
headers[hdrs.TRANSFER_ENCODING] = "chunked"
if hdrs.CONTENT_LENGTH in headers:
del headers[hdrs.CONTENT_LENGTH]
elif self._length_check:
writer.length = self.content_length
if writer.length is None:
if version >= HttpVersion11 and self.status != 204:
writer.enable_chunking()
headers[hdrs.TRANSFER_ENCODING] = "chunked"
if hdrs.CONTENT_LENGTH in headers:
del headers[hdrs.CONTENT_LENGTH]
else:
keep_alive = False
# HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
# HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
elif version >= HttpVersion11 and self.status in (100, 101, 102, 103, 204):
del headers[hdrs.CONTENT_LENGTH]
if self.status not in (204, 304):
headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
headers.setdefault(hdrs.DATE, rfc822_formatted_time())
headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)
# connection header
if hdrs.CONNECTION not in headers:
if keep_alive:
if version == HttpVersion10:
headers[hdrs.CONNECTION] = "keep-alive"
else:
if version == HttpVersion11:
headers[hdrs.CONNECTION] = "close"
async def _write_headers(self) -> None:
request = self._req
assert request is not None
writer = self._payload_writer
assert writer is not None
# status line
version = request.version
status_line = "HTTP/{}.{} {} {}".format(
version[0], version[1], self._status, self._reason
)
await writer.write_headers(status_line, self._headers)
async def write(self, data: bytes) -> None:
assert isinstance(
data, (bytes, bytearray, memoryview)
), "data argument must be byte-ish (%r)" % type(data)
if self._eof_sent:
raise RuntimeError("Cannot call write() after write_eof()")
if self._payload_writer is None:
raise RuntimeError("Cannot call write() before prepare()")
await self._payload_writer.write(data)
async def drain(self) -> None:
assert not self._eof_sent, "EOF has already been sent"
assert self._payload_writer is not None, "Response has not been started"
warnings.warn(
"drain method is deprecated, use await resp.write()",
DeprecationWarning,
stacklevel=2,
)
await self._payload_writer.drain()
async def write_eof(self, data: bytes = b"") -> None:
assert isinstance(
data, (bytes, bytearray, memoryview)
), "data argument must be byte-ish (%r)" % type(data)
if self._eof_sent:
return
assert self._payload_writer is not None, "Response has not been started"
await self._payload_writer.write_eof(data)
self._eof_sent = True
self._req = None
self._body_length = self._payload_writer.output_size
self._payload_writer = None
def __repr__(self) -> str:
if self._eof_sent:
info = "eof"
elif self.prepared:
assert self._req is not None
info = f"{self._req.method} {self._req.path} "
else:
info = "not prepared"
return f"<{self.__class__.__name__} {self.reason} {info}>"
def __getitem__(self, key: str) -> Any:
return self._state[key]
def __setitem__(self, key: str, value: Any) -> None:
self._state[key] = value
def __delitem__(self, key: str) -> None:
del self._state[key]
def __len__(self) -> int:
return len(self._state)
def __iter__(self) -> Iterator[str]:
return iter(self._state)
def __hash__(self) -> int:
return hash(id(self))
def __eq__(self, other: object) -> bool:
return self is other
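async def _example_stream_handler(request: "BaseRequest") -> StreamResponse:
    # A minimal handler sketch (hypothetical, not part of aiohttp's API):
    # set cookies/headers before ``prepare()``, then stream chunks and
    # finish with ``write_eof()``.
    resp = StreamResponse(status=200, headers={"X-Demo": "1"})
    resp.set_cookie("session", "abc123", httponly=True, samesite="Lax")
    await resp.prepare(request)
    await resp.write(b"hello ")
    await resp.write(b"world")
    await resp.write_eof()
    return resp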
class Response(StreamResponse):
def __init__(
self,
*,
body: Any = None,
status: int = 200,
reason: Optional[str] = None,
text: Optional[str] = None,
headers: Optional[LooseHeaders] = None,
content_type: Optional[str] = None,
charset: Optional[str] = None,
zlib_executor_size: Optional[int] = None,
zlib_executor: Optional[Executor] = None,
) -> None:
if body is not None and text is not None:
raise ValueError("body and text are not allowed together")
if headers is None:
real_headers: CIMultiDict[str] = CIMultiDict()
elif not isinstance(headers, CIMultiDict):
real_headers = CIMultiDict(headers)
else:
real_headers = headers # = cast('CIMultiDict[str]', headers)
if content_type is not None and "charset" in content_type:
raise ValueError("charset must not be in content_type " "argument")
if text is not None:
if hdrs.CONTENT_TYPE in real_headers:
if content_type or charset:
raise ValueError(
"passing both Content-Type header and "
"content_type or charset params "
"is forbidden"
)
else:
# fast path for filling headers
if not isinstance(text, str):
raise TypeError("text argument must be str (%r)" % type(text))
if content_type is None:
content_type = "text/plain"
if charset is None:
charset = "utf-8"
real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
body = text.encode(charset)
text = None
else:
if hdrs.CONTENT_TYPE in real_headers:
if content_type is not None or charset is not None:
raise ValueError(
"passing both Content-Type header and "
"content_type or charset params "
"is forbidden"
)
else:
if content_type is not None:
if charset is not None:
content_type += "; charset=" + charset
real_headers[hdrs.CONTENT_TYPE] = content_type
super().__init__(status=status, reason=reason, headers=real_headers)
if text is not None:
self.text = text
else:
self.body = body
self._compressed_body: Optional[bytes] = None
self._zlib_executor_size = zlib_executor_size
self._zlib_executor = zlib_executor
@property
def body(self) -> Optional[Union[bytes, Payload]]:
return self._body
@body.setter
def body(
self,
body: bytes,
CONTENT_TYPE: istr = hdrs.CONTENT_TYPE,
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
) -> None:
if body is None:
self._body: Optional[bytes] = None
self._body_payload: bool = False
elif isinstance(body, (bytes, bytearray)):
self._body = body
self._body_payload = False
else:
try:
self._body = body = payload.PAYLOAD_REGISTRY.get(body)
except payload.LookupError:
raise ValueError("Unsupported body type %r" % type(body))
self._body_payload = True
headers = self._headers
# set content-length header if needed
if not self._chunked and CONTENT_LENGTH not in headers:
size = body.size
if size is not None:
headers[CONTENT_LENGTH] = str(size)
# set content-type
if CONTENT_TYPE not in headers:
headers[CONTENT_TYPE] = body.content_type
# copy payload headers
if body.headers:
for (key, value) in body.headers.items():
if key not in headers:
headers[key] = value
self._compressed_body = None
@property
def text(self) -> Optional[str]:
if self._body is None:
return None
return self._body.decode(self.charset or "utf-8")
@text.setter
def text(self, text: str) -> None:
assert text is None or isinstance(
text, str
), "text argument must be str (%r)" % type(text)
if self.content_type == "application/octet-stream":
self.content_type = "text/plain"
if self.charset is None:
self.charset = "utf-8"
self._body = text.encode(self.charset)
self._body_payload = False
self._compressed_body = None
@property
def content_length(self) -> Optional[int]:
if self._chunked:
return None
if hdrs.CONTENT_LENGTH in self._headers:
return super().content_length
if self._compressed_body is not None:
# Return length of the compressed body
return len(self._compressed_body)
elif self._body_payload:
# A payload without content length, or a compressed payload
return None
elif self._body is not None:
return len(self._body)
else:
return 0
@content_length.setter
def content_length(self, value: Optional[int]) -> None:
raise RuntimeError("Content length is set automatically")
async def write_eof(self, data: bytes = b"") -> None:
if self._eof_sent:
return
if self._compressed_body is None:
body: Optional[Union[bytes, Payload]] = self._body
else:
body = self._compressed_body
assert not data, f"data arg is not supported, got {data!r}"
assert self._req is not None
assert self._payload_writer is not None
if body is not None:
if self._req._method == hdrs.METH_HEAD or self._status in [204, 304]:
await super().write_eof()
elif self._body_payload:
payload = cast(Payload, body)
await payload.write(self._payload_writer)
await super().write_eof()
else:
await super().write_eof(cast(bytes, body))
else:
await super().write_eof()
async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers:
if not self._body_payload:
if self._body is not None:
self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body))
else:
self._headers[hdrs.CONTENT_LENGTH] = "0"
return await super()._start(request)
def _compress_body(self, zlib_mode: int) -> None:
assert zlib_mode > 0
compressobj = zlib.compressobj(wbits=zlib_mode)
body_in = self._body
assert body_in is not None
self._compressed_body = compressobj.compress(body_in) + compressobj.flush()
async def _do_start_compression(self, coding: ContentCoding) -> None:
if self._body_payload or self._chunked:
return await super()._do_start_compression(coding)
if coding != ContentCoding.identity:
# Instead of using _payload_writer.enable_compression,
# compress the whole body
zlib_mode = (
16 + zlib.MAX_WBITS if coding == ContentCoding.gzip else zlib.MAX_WBITS
)
body_in = self._body
assert body_in is not None
if (
self._zlib_executor_size is not None
and len(body_in) > self._zlib_executor_size
):
await asyncio.get_event_loop().run_in_executor(
self._zlib_executor, self._compress_body, zlib_mode
)
else:
self._compress_body(zlib_mode)
body_out = self._compressed_body
assert body_out is not None
self._headers[hdrs.CONTENT_ENCODING] = coding.value
self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out))
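def _example_plain_response() -> Response:
    # A minimal sketch, not part of aiohttp's API: ``text=`` encodes the
    # body as UTF-8 and sets ``Content-Type: text/plain; charset=utf-8``.
    return Response(text="hello", status=200)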
def json_response(
data: Any = sentinel,
*,
text: Optional[str] = None,
body: Optional[bytes] = None,
status: int = 200,
reason: Optional[str] = None,
headers: Optional[LooseHeaders] = None,
content_type: str = "application/json",
dumps: JSONEncoder = json.dumps,
) -> Response:
if data is not sentinel:
if text or body:
raise ValueError("only one of data, text, or body should be specified")
else:
text = dumps(data)
return Response(
text=text,
body=body,
status=status,
reason=reason,
headers=headers,
content_type=content_type,
)
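def _example_json_response() -> Response:
    # A minimal sketch, not part of aiohttp's API: serialize a payload with
    # the default encoder; pass ``dumps=`` to plug in a custom serializer.
    return json_response({"status": "ok"})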
| 27,471 | Python | 32.25908 | 91 | 0.562812 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/typedefs.py | import json
import os
import sys
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Iterable,
Mapping,
Tuple,
Union,
)
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
from yarl import URL
# These are for other modules to use (to avoid repeating the conditional import).
if sys.version_info >= (3, 8):
from typing import Final as Final, Protocol as Protocol, TypedDict as TypedDict
else:
from typing_extensions import ( # noqa: F401
Final,
Protocol as Protocol,
TypedDict as TypedDict,
)
DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads
if TYPE_CHECKING: # pragma: no cover
_CIMultiDict = CIMultiDict[str]
_CIMultiDictProxy = CIMultiDictProxy[str]
_MultiDict = MultiDict[str]
_MultiDictProxy = MultiDictProxy[str]
from http.cookies import BaseCookie, Morsel
from .web import Request, StreamResponse
else:
_CIMultiDict = CIMultiDict
_CIMultiDictProxy = CIMultiDictProxy
_MultiDict = MultiDict
_MultiDictProxy = MultiDictProxy
Byteish = Union[bytes, bytearray, memoryview]
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy]
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]
LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
LooseCookies = Union[
LooseCookiesMappings,
LooseCookiesIterables,
"BaseCookie[str]",
]
Handler = Callable[["Request"], Awaitable["StreamResponse"]]
PathLike = Union[str, "os.PathLike[str]"]
| 1,766 | Python | 26.184615 | 85 | 0.713477 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/tracing.py | from types import SimpleNamespace
from typing import TYPE_CHECKING, Awaitable, Optional, Type, TypeVar
import attr
from aiosignal import Signal
from multidict import CIMultiDict
from yarl import URL
from .client_reqrep import ClientResponse
if TYPE_CHECKING: # pragma: no cover
from .client import ClientSession
from .typedefs import Protocol
_ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)
class _SignalCallback(Protocol[_ParamT_contra]):
def __call__(
self,
__client_session: ClientSession,
__trace_config_ctx: SimpleNamespace,
__params: _ParamT_contra,
) -> Awaitable[None]:
...
__all__ = (
"TraceConfig",
"TraceRequestStartParams",
"TraceRequestEndParams",
"TraceRequestExceptionParams",
"TraceConnectionQueuedStartParams",
"TraceConnectionQueuedEndParams",
"TraceConnectionCreateStartParams",
"TraceConnectionCreateEndParams",
"TraceConnectionReuseconnParams",
"TraceDnsResolveHostStartParams",
"TraceDnsResolveHostEndParams",
"TraceDnsCacheHitParams",
"TraceDnsCacheMissParams",
"TraceRequestRedirectParams",
"TraceRequestChunkSentParams",
"TraceResponseChunkReceivedParams",
"TraceRequestHeadersSentParams",
)
class TraceConfig:
"""First-class used to trace requests launched via ClientSession objects."""
def __init__(
self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
) -> None:
self._on_request_start: Signal[
_SignalCallback[TraceRequestStartParams]
] = Signal(self)
self._on_request_chunk_sent: Signal[
_SignalCallback[TraceRequestChunkSentParams]
] = Signal(self)
self._on_response_chunk_received: Signal[
_SignalCallback[TraceResponseChunkReceivedParams]
] = Signal(self)
self._on_request_end: Signal[_SignalCallback[TraceRequestEndParams]] = Signal(
self
)
self._on_request_exception: Signal[
_SignalCallback[TraceRequestExceptionParams]
] = Signal(self)
self._on_request_redirect: Signal[
_SignalCallback[TraceRequestRedirectParams]
] = Signal(self)
self._on_connection_queued_start: Signal[
_SignalCallback[TraceConnectionQueuedStartParams]
] = Signal(self)
self._on_connection_queued_end: Signal[
_SignalCallback[TraceConnectionQueuedEndParams]
] = Signal(self)
self._on_connection_create_start: Signal[
_SignalCallback[TraceConnectionCreateStartParams]
] = Signal(self)
self._on_connection_create_end: Signal[
_SignalCallback[TraceConnectionCreateEndParams]
] = Signal(self)
self._on_connection_reuseconn: Signal[
_SignalCallback[TraceConnectionReuseconnParams]
] = Signal(self)
self._on_dns_resolvehost_start: Signal[
_SignalCallback[TraceDnsResolveHostStartParams]
] = Signal(self)
self._on_dns_resolvehost_end: Signal[
_SignalCallback[TraceDnsResolveHostEndParams]
] = Signal(self)
self._on_dns_cache_hit: Signal[
_SignalCallback[TraceDnsCacheHitParams]
] = Signal(self)
self._on_dns_cache_miss: Signal[
_SignalCallback[TraceDnsCacheMissParams]
] = Signal(self)
self._on_request_headers_sent: Signal[
_SignalCallback[TraceRequestHeadersSentParams]
] = Signal(self)
self._trace_config_ctx_factory = trace_config_ctx_factory
def trace_config_ctx(
self, trace_request_ctx: Optional[SimpleNamespace] = None
) -> SimpleNamespace:
"""Return a new trace_config_ctx instance"""
return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)
def freeze(self) -> None:
self._on_request_start.freeze()
self._on_request_chunk_sent.freeze()
self._on_response_chunk_received.freeze()
self._on_request_end.freeze()
self._on_request_exception.freeze()
self._on_request_redirect.freeze()
self._on_connection_queued_start.freeze()
self._on_connection_queued_end.freeze()
self._on_connection_create_start.freeze()
self._on_connection_create_end.freeze()
self._on_connection_reuseconn.freeze()
self._on_dns_resolvehost_start.freeze()
self._on_dns_resolvehost_end.freeze()
self._on_dns_cache_hit.freeze()
self._on_dns_cache_miss.freeze()
self._on_request_headers_sent.freeze()
@property
def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":
return self._on_request_start
@property
def on_request_chunk_sent(
self,
) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]":
return self._on_request_chunk_sent
@property
def on_response_chunk_received(
self,
) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]":
return self._on_response_chunk_received
@property
def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]":
return self._on_request_end
@property
def on_request_exception(
self,
) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]":
return self._on_request_exception
@property
def on_request_redirect(
self,
) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]":
return self._on_request_redirect
@property
def on_connection_queued_start(
self,
) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]":
return self._on_connection_queued_start
@property
def on_connection_queued_end(
self,
) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]":
return self._on_connection_queued_end
@property
def on_connection_create_start(
self,
) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]":
return self._on_connection_create_start
@property
def on_connection_create_end(
self,
) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]":
return self._on_connection_create_end
@property
def on_connection_reuseconn(
self,
) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]":
return self._on_connection_reuseconn
@property
def on_dns_resolvehost_start(
self,
) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]":
return self._on_dns_resolvehost_start
@property
def on_dns_resolvehost_end(
self,
) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]":
return self._on_dns_resolvehost_end
@property
def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]":
return self._on_dns_cache_hit
@property
def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
return self._on_dns_cache_miss
@property
def on_request_headers_sent(
self,
) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]":
return self._on_request_headers_sent
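async def _example_on_request_start(
    session: "ClientSession",
    ctx: SimpleNamespace,
    params: "TraceRequestStartParams",
) -> None:
    # A minimal callback sketch (hypothetical, not part of aiohttp's API):
    # each signal handler receives the session, the per-request context and
    # a frozen params object.
    print("starting request:", params.method, params.url)
def _example_trace_config() -> TraceConfig:
    # Attach the callback, then pass ``trace_configs=[...]`` to ClientSession.
    trace_config = TraceConfig()
    trace_config.on_request_start.append(_example_on_request_start)
    return trace_config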
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestStartParams:
"""Parameters sent by the `on_request_start` signal"""
method: str
url: URL
headers: "CIMultiDict[str]"
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestChunkSentParams:
"""Parameters sent by the `on_request_chunk_sent` signal"""
method: str
url: URL
chunk: bytes
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceResponseChunkReceivedParams:
"""Parameters sent by the `on_response_chunk_received` signal"""
method: str
url: URL
chunk: bytes
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestEndParams:
"""Parameters sent by the `on_request_end` signal"""
method: str
url: URL
headers: "CIMultiDict[str]"
response: ClientResponse
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestExceptionParams:
"""Parameters sent by the `on_request_exception` signal"""
method: str
url: URL
headers: "CIMultiDict[str]"
exception: BaseException
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestRedirectParams:
"""Parameters sent by the `on_request_redirect` signal"""
method: str
url: URL
headers: "CIMultiDict[str]"
response: ClientResponse
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedStartParams:
"""Parameters sent by the `on_connection_queued_start` signal"""
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedEndParams:
"""Parameters sent by the `on_connection_queued_end` signal"""
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateStartParams:
"""Parameters sent by the `on_connection_create_start` signal"""
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateEndParams:
"""Parameters sent by the `on_connection_create_end` signal"""
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionReuseconnParams:
"""Parameters sent by the `on_connection_reuseconn` signal"""
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostStartParams:
"""Parameters sent by the `on_dns_resolvehost_start` signal"""
host: str
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostEndParams:
"""Parameters sent by the `on_dns_resolvehost_end` signal"""
host: str
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheHitParams:
"""Parameters sent by the `on_dns_cache_hit` signal"""
host: str
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheMissParams:
"""Parameters sent by the `on_dns_cache_miss` signal"""
host: str
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestHeadersSentParams:
"""Parameters sent by the `on_request_headers_sent` signal"""
method: str
url: URL
headers: "CIMultiDict[str]"
class Trace:
"""Internal dependency holder class.
    Used to keep together the main dependencies needed
    at the moment a signal is sent.
"""
def __init__(
self,
session: "ClientSession",
trace_config: TraceConfig,
trace_config_ctx: SimpleNamespace,
) -> None:
self._trace_config = trace_config
self._trace_config_ctx = trace_config_ctx
self._session = session
async def send_request_start(
self, method: str, url: URL, headers: "CIMultiDict[str]"
) -> None:
return await self._trace_config.on_request_start.send(
self._session,
self._trace_config_ctx,
TraceRequestStartParams(method, url, headers),
)
async def send_request_chunk_sent(
self, method: str, url: URL, chunk: bytes
) -> None:
return await self._trace_config.on_request_chunk_sent.send(
self._session,
self._trace_config_ctx,
TraceRequestChunkSentParams(method, url, chunk),
)
async def send_response_chunk_received(
self, method: str, url: URL, chunk: bytes
) -> None:
return await self._trace_config.on_response_chunk_received.send(
self._session,
self._trace_config_ctx,
TraceResponseChunkReceivedParams(method, url, chunk),
)
async def send_request_end(
self,
method: str,
url: URL,
headers: "CIMultiDict[str]",
response: ClientResponse,
) -> None:
return await self._trace_config.on_request_end.send(
self._session,
self._trace_config_ctx,
TraceRequestEndParams(method, url, headers, response),
)
async def send_request_exception(
self,
method: str,
url: URL,
headers: "CIMultiDict[str]",
exception: BaseException,
) -> None:
return await self._trace_config.on_request_exception.send(
self._session,
self._trace_config_ctx,
TraceRequestExceptionParams(method, url, headers, exception),
)
async def send_request_redirect(
self,
method: str,
url: URL,
headers: "CIMultiDict[str]",
response: ClientResponse,
) -> None:
        return await self._trace_config.on_request_redirect.send(
self._session,
self._trace_config_ctx,
TraceRequestRedirectParams(method, url, headers, response),
)
async def send_connection_queued_start(self) -> None:
return await self._trace_config.on_connection_queued_start.send(
self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams()
)
async def send_connection_queued_end(self) -> None:
return await self._trace_config.on_connection_queued_end.send(
self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams()
)
async def send_connection_create_start(self) -> None:
return await self._trace_config.on_connection_create_start.send(
self._session, self._trace_config_ctx, TraceConnectionCreateStartParams()
)
async def send_connection_create_end(self) -> None:
return await self._trace_config.on_connection_create_end.send(
self._session, self._trace_config_ctx, TraceConnectionCreateEndParams()
)
async def send_connection_reuseconn(self) -> None:
return await self._trace_config.on_connection_reuseconn.send(
self._session, self._trace_config_ctx, TraceConnectionReuseconnParams()
)
async def send_dns_resolvehost_start(self, host: str) -> None:
return await self._trace_config.on_dns_resolvehost_start.send(
self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host)
)
async def send_dns_resolvehost_end(self, host: str) -> None:
return await self._trace_config.on_dns_resolvehost_end.send(
self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host)
)
async def send_dns_cache_hit(self, host: str) -> None:
return await self._trace_config.on_dns_cache_hit.send(
self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host)
)
async def send_dns_cache_miss(self, host: str) -> None:
return await self._trace_config.on_dns_cache_miss.send(
self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host)
)
async def send_request_headers(
self, method: str, url: URL, headers: "CIMultiDict[str]"
) -> None:
        return await self._trace_config.on_request_headers_sent.send(
self._session,
self._trace_config_ctx,
TraceRequestHeadersSentParams(method, url, headers),
)
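# --- Illustrative sketch (not part of this module): subscribing to the
# signals above through TraceConfig.  The handler body and the URL are
# placeholders chosen for demonstration.
async def _example_traced_request() -> None:  # documentation sketch
    import aiohttp  # local import to avoid a circular import at module load

    async def on_request_start(session, context, params) -> None:
        # `params` is a TraceRequestStartParams instance as defined above.
        print(f"-> {params.method} {params.url}")

    trace_config = aiohttp.TraceConfig()
    trace_config.on_request_start.append(on_request_start)
    async with aiohttp.ClientSession(trace_configs=[trace_config]) as session:
        async with session.get("https://example.invalid/") as resp:
            await resp.read()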
| 15,177 | Python | 31.088795 | 87 | 0.657508 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/abc.py | import asyncio
import logging
from abc import ABC, abstractmethod
from collections.abc import Sized
from http.cookies import BaseCookie, Morsel
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Dict,
Generator,
Iterable,
List,
Optional,
Tuple,
)
from multidict import CIMultiDict
from yarl import URL
from .helpers import get_running_loop
from .typedefs import LooseCookies
if TYPE_CHECKING: # pragma: no cover
from .web_app import Application
from .web_exceptions import HTTPException
from .web_request import BaseRequest, Request
from .web_response import StreamResponse
else:
BaseRequest = Request = Application = StreamResponse = None
HTTPException = None
class AbstractRouter(ABC):
def __init__(self) -> None:
self._frozen = False
def post_init(self, app: Application) -> None:
"""Post init stage.
        Not an abstract method for the sake of backward compatibility,
but if the router wants to be aware of the application
it can override this.
"""
@property
def frozen(self) -> bool:
return self._frozen
def freeze(self) -> None:
"""Freeze router."""
self._frozen = True
@abstractmethod
async def resolve(self, request: Request) -> "AbstractMatchInfo":
"""Return MATCH_INFO for given request"""
class AbstractMatchInfo(ABC):
@property # pragma: no branch
@abstractmethod
def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
"""Execute matched request handler"""
@property
@abstractmethod
def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
"""Expect handler for 100-continue processing"""
@property # pragma: no branch
@abstractmethod
def http_exception(self) -> Optional[HTTPException]:
"""HTTPException instance raised on router's resolving, or None"""
@abstractmethod # pragma: no branch
def get_info(self) -> Dict[str, Any]:
"""Return a dict with additional info useful for introspection"""
@property # pragma: no branch
@abstractmethod
def apps(self) -> Tuple[Application, ...]:
"""Stack of nested applications.
Top level application is left-most element.
"""
@abstractmethod
def add_app(self, app: Application) -> None:
"""Add application to the nested apps stack."""
@abstractmethod
def freeze(self) -> None:
"""Freeze the match info.
The method is called after route resolution.
After the call .add_app() is forbidden.
"""
class AbstractView(ABC):
"""Abstract class based view."""
def __init__(self, request: Request) -> None:
self._request = request
@property
def request(self) -> Request:
"""Request instance."""
return self._request
@abstractmethod
def __await__(self) -> Generator[Any, None, StreamResponse]:
"""Execute the view handler."""
class AbstractResolver(ABC):
"""Abstract DNS resolver."""
@abstractmethod
async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
"""Return IP address for given hostname"""
@abstractmethod
async def close(self) -> None:
"""Release resolver"""
if TYPE_CHECKING: # pragma: no cover
IterableBase = Iterable[Morsel[str]]
else:
IterableBase = Iterable
ClearCookiePredicate = Callable[["Morsel[str]"], bool]
class AbstractCookieJar(Sized, IterableBase):
"""Abstract Cookie Jar."""
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
self._loop = get_running_loop(loop)
@abstractmethod
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
"""Clear all cookies if no predicate is passed."""
@abstractmethod
def clear_domain(self, domain: str) -> None:
"""Clear all cookies for domain and all subdomains."""
@abstractmethod
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
"""Update cookies."""
@abstractmethod
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
"""Return the jar's cookies filtered by their attributes."""
class AbstractStreamWriter(ABC):
"""Abstract stream writer."""
buffer_size = 0
output_size = 0
length: Optional[int] = 0
@abstractmethod
async def write(self, chunk: bytes) -> None:
"""Write chunk into stream."""
@abstractmethod
async def write_eof(self, chunk: bytes = b"") -> None:
"""Write last chunk."""
@abstractmethod
async def drain(self) -> None:
"""Flush the write buffer."""
@abstractmethod
def enable_compression(self, encoding: str = "deflate") -> None:
"""Enable HTTP body compression"""
@abstractmethod
def enable_chunking(self) -> None:
"""Enable HTTP chunked mode"""
@abstractmethod
async def write_headers(
self, status_line: str, headers: "CIMultiDict[str]"
) -> None:
"""Write HTTP headers"""
class AbstractAccessLogger(ABC):
"""Abstract writer to access log."""
def __init__(self, logger: logging.Logger, log_format: str) -> None:
self.logger = logger
self.log_format = log_format
@abstractmethod
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
"""Emit log to logger."""
| 5,505 | Python | 25.471154 | 87 | 0.641054 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/http_exceptions.py | """Low-level http related exceptions."""
from typing import Optional, Union
from .typedefs import _CIMultiDict
__all__ = ("HttpProcessingError",)
class HttpProcessingError(Exception):
"""HTTP error.
Shortcut for raising HTTP errors with custom code, message and headers.
code: HTTP Error code.
message: (optional) Error message.
    headers: (optional) Headers to be sent in response, a list of pairs.
"""
code = 0
message = ""
headers = None
def __init__(
self,
*,
code: Optional[int] = None,
message: str = "",
headers: Optional[_CIMultiDict] = None,
) -> None:
if code is not None:
self.code = code
self.headers = headers
self.message = message
def __str__(self) -> str:
return f"{self.code}, message={self.message!r}"
def __repr__(self) -> str:
return f"<{self.__class__.__name__}: {self}>"
class BadHttpMessage(HttpProcessingError):
code = 400
message = "Bad Request"
def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
super().__init__(message=message, headers=headers)
self.args = (message,)
class HttpBadRequest(BadHttpMessage):
code = 400
message = "Bad Request"
class PayloadEncodingError(BadHttpMessage):
"""Base class for payload errors"""
class ContentEncodingError(PayloadEncodingError):
"""Content encoding error."""
class TransferEncodingError(PayloadEncodingError):
    """Transfer encoding error."""
class ContentLengthError(PayloadEncodingError):
    """Not enough data to satisfy the Content-Length header."""
class LineTooLong(BadHttpMessage):
def __init__(
self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
) -> None:
super().__init__(
f"Got more than {limit} bytes ({actual_size}) when reading {line}."
)
self.args = (line, limit, actual_size)
class InvalidHeader(BadHttpMessage):
def __init__(self, hdr: Union[bytes, str]) -> None:
if isinstance(hdr, bytes):
hdr = hdr.decode("utf-8", "surrogateescape")
super().__init__(f"Invalid HTTP Header: {hdr}")
self.hdr = hdr
self.args = (hdr,)
class BadStatusLine(BadHttpMessage):
def __init__(self, line: str = "") -> None:
if not isinstance(line, str):
line = repr(line)
super().__init__(f"Bad status line {line!r}")
self.args = (line,)
self.line = line
class InvalidURLError(BadHttpMessage):
pass
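# --- Illustrative sketch (not part of this module): raising and formatting
# one of the errors above.  The code/message values are arbitrary examples.
def _example_processing_error() -> str:  # documentation sketch
    try:
        raise HttpProcessingError(code=431, message="Request Header Fields Too Large")
    except HttpProcessingError as exc:
        return str(exc)  # "431, message='Request Header Fields Too Large'"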
| 2,586 | Python | 23.40566 | 88 | 0.607502 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_fileresponse.py | import asyncio
import mimetypes
import os
import pathlib
import sys
from typing import ( # noqa
IO,
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Iterator,
List,
Optional,
Tuple,
Union,
cast,
)
from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import ETAG_ANY, ETag
from .typedefs import Final, LooseHeaders
from .web_exceptions import (
HTTPNotModified,
HTTPPartialContent,
HTTPPreconditionFailed,
HTTPRequestRangeNotSatisfiable,
)
from .web_response import StreamResponse
__all__ = ("FileResponse",)
if TYPE_CHECKING: # pragma: no cover
from .web_request import BaseRequest
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))
class FileResponse(StreamResponse):
    """A response object for sending files."""
def __init__(
self,
path: Union[str, pathlib.Path],
chunk_size: int = 256 * 1024,
status: int = 200,
reason: Optional[str] = None,
headers: Optional[LooseHeaders] = None,
) -> None:
super().__init__(status=status, reason=reason, headers=headers)
if isinstance(path, str):
path = pathlib.Path(path)
self._path = path
self._chunk_size = chunk_size
async def _sendfile_fallback(
self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
) -> AbstractStreamWriter:
        # To keep memory usage low, fobj is transferred in chunks
# controlled by the constructor's chunk_size argument.
chunk_size = self._chunk_size
loop = asyncio.get_event_loop()
await loop.run_in_executor(None, fobj.seek, offset)
chunk = await loop.run_in_executor(None, fobj.read, chunk_size)
while chunk:
await writer.write(chunk)
count = count - chunk_size
if count <= 0:
break
chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))
await writer.drain()
return writer
async def _sendfile(
self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
) -> AbstractStreamWriter:
writer = await super().prepare(request)
assert writer is not None
if NOSENDFILE or sys.version_info < (3, 7) or self.compression:
return await self._sendfile_fallback(writer, fobj, offset, count)
loop = request._loop
transport = request.transport
assert transport is not None
try:
await loop.sendfile(transport, fobj, offset, count)
except NotImplementedError:
return await self._sendfile_fallback(writer, fobj, offset, count)
await super().write_eof()
return writer
@staticmethod
def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool:
if len(etags) == 1 and etags[0].value == ETAG_ANY:
return True
return any(etag.value == etag_value for etag in etags if not etag.is_weak)
async def _not_modified(
self, request: "BaseRequest", etag_value: str, last_modified: float
) -> Optional[AbstractStreamWriter]:
self.set_status(HTTPNotModified.status_code)
self._length_check = False
self.etag = etag_value # type: ignore[assignment]
self.last_modified = last_modified # type: ignore[assignment]
# Delete any Content-Length headers provided by user. HTTP 304
        # should always have an empty response body
return await super().prepare(request)
async def _precondition_failed(
self, request: "BaseRequest"
) -> Optional[AbstractStreamWriter]:
self.set_status(HTTPPreconditionFailed.status_code)
self.content_length = 0
return await super().prepare(request)
async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
filepath = self._path
gzip = False
if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""):
gzip_path = filepath.with_name(filepath.name + ".gz")
if gzip_path.is_file():
filepath = gzip_path
gzip = True
loop = asyncio.get_event_loop()
st: os.stat_result = await loop.run_in_executor(None, filepath.stat)
etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
last_modified = st.st_mtime
# https://tools.ietf.org/html/rfc7232#section-6
ifmatch = request.if_match
if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch):
return await self._precondition_failed(request)
unmodsince = request.if_unmodified_since
if (
unmodsince is not None
and ifmatch is None
and st.st_mtime > unmodsince.timestamp()
):
return await self._precondition_failed(request)
ifnonematch = request.if_none_match
if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch):
return await self._not_modified(request, etag_value, last_modified)
modsince = request.if_modified_since
if (
modsince is not None
and ifnonematch is None
and st.st_mtime <= modsince.timestamp()
):
return await self._not_modified(request, etag_value, last_modified)
if hdrs.CONTENT_TYPE not in self.headers:
ct, encoding = mimetypes.guess_type(str(filepath))
if not ct:
ct = "application/octet-stream"
should_set_ct = True
else:
encoding = "gzip" if gzip else None
should_set_ct = False
status = self._status
file_size = st.st_size
count = file_size
start = None
ifrange = request.if_range
if ifrange is None or st.st_mtime <= ifrange.timestamp():
# If-Range header check:
# condition = cached date >= last modification date
# return 206 if True else 200.
# if False:
# Range header would not be processed, return 200
# if True but Range header missing
# return 200
try:
rng = request.http_range
start = rng.start
end = rng.stop
except ValueError:
# https://tools.ietf.org/html/rfc7233:
# A server generating a 416 (Range Not Satisfiable) response to
# a byte-range request SHOULD send a Content-Range header field
# with an unsatisfied-range value.
# The complete-length in a 416 response indicates the current
# length of the selected representation.
#
# Will do the same below. Many servers ignore this and do not
# send a Content-Range header with HTTP 416
self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
return await super().prepare(request)
# If a range request has been made, convert start, end slice
# notation into file pointer offset and count
if start is not None or end is not None:
if start < 0 and end is None: # return tail of file
start += file_size
if start < 0:
# if Range:bytes=-1000 in request header but file size
# is only 200, there would be trouble without this
start = 0
count = file_size - start
else:
# rfc7233:If the last-byte-pos value is
# absent, or if the value is greater than or equal to
# the current length of the representation data,
# the byte range is interpreted as the remainder
# of the representation (i.e., the server replaces the
# value of last-byte-pos with a value that is one less than
# the current length of the selected representation).
count = (
min(end if end is not None else file_size, file_size) - start
)
if start >= file_size:
# HTTP 416 should be returned in this case.
#
# According to https://tools.ietf.org/html/rfc7233:
# If a valid byte-range-set includes at least one
# byte-range-spec with a first-byte-pos that is less than
# the current length of the representation, or at least one
# suffix-byte-range-spec with a non-zero suffix-length,
# then the byte-range-set is satisfiable. Otherwise, the
# byte-range-set is unsatisfiable.
self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
return await super().prepare(request)
status = HTTPPartialContent.status_code
# Even though you are sending the whole file, you should still
            # return an HTTP 206 for a Range request.
self.set_status(status)
if should_set_ct:
self.content_type = ct # type: ignore[assignment]
if encoding:
self.headers[hdrs.CONTENT_ENCODING] = encoding
if gzip:
self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
self.etag = etag_value # type: ignore[assignment]
self.last_modified = st.st_mtime # type: ignore[assignment]
self.content_length = count
self.headers[hdrs.ACCEPT_RANGES] = "bytes"
real_start = cast(int, start)
if status == HTTPPartialContent.status_code:
self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
real_start, real_start + count - 1, file_size
)
# If we are sending 0 bytes calling sendfile() will throw a ValueError
if count == 0 or request.method == hdrs.METH_HEAD or self.status in [204, 304]:
return await super().prepare(request)
fobj = await loop.run_in_executor(None, filepath.open, "rb")
        if start:  # be aware that start could be None or 0 here
offset = start
else:
offset = 0
try:
return await self._sendfile(request, fobj, offset, count)
finally:
await loop.run_in_executor(None, fobj.close)
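# --- Illustrative sketch (not part of aiohttp): serving a file with the
# class above.  The route and file path are placeholders; conditional
# (ETag/Last-Modified) and Range handling happens inside prepare().
def _example_file_app():  # documentation sketch
    from aiohttp import web  # local import to avoid a circular import

    async def handler(request):
        return web.FileResponse("./static/report.pdf", chunk_size=64 * 1024)

    app = web.Application()
    app.router.add_get("/report", handler)
    return app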
| 10,784 | Python | 36.318339 | 88 | 0.58355 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/hdrs.py | """HTTP Headers constants."""
# After changing the file content call ./tools/gen.py
# to regenerate the headers parser
import sys
from typing import Set
from multidict import istr
if sys.version_info >= (3, 8):
from typing import Final
else:
from typing_extensions import Final
METH_ANY: Final[str] = "*"
METH_CONNECT: Final[str] = "CONNECT"
METH_HEAD: Final[str] = "HEAD"
METH_GET: Final[str] = "GET"
METH_DELETE: Final[str] = "DELETE"
METH_OPTIONS: Final[str] = "OPTIONS"
METH_PATCH: Final[str] = "PATCH"
METH_POST: Final[str] = "POST"
METH_PUT: Final[str] = "PUT"
METH_TRACE: Final[str] = "TRACE"
METH_ALL: Final[Set[str]] = {
METH_CONNECT,
METH_HEAD,
METH_GET,
METH_DELETE,
METH_OPTIONS,
METH_PATCH,
METH_POST,
METH_PUT,
METH_TRACE,
}
ACCEPT: Final[istr] = istr("Accept")
ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
AGE: Final[istr] = istr("Age")
ALLOW: Final[istr] = istr("Allow")
AUTHORIZATION: Final[istr] = istr("Authorization")
CACHE_CONTROL: Final[istr] = istr("Cache-Control")
CONNECTION: Final[istr] = istr("Connection")
CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
CONTENT_LENGTH: Final[istr] = istr("Content-Length")
CONTENT_LOCATION: Final[istr] = istr("Content-Location")
CONTENT_MD5: Final[istr] = istr("Content-MD5")
CONTENT_RANGE: Final[istr] = istr("Content-Range")
CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
CONTENT_TYPE: Final[istr] = istr("Content-Type")
COOKIE: Final[istr] = istr("Cookie")
DATE: Final[istr] = istr("Date")
DESTINATION: Final[istr] = istr("Destination")
DIGEST: Final[istr] = istr("Digest")
ETAG: Final[istr] = istr("Etag")
EXPECT: Final[istr] = istr("Expect")
EXPIRES: Final[istr] = istr("Expires")
FORWARDED: Final[istr] = istr("Forwarded")
FROM: Final[istr] = istr("From")
HOST: Final[istr] = istr("Host")
IF_MATCH: Final[istr] = istr("If-Match")
IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
IF_RANGE: Final[istr] = istr("If-Range")
IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
LAST_MODIFIED: Final[istr] = istr("Last-Modified")
LINK: Final[istr] = istr("Link")
LOCATION: Final[istr] = istr("Location")
MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
ORIGIN: Final[istr] = istr("Origin")
PRAGMA: Final[istr] = istr("Pragma")
PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
RANGE: Final[istr] = istr("Range")
REFERER: Final[istr] = istr("Referer")
RETRY_AFTER: Final[istr] = istr("Retry-After")
SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
SERVER: Final[istr] = istr("Server")
SET_COOKIE: Final[istr] = istr("Set-Cookie")
TE: Final[istr] = istr("TE")
TRAILER: Final[istr] = istr("Trailer")
TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
UPGRADE: Final[istr] = istr("Upgrade")
URI: Final[istr] = istr("URI")
USER_AGENT: Final[istr] = istr("User-Agent")
VARY: Final[istr] = istr("Vary")
VIA: Final[istr] = istr("Via")
WANT_DIGEST: Final[istr] = istr("Want-Digest")
WARNING: Final[istr] = istr("Warning")
WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
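# --- Illustrative sketch (not part of this module): the istr constants above
# are intended as keys of case-insensitive multidicts, so any casing of the
# key matches on lookup.
def _example_header_lookup() -> None:  # documentation sketch
    from multidict import CIMultiDict

    headers: "CIMultiDict[str]" = CIMultiDict()
    headers[CONTENT_TYPE] = "application/json"
    assert headers["content-type"] == "application/json"
    assert headers[CONTENT_TYPE] == "application/json"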
| 4,724 | Python | 40.086956 | 88 | 0.715072 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/client_ws.py | """WebSocket client for asyncio."""
import asyncio
from typing import Any, Optional, cast
import async_timeout
from .client_exceptions import ClientError
from .client_reqrep import ClientResponse
from .helpers import call_later, set_result
from .http import (
WS_CLOSED_MESSAGE,
WS_CLOSING_MESSAGE,
WebSocketError,
WSCloseCode,
WSMessage,
WSMsgType,
)
from .http_websocket import WebSocketWriter # WSMessage
from .streams import EofStream, FlowControlDataQueue
from .typedefs import (
DEFAULT_JSON_DECODER,
DEFAULT_JSON_ENCODER,
JSONDecoder,
JSONEncoder,
)
class ClientWebSocketResponse:
def __init__(
self,
reader: "FlowControlDataQueue[WSMessage]",
writer: WebSocketWriter,
protocol: Optional[str],
response: ClientResponse,
timeout: float,
autoclose: bool,
autoping: bool,
loop: asyncio.AbstractEventLoop,
*,
receive_timeout: Optional[float] = None,
heartbeat: Optional[float] = None,
compress: int = 0,
client_notakeover: bool = False,
) -> None:
self._response = response
self._conn = response.connection
self._writer = writer
self._reader = reader
self._protocol = protocol
self._closed = False
self._closing = False
self._close_code: Optional[int] = None
self._timeout = timeout
self._receive_timeout = receive_timeout
self._autoclose = autoclose
self._autoping = autoping
self._heartbeat = heartbeat
self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
if heartbeat is not None:
self._pong_heartbeat = heartbeat / 2.0
self._pong_response_cb: Optional[asyncio.TimerHandle] = None
self._loop = loop
self._waiting: Optional[asyncio.Future[bool]] = None
self._exception: Optional[BaseException] = None
self._compress = compress
self._client_notakeover = client_notakeover
self._reset_heartbeat()
def _cancel_heartbeat(self) -> None:
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = None
if self._heartbeat_cb is not None:
self._heartbeat_cb.cancel()
self._heartbeat_cb = None
def _reset_heartbeat(self) -> None:
self._cancel_heartbeat()
if self._heartbeat is not None:
self._heartbeat_cb = call_later(
self._send_heartbeat, self._heartbeat, self._loop
)
def _send_heartbeat(self) -> None:
if self._heartbeat is not None and not self._closed:
            # Fire-and-forget of a task is not perfect but may be OK for
            # sending a ping. Otherwise we would need a long-lived
            # heartbeat task in the class.
self._loop.create_task(self._writer.ping())
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = call_later(
self._pong_not_received, self._pong_heartbeat, self._loop
)
def _pong_not_received(self) -> None:
if not self._closed:
self._closed = True
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = asyncio.TimeoutError()
self._response.close()
@property
def closed(self) -> bool:
return self._closed
@property
def close_code(self) -> Optional[int]:
return self._close_code
@property
def protocol(self) -> Optional[str]:
return self._protocol
@property
def compress(self) -> int:
return self._compress
@property
def client_notakeover(self) -> bool:
return self._client_notakeover
def get_extra_info(self, name: str, default: Any = None) -> Any:
"""extra info from connection transport"""
conn = self._response.connection
if conn is None:
return default
transport = conn.transport
if transport is None:
return default
return transport.get_extra_info(name, default)
def exception(self) -> Optional[BaseException]:
return self._exception
async def ping(self, message: bytes = b"") -> None:
await self._writer.ping(message)
async def pong(self, message: bytes = b"") -> None:
await self._writer.pong(message)
async def send_str(self, data: str, compress: Optional[int] = None) -> None:
if not isinstance(data, str):
raise TypeError("data argument must be str (%r)" % type(data))
await self._writer.send(data, binary=False, compress=compress)
async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
if not isinstance(data, (bytes, bytearray, memoryview)):
raise TypeError("data argument must be byte-ish (%r)" % type(data))
await self._writer.send(data, binary=True, compress=compress)
async def send_json(
self,
data: Any,
compress: Optional[int] = None,
*,
dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
) -> None:
await self.send_str(dumps(data), compress=compress)
async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
        # we need to break the `receive()` loop first,
        # since `close()` may be called from a different task
if self._waiting is not None and not self._closed:
self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
await self._waiting
if not self._closed:
self._cancel_heartbeat()
self._closed = True
try:
await self._writer.close(code, message)
except asyncio.CancelledError:
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._response.close()
raise
except Exception as exc:
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = exc
self._response.close()
return True
if self._closing:
self._response.close()
return True
while True:
try:
async with async_timeout.timeout(self._timeout):
msg = await self._reader.read()
except asyncio.CancelledError:
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._response.close()
raise
except Exception as exc:
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = exc
self._response.close()
return True
if msg.type == WSMsgType.CLOSE:
self._close_code = msg.data
self._response.close()
return True
else:
return False
async def receive(self, timeout: Optional[float] = None) -> WSMessage:
while True:
if self._waiting is not None:
raise RuntimeError("Concurrent call to receive() is not allowed")
if self._closed:
return WS_CLOSED_MESSAGE
elif self._closing:
await self.close()
return WS_CLOSED_MESSAGE
try:
self._waiting = self._loop.create_future()
try:
async with async_timeout.timeout(timeout or self._receive_timeout):
msg = await self._reader.read()
self._reset_heartbeat()
finally:
waiter = self._waiting
self._waiting = None
set_result(waiter, True)
except (asyncio.CancelledError, asyncio.TimeoutError):
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
raise
except EofStream:
self._close_code = WSCloseCode.OK
await self.close()
return WSMessage(WSMsgType.CLOSED, None, None)
except ClientError:
self._closed = True
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
return WS_CLOSED_MESSAGE
except WebSocketError as exc:
self._close_code = exc.code
await self.close(code=exc.code)
return WSMessage(WSMsgType.ERROR, exc, None)
except Exception as exc:
self._exception = exc
self._closing = True
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
await self.close()
return WSMessage(WSMsgType.ERROR, exc, None)
if msg.type == WSMsgType.CLOSE:
self._closing = True
self._close_code = msg.data
if not self._closed and self._autoclose:
await self.close()
elif msg.type == WSMsgType.CLOSING:
self._closing = True
elif msg.type == WSMsgType.PING and self._autoping:
await self.pong(msg.data)
continue
elif msg.type == WSMsgType.PONG and self._autoping:
continue
return msg
async def receive_str(self, *, timeout: Optional[float] = None) -> str:
msg = await self.receive(timeout)
if msg.type != WSMsgType.TEXT:
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
return cast(str, msg.data)
async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
msg = await self.receive(timeout)
if msg.type != WSMsgType.BINARY:
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
return cast(bytes, msg.data)
async def receive_json(
self,
*,
loads: JSONDecoder = DEFAULT_JSON_DECODER,
timeout: Optional[float] = None,
) -> Any:
data = await self.receive_str(timeout=timeout)
return loads(data)
def __aiter__(self) -> "ClientWebSocketResponse":
return self
async def __anext__(self) -> WSMessage:
msg = await self.receive()
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
raise StopAsyncIteration
return msg
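# --- Illustrative sketch (not part of this module): the class above is what
# ClientSession.ws_connect() returns.  The URL is a placeholder; heartbeat=30.0
# arms the ping timer implemented in _send_heartbeat() above.
async def _example_ws_echo() -> None:  # documentation sketch
    import aiohttp  # local import to avoid a circular import at module load

    async with aiohttp.ClientSession() as session:
        async with session.ws_connect("wss://example.invalid/ws", heartbeat=30.0) as ws:
            await ws.send_str("hello")
            async for msg in ws:  # drives __aiter__/__anext__ defined above
                if msg.type == WSMsgType.TEXT:
                    print(msg.data)
                    await ws.close()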
| 10,516 | Python | 33.940199 | 87 | 0.56609 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/multipart.py | import base64
import binascii
import json
import re
import uuid
import warnings
import zlib
from collections import deque
from types import TracebackType
from typing import (
TYPE_CHECKING,
Any,
AsyncIterator,
Deque,
Dict,
Iterator,
List,
Mapping,
Optional,
Sequence,
Tuple,
Type,
Union,
cast,
)
from urllib.parse import parse_qsl, unquote, urlencode
from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping
from .hdrs import (
CONTENT_DISPOSITION,
CONTENT_ENCODING,
CONTENT_LENGTH,
CONTENT_TRANSFER_ENCODING,
CONTENT_TYPE,
)
from .helpers import CHAR, TOKEN, parse_mimetype, reify
from .http import HeadersParser
from .payload import (
JsonPayload,
LookupError,
Order,
Payload,
StringPayload,
get_payload,
payload_type,
)
from .streams import StreamReader
__all__ = (
"MultipartReader",
"MultipartWriter",
"BodyPartReader",
"BadContentDispositionHeader",
"BadContentDispositionParam",
"parse_content_disposition",
"content_disposition_filename",
)
if TYPE_CHECKING: # pragma: no cover
from .client_reqrep import ClientResponse
class BadContentDispositionHeader(RuntimeWarning):
pass
class BadContentDispositionParam(RuntimeWarning):
pass
def parse_content_disposition(
header: Optional[str],
) -> Tuple[Optional[str], Dict[str, str]]:
def is_token(string: str) -> bool:
return bool(string) and TOKEN >= set(string)
def is_quoted(string: str) -> bool:
return string[0] == string[-1] == '"'
def is_rfc5987(string: str) -> bool:
return is_token(string) and string.count("'") == 2
def is_extended_param(string: str) -> bool:
return string.endswith("*")
def is_continuous_param(string: str) -> bool:
pos = string.find("*") + 1
if not pos:
return False
substring = string[pos:-1] if string.endswith("*") else string[pos:]
return substring.isdigit()
def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
return re.sub(f"\\\\([{chars}])", "\\1", text)
if not header:
return None, {}
disptype, *parts = header.split(";")
if not is_token(disptype):
warnings.warn(BadContentDispositionHeader(header))
return None, {}
params: Dict[str, str] = {}
while parts:
item = parts.pop(0)
if "=" not in item:
warnings.warn(BadContentDispositionHeader(header))
return None, {}
key, value = item.split("=", 1)
key = key.lower().strip()
value = value.lstrip()
if key in params:
warnings.warn(BadContentDispositionHeader(header))
return None, {}
if not is_token(key):
warnings.warn(BadContentDispositionParam(item))
continue
elif is_continuous_param(key):
if is_quoted(value):
value = unescape(value[1:-1])
elif not is_token(value):
warnings.warn(BadContentDispositionParam(item))
continue
elif is_extended_param(key):
if is_rfc5987(value):
encoding, _, value = value.split("'", 2)
encoding = encoding or "utf-8"
else:
warnings.warn(BadContentDispositionParam(item))
continue
try:
value = unquote(value, encoding, "strict")
except UnicodeDecodeError: # pragma: nocover
warnings.warn(BadContentDispositionParam(item))
continue
else:
failed = True
if is_quoted(value):
failed = False
value = unescape(value[1:-1].lstrip("\\/"))
elif is_token(value):
failed = False
elif parts:
                # maybe there is just a ';' inside the filename; this is a
                # one-off fix, a proper fix requires redesigning the parser
_value = f"{value};{parts[0]}"
if is_quoted(_value):
parts.pop(0)
value = unescape(_value[1:-1].lstrip("\\/"))
failed = False
if failed:
warnings.warn(BadContentDispositionHeader(header))
return None, {}
params[key] = value
return disptype.lower(), params
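# Illustrative example of the parser above (header value chosen for
# demonstration); `content_disposition_filename`, defined below, then picks
# the filename out of the parsed parameters:
#
#     disptype, params = parse_content_disposition(
#         'attachment; filename="report.pdf"; name=upload'
#     )
#     # disptype == 'attachment'
#     # params == {'filename': 'report.pdf', 'name': 'upload'}
#     # content_disposition_filename(params) == 'report.pdf'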
def content_disposition_filename(
params: Mapping[str, str], name: str = "filename"
) -> Optional[str]:
name_suf = "%s*" % name
if not params:
return None
elif name_suf in params:
return params[name_suf]
elif name in params:
return params[name]
else:
parts = []
fnparams = sorted(
(key, value) for key, value in params.items() if key.startswith(name_suf)
)
for num, (key, value) in enumerate(fnparams):
_, tail = key.split("*", 1)
if tail.endswith("*"):
tail = tail[:-1]
if tail == str(num):
parts.append(value)
else:
break
if not parts:
return None
value = "".join(parts)
if "'" in value:
encoding, _, value = value.split("'", 2)
encoding = encoding or "utf-8"
return unquote(value, encoding, "strict")
return value
class MultipartResponseWrapper:
"""Wrapper around the MultipartReader.
    It takes care of the underlying connection
    and closes it when necessary.
"""
def __init__(
self,
resp: "ClientResponse",
stream: "MultipartReader",
) -> None:
self.resp = resp
self.stream = stream
def __aiter__(self) -> "MultipartResponseWrapper":
return self
async def __anext__(
self,
) -> Union["MultipartReader", "BodyPartReader"]:
part = await self.next()
if part is None:
raise StopAsyncIteration
return part
def at_eof(self) -> bool:
"""Returns True when all response data had been read."""
return self.resp.content.at_eof()
async def next(
self,
) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
"""Emits next multipart reader object."""
item = await self.stream.next()
if self.stream.at_eof():
await self.release()
return item
async def release(self) -> None:
"""Release the connection gracefully.
All remaining content is read to the void.
"""
await self.resp.release()
class BodyPartReader:
"""Multipart reader for single body part."""
chunk_size = 8192
def __init__(
self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader
) -> None:
self.headers = headers
self._boundary = boundary
self._content = content
self._at_eof = False
length = self.headers.get(CONTENT_LENGTH, None)
self._length = int(length) if length is not None else None
self._read_bytes = 0
        # TODO: typing.Deque is not supported by Python 3.5
self._unread: Deque[bytes] = deque()
self._prev_chunk: Optional[bytes] = None
self._content_eof = 0
self._cache: Dict[str, Any] = {}
def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
return self # type: ignore[return-value]
async def __anext__(self) -> bytes:
part = await self.next()
if part is None:
raise StopAsyncIteration
return part
async def next(self) -> Optional[bytes]:
item = await self.read()
if not item:
return None
return item
async def read(self, *, decode: bool = False) -> bytes:
"""Reads body part data.
        decode: if True, decodes the data according to the
        Content-Encoding and Content-Transfer-Encoding headers;
        if those headers are missing, the data remains untouched.
"""
if self._at_eof:
return b""
data = bytearray()
while not self._at_eof:
data.extend(await self.read_chunk(self.chunk_size))
if decode:
return self.decode(data)
return data
async def read_chunk(self, size: int = chunk_size) -> bytes:
"""Reads body part content chunk of the specified size.
size: chunk size
"""
if self._at_eof:
return b""
if self._length:
chunk = await self._read_chunk_from_length(size)
else:
chunk = await self._read_chunk_from_stream(size)
self._read_bytes += len(chunk)
if self._read_bytes == self._length:
self._at_eof = True
if self._at_eof:
clrf = await self._content.readline()
assert (
b"\r\n" == clrf
), "reader did not read all the data or it is malformed"
return chunk
async def _read_chunk_from_length(self, size: int) -> bytes:
# Reads body part content chunk of the specified size.
        # The body part must have a Content-Length header with a proper value.
assert self._length is not None, "Content-Length required for chunked read"
chunk_size = min(size, self._length - self._read_bytes)
chunk = await self._content.read(chunk_size)
return chunk
async def _read_chunk_from_stream(self, size: int) -> bytes:
# Reads content chunk of body part with unknown length.
# The Content-Length header for body part is not necessary.
        assert (
            size >= len(self._boundary) + 2
        ), "Chunk size must be greater than or equal to boundary length + 2"
first_chunk = self._prev_chunk is None
if first_chunk:
self._prev_chunk = await self._content.read(size)
chunk = await self._content.read(size)
self._content_eof += int(self._content.at_eof())
assert self._content_eof < 3, "Reading after EOF"
assert self._prev_chunk is not None
window = self._prev_chunk + chunk
sub = b"\r\n" + self._boundary
if first_chunk:
idx = window.find(sub)
else:
idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
if idx >= 0:
# pushing boundary back to content
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
self._content.unread_data(window[idx:])
if size > idx:
self._prev_chunk = self._prev_chunk[:idx]
chunk = window[len(self._prev_chunk) : idx]
if not chunk:
self._at_eof = True
result = self._prev_chunk
self._prev_chunk = chunk
return result
    async def readline(self) -> bytes:
        """Reads the body part line by line."""
if self._at_eof:
return b""
if self._unread:
line = self._unread.popleft()
else:
line = await self._content.readline()
if line.startswith(self._boundary):
            # the very last boundary may not come with \r\n,
            # so apply a single rule to all of them
sline = line.rstrip(b"\r\n")
boundary = self._boundary
last_boundary = self._boundary + b"--"
            # ensure that we read exactly the boundary, not something similar
if sline == boundary or sline == last_boundary:
self._at_eof = True
self._unread.append(line)
return b""
else:
next_line = await self._content.readline()
if next_line.startswith(self._boundary):
line = line[:-2] # strip CRLF but only once
self._unread.append(next_line)
return line
async def release(self) -> None:
"""Like read(), but reads all the data to the void."""
if self._at_eof:
return
while not self._at_eof:
await self.read_chunk(self.chunk_size)
async def text(self, *, encoding: Optional[str] = None) -> str:
"""Like read(), but assumes that body part contains text data."""
data = await self.read(decode=True)
# see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA
# and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA
encoding = encoding or self.get_charset(default="utf-8")
return data.decode(encoding)
async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
"""Like read(), but assumes that body parts contains JSON data."""
data = await self.read(decode=True)
if not data:
return None
encoding = encoding or self.get_charset(default="utf-8")
return cast(Dict[str, Any], json.loads(data.decode(encoding)))
async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
"""Like read(), but assumes that body parts contain form urlencoded data."""
data = await self.read(decode=True)
if not data:
return []
if encoding is not None:
real_encoding = encoding
else:
real_encoding = self.get_charset(default="utf-8")
return parse_qsl(
data.rstrip().decode(real_encoding),
keep_blank_values=True,
encoding=real_encoding,
)
def at_eof(self) -> bool:
"""Returns True if the boundary was reached or False otherwise."""
return self._at_eof
def decode(self, data: bytes) -> bytes:
"""Decodes data.
        Decoding is done according to the specified Content-Encoding
        or Content-Transfer-Encoding header value.
"""
if CONTENT_TRANSFER_ENCODING in self.headers:
data = self._decode_content_transfer(data)
if CONTENT_ENCODING in self.headers:
return self._decode_content(data)
return data
def _decode_content(self, data: bytes) -> bytes:
encoding = self.headers.get(CONTENT_ENCODING, "").lower()
if encoding == "deflate":
return zlib.decompress(data, -zlib.MAX_WBITS)
elif encoding == "gzip":
return zlib.decompress(data, 16 + zlib.MAX_WBITS)
elif encoding == "identity":
return data
else:
raise RuntimeError(f"unknown content encoding: {encoding}")
def _decode_content_transfer(self, data: bytes) -> bytes:
encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
if encoding == "base64":
return base64.b64decode(data)
elif encoding == "quoted-printable":
return binascii.a2b_qp(data)
elif encoding in ("binary", "8bit", "7bit"):
return data
else:
            raise RuntimeError(f"unknown content transfer encoding: {encoding}")
def get_charset(self, default: str) -> str:
"""Returns charset parameter from Content-Type header or default."""
ctype = self.headers.get(CONTENT_TYPE, "")
mimetype = parse_mimetype(ctype)
return mimetype.parameters.get("charset", default)
@reify
def name(self) -> Optional[str]:
"""Returns name specified in Content-Disposition header.
If the header is missing or malformed, returns None.
"""
_, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, "name")
@reify
def filename(self) -> Optional[str]:
"""Returns filename specified in Content-Disposition header.
Returns None if the header is missing or malformed.
"""
_, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, "filename")
@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
super().__init__(value, *args, **kwargs)
params: Dict[str, str] = {}
if value.name is not None:
params["name"] = value.name
if value.filename is not None:
params["filename"] = value.filename
if params:
self.set_content_disposition("attachment", True, **params)
async def write(self, writer: Any) -> None:
field = self._value
chunk = await field.read_chunk(size=2**16)
while chunk:
await writer.write(field.decode(chunk))
chunk = await field.read_chunk(size=2**16)
class MultipartReader:
"""Multipart body reader."""
    #: Response wrapper, used when a multipart reader is constructed from a response.
response_wrapper_cls = MultipartResponseWrapper
#: Multipart reader class, used to handle multipart/* body parts.
#: None points to type(self)
multipart_reader_cls = None
#: Body part reader class for non multipart/* content types.
part_reader_cls = BodyPartReader
def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
self.headers = headers
self._boundary = ("--" + self._get_boundary()).encode()
self._content = content
self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None
self._at_eof = False
self._at_bof = True
self._unread: List[bytes] = []
def __aiter__(
self,
) -> AsyncIterator["BodyPartReader"]:
return self # type: ignore[return-value]
async def __anext__(
self,
) -> Optional[Union["MultipartReader", BodyPartReader]]:
part = await self.next()
if part is None:
raise StopAsyncIteration
return part
@classmethod
def from_response(
cls,
response: "ClientResponse",
) -> MultipartResponseWrapper:
"""Constructs reader instance from HTTP response.
:param response: :class:`~aiohttp.client.ClientResponse` instance
"""
obj = cls.response_wrapper_cls(
response, cls(response.headers, response.content)
)
return obj
    def at_eof(self) -> bool:
        """Returns True if the final boundary was reached, False otherwise."""
return self._at_eof
async def next(
self,
) -> Optional[Union["MultipartReader", BodyPartReader]]:
"""Emits the next multipart body part."""
# So, if we're at BOF, we need to skip till the boundary.
if self._at_eof:
return None
await self._maybe_release_last_part()
if self._at_bof:
await self._read_until_first_boundary()
self._at_bof = False
else:
await self._read_boundary()
if self._at_eof: # we just read the last boundary, nothing to do there
return None
self._last_part = await self.fetch_next_part()
return self._last_part
async def release(self) -> None:
"""Reads all the body parts to the void till the final boundary."""
while not self._at_eof:
item = await self.next()
if item is None:
break
await item.release()
async def fetch_next_part(
self,
) -> Union["MultipartReader", BodyPartReader]:
"""Returns the next body part reader."""
headers = await self._read_headers()
return self._get_part_reader(headers)
def _get_part_reader(
self,
headers: "CIMultiDictProxy[str]",
) -> Union["MultipartReader", BodyPartReader]:
"""Dispatches the response by the `Content-Type` header.
Returns a suitable reader instance.
:param dict headers: Response headers
"""
ctype = headers.get(CONTENT_TYPE, "")
mimetype = parse_mimetype(ctype)
if mimetype.type == "multipart":
if self.multipart_reader_cls is None:
return type(self)(headers, self._content)
return self.multipart_reader_cls(headers, self._content)
else:
return self.part_reader_cls(self._boundary, headers, self._content)
def _get_boundary(self) -> str:
mimetype = parse_mimetype(self.headers[CONTENT_TYPE])
assert mimetype.type == "multipart", "multipart/* content type expected"
if "boundary" not in mimetype.parameters:
raise ValueError(
"boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE]
)
boundary = mimetype.parameters["boundary"]
if len(boundary) > 70:
raise ValueError("boundary %r is too long (70 chars max)" % boundary)
return boundary
async def _readline(self) -> bytes:
if self._unread:
return self._unread.pop()
return await self._content.readline()
async def _read_until_first_boundary(self) -> None:
while True:
chunk = await self._readline()
if chunk == b"":
raise ValueError(
"Could not find starting boundary %r" % (self._boundary)
)
chunk = chunk.rstrip()
if chunk == self._boundary:
return
elif chunk == self._boundary + b"--":
self._at_eof = True
return
async def _read_boundary(self) -> None:
chunk = (await self._readline()).rstrip()
if chunk == self._boundary:
pass
elif chunk == self._boundary + b"--":
self._at_eof = True
epilogue = await self._readline()
next_line = await self._readline()
# the epilogue is expected and then either the end of input or the
# parent multipart boundary, if the parent boundary is found then
# it should be marked as unread and handed to the parent for
# processing
if next_line[:2] == b"--":
self._unread.append(next_line)
# otherwise the request is likely missing an epilogue and both
# lines should be passed to the parent for processing
# (this handles the old behavior gracefully)
else:
self._unread.extend([next_line, epilogue])
else:
raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")
async def _read_headers(self) -> "CIMultiDictProxy[str]":
lines = [b""]
while True:
chunk = await self._content.readline()
chunk = chunk.strip()
lines.append(chunk)
if not chunk:
break
parser = HeadersParser()
headers, raw_headers = parser.parse_headers(lines)
return headers
async def _maybe_release_last_part(self) -> None:
"""Ensures that the last read body part is read completely."""
if self._last_part is not None:
if not self._last_part.at_eof():
await self._last_part.release()
self._unread.extend(self._last_part._unread)
self._last_part = None
_Part = Tuple[Payload, str, str]
class MultipartWriter(Payload):
"""Multipart body writer."""
def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
boundary = boundary if boundary is not None else uuid.uuid4().hex
# The underlying Payload API demands a str (utf-8), not bytes,
# so we need to ensure we don't lose anything during conversion.
        # As a result, require the boundary to be ASCII only in both cases.
try:
self._boundary = boundary.encode("ascii")
except UnicodeEncodeError:
raise ValueError("boundary should contain ASCII only chars") from None
ctype = f"multipart/{subtype}; boundary={self._boundary_value}"
super().__init__(None, content_type=ctype)
self._parts: List[_Part] = []
def __enter__(self) -> "MultipartWriter":
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
pass
def __iter__(self) -> Iterator[_Part]:
return iter(self._parts)
def __len__(self) -> int:
return len(self._parts)
def __bool__(self) -> bool:
return True
_valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z")
_invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]")
@property
def _boundary_value(self) -> str:
"""Wrap boundary parameter value in quotes, if necessary.
        Reads self.boundary and returns a unicode string.
"""
# Refer to RFCs 7231, 7230, 5234.
#
# parameter = token "=" ( token / quoted-string )
# token = 1*tchar
# quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
# qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
# obs-text = %x80-FF
# quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
# / DIGIT / ALPHA
# ; any VCHAR, except delimiters
# VCHAR = %x21-7E
value = self._boundary
if re.match(self._valid_tchar_regex, value):
return value.decode("ascii") # cannot fail
if re.search(self._invalid_qdtext_char_regex, value):
raise ValueError("boundary value contains invalid characters")
# escape %x5C and %x22
quoted_value_content = value.replace(b"\\", b"\\\\")
quoted_value_content = quoted_value_content.replace(b'"', b'\\"')
return '"' + quoted_value_content.decode("ascii") + '"'
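    # Illustrative behavior of the quoting rules above (boundary values
    # chosen for demonstration):
    #   b"simpleboundary" -> 'simpleboundary'   (valid token, left unquoted)
    #   b"with spaces"    -> '"with spaces"'    (not a token, quoted)
    #   b'has"quote'      -> '"has\\"quote"'    (escaped, then quoted)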
@property
def boundary(self) -> str:
return self._boundary.decode("ascii")
def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload:
if headers is None:
headers = CIMultiDict()
if isinstance(obj, Payload):
obj.headers.update(headers)
return self.append_payload(obj)
else:
try:
payload = get_payload(obj, headers=headers)
except LookupError:
raise TypeError("Cannot create payload from %r" % obj)
else:
return self.append_payload(payload)
def append_payload(self, payload: Payload) -> Payload:
"""Adds a new body part to multipart writer."""
# compression
encoding: Optional[str] = payload.headers.get(
CONTENT_ENCODING,
"",
).lower()
if encoding and encoding not in ("deflate", "gzip", "identity"):
raise RuntimeError(f"unknown content encoding: {encoding}")
if encoding == "identity":
encoding = None
# te encoding
te_encoding: Optional[str] = payload.headers.get(
CONTENT_TRANSFER_ENCODING,
"",
).lower()
if te_encoding not in ("", "base64", "quoted-printable", "binary"):
            raise RuntimeError(f"unknown content transfer encoding: {te_encoding}")
if te_encoding == "binary":
te_encoding = None
# size
size = payload.size
if size is not None and not (encoding or te_encoding):
payload.headers[CONTENT_LENGTH] = str(size)
self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type]
return payload
def append_json(
self, obj: Any, headers: Optional[MultiMapping[str]] = None
) -> Payload:
"""Helper to append JSON part."""
if headers is None:
headers = CIMultiDict()
return self.append_payload(JsonPayload(obj, headers=headers))
def append_form(
self,
obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
headers: Optional[MultiMapping[str]] = None,
) -> Payload:
"""Helper to append form urlencoded part."""
assert isinstance(obj, (Sequence, Mapping))
if headers is None:
headers = CIMultiDict()
if isinstance(obj, Mapping):
obj = list(obj.items())
data = urlencode(obj, doseq=True)
return self.append_payload(
StringPayload(
data, headers=headers, content_type="application/x-www-form-urlencoded"
)
)
@property
def size(self) -> Optional[int]:
"""Size of the payload."""
total = 0
for part, encoding, te_encoding in self._parts:
if encoding or te_encoding or part.size is None:
return None
total += int(
2
+ len(self._boundary)
+ 2
+ part.size # b'--'+self._boundary+b'\r\n'
+ len(part._binary_headers)
+ 2 # b'\r\n'
)
total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n'
return total
async def write(self, writer: Any, close_boundary: bool = True) -> None:
"""Write body."""
for part, encoding, te_encoding in self._parts:
await writer.write(b"--" + self._boundary + b"\r\n")
await writer.write(part._binary_headers)
if encoding or te_encoding:
w = MultipartPayloadWriter(writer)
if encoding:
w.enable_compression(encoding)
if te_encoding:
w.enable_encoding(te_encoding)
await part.write(w) # type: ignore[arg-type]
await w.write_eof()
else:
await part.write(writer)
await writer.write(b"\r\n")
if close_boundary:
await writer.write(b"--" + self._boundary + b"--\r\n")
class MultipartPayloadWriter:
def __init__(self, writer: Any) -> None:
self._writer = writer
self._encoding: Optional[str] = None
self._compress: Any = None
self._encoding_buffer: Optional[bytearray] = None
def enable_encoding(self, encoding: str) -> None:
if encoding == "base64":
self._encoding = encoding
self._encoding_buffer = bytearray()
elif encoding == "quoted-printable":
self._encoding = "quoted-printable"
def enable_compression(
self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
) -> None:
zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS
self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)
async def write_eof(self) -> None:
if self._compress is not None:
chunk = self._compress.flush()
if chunk:
self._compress = None
await self.write(chunk)
if self._encoding == "base64":
if self._encoding_buffer:
await self._writer.write(base64.b64encode(self._encoding_buffer))
async def write(self, chunk: bytes) -> None:
if self._compress is not None:
if chunk:
chunk = self._compress.compress(chunk)
if not chunk:
return
if self._encoding == "base64":
buf = self._encoding_buffer
assert buf is not None
buf.extend(chunk)
if buf:
div, mod = divmod(len(buf), 3)
enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
if enc_chunk:
b64chunk = base64.b64encode(enc_chunk)
await self._writer.write(b64chunk)
elif self._encoding == "quoted-printable":
await self._writer.write(binascii.b2a_qp(chunk))
else:
await self._writer.write(chunk)
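    # Note on the base64 path above (illustrative): the buffer is flushed in
    # multiples of 3 bytes so every emitted chunk is padding-free and the
    # chunks concatenate into one valid base64 stream. For example, 8
    # buffered bytes give divmod(8, 3) == (2, 2): 6 bytes are encoded now
    # and 2 are carried over for the next write (or for write_eof, which
    # encodes the remainder with padding).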
| 32,313 | Python | 32.590437 | 98 | 0.564541 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/client_reqrep.py | import asyncio
import codecs
import functools
import io
import re
import sys
import traceback
import warnings
from hashlib import md5, sha1, sha256
from http.cookies import CookieError, Morsel, SimpleCookie
from types import MappingProxyType, TracebackType
from typing import (
TYPE_CHECKING,
Any,
Dict,
Iterable,
List,
Mapping,
Optional,
Tuple,
Type,
Union,
cast,
)
import attr
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
from yarl import URL
from . import hdrs, helpers, http, multipart, payload
from .abc import AbstractStreamWriter
from .client_exceptions import (
ClientConnectionError,
ClientOSError,
ClientResponseError,
ContentTypeError,
InvalidURL,
ServerFingerprintMismatch,
)
from .formdata import FormData
from .helpers import (
PY_36,
BaseTimerContext,
BasicAuth,
HeadersMixin,
TimerNoop,
noop,
reify,
set_result,
)
from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, StreamWriter
from .log import client_logger
from .streams import StreamReader
from .typedefs import (
DEFAULT_JSON_DECODER,
JSONDecoder,
LooseCookies,
LooseHeaders,
RawHeaders,
)
try:
import ssl
from ssl import SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[misc,assignment]
try:
import cchardet as chardet
except ImportError: # pragma: no cover
import charset_normalizer as chardet # type: ignore[no-redef]
__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
if TYPE_CHECKING: # pragma: no cover
from .client import ClientSession
from .connector import Connection
from .tracing import Trace
json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ContentDisposition:
type: Optional[str]
parameters: "MappingProxyType[str, str]"
filename: Optional[str]
@attr.s(auto_attribs=True, frozen=True, slots=True)
class RequestInfo:
url: URL
method: str
headers: "CIMultiDictProxy[str]"
real_url: URL = attr.ib()
@real_url.default
def real_url_default(self) -> URL:
return self.url
class Fingerprint:
HASHFUNC_BY_DIGESTLEN = {
16: md5,
20: sha1,
32: sha256,
}
def __init__(self, fingerprint: bytes) -> None:
digestlen = len(fingerprint)
hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(digestlen)
if not hashfunc:
raise ValueError("fingerprint has invalid length")
elif hashfunc is md5 or hashfunc is sha1:
raise ValueError(
"md5 and sha1 are insecure and " "not supported. Use sha256."
)
self._hashfunc = hashfunc
self._fingerprint = fingerprint
@property
def fingerprint(self) -> bytes:
return self._fingerprint
def check(self, transport: asyncio.Transport) -> None:
if not transport.get_extra_info("sslcontext"):
return
sslobj = transport.get_extra_info("ssl_object")
cert = sslobj.getpeercert(binary_form=True)
got = self._hashfunc(cert).digest()
if got != self._fingerprint:
host, port, *_ = transport.get_extra_info("peername")
raise ServerFingerprintMismatch(self._fingerprint, got, host, port)
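# Illustrative usage (sketch; ``der_cert`` and ``session`` are assumed to be
# a DER-encoded certificate and an aiohttp.ClientSession):
#
#   import hashlib
#   fp = Fingerprint(hashlib.sha256(der_cert).digest())
#   async with session.get(url, ssl=fp) as resp:
#       ...
#
# A 16- or 20-byte digest (md5/sha1) is rejected by the constructor above.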
if ssl is not None:
SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
else: # pragma: no cover
SSL_ALLOWED_TYPES = type(None)
def _merge_ssl_params(
ssl: Union["SSLContext", bool, Fingerprint, None],
verify_ssl: Optional[bool],
ssl_context: Optional["SSLContext"],
fingerprint: Optional[bytes],
) -> Union["SSLContext", bool, Fingerprint, None]:
if verify_ssl is not None and not verify_ssl:
warnings.warn(
"verify_ssl is deprecated, use ssl=False instead",
DeprecationWarning,
stacklevel=3,
)
if ssl is not None:
raise ValueError(
"verify_ssl, ssl_context, fingerprint and ssl "
"parameters are mutually exclusive"
)
else:
ssl = False
if ssl_context is not None:
warnings.warn(
"ssl_context is deprecated, use ssl=context instead",
DeprecationWarning,
stacklevel=3,
)
if ssl is not None:
raise ValueError(
"verify_ssl, ssl_context, fingerprint and ssl "
"parameters are mutually exclusive"
)
else:
ssl = ssl_context
if fingerprint is not None:
warnings.warn(
"fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead",
DeprecationWarning,
stacklevel=3,
)
if ssl is not None:
raise ValueError(
"verify_ssl, ssl_context, fingerprint and ssl "
"parameters are mutually exclusive"
)
else:
ssl = Fingerprint(fingerprint)
if not isinstance(ssl, SSL_ALLOWED_TYPES):
raise TypeError(
"ssl should be SSLContext, bool, Fingerprint or None, "
"got {!r} instead.".format(ssl)
)
return ssl
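# Illustrative mapping (sketch) of the deprecated parameters onto the single
# ``ssl`` argument handled above:
#   verify_ssl=False         -> ssl=False
#   ssl_context=ctx          -> ssl=ctx
#   fingerprint=digest_bytes -> ssl=Fingerprint(digest_bytes)
# Combining any of them with an explicit ``ssl`` raises ValueError.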
@attr.s(auto_attribs=True, slots=True, frozen=True)
class ConnectionKey:
    # The key should contain information about the proxy / TLS configuration
    # used, to prevent reusing the wrong connections from a pool.
host: str
port: Optional[int]
is_ssl: bool
ssl: Union[SSLContext, None, bool, Fingerprint]
proxy: Optional[URL]
proxy_auth: Optional[BasicAuth]
proxy_headers_hash: Optional[int] # hash(CIMultiDict)
def _is_expected_content_type(
response_content_type: str, expected_content_type: str
) -> bool:
if expected_content_type == "application/json":
return json_re.match(response_content_type) is not None
return expected_content_type in response_content_type
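# Illustrative examples (sketch): with expected_content_type set to
# "application/json", the regex above accepts "application/json" as well as
# structured-syntax suffixes such as "application/problem+json" and
# "application/vnd.api+json"; any other expectation falls back to a plain
# substring containment test.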
class ClientRequest:
GET_METHODS = {
hdrs.METH_GET,
hdrs.METH_HEAD,
hdrs.METH_OPTIONS,
hdrs.METH_TRACE,
}
POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})
DEFAULT_HEADERS = {
hdrs.ACCEPT: "*/*",
hdrs.ACCEPT_ENCODING: "gzip, deflate",
}
body = b""
auth = None
response = None
_writer = None # async task for streaming data
_continue = None # waiter future for '100 Continue' response
    # N.B.
    # Adding a __del__ method that closes self._writer doesn't make sense:
    # the writer keeps a reference to self, so the finalizer will not be
    # called until the writer has finished.
def __init__(
self,
method: str,
url: URL,
*,
params: Optional[Mapping[str, str]] = None,
headers: Optional[LooseHeaders] = None,
skip_auto_headers: Iterable[str] = frozenset(),
data: Any = None,
cookies: Optional[LooseCookies] = None,
auth: Optional[BasicAuth] = None,
version: http.HttpVersion = http.HttpVersion11,
compress: Optional[str] = None,
chunked: Optional[bool] = None,
expect100: bool = False,
loop: Optional[asyncio.AbstractEventLoop] = None,
response_class: Optional[Type["ClientResponse"]] = None,
proxy: Optional[URL] = None,
proxy_auth: Optional[BasicAuth] = None,
timer: Optional[BaseTimerContext] = None,
session: Optional["ClientSession"] = None,
ssl: Union[SSLContext, bool, Fingerprint, None] = None,
proxy_headers: Optional[LooseHeaders] = None,
traces: Optional[List["Trace"]] = None,
):
if loop is None:
loop = asyncio.get_event_loop()
assert isinstance(url, URL), url
assert isinstance(proxy, (URL, type(None))), proxy
# FIXME: session is None in tests only, need to fix tests
# assert session is not None
self._session = cast("ClientSession", session)
if params:
q = MultiDict(url.query)
url2 = url.with_query(params)
q.extend(url2.query)
url = url.with_query(q)
self.original_url = url
self.url = url.with_fragment(None)
self.method = method.upper()
self.chunked = chunked
self.compress = compress
self.loop = loop
self.length = None
if response_class is None:
real_response_class = ClientResponse
else:
real_response_class = response_class
self.response_class: Type[ClientResponse] = real_response_class
self._timer = timer if timer is not None else TimerNoop()
self._ssl = ssl
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
self.update_version(version)
self.update_host(url)
self.update_headers(headers)
self.update_auto_headers(skip_auto_headers)
self.update_cookies(cookies)
self.update_content_encoding(data)
self.update_auth(auth)
self.update_proxy(proxy, proxy_auth, proxy_headers)
self.update_body_from_data(data)
if data is not None or self.method not in self.GET_METHODS:
self.update_transfer_encoding()
self.update_expect_continue(expect100)
if traces is None:
traces = []
self._traces = traces
def is_ssl(self) -> bool:
return self.url.scheme in ("https", "wss")
@property
def ssl(self) -> Union["SSLContext", None, bool, Fingerprint]:
return self._ssl
@property
def connection_key(self) -> ConnectionKey:
proxy_headers = self.proxy_headers
if proxy_headers:
h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items()))
else:
h = None
return ConnectionKey(
self.host,
self.port,
self.is_ssl(),
self.ssl,
self.proxy,
self.proxy_auth,
h,
)
@property
def host(self) -> str:
ret = self.url.raw_host
assert ret is not None
return ret
@property
def port(self) -> Optional[int]:
return self.url.port
@property
def request_info(self) -> RequestInfo:
headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
return RequestInfo(self.url, self.method, headers, self.original_url)
def update_host(self, url: URL) -> None:
"""Update destination host, port and connection type (ssl)."""
# get host/port
if not url.raw_host:
raise InvalidURL(url)
# basic auth info
username, password = url.user, url.password
if username:
self.auth = helpers.BasicAuth(username, password or "")
def update_version(self, version: Union[http.HttpVersion, str]) -> None:
"""Convert request version to two elements tuple.
parser HTTP version '1.1' => (1, 1)
"""
if isinstance(version, str):
v = [part.strip() for part in version.split(".", 1)]
try:
version = http.HttpVersion(int(v[0]), int(v[1]))
except ValueError:
raise ValueError(
f"Can not parse http version number: {version}"
) from None
self.version = version
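    # Illustrative behavior (sketch): update_version("1.1") stores
    # HttpVersion(1, 1), while update_version("1.x") raises ValueError
    # because "x" is not an integer minor version.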
def update_headers(self, headers: Optional[LooseHeaders]) -> None:
"""Update request headers."""
self.headers: CIMultiDict[str] = CIMultiDict()
# add host
netloc = cast(str, self.url.raw_host)
if helpers.is_ipv6_address(netloc):
netloc = f"[{netloc}]"
if self.url.port is not None and not self.url.is_default_port():
netloc += ":" + str(self.url.port)
self.headers[hdrs.HOST] = netloc
if headers:
if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
headers = headers.items() # type: ignore[assignment]
for key, value in headers: # type: ignore[misc]
# A special case for Host header
if key.lower() == "host":
self.headers[key] = value
else:
self.headers.add(key, value)
def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None:
self.skip_auto_headers = CIMultiDict(
(hdr, None) for hdr in sorted(skip_auto_headers)
)
used_headers = self.headers.copy()
used_headers.extend(self.skip_auto_headers) # type: ignore[arg-type]
for hdr, val in self.DEFAULT_HEADERS.items():
if hdr not in used_headers:
self.headers.add(hdr, val)
if hdrs.USER_AGENT not in used_headers:
self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
"""Update request cookies header."""
if not cookies:
return
c: SimpleCookie[str] = SimpleCookie()
if hdrs.COOKIE in self.headers:
c.load(self.headers.get(hdrs.COOKIE, ""))
del self.headers[hdrs.COOKIE]
if isinstance(cookies, Mapping):
iter_cookies = cookies.items()
else:
iter_cookies = cookies # type: ignore[assignment]
for name, value in iter_cookies:
if isinstance(value, Morsel):
# Preserve coded_value
mrsl_val = value.get(value.key, Morsel())
mrsl_val.set(value.key, value.value, value.coded_value)
c[name] = mrsl_val
else:
c[name] = value # type: ignore[assignment]
self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()
def update_content_encoding(self, data: Any) -> None:
"""Set request content encoding."""
if data is None:
return
enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower()
if enc:
if self.compress:
raise ValueError(
"compress can not be set " "if Content-Encoding header is set"
)
elif self.compress:
if not isinstance(self.compress, str):
self.compress = "deflate"
self.headers[hdrs.CONTENT_ENCODING] = self.compress
self.chunked = True # enable chunked, no need to deal with length
def update_transfer_encoding(self) -> None:
"""Analyze transfer-encoding header."""
te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()
if "chunked" in te:
if self.chunked:
raise ValueError(
"chunked can not be set "
'if "Transfer-Encoding: chunked" header is set'
)
elif self.chunked:
if hdrs.CONTENT_LENGTH in self.headers:
raise ValueError(
"chunked can not be set " "if Content-Length header is set"
)
self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
else:
if hdrs.CONTENT_LENGTH not in self.headers:
self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
def update_auth(self, auth: Optional[BasicAuth]) -> None:
"""Set basic auth."""
if auth is None:
auth = self.auth
if auth is None:
return
if not isinstance(auth, helpers.BasicAuth):
raise TypeError("BasicAuth() tuple is required instead")
self.headers[hdrs.AUTHORIZATION] = auth.encode()
def update_body_from_data(self, body: Any) -> None:
if body is None:
return
# FormData
if isinstance(body, FormData):
body = body()
try:
body = payload.PAYLOAD_REGISTRY.get(body, disposition=None)
except payload.LookupError:
body = FormData(body)()
self.body = body
# enable chunked encoding if needed
if not self.chunked:
if hdrs.CONTENT_LENGTH not in self.headers:
size = body.size
if size is None:
self.chunked = True
else:
if hdrs.CONTENT_LENGTH not in self.headers:
self.headers[hdrs.CONTENT_LENGTH] = str(size)
# copy payload headers
assert body.headers
for (key, value) in body.headers.items():
if key in self.headers:
continue
if key in self.skip_auto_headers:
continue
self.headers[key] = value
def update_expect_continue(self, expect: bool = False) -> None:
if expect:
self.headers[hdrs.EXPECT] = "100-continue"
elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue":
expect = True
if expect:
self._continue = self.loop.create_future()
def update_proxy(
self,
proxy: Optional[URL],
proxy_auth: Optional[BasicAuth],
proxy_headers: Optional[LooseHeaders],
) -> None:
if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
raise ValueError("proxy_auth must be None or BasicAuth() tuple")
self.proxy = proxy
self.proxy_auth = proxy_auth
self.proxy_headers = proxy_headers
def keep_alive(self) -> bool:
if self.version < HttpVersion10:
# keep alive not supported at all
return False
if self.version == HttpVersion10:
if self.headers.get(hdrs.CONNECTION) == "keep-alive":
return True
else: # no headers means we close for Http 1.0
return False
elif self.headers.get(hdrs.CONNECTION) == "close":
return False
return True
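    # Decision table (illustrative):
    #   HTTP/0.9 and below                     -> False (keep-alive unsupported)
    #   HTTP/1.0 with "Connection: keep-alive" -> True
    #   HTTP/1.0 otherwise                     -> False
    #   HTTP/1.1 with "Connection: close"      -> False
    #   HTTP/1.1 otherwise                     -> True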
async def write_bytes(
self, writer: AbstractStreamWriter, conn: "Connection"
) -> None:
"""Support coroutines that yields bytes objects."""
# 100 response
if self._continue is not None:
await writer.drain()
await self._continue
protocol = conn.protocol
assert protocol is not None
try:
if isinstance(self.body, payload.Payload):
await self.body.write(writer)
else:
if isinstance(self.body, (bytes, bytearray)):
self.body = (self.body,) # type: ignore[assignment]
for chunk in self.body:
await writer.write(chunk) # type: ignore[arg-type]
await writer.write_eof()
except OSError as exc:
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
protocol.set_exception(exc)
else:
new_exc = ClientOSError(
exc.errno, "Can not write request body for %s" % self.url
)
new_exc.__context__ = exc
new_exc.__cause__ = exc
protocol.set_exception(new_exc)
except asyncio.CancelledError as exc:
if not conn.closed:
protocol.set_exception(exc)
except Exception as exc:
protocol.set_exception(exc)
finally:
self._writer = None
async def send(self, conn: "Connection") -> "ClientResponse":
        # Specify the request target:
        # - a CONNECT request must send an authority-form URI
        # - a non-CONNECT request via a proxy must send an absolute-form URI
        # - the most common case is an origin-form URI
if self.method == hdrs.METH_CONNECT:
connect_host = self.url.raw_host
assert connect_host is not None
if helpers.is_ipv6_address(connect_host):
connect_host = f"[{connect_host}]"
path = f"{connect_host}:{self.url.port}"
elif self.proxy and not self.is_ssl():
path = str(self.url)
else:
path = self.url.raw_path
if self.url.raw_query_string:
path += "?" + self.url.raw_query_string
protocol = conn.protocol
assert protocol is not None
writer = StreamWriter(
protocol,
self.loop,
on_chunk_sent=functools.partial(
self._on_chunk_request_sent, self.method, self.url
),
on_headers_sent=functools.partial(
self._on_headers_request_sent, self.method, self.url
),
)
if self.compress:
writer.enable_compression(self.compress)
if self.chunked is not None:
writer.enable_chunking()
# set default content-type
if (
self.method in self.POST_METHODS
and hdrs.CONTENT_TYPE not in self.skip_auto_headers
and hdrs.CONTENT_TYPE not in self.headers
):
self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"
# set the connection header
connection = self.headers.get(hdrs.CONNECTION)
if not connection:
if self.keep_alive():
if self.version == HttpVersion10:
connection = "keep-alive"
else:
if self.version == HttpVersion11:
connection = "close"
if connection is not None:
self.headers[hdrs.CONNECTION] = connection
# status + headers
status_line = "{0} {1} HTTP/{2[0]}.{2[1]}".format(
self.method, path, self.version
)
await writer.write_headers(status_line, self.headers)
self._writer = self.loop.create_task(self.write_bytes(writer, conn))
response_class = self.response_class
assert response_class is not None
self.response = response_class(
self.method,
self.original_url,
writer=self._writer,
continue100=self._continue,
timer=self._timer,
request_info=self.request_info,
traces=self._traces,
loop=self.loop,
session=self._session,
)
return self.response
async def close(self) -> None:
if self._writer is not None:
try:
await self._writer
finally:
self._writer = None
def terminate(self) -> None:
if self._writer is not None:
if not self.loop.is_closed():
self._writer.cancel()
self._writer = None
async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
for trace in self._traces:
await trace.send_request_chunk_sent(method, url, chunk)
async def _on_headers_request_sent(
self, method: str, url: URL, headers: "CIMultiDict[str]"
) -> None:
for trace in self._traces:
await trace.send_request_headers(method, url, headers)
class ClientResponse(HeadersMixin):
# from the Status-Line of the response
version = None # HTTP-Version
status: int = None # type: ignore[assignment] # Status-Code
reason = None # Reason-Phrase
content: StreamReader = None # type: ignore[assignment] # Payload stream
_headers: "CIMultiDictProxy[str]" = None # type: ignore[assignment]
_raw_headers: RawHeaders = None # type: ignore[assignment] # Response raw headers
_connection = None # current connection
_source_traceback = None
    # set up by ClientRequest after ClientResponse object creation;
    # the post-init stage avoids changing the ctor signature
    _closed = True  # to allow __del__ for a not-properly-initialized response
_released = False
def __init__(
self,
method: str,
url: URL,
*,
writer: "asyncio.Task[None]",
continue100: Optional["asyncio.Future[bool]"],
timer: BaseTimerContext,
request_info: RequestInfo,
traces: List["Trace"],
loop: asyncio.AbstractEventLoop,
session: "ClientSession",
) -> None:
assert isinstance(url, URL)
self.method = method
self.cookies: SimpleCookie[str] = SimpleCookie()
self._real_url = url
self._url = url.with_fragment(None)
self._body: Any = None
self._writer: Optional[asyncio.Task[None]] = writer
self._continue = continue100 # None by default
self._closed = True
self._history: Tuple[ClientResponse, ...] = ()
self._request_info = request_info
self._timer = timer if timer is not None else TimerNoop()
self._cache: Dict[str, Any] = {}
self._traces = traces
self._loop = loop
# store a reference to session #1985
self._session: Optional[ClientSession] = session
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
@reify
def url(self) -> URL:
return self._url
@reify
def url_obj(self) -> URL:
warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
return self._url
@reify
def real_url(self) -> URL:
return self._real_url
@reify
def host(self) -> str:
assert self._url.host is not None
return self._url.host
@reify
def headers(self) -> "CIMultiDictProxy[str]":
return self._headers
@reify
def raw_headers(self) -> RawHeaders:
return self._raw_headers
@reify
def request_info(self) -> RequestInfo:
return self._request_info
@reify
def content_disposition(self) -> Optional[ContentDisposition]:
raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
if raw is None:
return None
disposition_type, params_dct = multipart.parse_content_disposition(raw)
params = MappingProxyType(params_dct)
filename = multipart.content_disposition_filename(params)
return ContentDisposition(disposition_type, params, filename)
def __del__(self, _warnings: Any = warnings) -> None:
if self._closed:
return
if self._connection is not None:
self._connection.release()
self._cleanup_writer()
if self._loop.get_debug():
if PY_36:
kwargs = {"source": self}
else:
kwargs = {}
_warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
context = {"client_response": self, "message": "Unclosed response"}
if self._source_traceback:
context["source_traceback"] = self._source_traceback
self._loop.call_exception_handler(context)
def __repr__(self) -> str:
out = io.StringIO()
ascii_encodable_url = str(self.url)
if self.reason:
ascii_encodable_reason = self.reason.encode(
"ascii", "backslashreplace"
).decode("ascii")
else:
ascii_encodable_reason = self.reason
print(
"<ClientResponse({}) [{} {}]>".format(
ascii_encodable_url, self.status, ascii_encodable_reason
),
file=out,
)
print(self.headers, file=out)
return out.getvalue()
@property
def connection(self) -> Optional["Connection"]:
return self._connection
@reify
def history(self) -> Tuple["ClientResponse", ...]:
"""A sequence of of responses, if redirects occurred."""
return self._history
@reify
def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
links_str = ", ".join(self.headers.getall("link", []))
if not links_str:
return MultiDictProxy(MultiDict())
links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()
for val in re.split(r",(?=\s*<)", links_str):
match = re.match(r"\s*<(.*)>(.*)", val)
if match is None: # pragma: no cover
# the check exists to suppress mypy error
continue
url, params_str = match.groups()
params = params_str.split(";")[1:]
link: MultiDict[Union[str, URL]] = MultiDict()
for param in params:
match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
if match is None: # pragma: no cover
# the check exists to suppress mypy error
continue
key, _, value, _ = match.groups()
link.add(key, value)
key = link.get("rel", url) # type: ignore[assignment]
link.add("url", self.url.join(URL(url)))
links.add(key, MultiDictProxy(link))
return MultiDictProxy(links)
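    # Illustrative sketch (not from the original source): given the header
    #   Link: <https://api.example.com/items?page=2>; rel="next"
    # the property yields links["next"]["url"] ==
    # URL("https://api.example.com/items?page=2") and
    # links["next"]["rel"] == "next".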
async def start(self, connection: "Connection") -> "ClientResponse":
"""Start response processing."""
self._closed = False
self._protocol = connection.protocol
self._connection = connection
with self._timer:
while True:
# read response
try:
protocol = self._protocol
message, payload = await protocol.read() # type: ignore[union-attr]
except http.HttpProcessingError as exc:
raise ClientResponseError(
self.request_info,
self.history,
status=exc.code,
message=exc.message,
headers=exc.headers,
) from exc
if message.code < 100 or message.code > 199 or message.code == 101:
break
if self._continue is not None:
set_result(self._continue, True)
self._continue = None
# payload eof handler
payload.on_eof(self._response_eof)
# response status
self.version = message.version
self.status = message.code
self.reason = message.reason
# headers
self._headers = message.headers # type is CIMultiDictProxy
self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes]
# payload
self.content = payload
# cookies
for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
try:
self.cookies.load(hdr)
except CookieError as exc:
client_logger.warning("Can not load response cookies: %s", exc)
return self
def _response_eof(self) -> None:
if self._closed:
return
if self._connection is not None:
            # For websockets the protocol could be None because
            # the connection could be detached.
if (
self._connection.protocol is not None
and self._connection.protocol.upgraded
):
return
self._connection.release()
self._connection = None
self._closed = True
self._cleanup_writer()
@property
def closed(self) -> bool:
return self._closed
def close(self) -> None:
if not self._released:
self._notify_content()
if self._closed:
return
self._closed = True
if self._loop is None or self._loop.is_closed():
return
if self._connection is not None:
self._connection.close()
self._connection = None
self._cleanup_writer()
def release(self) -> Any:
if not self._released:
self._notify_content()
if self._closed:
return noop()
self._closed = True
if self._connection is not None:
self._connection.release()
self._connection = None
self._cleanup_writer()
return noop()
@property
def ok(self) -> bool:
"""Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.
This is **not** a check for ``200 OK`` but a check that the response
status is under 400.
"""
return 400 > self.status
def raise_for_status(self) -> None:
if not self.ok:
            # the reason should never be None for a started response
assert self.reason is not None
self.release()
raise ClientResponseError(
self.request_info,
self.history,
status=self.status,
message=self.reason,
headers=self.headers,
)
def _cleanup_writer(self) -> None:
if self._writer is not None:
self._writer.cancel()
self._writer = None
self._session = None
def _notify_content(self) -> None:
content = self.content
if content and content.exception() is None:
content.set_exception(ClientConnectionError("Connection closed"))
self._released = True
async def wait_for_close(self) -> None:
if self._writer is not None:
try:
await self._writer
finally:
self._writer = None
self.release()
async def read(self) -> bytes:
"""Read response payload."""
if self._body is None:
try:
self._body = await self.content.read()
for trace in self._traces:
await trace.send_response_chunk_received(
self.method, self.url, self._body
)
except BaseException:
self.close()
raise
elif self._released:
raise ClientConnectionError("Connection closed")
return self._body # type: ignore[no-any-return]
def get_encoding(self) -> str:
ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
mimetype = helpers.parse_mimetype(ctype)
encoding = mimetype.parameters.get("charset")
if encoding:
try:
codecs.lookup(encoding)
except LookupError:
encoding = None
if not encoding:
if mimetype.type == "application" and (
mimetype.subtype == "json" or mimetype.subtype == "rdap"
):
# RFC 7159 states that the default encoding is UTF-8.
# RFC 7483 defines application/rdap+json
encoding = "utf-8"
elif self._body is None:
raise RuntimeError(
"Cannot guess the encoding of " "a not yet read body"
)
else:
encoding = chardet.detect(self._body)["encoding"]
if not encoding:
encoding = "utf-8"
return encoding
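    # Illustrative fallbacks (sketch): "text/html; charset=iso-8859-1" uses
    # the declared charset, "application/json" without a charset defaults to
    # UTF-8 per RFC 7159, and anything else is sniffed from the body bytes
    # (which must already have been read), with "utf-8" as the last resort.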
async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
"""Read response payload and decode."""
if self._body is None:
await self.read()
if encoding is None:
encoding = self.get_encoding()
return self._body.decode( # type: ignore[no-any-return,union-attr]
encoding, errors=errors
)
async def json(
self,
*,
encoding: Optional[str] = None,
loads: JSONDecoder = DEFAULT_JSON_DECODER,
content_type: Optional[str] = "application/json",
) -> Any:
"""Read and decodes JSON response."""
if self._body is None:
await self.read()
if content_type:
ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
if not _is_expected_content_type(ctype, content_type):
raise ContentTypeError(
self.request_info,
self.history,
message=(
"Attempt to decode JSON with " "unexpected mimetype: %s" % ctype
),
headers=self.headers,
)
stripped = self._body.strip() # type: ignore[union-attr]
if not stripped:
return None
if encoding is None:
encoding = self.get_encoding()
return loads(stripped.decode(encoding))
async def __aenter__(self) -> "ClientResponse":
return self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
# similar to _RequestContextManager, we do not need to check
# for exceptions, response object can close connection
# if state is broken
self.release()
| 36,973 | Python | 31.576211 | 88 | 0.565845 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/log.py | import logging
access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")
| 325 | Python | 35.222218 | 55 | 0.787692 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/client_exceptions.py | """HTTP related errors."""
import asyncio
import warnings
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
from .http_parser import RawResponseMessage
from .typedefs import LooseHeaders
try:
import ssl
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = SSLContext = None # type: ignore[assignment]
if TYPE_CHECKING: # pragma: no cover
from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
else:
RequestInfo = ClientResponse = ConnectionKey = None
__all__ = (
"ClientError",
"ClientConnectionError",
"ClientOSError",
"ClientConnectorError",
"ClientProxyConnectionError",
"ClientSSLError",
"ClientConnectorSSLError",
"ClientConnectorCertificateError",
"ServerConnectionError",
"ServerTimeoutError",
"ServerDisconnectedError",
"ServerFingerprintMismatch",
"ClientResponseError",
"ClientHttpProxyError",
"WSServerHandshakeError",
"ContentTypeError",
"ClientPayloadError",
"InvalidURL",
)
class ClientError(Exception):
"""Base class for client connection errors."""
class ClientResponseError(ClientError):
"""Connection error during reading response.
request_info: instance of RequestInfo
"""
def __init__(
self,
request_info: RequestInfo,
history: Tuple[ClientResponse, ...],
*,
code: Optional[int] = None,
status: Optional[int] = None,
message: str = "",
headers: Optional[LooseHeaders] = None,
) -> None:
self.request_info = request_info
if code is not None:
if status is not None:
raise ValueError(
"Both code and status arguments are provided; "
"code is deprecated, use status instead"
)
warnings.warn(
"code argument is deprecated, use status instead",
DeprecationWarning,
stacklevel=2,
)
if status is not None:
self.status = status
elif code is not None:
self.status = code
else:
self.status = 0
self.message = message
self.headers = headers
self.history = history
self.args = (request_info, history)
def __str__(self) -> str:
return "{}, message={!r}, url={!r}".format(
self.status,
self.message,
self.request_info.real_url,
)
def __repr__(self) -> str:
args = f"{self.request_info!r}, {self.history!r}"
if self.status != 0:
args += f", status={self.status!r}"
if self.message != "":
args += f", message={self.message!r}"
if self.headers is not None:
args += f", headers={self.headers!r}"
return f"{type(self).__name__}({args})"
@property
def code(self) -> int:
warnings.warn(
"code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2,
)
return self.status
@code.setter
def code(self, value: int) -> None:
warnings.warn(
"code property is deprecated, use status instead",
DeprecationWarning,
stacklevel=2,
)
self.status = value
class ContentTypeError(ClientResponseError):
"""ContentType found is not valid."""
class WSServerHandshakeError(ClientResponseError):
"""websocket server handshake error."""
class ClientHttpProxyError(ClientResponseError):
"""HTTP proxy error.
Raised in :class:`aiohttp.connector.TCPConnector` if
proxy responds with status other than ``200 OK``
on ``CONNECT`` request.
"""
class TooManyRedirects(ClientResponseError):
"""Client was redirected too many times."""
class ClientConnectionError(ClientError):
"""Base class for client socket errors."""
class ClientOSError(ClientConnectionError, OSError):
"""OSError error."""
class ClientConnectorError(ClientOSError):
"""Client connector error.
Raised in :class:`aiohttp.connector.TCPConnector` if
a connection can not be established.
"""
def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
self._conn_key = connection_key
self._os_error = os_error
super().__init__(os_error.errno, os_error.strerror)
self.args = (connection_key, os_error)
@property
def os_error(self) -> OSError:
return self._os_error
@property
def host(self) -> str:
return self._conn_key.host
@property
def port(self) -> Optional[int]:
return self._conn_key.port
@property
def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
return self._conn_key.ssl
def __str__(self) -> str:
return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
self, self.ssl if self.ssl is not None else "default", self.strerror
)
    # OSError.__reduce__ does too much black magic
__reduce__ = BaseException.__reduce__
class ClientProxyConnectionError(ClientConnectorError):
"""Proxy connection error.
Raised in :class:`aiohttp.connector.TCPConnector` if
connection to proxy can not be established.
"""
class UnixClientConnectorError(ClientConnectorError):
"""Unix connector error.
Raised in :py:class:`aiohttp.connector.UnixConnector`
if connection to unix socket can not be established.
"""
def __init__(
self, path: str, connection_key: ConnectionKey, os_error: OSError
) -> None:
self._path = path
super().__init__(connection_key, os_error)
@property
def path(self) -> str:
return self._path
def __str__(self) -> str:
return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
self, self.ssl if self.ssl is not None else "default", self.strerror
)
class ServerConnectionError(ClientConnectionError):
"""Server connection errors."""
class ServerDisconnectedError(ServerConnectionError):
"""Server disconnected."""
def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
if message is None:
message = "Server disconnected"
self.args = (message,)
self.message = message
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
"""Server timeout error."""
class ServerFingerprintMismatch(ServerConnectionError):
"""SSL certificate does not match expected fingerprint."""
def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
self.expected = expected
self.got = got
self.host = host
self.port = port
self.args = (expected, got, host, port)
def __repr__(self) -> str:
return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
self.__class__.__name__, self.expected, self.got, self.host, self.port
)
class ClientPayloadError(ClientError):
"""Response payload error."""
class InvalidURL(ClientError, ValueError):
"""Invalid URL.
    URL used for fetching is malformed, e.g. it doesn't contain a host
    part.
"""
# Derive from ValueError for backward compatibility
def __init__(self, url: Any) -> None:
# The type of url is not yarl.URL because the exception can be raised
# on URL(url) call
super().__init__(url)
@property
def url(self) -> Any:
return self.args[0]
def __repr__(self) -> str:
return f"<{self.__class__.__name__} {self.url}>"
class ClientSSLError(ClientConnectorError):
"""Base error for ssl.*Errors."""
if ssl is not None:
cert_errors = (ssl.CertificateError,)
cert_errors_bases = (
ClientSSLError,
ssl.CertificateError,
)
ssl_errors = (ssl.SSLError,)
ssl_error_bases = (ClientSSLError, ssl.SSLError)
else: # pragma: no cover
cert_errors = tuple()
cert_errors_bases = (
ClientSSLError,
ValueError,
)
ssl_errors = tuple()
ssl_error_bases = (ClientSSLError,)
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc]
"""Response ssl error."""
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc]
"""Response certificate error."""
def __init__(
self, connection_key: ConnectionKey, certificate_error: Exception
) -> None:
self._conn_key = connection_key
self._certificate_error = certificate_error
self.args = (connection_key, certificate_error)
@property
def certificate_error(self) -> Exception:
return self._certificate_error
@property
def host(self) -> str:
return self._conn_key.host
@property
def port(self) -> Optional[int]:
return self._conn_key.port
@property
def ssl(self) -> bool:
return self._conn_key.is_ssl
def __str__(self) -> str:
return (
"Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
"[{0.certificate_error.__class__.__name__}: "
"{0.certificate_error.args}]".format(self)
)
| 9,270 | Python | 26.029154 | 86 | 0.613916 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/http_websocket.py | """WebSocket protocol versions 13 and 8."""
import asyncio
import collections
import json
import random
import re
import sys
import zlib
from enum import IntEnum
from struct import Struct
from typing import Any, Callable, List, Optional, Pattern, Set, Tuple, Union, cast
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS
from .streams import DataQueue
from .typedefs import Final
__all__ = (
"WS_CLOSED_MESSAGE",
"WS_CLOSING_MESSAGE",
"WS_KEY",
"WebSocketReader",
"WebSocketWriter",
"WSMessage",
"WebSocketError",
"WSMsgType",
"WSCloseCode",
)
class WSCloseCode(IntEnum):
OK = 1000
GOING_AWAY = 1001
PROTOCOL_ERROR = 1002
UNSUPPORTED_DATA = 1003
ABNORMAL_CLOSURE = 1006
INVALID_TEXT = 1007
POLICY_VIOLATION = 1008
MESSAGE_TOO_BIG = 1009
MANDATORY_EXTENSION = 1010
INTERNAL_ERROR = 1011
SERVICE_RESTART = 1012
TRY_AGAIN_LATER = 1013
BAD_GATEWAY = 1014
ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
class WSMsgType(IntEnum):
# websocket spec types
CONTINUATION = 0x0
TEXT = 0x1
BINARY = 0x2
PING = 0x9
PONG = 0xA
CLOSE = 0x8
# aiohttp specific types
CLOSING = 0x100
CLOSED = 0x101
ERROR = 0x102
text = TEXT
binary = BINARY
ping = PING
pong = PONG
close = CLOSE
closing = CLOSING
closed = CLOSED
error = ERROR
WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
UNPACK_LEN2 = Struct("!H").unpack_from
UNPACK_LEN3 = Struct("!Q").unpack_from
UNPACK_CLOSE_CODE = Struct("!H").unpack
PACK_LEN1 = Struct("!BB").pack
PACK_LEN2 = Struct("!BBH").pack
PACK_LEN3 = Struct("!BBQ").pack
PACK_CLOSE_CODE = Struct("!H").pack
MSG_SIZE: Final[int] = 2**14
DEFAULT_LIMIT: Final[int] = 2**16
_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"])
class WSMessage(_WSMessageBase):
def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
"""Return parsed JSON data.
.. versionadded:: 0.22
"""
return loads(self.data)
WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
class WebSocketError(Exception):
"""WebSocket protocol parser error."""
def __init__(self, code: int, message: str) -> None:
self.code = code
super().__init__(code, message)
def __str__(self) -> str:
return cast(str, self.args[1])
class WSHandshakeError(Exception):
"""WebSocket protocol handshake error."""
native_byteorder: Final[str] = sys.byteorder
# Used by _websocket_mask_python
_XOR_TABLE: Final[List[bytes]] = [bytes(a ^ b for a in range(256)) for b in range(256)]
def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
"""Websocket masking function.
`mask` is a `bytes` object of length 4; `data` is a `bytearray`
object of any length. The contents of `data` are masked with `mask`,
as specified in section 5.3 of RFC 6455.
Note that this function mutates the `data` argument.
This pure-python implementation may be replaced by an optimized
version when available.
"""
assert isinstance(data, bytearray), data
assert len(mask) == 4, mask
if data:
a, b, c, d = (_XOR_TABLE[n] for n in mask)
data[::4] = data[::4].translate(a)
data[1::4] = data[1::4].translate(b)
data[2::4] = data[2::4].translate(c)
data[3::4] = data[3::4].translate(d)
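# Worked example (illustrative): with mask = b"\x01\x02\x03\x04" and
# data = bytearray(b"\x00\x00\x00\x00\xff"), each byte is XORed with the mask
# byte at index i % 4, leaving bytearray(b"\x01\x02\x03\x04\xfe"). The
# translate tables above apply that XOR per 4-byte stride without a
# Python-level loop.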
if NO_EXTENSIONS: # pragma: no cover
_websocket_mask = _websocket_mask_python
else:
try:
from ._websocket import _websocket_mask_cython # type: ignore[import]
_websocket_mask = _websocket_mask_cython
except ImportError: # pragma: no cover
_websocket_mask = _websocket_mask_python
_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])
_WS_EXT_RE: Final[Pattern[str]] = re.compile(
r"^(?:;\s*(?:"
r"(server_no_context_takeover)|"
r"(client_no_context_takeover)|"
r"(server_max_window_bits(?:=(\d+))?)|"
r"(client_max_window_bits(?:=(\d+))?)))*$"
)
_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
if not extstr:
return 0, False
compress = 0
notakeover = False
for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
defext = ext.group(1)
        # Return compress = 15 when we get a bare `permessage-deflate`
if not defext:
compress = 15
break
match = _WS_EXT_RE.match(defext)
if match:
compress = 15
if isserver:
                # The server never fails to detect the compress handshake,
                # and does not need to send max wbits to the client.
if match.group(4):
compress = int(match.group(4))
                    # Group 3 must match if group 4 matches.
                    # zlib does not support a compress window of 8 bits;
                    # if the level is unsupported, CONTINUE to the
                    # next extension.
if compress > 15 or compress < 9:
compress = 0
continue
if match.group(1):
notakeover = True
# Ignore regex group 5 & 6 for client_max_window_bits
break
else:
if match.group(6):
compress = int(match.group(6))
                    # Group 5 must match if group 6 matches.
                    # zlib does not support a compress window of 8 bits;
                    # if the level is unsupported, FAIL the parse.
if compress > 15 or compress < 9:
raise WSHandshakeError("Invalid window size")
if match.group(2):
notakeover = True
# Ignore regex group 5 & 6 for client_max_window_bits
break
        # Fail if on the client side and the extension does not match
elif not isserver:
raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))
return compress, notakeover
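# Illustrative results (sketch):
#   ws_ext_parse("permessage-deflate") -> (15, False)
#   ws_ext_parse("permessage-deflate; client_max_window_bits=10") -> (10, False)
#   ws_ext_parse("permessage-deflate; server_no_context_takeover",
#                isserver=True) -> (15, True)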
def ws_ext_gen(
compress: int = 15, isserver: bool = False, server_notakeover: bool = False
) -> str:
# client_notakeover=False not used for server
    # zlib does not support a compress window of 8 bits (wbits=8)
if compress < 9 or compress > 15:
raise ValueError(
"Compress wbits must between 9 and 15, " "zlib does not support wbits=8"
)
enabledext = ["permessage-deflate"]
if not isserver:
enabledext.append("client_max_window_bits")
if compress < 15:
enabledext.append("server_max_window_bits=" + str(compress))
if server_notakeover:
enabledext.append("server_no_context_takeover")
# if client_notakeover:
# enabledext.append('client_no_context_takeover')
return "; ".join(enabledext)
class WSParserState(IntEnum):
READ_HEADER = 1
READ_PAYLOAD_LENGTH = 2
READ_PAYLOAD_MASK = 3
READ_PAYLOAD = 4
class WebSocketReader:
def __init__(
self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
) -> None:
self.queue = queue
self._max_msg_size = max_msg_size
self._exc: Optional[BaseException] = None
self._partial = bytearray()
self._state = WSParserState.READ_HEADER
self._opcode: Optional[int] = None
self._frame_fin = False
self._frame_opcode: Optional[int] = None
self._frame_payload = bytearray()
self._tail = b""
self._has_mask = False
self._frame_mask: Optional[bytes] = None
self._payload_length = 0
self._payload_length_flag = 0
self._compressed: Optional[bool] = None
self._decompressobj: Any = None # zlib.decompressobj actually
self._compress = compress
def feed_eof(self) -> None:
self.queue.feed_eof()
def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
if self._exc:
return True, data
try:
return self._feed_data(data)
except Exception as exc:
self._exc = exc
self.queue.set_exception(exc)
return True, b""
def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
for fin, opcode, payload, compressed in self.parse_frame(data):
if compressed and not self._decompressobj:
self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
if opcode == WSMsgType.CLOSE:
if len(payload) >= 2:
close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
f"Invalid close code: {close_code}",
)
try:
close_message = payload[2:].decode("utf-8")
except UnicodeDecodeError as exc:
raise WebSocketError(
WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
) from exc
msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
elif payload:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
f"Invalid close frame: {fin} {opcode} {payload!r}",
)
else:
msg = WSMessage(WSMsgType.CLOSE, 0, "")
self.queue.feed_data(msg, 0)
elif opcode == WSMsgType.PING:
self.queue.feed_data(
WSMessage(WSMsgType.PING, payload, ""), len(payload)
)
elif opcode == WSMsgType.PONG:
self.queue.feed_data(
WSMessage(WSMsgType.PONG, payload, ""), len(payload)
)
elif (
opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
and self._opcode is None
):
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
)
else:
# load text/binary
if not fin:
# got partial frame payload
if opcode != WSMsgType.CONTINUATION:
self._opcode = opcode
self._partial.extend(payload)
if self._max_msg_size and len(self._partial) >= self._max_msg_size:
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Message size {} exceeds limit {}".format(
len(self._partial), self._max_msg_size
),
)
else:
                    # The previous frame was not finished;
                    # we should get a continuation opcode.
if self._partial:
if opcode != WSMsgType.CONTINUATION:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
"The opcode in non-fin frame is expected "
"to be zero, got {!r}".format(opcode),
)
if opcode == WSMsgType.CONTINUATION:
assert self._opcode is not None
opcode = self._opcode
self._opcode = None
self._partial.extend(payload)
if self._max_msg_size and len(self._partial) >= self._max_msg_size:
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Message size {} exceeds limit {}".format(
len(self._partial), self._max_msg_size
),
)
                    # Decompression must be performed after all frames
                    # of the message have been received.
if compressed:
self._partial.extend(_WS_DEFLATE_TRAILING)
payload_merged = self._decompressobj.decompress(
self._partial, self._max_msg_size
)
if self._decompressobj.unconsumed_tail:
left = len(self._decompressobj.unconsumed_tail)
raise WebSocketError(
WSCloseCode.MESSAGE_TOO_BIG,
"Decompressed message size {} exceeds limit {}".format(
self._max_msg_size + left, self._max_msg_size
),
)
else:
payload_merged = bytes(self._partial)
self._partial.clear()
if opcode == WSMsgType.TEXT:
try:
text = payload_merged.decode("utf-8")
self.queue.feed_data(
WSMessage(WSMsgType.TEXT, text, ""), len(text)
)
except UnicodeDecodeError as exc:
raise WebSocketError(
WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
) from exc
else:
self.queue.feed_data(
WSMessage(WSMsgType.BINARY, payload_merged, ""),
len(payload_merged),
)
return False, b""
def parse_frame(
self, buf: bytes
) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
"""Return the next frame from the socket."""
frames = []
if self._tail:
buf, self._tail = self._tail + buf, b""
start_pos = 0
buf_length = len(buf)
while True:
# read header
if self._state == WSParserState.READ_HEADER:
if buf_length - start_pos >= 2:
data = buf[start_pos : start_pos + 2]
start_pos += 2
first_byte, second_byte = data
fin = (first_byte >> 7) & 1
rsv1 = (first_byte >> 6) & 1
rsv2 = (first_byte >> 5) & 1
rsv3 = (first_byte >> 4) & 1
opcode = first_byte & 0xF
# frame-fin = %x0 ; more frames of this message follow
# / %x1 ; final frame of this message
# frame-rsv1 = %x0 ;
# 1 bit, MUST be 0 unless negotiated otherwise
# frame-rsv2 = %x0 ;
# 1 bit, MUST be 0 unless negotiated otherwise
# frame-rsv3 = %x0 ;
# 1 bit, MUST be 0 unless negotiated otherwise
#
# Remove rsv1 from this test for deflate development
if rsv2 or rsv3 or (rsv1 and not self._compress):
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
"Received frame with non-zero reserved bits",
)
if opcode > 0x7 and fin == 0:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
"Received fragmented control frame",
)
has_mask = (second_byte >> 7) & 1
length = second_byte & 0x7F
# Control frames MUST have a payload
# length of 125 bytes or less
if opcode > 0x7 and length > 125:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
"Control frame payload cannot be " "larger than 125 bytes",
)
                    # Set the compress status if the previous frame was FIN
                    # or if this is the first fragment; raise an error for a
                    # non-first fragment with rsv1 = 0x1.
if self._frame_fin or self._compressed is None:
self._compressed = True if rsv1 else False
elif rsv1:
raise WebSocketError(
WSCloseCode.PROTOCOL_ERROR,
"Received frame with non-zero reserved bits",
)
self._frame_fin = bool(fin)
self._frame_opcode = opcode
self._has_mask = bool(has_mask)
self._payload_length_flag = length
self._state = WSParserState.READ_PAYLOAD_LENGTH
else:
break
# read payload length
if self._state == WSParserState.READ_PAYLOAD_LENGTH:
length = self._payload_length_flag
if length == 126:
if buf_length - start_pos >= 2:
data = buf[start_pos : start_pos + 2]
start_pos += 2
length = UNPACK_LEN2(data)[0]
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD
)
else:
break
elif length > 126:
if buf_length - start_pos >= 8:
data = buf[start_pos : start_pos + 8]
start_pos += 8
length = UNPACK_LEN3(data)[0]
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD
)
else:
break
else:
self._payload_length = length
self._state = (
WSParserState.READ_PAYLOAD_MASK
if self._has_mask
else WSParserState.READ_PAYLOAD
)
# read payload mask
if self._state == WSParserState.READ_PAYLOAD_MASK:
if buf_length - start_pos >= 4:
self._frame_mask = buf[start_pos : start_pos + 4]
start_pos += 4
self._state = WSParserState.READ_PAYLOAD
else:
break
if self._state == WSParserState.READ_PAYLOAD:
length = self._payload_length
payload = self._frame_payload
chunk_len = buf_length - start_pos
if length >= chunk_len:
self._payload_length = length - chunk_len
payload.extend(buf[start_pos:])
start_pos = buf_length
else:
self._payload_length = 0
payload.extend(buf[start_pos : start_pos + length])
start_pos = start_pos + length
if self._payload_length == 0:
if self._has_mask:
assert self._frame_mask is not None
_websocket_mask(self._frame_mask, payload)
frames.append(
(self._frame_fin, self._frame_opcode, payload, self._compressed)
)
self._frame_payload = bytearray()
self._state = WSParserState.READ_HEADER
else:
break
self._tail = buf[start_pos:]
return frames
class WebSocketWriter:
def __init__(
self,
protocol: BaseProtocol,
transport: asyncio.Transport,
*,
use_mask: bool = False,
limit: int = DEFAULT_LIMIT,
random: Any = random.Random(),
compress: int = 0,
notakeover: bool = False,
) -> None:
self.protocol = protocol
self.transport = transport
self.use_mask = use_mask
self.randrange = random.randrange
self.compress = compress
self.notakeover = notakeover
self._closing = False
self._limit = limit
self._output_size = 0
self._compressobj: Any = None # actually compressobj
async def _send_frame(
self, message: bytes, opcode: int, compress: Optional[int] = None
) -> None:
"""Send a frame over the websocket with message as its payload."""
if self._closing and not (opcode & WSMsgType.CLOSE):
raise ConnectionResetError("Cannot write to closing transport")
rsv = 0
# Only compress larger packets (disabled)
        # Do small packets need to be compressed?
# if self.compress and opcode < 8 and len(message) > 124:
if (compress or self.compress) and opcode < 8:
if compress:
# Do not set self._compress if compressing is for this frame
compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress)
else: # self.compress
if not self._compressobj:
self._compressobj = zlib.compressobj(
level=zlib.Z_BEST_SPEED, wbits=-self.compress
)
compressobj = self._compressobj
message = compressobj.compress(message)
message = message + compressobj.flush(
zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
)
if message.endswith(_WS_DEFLATE_TRAILING):
message = message[:-4]
rsv = rsv | 0x40
msg_length = len(message)
use_mask = self.use_mask
if use_mask:
mask_bit = 0x80
else:
mask_bit = 0
if msg_length < 126:
header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
elif msg_length < (1 << 16):
header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
else:
header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
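        # Illustrative header bytes (sketch): an unmasked final TEXT frame
        # with a 5-byte payload packs to PACK_LEN1(0x80 | 0x1, 5), i.e.
        # b"\x81\x05"; a 300-byte payload takes the 126 marker path and
        # packs to b"\x81\x7e\x01\x2c".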
if use_mask:
mask = self.randrange(0, 0xFFFFFFFF)
mask = mask.to_bytes(4, "big")
message = bytearray(message)
_websocket_mask(mask, message)
self._write(header + mask + message)
self._output_size += len(header) + len(mask) + len(message)
else:
if len(message) > MSG_SIZE:
self._write(header)
self._write(message)
else:
self._write(header + message)
self._output_size += len(header) + len(message)
if self._output_size > self._limit:
self._output_size = 0
await self.protocol._drain_helper()
def _write(self, data: bytes) -> None:
if self.transport is None or self.transport.is_closing():
raise ConnectionResetError("Cannot write to closing transport")
self.transport.write(data)
async def pong(self, message: bytes = b"") -> None:
"""Send pong message."""
if isinstance(message, str):
message = message.encode("utf-8")
await self._send_frame(message, WSMsgType.PONG)
async def ping(self, message: bytes = b"") -> None:
"""Send ping message."""
if isinstance(message, str):
message = message.encode("utf-8")
await self._send_frame(message, WSMsgType.PING)
async def send(
self,
message: Union[str, bytes],
binary: bool = False,
compress: Optional[int] = None,
) -> None:
"""Send a frame over the websocket with message as its payload."""
if isinstance(message, str):
message = message.encode("utf-8")
if binary:
await self._send_frame(message, WSMsgType.BINARY, compress)
else:
await self._send_frame(message, WSMsgType.TEXT, compress)
async def close(self, code: int = 1000, message: bytes = b"") -> None:
"""Close the websocket, sending the specified code and message."""
if isinstance(message, str):
message = message.encode("utf-8")
try:
await self._send_frame(
PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
)
finally:
self._closing = True
| 25,299 | Python | 35.039886 | 88 | 0.504131 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/__init__.py | __version__ = "3.8.3"
from typing import Tuple
from . import hdrs as hdrs
from .client import (
BaseConnector as BaseConnector,
ClientConnectionError as ClientConnectionError,
ClientConnectorCertificateError as ClientConnectorCertificateError,
ClientConnectorError as ClientConnectorError,
ClientConnectorSSLError as ClientConnectorSSLError,
ClientError as ClientError,
ClientHttpProxyError as ClientHttpProxyError,
ClientOSError as ClientOSError,
ClientPayloadError as ClientPayloadError,
ClientProxyConnectionError as ClientProxyConnectionError,
ClientRequest as ClientRequest,
ClientResponse as ClientResponse,
ClientResponseError as ClientResponseError,
ClientSession as ClientSession,
ClientSSLError as ClientSSLError,
ClientTimeout as ClientTimeout,
ClientWebSocketResponse as ClientWebSocketResponse,
ContentTypeError as ContentTypeError,
Fingerprint as Fingerprint,
InvalidURL as InvalidURL,
NamedPipeConnector as NamedPipeConnector,
RequestInfo as RequestInfo,
ServerConnectionError as ServerConnectionError,
ServerDisconnectedError as ServerDisconnectedError,
ServerFingerprintMismatch as ServerFingerprintMismatch,
ServerTimeoutError as ServerTimeoutError,
TCPConnector as TCPConnector,
TooManyRedirects as TooManyRedirects,
UnixConnector as UnixConnector,
WSServerHandshakeError as WSServerHandshakeError,
request as request,
)
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
from .formdata import FormData as FormData
from .helpers import BasicAuth, ChainMapProxy, ETag
from .http import (
HttpVersion as HttpVersion,
HttpVersion10 as HttpVersion10,
HttpVersion11 as HttpVersion11,
WebSocketError as WebSocketError,
WSCloseCode as WSCloseCode,
WSMessage as WSMessage,
WSMsgType as WSMsgType,
)
from .multipart import (
BadContentDispositionHeader as BadContentDispositionHeader,
BadContentDispositionParam as BadContentDispositionParam,
BodyPartReader as BodyPartReader,
MultipartReader as MultipartReader,
MultipartWriter as MultipartWriter,
content_disposition_filename as content_disposition_filename,
parse_content_disposition as parse_content_disposition,
)
from .payload import (
PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
AsyncIterablePayload as AsyncIterablePayload,
BufferedReaderPayload as BufferedReaderPayload,
BytesIOPayload as BytesIOPayload,
BytesPayload as BytesPayload,
IOBasePayload as IOBasePayload,
JsonPayload as JsonPayload,
Payload as Payload,
StringIOPayload as StringIOPayload,
StringPayload as StringPayload,
TextIOPayload as TextIOPayload,
get_payload as get_payload,
payload_type as payload_type,
)
from .payload_streamer import streamer as streamer
from .resolver import (
AsyncResolver as AsyncResolver,
DefaultResolver as DefaultResolver,
ThreadedResolver as ThreadedResolver,
)
from .streams import (
EMPTY_PAYLOAD as EMPTY_PAYLOAD,
DataQueue as DataQueue,
EofStream as EofStream,
FlowControlDataQueue as FlowControlDataQueue,
StreamReader as StreamReader,
)
from .tracing import (
TraceConfig as TraceConfig,
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
TraceDnsCacheHitParams as TraceDnsCacheHitParams,
TraceDnsCacheMissParams as TraceDnsCacheMissParams,
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
TraceRequestChunkSentParams as TraceRequestChunkSentParams,
TraceRequestEndParams as TraceRequestEndParams,
TraceRequestExceptionParams as TraceRequestExceptionParams,
TraceRequestRedirectParams as TraceRequestRedirectParams,
TraceRequestStartParams as TraceRequestStartParams,
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
)
__all__: Tuple[str, ...] = (
"hdrs",
# client
"BaseConnector",
"ClientConnectionError",
"ClientConnectorCertificateError",
"ClientConnectorError",
"ClientConnectorSSLError",
"ClientError",
"ClientHttpProxyError",
"ClientOSError",
"ClientPayloadError",
"ClientProxyConnectionError",
"ClientResponse",
"ClientRequest",
"ClientResponseError",
"ClientSSLError",
"ClientSession",
"ClientTimeout",
"ClientWebSocketResponse",
"ContentTypeError",
"Fingerprint",
"InvalidURL",
"RequestInfo",
"ServerConnectionError",
"ServerDisconnectedError",
"ServerFingerprintMismatch",
"ServerTimeoutError",
"TCPConnector",
"TooManyRedirects",
"UnixConnector",
"NamedPipeConnector",
"WSServerHandshakeError",
"request",
# cookiejar
"CookieJar",
"DummyCookieJar",
# formdata
"FormData",
# helpers
"BasicAuth",
"ChainMapProxy",
"ETag",
# http
"HttpVersion",
"HttpVersion10",
"HttpVersion11",
"WSMsgType",
"WSCloseCode",
"WSMessage",
"WebSocketError",
# multipart
"BadContentDispositionHeader",
"BadContentDispositionParam",
"BodyPartReader",
"MultipartReader",
"MultipartWriter",
"content_disposition_filename",
"parse_content_disposition",
# payload
"AsyncIterablePayload",
"BufferedReaderPayload",
"BytesIOPayload",
"BytesPayload",
"IOBasePayload",
"JsonPayload",
"PAYLOAD_REGISTRY",
"Payload",
"StringIOPayload",
"StringPayload",
"TextIOPayload",
"get_payload",
"payload_type",
# payload_streamer
"streamer",
# resolver
"AsyncResolver",
"DefaultResolver",
"ThreadedResolver",
# streams
"DataQueue",
"EMPTY_PAYLOAD",
"EofStream",
"FlowControlDataQueue",
"StreamReader",
# tracing
"TraceConfig",
"TraceConnectionCreateEndParams",
"TraceConnectionCreateStartParams",
"TraceConnectionQueuedEndParams",
"TraceConnectionQueuedStartParams",
"TraceConnectionReuseconnParams",
"TraceDnsCacheHitParams",
"TraceDnsCacheMissParams",
"TraceDnsResolveHostEndParams",
"TraceDnsResolveHostStartParams",
"TraceRequestChunkSentParams",
"TraceRequestEndParams",
"TraceRequestExceptionParams",
"TraceRequestRedirectParams",
"TraceRequestStartParams",
"TraceResponseChunkReceivedParams",
)
try:
from .worker import GunicornUVLoopWebWorker, GunicornWebWorker
__all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
except ImportError: # pragma: no cover
pass
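# Quick client usage sketch (illustrative; run inside an event loop):
#
#     async with ClientSession() as session:
#         async with session.get("http://example.com") as resp:
#             body = await resp.text()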
| 6,870 | Python | 30.663594 | 79 | 0.757496 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_exceptions.py | import warnings
from typing import Any, Dict, Iterable, List, Optional, Set # noqa
from yarl import URL
from .typedefs import LooseHeaders, StrOrURL
from .web_response import Response
__all__ = (
"HTTPException",
"HTTPError",
"HTTPRedirection",
"HTTPSuccessful",
"HTTPOk",
"HTTPCreated",
"HTTPAccepted",
"HTTPNonAuthoritativeInformation",
"HTTPNoContent",
"HTTPResetContent",
"HTTPPartialContent",
"HTTPMultipleChoices",
"HTTPMovedPermanently",
"HTTPFound",
"HTTPSeeOther",
"HTTPNotModified",
"HTTPUseProxy",
"HTTPTemporaryRedirect",
"HTTPPermanentRedirect",
"HTTPClientError",
"HTTPBadRequest",
"HTTPUnauthorized",
"HTTPPaymentRequired",
"HTTPForbidden",
"HTTPNotFound",
"HTTPMethodNotAllowed",
"HTTPNotAcceptable",
"HTTPProxyAuthenticationRequired",
"HTTPRequestTimeout",
"HTTPConflict",
"HTTPGone",
"HTTPLengthRequired",
"HTTPPreconditionFailed",
"HTTPRequestEntityTooLarge",
"HTTPRequestURITooLong",
"HTTPUnsupportedMediaType",
"HTTPRequestRangeNotSatisfiable",
"HTTPExpectationFailed",
"HTTPMisdirectedRequest",
"HTTPUnprocessableEntity",
"HTTPFailedDependency",
"HTTPUpgradeRequired",
"HTTPPreconditionRequired",
"HTTPTooManyRequests",
"HTTPRequestHeaderFieldsTooLarge",
"HTTPUnavailableForLegalReasons",
"HTTPServerError",
"HTTPInternalServerError",
"HTTPNotImplemented",
"HTTPBadGateway",
"HTTPServiceUnavailable",
"HTTPGatewayTimeout",
"HTTPVersionNotSupported",
"HTTPVariantAlsoNegotiates",
"HTTPInsufficientStorage",
"HTTPNotExtended",
"HTTPNetworkAuthenticationRequired",
)
############################################################
# HTTP Exceptions
############################################################
class HTTPException(Response, Exception):
    # Subclasses should set:
    # status_code = 200
status_code = -1
empty_body = False
__http_exception__ = True
def __init__(
self,
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
body: Any = None,
text: Optional[str] = None,
content_type: Optional[str] = None,
) -> None:
if body is not None:
warnings.warn(
"body argument is deprecated for http web exceptions",
DeprecationWarning,
)
Response.__init__(
self,
status=self.status_code,
headers=headers,
reason=reason,
body=body,
text=text,
content_type=content_type,
)
Exception.__init__(self, self.reason)
if self.body is None and not self.empty_body:
self.text = f"{self.status}: {self.reason}"
def __bool__(self) -> bool:
return True
class HTTPError(HTTPException):
"""Base class for exceptions with status codes in the 400s and 500s."""
class HTTPRedirection(HTTPException):
"""Base class for exceptions with status codes in the 300s."""
class HTTPSuccessful(HTTPException):
"""Base class for exceptions with status codes in the 200s."""
class HTTPOk(HTTPSuccessful):
status_code = 200
class HTTPCreated(HTTPSuccessful):
status_code = 201
class HTTPAccepted(HTTPSuccessful):
status_code = 202
class HTTPNonAuthoritativeInformation(HTTPSuccessful):
status_code = 203
class HTTPNoContent(HTTPSuccessful):
status_code = 204
empty_body = True
class HTTPResetContent(HTTPSuccessful):
status_code = 205
empty_body = True
class HTTPPartialContent(HTTPSuccessful):
status_code = 206
############################################################
# 3xx redirection
############################################################
class _HTTPMove(HTTPRedirection):
def __init__(
self,
location: StrOrURL,
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
body: Any = None,
text: Optional[str] = None,
content_type: Optional[str] = None,
) -> None:
if not location:
raise ValueError("HTTP redirects need a location to redirect to.")
super().__init__(
headers=headers,
reason=reason,
body=body,
text=text,
content_type=content_type,
)
self.headers["Location"] = str(URL(location))
self.location = location
class HTTPMultipleChoices(_HTTPMove):
status_code = 300
class HTTPMovedPermanently(_HTTPMove):
status_code = 301
class HTTPFound(_HTTPMove):
status_code = 302
# This one is safe after a POST (the redirected location will be
# retrieved with GET):
class HTTPSeeOther(_HTTPMove):
status_code = 303
class HTTPNotModified(HTTPRedirection):
# FIXME: this should include a date or etag header
status_code = 304
empty_body = True
class HTTPUseProxy(_HTTPMove):
# Not a move, but looks a little like one
status_code = 305
class HTTPTemporaryRedirect(_HTTPMove):
status_code = 307
class HTTPPermanentRedirect(_HTTPMove):
status_code = 308
############################################################
# 4xx client error
############################################################
class HTTPClientError(HTTPError):
pass
class HTTPBadRequest(HTTPClientError):
status_code = 400
class HTTPUnauthorized(HTTPClientError):
status_code = 401
class HTTPPaymentRequired(HTTPClientError):
status_code = 402
class HTTPForbidden(HTTPClientError):
status_code = 403
class HTTPNotFound(HTTPClientError):
status_code = 404
class HTTPMethodNotAllowed(HTTPClientError):
status_code = 405
def __init__(
self,
method: str,
allowed_methods: Iterable[str],
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
body: Any = None,
text: Optional[str] = None,
content_type: Optional[str] = None,
) -> None:
allow = ",".join(sorted(allowed_methods))
super().__init__(
headers=headers,
reason=reason,
body=body,
text=text,
content_type=content_type,
)
self.headers["Allow"] = allow
self.allowed_methods: Set[str] = set(allowed_methods)
self.method = method.upper()
class HTTPNotAcceptable(HTTPClientError):
status_code = 406
class HTTPProxyAuthenticationRequired(HTTPClientError):
status_code = 407
class HTTPRequestTimeout(HTTPClientError):
status_code = 408
class HTTPConflict(HTTPClientError):
status_code = 409
class HTTPGone(HTTPClientError):
status_code = 410
class HTTPLengthRequired(HTTPClientError):
status_code = 411
class HTTPPreconditionFailed(HTTPClientError):
status_code = 412
class HTTPRequestEntityTooLarge(HTTPClientError):
status_code = 413
def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None:
kwargs.setdefault(
"text",
"Maximum request body size {} exceeded, "
"actual body size {}".format(max_size, actual_size),
)
super().__init__(**kwargs)
class HTTPRequestURITooLong(HTTPClientError):
status_code = 414
class HTTPUnsupportedMediaType(HTTPClientError):
status_code = 415
class HTTPRequestRangeNotSatisfiable(HTTPClientError):
status_code = 416
class HTTPExpectationFailed(HTTPClientError):
status_code = 417
class HTTPMisdirectedRequest(HTTPClientError):
status_code = 421
class HTTPUnprocessableEntity(HTTPClientError):
status_code = 422
class HTTPFailedDependency(HTTPClientError):
status_code = 424
class HTTPUpgradeRequired(HTTPClientError):
status_code = 426
class HTTPPreconditionRequired(HTTPClientError):
status_code = 428
class HTTPTooManyRequests(HTTPClientError):
status_code = 429
class HTTPRequestHeaderFieldsTooLarge(HTTPClientError):
status_code = 431
class HTTPUnavailableForLegalReasons(HTTPClientError):
status_code = 451
def __init__(
self,
link: str,
*,
headers: Optional[LooseHeaders] = None,
reason: Optional[str] = None,
body: Any = None,
text: Optional[str] = None,
content_type: Optional[str] = None,
) -> None:
super().__init__(
headers=headers,
reason=reason,
body=body,
text=text,
content_type=content_type,
)
self.headers["Link"] = '<%s>; rel="blocked-by"' % link
self.link = link
############################################################
# 5xx Server Error
############################################################
# Response status codes beginning with the digit "5" indicate cases in
# which the server is aware that it has erred or is incapable of
# performing the request. Except when responding to a HEAD request, the
# server SHOULD include an entity containing an explanation of the error
# situation, and whether it is a temporary or permanent condition. User
# agents SHOULD display any included entity to the user. These response
# codes are applicable to any request method.
class HTTPServerError(HTTPError):
pass
class HTTPInternalServerError(HTTPServerError):
status_code = 500
class HTTPNotImplemented(HTTPServerError):
status_code = 501
class HTTPBadGateway(HTTPServerError):
status_code = 502
class HTTPServiceUnavailable(HTTPServerError):
status_code = 503
class HTTPGatewayTimeout(HTTPServerError):
status_code = 504
class HTTPVersionNotSupported(HTTPServerError):
status_code = 505
class HTTPVariantAlsoNegotiates(HTTPServerError):
status_code = 506
class HTTPInsufficientStorage(HTTPServerError):
status_code = 507
class HTTPNotExtended(HTTPServerError):
status_code = 510
class HTTPNetworkAuthenticationRequired(HTTPServerError):
status_code = 511
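# Usage sketch (illustrative): handlers typically raise these exceptions
# instead of returning them, e.g.
#
#     async def handler(request):
#         raise HTTPNotFound(text="no such resource")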
| 10,098 | Python | 21.848416 | 83 | 0.630125 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_server.py | """Low level HTTP server."""
import asyncio
from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa
from .abc import AbstractStreamWriter
from .helpers import get_running_loop
from .http_parser import RawRequestMessage
from .streams import StreamReader
from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler
from .web_request import BaseRequest
__all__ = ("Server",)
class Server:
def __init__(
self,
handler: _RequestHandler,
*,
request_factory: Optional[_RequestFactory] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
**kwargs: Any
) -> None:
self._loop = get_running_loop(loop)
self._connections: Dict[RequestHandler, asyncio.Transport] = {}
self._kwargs = kwargs
self.requests_count = 0
self.request_handler = handler
self.request_factory = request_factory or self._make_request
@property
def connections(self) -> List[RequestHandler]:
return list(self._connections.keys())
def connection_made(
self, handler: RequestHandler, transport: asyncio.Transport
) -> None:
self._connections[handler] = transport
def connection_lost(
self, handler: RequestHandler, exc: Optional[BaseException] = None
) -> None:
if handler in self._connections:
del self._connections[handler]
def _make_request(
self,
message: RawRequestMessage,
payload: StreamReader,
protocol: RequestHandler,
writer: AbstractStreamWriter,
task: "asyncio.Task[None]",
) -> BaseRequest:
return BaseRequest(message, payload, protocol, writer, task, self._loop)
async def shutdown(self, timeout: Optional[float] = None) -> None:
coros = [conn.shutdown(timeout) for conn in self._connections]
await asyncio.gather(*coros)
self._connections.clear()
def __call__(self) -> RequestHandler:
return RequestHandler(self, loop=self._loop, **self._kwargs)
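# Low-level usage sketch (illustrative; `handler` is an assumed coroutine):
#
#     async def handler(request):
#         return web.Response(text="OK")
#
#     server = Server(handler)
#     # serve it e.g. via aiohttp.web_runner.ServerRunner plus a site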
| 2,050 | Python | 31.555555 | 80 | 0.656098 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web.py | import asyncio
import logging
import socket
import sys
from argparse import ArgumentParser
from collections.abc import Iterable
from importlib import import_module
from typing import (
Any,
Awaitable,
Callable,
Iterable as TypingIterable,
List,
Optional,
Set,
Type,
Union,
cast,
)
from .abc import AbstractAccessLogger
from .helpers import all_tasks
from .log import access_logger
from .web_app import Application as Application, CleanupError as CleanupError
from .web_exceptions import (
HTTPAccepted as HTTPAccepted,
HTTPBadGateway as HTTPBadGateway,
HTTPBadRequest as HTTPBadRequest,
HTTPClientError as HTTPClientError,
HTTPConflict as HTTPConflict,
HTTPCreated as HTTPCreated,
HTTPError as HTTPError,
HTTPException as HTTPException,
HTTPExpectationFailed as HTTPExpectationFailed,
HTTPFailedDependency as HTTPFailedDependency,
HTTPForbidden as HTTPForbidden,
HTTPFound as HTTPFound,
HTTPGatewayTimeout as HTTPGatewayTimeout,
HTTPGone as HTTPGone,
HTTPInsufficientStorage as HTTPInsufficientStorage,
HTTPInternalServerError as HTTPInternalServerError,
HTTPLengthRequired as HTTPLengthRequired,
HTTPMethodNotAllowed as HTTPMethodNotAllowed,
HTTPMisdirectedRequest as HTTPMisdirectedRequest,
HTTPMovedPermanently as HTTPMovedPermanently,
HTTPMultipleChoices as HTTPMultipleChoices,
HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
HTTPNoContent as HTTPNoContent,
HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
HTTPNotAcceptable as HTTPNotAcceptable,
HTTPNotExtended as HTTPNotExtended,
HTTPNotFound as HTTPNotFound,
HTTPNotImplemented as HTTPNotImplemented,
HTTPNotModified as HTTPNotModified,
HTTPOk as HTTPOk,
HTTPPartialContent as HTTPPartialContent,
HTTPPaymentRequired as HTTPPaymentRequired,
HTTPPermanentRedirect as HTTPPermanentRedirect,
HTTPPreconditionFailed as HTTPPreconditionFailed,
HTTPPreconditionRequired as HTTPPreconditionRequired,
HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
HTTPRedirection as HTTPRedirection,
HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
HTTPRequestTimeout as HTTPRequestTimeout,
HTTPRequestURITooLong as HTTPRequestURITooLong,
HTTPResetContent as HTTPResetContent,
HTTPSeeOther as HTTPSeeOther,
HTTPServerError as HTTPServerError,
HTTPServiceUnavailable as HTTPServiceUnavailable,
HTTPSuccessful as HTTPSuccessful,
HTTPTemporaryRedirect as HTTPTemporaryRedirect,
HTTPTooManyRequests as HTTPTooManyRequests,
HTTPUnauthorized as HTTPUnauthorized,
HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
HTTPUnprocessableEntity as HTTPUnprocessableEntity,
HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
HTTPUpgradeRequired as HTTPUpgradeRequired,
HTTPUseProxy as HTTPUseProxy,
HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
HTTPVersionNotSupported as HTTPVersionNotSupported,
)
from .web_fileresponse import FileResponse as FileResponse
from .web_log import AccessLogger
from .web_middlewares import (
middleware as middleware,
normalize_path_middleware as normalize_path_middleware,
)
from .web_protocol import (
PayloadAccessError as PayloadAccessError,
RequestHandler as RequestHandler,
RequestPayloadError as RequestPayloadError,
)
from .web_request import (
BaseRequest as BaseRequest,
FileField as FileField,
Request as Request,
)
from .web_response import (
ContentCoding as ContentCoding,
Response as Response,
StreamResponse as StreamResponse,
json_response as json_response,
)
from .web_routedef import (
AbstractRouteDef as AbstractRouteDef,
RouteDef as RouteDef,
RouteTableDef as RouteTableDef,
StaticDef as StaticDef,
delete as delete,
get as get,
head as head,
options as options,
patch as patch,
post as post,
put as put,
route as route,
static as static,
view as view,
)
from .web_runner import (
AppRunner as AppRunner,
BaseRunner as BaseRunner,
BaseSite as BaseSite,
GracefulExit as GracefulExit,
NamedPipeSite as NamedPipeSite,
ServerRunner as ServerRunner,
SockSite as SockSite,
TCPSite as TCPSite,
UnixSite as UnixSite,
)
from .web_server import Server as Server
from .web_urldispatcher import (
AbstractResource as AbstractResource,
AbstractRoute as AbstractRoute,
DynamicResource as DynamicResource,
PlainResource as PlainResource,
PrefixedSubAppResource as PrefixedSubAppResource,
Resource as Resource,
ResourceRoute as ResourceRoute,
StaticResource as StaticResource,
UrlDispatcher as UrlDispatcher,
UrlMappingMatchInfo as UrlMappingMatchInfo,
View as View,
)
from .web_ws import (
WebSocketReady as WebSocketReady,
WebSocketResponse as WebSocketResponse,
WSMsgType as WSMsgType,
)
__all__ = (
# web_app
"Application",
"CleanupError",
# web_exceptions
"HTTPAccepted",
"HTTPBadGateway",
"HTTPBadRequest",
"HTTPClientError",
"HTTPConflict",
"HTTPCreated",
"HTTPError",
"HTTPException",
"HTTPExpectationFailed",
"HTTPFailedDependency",
"HTTPForbidden",
"HTTPFound",
"HTTPGatewayTimeout",
"HTTPGone",
"HTTPInsufficientStorage",
"HTTPInternalServerError",
"HTTPLengthRequired",
"HTTPMethodNotAllowed",
"HTTPMisdirectedRequest",
"HTTPMovedPermanently",
"HTTPMultipleChoices",
"HTTPNetworkAuthenticationRequired",
"HTTPNoContent",
"HTTPNonAuthoritativeInformation",
"HTTPNotAcceptable",
"HTTPNotExtended",
"HTTPNotFound",
"HTTPNotImplemented",
"HTTPNotModified",
"HTTPOk",
"HTTPPartialContent",
"HTTPPaymentRequired",
"HTTPPermanentRedirect",
"HTTPPreconditionFailed",
"HTTPPreconditionRequired",
"HTTPProxyAuthenticationRequired",
"HTTPRedirection",
"HTTPRequestEntityTooLarge",
"HTTPRequestHeaderFieldsTooLarge",
"HTTPRequestRangeNotSatisfiable",
"HTTPRequestTimeout",
"HTTPRequestURITooLong",
"HTTPResetContent",
"HTTPSeeOther",
"HTTPServerError",
"HTTPServiceUnavailable",
"HTTPSuccessful",
"HTTPTemporaryRedirect",
"HTTPTooManyRequests",
"HTTPUnauthorized",
"HTTPUnavailableForLegalReasons",
"HTTPUnprocessableEntity",
"HTTPUnsupportedMediaType",
"HTTPUpgradeRequired",
"HTTPUseProxy",
"HTTPVariantAlsoNegotiates",
"HTTPVersionNotSupported",
# web_fileresponse
"FileResponse",
# web_middlewares
"middleware",
"normalize_path_middleware",
# web_protocol
"PayloadAccessError",
"RequestHandler",
"RequestPayloadError",
# web_request
"BaseRequest",
"FileField",
"Request",
# web_response
"ContentCoding",
"Response",
"StreamResponse",
"json_response",
# web_routedef
"AbstractRouteDef",
"RouteDef",
"RouteTableDef",
"StaticDef",
"delete",
"get",
"head",
"options",
"patch",
"post",
"put",
"route",
"static",
"view",
# web_runner
"AppRunner",
"BaseRunner",
"BaseSite",
"GracefulExit",
"ServerRunner",
"SockSite",
"TCPSite",
"UnixSite",
"NamedPipeSite",
# web_server
"Server",
# web_urldispatcher
"AbstractResource",
"AbstractRoute",
"DynamicResource",
"PlainResource",
"PrefixedSubAppResource",
"Resource",
"ResourceRoute",
"StaticResource",
"UrlDispatcher",
"UrlMappingMatchInfo",
"View",
# web_ws
"WebSocketReady",
"WebSocketResponse",
"WSMsgType",
# web
"run_app",
)
try:
from ssl import SSLContext
except ImportError: # pragma: no cover
SSLContext = Any # type: ignore[misc,assignment]
HostSequence = TypingIterable[str]
async def _run_app(
app: Union[Application, Awaitable[Application]],
*,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
path: Optional[str] = None,
sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
shutdown_timeout: float = 60.0,
keepalive_timeout: float = 75.0,
ssl_context: Optional[SSLContext] = None,
print: Callable[..., None] = print,
backlog: int = 128,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log_format: str = AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger] = access_logger,
handle_signals: bool = True,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
) -> None:
    # An internal function that does all the dirty work of running the application
if asyncio.iscoroutine(app):
app = await app # type: ignore[misc]
app = cast(Application, app)
runner = AppRunner(
app,
handle_signals=handle_signals,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log,
keepalive_timeout=keepalive_timeout,
)
await runner.setup()
sites: List[BaseSite] = []
try:
if host is not None:
if isinstance(host, (str, bytes, bytearray, memoryview)):
sites.append(
TCPSite(
runner,
host,
port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
else:
for h in host:
sites.append(
TCPSite(
runner,
h,
port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
elif path is None and sock is None or port is not None:
sites.append(
TCPSite(
runner,
port=port,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
if path is not None:
if isinstance(path, (str, bytes, bytearray, memoryview)):
sites.append(
UnixSite(
runner,
path,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
else:
for p in path:
sites.append(
UnixSite(
runner,
p,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
if sock is not None:
if not isinstance(sock, Iterable):
sites.append(
SockSite(
runner,
sock,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
else:
for s in sock:
sites.append(
SockSite(
runner,
s,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
)
for site in sites:
await site.start()
if print: # pragma: no branch
names = sorted(str(s.name) for s in runner.sites)
print(
"======== Running on {} ========\n"
"(Press CTRL+C to quit)".format(", ".join(names))
)
        # Sleep forever in 1-hour intervals; on Windows before Python 3.8,
        # wake up every second so Ctrl+C is handled smoothly.
if sys.platform == "win32" and sys.version_info < (3, 8):
delay = 1
else:
delay = 3600
while True:
await asyncio.sleep(delay)
finally:
await runner.cleanup()
def _cancel_tasks(
to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
) -> None:
if not to_cancel:
return
for task in to_cancel:
task.cancel()
loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
for task in to_cancel:
if task.cancelled():
continue
if task.exception() is not None:
loop.call_exception_handler(
{
"message": "unhandled exception during asyncio.run() shutdown",
"exception": task.exception(),
"task": task,
}
)
def run_app(
app: Union[Application, Awaitable[Application]],
*,
host: Optional[Union[str, HostSequence]] = None,
port: Optional[int] = None,
path: Optional[str] = None,
sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
shutdown_timeout: float = 60.0,
keepalive_timeout: float = 75.0,
ssl_context: Optional[SSLContext] = None,
print: Callable[..., None] = print,
backlog: int = 128,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log_format: str = AccessLogger.LOG_FORMAT,
access_log: Optional[logging.Logger] = access_logger,
handle_signals: bool = True,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
"""Run an app locally"""
if loop is None:
loop = asyncio.new_event_loop()
# Configure if and only if in debugging mode and using the default logger
if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
if access_log.level == logging.NOTSET:
access_log.setLevel(logging.DEBUG)
if not access_log.hasHandlers():
access_log.addHandler(logging.StreamHandler())
main_task = loop.create_task(
_run_app(
app,
host=host,
port=port,
path=path,
sock=sock,
shutdown_timeout=shutdown_timeout,
keepalive_timeout=keepalive_timeout,
ssl_context=ssl_context,
print=print,
backlog=backlog,
access_log_class=access_log_class,
access_log_format=access_log_format,
access_log=access_log,
handle_signals=handle_signals,
reuse_address=reuse_address,
reuse_port=reuse_port,
)
)
try:
asyncio.set_event_loop(loop)
loop.run_until_complete(main_task)
except (GracefulExit, KeyboardInterrupt): # pragma: no cover
pass
finally:
_cancel_tasks({main_task}, loop)
_cancel_tasks(all_tasks(loop), loop)
loop.run_until_complete(loop.shutdown_asyncgens())
loop.close()
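# Minimal usage sketch (illustrative):
#
#     async def index(request):
#         return Response(text="hi")
#
#     app = Application()
#     app.add_routes([get("/", index)])
#     run_app(app, host="127.0.0.1", port=8080)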
def main(argv: List[str]) -> None:
arg_parser = ArgumentParser(
description="aiohttp.web Application server", prog="aiohttp.web"
)
arg_parser.add_argument(
"entry_func",
help=(
"Callable returning the `aiohttp.web.Application` instance to "
"run. Should be specified in the 'module:function' syntax."
),
metavar="entry-func",
)
arg_parser.add_argument(
"-H",
"--hostname",
help="TCP/IP hostname to serve on (default: %(default)r)",
default="localhost",
)
arg_parser.add_argument(
"-P",
"--port",
help="TCP/IP port to serve on (default: %(default)r)",
type=int,
default="8080",
)
arg_parser.add_argument(
"-U",
"--path",
help="Unix file system path to serve on. Specifying a path will cause "
"hostname and port arguments to be ignored.",
)
args, extra_argv = arg_parser.parse_known_args(argv)
# Import logic
mod_str, _, func_str = args.entry_func.partition(":")
if not func_str or not mod_str:
arg_parser.error("'entry-func' not in 'module:function' syntax")
if mod_str.startswith("."):
arg_parser.error("relative module names not supported")
try:
module = import_module(mod_str)
except ImportError as ex:
arg_parser.error(f"unable to import {mod_str}: {ex}")
try:
func = getattr(module, func_str)
except AttributeError:
arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")
# Compatibility logic
if args.path is not None and not hasattr(socket, "AF_UNIX"):
arg_parser.error(
"file system paths not supported by your operating" " environment"
)
logging.basicConfig(level=logging.DEBUG)
app = func(extra_argv)
run_app(app, host=args.hostname, port=args.port, path=args.path)
arg_parser.exit(message="Stopped\n")
if __name__ == "__main__": # pragma: no branch
main(sys.argv[1:]) # pragma: no cover
| 18,081 | Python | 29.699491 | 83 | 0.612356 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/locks.py | import asyncio
import collections
from typing import Any, Deque, Optional
class EventResultOrError:
"""Event asyncio lock helper class.
Wraps the Event asyncio lock allowing either to awake the
locked Tasks without any error or raising an exception.
thanks to @vorpalsmith for the simple design.
"""
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
self._exc: Optional[BaseException] = None
self._event = asyncio.Event()
self._waiters: Deque[asyncio.Future[Any]] = collections.deque()
def set(self, exc: Optional[BaseException] = None) -> None:
self._exc = exc
self._event.set()
async def wait(self) -> Any:
waiter = self._loop.create_task(self._event.wait())
self._waiters.append(waiter)
try:
val = await waiter
finally:
self._waiters.remove(waiter)
if self._exc is not None:
raise self._exc
return val
def cancel(self) -> None:
"""Cancel all waiters"""
for waiter in self._waiters:
waiter.cancel()
| 1,136 | Python | 26.071428 | 71 | 0.611796 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_runner.py | import asyncio
import signal
import socket
from abc import ABC, abstractmethod
from typing import Any, List, Optional, Set
from yarl import URL
from .web_app import Application
from .web_server import Server
try:
from ssl import SSLContext
except ImportError:
SSLContext = object # type: ignore[misc,assignment]
__all__ = (
"BaseSite",
"TCPSite",
"UnixSite",
"NamedPipeSite",
"SockSite",
"BaseRunner",
"AppRunner",
"ServerRunner",
"GracefulExit",
)
class GracefulExit(SystemExit):
code = 1
def _raise_graceful_exit() -> None:
raise GracefulExit()
class BaseSite(ABC):
__slots__ = ("_runner", "_shutdown_timeout", "_ssl_context", "_backlog", "_server")
def __init__(
self,
runner: "BaseRunner",
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
backlog: int = 128,
) -> None:
if runner.server is None:
raise RuntimeError("Call runner.setup() before making a site")
self._runner = runner
self._shutdown_timeout = shutdown_timeout
self._ssl_context = ssl_context
self._backlog = backlog
self._server: Optional[asyncio.AbstractServer] = None
@property
@abstractmethod
def name(self) -> str:
pass # pragma: no cover
@abstractmethod
async def start(self) -> None:
self._runner._reg_site(self)
async def stop(self) -> None:
self._runner._check_site(self)
if self._server is None:
self._runner._unreg_site(self)
return # not started yet
self._server.close()
        # named pipes do not have a wait_closed() method
if hasattr(self._server, "wait_closed"):
await self._server.wait_closed()
await self._runner.shutdown()
assert self._runner.server
await self._runner.server.shutdown(self._shutdown_timeout)
self._runner._unreg_site(self)
class TCPSite(BaseSite):
__slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")
def __init__(
self,
runner: "BaseRunner",
host: Optional[str] = None,
port: Optional[int] = None,
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
backlog: int = 128,
reuse_address: Optional[bool] = None,
reuse_port: Optional[bool] = None,
) -> None:
super().__init__(
runner,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
self._host = host
if port is None:
port = 8443 if self._ssl_context else 8080
self._port = port
self._reuse_address = reuse_address
self._reuse_port = reuse_port
@property
def name(self) -> str:
scheme = "https" if self._ssl_context else "http"
host = "0.0.0.0" if self._host is None else self._host
return str(URL.build(scheme=scheme, host=host, port=self._port))
async def start(self) -> None:
await super().start()
loop = asyncio.get_event_loop()
server = self._runner.server
assert server is not None
self._server = await loop.create_server(
server,
self._host,
self._port,
ssl=self._ssl_context,
backlog=self._backlog,
reuse_address=self._reuse_address,
reuse_port=self._reuse_port,
)
class UnixSite(BaseSite):
__slots__ = ("_path",)
def __init__(
self,
runner: "BaseRunner",
path: str,
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
backlog: int = 128,
) -> None:
super().__init__(
runner,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
self._path = path
@property
def name(self) -> str:
scheme = "https" if self._ssl_context else "http"
return f"{scheme}://unix:{self._path}:"
async def start(self) -> None:
await super().start()
loop = asyncio.get_event_loop()
server = self._runner.server
assert server is not None
self._server = await loop.create_unix_server(
server, self._path, ssl=self._ssl_context, backlog=self._backlog
)
class NamedPipeSite(BaseSite):
__slots__ = ("_path",)
def __init__(
self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
) -> None:
loop = asyncio.get_event_loop()
if not isinstance(
loop, asyncio.ProactorEventLoop # type: ignore[attr-defined]
):
raise RuntimeError(
"Named Pipes only available in proactor" "loop under windows"
)
super().__init__(runner, shutdown_timeout=shutdown_timeout)
self._path = path
@property
def name(self) -> str:
return self._path
async def start(self) -> None:
await super().start()
loop = asyncio.get_event_loop()
server = self._runner.server
assert server is not None
_server = await loop.start_serving_pipe( # type: ignore[attr-defined]
server, self._path
)
self._server = _server[0]
class SockSite(BaseSite):
__slots__ = ("_sock", "_name")
def __init__(
self,
runner: "BaseRunner",
sock: socket.socket,
*,
shutdown_timeout: float = 60.0,
ssl_context: Optional[SSLContext] = None,
backlog: int = 128,
) -> None:
super().__init__(
runner,
shutdown_timeout=shutdown_timeout,
ssl_context=ssl_context,
backlog=backlog,
)
self._sock = sock
scheme = "https" if self._ssl_context else "http"
if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX:
name = f"{scheme}://unix:{sock.getsockname()}:"
else:
host, port = sock.getsockname()[:2]
name = str(URL.build(scheme=scheme, host=host, port=port))
self._name = name
@property
def name(self) -> str:
return self._name
async def start(self) -> None:
await super().start()
loop = asyncio.get_event_loop()
server = self._runner.server
assert server is not None
self._server = await loop.create_server(
server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog
)
class BaseRunner(ABC):
__slots__ = ("_handle_signals", "_kwargs", "_server", "_sites")
def __init__(self, *, handle_signals: bool = False, **kwargs: Any) -> None:
self._handle_signals = handle_signals
self._kwargs = kwargs
self._server: Optional[Server] = None
self._sites: List[BaseSite] = []
@property
def server(self) -> Optional[Server]:
return self._server
@property
def addresses(self) -> List[Any]:
ret: List[Any] = []
for site in self._sites:
server = site._server
if server is not None:
sockets = server.sockets
if sockets is not None:
for sock in sockets:
ret.append(sock.getsockname())
return ret
@property
def sites(self) -> Set[BaseSite]:
return set(self._sites)
async def setup(self) -> None:
loop = asyncio.get_event_loop()
if self._handle_signals:
try:
loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit)
loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit)
except NotImplementedError: # pragma: no cover
# add_signal_handler is not implemented on Windows
pass
self._server = await self._make_server()
@abstractmethod
async def shutdown(self) -> None:
pass # pragma: no cover
async def cleanup(self) -> None:
loop = asyncio.get_event_loop()
        # The loop over sites is intentional: an exception from gather()
        # would leave self._sites in an unpredictable state.
        # The loop guarantees that a site is either deleted on success or
        # still present on failure.
for site in list(self._sites):
await site.stop()
await self._cleanup_server()
self._server = None
if self._handle_signals:
try:
loop.remove_signal_handler(signal.SIGINT)
loop.remove_signal_handler(signal.SIGTERM)
except NotImplementedError: # pragma: no cover
# remove_signal_handler is not implemented on Windows
pass
@abstractmethod
async def _make_server(self) -> Server:
pass # pragma: no cover
@abstractmethod
async def _cleanup_server(self) -> None:
pass # pragma: no cover
def _reg_site(self, site: BaseSite) -> None:
if site in self._sites:
raise RuntimeError(f"Site {site} is already registered in runner {self}")
self._sites.append(site)
def _check_site(self, site: BaseSite) -> None:
if site not in self._sites:
raise RuntimeError(f"Site {site} is not registered in runner {self}")
def _unreg_site(self, site: BaseSite) -> None:
if site not in self._sites:
raise RuntimeError(f"Site {site} is not registered in runner {self}")
self._sites.remove(site)
class ServerRunner(BaseRunner):
"""Low-level web server runner"""
__slots__ = ("_web_server",)
def __init__(
self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
) -> None:
super().__init__(handle_signals=handle_signals, **kwargs)
self._web_server = web_server
async def shutdown(self) -> None:
pass
async def _make_server(self) -> Server:
return self._web_server
async def _cleanup_server(self) -> None:
pass
class AppRunner(BaseRunner):
"""Web Application runner"""
__slots__ = ("_app",)
def __init__(
self, app: Application, *, handle_signals: bool = False, **kwargs: Any
) -> None:
super().__init__(handle_signals=handle_signals, **kwargs)
if not isinstance(app, Application):
raise TypeError(
"The first argument should be web.Application "
"instance, got {!r}".format(app)
)
self._app = app
@property
def app(self) -> Application:
return self._app
async def shutdown(self) -> None:
await self._app.shutdown()
async def _make_server(self) -> Server:
loop = asyncio.get_event_loop()
self._app._set_loop(loop)
self._app.on_startup.freeze()
await self._app.startup()
self._app.freeze()
return self._app._make_handler(loop=loop, **self._kwargs)
async def _cleanup_server(self) -> None:
await self._app.cleanup()
| 11,157 | Python | 28.209424 | 87 | 0.565295 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/test_utils.py | """Utilities shared by tests."""
import asyncio
import contextlib
import gc
import inspect
import ipaddress
import os
import socket
import sys
import warnings
from abc import ABC, abstractmethod
from types import TracebackType
from typing import (
TYPE_CHECKING,
Any,
Callable,
Iterator,
List,
Optional,
Type,
Union,
cast,
)
from unittest import mock
from aiosignal import Signal
from multidict import CIMultiDict, CIMultiDictProxy
from yarl import URL
import aiohttp
from aiohttp.client import _RequestContextManager, _WSRequestContextManager
from . import ClientSession, hdrs
from .abc import AbstractCookieJar
from .client_reqrep import ClientResponse
from .client_ws import ClientWebSocketResponse
from .helpers import PY_38, sentinel
from .http import HttpVersion, RawRequestMessage
from .web import (
Application,
AppRunner,
BaseRunner,
Request,
Server,
ServerRunner,
SockSite,
UrlMappingMatchInfo,
)
from .web_protocol import _RequestHandler
if TYPE_CHECKING: # pragma: no cover
from ssl import SSLContext
else:
SSLContext = None
if PY_38:
from unittest import IsolatedAsyncioTestCase as TestCase
else:
from asynctest import TestCase # type: ignore[no-redef]
REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
def get_unused_port_socket(
host: str, family: socket.AddressFamily = socket.AF_INET
) -> socket.socket:
return get_port_socket(host, 0, family)
def get_port_socket(
host: str, port: int, family: socket.AddressFamily
) -> socket.socket:
s = socket.socket(family, socket.SOCK_STREAM)
if REUSE_ADDRESS:
# Windows has different semantics for SO_REUSEADDR,
# so don't set it. Ref:
# https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind((host, port))
return s
def unused_port() -> int:
"""Return a port that is unused on the current host."""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind(("127.0.0.1", 0))
return cast(int, s.getsockname()[1])
class BaseTestServer(ABC):
__test__ = False
def __init__(
self,
*,
scheme: Union[str, object] = sentinel,
loop: Optional[asyncio.AbstractEventLoop] = None,
host: str = "127.0.0.1",
port: Optional[int] = None,
skip_url_asserts: bool = False,
socket_factory: Callable[
[str, int, socket.AddressFamily], socket.socket
] = get_port_socket,
**kwargs: Any,
) -> None:
self._loop = loop
self.runner: Optional[BaseRunner] = None
self._root: Optional[URL] = None
self.host = host
self.port = port
self._closed = False
self.scheme = scheme
self.skip_url_asserts = skip_url_asserts
self.socket_factory = socket_factory
async def start_server(
self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
) -> None:
if self.runner:
return
self._loop = loop
self._ssl = kwargs.pop("ssl", None)
self.runner = await self._make_runner(**kwargs)
await self.runner.setup()
if not self.port:
self.port = 0
try:
version = ipaddress.ip_address(self.host).version
except ValueError:
version = 4
family = socket.AF_INET6 if version == 6 else socket.AF_INET
_sock = self.socket_factory(self.host, self.port, family)
self.host, self.port = _sock.getsockname()[:2]
site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
await site.start()
server = site._server
assert server is not None
sockets = server.sockets
assert sockets is not None
self.port = sockets[0].getsockname()[1]
if self.scheme is sentinel:
if self._ssl:
scheme = "https"
else:
scheme = "http"
self.scheme = scheme
self._root = URL(f"{self.scheme}://{self.host}:{self.port}")
@abstractmethod # pragma: no cover
async def _make_runner(self, **kwargs: Any) -> BaseRunner:
pass
def make_url(self, path: str) -> URL:
assert self._root is not None
url = URL(path)
if not self.skip_url_asserts:
assert not url.is_absolute()
return self._root.join(url)
else:
return URL(str(self._root) + path)
@property
def started(self) -> bool:
return self.runner is not None
@property
def closed(self) -> bool:
return self._closed
@property
def handler(self) -> Server:
# for backward compatibility
# web.Server instance
runner = self.runner
assert runner is not None
assert runner.server is not None
return runner.server
async def close(self) -> None:
"""Close all fixtures created by the test client.
After that point, the TestClient is no longer usable.
This is an idempotent function: running close multiple times
will not have any additional effects.
close is also run when the object is garbage collected, and on
exit when used as a context manager.
"""
if self.started and not self.closed:
assert self.runner is not None
await self.runner.cleanup()
self._root = None
self.port = None
self._closed = True
def __enter__(self) -> None:
raise TypeError("Use async with instead")
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
# __exit__ should exist in pair with __enter__ but never executed
pass # pragma: no cover
async def __aenter__(self) -> "BaseTestServer":
await self.start_server(loop=self._loop)
return self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_value: Optional[BaseException],
traceback: Optional[TracebackType],
) -> None:
await self.close()
class TestServer(BaseTestServer):
def __init__(
self,
app: Application,
*,
scheme: Union[str, object] = sentinel,
host: str = "127.0.0.1",
port: Optional[int] = None,
**kwargs: Any,
):
self.app = app
super().__init__(scheme=scheme, host=host, port=port, **kwargs)
async def _make_runner(self, **kwargs: Any) -> BaseRunner:
return AppRunner(self.app, **kwargs)
class RawTestServer(BaseTestServer):
def __init__(
self,
handler: _RequestHandler,
*,
scheme: Union[str, object] = sentinel,
host: str = "127.0.0.1",
port: Optional[int] = None,
**kwargs: Any,
) -> None:
self._handler = handler
super().__init__(scheme=scheme, host=host, port=port, **kwargs)
async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
return ServerRunner(srv, debug=debug, **kwargs)
class TestClient:
"""
A test client implementation.
    For writing functional tests against aiohttp-based servers.
"""
__test__ = False
def __init__(
self,
server: BaseTestServer,
*,
cookie_jar: Optional[AbstractCookieJar] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
**kwargs: Any,
) -> None:
if not isinstance(server, BaseTestServer):
raise TypeError(
"server must be TestServer " "instance, found type: %r" % type(server)
)
self._server = server
self._loop = loop
if cookie_jar is None:
cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
self._closed = False
self._responses: List[ClientResponse] = []
self._websockets: List[ClientWebSocketResponse] = []
async def start_server(self) -> None:
await self._server.start_server(loop=self._loop)
@property
def host(self) -> str:
return self._server.host
@property
def port(self) -> Optional[int]:
return self._server.port
@property
def server(self) -> BaseTestServer:
return self._server
@property
def app(self) -> Optional[Application]:
return cast(Optional[Application], getattr(self._server, "app", None))
@property
def session(self) -> ClientSession:
"""An internal aiohttp.ClientSession.
Unlike the methods on the TestClient, client session requests
do not automatically include the host in the url queried, and
will require an absolute path to the resource.
"""
return self._session
def make_url(self, path: str) -> URL:
return self._server.make_url(path)
async def _request(self, method: str, path: str, **kwargs: Any) -> ClientResponse:
resp = await self._session.request(method, self.make_url(path), **kwargs)
# save it to close later
self._responses.append(resp)
return resp
def request(self, method: str, path: str, **kwargs: Any) -> _RequestContextManager:
"""Routes a request to tested http server.
The interface is identical to aiohttp.ClientSession.request,
except the loop kwarg is overridden by the instance used by the
test server.
"""
return _RequestContextManager(self._request(method, path, **kwargs))
def get(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP GET request."""
return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
def post(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP POST request."""
return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
def options(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP OPTIONS request."""
return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs))
def head(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP HEAD request."""
return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
def put(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP PUT request."""
return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
def patch(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP PATCH request."""
return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs))
def delete(self, path: str, **kwargs: Any) -> _RequestContextManager:
"""Perform an HTTP PATCH request."""
return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs))
def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager:
"""Initiate websocket connection.
The api corresponds to aiohttp.ClientSession.ws_connect.
"""
return _WSRequestContextManager(self._ws_connect(path, **kwargs))
async def _ws_connect(self, path: str, **kwargs: Any) -> ClientWebSocketResponse:
ws = await self._session.ws_connect(self.make_url(path), **kwargs)
self._websockets.append(ws)
return ws
async def close(self) -> None:
"""Close all fixtures created by the test client.
After that point, the TestClient is no longer usable.
This is an idempotent function: running close multiple times
will not have any additional effects.
        close is also run on exit when used as an (asynchronous)
        context manager.
"""
if not self._closed:
for resp in self._responses:
resp.close()
for ws in self._websockets:
await ws.close()
await self._session.close()
await self._server.close()
self._closed = True
def __enter__(self) -> None:
raise TypeError("Use async with instead")
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
) -> None:
# __exit__ should exist in pair with __enter__ but never executed
pass # pragma: no cover
async def __aenter__(self) -> "TestClient":
await self.start_server()
return self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
) -> None:
await self.close()
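# Usage sketch (illustrative):
#
#     async with TestClient(TestServer(app)) as client:
#         resp = await client.get("/")
#         assert resp.status == 200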
class AioHTTPTestCase(TestCase):
"""A base class to allow for unittest web applications using aiohttp.
Provides the following:
* self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
* self.loop (asyncio.BaseEventLoop): the event loop in which the
application and server are running.
* self.app (aiohttp.web.Application): the application returned by
self.get_application()
    Note that the TestClient's methods are asynchronous: you have to
    execute functions on the test client using asynchronous methods.
"""
async def get_application(self) -> Application:
"""Get application.
        This method should be overridden to return the
        aiohttp.web.Application object to test.
"""
return self.get_app()
def get_app(self) -> Application:
"""Obsolete method used to constructing web application.
Use .get_application() coroutine instead.
"""
raise RuntimeError("Did you forget to define get_application()?")
def setUp(self) -> None:
if not PY_38:
asyncio.get_event_loop().run_until_complete(self.asyncSetUp())
async def asyncSetUp(self) -> None:
try:
self.loop = asyncio.get_running_loop()
except (AttributeError, RuntimeError): # AttributeError->py36
self.loop = asyncio.get_event_loop_policy().get_event_loop()
return await self.setUpAsync()
async def setUpAsync(self) -> None:
self.app = await self.get_application()
self.server = await self.get_server(self.app)
self.client = await self.get_client(self.server)
await self.client.start_server()
def tearDown(self) -> None:
if not PY_38:
self.loop.run_until_complete(self.asyncTearDown())
async def asyncTearDown(self) -> None:
return await self.tearDownAsync()
async def tearDownAsync(self) -> None:
await self.client.close()
async def get_server(self, app: Application) -> TestServer:
"""Return a TestServer instance."""
return TestServer(app, loop=self.loop)
async def get_client(self, server: TestServer) -> TestClient:
"""Return a TestClient instance."""
return TestClient(server, loop=self.loop)
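# Example subclass sketch (illustrative; the handler and route are assumptions):
#
#     class MyAppTestCase(AioHTTPTestCase):
#         async def get_application(self):
#             async def hello(request):
#                 return web.Response(text="Hello")
#             app = web.Application()
#             app.router.add_get("/", hello)
#             return app
#
#         async def test_example(self):
#             async with self.client.request("GET", "/") as resp:
#                 assert resp.status == 200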
def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
"""
    A decorator for use with asynchronous AioHTTPTestCase test methods.
    In aiohttp 3.8+, this does nothing.
"""
warnings.warn(
"Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
DeprecationWarning,
stacklevel=2,
)
return func
_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
@contextlib.contextmanager
def loop_context(
loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
"""A contextmanager that creates an event_loop, for test purposes.
Handles the creation and cleanup of a test loop.
"""
loop = setup_test_loop(loop_factory)
yield loop
teardown_test_loop(loop, fast=fast)
def setup_test_loop(
loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
"""Create and return an asyncio.BaseEventLoop instance.
The caller should also call teardown_test_loop,
once they are done with the loop.
"""
loop = loop_factory()
try:
module = loop.__class__.__module__
skip_watcher = "uvloop" in module
except AttributeError: # pragma: no cover
# Just in case
skip_watcher = True
asyncio.set_event_loop(loop)
if sys.platform != "win32" and not skip_watcher:
policy = asyncio.get_event_loop_policy()
watcher: asyncio.AbstractChildWatcher
try: # Python >= 3.8
# Refs:
# * https://github.com/pytest-dev/pytest-xdist/issues/620
# * https://stackoverflow.com/a/58614689/595220
# * https://bugs.python.org/issue35621
# * https://github.com/python/cpython/pull/14344
watcher = asyncio.ThreadedChildWatcher()
except AttributeError: # Python < 3.8
watcher = asyncio.SafeChildWatcher()
watcher.attach_loop(loop)
with contextlib.suppress(NotImplementedError):
policy.set_child_watcher(watcher)
return loop
def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
"""Teardown and cleanup an event_loop created by setup_test_loop."""
closed = loop.is_closed()
if not closed:
loop.call_soon(loop.stop)
loop.run_forever()
loop.close()
if not fast:
gc.collect()
asyncio.set_event_loop(None)
def _create_app_mock() -> mock.MagicMock:
def get_dict(app: Any, key: str) -> Any:
return app.__app_dict[key]
def set_dict(app: Any, key: str, value: Any) -> None:
app.__app_dict[key] = value
app = mock.MagicMock(spec=Application)
app.__app_dict = {}
app.__getitem__ = get_dict
app.__setitem__ = set_dict
app._debug = False
app.on_response_prepare = Signal(app)
app.on_response_prepare.freeze()
return app
def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
transport = mock.Mock()
def get_extra_info(key: str) -> Optional[SSLContext]:
if key == "sslcontext":
return sslcontext
else:
return None
transport.get_extra_info.side_effect = get_extra_info
return transport
def make_mocked_request(
method: str,
path: str,
headers: Any = None,
*,
match_info: Any = sentinel,
version: HttpVersion = HttpVersion(1, 1),
closing: bool = False,
app: Any = None,
writer: Any = sentinel,
protocol: Any = sentinel,
transport: Any = sentinel,
payload: Any = sentinel,
sslcontext: Optional[SSLContext] = None,
client_max_size: int = 1024**2,
loop: Any = ...,
) -> Request:
"""Creates mocked web.Request testing purposes.
Useful in unit tests, when spinning full web server is overkill or
specific conditions and errors are hard to trigger.
"""
task = mock.Mock()
if loop is ...:
loop = mock.Mock()
loop.create_future.return_value = ()
if version < HttpVersion(1, 1):
closing = True
if headers:
headers = CIMultiDictProxy(CIMultiDict(headers))
raw_hdrs = tuple(
(k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
)
else:
headers = CIMultiDictProxy(CIMultiDict())
raw_hdrs = ()
chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()
message = RawRequestMessage(
method,
path,
version,
headers,
raw_hdrs,
closing,
None,
False,
chunked,
URL(path),
)
if app is None:
app = _create_app_mock()
if transport is sentinel:
transport = _create_transport(sslcontext)
if protocol is sentinel:
protocol = mock.Mock()
protocol.transport = transport
if writer is sentinel:
writer = mock.Mock()
writer.write_headers = make_mocked_coro(None)
writer.write = make_mocked_coro(None)
writer.write_eof = make_mocked_coro(None)
writer.drain = make_mocked_coro(None)
writer.transport = transport
protocol.transport = transport
protocol.writer = writer
if payload is sentinel:
payload = mock.Mock()
req = Request(
message, payload, protocol, writer, task, loop, client_max_size=client_max_size
)
match_info = UrlMappingMatchInfo(
{} if match_info is sentinel else match_info, mock.Mock()
)
match_info.add_app(app)
req._match_info = match_info
return req
def make_mocked_coro(
return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
"""Creates a coroutine mock."""
async def mock_coro(*args: Any, **kwargs: Any) -> Any:
if raise_exception is not sentinel:
raise raise_exception
if not inspect.isawaitable(return_value):
return return_value
await return_value
return mock.Mock(wraps=mock_coro)
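# --- Editor's usage sketch (not part of aiohttp): driving a handler with
# make_mocked_request() and stubbing a coroutine with make_mocked_coro();
# the handler body and the "token" header are assumptions.
def _example_mocked_request() -> None:
    from aiohttp import web
    async def handler(request: Request) -> web.Response:
        return web.Response(text=request.headers.get("token", "anonymous"))
    req = make_mocked_request("GET", "/", headers={"token": "x"})
    fetch = make_mocked_coro(return_value=b"payload")
    with loop_context() as loop:
        resp = loop.run_until_complete(handler(req))
        assert resp.text == "x"
        assert loop.run_until_complete(fetch()) == b"payload"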
| 21,434 | Python | 29.318246 | 107 | 0.618737 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/client_proto.py | import asyncio
from contextlib import suppress
from typing import Any, Optional, Tuple
from .base_protocol import BaseProtocol
from .client_exceptions import (
ClientOSError,
ClientPayloadError,
ServerDisconnectedError,
ServerTimeoutError,
)
from .helpers import BaseTimerContext
from .http import HttpResponseParser, RawResponseMessage
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
"""Helper class to adapt between Protocol and StreamReader."""
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
BaseProtocol.__init__(self, loop=loop)
DataQueue.__init__(self, loop)
self._should_close = False
self._payload: Optional[StreamReader] = None
self._skip_payload = False
self._payload_parser = None
self._timer = None
self._tail = b""
self._upgraded = False
self._parser: Optional[HttpResponseParser] = None
self._read_timeout: Optional[float] = None
self._read_timeout_handle: Optional[asyncio.TimerHandle] = None
@property
def upgraded(self) -> bool:
return self._upgraded
@property
def should_close(self) -> bool:
if (self._payload is not None and not self._payload.is_eof()) or self._upgraded:
return True
return (
self._should_close
or self._upgraded
or self.exception() is not None
or self._payload_parser is not None
or len(self) > 0
or bool(self._tail)
)
def force_close(self) -> None:
self._should_close = True
def close(self) -> None:
transport = self.transport
if transport is not None:
transport.close()
self.transport = None
self._payload = None
self._drop_timeout()
def is_connected(self) -> bool:
return self.transport is not None and not self.transport.is_closing()
def connection_lost(self, exc: Optional[BaseException]) -> None:
self._drop_timeout()
if self._payload_parser is not None:
with suppress(Exception):
self._payload_parser.feed_eof()
uncompleted = None
if self._parser is not None:
try:
uncompleted = self._parser.feed_eof()
except Exception:
if self._payload is not None:
self._payload.set_exception(
ClientPayloadError("Response payload is not completed")
)
if not self.is_eof():
if isinstance(exc, OSError):
exc = ClientOSError(*exc.args)
if exc is None:
exc = ServerDisconnectedError(uncompleted)
# assigns self._should_close to True as side effect,
# we do it anyway below
self.set_exception(exc)
self._should_close = True
self._parser = None
self._payload = None
self._payload_parser = None
self._reading_paused = False
super().connection_lost(exc)
def eof_received(self) -> None:
# should call parser.feed_eof() most likely
self._drop_timeout()
def pause_reading(self) -> None:
super().pause_reading()
self._drop_timeout()
def resume_reading(self) -> None:
super().resume_reading()
self._reschedule_timeout()
def set_exception(self, exc: BaseException) -> None:
self._should_close = True
self._drop_timeout()
super().set_exception(exc)
def set_parser(self, parser: Any, payload: Any) -> None:
# TODO: actual types are:
# parser: WebSocketReader
# payload: FlowControlDataQueue
# but they are not generic enough
# Need an ABC for both types
self._payload = payload
self._payload_parser = parser
self._drop_timeout()
if self._tail:
data, self._tail = self._tail, b""
self.data_received(data)
def set_response_params(
self,
*,
timer: Optional[BaseTimerContext] = None,
skip_payload: bool = False,
read_until_eof: bool = False,
auto_decompress: bool = True,
read_timeout: Optional[float] = None,
read_bufsize: int = 2**16,
) -> None:
self._skip_payload = skip_payload
self._read_timeout = read_timeout
self._reschedule_timeout()
self._parser = HttpResponseParser(
self,
self._loop,
read_bufsize,
timer=timer,
payload_exception=ClientPayloadError,
response_with_body=not skip_payload,
read_until_eof=read_until_eof,
auto_decompress=auto_decompress,
)
if self._tail:
data, self._tail = self._tail, b""
self.data_received(data)
def _drop_timeout(self) -> None:
if self._read_timeout_handle is not None:
self._read_timeout_handle.cancel()
self._read_timeout_handle = None
def _reschedule_timeout(self) -> None:
timeout = self._read_timeout
if self._read_timeout_handle is not None:
self._read_timeout_handle.cancel()
if timeout:
self._read_timeout_handle = self._loop.call_later(
timeout, self._on_read_timeout
)
else:
self._read_timeout_handle = None
def _on_read_timeout(self) -> None:
exc = ServerTimeoutError("Timeout on reading data from socket")
self.set_exception(exc)
if self._payload is not None:
self._payload.set_exception(exc)
def data_received(self, data: bytes) -> None:
self._reschedule_timeout()
if not data:
return
# custom payload parser
if self._payload_parser is not None:
eof, tail = self._payload_parser.feed_data(data)
if eof:
self._payload = None
self._payload_parser = None
if tail:
self.data_received(tail)
return
else:
if self._upgraded or self._parser is None:
# i.e. websocket connection, websocket parser is not set yet
self._tail += data
else:
# parse http messages
try:
messages, upgraded, tail = self._parser.feed_data(data)
except BaseException as exc:
if self.transport is not None:
# connection.release() could be called BEFORE
# data_received(), the transport is already
# closed in this case
self.transport.close()
# should_close is True after the call
self.set_exception(exc)
return
self._upgraded = upgraded
payload: Optional[StreamReader] = None
for message, payload in messages:
if message.should_close:
self._should_close = True
self._payload = payload
if self._skip_payload or message.code in (204, 304):
self.feed_data((message, EMPTY_PAYLOAD), 0)
else:
self.feed_data((message, payload), 0)
if payload is not None:
# new message(s) processed:
# drop the read-timeout handler either on
# end-of-stream or immediately for
# EMPTY_PAYLOAD
if payload is not EMPTY_PAYLOAD:
payload.on_eof(self._drop_timeout)
else:
self._drop_timeout()
if tail:
if upgraded:
self.data_received(tail)
else:
self._tail = tail
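# --- Editor's hand-fed sketch (not part of aiohttp): ResponseHandler is
# normally driven by the connector's transport; here canned response bytes
# (an assumption) are fed directly through the DataQueue interface.
async def _example_response_handler() -> None:
    conn = ResponseHandler(loop=asyncio.get_event_loop())
    conn.set_response_params()  # defaults: parse body, no read timeout
    conn.data_received(b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nhi")
    message, payload = await conn.read()  # (RawResponseMessage, StreamReader)
    assert message.code == 200
    assert await payload.read() == b"hi"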
| 8,170 | Python | 31.424603 | 88 | 0.544553 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/pytest_plugin.py | import asyncio
import contextlib
import warnings
from typing import Any, Awaitable, Callable, Dict, Generator, Optional, Union
import pytest
from aiohttp.helpers import PY_37, isasyncgenfunction
from aiohttp.web import Application
from .test_utils import (
BaseTestServer,
RawTestServer,
TestClient,
TestServer,
loop_context,
setup_test_loop,
teardown_test_loop,
unused_port as _unused_port,
)
try:
import uvloop
except ImportError: # pragma: no cover
uvloop = None
try:
import tokio
except ImportError: # pragma: no cover
tokio = None
AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]]
def pytest_addoption(parser): # type: ignore[no-untyped-def]
parser.addoption(
"--aiohttp-fast",
action="store_true",
default=False,
help="run tests faster by disabling extra checks",
)
parser.addoption(
"--aiohttp-loop",
action="store",
default="pyloop",
help="run tests with specific loop: pyloop, uvloop, tokio or all",
)
parser.addoption(
"--aiohttp-enable-loop-debug",
action="store_true",
default=False,
help="enable event loop debug mode",
)
def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def]
"""Set up pytest fixture.
Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
"""
func = fixturedef.func
if isasyncgenfunction(func):
# async generator fixture
is_async_gen = True
elif asyncio.iscoroutinefunction(func):
# regular async fixture
is_async_gen = False
else:
# not an async fixture, nothing to do
return
strip_request = False
if "request" not in fixturedef.argnames:
fixturedef.argnames += ("request",)
strip_request = True
def wrapper(*args, **kwargs): # type: ignore[no-untyped-def]
request = kwargs["request"]
if strip_request:
del kwargs["request"]
# if neither the fixture nor the test use the 'loop' fixture,
# 'getfixturevalue' will fail because the test is not parameterized
# (this can be removed someday if 'loop' is no longer parameterized)
if "loop" not in request.fixturenames:
raise Exception(
"Asynchronous fixtures must depend on the 'loop' fixture or "
"be used in tests depending from it."
)
_loop = request.getfixturevalue("loop")
if is_async_gen:
# for async generators, we need to advance the generator once,
# then advance it again in a finalizer
gen = func(*args, **kwargs)
def finalizer(): # type: ignore[no-untyped-def]
try:
return _loop.run_until_complete(gen.__anext__())
except StopAsyncIteration:
pass
request.addfinalizer(finalizer)
return _loop.run_until_complete(gen.__anext__())
else:
return _loop.run_until_complete(func(*args, **kwargs))
fixturedef.func = wrapper
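# --- Editor's sketch (not part of aiohttp): an async generator fixture that
# the hook above makes usable; the fixture name and payload are assumptions.
@pytest.fixture
async def _example_resource(loop):  # must depend on 'loop', per the check above
    handle = {"open": True}  # setup half runs before the test
    yield handle
    handle["open"] = False  # teardown half resumes via the finalizer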
@pytest.fixture
def fast(request): # type: ignore[no-untyped-def]
"""--fast config option"""
return request.config.getoption("--aiohttp-fast")
@pytest.fixture
def loop_debug(request): # type: ignore[no-untyped-def]
"""--enable-loop-debug config option"""
return request.config.getoption("--aiohttp-enable-loop-debug")
@contextlib.contextmanager
def _runtime_warning_context(): # type: ignore[no-untyped-def]
"""Context manager which checks for RuntimeWarnings.
This exists specifically to
avoid "coroutine 'X' was never awaited" warnings being missed.
If RuntimeWarnings occur in the context a RuntimeError is raised.
"""
with warnings.catch_warnings(record=True) as _warnings:
yield
rw = [
"{w.filename}:{w.lineno}:{w.message}".format(w=w)
for w in _warnings
if w.category == RuntimeWarning
]
if rw:
raise RuntimeError(
"{} Runtime Warning{},\n{}".format(
len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
)
)
@contextlib.contextmanager
def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]
"""Passthrough loop context.
Sets up and tears down a loop, unless one is passed in via the loop
argument, in which case it is passed straight through.
"""
if loop:
# loop already exists, pass it straight through
yield loop
else:
# this shadows loop_context's standard behavior
loop = setup_test_loop()
yield loop
teardown_test_loop(loop, fast=fast)
def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def]
"""Fix pytest collecting for coroutines."""
if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
return list(collector._genfunctions(name, obj))
def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def]
"""Run coroutines in an event loop instead of a normal function call."""
fast = pyfuncitem.config.getoption("--aiohttp-fast")
if asyncio.iscoroutinefunction(pyfuncitem.function):
existing_loop = pyfuncitem.funcargs.get(
"proactor_loop"
) or pyfuncitem.funcargs.get("loop", None)
with _runtime_warning_context():
with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
testargs = {
arg: pyfuncitem.funcargs[arg]
for arg in pyfuncitem._fixtureinfo.argnames
}
_loop.run_until_complete(pyfuncitem.obj(**testargs))
return True
def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def]
if "loop_factory" not in metafunc.fixturenames:
return
loops = metafunc.config.option.aiohttp_loop
avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}
if uvloop is not None: # pragma: no cover
avail_factories["uvloop"] = uvloop.EventLoopPolicy
if tokio is not None: # pragma: no cover
avail_factories["tokio"] = tokio.EventLoopPolicy
if loops == "all":
loops = "pyloop,uvloop?,tokio?"
factories = {} # type: ignore[var-annotated]
for name in loops.split(","):
required = not name.endswith("?")
name = name.strip(" ?")
if name not in avail_factories: # pragma: no cover
if required:
raise ValueError(
"Unknown loop '%s', available loops: %s"
% (name, list(factories.keys()))
)
else:
continue
factories[name] = avail_factories[name]
metafunc.parametrize(
"loop_factory", list(factories.values()), ids=list(factories.keys())
)
@pytest.fixture
def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def]
"""Return an instance of the event loop."""
policy = loop_factory()
asyncio.set_event_loop_policy(policy)
with loop_context(fast=fast) as _loop:
if loop_debug:
_loop.set_debug(True) # pragma: no cover
asyncio.set_event_loop(_loop)
yield _loop
@pytest.fixture
def proactor_loop(): # type: ignore[no-untyped-def]
if not PY_37:
policy = asyncio.get_event_loop_policy()
policy._loop_factory = asyncio.ProactorEventLoop # type: ignore[attr-defined]
else:
policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined]
asyncio.set_event_loop_policy(policy)
with loop_context(policy.new_event_loop) as _loop:
asyncio.set_event_loop(_loop)
yield _loop
@pytest.fixture
def unused_port(aiohttp_unused_port): # type: ignore[no-untyped-def] # pragma: no cover
warnings.warn(
"Deprecated, use aiohttp_unused_port fixture instead",
DeprecationWarning,
stacklevel=2,
)
return aiohttp_unused_port
@pytest.fixture
def aiohttp_unused_port(): # type: ignore[no-untyped-def]
"""Return a port that is unused on the current host."""
return _unused_port
@pytest.fixture
def aiohttp_server(loop): # type: ignore[no-untyped-def]
"""Factory to create a TestServer instance, given an app.
aiohttp_server(app, **kwargs)
"""
servers = []
async def go(app, *, port=None, **kwargs): # type: ignore[no-untyped-def]
server = TestServer(app, port=port)
await server.start_server(loop=loop, **kwargs)
servers.append(server)
return server
yield go
async def finalize() -> None:
while servers:
await servers.pop().close()
loop.run_until_complete(finalize())
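# --- Editor's usage sketch (not part of aiohttp): a test built on the
# aiohttp_server fixture; the route and payload are assumptions.
async def _example_server_test(aiohttp_server):  # type: ignore[no-untyped-def]
    from aiohttp import web
    async def ping(request):  # type: ignore[no-untyped-def]
        return web.Response(text="pong")
    app = web.Application()
    app.router.add_get("/ping", ping)
    server = await aiohttp_server(app)
    assert server.port is not None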
@pytest.fixture
def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no cover
warnings.warn(
"Deprecated, use aiohttp_server fixture instead",
DeprecationWarning,
stacklevel=2,
)
return aiohttp_server
@pytest.fixture
def aiohttp_raw_server(loop): # type: ignore[no-untyped-def]
"""Factory to create a RawTestServer instance, given a web handler.
aiohttp_raw_server(handler, **kwargs)
"""
servers = []
async def go(handler, *, port=None, **kwargs): # type: ignore[no-untyped-def]
server = RawTestServer(handler, port=port)
await server.start_server(loop=loop, **kwargs)
servers.append(server)
return server
yield go
async def finalize() -> None:
while servers:
await servers.pop().close()
loop.run_until_complete(finalize())
@pytest.fixture
def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover
aiohttp_raw_server,
):
warnings.warn(
"Deprecated, use aiohttp_raw_server fixture instead",
DeprecationWarning,
stacklevel=2,
)
return aiohttp_raw_server
@pytest.fixture
def aiohttp_client(
loop: asyncio.AbstractEventLoop,
) -> Generator[AiohttpClient, None, None]:
"""Factory to create a TestClient instance.
aiohttp_client(app, **kwargs)
aiohttp_client(server, **kwargs)
aiohttp_client(raw_server, **kwargs)
"""
clients = []
async def go(
__param: Union[Application, BaseTestServer],
*args: Any,
server_kwargs: Optional[Dict[str, Any]] = None,
**kwargs: Any
) -> TestClient:
if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type]
__param, (Application, BaseTestServer)
):
__param = __param(loop, *args, **kwargs)
kwargs = {}
else:
assert not args, "args should be empty"
if isinstance(__param, Application):
server_kwargs = server_kwargs or {}
server = TestServer(__param, loop=loop, **server_kwargs)
client = TestClient(server, loop=loop, **kwargs)
elif isinstance(__param, BaseTestServer):
client = TestClient(__param, loop=loop, **kwargs)
else:
raise ValueError("Unknown argument type: %r" % type(__param))
await client.start_server()
clients.append(client)
return client
yield go
async def finalize() -> None:
while clients:
await clients.pop().close()
loop.run_until_complete(finalize())
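# --- Editor's usage sketch (not part of aiohttp): a test built on the
# aiohttp_client fixture; the handler and asserted payload are assumptions.
async def _example_client_test(aiohttp_client: AiohttpClient) -> None:
    from aiohttp import web
    async def hello(request):  # type: ignore[no-untyped-def]
        return web.Response(text="hello")
    app = web.Application()
    app.router.add_get("/", hello)
    client = await aiohttp_client(app)
    resp = await client.get("/")
    assert resp.status == 200
    assert await resp.text() == "hello"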
@pytest.fixture
def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover
warnings.warn(
"Deprecated, use aiohttp_client fixture instead",
DeprecationWarning,
stacklevel=2,
)
return aiohttp_client
| 11,772 | Python | 29.033163 | 88 | 0.621475 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_app.py | import asyncio
import logging
import warnings
from functools import partial, update_wrapper
from typing import (
TYPE_CHECKING,
Any,
AsyncIterator,
Awaitable,
Callable,
Dict,
Iterable,
Iterator,
List,
Mapping,
MutableMapping,
Optional,
Sequence,
Tuple,
Type,
Union,
cast,
)
from aiosignal import Signal
from frozenlist import FrozenList
from . import hdrs
from .abc import (
AbstractAccessLogger,
AbstractMatchInfo,
AbstractRouter,
AbstractStreamWriter,
)
from .helpers import DEBUG
from .http_parser import RawRequestMessage
from .log import web_logger
from .streams import StreamReader
from .web_log import AccessLogger
from .web_middlewares import _fix_request_current_app
from .web_protocol import RequestHandler
from .web_request import Request
from .web_response import StreamResponse
from .web_routedef import AbstractRouteDef
from .web_server import Server
from .web_urldispatcher import (
AbstractResource,
AbstractRoute,
Domain,
MaskDomain,
MatchedSubAppResource,
PrefixedSubAppResource,
UrlDispatcher,
)
__all__ = ("Application", "CleanupError")
if TYPE_CHECKING: # pragma: no cover
from .typedefs import Handler
_AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
_RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
_Middleware = Union[
Callable[[Request, Handler], Awaitable[StreamResponse]],
Callable[["Application", Handler], Awaitable[Handler]], # old-style
]
_Middlewares = FrozenList[_Middleware]
_MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]]
_Subapps = List["Application"]
else:
# No type checker mode, skip types
_AppSignal = Signal
_RespPrepareSignal = Signal
_Middleware = Callable
_Middlewares = FrozenList
_MiddlewaresHandlers = Optional[Sequence]
_Subapps = List
class Application(MutableMapping[str, Any]):
ATTRS = frozenset(
[
"logger",
"_debug",
"_router",
"_loop",
"_handler_args",
"_middlewares",
"_middlewares_handlers",
"_run_middlewares",
"_state",
"_frozen",
"_pre_frozen",
"_subapps",
"_on_response_prepare",
"_on_startup",
"_on_shutdown",
"_on_cleanup",
"_client_max_size",
"_cleanup_ctx",
]
)
def __init__(
self,
*,
logger: logging.Logger = web_logger,
router: Optional[UrlDispatcher] = None,
middlewares: Iterable[_Middleware] = (),
handler_args: Optional[Mapping[str, Any]] = None,
client_max_size: int = 1024**2,
loop: Optional[asyncio.AbstractEventLoop] = None,
debug: Any = ..., # mypy doesn't support ellipsis
) -> None:
if router is None:
router = UrlDispatcher()
else:
warnings.warn(
"router argument is deprecated", DeprecationWarning, stacklevel=2
)
assert isinstance(router, AbstractRouter), router
if loop is not None:
warnings.warn(
"loop argument is deprecated", DeprecationWarning, stacklevel=2
)
if debug is not ...:
warnings.warn(
"debug argument is deprecated", DeprecationWarning, stacklevel=2
)
self._debug = debug
self._router: UrlDispatcher = router
self._loop = loop
self._handler_args = handler_args
self.logger = logger
self._middlewares: _Middlewares = FrozenList(middlewares)
# initialized on freezing
self._middlewares_handlers: _MiddlewaresHandlers = None
# initialized on freezing
self._run_middlewares: Optional[bool] = None
self._state: Dict[str, Any] = {}
self._frozen = False
self._pre_frozen = False
self._subapps: _Subapps = []
self._on_response_prepare: _RespPrepareSignal = Signal(self)
self._on_startup: _AppSignal = Signal(self)
self._on_shutdown: _AppSignal = Signal(self)
self._on_cleanup: _AppSignal = Signal(self)
self._cleanup_ctx = CleanupContext()
self._on_startup.append(self._cleanup_ctx._on_startup)
self._on_cleanup.append(self._cleanup_ctx._on_cleanup)
self._client_max_size = client_max_size
def __init_subclass__(cls: Type["Application"]) -> None:
warnings.warn(
"Inheritance class {} from web.Application "
"is discouraged".format(cls.__name__),
DeprecationWarning,
stacklevel=2,
)
if DEBUG: # pragma: no cover
def __setattr__(self, name: str, val: Any) -> None:
if name not in self.ATTRS:
warnings.warn(
"Setting custom web.Application.{} attribute "
"is discouraged".format(name),
DeprecationWarning,
stacklevel=2,
)
super().__setattr__(name, val)
# MutableMapping API
def __eq__(self, other: object) -> bool:
return self is other
def __getitem__(self, key: str) -> Any:
return self._state[key]
def _check_frozen(self) -> None:
if self._frozen:
warnings.warn(
"Changing state of started or joined " "application is deprecated",
DeprecationWarning,
stacklevel=3,
)
def __setitem__(self, key: str, value: Any) -> None:
self._check_frozen()
self._state[key] = value
def __delitem__(self, key: str) -> None:
self._check_frozen()
del self._state[key]
def __len__(self) -> int:
return len(self._state)
def __iter__(self) -> Iterator[str]:
return iter(self._state)
########
@property
def loop(self) -> asyncio.AbstractEventLoop:
# Technically the loop can be None
# but we mask it by explicit type cast
# to provide a more convenient type annotation
warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2)
return cast(asyncio.AbstractEventLoop, self._loop)
def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None:
if loop is None:
loop = asyncio.get_event_loop()
if self._loop is not None and self._loop is not loop:
raise RuntimeError(
"web.Application instance initialized with different loop"
)
self._loop = loop
# set loop debug
if self._debug is ...:
self._debug = loop.get_debug()
# set loop to sub applications
for subapp in self._subapps:
subapp._set_loop(loop)
@property
def pre_frozen(self) -> bool:
return self._pre_frozen
def pre_freeze(self) -> None:
if self._pre_frozen:
return
self._pre_frozen = True
self._middlewares.freeze()
self._router.freeze()
self._on_response_prepare.freeze()
self._cleanup_ctx.freeze()
self._on_startup.freeze()
self._on_shutdown.freeze()
self._on_cleanup.freeze()
self._middlewares_handlers = tuple(self._prepare_middleware())
# If neither the current app nor any subapp has middlewares, skip the
# middleware-dispatch code path entirely; that path hardcodes one
# middleware per app just to set up the current_app attribute. If no
# middlewares are configured the handler will receive the proper
# current_app without needing all of this code.
self._run_middlewares = True if self.middlewares else False
for subapp in self._subapps:
subapp.pre_freeze()
self._run_middlewares = self._run_middlewares or subapp._run_middlewares
@property
def frozen(self) -> bool:
return self._frozen
def freeze(self) -> None:
if self._frozen:
return
self.pre_freeze()
self._frozen = True
for subapp in self._subapps:
subapp.freeze()
@property
def debug(self) -> bool:
warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2)
return self._debug # type: ignore[no-any-return]
def _reg_subapp_signals(self, subapp: "Application") -> None:
def reg_handler(signame: str) -> None:
subsig = getattr(subapp, signame)
async def handler(app: "Application") -> None:
await subsig.send(subapp)
appsig = getattr(self, signame)
appsig.append(handler)
reg_handler("on_startup")
reg_handler("on_shutdown")
reg_handler("on_cleanup")
def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource:
if not isinstance(prefix, str):
raise TypeError("Prefix must be str")
prefix = prefix.rstrip("/")
if not prefix:
raise ValueError("Prefix cannot be empty")
factory = partial(PrefixedSubAppResource, prefix, subapp)
return self._add_subapp(factory, subapp)
def _add_subapp(
self, resource_factory: Callable[[], AbstractResource], subapp: "Application"
) -> AbstractResource:
if self.frozen:
raise RuntimeError("Cannot add sub application to frozen application")
if subapp.frozen:
raise RuntimeError("Cannot add frozen application")
resource = resource_factory()
self.router.register_resource(resource)
self._reg_subapp_signals(subapp)
self._subapps.append(subapp)
subapp.pre_freeze()
if self._loop is not None:
subapp._set_loop(self._loop)
return resource
def add_domain(self, domain: str, subapp: "Application") -> AbstractResource:
if not isinstance(domain, str):
raise TypeError("Domain must be str")
elif "*" in domain:
rule: Domain = MaskDomain(domain)
else:
rule = Domain(domain)
factory = partial(MatchedSubAppResource, rule, subapp)
return self._add_subapp(factory, subapp)
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
return self.router.add_routes(routes)
@property
def on_response_prepare(self) -> _RespPrepareSignal:
return self._on_response_prepare
@property
def on_startup(self) -> _AppSignal:
return self._on_startup
@property
def on_shutdown(self) -> _AppSignal:
return self._on_shutdown
@property
def on_cleanup(self) -> _AppSignal:
return self._on_cleanup
@property
def cleanup_ctx(self) -> "CleanupContext":
return self._cleanup_ctx
@property
def router(self) -> UrlDispatcher:
return self._router
@property
def middlewares(self) -> _Middlewares:
return self._middlewares
def _make_handler(
self,
*,
loop: Optional[asyncio.AbstractEventLoop] = None,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
**kwargs: Any,
) -> Server:
if not issubclass(access_log_class, AbstractAccessLogger):
raise TypeError(
"access_log_class must be subclass of "
"aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class)
)
self._set_loop(loop)
self.freeze()
kwargs["debug"] = self._debug
kwargs["access_log_class"] = access_log_class
if self._handler_args:
for k, v in self._handler_args.items():
kwargs[k] = v
return Server(
self._handle, # type: ignore[arg-type]
request_factory=self._make_request,
loop=self._loop,
**kwargs,
)
def make_handler(
self,
*,
loop: Optional[asyncio.AbstractEventLoop] = None,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
**kwargs: Any,
) -> Server:
warnings.warn(
"Application.make_handler(...) is deprecated, " "use AppRunner API instead",
DeprecationWarning,
stacklevel=2,
)
return self._make_handler(
loop=loop, access_log_class=access_log_class, **kwargs
)
async def startup(self) -> None:
"""Causes on_startup signal
Should be called in the event loop along with the request handler.
"""
await self.on_startup.send(self)
async def shutdown(self) -> None:
"""Causes on_shutdown signal
Should be called before cleanup()
"""
await self.on_shutdown.send(self)
async def cleanup(self) -> None:
"""Causes on_cleanup signal
Should be called after shutdown()
"""
if self.on_cleanup.frozen:
await self.on_cleanup.send(self)
else:
# If an exception occurs in startup, ensure cleanup contexts are completed.
await self._cleanup_ctx._on_cleanup(self)
def _make_request(
self,
message: RawRequestMessage,
payload: StreamReader,
protocol: RequestHandler,
writer: AbstractStreamWriter,
task: "asyncio.Task[None]",
_cls: Type[Request] = Request,
) -> Request:
return _cls(
message,
payload,
protocol,
writer,
task,
self._loop,
client_max_size=self._client_max_size,
)
def _prepare_middleware(self) -> Iterator[Tuple[_Middleware, bool]]:
for m in reversed(self._middlewares):
if getattr(m, "__middleware_version__", None) == 1:
yield m, True
else:
warnings.warn(
'old-style middleware "{!r}" deprecated, ' "see #2252".format(m),
DeprecationWarning,
stacklevel=2,
)
yield m, False
yield _fix_request_current_app(self), True
async def _handle(self, request: Request) -> StreamResponse:
loop = asyncio.get_event_loop()
debug = loop.get_debug()
match_info = await self._router.resolve(request)
if debug: # pragma: no cover
if not isinstance(match_info, AbstractMatchInfo):
raise TypeError(
"match_info should be AbstractMatchInfo "
"instance, not {!r}".format(match_info)
)
match_info.add_app(self)
match_info.freeze()
resp = None
request._match_info = match_info
expect = request.headers.get(hdrs.EXPECT)
if expect:
resp = await match_info.expect_handler(request)
await request.writer.drain()
if resp is None:
handler = match_info.handler
if self._run_middlewares:
for app in match_info.apps[::-1]:
for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] # noqa
if new_style:
handler = update_wrapper(
partial(m, handler=handler), handler
)
else:
handler = await m(app, handler) # type: ignore[arg-type]
resp = await handler(request)
return resp
def __call__(self) -> "Application":
"""gunicorn compatibility"""
return self
def __repr__(self) -> str:
return f"<Application 0x{id(self):x}>"
def __bool__(self) -> bool:
return True
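# --- Editor's sketch (not part of aiohttp): a new-style middleware as
# consumed by _prepare_middleware()/_handle() above; the header name is an
# assumption.
from .web_middlewares import middleware as _middleware
@_middleware
async def _example_middleware(request: Request, handler: Any) -> StreamResponse:
    resp = await handler(request)
    resp.headers["X-Served-By"] = "example"  # post-process every response
    return resp
# Installed via: Application(middlewares=[_example_middleware])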
class CleanupError(RuntimeError):
@property
def exceptions(self) -> List[BaseException]:
return cast(List[BaseException], self.args[1])
if TYPE_CHECKING: # pragma: no cover
_CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]]
else:
_CleanupContextBase = FrozenList
class CleanupContext(_CleanupContextBase):
def __init__(self) -> None:
super().__init__()
self._exits: List[AsyncIterator[None]] = []
async def _on_startup(self, app: Application) -> None:
for cb in self:
it = cb(app).__aiter__()
await it.__anext__()
self._exits.append(it)
async def _on_cleanup(self, app: Application) -> None:
errors = []
for it in reversed(self._exits):
try:
await it.__anext__()
except StopAsyncIteration:
pass
except Exception as exc:
errors.append(exc)
else:
errors.append(RuntimeError(f"{it!r} has more than one 'yield'"))
if errors:
if len(errors) == 1:
raise errors[0]
else:
raise CleanupError("Multiple errors on cleanup stage", errors)
| 17,170 | Python | 29.772401 | 101 | 0.57583 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/http_writer.py | """Http related parsers and protocol."""
import asyncio
import zlib
from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa
from multidict import CIMultiDict
from .abc import AbstractStreamWriter
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS
__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
class HttpVersion(NamedTuple):
major: int
minor: int
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
class StreamWriter(AbstractStreamWriter):
def __init__(
self,
protocol: BaseProtocol,
loop: asyncio.AbstractEventLoop,
on_chunk_sent: _T_OnChunkSent = None,
on_headers_sent: _T_OnHeadersSent = None,
) -> None:
self._protocol = protocol
self._transport = protocol.transport
self.loop = loop
self.length = None
self.chunked = False
self.buffer_size = 0
self.output_size = 0
self._eof = False
self._compress: Any = None
self._drain_waiter = None
self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
self._on_headers_sent: _T_OnHeadersSent = on_headers_sent
@property
def transport(self) -> Optional[asyncio.Transport]:
return self._transport
@property
def protocol(self) -> BaseProtocol:
return self._protocol
def enable_chunking(self) -> None:
self.chunked = True
def enable_compression(
self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
) -> None:
zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)
def _write(self, chunk: bytes) -> None:
size = len(chunk)
self.buffer_size += size
self.output_size += size
if self._transport is None or self._transport.is_closing():
raise ConnectionResetError("Cannot write to closing transport")
self._transport.write(chunk)
async def write(
self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
) -> None:
"""Writes chunk of data to a stream.
write_eof() indicates end of stream.
writer can't be used after write_eof() method being called.
write() return drain future.
"""
if self._on_chunk_sent is not None:
await self._on_chunk_sent(chunk)
if isinstance(chunk, memoryview):
if chunk.nbytes != len(chunk):
# just reshape it
chunk = chunk.cast("c")
if self._compress is not None:
chunk = self._compress.compress(chunk)
if not chunk:
return
if self.length is not None:
chunk_len = len(chunk)
if self.length >= chunk_len:
self.length = self.length - chunk_len
else:
chunk = chunk[: self.length]
self.length = 0
if not chunk:
return
if chunk:
if self.chunked:
chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
chunk = chunk_len_pre + chunk + b"\r\n"
self._write(chunk)
if self.buffer_size > LIMIT and drain:
self.buffer_size = 0
await self.drain()
async def write_headers(
self, status_line: str, headers: "CIMultiDict[str]"
) -> None:
"""Write request/response status and headers."""
if self._on_headers_sent is not None:
await self._on_headers_sent(headers)
# status + headers
buf = _serialize_headers(status_line, headers)
self._write(buf)
async def write_eof(self, chunk: bytes = b"") -> None:
if self._eof:
return
if chunk and self._on_chunk_sent is not None:
await self._on_chunk_sent(chunk)
if self._compress:
if chunk:
chunk = self._compress.compress(chunk)
chunk = chunk + self._compress.flush()
if chunk and self.chunked:
chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
else:
if self.chunked:
if chunk:
chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
else:
chunk = b"0\r\n\r\n"
if chunk:
self._write(chunk)
await self.drain()
self._eof = True
self._transport = None
async def drain(self) -> None:
"""Flush the write buffer.
The intended use is to write
await w.write(data)
await w.drain()
"""
if self._protocol.transport is not None:
await self._protocol._drain_helper()
def _safe_header(string: str) -> str:
if "\r" in string or "\n" in string:
raise ValueError(
"Newline or carriage return detected in headers. "
"Potential header injection attack."
)
return string
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
return line.encode("utf-8")
_serialize_headers = _py_serialize_headers
try:
import aiohttp._http_writer as _http_writer # type: ignore[import]
_c_serialize_headers = _http_writer._serialize_headers
if not NO_EXTENSIONS:
_serialize_headers = _c_serialize_headers
except ImportError:
pass
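# --- Editor's sketch (not part of aiohttp): the wire format produced by
# _serialize_headers; the header values are assumptions.
def _example_serialize_headers() -> bytes:
    headers = CIMultiDict({"Content-Type": "text/plain", "Content-Length": "2"})
    buf = _serialize_headers("HTTP/1.1 200 OK", headers)
    # buf == b"HTTP/1.1 200 OK\r\nContent-Type: text/plain\r\n"
    #        b"Content-Length: 2\r\n\r\n"
    return buf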
| 5,952 | Python | 28.616915 | 88 | 0.572077 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/base_protocol.py | import asyncio
from typing import Optional, cast
from .tcp_helpers import tcp_nodelay
class BaseProtocol(asyncio.Protocol):
__slots__ = (
"_loop",
"_paused",
"_drain_waiter",
"_connection_lost",
"_reading_paused",
"transport",
)
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop: asyncio.AbstractEventLoop = loop
self._paused = False
self._drain_waiter: Optional[asyncio.Future[None]] = None
self._connection_lost = False
self._reading_paused = False
self.transport: Optional[asyncio.Transport] = None
def pause_writing(self) -> None:
assert not self._paused
self._paused = True
def resume_writing(self) -> None:
assert self._paused
self._paused = False
waiter = self._drain_waiter
if waiter is not None:
self._drain_waiter = None
if not waiter.done():
waiter.set_result(None)
def pause_reading(self) -> None:
if not self._reading_paused and self.transport is not None:
try:
self.transport.pause_reading()
except (AttributeError, NotImplementedError, RuntimeError):
pass
self._reading_paused = True
def resume_reading(self) -> None:
if self._reading_paused and self.transport is not None:
try:
self.transport.resume_reading()
except (AttributeError, NotImplementedError, RuntimeError):
pass
self._reading_paused = False
def connection_made(self, transport: asyncio.BaseTransport) -> None:
tr = cast(asyncio.Transport, transport)
tcp_nodelay(tr, True)
self.transport = tr
def connection_lost(self, exc: Optional[BaseException]) -> None:
self._connection_lost = True
# Wake up the writer if currently paused.
self.transport = None
if not self._paused:
return
waiter = self._drain_waiter
if waiter is None:
return
self._drain_waiter = None
if waiter.done():
return
if exc is None:
waiter.set_result(None)
else:
waiter.set_exception(exc)
async def _drain_helper(self) -> None:
if self._connection_lost:
raise ConnectionResetError("Connection lost")
if not self._paused:
return
waiter = self._drain_waiter
if waiter is None:
waiter = self._loop.create_future()
self._drain_waiter = waiter
await asyncio.shield(waiter)
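# --- Editor's minimal subclass sketch (not part of aiohttp): BaseProtocol
# supplies the pause/resume bookkeeping and _drain_helper; the echo
# behaviour and bind address are assumptions.
class _EchoProtocol(BaseProtocol):
    def data_received(self, data: bytes) -> None:
        if self.transport is not None:
            self.transport.write(data)  # echo; backpressure via pause_writing
async def _example_serve_echo() -> None:
    loop = asyncio.get_event_loop()
    server = await loop.create_server(lambda: _EchoProtocol(loop), "127.0.0.1", 0)
    server.close()
    await server.wait_closed()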
| 2,676 | Python | 29.420454 | 72 | 0.576233 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_protocol.py | import asyncio
import asyncio.streams
import traceback
import warnings
from collections import deque
from contextlib import suppress
from html import escape as html_escape
from http import HTTPStatus
from logging import Logger
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Deque,
Optional,
Sequence,
Tuple,
Type,
Union,
cast,
)
import attr
import yarl
from .abc import AbstractAccessLogger, AbstractStreamWriter
from .base_protocol import BaseProtocol
from .helpers import ceil_timeout
from .http import (
HttpProcessingError,
HttpRequestParser,
HttpVersion10,
RawRequestMessage,
StreamWriter,
)
from .log import access_logger, server_logger
from .streams import EMPTY_PAYLOAD, StreamReader
from .tcp_helpers import tcp_keepalive
from .web_exceptions import HTTPException
from .web_log import AccessLogger
from .web_request import BaseRequest
from .web_response import Response, StreamResponse
__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")
if TYPE_CHECKING: # pragma: no cover
from .web_server import Server
_RequestFactory = Callable[
[
RawRequestMessage,
StreamReader,
"RequestHandler",
AbstractStreamWriter,
"asyncio.Task[None]",
],
BaseRequest,
]
_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]
ERROR = RawRequestMessage(
"UNKNOWN",
"/",
HttpVersion10,
{}, # type: ignore[arg-type]
{}, # type: ignore[arg-type]
True,
None,
False,
False,
yarl.URL("/"),
)
class RequestPayloadError(Exception):
"""Payload parsing error."""
class PayloadAccessError(Exception):
"""Payload was accessed after response was sent."""
@attr.s(auto_attribs=True, frozen=True, slots=True)
class _ErrInfo:
status: int
exc: BaseException
message: str
_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
class RequestHandler(BaseProtocol):
"""HTTP protocol implementation.
RequestHandler handles an incoming HTTP request. It reads the request
line, request headers and request payload, then calls the
handle_request() method. By default it always returns a 404 response.
RequestHandler also handles errors in the incoming request, such as a
bad status line, bad headers or an incomplete payload. If any error
occurs, the connection gets closed.
keepalive_timeout -- number of seconds before closing
keep-alive connection
tcp_keepalive -- enable TCP keep-alive, default is on
debug -- enable debug mode
logger -- custom logger object
access_log_class -- custom class for access_logger
access_log -- custom logging object
access_log_format -- access log format string
loop -- Optional event loop
max_line_size -- Optional maximum header line size
max_field_size -- Optional maximum header field size
max_headers -- Optional maximum header size
"""
KEEPALIVE_RESCHEDULE_DELAY = 1
__slots__ = (
"_request_count",
"_keepalive",
"_manager",
"_request_handler",
"_request_factory",
"_tcp_keepalive",
"_keepalive_time",
"_keepalive_handle",
"_keepalive_timeout",
"_lingering_time",
"_messages",
"_message_tail",
"_waiter",
"_task_handler",
"_upgrade",
"_payload_parser",
"_request_parser",
"_reading_paused",
"logger",
"debug",
"access_log",
"access_logger",
"_close",
"_force_close",
"_current_request",
)
def __init__(
self,
manager: "Server",
*,
loop: asyncio.AbstractEventLoop,
keepalive_timeout: float = 75.0, # NGINX default is 75 secs
tcp_keepalive: bool = True,
logger: Logger = server_logger,
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
access_log: Logger = access_logger,
access_log_format: str = AccessLogger.LOG_FORMAT,
debug: bool = False,
max_line_size: int = 8190,
max_headers: int = 32768,
max_field_size: int = 8190,
lingering_time: float = 10.0,
read_bufsize: int = 2**16,
auto_decompress: bool = True,
):
super().__init__(loop)
self._request_count = 0
self._keepalive = False
self._current_request: Optional[BaseRequest] = None
self._manager: Optional[Server] = manager
self._request_handler: Optional[_RequestHandler] = manager.request_handler
self._request_factory: Optional[_RequestFactory] = manager.request_factory
self._tcp_keepalive = tcp_keepalive
# placeholder to be replaced on keepalive timeout setup
self._keepalive_time = 0.0
self._keepalive_handle: Optional[asyncio.Handle] = None
self._keepalive_timeout = keepalive_timeout
self._lingering_time = float(lingering_time)
self._messages: Deque[_MsgType] = deque()
self._message_tail = b""
self._waiter: Optional[asyncio.Future[None]] = None
self._task_handler: Optional[asyncio.Task[None]] = None
self._upgrade = False
self._payload_parser: Any = None
self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
self,
loop,
read_bufsize,
max_line_size=max_line_size,
max_field_size=max_field_size,
max_headers=max_headers,
payload_exception=RequestPayloadError,
auto_decompress=auto_decompress,
)
self.logger = logger
self.debug = debug
self.access_log = access_log
if access_log:
self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
access_log, access_log_format
)
else:
self.access_logger = None
self._close = False
self._force_close = False
def __repr__(self) -> str:
return "<{} {}>".format(
self.__class__.__name__,
"connected" if self.transport is not None else "disconnected",
)
@property
def keepalive_timeout(self) -> float:
return self._keepalive_timeout
async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
"""Do worker process exit preparations.
We need to clean up everything and stop accepting requests.
It is especially important for keep-alive connections.
"""
self._force_close = True
if self._keepalive_handle is not None:
self._keepalive_handle.cancel()
if self._waiter:
self._waiter.cancel()
# wait for handlers
with suppress(asyncio.CancelledError, asyncio.TimeoutError):
async with ceil_timeout(timeout):
if self._current_request is not None:
self._current_request._cancel(asyncio.CancelledError())
if self._task_handler is not None and not self._task_handler.done():
await self._task_handler
# force-close non-idle handler
if self._task_handler is not None:
self._task_handler.cancel()
if self.transport is not None:
self.transport.close()
self.transport = None
def connection_made(self, transport: asyncio.BaseTransport) -> None:
super().connection_made(transport)
real_transport = cast(asyncio.Transport, transport)
if self._tcp_keepalive:
tcp_keepalive(real_transport)
self._task_handler = self._loop.create_task(self.start())
assert self._manager is not None
self._manager.connection_made(self, real_transport)
def connection_lost(self, exc: Optional[BaseException]) -> None:
if self._manager is None:
return
self._manager.connection_lost(self, exc)
super().connection_lost(exc)
self._manager = None
self._force_close = True
self._request_factory = None
self._request_handler = None
self._request_parser = None
if self._keepalive_handle is not None:
self._keepalive_handle.cancel()
if self._current_request is not None:
if exc is None:
exc = ConnectionResetError("Connection lost")
self._current_request._cancel(exc)
if self._waiter is not None:
self._waiter.cancel()
self._task_handler = None
if self._payload_parser is not None:
self._payload_parser.feed_eof()
self._payload_parser = None
def set_parser(self, parser: Any) -> None:
# Actual type is WebReader
assert self._payload_parser is None
self._payload_parser = parser
if self._message_tail:
self._payload_parser.feed_data(self._message_tail)
self._message_tail = b""
def eof_received(self) -> None:
pass
def data_received(self, data: bytes) -> None:
if self._force_close or self._close:
return
# parse http messages
messages: Sequence[_MsgType]
if self._payload_parser is None and not self._upgrade:
assert self._request_parser is not None
try:
messages, upgraded, tail = self._request_parser.feed_data(data)
except HttpProcessingError as exc:
messages = [
(_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
]
upgraded = False
tail = b""
for msg, payload in messages or ():
self._request_count += 1
self._messages.append((msg, payload))
waiter = self._waiter
if messages and waiter is not None and not waiter.done():
# don't set result twice
waiter.set_result(None)
self._upgrade = upgraded
if upgraded and tail:
self._message_tail = tail
# no parser, just store
elif self._payload_parser is None and self._upgrade and data:
self._message_tail += data
# feed payload
elif data:
eof, tail = self._payload_parser.feed_data(data)
if eof:
self.close()
def keep_alive(self, val: bool) -> None:
"""Set keep-alive connection mode.
:param bool val: new state.
"""
self._keepalive = val
if self._keepalive_handle:
self._keepalive_handle.cancel()
self._keepalive_handle = None
def close(self) -> None:
"""Close connection.
Stop accepting new pipelining messages and close
connection when handlers done processing messages.
"""
self._close = True
if self._waiter:
self._waiter.cancel()
def force_close(self) -> None:
"""Forcefully close connection."""
self._force_close = True
if self._waiter:
self._waiter.cancel()
if self.transport is not None:
self.transport.close()
self.transport = None
def log_access(
self, request: BaseRequest, response: StreamResponse, time: float
) -> None:
if self.access_logger is not None:
self.access_logger.log(request, response, self._loop.time() - time)
def log_debug(self, *args: Any, **kw: Any) -> None:
if self.debug:
self.logger.debug(*args, **kw)
def log_exception(self, *args: Any, **kw: Any) -> None:
self.logger.exception(*args, **kw)
def _process_keepalive(self) -> None:
if self._force_close or not self._keepalive:
return
next_t = self._keepalive_time + self._keepalive_timeout
# handler in idle state
if self._waiter:
if self._loop.time() > next_t:
self.force_close()
return
# not all request handlers are done,
# reschedule itself to next second
self._keepalive_handle = self._loop.call_later(
self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive
)
async def _handle_request(
self,
request: BaseRequest,
start_time: float,
request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
) -> Tuple[StreamResponse, bool]:
assert self._request_handler is not None
try:
try:
self._current_request = request
resp = await request_handler(request)
finally:
self._current_request = None
except HTTPException as exc:
resp = exc
reset = await self.finish_response(request, resp, start_time)
except asyncio.CancelledError:
raise
except asyncio.TimeoutError as exc:
self.log_debug("Request handler timed out.", exc_info=exc)
resp = self.handle_error(request, 504)
reset = await self.finish_response(request, resp, start_time)
except Exception as exc:
resp = self.handle_error(request, 500, exc)
reset = await self.finish_response(request, resp, start_time)
else:
# Deprecation warning (See #2415)
if getattr(resp, "__http_exception__", False):
warnings.warn(
"returning HTTPException object is deprecated "
"(#2415) and will be removed, "
"please raise the exception instead",
DeprecationWarning,
)
reset = await self.finish_response(request, resp, start_time)
return resp, reset
async def start(self) -> None:
"""Process incoming request.
It reads the request line, request headers and request payload, then
calls the handle_request() method. A subclass has to override
handle_request(). start() handles various exceptions in request
or response handling. The connection is always closed unless
keep_alive(True) is specified.
"""
loop = self._loop
handler = self._task_handler
assert handler is not None
manager = self._manager
assert manager is not None
keepalive_timeout = self._keepalive_timeout
resp = None
assert self._request_factory is not None
assert self._request_handler is not None
while not self._force_close:
if not self._messages:
try:
# wait for next request
self._waiter = loop.create_future()
await self._waiter
except asyncio.CancelledError:
break
finally:
self._waiter = None
message, payload = self._messages.popleft()
start = loop.time()
manager.requests_count += 1
writer = StreamWriter(self, loop)
if isinstance(message, _ErrInfo):
# make request_factory work
request_handler = self._make_error_handler(message)
message = ERROR
else:
request_handler = self._request_handler
request = self._request_factory(message, payload, self, writer, handler)
try:
# a new task is used for copy context vars (#3406)
task = self._loop.create_task(
self._handle_request(request, start, request_handler)
)
try:
resp, reset = await task
except (asyncio.CancelledError, ConnectionError):
self.log_debug("Ignored premature client disconnection")
break
# Drop the processed task from asyncio.Task.all_tasks() early
del task
if reset:
self.log_debug("Ignored premature client disconnection 2")
break
# notify server about keep-alive
self._keepalive = bool(resp.keep_alive)
# check payload
if not payload.is_eof():
lingering_time = self._lingering_time
if not self._force_close and lingering_time:
self.log_debug(
"Start lingering close timer for %s sec.", lingering_time
)
now = loop.time()
end_t = now + lingering_time
with suppress(asyncio.TimeoutError, asyncio.CancelledError):
while not payload.is_eof() and now < end_t:
async with ceil_timeout(end_t - now):
# read and ignore
await payload.readany()
now = loop.time()
# if payload still uncompleted
if not payload.is_eof() and not self._force_close:
self.log_debug("Uncompleted request.")
self.close()
payload.set_exception(PayloadAccessError())
except asyncio.CancelledError:
self.log_debug("Ignored premature client disconnection ")
break
except RuntimeError as exc:
if self.debug:
self.log_exception("Unhandled runtime exception", exc_info=exc)
self.force_close()
except Exception as exc:
self.log_exception("Unhandled exception", exc_info=exc)
self.force_close()
finally:
if self.transport is None and resp is not None:
self.log_debug("Ignored premature client disconnection.")
elif not self._force_close:
if self._keepalive and not self._close:
# start keep-alive timer
if keepalive_timeout is not None:
now = self._loop.time()
self._keepalive_time = now
if self._keepalive_handle is None:
self._keepalive_handle = loop.call_at(
now + keepalive_timeout, self._process_keepalive
)
else:
break
# remove handler, close transport if no handlers left
if not self._force_close:
self._task_handler = None
if self.transport is not None:
self.transport.close()
async def finish_response(
self, request: BaseRequest, resp: StreamResponse, start_time: float
) -> bool:
"""Prepare the response and write_eof, then log access.
This has to
be called within the context of any exception so the access logger
can get exception information. Returns True if the client disconnects
prematurely.
"""
if self._request_parser is not None:
self._request_parser.set_upgraded(False)
self._upgrade = False
if self._message_tail:
self._request_parser.feed_data(self._message_tail)
self._message_tail = b""
try:
prepare_meth = resp.prepare
except AttributeError:
if resp is None:
raise RuntimeError("Missing return " "statement on request handler")
else:
raise RuntimeError(
"Web-handler should return "
"a response instance, "
"got {!r}".format(resp)
)
try:
await prepare_meth(request)
await resp.write_eof()
except ConnectionError:
self.log_access(request, resp, start_time)
return True
else:
self.log_access(request, resp, start_time)
return False
def handle_error(
self,
request: BaseRequest,
status: int = 500,
exc: Optional[BaseException] = None,
message: Optional[str] = None,
) -> StreamResponse:
"""Handle errors.
Returns HTTP response with specific status code. Logs additional
information. It always closes current connection.
"""
self.log_exception("Error handling request", exc_info=exc)
# some data already got sent, connection is broken
if request.writer.output_size > 0:
raise ConnectionError(
"Response is sent already, cannot send another response "
"with the error message"
)
ct = "text/plain"
if status == HTTPStatus.INTERNAL_SERVER_ERROR:
title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
tb = None
if self.debug:
with suppress(Exception):
tb = traceback.format_exc()
if "text/html" in request.headers.get("Accept", ""):
if tb:
tb = html_escape(tb)
msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
message = (
"<html><head>"
"<title>{title}</title>"
"</head><body>\n<h1>{title}</h1>"
"\n{msg}\n</body></html>\n"
).format(title=title, msg=msg)
ct = "text/html"
else:
if tb:
msg = tb
message = title + "\n\n" + msg
resp = Response(status=status, text=message, content_type=ct)
resp.force_close()
return resp
def _make_error_handler(
self, err_info: _ErrInfo
) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
async def handler(request: BaseRequest) -> StreamResponse:
return self.handle_error(
request, err_info.status, err_info.exc, err_info.message
)
return handler
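# --- Editor's sketch (not part of aiohttp): RequestHandler instances are
# produced by web_server.Server; a minimal low-level server, with the
# handler body and bind address as assumptions.
async def _example_low_level_server() -> None:
    from aiohttp import web
    async def handler(request: BaseRequest) -> Response:
        return Response(text="OK")
    runner = web.ServerRunner(web.Server(handler))
    await runner.setup()
    site = web.TCPSite(runner, "127.0.0.1", 0)
    await site.start()  # each connection gets a RequestHandler protocol
    await runner.cleanup()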
| 22,399 | Python | 31.941176 | 87 | 0.561052 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/streams.py | import asyncio
import collections
import warnings
from typing import Awaitable, Callable, Deque, Generic, List, Optional, Tuple, TypeVar
from .base_protocol import BaseProtocol
from .helpers import BaseTimerContext, set_exception, set_result
from .log import internal_logger
from .typedefs import Final
__all__ = (
"EMPTY_PAYLOAD",
"EofStream",
"StreamReader",
"DataQueue",
"FlowControlDataQueue",
)
_T = TypeVar("_T")
class EofStream(Exception):
"""eof stream indication."""
class AsyncStreamIterator(Generic[_T]):
def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
self.read_func = read_func
def __aiter__(self) -> "AsyncStreamIterator[_T]":
return self
async def __anext__(self) -> _T:
try:
rv = await self.read_func()
except EofStream:
raise StopAsyncIteration
if rv == b"":
raise StopAsyncIteration
return rv
class ChunkTupleAsyncStreamIterator:
def __init__(self, stream: "StreamReader") -> None:
self._stream = stream
def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
return self
async def __anext__(self) -> Tuple[bytes, bool]:
rv = await self._stream.readchunk()
if rv == (b"", False):
raise StopAsyncIteration
return rv
class AsyncStreamReaderMixin:
def __aiter__(self) -> AsyncStreamIterator[bytes]:
return AsyncStreamIterator(self.readline) # type: ignore[attr-defined]
def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
"""Returns an asynchronous iterator that yields chunks of size n.
Available on Python 3.5+ only.
"""
return AsyncStreamIterator(
lambda: self.read(n) # type: ignore[attr-defined,no-any-return]
)
def iter_any(self) -> AsyncStreamIterator[bytes]:
"""Yield all available data as soon as it is received.
Available on Python 3.5+ only.
"""
return AsyncStreamIterator(self.readany) # type: ignore[attr-defined]
def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
"""Yield chunks of data as they are received by the server.
The yielded objects are tuples
of (bytes, bool) as returned by the StreamReader.readchunk method.
Available on Python 3.5+ only.
"""
return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type]
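# --- Editor's sketch (not part of aiohttp): the iteration API from the
# mixin above, driven by hand; the mock protocol and fed bytes are
# assumptions (StreamReader and its readline() are defined in this module).
async def _example_iterate_stream() -> None:
    from unittest import mock
    protocol = mock.Mock(_reading_paused=False)
    reader = StreamReader(protocol, limit=2**16, loop=asyncio.get_event_loop())
    reader.feed_data(b"spam\neggs\n")
    reader.feed_eof()
    async for line in reader:  # AsyncStreamReaderMixin.__aiter__ -> readline
        assert line.endswith(b"\n")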
class StreamReader(AsyncStreamReaderMixin):
"""An enhancement of asyncio.StreamReader.
Supports asynchronous iteration by line, chunk or as available::
async for line in reader:
...
async for chunk in reader.iter_chunked(1024):
...
async for slice in reader.iter_any():
...
"""
total_bytes = 0
def __init__(
self,
protocol: BaseProtocol,
limit: int,
*,
timer: Optional[BaseTimerContext] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
self._protocol = protocol
self._low_water = limit
self._high_water = limit * 2
if loop is None:
loop = asyncio.get_event_loop()
self._loop = loop
self._size = 0
self._cursor = 0
self._http_chunk_splits: Optional[List[int]] = None
self._buffer: Deque[bytes] = collections.deque()
self._buffer_offset = 0
self._eof = False
self._waiter: Optional[asyncio.Future[None]] = None
self._eof_waiter: Optional[asyncio.Future[None]] = None
self._exception: Optional[BaseException] = None
self._timer = timer
self._eof_callbacks: List[Callable[[], None]] = []
def __repr__(self) -> str:
info = [self.__class__.__name__]
if self._size:
info.append("%d bytes" % self._size)
if self._eof:
info.append("eof")
if self._low_water != 2**16: # default limit
info.append("low=%d high=%d" % (self._low_water, self._high_water))
if self._waiter:
info.append("w=%r" % self._waiter)
if self._exception:
info.append("e=%r" % self._exception)
return "<%s>" % " ".join(info)
def get_read_buffer_limits(self) -> Tuple[int, int]:
return (self._low_water, self._high_water)
def exception(self) -> Optional[BaseException]:
return self._exception
def set_exception(self, exc: BaseException) -> None:
self._exception = exc
self._eof_callbacks.clear()
waiter = self._waiter
if waiter is not None:
self._waiter = None
set_exception(waiter, exc)
waiter = self._eof_waiter
if waiter is not None:
self._eof_waiter = None
set_exception(waiter, exc)
def on_eof(self, callback: Callable[[], None]) -> None:
if self._eof:
try:
callback()
except Exception:
internal_logger.exception("Exception in eof callback")
else:
self._eof_callbacks.append(callback)
def feed_eof(self) -> None:
self._eof = True
waiter = self._waiter
if waiter is not None:
self._waiter = None
set_result(waiter, None)
waiter = self._eof_waiter
if waiter is not None:
self._eof_waiter = None
set_result(waiter, None)
for cb in self._eof_callbacks:
try:
cb()
except Exception:
internal_logger.exception("Exception in eof callback")
self._eof_callbacks.clear()
def is_eof(self) -> bool:
"""Return True if 'feed_eof' was called."""
return self._eof
def at_eof(self) -> bool:
"""Return True if the buffer is empty and 'feed_eof' was called."""
return self._eof and not self._buffer
async def wait_eof(self) -> None:
if self._eof:
return
assert self._eof_waiter is None
self._eof_waiter = self._loop.create_future()
try:
await self._eof_waiter
finally:
self._eof_waiter = None
def unread_data(self, data: bytes) -> None:
"""rollback reading some data from stream, inserting it to buffer head."""
warnings.warn(
"unread_data() is deprecated "
"and will be removed in future releases (#3260)",
DeprecationWarning,
stacklevel=2,
)
if not data:
return
if self._buffer_offset:
self._buffer[0] = self._buffer[0][self._buffer_offset :]
self._buffer_offset = 0
self._size += len(data)
self._cursor -= len(data)
self._buffer.appendleft(data)
self._eof_counter = 0
# TODO: size is ignored, remove the param later
def feed_data(self, data: bytes, size: int = 0) -> None:
assert not self._eof, "feed_data after feed_eof"
if not data:
return
self._size += len(data)
self._buffer.append(data)
self.total_bytes += len(data)
waiter = self._waiter
if waiter is not None:
self._waiter = None
set_result(waiter, None)
if self._size > self._high_water and not self._protocol._reading_paused:
self._protocol.pause_reading()
def begin_http_chunk_receiving(self) -> None:
if self._http_chunk_splits is None:
if self.total_bytes:
raise RuntimeError(
"Called begin_http_chunk_receiving when" "some data was already fed"
)
self._http_chunk_splits = []
def end_http_chunk_receiving(self) -> None:
if self._http_chunk_splits is None:
raise RuntimeError(
"Called end_chunk_receiving without calling "
"begin_chunk_receiving first"
)
# self._http_chunk_splits contains logical byte offsets from start of
# the body transfer. Each offset is the offset of the end of a chunk.
# "Logical" means bytes, accessible for a user.
        # If no chunks containing logical data were received, the current
        # position is definitely zero.
pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0
if self.total_bytes == pos:
# We should not add empty chunks here. So we check for that.
# Note, when chunked + gzip is used, we can receive a chunk
# of compressed data, but that data may not be enough for gzip FSM
# to yield any uncompressed data. That's why current position may
# not change after receiving a chunk.
return
self._http_chunk_splits.append(self.total_bytes)
# wake up readchunk when end of http chunk received
waiter = self._waiter
if waiter is not None:
self._waiter = None
set_result(waiter, None)
async def _wait(self, func_name: str) -> None:
# StreamReader uses a future to link the protocol feed_data() method
# to a read coroutine. Running two read coroutines at the same time
        # would have unexpected behaviour. It would not be possible to know
# which coroutine would get the next data.
if self._waiter is not None:
raise RuntimeError(
"%s() called while another coroutine is "
"already waiting for incoming data" % func_name
)
waiter = self._waiter = self._loop.create_future()
try:
if self._timer:
with self._timer:
await waiter
else:
await waiter
finally:
self._waiter = None
async def readline(self) -> bytes:
return await self.readuntil()
async def readuntil(self, separator: bytes = b"\n") -> bytes:
seplen = len(separator)
if seplen == 0:
raise ValueError("Separator should be at least one-byte string")
if self._exception is not None:
raise self._exception
chunk = b""
chunk_size = 0
not_enough = True
while not_enough:
while self._buffer and not_enough:
offset = self._buffer_offset
ichar = self._buffer[0].find(separator, offset) + 1
# Read from current offset to found separator or to the end.
data = self._read_nowait_chunk(ichar - offset if ichar else -1)
chunk += data
chunk_size += len(data)
if ichar:
not_enough = False
if chunk_size > self._high_water:
raise ValueError("Chunk too big")
if self._eof:
break
if not_enough:
await self._wait("readuntil")
return chunk
async def read(self, n: int = -1) -> bytes:
if self._exception is not None:
raise self._exception
        # Migration note: with DataQueue you have to catch the EofStream
        # exception, so the common pattern is to run payload.read() inside an
        # infinite loop, which can become a real infinite loop with
        # StreamReader. Let's keep this code for one more major release.
if __debug__:
if self._eof and not self._buffer:
self._eof_counter = getattr(self, "_eof_counter", 0) + 1
if self._eof_counter > 5:
internal_logger.warning(
"Multiple access to StreamReader in eof state, "
"might be infinite loop.",
stack_info=True,
)
if not n:
return b""
if n < 0:
# This used to just loop creating a new waiter hoping to
# collect everything in self._buffer, but that would
# deadlock if the subprocess sends more than self.limit
# bytes. So just call self.readany() until EOF.
blocks = []
while True:
block = await self.readany()
if not block:
break
blocks.append(block)
return b"".join(blocks)
# TODO: should be `if` instead of `while`
        # because the waiter may be triggered on a chunk end,
        # without any data being fed
while not self._buffer and not self._eof:
await self._wait("read")
return self._read_nowait(n)
async def readany(self) -> bytes:
if self._exception is not None:
raise self._exception
# TODO: should be `if` instead of `while`
        # because the waiter may be triggered on a chunk end,
        # without any data being fed
while not self._buffer and not self._eof:
await self._wait("readany")
return self._read_nowait(-1)
async def readchunk(self) -> Tuple[bytes, bool]:
"""Returns a tuple of (data, end_of_http_chunk).
When chunked transfer
encoding is used, end_of_http_chunk is a boolean indicating if the end
        of the data corresponds to the end of an HTTP chunk, otherwise it is
always False.
"""
while True:
if self._exception is not None:
raise self._exception
while self._http_chunk_splits:
pos = self._http_chunk_splits.pop(0)
if pos == self._cursor:
return (b"", True)
if pos > self._cursor:
return (self._read_nowait(pos - self._cursor), True)
internal_logger.warning(
"Skipping HTTP chunk end due to data "
"consumption beyond chunk boundary"
)
if self._buffer:
return (self._read_nowait_chunk(-1), False)
# return (self._read_nowait(-1), False)
if self._eof:
# Special case for signifying EOF.
# (b'', True) is not a final return value actually.
return (b"", False)
await self._wait("readchunk")
async def readexactly(self, n: int) -> bytes:
if self._exception is not None:
raise self._exception
blocks: List[bytes] = []
while n > 0:
block = await self.read(n)
if not block:
partial = b"".join(blocks)
raise asyncio.IncompleteReadError(partial, len(partial) + n)
blocks.append(block)
n -= len(block)
return b"".join(blocks)
def read_nowait(self, n: int = -1) -> bytes:
# default was changed to be consistent with .read(-1)
#
        # I believe most users don't know about the method and
        # they are not affected.
if self._exception is not None:
raise self._exception
if self._waiter and not self._waiter.done():
raise RuntimeError(
"Called while some coroutine is waiting for incoming data."
)
return self._read_nowait(n)
def _read_nowait_chunk(self, n: int) -> bytes:
first_buffer = self._buffer[0]
offset = self._buffer_offset
if n != -1 and len(first_buffer) - offset > n:
data = first_buffer[offset : offset + n]
self._buffer_offset += n
elif offset:
self._buffer.popleft()
data = first_buffer[offset:]
self._buffer_offset = 0
else:
data = self._buffer.popleft()
self._size -= len(data)
self._cursor += len(data)
chunk_splits = self._http_chunk_splits
# Prevent memory leak: drop useless chunk splits
while chunk_splits and chunk_splits[0] < self._cursor:
chunk_splits.pop(0)
if self._size < self._low_water and self._protocol._reading_paused:
self._protocol.resume_reading()
return data
def _read_nowait(self, n: int) -> bytes:
"""Read not more than n bytes, or whole buffer if n == -1"""
chunks = []
while self._buffer:
chunk = self._read_nowait_chunk(n)
chunks.append(chunk)
if n != -1:
n -= len(chunk)
if n == 0:
break
return b"".join(chunks) if chunks else b""
class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init]
def __init__(self) -> None:
pass
def exception(self) -> Optional[BaseException]:
return None
def set_exception(self, exc: BaseException) -> None:
pass
def on_eof(self, callback: Callable[[], None]) -> None:
try:
callback()
except Exception:
internal_logger.exception("Exception in eof callback")
def feed_eof(self) -> None:
pass
def is_eof(self) -> bool:
return True
def at_eof(self) -> bool:
return True
async def wait_eof(self) -> None:
return
def feed_data(self, data: bytes, n: int = 0) -> None:
pass
async def readline(self) -> bytes:
return b""
async def read(self, n: int = -1) -> bytes:
return b""
# TODO add async def readuntil
async def readany(self) -> bytes:
return b""
async def readchunk(self) -> Tuple[bytes, bool]:
return (b"", True)
async def readexactly(self, n: int) -> bytes:
raise asyncio.IncompleteReadError(b"", n)
def read_nowait(self, n: int = -1) -> bytes:
return b""
EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()
class DataQueue(Generic[_T]):
"""DataQueue is a general-purpose blocking queue with one reader."""
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
self._eof = False
self._waiter: Optional[asyncio.Future[None]] = None
self._exception: Optional[BaseException] = None
self._size = 0
self._buffer: Deque[Tuple[_T, int]] = collections.deque()
def __len__(self) -> int:
return len(self._buffer)
def is_eof(self) -> bool:
return self._eof
def at_eof(self) -> bool:
return self._eof and not self._buffer
def exception(self) -> Optional[BaseException]:
return self._exception
def set_exception(self, exc: BaseException) -> None:
self._eof = True
self._exception = exc
waiter = self._waiter
if waiter is not None:
self._waiter = None
set_exception(waiter, exc)
def feed_data(self, data: _T, size: int = 0) -> None:
self._size += size
self._buffer.append((data, size))
waiter = self._waiter
if waiter is not None:
self._waiter = None
set_result(waiter, None)
def feed_eof(self) -> None:
self._eof = True
waiter = self._waiter
if waiter is not None:
self._waiter = None
set_result(waiter, None)
async def read(self) -> _T:
if not self._buffer and not self._eof:
assert not self._waiter
self._waiter = self._loop.create_future()
try:
await self._waiter
except (asyncio.CancelledError, asyncio.TimeoutError):
self._waiter = None
raise
if self._buffer:
data, size = self._buffer.popleft()
self._size -= size
return data
else:
if self._exception is not None:
raise self._exception
else:
raise EofStream
def __aiter__(self) -> AsyncStreamIterator[_T]:
return AsyncStreamIterator(self.read)
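# Example sketch (not part of the aiohttp API): draining a DataQueue until
# feed_eof() makes read() raise EofStream. Names are illustrative.
async def _example_drain_queue(queue: "DataQueue[bytes]") -> List[bytes]:
    items: List[bytes] = []
    try:
        while True:
            items.append(await queue.read())
    except EofStream:
        return items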
class FlowControlDataQueue(DataQueue[_T]):
"""FlowControlDataQueue resumes and pauses an underlying stream.
It is a destination for parsed data.
"""
def __init__(
self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
) -> None:
super().__init__(loop=loop)
self._protocol = protocol
self._limit = limit * 2
def feed_data(self, data: _T, size: int = 0) -> None:
super().feed_data(data, size)
if self._size > self._limit and not self._protocol._reading_paused:
self._protocol.pause_reading()
async def read(self) -> _T:
try:
return await super().read()
finally:
if self._size < self._limit and self._protocol._reading_paused:
self._protocol.resume_reading()
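# Example sketch (not part of the aiohttp API): feed_data() pauses the
# protocol once the buffered size exceeds 2 * limit, and read() resumes it
# after draining. The limit and payload sizes below are illustrative.
async def _example_flow_control(protocol: BaseProtocol) -> None:
    queue: FlowControlDataQueue[bytes] = FlowControlDataQueue(
        protocol, 2**14, loop=asyncio.get_event_loop()
    )
    queue.feed_data(b"x" * 2**16, 2**16)  # above the limit: reading pauses
    await queue.read()                    # drained below the limit: resumes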
| 20,758 | Python | 30.405446 | 88 | 0.55646 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/resolver.py | import asyncio
import socket
from typing import Any, Dict, List, Optional, Type, Union
from .abc import AbstractResolver
from .helpers import get_running_loop
__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
try:
import aiodns
# aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')
except ImportError: # pragma: no cover
aiodns = None
aiodns_default = False
class ThreadedResolver(AbstractResolver):
"""Threaded resolver.
Uses an Executor for synchronous getaddrinfo() calls.
concurrent.futures.ThreadPoolExecutor is used by default.
"""
def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
self._loop = get_running_loop(loop)
async def resolve(
self, hostname: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
infos = await self._loop.getaddrinfo(
hostname,
port,
type=socket.SOCK_STREAM,
family=family,
flags=socket.AI_ADDRCONFIG,
)
hosts = []
for family, _, proto, _, address in infos:
if family == socket.AF_INET6:
if len(address) < 3:
                    # IPv6 is not supported by the Python build,
                    # or IPv6 is not enabled on the host
continue
if address[3]: # type: ignore[misc]
# This is essential for link-local IPv6 addresses.
# LL IPv6 is a VERY rare case. Strictly speaking, we should use
                    # getnameinfo() unconditionally, but performance matters here.
host, _port = socket.getnameinfo(
address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
)
port = int(_port)
else:
host, port = address[:2]
else: # IPv4
assert family == socket.AF_INET
host, port = address # type: ignore[misc]
hosts.append(
{
"hostname": hostname,
"host": host,
"port": port,
"family": family,
"proto": proto,
"flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
}
)
return hosts
async def close(self) -> None:
pass
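# Example sketch (not part of the aiohttp API): resolving a host with the
# threaded resolver; "example.com" is only an illustrative hostname.
async def _example_threaded_resolve() -> None:
    resolver = ThreadedResolver()
    infos = await resolver.resolve("example.com", 80)
    # Each entry carries host/port/family/proto, ready for a socket connect.
    print(infos[0]["host"], infos[0]["port"])
    await resolver.close()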
class AsyncResolver(AbstractResolver):
"""Use the `aiodns` package to make asynchronous DNS lookups"""
def __init__(
self,
loop: Optional[asyncio.AbstractEventLoop] = None,
*args: Any,
**kwargs: Any
) -> None:
if aiodns is None:
raise RuntimeError("Resolver requires aiodns library")
self._loop = get_running_loop(loop)
self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs)
if not hasattr(self._resolver, "gethostbyname"):
# aiodns 1.1 is not available, fallback to DNSResolver.query
self.resolve = self._resolve_with_query # type: ignore
async def resolve(
self, host: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
try:
resp = await self._resolver.gethostbyname(host, family)
except aiodns.error.DNSError as exc:
msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed"
raise OSError(msg) from exc
hosts = []
for address in resp.addresses:
hosts.append(
{
"hostname": host,
"host": address,
"port": port,
"family": family,
"proto": 0,
"flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
}
)
if not hosts:
raise OSError("DNS lookup failed")
return hosts
async def _resolve_with_query(
self, host: str, port: int = 0, family: int = socket.AF_INET
) -> List[Dict[str, Any]]:
if family == socket.AF_INET6:
qtype = "AAAA"
else:
qtype = "A"
try:
resp = await self._resolver.query(host, qtype)
except aiodns.error.DNSError as exc:
msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed"
raise OSError(msg) from exc
hosts = []
for rr in resp:
hosts.append(
{
"hostname": host,
"host": rr.host,
"port": port,
"family": family,
"proto": 0,
"flags": socket.AI_NUMERICHOST,
}
)
if not hosts:
raise OSError("DNS lookup failed")
return hosts
async def close(self) -> None:
self._resolver.cancel()
_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
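# Example sketch (not part of the aiohttp API): DefaultResolver is a class
# alias, so instantiating it yields whichever implementation was selected
# above (the threaded resolver unless aiodns support is enabled).
async def _example_default_resolver() -> None:
    resolver = DefaultResolver()
    assert isinstance(resolver, (ThreadedResolver, AsyncResolver))
    await resolver.close()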
| 5,092 | Python | 30.63354 | 85 | 0.522388 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/cookiejar.py | import asyncio
import contextlib
import datetime
import os # noqa
import pathlib
import pickle
import re
from collections import defaultdict
from http.cookies import BaseCookie, Morsel, SimpleCookie
from typing import ( # noqa
DefaultDict,
Dict,
Iterable,
Iterator,
List,
Mapping,
Optional,
Set,
Tuple,
Union,
cast,
)
from yarl import URL
from .abc import AbstractCookieJar, ClearCookiePredicate
from .helpers import is_ip_address, next_whole_second
from .typedefs import LooseCookies, PathLike, StrOrURL
__all__ = ("CookieJar", "DummyCookieJar")
CookieItem = Union[str, "Morsel[str]"]
class CookieJar(AbstractCookieJar):
"""Implements cookie storage adhering to RFC 6265."""
DATE_TOKENS_RE = re.compile(
r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
)
DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
DATE_MONTH_RE = re.compile(
"(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
re.I,
)
DATE_YEAR_RE = re.compile(r"(\d{2,4})")
MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)
MAX_32BIT_TIME = datetime.datetime.utcfromtimestamp(2**31 - 1)
def __init__(
self,
*,
unsafe: bool = False,
quote_cookie: bool = True,
treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
super().__init__(loop=loop)
self._cookies: DefaultDict[str, SimpleCookie[str]] = defaultdict(SimpleCookie)
self._host_only_cookies: Set[Tuple[str, str]] = set()
self._unsafe = unsafe
self._quote_cookie = quote_cookie
if treat_as_secure_origin is None:
treat_as_secure_origin = []
elif isinstance(treat_as_secure_origin, URL):
treat_as_secure_origin = [treat_as_secure_origin.origin()]
elif isinstance(treat_as_secure_origin, str):
treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
else:
treat_as_secure_origin = [
URL(url).origin() if isinstance(url, str) else url.origin()
for url in treat_as_secure_origin
]
self._treat_as_secure_origin = treat_as_secure_origin
self._next_expiration = next_whole_second()
self._expirations: Dict[Tuple[str, str], datetime.datetime] = {}
# #4515: datetime.max may not be representable on 32-bit platforms
self._max_time = self.MAX_TIME
try:
self._max_time.timestamp()
except OverflowError:
self._max_time = self.MAX_32BIT_TIME
def save(self, file_path: PathLike) -> None:
file_path = pathlib.Path(file_path)
with file_path.open(mode="wb") as f:
pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
def load(self, file_path: PathLike) -> None:
file_path = pathlib.Path(file_path)
with file_path.open(mode="rb") as f:
self._cookies = pickle.load(f)
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
if predicate is None:
self._next_expiration = next_whole_second()
self._cookies.clear()
self._host_only_cookies.clear()
self._expirations.clear()
return
to_del = []
now = datetime.datetime.now(datetime.timezone.utc)
for domain, cookie in self._cookies.items():
for name, morsel in cookie.items():
key = (domain, name)
if (
key in self._expirations and self._expirations[key] <= now
) or predicate(morsel):
to_del.append(key)
for domain, name in to_del:
key = (domain, name)
self._host_only_cookies.discard(key)
if key in self._expirations:
del self._expirations[(domain, name)]
self._cookies[domain].pop(name, None)
next_expiration = min(self._expirations.values(), default=self._max_time)
try:
self._next_expiration = next_expiration.replace(
microsecond=0
) + datetime.timedelta(seconds=1)
except OverflowError:
self._next_expiration = self._max_time
def clear_domain(self, domain: str) -> None:
self.clear(lambda x: self._is_domain_match(domain, x["domain"]))
def __iter__(self) -> "Iterator[Morsel[str]]":
self._do_expiration()
for val in self._cookies.values():
yield from val.values()
def __len__(self) -> int:
return sum(1 for i in self)
def _do_expiration(self) -> None:
self.clear(lambda x: False)
def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None:
self._next_expiration = min(self._next_expiration, when)
self._expirations[(domain, name)] = when
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
"""Update cookies."""
hostname = response_url.raw_host
if not self._unsafe and is_ip_address(hostname):
# Don't accept cookies from IPs
return
if isinstance(cookies, Mapping):
cookies = cookies.items()
for name, cookie in cookies:
if not isinstance(cookie, Morsel):
tmp: SimpleCookie[str] = SimpleCookie()
tmp[name] = cookie # type: ignore[assignment]
cookie = tmp[name]
domain = cookie["domain"]
# ignore domains with trailing dots
if domain.endswith("."):
domain = ""
del cookie["domain"]
if not domain and hostname is not None:
# Set the cookie's domain to the response hostname
# and set its host-only-flag
self._host_only_cookies.add((hostname, name))
domain = cookie["domain"] = hostname
if domain.startswith("."):
# Remove leading dot
domain = domain[1:]
cookie["domain"] = domain
if hostname and not self._is_domain_match(domain, hostname):
# Setting cookies for different domains is not allowed
continue
path = cookie["path"]
if not path or not path.startswith("/"):
# Set the cookie's path to the response path
path = response_url.path
if not path.startswith("/"):
path = "/"
else:
# Cut everything from the last slash to the end
path = "/" + path[1 : path.rfind("/")]
cookie["path"] = path
max_age = cookie["max-age"]
if max_age:
try:
delta_seconds = int(max_age)
try:
max_age_expiration = datetime.datetime.now(
datetime.timezone.utc
) + datetime.timedelta(seconds=delta_seconds)
except OverflowError:
max_age_expiration = self._max_time
self._expire_cookie(max_age_expiration, domain, name)
except ValueError:
cookie["max-age"] = ""
else:
expires = cookie["expires"]
if expires:
expire_time = self._parse_date(expires)
if expire_time:
self._expire_cookie(expire_time, domain, name)
else:
cookie["expires"] = ""
self._cookies[domain][name] = cookie
self._do_expiration()
def filter_cookies(
self, request_url: URL = URL()
) -> Union["BaseCookie[str]", "SimpleCookie[str]"]:
"""Returns this jar's cookies filtered by their attributes."""
self._do_expiration()
request_url = URL(request_url)
filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = (
SimpleCookie() if self._quote_cookie else BaseCookie()
)
hostname = request_url.raw_host or ""
request_origin = URL()
with contextlib.suppress(ValueError):
request_origin = request_url.origin()
is_not_secure = (
request_url.scheme not in ("https", "wss")
and request_origin not in self._treat_as_secure_origin
)
for cookie in self:
name = cookie.key
domain = cookie["domain"]
# Send shared cookies
if not domain:
filtered[name] = cookie.value
continue
if not self._unsafe and is_ip_address(hostname):
continue
if (domain, name) in self._host_only_cookies:
if domain != hostname:
continue
elif not self._is_domain_match(domain, hostname):
continue
if not self._is_path_match(request_url.path, cookie["path"]):
continue
if is_not_secure and cookie["secure"]:
continue
# It's critical we use the Morsel so the coded_value
# (based on cookie version) is preserved
mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
filtered[name] = mrsl_val
return filtered
@staticmethod
def _is_domain_match(domain: str, hostname: str) -> bool:
"""Implements domain matching adhering to RFC 6265."""
if hostname == domain:
return True
if not hostname.endswith(domain):
return False
non_matching = hostname[: -len(domain)]
if not non_matching.endswith("."):
return False
return not is_ip_address(hostname)
@staticmethod
def _is_path_match(req_path: str, cookie_path: str) -> bool:
"""Implements path matching adhering to RFC 6265."""
if not req_path.startswith("/"):
req_path = "/"
if req_path == cookie_path:
return True
if not req_path.startswith(cookie_path):
return False
if cookie_path.endswith("/"):
return True
non_matching = req_path[len(cookie_path) :]
return non_matching.startswith("/")
@classmethod
def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
"""Implements date string parsing adhering to RFC 6265."""
if not date_str:
return None
found_time = False
found_day = False
found_month = False
found_year = False
hour = minute = second = 0
day = 0
month = 0
year = 0
for token_match in cls.DATE_TOKENS_RE.finditer(date_str):
token = token_match.group("token")
if not found_time:
time_match = cls.DATE_HMS_TIME_RE.match(token)
if time_match:
found_time = True
hour, minute, second = (int(s) for s in time_match.groups())
continue
if not found_day:
day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
if day_match:
found_day = True
day = int(day_match.group())
continue
if not found_month:
month_match = cls.DATE_MONTH_RE.match(token)
if month_match:
found_month = True
assert month_match.lastindex is not None
month = month_match.lastindex
continue
if not found_year:
year_match = cls.DATE_YEAR_RE.match(token)
if year_match:
found_year = True
year = int(year_match.group())
if 70 <= year <= 99:
year += 1900
elif 0 <= year <= 69:
year += 2000
        if not all((found_day, found_month, found_year, found_time)):
return None
if not 1 <= day <= 31:
return None
if year < 1601 or hour > 23 or minute > 59 or second > 59:
return None
return datetime.datetime(
year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc
)
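# Example sketch (not part of the aiohttp API): a cookie round trip through
# the jar. The domain and cookie name are illustrative.
async def _example_cookie_roundtrip() -> None:
    jar = CookieJar()
    jar.update_cookies({"token": "abc"}, URL("http://example.com/"))
    sent = jar.filter_cookies(URL("http://example.com/page"))
    assert sent["token"].value == "abc"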
class DummyCookieJar(AbstractCookieJar):
"""Implements a dummy cookie storage.
It can be used with the ClientSession when no cookie processing is needed.
"""
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
super().__init__(loop=loop)
def __iter__(self) -> "Iterator[Morsel[str]]":
while False:
yield None
def __len__(self) -> int:
return 0
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
pass
def clear_domain(self, domain: str) -> None:
pass
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
pass
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
return SimpleCookie()
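# Example sketch (not part of the aiohttp API): DummyCookieJar silently drops
# everything, which is exactly what is wanted when cookie processing must be
# disabled. The cookie values are illustrative.
async def _example_dummy_jar() -> None:
    jar = DummyCookieJar()
    jar.update_cookies({"token": "abc"}, URL("http://example.com/"))
    assert len(jar) == 0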
| 13,514 | Python | 31.803398 | 87 | 0.542696 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/client.py | """HTTP Client for asyncio."""
import asyncio
import base64
import hashlib
import json
import os
import sys
import traceback
import warnings
from contextlib import suppress
from types import SimpleNamespace, TracebackType
from typing import (
Any,
Awaitable,
Callable,
Coroutine,
FrozenSet,
Generator,
Generic,
Iterable,
List,
Mapping,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
)
import attr
from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
from yarl import URL
from . import hdrs, http, payload
from .abc import AbstractCookieJar
from .client_exceptions import (
ClientConnectionError as ClientConnectionError,
ClientConnectorCertificateError as ClientConnectorCertificateError,
ClientConnectorError as ClientConnectorError,
ClientConnectorSSLError as ClientConnectorSSLError,
ClientError as ClientError,
ClientHttpProxyError as ClientHttpProxyError,
ClientOSError as ClientOSError,
ClientPayloadError as ClientPayloadError,
ClientProxyConnectionError as ClientProxyConnectionError,
ClientResponseError as ClientResponseError,
ClientSSLError as ClientSSLError,
ContentTypeError as ContentTypeError,
InvalidURL as InvalidURL,
ServerConnectionError as ServerConnectionError,
ServerDisconnectedError as ServerDisconnectedError,
ServerFingerprintMismatch as ServerFingerprintMismatch,
ServerTimeoutError as ServerTimeoutError,
TooManyRedirects as TooManyRedirects,
WSServerHandshakeError as WSServerHandshakeError,
)
from .client_reqrep import (
ClientRequest as ClientRequest,
ClientResponse as ClientResponse,
Fingerprint as Fingerprint,
RequestInfo as RequestInfo,
_merge_ssl_params,
)
from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse
from .connector import (
BaseConnector as BaseConnector,
NamedPipeConnector as NamedPipeConnector,
TCPConnector as TCPConnector,
UnixConnector as UnixConnector,
)
from .cookiejar import CookieJar
from .helpers import (
DEBUG,
PY_36,
BasicAuth,
TimeoutHandle,
ceil_timeout,
get_env_proxy_for_url,
get_running_loop,
sentinel,
strip_auth_from_url,
)
from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse
from .streams import FlowControlDataQueue
from .tracing import Trace, TraceConfig
from .typedefs import Final, JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
__all__ = (
# client_exceptions
"ClientConnectionError",
"ClientConnectorCertificateError",
"ClientConnectorError",
"ClientConnectorSSLError",
"ClientError",
"ClientHttpProxyError",
"ClientOSError",
"ClientPayloadError",
"ClientProxyConnectionError",
"ClientResponseError",
"ClientSSLError",
"ContentTypeError",
"InvalidURL",
"ServerConnectionError",
"ServerDisconnectedError",
"ServerFingerprintMismatch",
"ServerTimeoutError",
"TooManyRedirects",
"WSServerHandshakeError",
# client_reqrep
"ClientRequest",
"ClientResponse",
"Fingerprint",
"RequestInfo",
# connector
"BaseConnector",
"TCPConnector",
"UnixConnector",
"NamedPipeConnector",
# client_ws
"ClientWebSocketResponse",
# client
"ClientSession",
"ClientTimeout",
"request",
)
try:
from ssl import SSLContext
except ImportError: # pragma: no cover
SSLContext = object # type: ignore[misc,assignment]
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ClientTimeout:
total: Optional[float] = None
connect: Optional[float] = None
sock_read: Optional[float] = None
sock_connect: Optional[float] = None
# pool_queue_timeout: Optional[float] = None
# dns_resolution_timeout: Optional[float] = None
# socket_connect_timeout: Optional[float] = None
# connection_acquiring_timeout: Optional[float] = None
# new_connection_timeout: Optional[float] = None
# http_header_timeout: Optional[float] = None
# response_body_timeout: Optional[float] = None
# to create a timeout specific for a single request, either
# - create a completely new one to overwrite the default
# - or use http://www.attrs.org/en/stable/api.html#attr.evolve
# to overwrite the defaults
# 5 Minute default read timeout
DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)
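# Example sketch (not part of the aiohttp API): building a per-request timeout
# from the default via attr.evolve(), as the comment above suggests. The
# 30-second sock_read value is illustrative.
_EXAMPLE_TIMEOUT = attr.evolve(DEFAULT_TIMEOUT, sock_read=30)
# e.g. session.get(url, timeout=_EXAMPLE_TIMEOUT)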
_RetType = TypeVar("_RetType")
class ClientSession:
"""First-class interface for making HTTP requests."""
ATTRS = frozenset(
[
"_base_url",
"_source_traceback",
"_connector",
"requote_redirect_url",
"_loop",
"_cookie_jar",
"_connector_owner",
"_default_auth",
"_version",
"_json_serialize",
"_requote_redirect_url",
"_timeout",
"_raise_for_status",
"_auto_decompress",
"_trust_env",
"_default_headers",
"_skip_auto_headers",
"_request_class",
"_response_class",
"_ws_response_class",
"_trace_configs",
"_read_bufsize",
]
)
_source_traceback = None # type: Optional[traceback.StackSummary]
_connector = None # type: Optional[BaseConnector]
def __init__(
self,
base_url: Optional[StrOrURL] = None,
*,
connector: Optional[BaseConnector] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
cookies: Optional[LooseCookies] = None,
headers: Optional[LooseHeaders] = None,
skip_auto_headers: Optional[Iterable[str]] = None,
auth: Optional[BasicAuth] = None,
json_serialize: JSONEncoder = json.dumps,
request_class: Type[ClientRequest] = ClientRequest,
response_class: Type[ClientResponse] = ClientResponse,
ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
version: HttpVersion = http.HttpVersion11,
cookie_jar: Optional[AbstractCookieJar] = None,
connector_owner: bool = True,
raise_for_status: bool = False,
read_timeout: Union[float, object] = sentinel,
conn_timeout: Optional[float] = None,
timeout: Union[object, ClientTimeout] = sentinel,
auto_decompress: bool = True,
trust_env: bool = False,
requote_redirect_url: bool = True,
trace_configs: Optional[List[TraceConfig]] = None,
read_bufsize: int = 2**16,
) -> None:
if loop is None:
if connector is not None:
loop = connector._loop
loop = get_running_loop(loop)
if base_url is None or isinstance(base_url, URL):
self._base_url: Optional[URL] = base_url
else:
self._base_url = URL(base_url)
assert (
self._base_url.origin() == self._base_url
), "Only absolute URLs without path part are supported"
if connector is None:
connector = TCPConnector(loop=loop)
if connector._loop is not loop:
raise RuntimeError("Session and connector has to use same event loop")
self._loop = loop
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
if cookie_jar is None:
cookie_jar = CookieJar(loop=loop)
self._cookie_jar = cookie_jar
if cookies is not None:
self._cookie_jar.update_cookies(cookies)
self._connector = connector
self._connector_owner = connector_owner
self._default_auth = auth
self._version = version
self._json_serialize = json_serialize
if timeout is sentinel:
self._timeout = DEFAULT_TIMEOUT
if read_timeout is not sentinel:
warnings.warn(
"read_timeout is deprecated, " "use timeout argument instead",
DeprecationWarning,
stacklevel=2,
)
self._timeout = attr.evolve(self._timeout, total=read_timeout)
if conn_timeout is not None:
self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
warnings.warn(
"conn_timeout is deprecated, " "use timeout argument instead",
DeprecationWarning,
stacklevel=2,
)
else:
self._timeout = timeout # type: ignore[assignment]
if read_timeout is not sentinel:
raise ValueError(
"read_timeout and timeout parameters "
"conflict, please setup "
"timeout.read"
)
if conn_timeout is not None:
raise ValueError(
"conn_timeout and timeout parameters "
"conflict, please setup "
"timeout.connect"
)
self._raise_for_status = raise_for_status
self._auto_decompress = auto_decompress
self._trust_env = trust_env
self._requote_redirect_url = requote_redirect_url
self._read_bufsize = read_bufsize
# Convert to list of tuples
if headers:
real_headers: CIMultiDict[str] = CIMultiDict(headers)
else:
real_headers = CIMultiDict()
self._default_headers: CIMultiDict[str] = real_headers
if skip_auto_headers is not None:
self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
else:
self._skip_auto_headers = frozenset()
self._request_class = request_class
self._response_class = response_class
self._ws_response_class = ws_response_class
self._trace_configs = trace_configs or []
for trace_config in self._trace_configs:
trace_config.freeze()
def __init_subclass__(cls: Type["ClientSession"]) -> None:
warnings.warn(
"Inheritance class {} from ClientSession "
"is discouraged".format(cls.__name__),
DeprecationWarning,
stacklevel=2,
)
if DEBUG:
def __setattr__(self, name: str, val: Any) -> None:
if name not in self.ATTRS:
warnings.warn(
"Setting custom ClientSession.{} attribute "
"is discouraged".format(name),
DeprecationWarning,
stacklevel=2,
)
super().__setattr__(name, val)
def __del__(self, _warnings: Any = warnings) -> None:
if not self.closed:
if PY_36:
kwargs = {"source": self}
else:
kwargs = {}
_warnings.warn(
f"Unclosed client session {self!r}", ResourceWarning, **kwargs
)
context = {"client_session": self, "message": "Unclosed client session"}
if self._source_traceback is not None:
context["source_traceback"] = self._source_traceback
self._loop.call_exception_handler(context)
def request(
self, method: str, url: StrOrURL, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP request."""
return _RequestContextManager(self._request(method, url, **kwargs))
def _build_url(self, str_or_url: StrOrURL) -> URL:
url = URL(str_or_url)
if self._base_url is None:
return url
else:
assert not url.is_absolute() and url.path.startswith("/")
return self._base_url.join(url)
async def _request(
self,
method: str,
str_or_url: StrOrURL,
*,
params: Optional[Mapping[str, str]] = None,
data: Any = None,
json: Any = None,
cookies: Optional[LooseCookies] = None,
headers: Optional[LooseHeaders] = None,
skip_auto_headers: Optional[Iterable[str]] = None,
auth: Optional[BasicAuth] = None,
allow_redirects: bool = True,
max_redirects: int = 10,
compress: Optional[str] = None,
chunked: Optional[bool] = None,
expect100: bool = False,
raise_for_status: Optional[bool] = None,
read_until_eof: bool = True,
proxy: Optional[StrOrURL] = None,
proxy_auth: Optional[BasicAuth] = None,
timeout: Union[ClientTimeout, object] = sentinel,
verify_ssl: Optional[bool] = None,
fingerprint: Optional[bytes] = None,
ssl_context: Optional[SSLContext] = None,
ssl: Optional[Union[SSLContext, bool, Fingerprint]] = None,
proxy_headers: Optional[LooseHeaders] = None,
trace_request_ctx: Optional[SimpleNamespace] = None,
read_bufsize: Optional[int] = None,
) -> ClientResponse:
# NOTE: timeout clamps existing connect and read timeouts. We cannot
# set the default to None because we need to detect if the user wants
# to use the existing timeouts by setting timeout to None.
if self.closed:
raise RuntimeError("Session is closed")
ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
if data is not None and json is not None:
raise ValueError(
"data and json parameters can not be used at the same time"
)
elif json is not None:
data = payload.JsonPayload(json, dumps=self._json_serialize)
if not isinstance(chunked, bool) and chunked is not None:
warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
redirects = 0
history = []
version = self._version
# Merge with default headers and transform to CIMultiDict
headers = self._prepare_headers(headers)
proxy_headers = self._prepare_headers(proxy_headers)
try:
url = self._build_url(str_or_url)
except ValueError as e:
raise InvalidURL(str_or_url) from e
skip_headers = set(self._skip_auto_headers)
if skip_auto_headers is not None:
for i in skip_auto_headers:
skip_headers.add(istr(i))
if proxy is not None:
try:
proxy = URL(proxy)
except ValueError as e:
raise InvalidURL(proxy) from e
if timeout is sentinel:
real_timeout: ClientTimeout = self._timeout
else:
if not isinstance(timeout, ClientTimeout):
real_timeout = ClientTimeout(total=timeout) # type: ignore[arg-type]
else:
real_timeout = timeout
# timeout is cumulative for all request operations
# (request, redirects, responses, data consuming)
tm = TimeoutHandle(self._loop, real_timeout.total)
handle = tm.start()
if read_bufsize is None:
read_bufsize = self._read_bufsize
traces = [
Trace(
self,
trace_config,
trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
)
for trace_config in self._trace_configs
]
for trace in traces:
await trace.send_request_start(method, url.update_query(params), headers)
timer = tm.timer()
try:
with timer:
while True:
url, auth_from_url = strip_auth_from_url(url)
if auth and auth_from_url:
raise ValueError(
"Cannot combine AUTH argument with "
"credentials encoded in URL"
)
if auth is None:
auth = auth_from_url
if auth is None:
auth = self._default_auth
                    # It would be confusing to support an explicit
                    # Authorization header together with the auth argument
if (
headers is not None
and auth is not None
and hdrs.AUTHORIZATION in headers
):
raise ValueError(
"Cannot combine AUTHORIZATION header "
"with AUTH argument or credentials "
"encoded in URL"
)
all_cookies = self._cookie_jar.filter_cookies(url)
if cookies is not None:
tmp_cookie_jar = CookieJar()
tmp_cookie_jar.update_cookies(cookies)
req_cookies = tmp_cookie_jar.filter_cookies(url)
if req_cookies:
all_cookies.load(req_cookies)
if proxy is not None:
proxy = URL(proxy)
elif self._trust_env:
with suppress(LookupError):
proxy, proxy_auth = get_env_proxy_for_url(url)
req = self._request_class(
method,
url,
params=params,
headers=headers,
skip_auto_headers=skip_headers,
data=data,
cookies=all_cookies,
auth=auth,
version=version,
compress=compress,
chunked=chunked,
expect100=expect100,
loop=self._loop,
response_class=self._response_class,
proxy=proxy,
proxy_auth=proxy_auth,
timer=timer,
session=self,
ssl=ssl,
proxy_headers=proxy_headers,
traces=traces,
)
# connection timeout
try:
async with ceil_timeout(real_timeout.connect):
assert self._connector is not None
conn = await self._connector.connect(
req, traces=traces, timeout=real_timeout
)
except asyncio.TimeoutError as exc:
raise ServerTimeoutError(
"Connection timeout " "to host {}".format(url)
) from exc
assert conn.transport is not None
assert conn.protocol is not None
conn.protocol.set_response_params(
timer=timer,
skip_payload=method.upper() == "HEAD",
read_until_eof=read_until_eof,
auto_decompress=self._auto_decompress,
read_timeout=real_timeout.sock_read,
read_bufsize=read_bufsize,
)
try:
try:
resp = await req.send(conn)
try:
await resp.start(conn)
except BaseException:
resp.close()
raise
except BaseException:
conn.close()
raise
except ClientError:
raise
except OSError as exc:
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
raise
raise ClientOSError(*exc.args) from exc
self._cookie_jar.update_cookies(resp.cookies, resp.url)
# redirects
if resp.status in (301, 302, 303, 307, 308) and allow_redirects:
for trace in traces:
await trace.send_request_redirect(
method, url.update_query(params), headers, resp
)
redirects += 1
history.append(resp)
if max_redirects and redirects >= max_redirects:
resp.close()
raise TooManyRedirects(
history[0].request_info, tuple(history)
)
                        # For 301 and 302, mimic IE behaviour, now codified in the RFC
# https://github.com/kennethreitz/requests/pull/269
if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
resp.status in (301, 302) and resp.method == hdrs.METH_POST
):
method = hdrs.METH_GET
data = None
if headers.get(hdrs.CONTENT_LENGTH):
headers.pop(hdrs.CONTENT_LENGTH)
r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
hdrs.URI
)
if r_url is None:
# see github.com/aio-libs/aiohttp/issues/2022
break
else:
# reading from correct redirection
# response is forbidden
resp.release()
try:
parsed_url = URL(
r_url, encoded=not self._requote_redirect_url
)
except ValueError as e:
raise InvalidURL(r_url) from e
scheme = parsed_url.scheme
if scheme not in ("http", "https", ""):
resp.close()
raise ValueError("Can redirect only to http or https")
elif not scheme:
parsed_url = url.join(parsed_url)
if url.origin() != parsed_url.origin():
auth = None
headers.pop(hdrs.AUTHORIZATION, None)
url = parsed_url
params = None
resp.release()
continue
break
# check response status
if raise_for_status is None:
raise_for_status = self._raise_for_status
if raise_for_status:
resp.raise_for_status()
# register connection
if handle is not None:
if resp.connection is not None:
resp.connection.add_callback(handle.cancel)
else:
handle.cancel()
resp._history = tuple(history)
for trace in traces:
await trace.send_request_end(
method, url.update_query(params), headers, resp
)
return resp
except BaseException as e:
# cleanup timer
tm.close()
if handle:
handle.cancel()
handle = None
for trace in traces:
await trace.send_request_exception(
method, url.update_query(params), headers, e
)
raise
def ws_connect(
self,
url: StrOrURL,
*,
method: str = hdrs.METH_GET,
protocols: Iterable[str] = (),
timeout: float = 10.0,
receive_timeout: Optional[float] = None,
autoclose: bool = True,
autoping: bool = True,
heartbeat: Optional[float] = None,
auth: Optional[BasicAuth] = None,
origin: Optional[str] = None,
params: Optional[Mapping[str, str]] = None,
headers: Optional[LooseHeaders] = None,
proxy: Optional[StrOrURL] = None,
proxy_auth: Optional[BasicAuth] = None,
ssl: Union[SSLContext, bool, None, Fingerprint] = None,
verify_ssl: Optional[bool] = None,
fingerprint: Optional[bytes] = None,
ssl_context: Optional[SSLContext] = None,
proxy_headers: Optional[LooseHeaders] = None,
compress: int = 0,
max_msg_size: int = 4 * 1024 * 1024,
) -> "_WSRequestContextManager":
"""Initiate websocket connection."""
return _WSRequestContextManager(
self._ws_connect(
url,
method=method,
protocols=protocols,
timeout=timeout,
receive_timeout=receive_timeout,
autoclose=autoclose,
autoping=autoping,
heartbeat=heartbeat,
auth=auth,
origin=origin,
params=params,
headers=headers,
proxy=proxy,
proxy_auth=proxy_auth,
ssl=ssl,
verify_ssl=verify_ssl,
fingerprint=fingerprint,
ssl_context=ssl_context,
proxy_headers=proxy_headers,
compress=compress,
max_msg_size=max_msg_size,
)
)
async def _ws_connect(
self,
url: StrOrURL,
*,
method: str = hdrs.METH_GET,
protocols: Iterable[str] = (),
timeout: float = 10.0,
receive_timeout: Optional[float] = None,
autoclose: bool = True,
autoping: bool = True,
heartbeat: Optional[float] = None,
auth: Optional[BasicAuth] = None,
origin: Optional[str] = None,
params: Optional[Mapping[str, str]] = None,
headers: Optional[LooseHeaders] = None,
proxy: Optional[StrOrURL] = None,
proxy_auth: Optional[BasicAuth] = None,
ssl: Union[SSLContext, bool, None, Fingerprint] = None,
verify_ssl: Optional[bool] = None,
fingerprint: Optional[bytes] = None,
ssl_context: Optional[SSLContext] = None,
proxy_headers: Optional[LooseHeaders] = None,
compress: int = 0,
max_msg_size: int = 4 * 1024 * 1024,
) -> ClientWebSocketResponse:
if headers is None:
real_headers: CIMultiDict[str] = CIMultiDict()
else:
real_headers = CIMultiDict(headers)
default_headers = {
hdrs.UPGRADE: "websocket",
hdrs.CONNECTION: "upgrade",
hdrs.SEC_WEBSOCKET_VERSION: "13",
}
for key, value in default_headers.items():
real_headers.setdefault(key, value)
sec_key = base64.b64encode(os.urandom(16))
real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()
if protocols:
real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
if origin is not None:
real_headers[hdrs.ORIGIN] = origin
if compress:
extstr = ws_ext_gen(compress=compress)
real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr
ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
# send request
resp = await self.request(
method,
url,
params=params,
headers=real_headers,
read_until_eof=False,
auth=auth,
proxy=proxy,
proxy_auth=proxy_auth,
ssl=ssl,
proxy_headers=proxy_headers,
)
try:
# check handshake
if resp.status != 101:
raise WSServerHandshakeError(
resp.request_info,
resp.history,
message="Invalid response status",
status=resp.status,
headers=resp.headers,
)
if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
raise WSServerHandshakeError(
resp.request_info,
resp.history,
message="Invalid upgrade header",
status=resp.status,
headers=resp.headers,
)
if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
raise WSServerHandshakeError(
resp.request_info,
resp.history,
message="Invalid connection header",
status=resp.status,
headers=resp.headers,
)
# key calculation
r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
if r_key != match:
raise WSServerHandshakeError(
resp.request_info,
resp.history,
message="Invalid challenge response",
status=resp.status,
headers=resp.headers,
)
# websocket protocol
protocol = None
if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
resp_protocols = [
proto.strip()
for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
]
for proto in resp_protocols:
if proto in protocols:
protocol = proto
break
# websocket compress
notakeover = False
if compress:
compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
if compress_hdrs:
try:
compress, notakeover = ws_ext_parse(compress_hdrs)
except WSHandshakeError as exc:
raise WSServerHandshakeError(
resp.request_info,
resp.history,
message=exc.args[0],
status=resp.status,
headers=resp.headers,
) from exc
else:
compress = 0
notakeover = False
conn = resp.connection
assert conn is not None
conn_proto = conn.protocol
assert conn_proto is not None
transport = conn.transport
assert transport is not None
reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue(
conn_proto, 2**16, loop=self._loop
)
conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
writer = WebSocketWriter(
conn_proto,
transport,
use_mask=True,
compress=compress,
notakeover=notakeover,
)
except BaseException:
resp.close()
raise
else:
return self._ws_response_class(
reader,
writer,
protocol,
resp,
timeout,
autoclose,
autoping,
self._loop,
receive_timeout=receive_timeout,
heartbeat=heartbeat,
compress=compress,
client_notakeover=notakeover,
)
def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
"""Add default headers and transform it to CIMultiDict"""
# Convert headers to MultiDict
result = CIMultiDict(self._default_headers)
if headers:
if not isinstance(headers, (MultiDictProxy, MultiDict)):
headers = CIMultiDict(headers)
added_names: Set[str] = set()
for key, value in headers.items():
if key in added_names:
result.add(key, value)
else:
result[key] = value
added_names.add(key)
return result
def get(
self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP GET request."""
return _RequestContextManager(
self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)
)
def options(
self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP OPTIONS request."""
return _RequestContextManager(
self._request(
hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
)
)
def head(
self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP HEAD request."""
return _RequestContextManager(
self._request(
hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
)
)
def post(
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP POST request."""
return _RequestContextManager(
self._request(hdrs.METH_POST, url, data=data, **kwargs)
)
def put(
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP PUT request."""
return _RequestContextManager(
self._request(hdrs.METH_PUT, url, data=data, **kwargs)
)
def patch(
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
) -> "_RequestContextManager":
"""Perform HTTP PATCH request."""
return _RequestContextManager(
self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
)
def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
"""Perform HTTP DELETE request."""
return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs))
async def close(self) -> None:
"""Close underlying connector.
Release all acquired resources.
"""
if not self.closed:
if self._connector is not None and self._connector_owner:
await self._connector.close()
self._connector = None
@property
def closed(self) -> bool:
"""Is client session closed.
A readonly property.
"""
return self._connector is None or self._connector.closed
@property
def connector(self) -> Optional[BaseConnector]:
"""Connector instance used for the session."""
return self._connector
@property
def cookie_jar(self) -> AbstractCookieJar:
"""The session cookies."""
return self._cookie_jar
@property
def version(self) -> Tuple[int, int]:
"""The session HTTP protocol version."""
return self._version
@property
def requote_redirect_url(self) -> bool:
"""Do URL requoting on redirection handling."""
return self._requote_redirect_url
@requote_redirect_url.setter
def requote_redirect_url(self, val: bool) -> None:
"""Do URL requoting on redirection handling."""
warnings.warn(
"session.requote_redirect_url modification " "is deprecated #2778",
DeprecationWarning,
stacklevel=2,
)
self._requote_redirect_url = val
@property
def loop(self) -> asyncio.AbstractEventLoop:
"""Session's loop."""
warnings.warn(
"client.loop property is deprecated", DeprecationWarning, stacklevel=2
)
return self._loop
@property
def timeout(self) -> ClientTimeout:
"""Timeout for the session."""
return self._timeout
@property
def headers(self) -> "CIMultiDict[str]":
"""The default headers of the client session."""
return self._default_headers
@property
def skip_auto_headers(self) -> FrozenSet[istr]:
"""Headers for which autogeneration should be skipped"""
return self._skip_auto_headers
@property
def auth(self) -> Optional[BasicAuth]:
"""An object that represents HTTP Basic Authorization"""
return self._default_auth
@property
def json_serialize(self) -> JSONEncoder:
"""Json serializer callable"""
return self._json_serialize
@property
def connector_owner(self) -> bool:
"""Should connector be closed on session closing"""
return self._connector_owner
@property
def raise_for_status(
self,
) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
"""Should `ClientResponse.raise_for_status()` be called for each response."""
return self._raise_for_status
@property
def auto_decompress(self) -> bool:
"""Should the body response be automatically decompressed."""
return self._auto_decompress
@property
def trust_env(self) -> bool:
"""
        Should proxy information from the environment or netrc be trusted.
        The information comes from the HTTP_PROXY / HTTPS_PROXY environment
        variables or the ~/.netrc file if present.
"""
return self._trust_env
@property
def trace_configs(self) -> List[TraceConfig]:
"""A list of TraceConfig instances used for client tracing"""
return self._trace_configs
def detach(self) -> None:
"""Detach connector from session without closing the former.
Session is switched to closed state anyway.
"""
self._connector = None
def __enter__(self) -> None:
raise TypeError("Use async with instead")
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
# __exit__ should exist in pair with __enter__ but never executed
pass # pragma: no cover
async def __aenter__(self) -> "ClientSession":
return self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
await self.close()
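# Example sketch (not part of the aiohttp API): the canonical session
# lifecycle; both context managers release their resources on exit. The URL
# is illustrative.
async def _example_session_get() -> str:
    async with ClientSession() as session:
        async with session.get("http://example.com/") as resp:
            resp.raise_for_status()
            return await resp.text()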
class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
__slots__ = ("_coro", "_resp")
def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
self._coro = coro
def send(self, arg: None) -> "asyncio.Future[Any]":
return self._coro.send(arg)
def throw(self, arg: BaseException) -> None: # type: ignore[arg-type,override]
self._coro.throw(arg)
def close(self) -> None:
return self._coro.close()
def __await__(self) -> Generator[Any, None, _RetType]:
ret = self._coro.__await__()
return ret
def __iter__(self) -> Generator[Any, None, _RetType]:
return self.__await__()
async def __aenter__(self) -> _RetType:
self._resp = await self._coro
return self._resp
class _RequestContextManager(_BaseRequestContextManager[ClientResponse]):
__slots__ = ()
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
) -> None:
# We're basing behavior on the exception as it can be caused by
# user code unrelated to the status of the connection. If you
# would like to close a connection you must do that
# explicitly. Otherwise connection error handling should kick in
# and close/recycle the connection as required.
self._resp.release()
class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]):
__slots__ = ()
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
) -> None:
await self._resp.close()
class _SessionRequestContextManager:
__slots__ = ("_coro", "_resp", "_session")
def __init__(
self,
coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
session: ClientSession,
) -> None:
self._coro = coro
self._resp: Optional[ClientResponse] = None
self._session = session
async def __aenter__(self) -> ClientResponse:
try:
self._resp = await self._coro
except BaseException:
await self._session.close()
raise
else:
return self._resp
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc: Optional[BaseException],
tb: Optional[TracebackType],
) -> None:
assert self._resp is not None
self._resp.close()
await self._session.close()
def request(
method: str,
url: StrOrURL,
*,
params: Optional[Mapping[str, str]] = None,
data: Any = None,
json: Any = None,
headers: Optional[LooseHeaders] = None,
skip_auto_headers: Optional[Iterable[str]] = None,
auth: Optional[BasicAuth] = None,
allow_redirects: bool = True,
max_redirects: int = 10,
compress: Optional[str] = None,
chunked: Optional[bool] = None,
expect100: bool = False,
raise_for_status: Optional[bool] = None,
read_until_eof: bool = True,
proxy: Optional[StrOrURL] = None,
proxy_auth: Optional[BasicAuth] = None,
timeout: Union[ClientTimeout, object] = sentinel,
cookies: Optional[LooseCookies] = None,
version: HttpVersion = http.HttpVersion11,
connector: Optional[BaseConnector] = None,
read_bufsize: Optional[int] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> _SessionRequestContextManager:
"""Constructs and sends a request.
Returns response object.
method - HTTP method
url - request url
params - (optional) Dictionary or bytes to be sent in the query
string of the new request
data - (optional) Dictionary, bytes, or file-like object to
send in the body of the request
json - (optional) Any json compatible python object
headers - (optional) Dictionary of HTTP Headers to send with
the request
cookies - (optional) Dict object to send with the request
    auth - (optional) BasicAuth named tuple (aiohttp.helpers.BasicAuth)
       representing HTTP Basic Auth
allow_redirects - (optional) If set to False, do not follow
redirects
version - Request HTTP version.
compress - Set to True if request has to be compressed
with deflate encoding.
    chunked - Enable chunked transfer encoding.
expect100 - Expect 100-continue response from server.
connector - BaseConnector sub-class instance to support
connection pooling.
read_until_eof - Read response until eof if response
does not have Content-Length header.
loop - Optional event loop.
timeout - Optional ClientTimeout settings structure, 5min
total timeout by default.
    Usage::
      >>> import aiohttp
      >>> async with aiohttp.request('GET', 'http://python.org/') as resp:
      ...     print(resp)
      ...     data = await resp.read()
    """
connector_owner = False
if connector is None:
connector_owner = True
connector = TCPConnector(loop=loop, force_close=True)
session = ClientSession(
loop=loop,
cookies=cookies,
version=version,
timeout=timeout,
connector=connector,
connector_owner=connector_owner,
)
return _SessionRequestContextManager(
session._request(
method,
url,
params=params,
data=data,
json=json,
headers=headers,
skip_auto_headers=skip_auto_headers,
auth=auth,
allow_redirects=allow_redirects,
max_redirects=max_redirects,
compress=compress,
chunked=chunked,
expect100=expect100,
raise_for_status=raise_for_status,
read_until_eof=read_until_eof,
proxy=proxy,
proxy_auth=proxy_auth,
read_bufsize=read_bufsize,
),
session,
)
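# Design note on the fallback above: when no connector is passed, request()
# creates a TCPConnector with force_close=True, so every call is a one-shot
# request with no connection reuse. A hedged driving sketch:
#
#     import asyncio
#     import aiohttp
#
#     async def main() -> None:
#         async with aiohttp.request("GET", "https://example.com") as resp:
#             print(resp.status)
#
#     asyncio.run(main())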
# omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_ws.py
import asyncio
import base64
import binascii
import hashlib
import json
from typing import Any, Iterable, Optional, Tuple, cast
import async_timeout
import attr
from multidict import CIMultiDict
from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import call_later, set_result
from .http import (
WS_CLOSED_MESSAGE,
WS_CLOSING_MESSAGE,
WS_KEY,
WebSocketError,
WebSocketReader,
WebSocketWriter,
WSCloseCode,
WSMessage,
WSMsgType as WSMsgType,
ws_ext_gen,
ws_ext_parse,
)
from .log import ws_logger
from .streams import EofStream, FlowControlDataQueue
from .typedefs import Final, JSONDecoder, JSONEncoder
from .web_exceptions import HTTPBadRequest, HTTPException
from .web_request import BaseRequest
from .web_response import StreamResponse
__all__ = (
"WebSocketResponse",
"WebSocketReady",
"WSMsgType",
)
THRESHOLD_CONNLOST_ACCESS: Final[int] = 5
@attr.s(auto_attribs=True, frozen=True, slots=True)
class WebSocketReady:
ok: bool
protocol: Optional[str]
def __bool__(self) -> bool:
return self.ok
class WebSocketResponse(StreamResponse):
_length_check = False
def __init__(
self,
*,
timeout: float = 10.0,
receive_timeout: Optional[float] = None,
autoclose: bool = True,
autoping: bool = True,
heartbeat: Optional[float] = None,
protocols: Iterable[str] = (),
compress: bool = True,
max_msg_size: int = 4 * 1024 * 1024,
) -> None:
super().__init__(status=101)
self._protocols = protocols
self._ws_protocol: Optional[str] = None
self._writer: Optional[WebSocketWriter] = None
self._reader: Optional[FlowControlDataQueue[WSMessage]] = None
self._closed = False
self._closing = False
self._conn_lost = 0
self._close_code: Optional[int] = None
self._loop: Optional[asyncio.AbstractEventLoop] = None
self._waiting: Optional[asyncio.Future[bool]] = None
self._exception: Optional[BaseException] = None
self._timeout = timeout
self._receive_timeout = receive_timeout
self._autoclose = autoclose
self._autoping = autoping
self._heartbeat = heartbeat
self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
if heartbeat is not None:
self._pong_heartbeat = heartbeat / 2.0
self._pong_response_cb: Optional[asyncio.TimerHandle] = None
self._compress = compress
self._max_msg_size = max_msg_size
def _cancel_heartbeat(self) -> None:
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = None
if self._heartbeat_cb is not None:
self._heartbeat_cb.cancel()
self._heartbeat_cb = None
def _reset_heartbeat(self) -> None:
self._cancel_heartbeat()
if self._heartbeat is not None:
assert self._loop is not None
self._heartbeat_cb = call_later(
self._send_heartbeat, self._heartbeat, self._loop
)
def _send_heartbeat(self) -> None:
if self._heartbeat is not None and not self._closed:
assert self._loop is not None
            # A fire-and-forget task is not perfect, but it is probably
            # fine for sending a ping. Otherwise we would need a
            # long-lived heartbeat task in the class.
self._loop.create_task(self._writer.ping()) # type: ignore[union-attr]
if self._pong_response_cb is not None:
self._pong_response_cb.cancel()
self._pong_response_cb = call_later(
self._pong_not_received, self._pong_heartbeat, self._loop
)
def _pong_not_received(self) -> None:
if self._req is not None and self._req.transport is not None:
self._closed = True
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = asyncio.TimeoutError()
self._req.transport.close()
async def prepare(self, request: BaseRequest) -> AbstractStreamWriter:
        # run the pre-check first so that its errors are not hidden by
        # do_handshake() exceptions
if self._payload_writer is not None:
return self._payload_writer
protocol, writer = self._pre_start(request)
payload_writer = await super().prepare(request)
assert payload_writer is not None
self._post_start(request, protocol, writer)
await payload_writer.drain()
return payload_writer
def _handshake(
self, request: BaseRequest
) -> Tuple["CIMultiDict[str]", str, bool, bool]:
headers = request.headers
if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip():
raise HTTPBadRequest(
text=(
"No WebSocket UPGRADE hdr: {}\n Can "
'"Upgrade" only to "WebSocket".'
).format(headers.get(hdrs.UPGRADE))
)
if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower():
raise HTTPBadRequest(
text="No CONNECTION upgrade hdr: {}".format(
headers.get(hdrs.CONNECTION)
)
)
# find common sub-protocol between client and server
protocol = None
if hdrs.SEC_WEBSOCKET_PROTOCOL in headers:
req_protocols = [
str(proto.strip())
for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
]
for proto in req_protocols:
if proto in self._protocols:
protocol = proto
break
else:
# No overlap found: Return no protocol as per spec
ws_logger.warning(
"Client protocols %r don’t overlap server-known ones %r",
req_protocols,
self._protocols,
)
# check supported version
version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "")
if version not in ("13", "8", "7"):
raise HTTPBadRequest(text=f"Unsupported version: {version}")
# check client handshake for validity
key = headers.get(hdrs.SEC_WEBSOCKET_KEY)
try:
if not key or len(base64.b64decode(key)) != 16:
raise HTTPBadRequest(text=f"Handshake error: {key!r}")
except binascii.Error:
raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None
accept_val = base64.b64encode(
hashlib.sha1(key.encode() + WS_KEY).digest()
).decode()
response_headers = CIMultiDict(
{
hdrs.UPGRADE: "websocket",
hdrs.CONNECTION: "upgrade",
hdrs.SEC_WEBSOCKET_ACCEPT: accept_val,
}
)
notakeover = False
compress = 0
if self._compress:
extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
            # The server side always gets a result without an exception;
            # if anything went wrong, just drop the compress extension.
compress, notakeover = ws_ext_parse(extensions, isserver=True)
if compress:
enabledext = ws_ext_gen(
compress=compress, isserver=True, server_notakeover=notakeover
)
response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext
if protocol:
response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol
return (
response_headers,
protocol,
compress,
notakeover,
) # type: ignore[return-value]
def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]:
self._loop = request._loop
headers, protocol, compress, notakeover = self._handshake(request)
self.set_status(101)
self.headers.update(headers)
self.force_close()
self._compress = compress
transport = request._protocol.transport
assert transport is not None
writer = WebSocketWriter(
request._protocol, transport, compress=compress, notakeover=notakeover
)
return protocol, writer
def _post_start(
self, request: BaseRequest, protocol: str, writer: WebSocketWriter
) -> None:
self._ws_protocol = protocol
self._writer = writer
self._reset_heartbeat()
loop = self._loop
assert loop is not None
self._reader = FlowControlDataQueue(request._protocol, 2**16, loop=loop)
request.protocol.set_parser(
WebSocketReader(self._reader, self._max_msg_size, compress=self._compress)
)
# disable HTTP keepalive for WebSocket
request.protocol.keep_alive(False)
def can_prepare(self, request: BaseRequest) -> WebSocketReady:
if self._writer is not None:
raise RuntimeError("Already started")
try:
_, protocol, _, _ = self._handshake(request)
except HTTPException:
return WebSocketReady(False, None)
else:
return WebSocketReady(True, protocol)
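    # A hedged sketch (the handler wiring is illustrative): can_prepare()
    # lets a handler probe the handshake before committing to the 101
    # response.
    #
    #     ws = WebSocketResponse()
    #     if not ws.can_prepare(request).ok:
    #         raise HTTPBadRequest(text="Not a WebSocket handshake")
    #     await ws.prepare(request)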
@property
def closed(self) -> bool:
return self._closed
@property
def close_code(self) -> Optional[int]:
return self._close_code
@property
def ws_protocol(self) -> Optional[str]:
return self._ws_protocol
@property
def compress(self) -> bool:
return self._compress
def exception(self) -> Optional[BaseException]:
return self._exception
async def ping(self, message: bytes = b"") -> None:
if self._writer is None:
raise RuntimeError("Call .prepare() first")
await self._writer.ping(message)
async def pong(self, message: bytes = b"") -> None:
# unsolicited pong
if self._writer is None:
raise RuntimeError("Call .prepare() first")
await self._writer.pong(message)
async def send_str(self, data: str, compress: Optional[bool] = None) -> None:
if self._writer is None:
raise RuntimeError("Call .prepare() first")
if not isinstance(data, str):
raise TypeError("data argument must be str (%r)" % type(data))
await self._writer.send(data, binary=False, compress=compress)
async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None:
if self._writer is None:
raise RuntimeError("Call .prepare() first")
if not isinstance(data, (bytes, bytearray, memoryview)):
raise TypeError("data argument must be byte-ish (%r)" % type(data))
await self._writer.send(data, binary=True, compress=compress)
async def send_json(
self,
data: Any,
compress: Optional[bool] = None,
*,
dumps: JSONEncoder = json.dumps,
) -> None:
await self.send_str(dumps(data), compress=compress)
async def write_eof(self) -> None: # type: ignore[override]
if self._eof_sent:
return
if self._payload_writer is None:
raise RuntimeError("Response has not been started")
await self.close()
self._eof_sent = True
async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
if self._writer is None:
raise RuntimeError("Call .prepare() first")
self._cancel_heartbeat()
reader = self._reader
assert reader is not None
        # we need to break the `receive()` cycle first, since `close()`
        # may be called from a different task
if self._waiting is not None and not self._closed:
reader.feed_data(WS_CLOSING_MESSAGE, 0)
await self._waiting
if not self._closed:
self._closed = True
try:
await self._writer.close(code, message)
writer = self._payload_writer
assert writer is not None
await writer.drain()
except (asyncio.CancelledError, asyncio.TimeoutError):
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
raise
except Exception as exc:
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = exc
return True
if self._closing:
return True
reader = self._reader
assert reader is not None
try:
async with async_timeout.timeout(self._timeout):
msg = await reader.read()
except asyncio.CancelledError:
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
raise
except Exception as exc:
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = exc
return True
if msg.type == WSMsgType.CLOSE:
self._close_code = msg.data
return True
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
self._exception = asyncio.TimeoutError()
return True
else:
return False
async def receive(self, timeout: Optional[float] = None) -> WSMessage:
if self._reader is None:
raise RuntimeError("Call .prepare() first")
loop = self._loop
assert loop is not None
while True:
if self._waiting is not None:
raise RuntimeError("Concurrent call to receive() is not allowed")
if self._closed:
self._conn_lost += 1
if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS:
raise RuntimeError("WebSocket connection is closed.")
return WS_CLOSED_MESSAGE
elif self._closing:
return WS_CLOSING_MESSAGE
try:
self._waiting = loop.create_future()
try:
async with async_timeout.timeout(timeout or self._receive_timeout):
msg = await self._reader.read()
self._reset_heartbeat()
finally:
waiter = self._waiting
set_result(waiter, True)
self._waiting = None
except (asyncio.CancelledError, asyncio.TimeoutError):
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
raise
except EofStream:
self._close_code = WSCloseCode.OK
await self.close()
return WSMessage(WSMsgType.CLOSED, None, None)
except WebSocketError as exc:
self._close_code = exc.code
await self.close(code=exc.code)
return WSMessage(WSMsgType.ERROR, exc, None)
except Exception as exc:
self._exception = exc
self._closing = True
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
await self.close()
return WSMessage(WSMsgType.ERROR, exc, None)
if msg.type == WSMsgType.CLOSE:
self._closing = True
self._close_code = msg.data
if not self._closed and self._autoclose:
await self.close()
elif msg.type == WSMsgType.CLOSING:
self._closing = True
elif msg.type == WSMsgType.PING and self._autoping:
await self.pong(msg.data)
continue
elif msg.type == WSMsgType.PONG and self._autoping:
continue
return msg
async def receive_str(self, *, timeout: Optional[float] = None) -> str:
msg = await self.receive(timeout)
if msg.type != WSMsgType.TEXT:
raise TypeError(
"Received message {}:{!r} is not WSMsgType.TEXT".format(
msg.type, msg.data
)
)
return cast(str, msg.data)
async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
msg = await self.receive(timeout)
if msg.type != WSMsgType.BINARY:
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
return cast(bytes, msg.data)
async def receive_json(
self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None
) -> Any:
data = await self.receive_str(timeout=timeout)
return loads(data)
async def write(self, data: bytes) -> None:
raise RuntimeError("Cannot call .write() for websocket")
def __aiter__(self) -> "WebSocketResponse":
return self
async def __anext__(self) -> WSMessage:
msg = await self.receive()
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
raise StopAsyncIteration
return msg
def _cancel(self, exc: BaseException) -> None:
if self._reader is not None:
self._reader.set_exception(exc)
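# End-to-end sketch (the handler name is assumed; `web` refers to
# aiohttp.web): the async-iterator protocol above makes an echo server a
# short loop.
#
#     from aiohttp import web
#
#     async def echo(request: web.Request) -> web.WebSocketResponse:
#         ws = web.WebSocketResponse()
#         await ws.prepare(request)
#         async for msg in ws:
#             if msg.type == WSMsgType.TEXT:
#                 await ws.send_str(msg.data)
#         return ws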
# omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_log.py
import datetime
import functools
import logging
import os
import re
from collections import namedtuple
from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa
from .abc import AbstractAccessLogger
from .web_request import BaseRequest
from .web_response import StreamResponse
KeyMethod = namedtuple("KeyMethod", "key method")
class AccessLogger(AbstractAccessLogger):
"""Helper object to log access.
Usage:
log = logging.getLogger("spam")
log_format = "%a %{User-Agent}i"
access_logger = AccessLogger(log, log_format)
access_logger.log(request, response, time)
Format:
%% The percent sign
%a Remote IP-address (IP-address of proxy if using reverse proxy)
%t Time when the request was started to process
%P The process ID of the child that serviced the request
%r First line of request
%s Response status code
    %b  Size of response in bytes, excluding HTTP headers
%T Time taken to serve the request, in seconds
%Tf Time taken to serve the request, in seconds with floating fraction
in .06f format
%D Time taken to serve the request, in microseconds
%{FOO}i request.headers['FOO']
%{FOO}o response.headers['FOO']
%{FOO}e os.environ['FOO']
"""
LOG_FORMAT_MAP = {
"a": "remote_address",
"t": "request_start_time",
"P": "process_id",
"r": "first_request_line",
"s": "response_status",
"b": "response_size",
"T": "request_time",
"Tf": "request_time_frac",
"D": "request_time_micro",
"i": "request_header",
"o": "response_header",
}
LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'
FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)")
CLEANUP_RE = re.compile(r"(%[^s])")
_FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {}
def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None:
"""Initialise the logger.
logger is a logger object to be used for logging.
log_format is a string with apache compatible log format description.
"""
super().__init__(logger, log_format=log_format)
_compiled_format = AccessLogger._FORMAT_CACHE.get(log_format)
if not _compiled_format:
_compiled_format = self.compile_format(log_format)
AccessLogger._FORMAT_CACHE[log_format] = _compiled_format
self._log_format, self._methods = _compiled_format
def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]:
"""Translate log_format into form usable by modulo formatting
All known atoms will be replaced with %s
Also methods for formatting of those atoms will be added to
_methods in appropriate order
For example we have log_format = "%a %t"
This format will be translated to "%s %s"
Also contents of _methods will be
[self._format_a, self._format_t]
These method will be called and results will be passed
to translated string format.
Each _format_* method receive 'args' which is list of arguments
given to self.log
Exceptions are _format_e, _format_i and _format_o methods which
also receive key name (by functools.partial)
"""
# list of (key, method) tuples, we don't use an OrderedDict as users
# can repeat the same key more than once
methods = list()
for atom in self.FORMAT_RE.findall(log_format):
if atom[1] == "":
format_key1 = self.LOG_FORMAT_MAP[atom[0]]
m = getattr(AccessLogger, "_format_%s" % atom[0])
key_method = KeyMethod(format_key1, m)
else:
format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1])
m = getattr(AccessLogger, "_format_%s" % atom[2])
key_method = KeyMethod(format_key2, functools.partial(m, atom[1]))
methods.append(key_method)
log_format = self.FORMAT_RE.sub(r"%s", log_format)
log_format = self.CLEANUP_RE.sub(r"%\1", log_format)
return log_format, methods
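    # For example (a sketch of the transformation described in the
    # docstring above):
    #
    #     log = logging.getLogger("access")
    #     fmt, methods = AccessLogger(log).compile_format("%a %t")
    #     # fmt == "%s %s"; methods == [KeyMethod for %a, KeyMethod for %t]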
@staticmethod
def _format_i(
key: str, request: BaseRequest, response: StreamResponse, time: float
) -> str:
if request is None:
return "(no headers)"
# suboptimal, make istr(key) once
return request.headers.get(key, "-")
@staticmethod
def _format_o(
key: str, request: BaseRequest, response: StreamResponse, time: float
) -> str:
# suboptimal, make istr(key) once
return response.headers.get(key, "-")
@staticmethod
def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str:
if request is None:
return "-"
ip = request.remote
return ip if ip is not None else "-"
@staticmethod
def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str:
now = datetime.datetime.utcnow()
start_time = now - datetime.timedelta(seconds=time)
return start_time.strftime("[%d/%b/%Y:%H:%M:%S +0000]")
@staticmethod
def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str:
return "<%s>" % os.getpid()
@staticmethod
def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str:
if request is None:
return "-"
return "{} {} HTTP/{}.{}".format(
request.method,
request.path_qs,
request.version.major,
request.version.minor,
)
@staticmethod
def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int:
return response.status
@staticmethod
def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int:
return response.body_length
@staticmethod
def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str:
return str(round(time))
@staticmethod
def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str:
return "%06f" % time
@staticmethod
def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str:
return str(round(time * 1000000))
def _format_line(
self, request: BaseRequest, response: StreamResponse, time: float
) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]:
return [(key, method(request, response, time)) for key, method in self._methods]
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
try:
fmt_info = self._format_line(request, response, time)
values = list()
extra = dict()
for key, value in fmt_info:
values.append(value)
if key.__class__ is str:
extra[key] = value
else:
k1, k2 = key # type: ignore[misc]
dct = extra.get(k1, {}) # type: ignore[var-annotated,has-type]
dct[k2] = value # type: ignore[index,has-type]
extra[k1] = dct # type: ignore[has-type,assignment]
self.logger.info(self._log_format % tuple(values), extra=extra)
except Exception:
self.logger.exception("Error in logging")
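# Usage sketch (web.run_app and its access_log_format parameter are real
# aiohttp API; the format string is just an example):
#
#     from aiohttp import web
#
#     app = web.Application()
#     web.run_app(app, access_log_format='%a "%r" %s %b "%{User-Agent}i"')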
# omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/payload_streamer.py
"""
Payload implementation for coroutines as data providers.
As a simple case, you can upload data from file::
@aiohttp.streamer
async def file_sender(writer, file_name=None):
with open(file_name, 'rb') as f:
chunk = f.read(2**16)
while chunk:
await writer.write(chunk)
chunk = f.read(2**16)
Then you can use `file_sender` like this:
async with session.post('http://httpbin.org/post',
data=file_sender(file_name='huge_file')) as resp:
print(await resp.text())
.. note:: The coroutine must accept `writer` as its first argument
"""
import types
import warnings
from typing import Any, Awaitable, Callable, Dict, Tuple
from .abc import AbstractStreamWriter
from .payload import Payload, payload_type
__all__ = ("streamer",)
class _stream_wrapper:
def __init__(
self,
coro: Callable[..., Awaitable[None]],
args: Tuple[Any, ...],
kwargs: Dict[str, Any],
) -> None:
self.coro = types.coroutine(coro)
self.args = args
self.kwargs = kwargs
async def __call__(self, writer: AbstractStreamWriter) -> None:
await self.coro(writer, *self.args, **self.kwargs) # type: ignore[operator]
class streamer:
def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
warnings.warn(
"@streamer is deprecated, use async generators instead",
DeprecationWarning,
stacklevel=2,
)
self.coro = coro
def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
return _stream_wrapper(self.coro, args, kwargs)
@payload_type(_stream_wrapper)
class StreamWrapperPayload(Payload):
async def write(self, writer: AbstractStreamWriter) -> None:
await self._value(writer)
@payload_type(streamer)
class StreamPayload(StreamWrapperPayload):
def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
super().__init__(value(), *args, **kwargs)
async def write(self, writer: AbstractStreamWriter) -> None:
await self._value(writer)
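# As the deprecation warning above says, an async generator is the modern
# replacement for @streamer. A minimal sketch (file_name is illustrative):
#
#     async def file_sender(file_name: str):
#         with open(file_name, "rb") as f:
#             chunk = f.read(2 ** 16)
#             while chunk:
#                 yield chunk
#                 chunk = f.read(2 ** 16)
#
#     # await session.post("http://httpbin.org/post",
#     #                    data=file_sender("huge_file"))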
# omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/payload.py
import asyncio
import enum
import io
import json
import mimetypes
import os
import warnings
from abc import ABC, abstractmethod
from itertools import chain
from typing import (
IO,
TYPE_CHECKING,
Any,
ByteString,
Dict,
Iterable,
Optional,
TextIO,
Tuple,
Type,
Union,
)
from multidict import CIMultiDict
from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import (
PY_36,
content_disposition_header,
guess_filename,
parse_mimetype,
sentinel,
)
from .streams import StreamReader
from .typedefs import Final, JSONEncoder, _CIMultiDict
__all__ = (
"PAYLOAD_REGISTRY",
"get_payload",
"payload_type",
"Payload",
"BytesPayload",
"StringPayload",
"IOBasePayload",
"BytesIOPayload",
"BufferedReaderPayload",
"TextIOPayload",
"StringIOPayload",
"JsonPayload",
"AsyncIterablePayload",
)
TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB
if TYPE_CHECKING: # pragma: no cover
from typing import List
class LookupError(Exception):
pass
class Order(str, enum.Enum):
normal = "normal"
try_first = "try_first"
try_last = "try_last"
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
def register_payload(
factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
PAYLOAD_REGISTRY.register(factory, type, order=order)
class payload_type:
def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
self.type = type
self.order = order
def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
register_payload(factory, self.type, order=self.order)
return factory
PayloadType = Type["Payload"]
_PayloadRegistryItem = Tuple[PayloadType, Any]
class PayloadRegistry:
"""Payload registry.
note: we need zope.interface for more efficient adapter search
"""
def __init__(self) -> None:
self._first: List[_PayloadRegistryItem] = []
self._normal: List[_PayloadRegistryItem] = []
self._last: List[_PayloadRegistryItem] = []
def get(
self,
data: Any,
*args: Any,
_CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
**kwargs: Any,
) -> "Payload":
if isinstance(data, Payload):
return data
for factory, type in _CHAIN(self._first, self._normal, self._last):
if isinstance(data, type):
return factory(data, *args, **kwargs)
raise LookupError()
def register(
self, factory: PayloadType, type: Any, *, order: Order = Order.normal
) -> None:
if order is Order.try_first:
self._first.append((factory, type))
elif order is Order.normal:
self._normal.append((factory, type))
elif order is Order.try_last:
self._last.append((factory, type))
else:
raise ValueError(f"Unsupported order {order!r}")
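# A hedged registration sketch (MyType and MyPayload are hypothetical):
# the payload_type decorator defined above is sugar for register_payload.
#
#     class MyType:
#         pass
#
#     @payload_type(MyType, order=Order.try_first)
#     class MyPayload(Payload):
#         async def write(self, writer: AbstractStreamWriter) -> None:
#             await writer.write(str(self._value).encode())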
class Payload(ABC):
_default_content_type: str = "application/octet-stream"
_size: Optional[int] = None
def __init__(
self,
value: Any,
headers: Optional[
Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
] = None,
content_type: Optional[str] = sentinel,
filename: Optional[str] = None,
encoding: Optional[str] = None,
**kwargs: Any,
) -> None:
self._encoding = encoding
self._filename = filename
self._headers: _CIMultiDict = CIMultiDict()
self._value = value
if content_type is not sentinel and content_type is not None:
self._headers[hdrs.CONTENT_TYPE] = content_type
elif self._filename is not None:
content_type = mimetypes.guess_type(self._filename)[0]
if content_type is None:
content_type = self._default_content_type
self._headers[hdrs.CONTENT_TYPE] = content_type
else:
self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
self._headers.update(headers or {})
@property
def size(self) -> Optional[int]:
"""Size of the payload."""
return self._size
@property
def filename(self) -> Optional[str]:
"""Filename of the payload."""
return self._filename
@property
def headers(self) -> _CIMultiDict:
"""Custom item headers"""
return self._headers
@property
def _binary_headers(self) -> bytes:
return (
"".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
"utf-8"
)
+ b"\r\n"
)
@property
def encoding(self) -> Optional[str]:
"""Payload encoding"""
return self._encoding
@property
def content_type(self) -> str:
"""Content type"""
return self._headers[hdrs.CONTENT_TYPE]
def set_content_disposition(
self,
disptype: str,
quote_fields: bool = True,
_charset: str = "utf-8",
**params: Any,
) -> None:
"""Sets ``Content-Disposition`` header."""
self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
disptype, quote_fields=quote_fields, _charset=_charset, **params
)
@abstractmethod
async def write(self, writer: AbstractStreamWriter) -> None:
"""Write payload.
writer is an AbstractStreamWriter instance:
"""
class BytesPayload(Payload):
def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
if not isinstance(value, (bytes, bytearray, memoryview)):
raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")
if "content_type" not in kwargs:
kwargs["content_type"] = "application/octet-stream"
super().__init__(value, *args, **kwargs)
if isinstance(value, memoryview):
self._size = value.nbytes
else:
self._size = len(value)
if self._size > TOO_LARGE_BYTES_BODY:
if PY_36:
kwargs = {"source": self}
else:
kwargs = {}
warnings.warn(
"Sending a large body directly with raw bytes might"
" lock the event loop. You should probably pass an "
"io.BytesIO object instead",
ResourceWarning,
**kwargs,
)
async def write(self, writer: AbstractStreamWriter) -> None:
await writer.write(self._value)
class StringPayload(BytesPayload):
def __init__(
self,
value: str,
*args: Any,
encoding: Optional[str] = None,
content_type: Optional[str] = None,
**kwargs: Any,
) -> None:
if encoding is None:
if content_type is None:
real_encoding = "utf-8"
content_type = "text/plain; charset=utf-8"
else:
mimetype = parse_mimetype(content_type)
real_encoding = mimetype.parameters.get("charset", "utf-8")
else:
if content_type is None:
content_type = "text/plain; charset=%s" % encoding
real_encoding = encoding
super().__init__(
value.encode(real_encoding),
encoding=real_encoding,
content_type=content_type,
*args,
**kwargs,
)
class StringIOPayload(StringPayload):
def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
super().__init__(value.read(), *args, **kwargs)
class IOBasePayload(Payload):
_value: IO[Any]
def __init__(
self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
) -> None:
if "filename" not in kwargs:
kwargs["filename"] = guess_filename(value)
super().__init__(value, *args, **kwargs)
if self._filename is not None and disposition is not None:
if hdrs.CONTENT_DISPOSITION not in self.headers:
self.set_content_disposition(disposition, filename=self._filename)
async def write(self, writer: AbstractStreamWriter) -> None:
loop = asyncio.get_event_loop()
try:
chunk = await loop.run_in_executor(None, self._value.read, 2**16)
while chunk:
await writer.write(chunk)
chunk = await loop.run_in_executor(None, self._value.read, 2**16)
finally:
await loop.run_in_executor(None, self._value.close)
class TextIOPayload(IOBasePayload):
_value: TextIO
def __init__(
self,
value: TextIO,
*args: Any,
encoding: Optional[str] = None,
content_type: Optional[str] = None,
**kwargs: Any,
) -> None:
if encoding is None:
if content_type is None:
encoding = "utf-8"
content_type = "text/plain; charset=utf-8"
else:
mimetype = parse_mimetype(content_type)
encoding = mimetype.parameters.get("charset", "utf-8")
else:
if content_type is None:
content_type = "text/plain; charset=%s" % encoding
super().__init__(
value,
content_type=content_type,
encoding=encoding,
*args,
**kwargs,
)
@property
def size(self) -> Optional[int]:
try:
return os.fstat(self._value.fileno()).st_size - self._value.tell()
except OSError:
return None
async def write(self, writer: AbstractStreamWriter) -> None:
loop = asyncio.get_event_loop()
try:
chunk = await loop.run_in_executor(None, self._value.read, 2**16)
while chunk:
data = (
chunk.encode(encoding=self._encoding)
if self._encoding
else chunk.encode()
)
await writer.write(data)
chunk = await loop.run_in_executor(None, self._value.read, 2**16)
finally:
await loop.run_in_executor(None, self._value.close)
class BytesIOPayload(IOBasePayload):
@property
def size(self) -> int:
position = self._value.tell()
end = self._value.seek(0, os.SEEK_END)
self._value.seek(position)
return end - position
class BufferedReaderPayload(IOBasePayload):
@property
def size(self) -> Optional[int]:
try:
return os.fstat(self._value.fileno()).st_size - self._value.tell()
except OSError:
# data.fileno() is not supported, e.g.
# io.BufferedReader(io.BytesIO(b'data'))
return None
class JsonPayload(BytesPayload):
def __init__(
self,
value: Any,
encoding: str = "utf-8",
content_type: str = "application/json",
dumps: JSONEncoder = json.dumps,
*args: Any,
**kwargs: Any,
) -> None:
super().__init__(
dumps(value).encode(encoding),
content_type=content_type,
encoding=encoding,
*args,
**kwargs,
)
if TYPE_CHECKING: # pragma: no cover
from typing import AsyncIterable, AsyncIterator
_AsyncIterator = AsyncIterator[bytes]
_AsyncIterable = AsyncIterable[bytes]
else:
from collections.abc import AsyncIterable, AsyncIterator
_AsyncIterator = AsyncIterator
_AsyncIterable = AsyncIterable
class AsyncIterablePayload(Payload):
_iter: Optional[_AsyncIterator] = None
def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
if not isinstance(value, AsyncIterable):
raise TypeError(
"value argument must support "
"collections.abc.AsyncIterablebe interface, "
"got {!r}".format(type(value))
)
if "content_type" not in kwargs:
kwargs["content_type"] = "application/octet-stream"
super().__init__(value, *args, **kwargs)
self._iter = value.__aiter__()
async def write(self, writer: AbstractStreamWriter) -> None:
if self._iter:
try:
                # the `iter is not None` check prevents rare cases
                # when the same iterable is used twice
while True:
chunk = await self._iter.__anext__()
await writer.write(chunk)
except StopAsyncIteration:
self._iter = None
class StreamReaderPayload(AsyncIterablePayload):
def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
super().__init__(value.iter_any(), *args, **kwargs)
PAYLOAD_REGISTRY = PayloadRegistry()
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last gives more specialized async iterables, such as
# multipart.BodyPartReaderPayload, a chance to override the default
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
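# Dispatch sketch: with the registrations above, get_payload() picks the
# first matching wrapper for a plain Python value.
#
#     assert isinstance(get_payload(b"raw"), BytesPayload)
#     assert isinstance(get_payload("text"), StringPayload)
#     assert isinstance(get_payload(io.BytesIO(b"x")), BytesIOPayload)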
# omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/http_parser.py
import abc
import asyncio
import collections
import re
import string
import zlib
from contextlib import suppress
from enum import IntEnum
from typing import (
Any,
Generic,
List,
NamedTuple,
Optional,
Pattern,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from multidict import CIMultiDict, CIMultiDictProxy, istr
from yarl import URL
from . import hdrs
from .base_protocol import BaseProtocol
from .helpers import NO_EXTENSIONS, BaseTimerContext
from .http_exceptions import (
BadHttpMessage,
BadStatusLine,
ContentEncodingError,
ContentLengthError,
InvalidHeader,
LineTooLong,
TransferEncodingError,
)
from .http_writer import HttpVersion, HttpVersion10
from .log import internal_logger
from .streams import EMPTY_PAYLOAD, StreamReader
from .typedefs import Final, RawHeaders
try:
import brotli
HAS_BROTLI = True
except ImportError: # pragma: no cover
HAS_BROTLI = False
__all__ = (
"HeadersParser",
"HttpParser",
"HttpRequestParser",
"HttpResponseParser",
"RawRequestMessage",
"RawResponseMessage",
)
ASCIISET: Final[Set[str]] = set(string.printable)
# See https://tools.ietf.org/html/rfc7230#section-3.1.1
# and https://tools.ietf.org/html/rfc7230#appendix-B
#
# method = token
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d+).(\d+)")
HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")
class RawRequestMessage(NamedTuple):
method: str
path: str
version: HttpVersion
headers: "CIMultiDictProxy[str]"
raw_headers: RawHeaders
should_close: bool
compression: Optional[str]
upgrade: bool
chunked: bool
url: URL
RawResponseMessage = collections.namedtuple(
"RawResponseMessage",
[
"version",
"code",
"reason",
"headers",
"raw_headers",
"should_close",
"compression",
"upgrade",
"chunked",
],
)
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
class ParseState(IntEnum):
PARSE_NONE = 0
PARSE_LENGTH = 1
PARSE_CHUNKED = 2
PARSE_UNTIL_EOF = 3
class ChunkState(IntEnum):
PARSE_CHUNKED_SIZE = 0
PARSE_CHUNKED_CHUNK = 1
PARSE_CHUNKED_CHUNK_EOF = 2
PARSE_MAYBE_TRAILERS = 3
PARSE_TRAILERS = 4
class HeadersParser:
def __init__(
self,
max_line_size: int = 8190,
max_headers: int = 32768,
max_field_size: int = 8190,
) -> None:
self.max_line_size = max_line_size
self.max_headers = max_headers
self.max_field_size = max_field_size
def parse_headers(
self, lines: List[bytes]
) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
headers: CIMultiDict[str] = CIMultiDict()
raw_headers = []
lines_idx = 1
line = lines[1]
line_count = len(lines)
while line:
# Parse initial header name : value pair.
try:
bname, bvalue = line.split(b":", 1)
except ValueError:
raise InvalidHeader(line) from None
bname = bname.strip(b" \t")
bvalue = bvalue.lstrip()
if HDRRE.search(bname):
raise InvalidHeader(bname)
if len(bname) > self.max_field_size:
raise LineTooLong(
"request header name {}".format(
bname.decode("utf8", "xmlcharrefreplace")
),
str(self.max_field_size),
str(len(bname)),
)
header_length = len(bvalue)
# next line
lines_idx += 1
line = lines[lines_idx]
# consume continuation lines
continuation = line and line[0] in (32, 9) # (' ', '\t')
if continuation:
bvalue_lst = [bvalue]
while continuation:
header_length += len(line)
if header_length > self.max_field_size:
raise LineTooLong(
"request header field {}".format(
bname.decode("utf8", "xmlcharrefreplace")
),
str(self.max_field_size),
str(header_length),
)
bvalue_lst.append(line)
# next line
lines_idx += 1
if lines_idx < line_count:
line = lines[lines_idx]
if line:
continuation = line[0] in (32, 9) # (' ', '\t')
else:
line = b""
break
bvalue = b"".join(bvalue_lst)
else:
if header_length > self.max_field_size:
raise LineTooLong(
"request header field {}".format(
bname.decode("utf8", "xmlcharrefreplace")
),
str(self.max_field_size),
str(header_length),
)
bvalue = bvalue.strip()
name = bname.decode("utf-8", "surrogateescape")
value = bvalue.decode("utf-8", "surrogateescape")
headers.add(name, value)
raw_headers.append((bname, bvalue))
return (CIMultiDictProxy(headers), tuple(raw_headers))
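# Parsing sketch: parse_headers() expects the raw message lines with the
# start-line at index 0 (it is skipped) and a terminating empty line.
#
#     hp = HeadersParser()
#     headers, raw = hp.parse_headers(
#         [b"GET / HTTP/1.1", b"Host: example.com", b""]
#     )
#     assert headers["Host"] == "example.com"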
class HttpParser(abc.ABC, Generic[_MsgT]):
def __init__(
self,
protocol: Optional[BaseProtocol] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
limit: int = 2**16,
max_line_size: int = 8190,
max_headers: int = 32768,
max_field_size: int = 8190,
timer: Optional[BaseTimerContext] = None,
code: Optional[int] = None,
method: Optional[str] = None,
readall: bool = False,
payload_exception: Optional[Type[BaseException]] = None,
response_with_body: bool = True,
read_until_eof: bool = False,
auto_decompress: bool = True,
) -> None:
self.protocol = protocol
self.loop = loop
self.max_line_size = max_line_size
self.max_headers = max_headers
self.max_field_size = max_field_size
self.timer = timer
self.code = code
self.method = method
self.readall = readall
self.payload_exception = payload_exception
self.response_with_body = response_with_body
self.read_until_eof = read_until_eof
self._lines: List[bytes] = []
self._tail = b""
self._upgraded = False
self._payload = None
self._payload_parser: Optional[HttpPayloadParser] = None
self._auto_decompress = auto_decompress
self._limit = limit
self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size)
@abc.abstractmethod
def parse_message(self, lines: List[bytes]) -> _MsgT:
pass
def feed_eof(self) -> Optional[_MsgT]:
if self._payload_parser is not None:
self._payload_parser.feed_eof()
self._payload_parser = None
else:
# try to extract partial message
if self._tail:
self._lines.append(self._tail)
if self._lines:
            if self._lines[-1] != b"\r\n":  # note: bytes, not str
self._lines.append(b"")
with suppress(Exception):
return self.parse_message(self._lines)
return None
def feed_data(
self,
data: bytes,
SEP: bytes = b"\r\n",
EMPTY: bytes = b"",
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
METH_CONNECT: str = hdrs.METH_CONNECT,
SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
messages = []
if self._tail:
data, self._tail = self._tail + data, b""
data_len = len(data)
start_pos = 0
loop = self.loop
while start_pos < data_len:
# read HTTP message (request/response line + headers), \r\n\r\n
# and split by lines
if self._payload_parser is None and not self._upgraded:
pos = data.find(SEP, start_pos)
# consume \r\n
if pos == start_pos and not self._lines:
start_pos = pos + 2
continue
if pos >= start_pos:
# line found
self._lines.append(data[start_pos:pos])
start_pos = pos + 2
# \r\n\r\n found
if self._lines[-1] == EMPTY:
try:
msg: _MsgT = self.parse_message(self._lines)
finally:
self._lines.clear()
def get_content_length() -> Optional[int]:
# payload length
length_hdr = msg.headers.get(CONTENT_LENGTH)
if length_hdr is None:
return None
try:
length = int(length_hdr)
except ValueError:
raise InvalidHeader(CONTENT_LENGTH)
if length < 0:
raise InvalidHeader(CONTENT_LENGTH)
return length
length = get_content_length()
# do not support old websocket spec
if SEC_WEBSOCKET_KEY1 in msg.headers:
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
self._upgraded = msg.upgrade
method = getattr(msg, "method", self.method)
assert self.protocol is not None
# calculate payload
if (
(length is not None and length > 0)
or msg.chunked
and not msg.upgrade
):
payload = StreamReader(
self.protocol,
timer=self.timer,
loop=loop,
limit=self._limit,
)
payload_parser = HttpPayloadParser(
payload,
length=length,
chunked=msg.chunked,
method=method,
compression=msg.compression,
code=self.code,
readall=self.readall,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
)
if not payload_parser.done:
self._payload_parser = payload_parser
elif method == METH_CONNECT:
assert isinstance(msg, RawRequestMessage)
payload = StreamReader(
self.protocol,
timer=self.timer,
loop=loop,
limit=self._limit,
)
self._upgraded = True
self._payload_parser = HttpPayloadParser(
payload,
method=msg.method,
compression=msg.compression,
readall=True,
auto_decompress=self._auto_decompress,
)
else:
if (
getattr(msg, "code", 100) >= 199
and length is None
and self.read_until_eof
):
payload = StreamReader(
self.protocol,
timer=self.timer,
loop=loop,
limit=self._limit,
)
payload_parser = HttpPayloadParser(
payload,
length=length,
chunked=msg.chunked,
method=method,
compression=msg.compression,
code=self.code,
readall=True,
response_with_body=self.response_with_body,
auto_decompress=self._auto_decompress,
)
if not payload_parser.done:
self._payload_parser = payload_parser
else:
payload = EMPTY_PAYLOAD
messages.append((msg, payload))
else:
self._tail = data[start_pos:]
data = EMPTY
break
# no parser, just store
elif self._payload_parser is None and self._upgraded:
assert not self._lines
break
# feed payload
elif data and start_pos < data_len:
assert not self._lines
assert self._payload_parser is not None
try:
eof, data = self._payload_parser.feed_data(data[start_pos:])
except BaseException as exc:
if self.payload_exception is not None:
self._payload_parser.payload.set_exception(
self.payload_exception(str(exc))
)
else:
self._payload_parser.payload.set_exception(exc)
eof = True
data = b""
if eof:
start_pos = 0
data_len = len(data)
self._payload_parser = None
continue
else:
break
if data and start_pos < data_len:
data = data[start_pos:]
else:
data = EMPTY
return messages, self._upgraded, data
def parse_headers(
self, lines: List[bytes]
) -> Tuple[
"CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
]:
"""Parses RFC 5322 headers from a stream.
Line continuations are supported. Returns list of header name
and value pairs. Header name is in upper case.
"""
headers, raw_headers = self._headers_parser.parse_headers(lines)
close_conn = None
encoding = None
upgrade = False
chunked = False
# keep-alive
conn = headers.get(hdrs.CONNECTION)
if conn:
v = conn.lower()
if v == "close":
close_conn = True
elif v == "keep-alive":
close_conn = False
elif v == "upgrade":
upgrade = True
# encoding
enc = headers.get(hdrs.CONTENT_ENCODING)
if enc:
enc = enc.lower()
if enc in ("gzip", "deflate", "br"):
encoding = enc
# chunking
te = headers.get(hdrs.TRANSFER_ENCODING)
if te is not None:
if "chunked" == te.lower():
chunked = True
else:
raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
if hdrs.CONTENT_LENGTH in headers:
raise BadHttpMessage(
"Content-Length can't be present with Transfer-Encoding",
)
return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
def set_upgraded(self, val: bool) -> None:
"""Set connection upgraded (to websocket) mode.
:param bool val: new state.
"""
self._upgraded = val
class HttpRequestParser(HttpParser[RawRequestMessage]):
"""Read request status line.
    An .http_exceptions.BadStatusLine exception could be
    raised in case of any errors in the status line.
Returns RawRequestMessage.
"""
def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
# request line
line = lines[0].decode("utf-8", "surrogateescape")
try:
method, path, version = line.split(None, 2)
except ValueError:
raise BadStatusLine(line) from None
if len(path) > self.max_line_size:
raise LineTooLong(
"Status line is too long", str(self.max_line_size), str(len(path))
)
# method
if not METHRE.match(method):
raise BadStatusLine(method)
# version
try:
if version.startswith("HTTP/"):
n1, n2 = version[5:].split(".", 1)
version_o = HttpVersion(int(n1), int(n2))
else:
raise BadStatusLine(version)
except Exception:
raise BadStatusLine(version)
if method == "CONNECT":
# authority-form,
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
url = URL.build(authority=path, encoded=True)
elif path.startswith("/"):
# origin-form,
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
path_part, _hash_separator, url_fragment = path.partition("#")
path_part, _question_mark_separator, qs_part = path_part.partition("?")
# NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
# NOTE: parser does, otherwise it results into the same
# NOTE: HTTP Request-Line input producing different
# NOTE: `yarl.URL()` objects
url = URL.build(
path=path_part,
query_string=qs_part,
fragment=url_fragment,
encoded=True,
)
else:
# absolute-form for proxy maybe,
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
url = URL(path, encoded=True)
# read headers
(
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
) = self.parse_headers(lines)
        if close is None:  # then the headers weren't set in the request
            if version_o <= HttpVersion10:
                # HTTP/1.0 closes by default unless keep-alive is requested
                close = True
            else:
                # HTTP/1.1 keeps the connection open unless close is requested
                close = False
return RawRequestMessage(
method,
path,
version_o,
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
url,
)
class HttpResponseParser(HttpParser[RawResponseMessage]):
"""Read response status line and headers.
    BadStatusLine could be raised in case of any errors in the status line.
Returns RawResponseMessage.
"""
def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
line = lines[0].decode("utf-8", "surrogateescape")
try:
version, status = line.split(None, 1)
except ValueError:
raise BadStatusLine(line) from None
try:
status, reason = status.split(None, 1)
except ValueError:
reason = ""
if len(reason) > self.max_line_size:
raise LineTooLong(
"Status line is too long", str(self.max_line_size), str(len(reason))
)
# version
match = VERSRE.match(version)
if match is None:
raise BadStatusLine(line)
version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
# The status code is a three-digit number
try:
status_i = int(status)
except ValueError:
raise BadStatusLine(line) from None
if status_i > 999:
raise BadStatusLine(line)
# read headers
(
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
) = self.parse_headers(lines)
if close is None:
close = version_o <= HttpVersion10
return RawResponseMessage(
version_o,
status_i,
reason.strip(),
headers,
raw_headers,
close,
compression,
upgrade,
chunked,
)
class HttpPayloadParser:
def __init__(
self,
payload: StreamReader,
length: Optional[int] = None,
chunked: bool = False,
compression: Optional[str] = None,
code: Optional[int] = None,
method: Optional[str] = None,
readall: bool = False,
response_with_body: bool = True,
auto_decompress: bool = True,
) -> None:
self._length = 0
self._type = ParseState.PARSE_NONE
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
self._chunk_size = 0
self._chunk_tail = b""
self._auto_decompress = auto_decompress
self.done = False
# payload decompression wrapper
if response_with_body and compression and self._auto_decompress:
real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
payload, compression
)
else:
real_payload = payload
# payload parser
if not response_with_body:
# don't parse payload if it's not expected to be received
self._type = ParseState.PARSE_NONE
real_payload.feed_eof()
self.done = True
elif chunked:
self._type = ParseState.PARSE_CHUNKED
elif length is not None:
self._type = ParseState.PARSE_LENGTH
self._length = length
if self._length == 0:
real_payload.feed_eof()
self.done = True
else:
if readall and code != 204:
self._type = ParseState.PARSE_UNTIL_EOF
elif method in ("PUT", "POST"):
internal_logger.warning( # pragma: no cover
"Content-Length or Transfer-Encoding header is required"
)
self._type = ParseState.PARSE_NONE
real_payload.feed_eof()
self.done = True
self.payload = real_payload
def feed_eof(self) -> None:
if self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_eof()
elif self._type == ParseState.PARSE_LENGTH:
raise ContentLengthError(
"Not enough data for satisfy content length header."
)
elif self._type == ParseState.PARSE_CHUNKED:
raise TransferEncodingError(
"Not enough data for satisfy transfer length header."
)
def feed_data(
self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
) -> Tuple[bool, bytes]:
# Read specified amount of bytes
if self._type == ParseState.PARSE_LENGTH:
required = self._length
chunk_len = len(chunk)
if required >= chunk_len:
self._length = required - chunk_len
self.payload.feed_data(chunk, chunk_len)
if self._length == 0:
self.payload.feed_eof()
return True, b""
else:
self._length = 0
self.payload.feed_data(chunk[:required], required)
self.payload.feed_eof()
return True, chunk[required:]
# Chunked transfer encoding parser
elif self._type == ParseState.PARSE_CHUNKED:
if self._chunk_tail:
chunk = self._chunk_tail + chunk
self._chunk_tail = b""
while chunk:
# read next chunk size
if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
pos = chunk.find(SEP)
if pos >= 0:
i = chunk.find(CHUNK_EXT, 0, pos)
if i >= 0:
size_b = chunk[:i] # strip chunk-extensions
else:
size_b = chunk[:pos]
try:
size = int(bytes(size_b), 16)
except ValueError:
exc = TransferEncodingError(
chunk[:pos].decode("ascii", "surrogateescape")
)
self.payload.set_exception(exc)
raise exc from None
chunk = chunk[pos + 2 :]
if size == 0: # eof marker
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
self._chunk_size = size
self.payload.begin_http_chunk_receiving()
else:
self._chunk_tail = chunk
return False, b""
# read chunk and feed buffer
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
required = self._chunk_size
chunk_len = len(chunk)
if required > chunk_len:
self._chunk_size = required - chunk_len
self.payload.feed_data(chunk, chunk_len)
return False, b""
else:
self._chunk_size = 0
self.payload.feed_data(chunk[:required], required)
chunk = chunk[required:]
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
self.payload.end_http_chunk_receiving()
# toss the CRLF at the end of the chunk
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
if chunk[:2] == SEP:
chunk = chunk[2:]
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
else:
self._chunk_tail = chunk
return False, b""
            # If the stream does not contain a trailer, after 0\r\n
            # we should get another \r\n; otherwise the trailers
            # need to be skipped until \r\n\r\n
if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
head = chunk[:2]
if head == SEP:
# end of stream
self.payload.feed_eof()
return True, chunk[2:]
                    # Both CR and LF, or only LF, may not have been
                    # received yet. CRLF or LF is expected as the very
                    # first bytes next time; otherwise trailers follow.
                    # The final CRLF that marks the end of the response
                    # might not be contained in the same TCP segment
                    # that delivered the size indicator.
if not head:
return False, b""
if head == SEP[:1]:
self._chunk_tail = head
return False, b""
self._chunk = ChunkState.PARSE_TRAILERS
# read and discard trailer up to the CRLF terminator
if self._chunk == ChunkState.PARSE_TRAILERS:
pos = chunk.find(SEP)
if pos >= 0:
chunk = chunk[pos + 2 :]
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
else:
self._chunk_tail = chunk
return False, b""
# Read all bytes until eof
elif self._type == ParseState.PARSE_UNTIL_EOF:
self.payload.feed_data(chunk, len(chunk))
return False, b""
class DeflateBuffer:
"""DeflateStream decompress stream and feed data into specified stream."""
decompressor: Any
def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
self.out = out
self.size = 0
self.encoding = encoding
self._started_decoding = False
if encoding == "br":
if not HAS_BROTLI: # pragma: no cover
raise ContentEncodingError(
"Can not decode content-encoding: brotli (br). "
"Please install `Brotli`"
)
class BrotliDecoder:
# Supports both 'brotlipy' and 'Brotli' packages
# since they share an import name. The top branches
# are for 'brotlipy' and bottom branches for 'Brotli'
def __init__(self) -> None:
self._obj = brotli.Decompressor()
def decompress(self, data: bytes) -> bytes:
if hasattr(self._obj, "decompress"):
return cast(bytes, self._obj.decompress(data))
return cast(bytes, self._obj.process(data))
def flush(self) -> bytes:
if hasattr(self._obj, "flush"):
return cast(bytes, self._obj.flush())
return b""
self.decompressor = BrotliDecoder()
else:
zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
self.decompressor = zlib.decompressobj(wbits=zlib_mode)
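        # The wbits values above follow zlib's own convention (a zlib fact,
        # not specific to this file): 16 + MAX_WBITS selects gzip framing,
        # MAX_WBITS selects an RFC 1950 zlib stream, and -MAX_WBITS (used
        # in feed_data below) selects a raw deflate stream:
        #
        #     zlib.decompressobj(wbits=-zlib.MAX_WBITS)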
def set_exception(self, exc: BaseException) -> None:
self.out.set_exception(exc)
def feed_data(self, chunk: bytes, size: int) -> None:
if not size:
return
self.size += size
# RFC1950
# bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = window size.
if (
not self._started_decoding
and self.encoding == "deflate"
and chunk[0] & 0xF != 8
):
# Change the decoder to decompress incorrectly compressed data
# Actually we should issue a warning about non-RFC-compliant data.
self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
try:
chunk = self.decompressor.decompress(chunk)
except Exception:
raise ContentEncodingError(
"Can not decode content-encoding: %s" % self.encoding
)
self._started_decoding = True
if chunk:
self.out.feed_data(chunk, len(chunk))
def feed_eof(self) -> None:
chunk = self.decompressor.flush()
if chunk or self.size > 0:
self.out.feed_data(chunk, len(chunk))
if self.encoding == "deflate" and not self.decompressor.eof:
raise ContentEncodingError("deflate")
self.out.feed_eof()
def begin_http_chunk_receiving(self) -> None:
self.out.begin_http_chunk_receiving()
def end_http_chunk_receiving(self) -> None:
self.out.end_http_chunk_receiving()
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage
try:
if not NO_EXTENSIONS:
from ._http_parser import ( # type: ignore[import,no-redef]
HttpRequestParser,
HttpResponseParser,
RawRequestMessage,
RawResponseMessage,
)
HttpRequestParserC = HttpRequestParser
HttpResponseParserC = HttpResponseParser
RawRequestMessageC = RawRequestMessage
RawResponseMessageC = RawResponseMessage
except ImportError: # pragma: no cover
pass
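# Feeding sketch (hedged: assumes `proto` is a live BaseProtocol and `loop`
# a running event loop; shown for orientation, not as a test):
#
#     parser = HttpRequestParser(proto, loop, 2 ** 16)
#     messages, upgraded, tail = parser.feed_data(
#         b"GET /path?q=1 HTTP/1.1\r\nHost: example.com\r\n\r\n"
#     )
#     msg, payload = messages[0]
#     # msg.method == "GET", str(msg.url) == "/path?q=1"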
# omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/worker.py
"""Async gunicorn worker for aiohttp.web"""
import asyncio
import os
import re
import signal
import sys
from types import FrameType
from typing import Any, Awaitable, Callable, Optional, Union # noqa
from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
from gunicorn.workers import base
from aiohttp import web
from .helpers import set_result
from .web_app import Application
from .web_log import AccessLogger
try:
import ssl
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[misc,assignment]
__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker")
class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default
def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover
super().__init__(*args, **kw)
self._task: Optional[asyncio.Task[None]] = None
self.exit_code = 0
self._notify_waiter: Optional[asyncio.Future[bool]] = None
def init_process(self) -> None:
# create new event_loop after fork
asyncio.get_event_loop().close()
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
super().init_process()
def run(self) -> None:
self._task = self.loop.create_task(self._run())
try: # ignore all finalization problems
self.loop.run_until_complete(self._task)
except Exception:
self.log.exception("Exception in gunicorn worker")
self.loop.run_until_complete(self.loop.shutdown_asyncgens())
self.loop.close()
sys.exit(self.exit_code)
async def _run(self) -> None:
runner = None
if isinstance(self.wsgi, Application):
app = self.wsgi
elif asyncio.iscoroutinefunction(self.wsgi):
wsgi = await self.wsgi()
if isinstance(wsgi, web.AppRunner):
runner = wsgi
app = runner.app
else:
app = wsgi
else:
raise RuntimeError(
"wsgi app should be either Application or "
"async function returning Application, got {}".format(self.wsgi)
)
if runner is None:
access_log = self.log.access_log if self.cfg.accesslog else None
runner = web.AppRunner(
app,
logger=self.log,
keepalive_timeout=self.cfg.keepalive,
access_log=access_log,
access_log_format=self._get_valid_log_format(
self.cfg.access_log_format
),
)
await runner.setup()
ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None
        assert runner is not None
server = runner.server
assert server is not None
for sock in self.sockets:
site = web.SockSite(
runner,
sock,
ssl_context=ctx,
shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
)
await site.start()
# If our parent changed then we shut down.
pid = os.getpid()
try:
while self.alive: # type: ignore[has-type]
self.notify()
cnt = server.requests_count
if self.cfg.max_requests and cnt > self.cfg.max_requests:
self.alive = False
self.log.info("Max requests, shutting down: %s", self)
elif pid == os.getpid() and self.ppid != os.getppid():
self.alive = False
self.log.info("Parent changed, shutting down: %s", self)
else:
await self._wait_next_notify()
except BaseException:
pass
await runner.cleanup()
def _wait_next_notify(self) -> "asyncio.Future[bool]":
self._notify_waiter_done()
loop = self.loop
assert loop is not None
self._notify_waiter = waiter = loop.create_future()
self.loop.call_later(1.0, self._notify_waiter_done, waiter)
return waiter
def _notify_waiter_done(
self, waiter: Optional["asyncio.Future[bool]"] = None
) -> None:
if waiter is None:
waiter = self._notify_waiter
if waiter is not None:
set_result(waiter, True)
if waiter is self._notify_waiter:
self._notify_waiter = None
def init_signals(self) -> None:
# Set up signals through the event loop API.
self.loop.add_signal_handler(
signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
)
self.loop.add_signal_handler(
signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
)
self.loop.add_signal_handler(
signal.SIGINT, self.handle_quit, signal.SIGINT, None
)
self.loop.add_signal_handler(
signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
)
self.loop.add_signal_handler(
signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
)
self.loop.add_signal_handler(
signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
)
# Don't let SIGTERM and SIGUSR1 disturb active requests
# by interrupting system calls
signal.siginterrupt(signal.SIGTERM, False)
signal.siginterrupt(signal.SIGUSR1, False)
# Reset signals so Gunicorn doesn't swallow subprocess return codes
# See: https://github.com/aio-libs/aiohttp/issues/6130
if sys.version_info < (3, 8):
# Starting from Python 3.8,
# the default child watcher is ThreadedChildWatcher.
# The watcher doesn't depend on SIGCHLD signal,
# there is no need to reset it.
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
def handle_quit(self, sig: int, frame: FrameType) -> None:
self.alive = False
# worker_int callback
self.cfg.worker_int(self)
# wakeup closing process
self._notify_waiter_done()
def handle_abort(self, sig: int, frame: FrameType) -> None:
self.alive = False
self.exit_code = 1
self.cfg.worker_abort(self)
sys.exit(1)
@staticmethod
def _create_ssl_context(cfg: Any) -> "SSLContext":
"""Creates SSLContext instance for usage in asyncio.create_server.
See ssl.SSLSocket.__init__ for more details.
"""
if ssl is None: # pragma: no cover
raise RuntimeError("SSL is not supported.")
ctx = ssl.SSLContext(cfg.ssl_version)
ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
ctx.verify_mode = cfg.cert_reqs
if cfg.ca_certs:
ctx.load_verify_locations(cfg.ca_certs)
if cfg.ciphers:
ctx.set_ciphers(cfg.ciphers)
return ctx
def _get_valid_log_format(self, source_format: str) -> str:
if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
return self.DEFAULT_AIOHTTP_LOG_FORMAT
elif re.search(r"%\([^\)]+\)", source_format):
raise ValueError(
"Gunicorn's style options in form of `%(name)s` are not "
"supported for the log formatting. Please use aiohttp's "
"format specification to configure access log formatting: "
"http://docs.aiohttp.org/en/stable/logging.html"
"#format-specification"
)
else:
return source_format
class GunicornUVLoopWebWorker(GunicornWebWorker):
def init_process(self) -> None:
import uvloop
# Close any existing event loop before setting a
# new policy.
asyncio.get_event_loop().close()
# Setup uvloop policy, so that every
# asyncio.get_event_loop() will create an instance
# of uvloop event loop.
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
super().init_process()
class GunicornTokioWebWorker(GunicornWebWorker):
def init_process(self) -> None: # pragma: no cover
import tokio
# Close any existing event loop before setting a
# new policy.
asyncio.get_event_loop().close()
# Setup tokio policy, so that every
# asyncio.get_event_loop() will create an instance
# of tokio event loop.
asyncio.set_event_loop_policy(tokio.EventLoopPolicy())
super().init_process()
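# Editor's sketch (illustrative; assumes a module ``myapp`` exposing an
# ``aiohttp.web.Application`` instance named ``app``): these workers are
# selected on the gunicorn command line, e.g.
#
#     gunicorn myapp:app --bind :8080 --worker-class aiohttp.GunicornWebWorker
#
# Substitute aiohttp.GunicornUVLoopWebWorker to serve the same application on
# a uvloop event loop (requires ``uvloop`` to be installed).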
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_request.py
import asyncio
import datetime
import io
import re
import socket
import string
import tempfile
import types
import warnings
from http.cookies import SimpleCookie
from types import MappingProxyType
from typing import (
TYPE_CHECKING,
Any,
Dict,
Iterator,
Mapping,
MutableMapping,
Optional,
Pattern,
Tuple,
Union,
cast,
)
from urllib.parse import parse_qsl
import attr
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
from yarl import URL
from . import hdrs
from .abc import AbstractStreamWriter
from .helpers import (
DEBUG,
ETAG_ANY,
LIST_QUOTED_ETAG_RE,
ChainMapProxy,
ETag,
HeadersMixin,
parse_http_date,
reify,
sentinel,
)
from .http_parser import RawRequestMessage
from .http_writer import HttpVersion
from .multipart import BodyPartReader, MultipartReader
from .streams import EmptyStreamReader, StreamReader
from .typedefs import (
DEFAULT_JSON_DECODER,
Final,
JSONDecoder,
LooseHeaders,
RawHeaders,
StrOrURL,
)
from .web_exceptions import HTTPRequestEntityTooLarge
from .web_response import StreamResponse
__all__ = ("BaseRequest", "FileField", "Request")
if TYPE_CHECKING: # pragma: no cover
from .web_app import Application
from .web_protocol import RequestHandler
from .web_urldispatcher import UrlMappingMatchInfo
@attr.s(auto_attribs=True, frozen=True, slots=True)
class FileField:
name: str
filename: str
file: io.BufferedReader
content_type: str
headers: "CIMultiDictProxy[str]"
_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
# '-' at the end to prevent interpretation as range in a char class
_TOKEN: Final[str] = rf"[{_TCHAR}]+"
_QDTEXT: Final[str] = r"[{}]".format(
r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
)
# qdtext includes 0x5C to escape 0x5D ('\]')
# qdtext excludes obs-text (because obsoleted, and encoding not specified)
_QUOTED_PAIR: Final[str] = r"\\[\t !-~]"
_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format(
qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR
)
_FORWARDED_PAIR: Final[
str
] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format(
token=_TOKEN, quoted_string=_QUOTED_STRING
)
_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])")
# same pattern as _QUOTED_PAIR but contains a capture group
_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR)
############################################################
# HTTP Request
############################################################
class BaseRequest(MutableMapping[str, Any], HeadersMixin):
POST_METHODS = {
hdrs.METH_PATCH,
hdrs.METH_POST,
hdrs.METH_PUT,
hdrs.METH_TRACE,
hdrs.METH_DELETE,
}
ATTRS = HeadersMixin.ATTRS | frozenset(
[
"_message",
"_protocol",
"_payload_writer",
"_payload",
"_headers",
"_method",
"_version",
"_rel_url",
"_post",
"_read_bytes",
"_state",
"_cache",
"_task",
"_client_max_size",
"_loop",
"_transport_sslcontext",
"_transport_peername",
]
)
def __init__(
self,
message: RawRequestMessage,
payload: StreamReader,
protocol: "RequestHandler",
payload_writer: AbstractStreamWriter,
task: "asyncio.Task[None]",
loop: asyncio.AbstractEventLoop,
*,
client_max_size: int = 1024**2,
state: Optional[Dict[str, Any]] = None,
scheme: Optional[str] = None,
host: Optional[str] = None,
remote: Optional[str] = None,
) -> None:
if state is None:
state = {}
self._message = message
self._protocol = protocol
self._payload_writer = payload_writer
self._payload = payload
self._headers = message.headers
self._method = message.method
self._version = message.version
self._cache: Dict[str, Any] = {}
url = message.url
if url.is_absolute():
# absolute URL is given,
# override auto-calculating url, host, and scheme
# all other properties should be good
self._cache["url"] = url
self._cache["host"] = url.host
self._cache["scheme"] = url.scheme
self._rel_url = url.relative()
else:
self._rel_url = message.url
self._post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None
self._read_bytes: Optional[bytes] = None
self._state = state
self._task = task
self._client_max_size = client_max_size
self._loop = loop
transport = self._protocol.transport
assert transport is not None
self._transport_sslcontext = transport.get_extra_info("sslcontext")
self._transport_peername = transport.get_extra_info("peername")
if scheme is not None:
self._cache["scheme"] = scheme
if host is not None:
self._cache["host"] = host
if remote is not None:
self._cache["remote"] = remote
def clone(
self,
*,
method: str = sentinel,
rel_url: StrOrURL = sentinel,
headers: LooseHeaders = sentinel,
scheme: str = sentinel,
host: str = sentinel,
remote: str = sentinel,
) -> "BaseRequest":
"""Clone itself with replacement some attributes.
Creates and returns a new instance of Request object. If no parameters
are given, an exact copy is returned. If a parameter is not passed, it
will reuse the one from the current request object.
"""
if self._read_bytes:
raise RuntimeError("Cannot clone request " "after reading its content")
dct: Dict[str, Any] = {}
if method is not sentinel:
dct["method"] = method
if rel_url is not sentinel:
new_url = URL(rel_url)
dct["url"] = new_url
dct["path"] = str(new_url)
if headers is not sentinel:
# a copy semantic
dct["headers"] = CIMultiDictProxy(CIMultiDict(headers))
dct["raw_headers"] = tuple(
(k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
)
message = self._message._replace(**dct)
kwargs = {}
if scheme is not sentinel:
kwargs["scheme"] = scheme
if host is not sentinel:
kwargs["host"] = host
if remote is not sentinel:
kwargs["remote"] = remote
return self.__class__(
message,
self._payload,
self._protocol,
self._payload_writer,
self._task,
self._loop,
client_max_size=self._client_max_size,
state=self._state.copy(),
**kwargs,
)
@property
def task(self) -> "asyncio.Task[None]":
return self._task
@property
def protocol(self) -> "RequestHandler":
return self._protocol
@property
def transport(self) -> Optional[asyncio.Transport]:
if self._protocol is None:
return None
return self._protocol.transport
@property
def writer(self) -> AbstractStreamWriter:
return self._payload_writer
@reify
def message(self) -> RawRequestMessage:
warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3)
return self._message
@reify
def rel_url(self) -> URL:
return self._rel_url
@reify
def loop(self) -> asyncio.AbstractEventLoop:
warnings.warn(
"request.loop property is deprecated", DeprecationWarning, stacklevel=2
)
return self._loop
# MutableMapping API
def __getitem__(self, key: str) -> Any:
return self._state[key]
def __setitem__(self, key: str, value: Any) -> None:
self._state[key] = value
def __delitem__(self, key: str) -> None:
del self._state[key]
def __len__(self) -> int:
return len(self._state)
def __iter__(self) -> Iterator[str]:
return iter(self._state)
########
@reify
def secure(self) -> bool:
"""A bool indicating if the request is handled with SSL."""
return self.scheme == "https"
@reify
def forwarded(self) -> Tuple[Mapping[str, str], ...]:
"""A tuple containing all parsed Forwarded header(s).
Makes an effort to parse Forwarded headers as specified by RFC 7239:
        - It adds one (immutable) dictionary per Forwarded 'field-value', i.e.
          per proxy. The first element corresponds to the data in the Forwarded
          field-value added by the first proxy encountered by the client. Each
          subsequent item corresponds to those added by later proxies.
- It checks that every value has valid syntax in general as specified
in section 4: either a 'token' or a 'quoted-string'.
- It un-escapes found escape sequences.
- It does NOT validate 'by' and 'for' contents as specified in section
6.
- It does NOT validate 'host' contents (Host ABNF).
- It does NOT validate 'proto' contents for valid URI scheme names.
Returns a tuple containing one or more immutable dicts
"""
elems = []
for field_value in self._message.headers.getall(hdrs.FORWARDED, ()):
length = len(field_value)
pos = 0
need_separator = False
elem: Dict[str, str] = {}
elems.append(types.MappingProxyType(elem))
while 0 <= pos < length:
match = _FORWARDED_PAIR_RE.match(field_value, pos)
if match is not None: # got a valid forwarded-pair
if need_separator:
# bad syntax here, skip to next comma
pos = field_value.find(",", pos)
else:
name, value, port = match.groups()
if value[0] == '"':
# quoted string: remove quotes and unescape
value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1])
if port:
value += port
elem[name.lower()] = value
pos += len(match.group(0))
need_separator = True
elif field_value[pos] == ",": # next forwarded-element
need_separator = False
elem = {}
elems.append(types.MappingProxyType(elem))
pos += 1
elif field_value[pos] == ";": # next forwarded-pair
need_separator = False
pos += 1
elif field_value[pos] in " \t":
# Allow whitespace even between forwarded-pairs, though
# RFC 7239 doesn't. This simplifies code and is in line
# with Postel's law.
pos += 1
else:
# bad syntax here, skip to next comma
pos = field_value.find(",", pos)
return tuple(elems)
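    # Editor's note (illustrative example, not part of aiohttp): for a header
    #     Forwarded: for=192.0.2.60;proto=http;by=203.0.113.43, for=198.51.100.17
    # ``request.forwarded`` evaluates to
    #     ({'for': '192.0.2.60', 'proto': 'http', 'by': '203.0.113.43'},
    #      {'for': '198.51.100.17'})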
@reify
def scheme(self) -> str:
"""A string representing the scheme of the request.
        Scheme is resolved in this order:
- overridden value by .clone(scheme=new_scheme) call.
- type of connection to peer: HTTPS if socket is SSL, HTTP otherwise.
'http' or 'https'.
"""
if self._transport_sslcontext:
return "https"
else:
return "http"
@reify
def method(self) -> str:
"""Read only property for getting HTTP method.
The value is upper-cased str like 'GET', 'POST', 'PUT' etc.
"""
return self._method
@reify
def version(self) -> HttpVersion:
"""Read only property for getting HTTP version of request.
Returns aiohttp.protocol.HttpVersion instance.
"""
return self._version
@reify
def host(self) -> str:
"""Hostname of the request.
Hostname is resolved in this order:
- overridden value by .clone(host=new_host) call.
- HOST HTTP header
- socket.getfqdn() value
"""
host = self._message.headers.get(hdrs.HOST)
if host is not None:
return host
return socket.getfqdn()
@reify
def remote(self) -> Optional[str]:
"""Remote IP of client initiated HTTP request.
The IP is resolved in this order:
- overridden value by .clone(remote=new_remote) call.
- peername of opened socket
"""
if self._transport_peername is None:
return None
if isinstance(self._transport_peername, (list, tuple)):
return str(self._transport_peername[0])
return str(self._transport_peername)
@reify
def url(self) -> URL:
url = URL.build(scheme=self.scheme, host=self.host)
return url.join(self._rel_url)
@reify
def path(self) -> str:
"""The URL including *PATH INFO* without the host or scheme.
E.g., ``/app/blog``
"""
return self._rel_url.path
@reify
def path_qs(self) -> str:
"""The URL including PATH_INFO and the query string.
        E.g., /app/blog?id=10
"""
return str(self._rel_url)
@reify
def raw_path(self) -> str:
"""The URL including raw *PATH INFO* without the host or scheme.
        Warning: the path is unquoted and may contain invalid URL characters.
E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``
"""
return self._message.path
@reify
def query(self) -> "MultiDictProxy[str]":
"""A multidict with all the variables in the query string."""
return MultiDictProxy(self._rel_url.query)
@reify
def query_string(self) -> str:
"""The query string in the URL.
E.g., id=10
"""
return self._rel_url.query_string
@reify
def headers(self) -> "CIMultiDictProxy[str]":
"""A case-insensitive multidict proxy with all headers."""
return self._headers
@reify
def raw_headers(self) -> RawHeaders:
"""A sequence of pairs for all headers."""
return self._message.raw_headers
@reify
def if_modified_since(self) -> Optional[datetime.datetime]:
"""The value of If-Modified-Since HTTP header, or None.
This header is represented as a `datetime` object.
"""
return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
@reify
def if_unmodified_since(self) -> Optional[datetime.datetime]:
"""The value of If-Unmodified-Since HTTP header, or None.
This header is represented as a `datetime` object.
"""
return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
@staticmethod
def _etag_values(etag_header: str) -> Iterator[ETag]:
"""Extract `ETag` objects from raw header."""
if etag_header == ETAG_ANY:
yield ETag(
is_weak=False,
value=ETAG_ANY,
)
else:
for match in LIST_QUOTED_ETAG_RE.finditer(etag_header):
is_weak, value, garbage = match.group(2, 3, 4)
# Any symbol captured by 4th group means
# that the following sequence is invalid.
if garbage:
break
yield ETag(
is_weak=bool(is_weak),
value=value,
)
@classmethod
def _if_match_or_none_impl(
cls, header_value: Optional[str]
) -> Optional[Tuple[ETag, ...]]:
if not header_value:
return None
return tuple(cls._etag_values(header_value))
@reify
def if_match(self) -> Optional[Tuple[ETag, ...]]:
"""The value of If-Match HTTP header, or None.
This header is represented as a `tuple` of `ETag` objects.
"""
return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH))
@reify
def if_none_match(self) -> Optional[Tuple[ETag, ...]]:
"""The value of If-None-Match HTTP header, or None.
This header is represented as a `tuple` of `ETag` objects.
"""
return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH))
@reify
def if_range(self) -> Optional[datetime.datetime]:
"""The value of If-Range HTTP header, or None.
This header is represented as a `datetime` object.
"""
return parse_http_date(self.headers.get(hdrs.IF_RANGE))
@reify
def keep_alive(self) -> bool:
"""Is keepalive enabled by client?"""
return not self._message.should_close
@reify
def cookies(self) -> Mapping[str, str]:
"""Return request cookies.
A read-only dictionary-like object.
"""
raw = self.headers.get(hdrs.COOKIE, "")
parsed: SimpleCookie[str] = SimpleCookie(raw)
return MappingProxyType({key: val.value for key, val in parsed.items()})
@reify
def http_range(self) -> slice:
"""The content of Range HTTP header.
Return a slice instance.
"""
rng = self._headers.get(hdrs.RANGE)
start, end = None, None
if rng is not None:
try:
pattern = r"^bytes=(\d*)-(\d*)$"
start, end = re.findall(pattern, rng)[0]
except IndexError: # pattern was not found in header
raise ValueError("range not in acceptable format")
end = int(end) if end else None
start = int(start) if start else None
if start is None and end is not None:
# end with no start is to return tail of content
start = -end
end = None
if start is not None and end is not None:
# end is inclusive in range header, exclusive for slice
end += 1
if start >= end:
raise ValueError("start cannot be after end")
if start is end is None: # No valid range supplied
raise ValueError("No start or end of range specified")
return slice(start, end, 1)
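    # Editor's note (illustrative mapping, derived from the property above):
    #     Range: bytes=0-499  -> slice(0, 500, 1)     (inclusive end becomes exclusive)
    #     Range: bytes=9500-  -> slice(9500, None, 1)
    #     Range: bytes=-500   -> slice(-500, None, 1) (tail of the content)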
@reify
def content(self) -> StreamReader:
"""Return raw payload stream."""
return self._payload
@property
def has_body(self) -> bool:
"""Return True if request's HTTP BODY can be read, False otherwise."""
warnings.warn(
"Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2
)
return not self._payload.at_eof()
@property
def can_read_body(self) -> bool:
"""Return True if request's HTTP BODY can be read, False otherwise."""
return not self._payload.at_eof()
@reify
def body_exists(self) -> bool:
"""Return True if request has HTTP BODY, False otherwise."""
return type(self._payload) is not EmptyStreamReader
async def release(self) -> None:
"""Release request.
Eat unread part of HTTP BODY if present.
"""
while not self._payload.at_eof():
await self._payload.readany()
async def read(self) -> bytes:
"""Read request body if present.
Returns bytes object with full request content.
"""
if self._read_bytes is None:
body = bytearray()
while True:
chunk = await self._payload.readany()
body.extend(chunk)
if self._client_max_size:
body_size = len(body)
if body_size >= self._client_max_size:
raise HTTPRequestEntityTooLarge(
max_size=self._client_max_size, actual_size=body_size
)
if not chunk:
break
self._read_bytes = bytes(body)
return self._read_bytes
async def text(self) -> str:
"""Return BODY as text using encoding from .charset."""
bytes_body = await self.read()
encoding = self.charset or "utf-8"
return bytes_body.decode(encoding)
async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any:
"""Return BODY as JSON."""
body = await self.text()
return loads(body)
async def multipart(self) -> MultipartReader:
"""Return async iterator to process BODY as multipart."""
return MultipartReader(self._headers, self._payload)
async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
"""Return POST parameters."""
if self._post is not None:
return self._post
if self._method not in self.POST_METHODS:
self._post = MultiDictProxy(MultiDict())
return self._post
content_type = self.content_type
if content_type not in (
"",
"application/x-www-form-urlencoded",
"multipart/form-data",
):
self._post = MultiDictProxy(MultiDict())
return self._post
out: MultiDict[Union[str, bytes, FileField]] = MultiDict()
if content_type == "multipart/form-data":
multipart = await self.multipart()
max_size = self._client_max_size
field = await multipart.next()
while field is not None:
size = 0
field_ct = field.headers.get(hdrs.CONTENT_TYPE)
if isinstance(field, BodyPartReader):
assert field.name is not None
# Note that according to RFC 7578, the Content-Type header
# is optional, even for files, so we can't assume it's
# present.
# https://tools.ietf.org/html/rfc7578#section-4.4
if field.filename:
# store file in temp file
tmp = tempfile.TemporaryFile()
chunk = await field.read_chunk(size=2**16)
while chunk:
chunk = field.decode(chunk)
tmp.write(chunk)
size += len(chunk)
if 0 < max_size < size:
tmp.close()
raise HTTPRequestEntityTooLarge(
max_size=max_size, actual_size=size
)
chunk = await field.read_chunk(size=2**16)
tmp.seek(0)
if field_ct is None:
field_ct = "application/octet-stream"
ff = FileField(
field.name,
field.filename,
cast(io.BufferedReader, tmp),
field_ct,
field.headers,
)
out.add(field.name, ff)
else:
# deal with ordinary data
value = await field.read(decode=True)
if field_ct is None or field_ct.startswith("text/"):
charset = field.get_charset(default="utf-8")
out.add(field.name, value.decode(charset))
else:
out.add(field.name, value)
size += len(value)
if 0 < max_size < size:
raise HTTPRequestEntityTooLarge(
max_size=max_size, actual_size=size
)
else:
raise ValueError(
"To decode nested multipart you need " "to use custom reader",
)
field = await multipart.next()
else:
data = await self.read()
if data:
charset = self.charset or "utf-8"
out.extend(
parse_qsl(
data.rstrip().decode(charset),
keep_blank_values=True,
encoding=charset,
)
)
self._post = MultiDictProxy(out)
return self._post
def get_extra_info(self, name: str, default: Any = None) -> Any:
"""Extra info from protocol transport"""
protocol = self._protocol
if protocol is None:
return default
transport = protocol.transport
if transport is None:
return default
return transport.get_extra_info(name, default)
def __repr__(self) -> str:
ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode(
"ascii"
)
return "<{} {} {} >".format(
self.__class__.__name__, self._method, ascii_encodable_path
)
def __eq__(self, other: object) -> bool:
return id(self) == id(other)
def __bool__(self) -> bool:
return True
async def _prepare_hook(self, response: StreamResponse) -> None:
return
def _cancel(self, exc: BaseException) -> None:
self._payload.set_exception(exc)
class Request(BaseRequest):
ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"])
def __init__(self, *args: Any, **kwargs: Any) -> None:
super().__init__(*args, **kwargs)
# matchdict, route_name, handler
# or information about traversal lookup
# initialized after route resolving
self._match_info: Optional[UrlMappingMatchInfo] = None
if DEBUG:
def __setattr__(self, name: str, val: Any) -> None:
if name not in self.ATTRS:
warnings.warn(
"Setting custom {}.{} attribute "
"is discouraged".format(self.__class__.__name__, name),
DeprecationWarning,
stacklevel=2,
)
super().__setattr__(name, val)
def clone(
self,
*,
method: str = sentinel,
rel_url: StrOrURL = sentinel,
headers: LooseHeaders = sentinel,
scheme: str = sentinel,
host: str = sentinel,
remote: str = sentinel,
) -> "Request":
ret = super().clone(
method=method,
rel_url=rel_url,
headers=headers,
scheme=scheme,
host=host,
remote=remote,
)
new_ret = cast(Request, ret)
new_ret._match_info = self._match_info
return new_ret
@reify
def match_info(self) -> "UrlMappingMatchInfo":
"""Result of route resolving."""
match_info = self._match_info
assert match_info is not None
return match_info
@property
def app(self) -> "Application":
"""Application instance."""
match_info = self._match_info
assert match_info is not None
return match_info.current_app
@property
def config_dict(self) -> ChainMapProxy:
match_info = self._match_info
assert match_info is not None
lst = match_info.apps
app = self.app
idx = lst.index(app)
sublist = list(reversed(lst[: idx + 1]))
return ChainMapProxy(sublist)
async def _prepare_hook(self, response: StreamResponse) -> None:
match_info = self._match_info
if match_info is None:
return
for app in match_info._apps:
await app.on_response_prepare.send(self, response)
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/aiohttp/web_urldispatcher.py
import abc
import asyncio
import base64
import hashlib
import inspect
import keyword
import os
import re
import warnings
from contextlib import contextmanager
from functools import wraps
from pathlib import Path
from types import MappingProxyType
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
Container,
Dict,
Generator,
Iterable,
Iterator,
List,
Mapping,
Optional,
Pattern,
Set,
Sized,
Tuple,
Type,
Union,
cast,
)
from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined]
from . import hdrs
from .abc import AbstractMatchInfo, AbstractRouter, AbstractView
from .helpers import DEBUG
from .http import HttpVersion11
from .typedefs import Final, Handler, PathLike, TypedDict
from .web_exceptions import (
HTTPException,
HTTPExpectationFailed,
HTTPForbidden,
HTTPMethodNotAllowed,
HTTPNotFound,
)
from .web_fileresponse import FileResponse
from .web_request import Request
from .web_response import Response, StreamResponse
from .web_routedef import AbstractRouteDef
__all__ = (
"UrlDispatcher",
"UrlMappingMatchInfo",
"AbstractResource",
"Resource",
"PlainResource",
"DynamicResource",
"AbstractRoute",
"ResourceRoute",
"StaticResource",
"View",
)
if TYPE_CHECKING: # pragma: no cover
from .web_app import Application
BaseDict = Dict[str, str]
else:
BaseDict = dict
YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2]))
HTTP_METHOD_RE: Final[Pattern[str]] = re.compile(
r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$"
)
ROUTE_RE: Final[Pattern[str]] = re.compile(
r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})"
)
PATH_SEP: Final[str] = re.escape("/")
_ExpectHandler = Callable[[Request], Awaitable[None]]
_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]]
class _InfoDict(TypedDict, total=False):
path: str
formatter: str
pattern: Pattern[str]
directory: Path
prefix: str
routes: Mapping[str, "AbstractRoute"]
app: "Application"
domain: str
rule: "AbstractRuleMatching"
http_exception: HTTPException
class AbstractResource(Sized, Iterable["AbstractRoute"]):
def __init__(self, *, name: Optional[str] = None) -> None:
self._name = name
@property
def name(self) -> Optional[str]:
return self._name
@property
@abc.abstractmethod
def canonical(self) -> str:
"""Exposes the resource's canonical path.
For example '/foo/bar/{name}'
"""
@abc.abstractmethod # pragma: no branch
def url_for(self, **kwargs: str) -> URL:
"""Construct url for resource with additional params."""
@abc.abstractmethod # pragma: no branch
async def resolve(self, request: Request) -> _Resolve:
"""Resolve resource.
Return (UrlMappingMatchInfo, allowed_methods) pair.
"""
@abc.abstractmethod
def add_prefix(self, prefix: str) -> None:
"""Add a prefix to processed URLs.
Required for subapplications support.
"""
@abc.abstractmethod
def get_info(self) -> _InfoDict:
"""Return a dict with additional info useful for introspection"""
def freeze(self) -> None:
pass
@abc.abstractmethod
def raw_match(self, path: str) -> bool:
"""Perform a raw match against path"""
class AbstractRoute(abc.ABC):
def __init__(
self,
method: str,
handler: Union[Handler, Type[AbstractView]],
*,
expect_handler: Optional[_ExpectHandler] = None,
resource: Optional[AbstractResource] = None,
) -> None:
if expect_handler is None:
expect_handler = _default_expect_handler
assert asyncio.iscoroutinefunction(
expect_handler
), f"Coroutine is expected, got {expect_handler!r}"
method = method.upper()
if not HTTP_METHOD_RE.match(method):
raise ValueError(f"{method} is not allowed HTTP method")
assert callable(handler), handler
if asyncio.iscoroutinefunction(handler):
pass
elif inspect.isgeneratorfunction(handler):
warnings.warn(
"Bare generators are deprecated, " "use @coroutine wrapper",
DeprecationWarning,
)
elif isinstance(handler, type) and issubclass(handler, AbstractView):
pass
else:
warnings.warn(
"Bare functions are deprecated, " "use async ones", DeprecationWarning
)
@wraps(handler)
async def handler_wrapper(request: Request) -> StreamResponse:
result = old_handler(request)
if asyncio.iscoroutine(result):
return await result
return result # type: ignore[return-value]
old_handler = handler
handler = handler_wrapper
self._method = method
self._handler = handler
self._expect_handler = expect_handler
self._resource = resource
@property
def method(self) -> str:
return self._method
@property
def handler(self) -> Handler:
return self._handler
@property
@abc.abstractmethod
def name(self) -> Optional[str]:
"""Optional route's name, always equals to resource's name."""
@property
def resource(self) -> Optional[AbstractResource]:
return self._resource
@abc.abstractmethod
def get_info(self) -> _InfoDict:
"""Return a dict with additional info useful for introspection"""
@abc.abstractmethod # pragma: no branch
def url_for(self, *args: str, **kwargs: str) -> URL:
"""Construct url for route with additional params."""
async def handle_expect_header(self, request: Request) -> None:
await self._expect_handler(request)
class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo):
def __init__(self, match_dict: Dict[str, str], route: AbstractRoute):
super().__init__(match_dict)
self._route = route
self._apps: List[Application] = []
self._current_app: Optional[Application] = None
self._frozen = False
@property
def handler(self) -> Handler:
return self._route.handler
@property
def route(self) -> AbstractRoute:
return self._route
@property
def expect_handler(self) -> _ExpectHandler:
return self._route.handle_expect_header
@property
def http_exception(self) -> Optional[HTTPException]:
return None
def get_info(self) -> _InfoDict: # type: ignore[override]
return self._route.get_info()
@property
def apps(self) -> Tuple["Application", ...]:
return tuple(self._apps)
def add_app(self, app: "Application") -> None:
if self._frozen:
raise RuntimeError("Cannot change apps stack after .freeze() call")
if self._current_app is None:
self._current_app = app
self._apps.insert(0, app)
@property
def current_app(self) -> "Application":
app = self._current_app
assert app is not None
return app
@contextmanager
def set_current_app(self, app: "Application") -> Generator[None, None, None]:
if DEBUG: # pragma: no cover
if app not in self._apps:
raise RuntimeError(
"Expected one of the following apps {!r}, got {!r}".format(
self._apps, app
)
)
prev = self._current_app
self._current_app = app
try:
yield
finally:
self._current_app = prev
def freeze(self) -> None:
self._frozen = True
def __repr__(self) -> str:
return f"<MatchInfo {super().__repr__()}: {self._route}>"
class MatchInfoError(UrlMappingMatchInfo):
def __init__(self, http_exception: HTTPException) -> None:
self._exception = http_exception
super().__init__({}, SystemRoute(self._exception))
@property
def http_exception(self) -> HTTPException:
return self._exception
def __repr__(self) -> str:
return "<MatchInfoError {}: {}>".format(
self._exception.status, self._exception.reason
)
async def _default_expect_handler(request: Request) -> None:
"""Default handler for Expect header.
Just send "100 Continue" to client.
raise HTTPExpectationFailed if value of header is not "100-continue"
"""
expect = request.headers.get(hdrs.EXPECT, "")
if request.version == HttpVersion11:
if expect.lower() == "100-continue":
await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
else:
raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
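# Editor's note (illustrative flow): an HTTP/1.1 client that sends
#     Expect: 100-continue
# receives "HTTP/1.1 100 Continue" before transmitting the body; any other
# Expect value is rejected with 417 Expectation Failed. Routes can override
# this behaviour by passing a custom ``expect_handler`` coroutine to
# ``add_route()``.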
class Resource(AbstractResource):
def __init__(self, *, name: Optional[str] = None) -> None:
super().__init__(name=name)
self._routes: List[ResourceRoute] = []
def add_route(
self,
method: str,
handler: Union[Type[AbstractView], Handler],
*,
expect_handler: Optional[_ExpectHandler] = None,
) -> "ResourceRoute":
for route_obj in self._routes:
if route_obj.method == method or route_obj.method == hdrs.METH_ANY:
raise RuntimeError(
"Added route will never be executed, "
"method {route.method} is already "
"registered".format(route=route_obj)
)
route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler)
self.register_route(route_obj)
return route_obj
def register_route(self, route: "ResourceRoute") -> None:
assert isinstance(
route, ResourceRoute
), f"Instance of Route class is required, got {route!r}"
self._routes.append(route)
async def resolve(self, request: Request) -> _Resolve:
allowed_methods: Set[str] = set()
match_dict = self._match(request.rel_url.raw_path)
if match_dict is None:
return None, allowed_methods
for route_obj in self._routes:
route_method = route_obj.method
allowed_methods.add(route_method)
if route_method == request.method or route_method == hdrs.METH_ANY:
return (UrlMappingMatchInfo(match_dict, route_obj), allowed_methods)
else:
return None, allowed_methods
@abc.abstractmethod
def _match(self, path: str) -> Optional[Dict[str, str]]:
pass # pragma: no cover
def __len__(self) -> int:
return len(self._routes)
def __iter__(self) -> Iterator[AbstractRoute]:
return iter(self._routes)
# TODO: implement all abstract methods
class PlainResource(Resource):
def __init__(self, path: str, *, name: Optional[str] = None) -> None:
super().__init__(name=name)
assert not path or path.startswith("/")
self._path = path
@property
def canonical(self) -> str:
return self._path
def freeze(self) -> None:
if not self._path:
self._path = "/"
def add_prefix(self, prefix: str) -> None:
assert prefix.startswith("/")
assert not prefix.endswith("/")
assert len(prefix) > 1
self._path = prefix + self._path
def _match(self, path: str) -> Optional[Dict[str, str]]:
# string comparison is about 10 times faster than regexp matching
if self._path == path:
return {}
else:
return None
def raw_match(self, path: str) -> bool:
return self._path == path
def get_info(self) -> _InfoDict:
return {"path": self._path}
def url_for(self) -> URL: # type: ignore[override]
return URL.build(path=self._path, encoded=True)
def __repr__(self) -> str:
name = "'" + self.name + "' " if self.name is not None else ""
return f"<PlainResource {name} {self._path}>"
class DynamicResource(Resource):
DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}")
DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")
GOOD = r"[^{}/]+"
def __init__(self, path: str, *, name: Optional[str] = None) -> None:
super().__init__(name=name)
pattern = ""
formatter = ""
for part in ROUTE_RE.split(path):
match = self.DYN.fullmatch(part)
if match:
pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD)
formatter += "{" + match.group("var") + "}"
continue
match = self.DYN_WITH_RE.fullmatch(part)
if match:
pattern += "(?P<{var}>{re})".format(**match.groupdict())
formatter += "{" + match.group("var") + "}"
continue
if "{" in part or "}" in part:
raise ValueError(f"Invalid path '{path}'['{part}']")
part = _requote_path(part)
formatter += part
pattern += re.escape(part)
try:
compiled = re.compile(pattern)
except re.error as exc:
raise ValueError(f"Bad pattern '{pattern}': {exc}") from None
assert compiled.pattern.startswith(PATH_SEP)
assert formatter.startswith("/")
self._pattern = compiled
self._formatter = formatter
@property
def canonical(self) -> str:
return self._formatter
def add_prefix(self, prefix: str) -> None:
assert prefix.startswith("/")
assert not prefix.endswith("/")
assert len(prefix) > 1
self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)
self._formatter = prefix + self._formatter
def _match(self, path: str) -> Optional[Dict[str, str]]:
match = self._pattern.fullmatch(path)
if match is None:
return None
else:
return {
key: _unquote_path(value) for key, value in match.groupdict().items()
}
def raw_match(self, path: str) -> bool:
return self._formatter == path
def get_info(self) -> _InfoDict:
return {"formatter": self._formatter, "pattern": self._pattern}
def url_for(self, **parts: str) -> URL:
url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()})
return URL.build(path=url, encoded=True)
def __repr__(self) -> str:
name = "'" + self.name + "' " if self.name is not None else ""
return "<DynamicResource {name} {formatter}>".format(
name=name, formatter=self._formatter
)
class PrefixResource(AbstractResource):
def __init__(self, prefix: str, *, name: Optional[str] = None) -> None:
assert not prefix or prefix.startswith("/"), prefix
assert prefix in ("", "/") or not prefix.endswith("/"), prefix
super().__init__(name=name)
self._prefix = _requote_path(prefix)
self._prefix2 = self._prefix + "/"
@property
def canonical(self) -> str:
return self._prefix
def add_prefix(self, prefix: str) -> None:
assert prefix.startswith("/")
assert not prefix.endswith("/")
assert len(prefix) > 1
self._prefix = prefix + self._prefix
self._prefix2 = self._prefix + "/"
def raw_match(self, prefix: str) -> bool:
return False
# TODO: implement missing abstract methods
class StaticResource(PrefixResource):
VERSION_KEY = "v"
def __init__(
self,
prefix: str,
directory: PathLike,
*,
name: Optional[str] = None,
expect_handler: Optional[_ExpectHandler] = None,
chunk_size: int = 256 * 1024,
show_index: bool = False,
follow_symlinks: bool = False,
append_version: bool = False,
) -> None:
super().__init__(prefix, name=name)
try:
directory = Path(directory)
if str(directory).startswith("~"):
directory = Path(os.path.expanduser(str(directory)))
directory = directory.resolve()
if not directory.is_dir():
raise ValueError("Not a directory")
except (FileNotFoundError, ValueError) as error:
raise ValueError(f"No directory exists at '{directory}'") from error
self._directory = directory
self._show_index = show_index
self._chunk_size = chunk_size
self._follow_symlinks = follow_symlinks
self._expect_handler = expect_handler
self._append_version = append_version
self._routes = {
"GET": ResourceRoute(
"GET", self._handle, self, expect_handler=expect_handler
),
"HEAD": ResourceRoute(
"HEAD", self._handle, self, expect_handler=expect_handler
),
}
def url_for( # type: ignore[override]
self,
*,
filename: Union[str, Path],
append_version: Optional[bool] = None,
) -> URL:
if append_version is None:
append_version = self._append_version
if isinstance(filename, Path):
filename = str(filename)
filename = filename.lstrip("/")
url = URL.build(path=self._prefix, encoded=True)
# filename is not encoded
if YARL_VERSION < (1, 6):
url = url / filename.replace("%", "%25")
else:
url = url / filename
if append_version:
try:
filepath = self._directory.joinpath(filename).resolve()
if not self._follow_symlinks:
filepath.relative_to(self._directory)
except (ValueError, FileNotFoundError):
                # ValueError is raised when the path points to a symlink
                # and follow_symlinks is False
return url # relatively safe
if filepath.is_file():
# TODO cache file content
# with file watcher for cache invalidation
with filepath.open("rb") as f:
file_bytes = f.read()
h = self._get_file_hash(file_bytes)
url = url.with_query({self.VERSION_KEY: h})
return url
return url
@staticmethod
def _get_file_hash(byte_array: bytes) -> str:
        m = hashlib.sha256()  # TODO: the hash algorithm could be a configurable parameter
m.update(byte_array)
b64 = base64.urlsafe_b64encode(m.digest())
return b64.decode("ascii")
def get_info(self) -> _InfoDict:
return {
"directory": self._directory,
"prefix": self._prefix,
"routes": self._routes,
}
def set_options_route(self, handler: Handler) -> None:
if "OPTIONS" in self._routes:
raise RuntimeError("OPTIONS route was set already")
self._routes["OPTIONS"] = ResourceRoute(
"OPTIONS", handler, self, expect_handler=self._expect_handler
)
async def resolve(self, request: Request) -> _Resolve:
path = request.rel_url.raw_path
method = request.method
allowed_methods = set(self._routes)
if not path.startswith(self._prefix2) and path != self._prefix:
return None, set()
if method not in allowed_methods:
return None, allowed_methods
match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])}
return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods)
def __len__(self) -> int:
return len(self._routes)
def __iter__(self) -> Iterator[AbstractRoute]:
return iter(self._routes.values())
async def _handle(self, request: Request) -> StreamResponse:
rel_url = request.match_info["filename"]
try:
filename = Path(rel_url)
if filename.anchor:
# rel_url is an absolute name like
# /static/\\machine_name\c$ or /static/D:\path
# where the static dir is totally different
raise HTTPForbidden()
filepath = self._directory.joinpath(filename).resolve()
if not self._follow_symlinks:
filepath.relative_to(self._directory)
except (ValueError, FileNotFoundError) as error:
# relatively safe
raise HTTPNotFound() from error
except HTTPForbidden:
raise
except Exception as error:
# perm error or other kind!
request.app.logger.exception(error)
raise HTTPNotFound() from error
# on opening a dir, load its contents if allowed
if filepath.is_dir():
if self._show_index:
try:
return Response(
text=self._directory_as_html(filepath), content_type="text/html"
)
except PermissionError:
raise HTTPForbidden()
else:
raise HTTPForbidden()
elif filepath.is_file():
return FileResponse(filepath, chunk_size=self._chunk_size)
else:
raise HTTPNotFound
def _directory_as_html(self, filepath: Path) -> str:
# returns directory's index as html
# sanity check
assert filepath.is_dir()
relative_path_to_dir = filepath.relative_to(self._directory).as_posix()
index_of = f"Index of /{relative_path_to_dir}"
h1 = f"<h1>{index_of}</h1>"
index_list = []
dir_index = filepath.iterdir()
for _file in sorted(dir_index):
# show file url as relative to static path
rel_path = _file.relative_to(self._directory).as_posix()
file_url = self._prefix + "/" + rel_path
# if file is a directory, add '/' to the end of the name
if _file.is_dir():
file_name = f"{_file.name}/"
else:
file_name = _file.name
index_list.append(
'<li><a href="{url}">{name}</a></li>'.format(
url=file_url, name=file_name
)
)
ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))
body = f"<body>\n{h1}\n{ul}\n</body>"
head_str = f"<head>\n<title>{index_of}</title>\n</head>"
html = f"<html>\n{head_str}\n{body}\n</html>"
return html
def __repr__(self) -> str:
name = "'" + self.name + "'" if self.name is not None else ""
return "<StaticResource {name} {path} -> {directory!r}>".format(
name=name, path=self._prefix, directory=self._directory
)
class PrefixedSubAppResource(PrefixResource):
def __init__(self, prefix: str, app: "Application") -> None:
super().__init__(prefix)
self._app = app
for resource in app.router.resources():
resource.add_prefix(prefix)
def add_prefix(self, prefix: str) -> None:
super().add_prefix(prefix)
for resource in self._app.router.resources():
resource.add_prefix(prefix)
def url_for(self, *args: str, **kwargs: str) -> URL:
raise RuntimeError(".url_for() is not supported " "by sub-application root")
def get_info(self) -> _InfoDict:
return {"app": self._app, "prefix": self._prefix}
async def resolve(self, request: Request) -> _Resolve:
if (
not request.url.raw_path.startswith(self._prefix2)
and request.url.raw_path != self._prefix
):
return None, set()
match_info = await self._app.router.resolve(request)
match_info.add_app(self._app)
if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
methods = match_info.http_exception.allowed_methods
else:
methods = set()
return match_info, methods
def __len__(self) -> int:
return len(self._app.router.routes())
def __iter__(self) -> Iterator[AbstractRoute]:
return iter(self._app.router.routes())
def __repr__(self) -> str:
return "<PrefixedSubAppResource {prefix} -> {app!r}>".format(
prefix=self._prefix, app=self._app
)
class AbstractRuleMatching(abc.ABC):
@abc.abstractmethod # pragma: no branch
async def match(self, request: Request) -> bool:
"""Return bool if the request satisfies the criteria"""
@abc.abstractmethod # pragma: no branch
def get_info(self) -> _InfoDict:
"""Return a dict with additional info useful for introspection"""
@property
@abc.abstractmethod # pragma: no branch
def canonical(self) -> str:
"""Return a str"""
class Domain(AbstractRuleMatching):
re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)")
def __init__(self, domain: str) -> None:
super().__init__()
self._domain = self.validation(domain)
@property
def canonical(self) -> str:
return self._domain
def validation(self, domain: str) -> str:
if not isinstance(domain, str):
raise TypeError("Domain must be str")
domain = domain.rstrip(".").lower()
if not domain:
raise ValueError("Domain cannot be empty")
elif "://" in domain:
raise ValueError("Scheme not supported")
url = URL("http://" + domain)
assert url.raw_host is not None
if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")):
raise ValueError("Domain not valid")
if url.port == 80:
return url.raw_host
return f"{url.raw_host}:{url.port}"
async def match(self, request: Request) -> bool:
host = request.headers.get(hdrs.HOST)
if not host:
return False
return self.match_domain(host)
def match_domain(self, host: str) -> bool:
return host.lower() == self._domain
def get_info(self) -> _InfoDict:
return {"domain": self._domain}
class MaskDomain(Domain):
re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)")
def __init__(self, domain: str) -> None:
super().__init__(domain)
mask = self._domain.replace(".", r"\.").replace("*", ".*")
self._mask = re.compile(mask)
@property
def canonical(self) -> str:
return self._mask.pattern
def match_domain(self, host: str) -> bool:
return self._mask.fullmatch(host) is not None
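# Editor's note (illustrative): MaskDomain("*.example.com") matches the host
# "api.example.com" but not the bare "example.com" -- the mask compiles to
# the regex ".*\.example\.com", which requires at least one leading label.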
class MatchedSubAppResource(PrefixedSubAppResource):
def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None:
AbstractResource.__init__(self)
self._prefix = ""
self._app = app
self._rule = rule
@property
def canonical(self) -> str:
return self._rule.canonical
def get_info(self) -> _InfoDict:
return {"app": self._app, "rule": self._rule}
async def resolve(self, request: Request) -> _Resolve:
if not await self._rule.match(request):
return None, set()
match_info = await self._app.router.resolve(request)
match_info.add_app(self._app)
if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
methods = match_info.http_exception.allowed_methods
else:
methods = set()
return match_info, methods
def __repr__(self) -> str:
return "<MatchedSubAppResource -> {app!r}>" "".format(app=self._app)
class ResourceRoute(AbstractRoute):
"""A route with resource"""
def __init__(
self,
method: str,
handler: Union[Handler, Type[AbstractView]],
resource: AbstractResource,
*,
expect_handler: Optional[_ExpectHandler] = None,
) -> None:
super().__init__(
method, handler, expect_handler=expect_handler, resource=resource
)
def __repr__(self) -> str:
return "<ResourceRoute [{method}] {resource} -> {handler!r}".format(
method=self.method, resource=self._resource, handler=self.handler
)
@property
def name(self) -> Optional[str]:
if self._resource is None:
return None
return self._resource.name
def url_for(self, *args: str, **kwargs: str) -> URL:
"""Construct url for route with additional params."""
assert self._resource is not None
return self._resource.url_for(*args, **kwargs)
def get_info(self) -> _InfoDict:
assert self._resource is not None
return self._resource.get_info()
class SystemRoute(AbstractRoute):
def __init__(self, http_exception: HTTPException) -> None:
super().__init__(hdrs.METH_ANY, self._handle)
self._http_exception = http_exception
def url_for(self, *args: str, **kwargs: str) -> URL:
raise RuntimeError(".url_for() is not allowed for SystemRoute")
@property
def name(self) -> Optional[str]:
return None
def get_info(self) -> _InfoDict:
return {"http_exception": self._http_exception}
async def _handle(self, request: Request) -> StreamResponse:
raise self._http_exception
@property
def status(self) -> int:
return self._http_exception.status
@property
def reason(self) -> str:
return self._http_exception.reason
def __repr__(self) -> str:
return "<SystemRoute {self.status}: {self.reason}>".format(self=self)
class View(AbstractView):
async def _iter(self) -> StreamResponse:
if self.request.method not in hdrs.METH_ALL:
self._raise_allowed_methods()
method: Callable[[], Awaitable[StreamResponse]] = getattr(
self, self.request.method.lower(), None
)
if method is None:
self._raise_allowed_methods()
resp = await method()
return resp
def __await__(self) -> Generator[Any, None, StreamResponse]:
return self._iter().__await__()
def _raise_allowed_methods(self) -> None:
allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
raise HTTPMethodNotAllowed(self.request.method, allowed_methods)
class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]):
def __init__(self, resources: List[AbstractResource]) -> None:
self._resources = resources
def __len__(self) -> int:
return len(self._resources)
def __iter__(self) -> Iterator[AbstractResource]:
yield from self._resources
def __contains__(self, resource: object) -> bool:
return resource in self._resources
class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]):
def __init__(self, resources: List[AbstractResource]):
self._routes: List[AbstractRoute] = []
for resource in resources:
for route in resource:
self._routes.append(route)
def __len__(self) -> int:
return len(self._routes)
def __iter__(self) -> Iterator[AbstractRoute]:
yield from self._routes
def __contains__(self, route: object) -> bool:
return route in self._routes
class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
NAME_SPLIT_RE = re.compile(r"[.:-]")
def __init__(self) -> None:
super().__init__()
self._resources: List[AbstractResource] = []
self._named_resources: Dict[str, AbstractResource] = {}
async def resolve(self, request: Request) -> UrlMappingMatchInfo:
method = request.method
allowed_methods: Set[str] = set()
for resource in self._resources:
match_dict, allowed = await resource.resolve(request)
if match_dict is not None:
return match_dict
else:
allowed_methods |= allowed
if allowed_methods:
return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods))
else:
return MatchInfoError(HTTPNotFound())
def __iter__(self) -> Iterator[str]:
return iter(self._named_resources)
def __len__(self) -> int:
return len(self._named_resources)
def __contains__(self, resource: object) -> bool:
return resource in self._named_resources
def __getitem__(self, name: str) -> AbstractResource:
return self._named_resources[name]
def resources(self) -> ResourcesView:
return ResourcesView(self._resources)
def routes(self) -> RoutesView:
return RoutesView(self._resources)
def named_resources(self) -> Mapping[str, AbstractResource]:
return MappingProxyType(self._named_resources)
def register_resource(self, resource: AbstractResource) -> None:
assert isinstance(
resource, AbstractResource
), f"Instance of AbstractResource class is required, got {resource!r}"
if self.frozen:
raise RuntimeError("Cannot register a resource into frozen router.")
name = resource.name
if name is not None:
parts = self.NAME_SPLIT_RE.split(name)
for part in parts:
if keyword.iskeyword(part):
raise ValueError(
f"Incorrect route name {name!r}, "
"python keywords cannot be used "
"for route name"
)
if not part.isidentifier():
raise ValueError(
"Incorrect route name {!r}, "
"the name should be a sequence of "
"python identifiers separated "
"by dash, dot or column".format(name)
)
if name in self._named_resources:
raise ValueError(
"Duplicate {!r}, "
"already handled by {!r}".format(name, self._named_resources[name])
)
self._named_resources[name] = resource
self._resources.append(resource)
def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource:
if path and not path.startswith("/"):
raise ValueError("path should be started with / or be empty")
# Reuse last added resource if path and name are the same
if self._resources:
resource = self._resources[-1]
if resource.name == name and resource.raw_match(path):
return cast(Resource, resource)
if not ("{" in path or "}" in path or ROUTE_RE.search(path)):
resource = PlainResource(_requote_path(path), name=name)
self.register_resource(resource)
return resource
resource = DynamicResource(path, name=name)
self.register_resource(resource)
return resource
def add_route(
self,
method: str,
path: str,
handler: Union[Handler, Type[AbstractView]],
*,
name: Optional[str] = None,
expect_handler: Optional[_ExpectHandler] = None,
) -> AbstractRoute:
resource = self.add_resource(path, name=name)
return resource.add_route(method, handler, expect_handler=expect_handler)
def add_static(
self,
prefix: str,
path: PathLike,
*,
name: Optional[str] = None,
expect_handler: Optional[_ExpectHandler] = None,
chunk_size: int = 256 * 1024,
show_index: bool = False,
follow_symlinks: bool = False,
append_version: bool = False,
) -> AbstractResource:
"""Add static files view.
prefix - url prefix
path - folder with files
"""
assert prefix.startswith("/")
if prefix.endswith("/"):
prefix = prefix[:-1]
resource = StaticResource(
prefix,
path,
name=name,
expect_handler=expect_handler,
chunk_size=chunk_size,
show_index=show_index,
follow_symlinks=follow_symlinks,
append_version=append_version,
)
self.register_resource(resource)
return resource
def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method HEAD."""
return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)
def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method OPTIONS."""
return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)
def add_get(
self,
path: str,
handler: Handler,
*,
name: Optional[str] = None,
allow_head: bool = True,
**kwargs: Any,
) -> AbstractRoute:
"""Shortcut for add_route with method GET.
If allow_head is true, another
route is added allowing head requests to the same endpoint.
"""
resource = self.add_resource(path, name=name)
if allow_head:
resource.add_route(hdrs.METH_HEAD, handler, **kwargs)
return resource.add_route(hdrs.METH_GET, handler, **kwargs)
def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method POST."""
return self.add_route(hdrs.METH_POST, path, handler, **kwargs)
def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method PUT."""
return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)
def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method PATCH."""
return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)
def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""Shortcut for add_route with method DELETE."""
return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)
def add_view(
self, path: str, handler: Type[AbstractView], **kwargs: Any
) -> AbstractRoute:
"""Shortcut for add_route with ANY methods for a class-based view."""
return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)
def freeze(self) -> None:
super().freeze()
for resource in self._resources:
resource.freeze()
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
"""Append routes to route table.
Parameter should be a sequence of RouteDef objects.
Returns a list of registered AbstractRoute instances.
"""
registered_routes = []
for route_def in routes:
registered_routes.extend(route_def.register(self))
return registered_routes
def _quote_path(value: str) -> str:
if YARL_VERSION < (1, 6):
value = value.replace("%", "%25")
return URL.build(path=value, encoded=False).raw_path
def _unquote_path(value: str) -> str:
return URL.build(path=value, encoded=True).path
def _requote_path(value: str) -> str:
# Quote non-ascii characters and other characters which must be quoted,
# but preserve existing %-sequences.
result = _quote_path(value)
if "%" in value:
result = result.replace("%25", "%")
return result
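if __name__ == "__main__":
    # Editor's sketch, not part of aiohttp itself: exercising the router
    # defined above through the public aiohttp.web API. Assumes aiohttp is
    # installed; the handler and paths are illustrative only.
    from aiohttp import web

    async def hello(request: web.Request) -> web.Response:
        return web.Response(text="hello")

    app = web.Application()
    # add_get() registers a GET route (plus a matching HEAD route by default)
    # on the UrlDispatcher shown above.
    app.router.add_get("/hello", hello, name="hello")
    print(app.router["hello"])  # named resources are reachable via __getitem__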
| 39,483 | Python | 31.337428 | 88 | 0.580883 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/events.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.events
:synopsis: File system events and event handlers.
:author: [email protected] (Yesudeep Mangalapilly)
Event Classes
-------------
.. autoclass:: FileSystemEvent
:members:
:show-inheritance:
:inherited-members:
.. autoclass:: FileSystemMovedEvent
:members:
:show-inheritance:
.. autoclass:: FileMovedEvent
:members:
:show-inheritance:
.. autoclass:: DirMovedEvent
:members:
:show-inheritance:
.. autoclass:: FileModifiedEvent
:members:
:show-inheritance:
.. autoclass:: DirModifiedEvent
:members:
:show-inheritance:
.. autoclass:: FileCreatedEvent
:members:
:show-inheritance:
.. autoclass:: DirCreatedEvent
:members:
:show-inheritance:
.. autoclass:: FileDeletedEvent
:members:
:show-inheritance:
.. autoclass:: DirDeletedEvent
:members:
:show-inheritance:
Event Handler Classes
---------------------
.. autoclass:: FileSystemEventHandler
:members:
:show-inheritance:
.. autoclass:: PatternMatchingEventHandler
:members:
:show-inheritance:
.. autoclass:: RegexMatchingEventHandler
:members:
:show-inheritance:
.. autoclass:: LoggingEventHandler
:members:
:show-inheritance:
"""
import os.path
import logging
import re
from pathtools.patterns import match_any_paths
from watchdog.utils import has_attribute
from watchdog.utils import unicode_paths
EVENT_TYPE_MOVED = 'moved'
EVENT_TYPE_DELETED = 'deleted'
EVENT_TYPE_CREATED = 'created'
EVENT_TYPE_MODIFIED = 'modified'
class FileSystemEvent(object):
"""
Immutable type that represents a file system event that is triggered
when a change occurs on the monitored file system.
All FileSystemEvent objects are required to be immutable and hence
can be used as keys in dictionaries or be added to sets.
"""
event_type = None
"""The type of the event as a string."""
is_directory = False
"""True if event was emitted for a directory; False otherwise."""
is_synthetic = False
"""
True if event was synthesized; False otherwise.
These are events that weren't actually broadcast by the OS, but
are presumed to have happened based on other, actual events.
"""
def __init__(self, src_path):
self._src_path = src_path
@property
def src_path(self):
"""Source path of the file system object that triggered this event."""
return self._src_path
def __str__(self):
return self.__repr__()
def __repr__(self):
return ("<%(class_name)s: event_type=%(event_type)s, "
"src_path=%(src_path)r, "
"is_directory=%(is_directory)s>"
) % (dict(
class_name=self.__class__.__name__,
event_type=self.event_type,
src_path=self.src_path,
is_directory=self.is_directory))
# Used for comparison of events.
@property
def key(self):
return (self.event_type, self.src_path, self.is_directory)
def __eq__(self, event):
return self.key == event.key
def __ne__(self, event):
return self.key != event.key
def __hash__(self):
return hash(self.key)
class FileSystemMovedEvent(FileSystemEvent):
"""
File system event representing any kind of file system movement.
"""
event_type = EVENT_TYPE_MOVED
def __init__(self, src_path, dest_path):
super(FileSystemMovedEvent, self).__init__(src_path)
self._dest_path = dest_path
@property
def dest_path(self):
"""The destination path of the move event."""
return self._dest_path
# Used for hashing this as an immutable object.
@property
def key(self):
return (self.event_type, self.src_path, self.dest_path, self.is_directory)
def __repr__(self):
return ("<%(class_name)s: src_path=%(src_path)r, "
"dest_path=%(dest_path)r, "
"is_directory=%(is_directory)s>"
) % (dict(class_name=self.__class__.__name__,
src_path=self.src_path,
dest_path=self.dest_path,
is_directory=self.is_directory))
# File events.
class FileDeletedEvent(FileSystemEvent):
"""File system event representing file deletion on the file system."""
event_type = EVENT_TYPE_DELETED
def __init__(self, src_path):
super(FileDeletedEvent, self).__init__(src_path)
def __repr__(self):
return "<%(class_name)s: src_path=%(src_path)r>" %\
dict(class_name=self.__class__.__name__,
src_path=self.src_path)
class FileModifiedEvent(FileSystemEvent):
"""File system event representing file modification on the file system."""
event_type = EVENT_TYPE_MODIFIED
def __init__(self, src_path):
super(FileModifiedEvent, self).__init__(src_path)
def __repr__(self):
return ("<%(class_name)s: src_path=%(src_path)r>"
) % (dict(class_name=self.__class__.__name__,
src_path=self.src_path))
class FileCreatedEvent(FileSystemEvent):
"""File system event representing file creation on the file system."""
event_type = EVENT_TYPE_CREATED
def __init__(self, src_path):
super(FileCreatedEvent, self).__init__(src_path)
def __repr__(self):
return ("<%(class_name)s: src_path=%(src_path)r>"
) % (dict(class_name=self.__class__.__name__,
src_path=self.src_path))
class FileMovedEvent(FileSystemMovedEvent):
"""File system event representing file movement on the file system."""
def __init__(self, src_path, dest_path):
super(FileMovedEvent, self).__init__(src_path, dest_path)
def __repr__(self):
return ("<%(class_name)s: src_path=%(src_path)r, "
"dest_path=%(dest_path)r>"
) % (dict(class_name=self.__class__.__name__,
src_path=self.src_path,
dest_path=self.dest_path))
# Directory events.
class DirDeletedEvent(FileSystemEvent):
"""File system event representing directory deletion on the file system."""
event_type = EVENT_TYPE_DELETED
is_directory = True
def __init__(self, src_path):
super(DirDeletedEvent, self).__init__(src_path)
def __repr__(self):
return ("<%(class_name)s: src_path=%(src_path)r>"
) % (dict(class_name=self.__class__.__name__,
src_path=self.src_path))
class DirModifiedEvent(FileSystemEvent):
"""
File system event representing directory modification on the file system.
"""
event_type = EVENT_TYPE_MODIFIED
is_directory = True
def __init__(self, src_path):
super(DirModifiedEvent, self).__init__(src_path)
def __repr__(self):
return ("<%(class_name)s: src_path=%(src_path)r>"
) % (dict(class_name=self.__class__.__name__,
src_path=self.src_path))
class DirCreatedEvent(FileSystemEvent):
"""File system event representing directory creation on the file system."""
event_type = EVENT_TYPE_CREATED
is_directory = True
def __init__(self, src_path):
super(DirCreatedEvent, self).__init__(src_path)
def __repr__(self):
return ("<%(class_name)s: src_path=%(src_path)r>"
) % (dict(class_name=self.__class__.__name__,
src_path=self.src_path))
class DirMovedEvent(FileSystemMovedEvent):
"""File system event representing directory movement on the file system."""
is_directory = True
def __init__(self, src_path, dest_path):
super(DirMovedEvent, self).__init__(src_path, dest_path)
def __repr__(self):
return ("<%(class_name)s: src_path=%(src_path)r, "
"dest_path=%(dest_path)r>"
) % (dict(class_name=self.__class__.__name__,
src_path=self.src_path,
dest_path=self.dest_path))
class FileSystemEventHandler(object):
"""
Base file system event handler that you can override methods from.
"""
def dispatch(self, event):
"""Dispatches events to the appropriate methods.
:param event:
The event object representing the file system event.
:type event:
:class:`FileSystemEvent`
"""
self.on_any_event(event)
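        # Inline dispatch table: map the event's type string to the matching
        # handler method and invoke it immediately.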
{
EVENT_TYPE_CREATED: self.on_created,
EVENT_TYPE_DELETED: self.on_deleted,
EVENT_TYPE_MODIFIED: self.on_modified,
EVENT_TYPE_MOVED: self.on_moved,
}[event.event_type](event)
def on_any_event(self, event):
"""Catch-all event handler.
:param event:
The event object representing the file system event.
:type event:
:class:`FileSystemEvent`
"""
def on_moved(self, event):
"""Called when a file or a directory is moved or renamed.
:param event:
Event representing file/directory movement.
:type event:
:class:`DirMovedEvent` or :class:`FileMovedEvent`
"""
def on_created(self, event):
"""Called when a file or directory is created.
:param event:
Event representing file/directory creation.
:type event:
:class:`DirCreatedEvent` or :class:`FileCreatedEvent`
"""
def on_deleted(self, event):
"""Called when a file or directory is deleted.
:param event:
Event representing file/directory deletion.
:type event:
:class:`DirDeletedEvent` or :class:`FileDeletedEvent`
"""
def on_modified(self, event):
"""Called when a file or directory is modified.
:param event:
Event representing file/directory modification.
:type event:
:class:`DirModifiedEvent` or :class:`FileModifiedEvent`
"""
class PatternMatchingEventHandler(FileSystemEventHandler):
"""
Matches given patterns with file paths associated with occurring events.
"""
def __init__(self, patterns=None, ignore_patterns=None,
ignore_directories=False, case_sensitive=False):
super(PatternMatchingEventHandler, self).__init__()
self._patterns = patterns
self._ignore_patterns = ignore_patterns
self._ignore_directories = ignore_directories
self._case_sensitive = case_sensitive
@property
def patterns(self):
"""
(Read-only)
Patterns to allow matching event paths.
"""
return self._patterns
@property
def ignore_patterns(self):
"""
(Read-only)
Patterns to ignore matching event paths.
"""
return self._ignore_patterns
@property
def ignore_directories(self):
"""
(Read-only)
``True`` if directories should be ignored; ``False`` otherwise.
"""
return self._ignore_directories
@property
def case_sensitive(self):
"""
(Read-only)
``True`` if path names should be matched sensitive to case; ``False``
otherwise.
"""
return self._case_sensitive
def dispatch(self, event):
"""Dispatches events to the appropriate methods.
:param event:
The event object representing the file system event.
:type event:
:class:`FileSystemEvent`
"""
if self.ignore_directories and event.is_directory:
return
paths = []
if has_attribute(event, 'dest_path'):
paths.append(unicode_paths.decode(event.dest_path))
if event.src_path:
paths.append(unicode_paths.decode(event.src_path))
if match_any_paths(paths,
included_patterns=self.patterns,
excluded_patterns=self.ignore_patterns,
case_sensitive=self.case_sensitive):
super(PatternMatchingEventHandler, self).dispatch(event)
class RegexMatchingEventHandler(FileSystemEventHandler):
"""
Matches given regexes with file paths associated with occurring events.
"""
def __init__(self, regexes=None, ignore_regexes=None,
ignore_directories=False, case_sensitive=False):
super(RegexMatchingEventHandler, self).__init__()
if regexes is None:
regexes = [r".*"]
if ignore_regexes is None:
ignore_regexes = []
if case_sensitive:
self._regexes = [re.compile(r) for r in regexes]
self._ignore_regexes = [re.compile(r) for r in ignore_regexes]
else:
self._regexes = [re.compile(r, re.I) for r in regexes]
self._ignore_regexes = [re.compile(r, re.I) for r in ignore_regexes]
self._ignore_directories = ignore_directories
self._case_sensitive = case_sensitive
@property
def regexes(self):
"""
(Read-only)
Regexes to allow matching event paths.
"""
return self._regexes
@property
def ignore_regexes(self):
"""
(Read-only)
Regexes to ignore matching event paths.
"""
return self._ignore_regexes
@property
def ignore_directories(self):
"""
(Read-only)
``True`` if directories should be ignored; ``False`` otherwise.
"""
return self._ignore_directories
@property
def case_sensitive(self):
"""
(Read-only)
``True`` if path names should be matched sensitive to case; ``False``
otherwise.
"""
return self._case_sensitive
def dispatch(self, event):
"""Dispatches events to the appropriate methods.
:param event:
The event object representing the file system event.
:type event:
:class:`FileSystemEvent`
"""
if self.ignore_directories and event.is_directory:
return
paths = []
if has_attribute(event, 'dest_path'):
paths.append(unicode_paths.decode(event.dest_path))
if event.src_path:
paths.append(unicode_paths.decode(event.src_path))
if any(r.match(p) for r in self.ignore_regexes for p in paths):
return
if any(r.match(p) for r in self.regexes for p in paths):
super(RegexMatchingEventHandler, self).dispatch(event)
class LoggingEventHandler(FileSystemEventHandler):
"""Logs all the events captured."""
def __init__(self, logger=None):
super(LoggingEventHandler, self).__init__()
self.logger = logger or logging.root
def on_moved(self, event):
super(LoggingEventHandler, self).on_moved(event)
what = 'directory' if event.is_directory else 'file'
self.logger.info("Moved %s: from %s to %s", what, event.src_path,
event.dest_path)
def on_created(self, event):
super(LoggingEventHandler, self).on_created(event)
what = 'directory' if event.is_directory else 'file'
self.logger.info("Created %s: %s", what, event.src_path)
def on_deleted(self, event):
super(LoggingEventHandler, self).on_deleted(event)
what = 'directory' if event.is_directory else 'file'
self.logger.info("Deleted %s: %s", what, event.src_path)
def on_modified(self, event):
super(LoggingEventHandler, self).on_modified(event)
what = 'directory' if event.is_directory else 'file'
self.logger.info("Modified %s: %s", what, event.src_path)
class LoggingFileSystemEventHandler(LoggingEventHandler):
"""
For backwards-compatibility. Please use :class:`LoggingEventHandler`
instead.
"""
def generate_sub_moved_events(src_dir_path, dest_dir_path):
"""Generates an event list of :class:`DirMovedEvent` and
:class:`FileMovedEvent` objects for all the files and directories within
the given moved directory that were moved along with the directory.
:param src_dir_path:
The source path of the moved directory.
:param dest_dir_path:
The destination path of the moved directory.
:returns:
An iterable of file system events of type :class:`DirMovedEvent` and
:class:`FileMovedEvent`.
"""
for root, directories, filenames in os.walk(dest_dir_path):
for directory in directories:
full_path = os.path.join(root, directory)
renamed_path = full_path.replace(dest_dir_path, src_dir_path) if src_dir_path else None
event = DirMovedEvent(renamed_path, full_path)
event.is_synthetic = True
yield event
for filename in filenames:
full_path = os.path.join(root, filename)
renamed_path = full_path.replace(dest_dir_path, src_dir_path) if src_dir_path else None
event = FileMovedEvent(renamed_path, full_path)
event.is_synthetic = True
yield event
def generate_sub_created_events(src_dir_path):
"""Generates an event list of :class:`DirCreatedEvent` and
:class:`FileCreatedEvent` objects for all the files and directories within
    the given created directory.
:param src_dir_path:
The source path of the created directory.
:returns:
An iterable of file system events of type :class:`DirCreatedEvent` and
:class:`FileCreatedEvent`.
"""
for root, directories, filenames in os.walk(src_dir_path):
for directory in directories:
event = DirCreatedEvent(os.path.join(root, directory))
event.is_synthetic = True
yield event
for filename in filenames:
event = FileCreatedEvent(os.path.join(root, filename))
event.is_synthetic = True
yield event
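if __name__ == "__main__":
    # Editor's sketch, not part of watchdog: a minimal handler subclass.
    # Event objects are normally constructed by the observer machinery; one
    # is built by hand here purely to show dispatch() in action.
    class PrintHandler(FileSystemEventHandler):
        def on_created(self, event):
            print("created:", event.src_path)

    handler = PrintHandler()
    handler.dispatch(FileCreatedEvent("/tmp/example.txt"))  # prints the path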
| 18,707 | Python | 29.028892 | 99 | 0.607473 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/watchmedo.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.watchmedo
:author: [email protected] (Yesudeep Mangalapilly)
:synopsis: ``watchmedo`` shell script utility.
"""
import os.path
import sys
import yaml
import time
import logging
try:
from cStringIO import StringIO
except ImportError:
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from argh import arg, aliases, ArghParser, expects_obj
from watchdog.version import VERSION_STRING
from watchdog.utils import WatchdogShutdown, load_class
logging.basicConfig(level=logging.INFO)
CONFIG_KEY_TRICKS = 'tricks'
CONFIG_KEY_PYTHON_PATH = 'python-path'
def path_split(pathname_spec, separator=os.pathsep):
"""
Splits a pathname specification separated by an OS-dependent separator.
:param pathname_spec:
The pathname specification.
:param separator:
(OS Dependent) `:` on Unix and `;` on Windows or user-specified.
"""
return list(pathname_spec.split(separator))
def add_to_sys_path(pathnames, index=0):
"""
Adds specified paths at specified index into the sys.path list.
    :param pathnames:
A list of paths to add to the sys.path
:param index:
(Default 0) The index in the sys.path list where the paths will be
added.
"""
for pathname in pathnames[::-1]:
sys.path.insert(index, pathname)
def load_config(tricks_file_pathname):
"""
Loads the YAML configuration from the specified file.
    :param tricks_file_pathname:
The path to the tricks configuration file.
:returns:
A dictionary of configuration information.
"""
with open(tricks_file_pathname, 'rb') as f:
return yaml.safe_load(f.read())
def parse_patterns(patterns_spec, ignore_patterns_spec, separator=';'):
"""
Parses pattern argument specs and returns a two-tuple of
(patterns, ignore_patterns).
"""
patterns = patterns_spec.split(separator)
ignore_patterns = ignore_patterns_spec.split(separator)
if ignore_patterns == ['']:
ignore_patterns = []
return (patterns, ignore_patterns)
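# For example (editor's note): parse_patterns('*.py;*.js', '') returns
# (['*.py', '*.js'], []).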
def observe_with(observer, event_handler, pathnames, recursive):
"""
Single observer thread with a scheduled path and event handler.
:param observer:
The observer thread.
:param event_handler:
Event handler which will be called in response to file system events.
:param pathnames:
A list of pathnames to monitor.
:param recursive:
``True`` if recursive; ``False`` otherwise.
"""
for pathname in set(pathnames):
observer.schedule(event_handler, pathname, recursive)
observer.start()
try:
while True:
time.sleep(1)
except WatchdogShutdown:
observer.stop()
observer.join()
def schedule_tricks(observer, tricks, pathname, recursive):
"""
Schedules tricks with the specified observer and for the given watch
path.
:param observer:
The observer thread into which to schedule the trick and watch.
:param tricks:
A list of tricks.
:param pathname:
A path name which should be watched.
:param recursive:
``True`` if recursive; ``False`` otherwise.
"""
for trick in tricks:
for name, value in list(trick.items()):
TrickClass = load_class(name)
handler = TrickClass(**value)
trick_pathname = getattr(handler, 'source_directory', None) or pathname
observer.schedule(handler, trick_pathname, recursive)
@aliases('tricks')
@arg('files',
nargs='*',
help='perform tricks from given file')
@arg('--python-path',
default='.',
help='paths separated by %s to add to the python path' % os.pathsep)
@arg('--interval',
'--timeout',
dest='timeout',
default=1.0,
help='use this as the polling interval/blocking timeout (in seconds)')
@arg('--recursive',
default=True,
help='recursively monitor paths')
@expects_obj
def tricks_from(args):
"""
Subcommand to execute tricks from a tricks configuration file.
:param args:
Command line argument options.
"""
from watchdog.observers import Observer
add_to_sys_path(path_split(args.python_path))
observers = []
for tricks_file in args.files:
observer = Observer(timeout=args.timeout)
if not os.path.exists(tricks_file):
raise IOError("cannot find tricks file: %s" % tricks_file)
config = load_config(tricks_file)
try:
tricks = config[CONFIG_KEY_TRICKS]
except KeyError:
raise KeyError("No `%s' key specified in %s." % (
CONFIG_KEY_TRICKS, tricks_file))
if CONFIG_KEY_PYTHON_PATH in config:
add_to_sys_path(config[CONFIG_KEY_PYTHON_PATH])
dir_path = os.path.dirname(tricks_file)
if not dir_path:
dir_path = os.path.relpath(os.getcwd())
schedule_tricks(observer, tricks, dir_path, args.recursive)
observer.start()
observers.append(observer)
try:
while True:
time.sleep(1)
except WatchdogShutdown:
for o in observers:
o.unschedule_all()
o.stop()
for o in observers:
o.join()
@aliases('generate-tricks-yaml')
@arg('trick_paths',
nargs='*',
help='Dotted paths for all the tricks you want to generate')
@arg('--python-path',
default='.',
help='paths separated by %s to add to the python path' % os.pathsep)
@arg('--append-to-file',
default=None,
help='appends the generated tricks YAML to a file; \
if not specified, prints to standard output')
@arg('-a',
'--append-only',
dest='append_only',
default=False,
help='if --append-to-file is not specified, produces output for \
appending instead of a complete tricks yaml file.')
@expects_obj
def tricks_generate_yaml(args):
"""
Subcommand to generate Yaml configuration for tricks named on the command
line.
:param args:
Command line argument options.
"""
python_paths = path_split(args.python_path)
add_to_sys_path(python_paths)
output = StringIO()
for trick_path in args.trick_paths:
TrickClass = load_class(trick_path)
output.write(TrickClass.generate_yaml())
content = output.getvalue()
output.close()
header = yaml.dump({CONFIG_KEY_PYTHON_PATH: python_paths})
header += "%s:\n" % CONFIG_KEY_TRICKS
if args.append_to_file is None:
# Output to standard output.
if not args.append_only:
content = header + content
sys.stdout.write(content)
else:
if not os.path.exists(args.append_to_file):
content = header + content
        # The file is opened in binary mode, so encode the YAML text first.
        with open(args.append_to_file, 'ab') as output:
            output.write(content.encode('utf-8'))
@arg('directories',
nargs='*',
default='.',
help='directories to watch.')
@arg('-p',
'--pattern',
'--patterns',
dest='patterns',
default='*',
help='matches event paths with these patterns (separated by ;).')
@arg('-i',
'--ignore-pattern',
'--ignore-patterns',
dest='ignore_patterns',
default='',
help='ignores event paths with these patterns (separated by ;).')
@arg('-D',
'--ignore-directories',
dest='ignore_directories',
default=False,
help='ignores events for directories')
@arg('-R',
'--recursive',
dest='recursive',
default=False,
help='monitors the directories recursively')
@arg('--interval',
'--timeout',
dest='timeout',
default=1.0,
help='use this as the polling interval/blocking timeout')
@arg('--trace',
default=False,
help='dumps complete dispatching trace')
@arg('--debug-force-polling',
default=False,
help='[debug] forces polling')
@arg('--debug-force-kqueue',
default=False,
help='[debug] forces BSD kqueue(2)')
@arg('--debug-force-winapi',
default=False,
help='[debug] forces Windows API')
@arg('--debug-force-winapi-async',
default=False,
help='[debug] forces Windows API + I/O completion')
@arg('--debug-force-fsevents',
default=False,
help='[debug] forces Mac OS X FSEvents')
@arg('--debug-force-inotify',
default=False,
help='[debug] forces Linux inotify(7)')
@expects_obj
def log(args):
"""
Subcommand to log file system events to the console.
:param args:
Command line argument options.
"""
from watchdog.utils import echo
from watchdog.tricks import LoggerTrick
if args.trace:
echo.echo_class(LoggerTrick)
patterns, ignore_patterns =\
parse_patterns(args.patterns, args.ignore_patterns)
handler = LoggerTrick(patterns=patterns,
ignore_patterns=ignore_patterns,
ignore_directories=args.ignore_directories)
if args.debug_force_polling:
from watchdog.observers.polling import PollingObserver as Observer
elif args.debug_force_kqueue:
from watchdog.observers.kqueue import KqueueObserver as Observer
elif args.debug_force_winapi_async:
from watchdog.observers.read_directory_changes_async import\
WindowsApiAsyncObserver as Observer
elif args.debug_force_winapi:
from watchdog.observers.read_directory_changes import\
WindowsApiObserver as Observer
elif args.debug_force_inotify:
from watchdog.observers.inotify import InotifyObserver as Observer
elif args.debug_force_fsevents:
from watchdog.observers.fsevents import FSEventsObserver as Observer
else:
# Automatically picks the most appropriate observer for the platform
# on which it is running.
from watchdog.observers import Observer
observer = Observer(timeout=args.timeout)
observe_with(observer, handler, args.directories, args.recursive)
@arg('directories',
nargs='*',
default='.',
help='directories to watch')
@arg('-c',
'--command',
dest='command',
default=None,
help='''shell command executed in response to matching events.
These interpolation variables are available to your command string::
${watch_src_path} - event source path;
${watch_dest_path} - event destination path (for moved events);
${watch_event_type} - event type;
${watch_object} - ``file`` or ``directory``
Note::
Please ensure you do not use double quotes (") to quote
your command string. That will force your shell to
interpolate before the command is processed by this
subcommand.
Example option usage::
--command='echo "${watch_src_path}"'
''')
@arg('-p',
'--pattern',
'--patterns',
dest='patterns',
default='*',
help='matches event paths with these patterns (separated by ;).')
@arg('-i',
'--ignore-pattern',
'--ignore-patterns',
dest='ignore_patterns',
default='',
help='ignores event paths with these patterns (separated by ;).')
@arg('-D',
'--ignore-directories',
dest='ignore_directories',
default=False,
help='ignores events for directories')
@arg('-R',
'--recursive',
dest='recursive',
default=False,
help='monitors the directories recursively')
@arg('--interval',
'--timeout',
dest='timeout',
default=1.0,
help='use this as the polling interval/blocking timeout')
@arg('-w', '--wait',
dest='wait_for_process',
action='store_true',
default=False,
help="wait for process to finish to avoid multiple simultaneous instances")
@arg('-W', '--drop',
dest='drop_during_process',
action='store_true',
default=False,
help="Ignore events that occur while command is still being executed "
"to avoid multiple simultaneous instances")
@arg('--debug-force-polling',
default=False,
help='[debug] forces polling')
@expects_obj
def shell_command(args):
"""
Subcommand to execute shell commands in response to file system events.
:param args:
Command line argument options.
"""
from watchdog.tricks import ShellCommandTrick
if not args.command:
args.command = None
if args.debug_force_polling:
from watchdog.observers.polling import PollingObserver as Observer
else:
from watchdog.observers import Observer
patterns, ignore_patterns = parse_patterns(args.patterns,
args.ignore_patterns)
handler = ShellCommandTrick(shell_command=args.command,
patterns=patterns,
ignore_patterns=ignore_patterns,
ignore_directories=args.ignore_directories,
wait_for_process=args.wait_for_process,
drop_during_process=args.drop_during_process)
observer = Observer(timeout=args.timeout)
observe_with(observer, handler, args.directories, args.recursive)
@arg('command',
help='''Long-running command to run in a subprocess.
''')
@arg('command_args',
metavar='arg',
nargs='*',
help='''Command arguments.
Note: Use -- before the command arguments, otherwise watchmedo will
try to interpret them.
''')
@arg('-d',
'--directory',
dest='directories',
metavar='directory',
action='append',
help='Directory to watch. Use another -d or --directory option '
'for each directory.')
@arg('-p',
'--pattern',
'--patterns',
dest='patterns',
default='*',
help='matches event paths with these patterns (separated by ;).')
@arg('-i',
'--ignore-pattern',
'--ignore-patterns',
dest='ignore_patterns',
default='',
help='ignores event paths with these patterns (separated by ;).')
@arg('-D',
'--ignore-directories',
dest='ignore_directories',
default=False,
help='ignores events for directories')
@arg('-R',
'--recursive',
dest='recursive',
default=False,
help='monitors the directories recursively')
@arg('--interval',
'--timeout',
dest='timeout',
default=1.0,
help='use this as the polling interval/blocking timeout')
@arg('--signal',
dest='signal',
default='SIGINT',
help='stop the subprocess with this signal (default SIGINT)')
@arg('--debug-force-polling',
default=False,
help='[debug] forces polling')
@arg('--kill-after',
dest='kill_after',
default=10.0,
help='when stopping, kill the subprocess after the specified timeout '
'(default 10)')
@expects_obj
def auto_restart(args):
"""
Subcommand to start a long-running subprocess and restart it
on matched events.
:param args:
Command line argument options.
"""
if args.debug_force_polling:
from watchdog.observers.polling import PollingObserver as Observer
else:
from watchdog.observers import Observer
from watchdog.tricks import AutoRestartTrick
import signal
if not args.directories:
args.directories = ['.']
# Allow either signal name or number.
if args.signal.startswith("SIG"):
stop_signal = getattr(signal, args.signal)
else:
stop_signal = int(args.signal)
# Handle termination signals by raising a semantic exception which will
# allow us to gracefully unwind and stop the observer
termination_signals = {signal.SIGTERM, signal.SIGINT}
def handler_termination_signal(_signum, _frame):
# Neuter all signals so that we don't attempt a double shutdown
for signum in termination_signals:
signal.signal(signum, signal.SIG_IGN)
raise WatchdogShutdown
for signum in termination_signals:
signal.signal(signum, handler_termination_signal)
patterns, ignore_patterns = parse_patterns(args.patterns,
args.ignore_patterns)
command = [args.command]
command.extend(args.command_args)
handler = AutoRestartTrick(command=command,
patterns=patterns,
ignore_patterns=ignore_patterns,
ignore_directories=args.ignore_directories,
stop_signal=stop_signal,
kill_after=args.kill_after)
handler.start()
observer = Observer(timeout=args.timeout)
try:
observe_with(observer, handler, args.directories, args.recursive)
except WatchdogShutdown:
pass
finally:
handler.stop()
epilog = """Copyright 2011 Yesudeep Mangalapilly <[email protected]>.
Copyright 2012 Google, Inc.
Licensed under the terms of the Apache license, version 2.0. Please see
LICENSE in the source code for more information."""
parser = ArghParser(epilog=epilog)
parser.add_commands([tricks_from,
tricks_generate_yaml,
log,
shell_command,
auto_restart])
parser.add_argument('--version',
action='version',
version='%(prog)s ' + VERSION_STRING)
def main():
"""Entry-point function."""
parser.dispatch()
if __name__ == '__main__':
main()
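# Example invocations (editor's note; flags as documented in the @arg
# decorators above):
#
#   watchmedo log --patterns="*.py" --recursive .
#   watchmedo shell-command --command='echo "${watch_src_path}"' .
#   watchmedo auto-restart -d . -p '*.py' -- python server.py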
| 17,979 | Python | 29.423012 | 83 | 0.636409 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/tricks/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.tricks
:synopsis: Utility event handlers.
:author: [email protected] (Yesudeep Mangalapilly)
Classes
-------
.. autoclass:: Trick
:members:
:show-inheritance:
.. autoclass:: LoggerTrick
:members:
:show-inheritance:
.. autoclass:: ShellCommandTrick
:members:
:show-inheritance:
.. autoclass:: AutoRestartTrick
:members:
:show-inheritance:
"""
import os
import signal
import subprocess
import time
from watchdog.utils import echo, has_attribute
from watchdog.events import PatternMatchingEventHandler
class Trick(PatternMatchingEventHandler):
"""Your tricks should subclass this class."""
@classmethod
def generate_yaml(cls):
context = dict(module_name=cls.__module__,
klass_name=cls.__name__)
template_yaml = """- %(module_name)s.%(klass_name)s:
args:
- argument1
- argument2
kwargs:
patterns:
- "*.py"
- "*.js"
ignore_patterns:
- "version.py"
ignore_directories: false
"""
return template_yaml % context
class LoggerTrick(Trick):
"""A simple trick that does only logs events."""
def on_any_event(self, event):
pass
@echo.echo
def on_modified(self, event):
pass
@echo.echo
def on_deleted(self, event):
pass
@echo.echo
def on_created(self, event):
pass
@echo.echo
def on_moved(self, event):
pass
class ShellCommandTrick(Trick):
"""Executes shell commands in response to matched events."""
def __init__(self, shell_command=None, patterns=None, ignore_patterns=None,
ignore_directories=False, wait_for_process=False,
drop_during_process=False):
super(ShellCommandTrick, self).__init__(patterns, ignore_patterns,
ignore_directories)
self.shell_command = shell_command
self.wait_for_process = wait_for_process
self.drop_during_process = drop_during_process
self.process = None
def on_any_event(self, event):
from string import Template
if self.drop_during_process and self.process and self.process.poll() is None:
return
if event.is_directory:
object_type = 'directory'
else:
object_type = 'file'
context = {
'watch_src_path': event.src_path,
'watch_dest_path': '',
'watch_event_type': event.event_type,
'watch_object': object_type,
}
if self.shell_command is None:
if has_attribute(event, 'dest_path'):
                # Use the key the echo template below actually references so
                # that ${watch_dest_path} gets substituted for moved events.
                context.update({'watch_dest_path': event.dest_path})
command = 'echo "${watch_event_type} ${watch_object} from ${watch_src_path} to ${watch_dest_path}"'
else:
command = 'echo "${watch_event_type} ${watch_object} ${watch_src_path}"'
else:
if has_attribute(event, 'dest_path'):
context.update({'watch_dest_path': event.dest_path})
command = self.shell_command
command = Template(command).safe_substitute(**context)
self.process = subprocess.Popen(command, shell=True)
if self.wait_for_process:
self.process.wait()
class AutoRestartTrick(Trick):
"""Starts a long-running subprocess and restarts it on matched events.
The command parameter is a list of command arguments, such as
`['bin/myserver', '-c', 'etc/myconfig.ini']`.
Call `start()` after creating the Trick. Call `stop()` when stopping
the process.
"""
def __init__(self, command, patterns=None, ignore_patterns=None,
ignore_directories=False, stop_signal=signal.SIGINT,
kill_after=10):
super(AutoRestartTrick, self).__init__(
patterns, ignore_patterns, ignore_directories)
self.command = command
self.stop_signal = stop_signal
self.kill_after = kill_after
self.process = None
def start(self):
self.process = subprocess.Popen(self.command, preexec_fn=os.setsid)
def stop(self):
if self.process is None:
return
try:
os.killpg(os.getpgid(self.process.pid), self.stop_signal)
except OSError:
# Process is already gone
pass
else:
kill_time = time.time() + self.kill_after
while time.time() < kill_time:
if self.process.poll() is not None:
break
time.sleep(0.25)
else:
try:
os.killpg(os.getpgid(self.process.pid), 9)
except OSError:
# Process is already gone
pass
self.process = None
@echo.echo
def on_any_event(self, event):
self.stop()
self.start()
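if __name__ == "__main__":
    # Editor's sketch, not part of watchdog: wiring a ShellCommandTrick to an
    # observer. The watched path and the command are illustrative only.
    from watchdog.observers import Observer

    trick = ShellCommandTrick(
        shell_command='echo "${watch_event_type} ${watch_src_path}"',
        patterns=['*'])
    observer = Observer()
    observer.schedule(trick, '.', recursive=False)
    observer.start()
    try:
        time.sleep(3)  # watch the current directory briefly
    finally:
        observer.stop()
        observer.join()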
| 5,587 | Python | 27.080402 | 115 | 0.601754 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/utils/win32stat.py | # -*- coding: utf-8 -*-
#
# Copyright 2014 Thomas Amland <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.utils.win32stat
:synopsis: Implementation of stat with st_ino and st_dev support.
Functions
---------
.. autofunction:: stat
"""
import ctypes
import ctypes.wintypes
import stat as stdstat
from collections import namedtuple
INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value
OPEN_EXISTING = 3
FILE_READ_ATTRIBUTES = 0x80
FILE_ATTRIBUTE_NORMAL = 0x80
FILE_ATTRIBUTE_READONLY = 0x1
FILE_ATTRIBUTE_DIRECTORY = 0x10
FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000
class FILETIME(ctypes.Structure):
_fields_ = [("dwLowDateTime", ctypes.wintypes.DWORD),
("dwHighDateTime", ctypes.wintypes.DWORD)]
class BY_HANDLE_FILE_INFORMATION(ctypes.Structure):
_fields_ = [('dwFileAttributes', ctypes.wintypes.DWORD),
('ftCreationTime', FILETIME),
('ftLastAccessTime', FILETIME),
('ftLastWriteTime', FILETIME),
('dwVolumeSerialNumber', ctypes.wintypes.DWORD),
('nFileSizeHigh', ctypes.wintypes.DWORD),
('nFileSizeLow', ctypes.wintypes.DWORD),
('nNumberOfLinks', ctypes.wintypes.DWORD),
('nFileIndexHigh', ctypes.wintypes.DWORD),
('nFileIndexLow', ctypes.wintypes.DWORD)]
kernel32 = ctypes.WinDLL("kernel32")
CreateFile = kernel32.CreateFileW
CreateFile.restype = ctypes.wintypes.HANDLE
CreateFile.argtypes = (
ctypes.c_wchar_p,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
ctypes.c_void_p,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
ctypes.wintypes.HANDLE,
)
GetFileInformationByHandle = kernel32.GetFileInformationByHandle
GetFileInformationByHandle.restype = ctypes.wintypes.BOOL
GetFileInformationByHandle.argtypes = (
ctypes.wintypes.HANDLE,
    ctypes.POINTER(BY_HANDLE_FILE_INFORMATION),
)
CloseHandle = kernel32.CloseHandle
CloseHandle.restype = ctypes.wintypes.BOOL
CloseHandle.argtypes = (ctypes.wintypes.HANDLE,)
StatResult = namedtuple('StatResult', 'st_dev st_ino st_mode st_mtime st_size')
def _to_mode(attr):
m = 0
if (attr & FILE_ATTRIBUTE_DIRECTORY):
m |= stdstat.S_IFDIR | 0o111
else:
m |= stdstat.S_IFREG
if (attr & FILE_ATTRIBUTE_READONLY):
m |= 0o444
else:
m |= 0o666
return m
def _to_unix_time(ft):
t = (ft.dwHighDateTime) << 32 | ft.dwLowDateTime
return (t / 10000000) - 11644473600
def stat(path):
hfile = CreateFile(path,
FILE_READ_ATTRIBUTES,
0,
None,
OPEN_EXISTING,
FILE_ATTRIBUTE_NORMAL
| FILE_FLAG_BACKUP_SEMANTICS
| FILE_FLAG_OPEN_REPARSE_POINT,
None)
if hfile == INVALID_HANDLE_VALUE:
raise ctypes.WinError()
info = BY_HANDLE_FILE_INFORMATION()
r = GetFileInformationByHandle(hfile, info)
CloseHandle(hfile)
if not r:
raise ctypes.WinError()
return StatResult(st_dev=info.dwVolumeSerialNumber,
st_ino=(info.nFileIndexHigh << 32) + info.nFileIndexLow,
st_mode=_to_mode(info.dwFileAttributes),
st_mtime=_to_unix_time(info.ftLastWriteTime),
st_size=(info.nFileSizeHigh << 32) + info.nFileSizeLow
)
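if __name__ == "__main__":
    # Editor's sketch, not part of watchdog: this module only imports on
    # Windows (ctypes.WinDLL), so the demo is guarded. The path is
    # illustrative only.
    import sys
    if sys.platform == "win32":
        info = stat(r"C:\Windows")
        print(info.st_dev, info.st_ino, info.st_size)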
| 4,029 | Python | 29.530303 | 79 | 0.648052 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/utils/delayed_queue.py | # -*- coding: utf-8 -*-
#
# Copyright 2014 Thomas Amland <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import threading
from collections import deque
class DelayedQueue(object):
def __init__(self, delay):
self.delay_sec = delay
self._lock = threading.Lock()
self._not_empty = threading.Condition(self._lock)
self._queue = deque()
self._closed = False
def put(self, element, delay=False):
"""Add element to queue."""
self._lock.acquire()
self._queue.append((element, time.time(), delay))
self._not_empty.notify()
self._lock.release()
def close(self):
"""Close queue, indicating no more items will be added."""
self._closed = True
# Interrupt the blocking _not_empty.wait() call in get
self._not_empty.acquire()
self._not_empty.notify()
self._not_empty.release()
def get(self):
"""Remove and return an element from the queue, or this queue has been
closed raise the Closed exception.
"""
while True:
# wait for element to be added to queue
self._not_empty.acquire()
while len(self._queue) == 0 and not self._closed:
self._not_empty.wait()
if self._closed:
self._not_empty.release()
return None
head, insert_time, delay = self._queue[0]
self._not_empty.release()
# wait for delay if required
if delay:
time_left = insert_time + self.delay_sec - time.time()
while time_left > 0:
time.sleep(time_left)
time_left = insert_time + self.delay_sec - time.time()
# return element if it's still in the queue
with self._lock:
if len(self._queue) > 0 and self._queue[0][0] is head:
self._queue.popleft()
return head
def remove(self, predicate):
"""Remove and return the first items for which predicate is True,
ignoring delay."""
with self._lock:
for i, (elem, t, delay) in enumerate(self._queue):
if predicate(elem):
del self._queue[i]
return elem
return None
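if __name__ == "__main__":
    # Editor's sketch, not part of watchdog: a delayed put only becomes
    # visible to get() after the configured delay has elapsed.
    q = DelayedQueue(0.25)
    q.put("event", delay=True)
    print(q.get())  # blocks for ~0.25s, then prints 'event'
    q.close()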
| 2,872 | Python | 33.202381 | 78 | 0.581476 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/utils/bricks.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utility collections or "bricks".
:module: watchdog.utils.bricks
:author: [email protected] (Yesudeep Mangalapilly)
:author: [email protected] (Lukáš Lalinský)
:author: [email protected] (Raymond Hettinger)
Classes
=======
.. autoclass:: SkipRepeatsQueue
   :members:
   :show-inheritance:
   :inherited-members:
"""
from .compat import queue
class SkipRepeatsQueue(queue.Queue, object):
"""Thread-safe implementation of an special queue where a
put of the last-item put'd will be dropped.
The implementation leverages locking already implemented in the base class
redefining only the primitives.
Queued items must be immutable and hashable so that they can be used
as dictionary keys. You must implement **only read-only properties** and
the :meth:`Item.__hash__()`, :meth:`Item.__eq__()`, and
:meth:`Item.__ne__()` methods for items to be hashable.
An example implementation follows::
class Item(object):
def __init__(self, a, b):
self._a = a
self._b = b
@property
def a(self):
return self._a
@property
def b(self):
return self._b
def _key(self):
return (self._a, self._b)
def __eq__(self, item):
return self._key() == item._key()
def __ne__(self, item):
return self._key() != item._key()
def __hash__(self):
return hash(self._key())
    Based on the OrderedSetQueue from earlier versions of watchdog.
"""
def _init(self, maxsize):
super(SkipRepeatsQueue, self)._init(maxsize)
self._last_item = None
def _put(self, item):
if item != self._last_item:
super(SkipRepeatsQueue, self)._put(item)
self._last_item = item
else:
# `put` increments `unfinished_tasks` even if we did not put
# anything into the queue here
self.unfinished_tasks -= 1
def _get(self):
item = super(SkipRepeatsQueue, self)._get()
if item is self._last_item:
self._last_item = None
return item
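if __name__ == "__main__":
    # Editor's sketch, not part of watchdog: consecutive duplicate puts
    # collapse into a single queued item.
    q = SkipRepeatsQueue()
    q.put("a")
    q.put("a")  # dropped: identical to the previous put
    q.put("b")
    print(q.qsize())  # 2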
| 2,895 | Python | 27.116505 | 78 | 0.613126 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/utils/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.utils
:synopsis: Utility classes and functions.
:author: [email protected] (Yesudeep Mangalapilly)
Classes
-------
.. autoclass:: BaseThread
:members:
:show-inheritance:
:inherited-members:
"""
import os
import sys
import threading
from watchdog.utils import platform
from watchdog.utils.compat import Event
if sys.version_info[0] == 2 and platform.is_windows():
# st_ino is not implemented in os.stat on this platform
import win32stat
stat = win32stat.stat
else:
stat = os.stat
def has_attribute(ob, attribute):
"""
:func:`hasattr` swallows exceptions. :func:`has_attribute` tests a Python object for the
presence of an attribute.
:param ob:
object to inspect
:param attribute:
``str`` for the name of the attribute.
"""
return getattr(ob, attribute, None) is not None
class UnsupportedLibc(Exception):
pass
class WatchdogShutdown(Exception):
"""
Semantic exception used to signal an external shutdown event.
"""
pass
class BaseThread(threading.Thread):
""" Convenience class for creating stoppable threads. """
def __init__(self):
threading.Thread.__init__(self)
if has_attribute(self, 'daemon'):
self.daemon = True
else:
self.setDaemon(True)
self._stopped_event = Event()
if not has_attribute(self._stopped_event, 'is_set'):
self._stopped_event.is_set = self._stopped_event.isSet
@property
def stopped_event(self):
return self._stopped_event
def should_keep_running(self):
"""Determines whether the thread should continue running."""
return not self._stopped_event.is_set()
def on_thread_stop(self):
"""Override this method instead of :meth:`stop()`.
:meth:`stop()` calls this method.
This method is called immediately after the thread is signaled to stop.
"""
pass
def stop(self):
"""Signals the thread to stop."""
self._stopped_event.set()
self.on_thread_stop()
def on_thread_start(self):
"""Override this method instead of :meth:`start()`. :meth:`start()`
calls this method.
This method is called right before this thread is started and this
object’s run() method is invoked.
"""
pass
def start(self):
self.on_thread_start()
threading.Thread.start(self)
def load_module(module_name):
"""Imports a module given its name and returns a handle to it."""
try:
__import__(module_name)
except ImportError:
raise ImportError('No module named %s' % module_name)
return sys.modules[module_name]
def load_class(dotted_path):
"""Loads and returns a class definition provided a dotted path
specification the last part of the dotted path is the class name
and there is at least one module name preceding the class name.
Notes:
You will need to ensure that the module you are trying to load
exists in the Python path.
Examples:
- module.name.ClassName # Provided module.name is in the Python path.
- module.ClassName # Provided module is in the Python path.
What won't work:
- ClassName
- modle.name.ClassName # Typo in module name.
- module.name.ClasNam # Typo in classname.
"""
dotted_path_split = dotted_path.split('.')
if len(dotted_path_split) > 1:
klass_name = dotted_path_split[-1]
module_name = '.'.join(dotted_path_split[:-1])
module = load_module(module_name)
if has_attribute(module, klass_name):
klass = getattr(module, klass_name)
return klass
# Finally create and return an instance of the class
# return klass(*args, **kwargs)
else:
raise AttributeError('Module %s does not have class attribute %s' % (
module_name, klass_name))
else:
raise ValueError(
'Dotted module path %s must contain a module name and a classname' % dotted_path)
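if __name__ == "__main__":
    # Editor's sketch, not part of watchdog: a stoppable worker built on
    # BaseThread; run() polls should_keep_running() instead of looping forever.
    import time

    class Worker(BaseThread):
        def run(self):
            while self.should_keep_running():
                time.sleep(0.05)

    worker = Worker()
    worker.start()
    worker.stop()  # sets the stopped event; the loop exits on its next check
    worker.join()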
| 4,808 | Python | 28.145454 | 93 | 0.646839 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/utils/unicode_paths.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2013 Will Bond <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
from watchdog.utils import platform
try:
# Python 2
str_cls = unicode
bytes_cls = str
except NameError:
# Python 3
str_cls = str
bytes_cls = bytes
# This is used by Linux when the locale seems to be improperly set. UTF-8 tends
# to be the encoding used by all distros, so this is a good fallback.
fs_fallback_encoding = 'utf-8'
fs_encoding = sys.getfilesystemencoding() or fs_fallback_encoding
def encode(path):
if isinstance(path, str_cls):
try:
path = path.encode(fs_encoding, 'strict')
except UnicodeEncodeError:
if not platform.is_linux():
raise
path = path.encode(fs_fallback_encoding, 'strict')
return path
def decode(path):
if isinstance(path, bytes_cls):
try:
path = path.decode(fs_encoding, 'strict')
except UnicodeDecodeError:
if not platform.is_linux():
raise
path = path.decode(fs_fallback_encoding, 'strict')
return path
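if __name__ == "__main__":
    # Editor's sketch, not part of watchdog: encode()/decode() round-trip a
    # path using the detected filesystem encoding (UTF-8 fallback on Linux).
    original = u"caf\u00e9.txt"
    assert decode(encode(original)) == original
    print(encode(original))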
| 2,184 | Python | 32.615384 | 79 | 0.697802 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/utils/echo.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# echo.py: Tracing function calls using Python decorators.
#
# Written by Thomas Guest <[email protected]>
# Please see http://wordaligned.org/articles/echo
#
# Place into the public domain.
""" Echo calls made to functions and methods in a module.
"Echoing" a function call means printing out the name of the function
and the values of its arguments before making the call (which is more
commonly referred to as "tracing", but Python already has a trace module).
Example: to echo calls made to functions in "my_module" do:
import echo
import my_module
echo.echo_module(my_module)
Example: to echo calls made to functions in "my_module.my_class" do:
echo.echo_class(my_module.my_class)
Alternatively, echo.echo can be used to decorate functions. Calls to the
decorated function will be echoed.
Example:
@echo.echo
def my_function(args):
pass
"""
import inspect
import sys
def name(item):
" Return an item's name. "
return item.__name__
def is_classmethod(instancemethod, klass):
" Determine if an instancemethod is a classmethod. "
return inspect.ismethod(instancemethod) and instancemethod.__self__ is klass
def is_static_method(method, klass):
"""Returns True if method is an instance method of klass."""
for c in klass.mro():
if name(method) in c.__dict__:
return isinstance(c.__dict__[name(method)], staticmethod)
else:
return False
def is_class_private_name(name):
" Determine if a name is a class private name. "
# Exclude system defined names such as __init__, __add__ etc
return name.startswith("__") and not name.endswith("__")
def method_name(method):
""" Return a method's name.
This function returns the name the method is accessed by from
outside the class (i.e. it prefixes "private" methods appropriately).
"""
mname = name(method)
if is_class_private_name(mname):
mname = "_%s%s" % (name(method.__self__.__class__), mname)
return mname
def format_arg_value(arg_val):
""" Return a string representing a (name, value) pair.
>>> format_arg_value(('x', (1, 2, 3)))
'x=(1, 2, 3)'
"""
arg, val = arg_val
return "%s=%r" % (arg, val)
def echo(fn, write=sys.stdout.write):
""" Echo calls to a function.
Returns a decorated version of the input function which "echoes" calls
made to it by writing out the function's name and the arguments it was
called with.
"""
import functools
# Unpack function's arg count, arg names, arg defaults
code = fn.__code__
argcount = code.co_argcount
argnames = code.co_varnames[:argcount]
fn_defaults = fn.__defaults__ or list()
argdefs = dict(list(zip(argnames[-len(fn_defaults):], fn_defaults)))
@functools.wraps(fn)
def wrapped(*v, **k):
# Collect function arguments by chaining together positional,
# defaulted, extra positional and keyword arguments.
positional = list(map(format_arg_value, list(zip(argnames, v))))
defaulted = [format_arg_value((a, argdefs[a]))
for a in argnames[len(v):] if a not in k]
nameless = list(map(repr, v[argcount:]))
keyword = list(map(format_arg_value, list(k.items())))
args = positional + defaulted + nameless + keyword
write("%s(%s)\n" % (name(fn), ", ".join(args)))
return fn(*v, **k)
return wrapped
def echo_instancemethod(klass, method, write=sys.stdout.write):
""" Change an instancemethod so that calls to it are echoed.
Replacing a classmethod is a little more tricky.
See: http://www.python.org/doc/current/ref/types.html
"""
mname = method_name(method)
never_echo = "__str__", "__repr__", # Avoid recursion printing method calls
if mname in never_echo:
pass
elif is_classmethod(method, klass):
setattr(klass, mname, classmethod(echo(method.__func__, write)))
else:
setattr(klass, mname, echo(method, write))
def echo_class(klass, write=sys.stdout.write):
""" Echo calls to class methods and static functions
"""
for _, method in inspect.getmembers(klass, inspect.ismethod):
        # In Python 3 only class methods are returned here, but in Python 2 instance methods are too.
echo_instancemethod(klass, method, write)
for _, fn in inspect.getmembers(klass, inspect.isfunction):
if is_static_method(fn, klass):
setattr(klass, name(fn), staticmethod(echo(fn, write)))
else:
            # It's not a class method or a static method, so it must be an instance method.
            # This branch should only be reached on Python 3, where instance methods are plain functions.
echo_instancemethod(klass, fn, write)
def echo_module(mod, write=sys.stdout.write):
""" Echo calls to functions and methods in a module.
"""
for fname, fn in inspect.getmembers(mod, inspect.isfunction):
setattr(mod, fname, echo(fn, write))
for _, klass in inspect.getmembers(mod, inspect.isclass):
echo_class(klass, write)
if __name__ == "__main__":
import doctest
optionflags = doctest.ELLIPSIS
doctest.testfile('echoexample.txt', optionflags=optionflags)
doctest.testmod(optionflags=optionflags)
| 5,313 | Python | 31.601227 | 116 | 0.659703 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/utils/dirsnapshot.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
# Copyright 2014 Thomas Amland <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.utils.dirsnapshot
:synopsis: Directory snapshots and comparison.
:author: [email protected] (Yesudeep Mangalapilly)
.. ADMONITION:: Where are the moved events? They "disappeared"
This implementation does not take partition boundaries
into consideration. It will only work when the directory
tree is entirely on the same file system. More specifically,
any part of the code that depends on inode numbers can
break if partition boundaries are crossed. In these cases,
the snapshot diff will represent file/directory movement as
created and deleted events.
Classes
-------
.. autoclass:: DirectorySnapshot
:members:
:show-inheritance:
.. autoclass:: DirectorySnapshotDiff
:members:
:show-inheritance:
.. autoclass:: EmptyDirectorySnapshot
:members:
:show-inheritance:
"""
import errno
import os
from stat import S_ISDIR
from watchdog.utils import stat as default_stat
try:
from os import scandir
except ImportError:
from os import listdir as scandir
class DirectorySnapshotDiff(object):
"""
Compares two directory snapshots and creates an object that represents
the difference between the two snapshots.
:param ref:
The reference directory snapshot.
:type ref:
:class:`DirectorySnapshot`
:param snapshot:
The directory snapshot which will be compared
with the reference snapshot.
:type snapshot:
:class:`DirectorySnapshot`
:param ignore_device:
A boolean indicating whether to ignore the device id or not.
        By default, a file is uniquely identified by the combination of its
        inode and its device id. The problem is that the device id may (or may
        not) change between system boots, which would cause the
        DirectorySnapshotDiff to report the exact same file as deleted and
        created again.
Set to True only if you are sure you will always use the same device.
:type ignore_device:
:class:`bool`
"""
def __init__(self, ref, snapshot, ignore_device=False):
created = snapshot.paths - ref.paths
deleted = ref.paths - snapshot.paths
if ignore_device:
def get_inode(directory, full_path):
return directory.inode(full_path)[0]
else:
def get_inode(directory, full_path):
return directory.inode(full_path)
# check that all unchanged paths have the same inode
for path in ref.paths & snapshot.paths:
if get_inode(ref, path) != get_inode(snapshot, path):
created.add(path)
deleted.add(path)
# find moved paths
moved = set()
for path in set(deleted):
inode = ref.inode(path)
new_path = snapshot.path(inode)
if new_path:
# file is not deleted but moved
deleted.remove(path)
moved.add((path, new_path))
for path in set(created):
inode = snapshot.inode(path)
old_path = ref.path(inode)
if old_path:
created.remove(path)
moved.add((old_path, path))
# find modified paths
# first check paths that have not moved
modified = set()
for path in ref.paths & snapshot.paths:
if get_inode(ref, path) == get_inode(snapshot, path):
if ref.mtime(path) != snapshot.mtime(path) or ref.size(path) != snapshot.size(path):
modified.add(path)
for (old_path, new_path) in moved:
if ref.mtime(old_path) != snapshot.mtime(new_path) or ref.size(old_path) != snapshot.size(new_path):
modified.add(old_path)
self._dirs_created = [path for path in created if snapshot.isdir(path)]
self._dirs_deleted = [path for path in deleted if ref.isdir(path)]
self._dirs_modified = [path for path in modified if ref.isdir(path)]
self._dirs_moved = [(frm, to) for (frm, to) in moved if ref.isdir(frm)]
self._files_created = list(created - set(self._dirs_created))
self._files_deleted = list(deleted - set(self._dirs_deleted))
self._files_modified = list(modified - set(self._dirs_modified))
self._files_moved = list(moved - set(self._dirs_moved))
def __str__(self):
return self.__repr__()
def __repr__(self):
fmt = (
'<{0} files(created={1}, deleted={2}, modified={3}, moved={4}),'
' folders(created={5}, deleted={6}, modified={7}, moved={8})>'
)
return fmt.format(
type(self).__name__,
len(self._files_created),
len(self._files_deleted),
len(self._files_modified),
len(self._files_moved),
len(self._dirs_created),
len(self._dirs_deleted),
len(self._dirs_modified),
len(self._dirs_moved)
)
@property
def files_created(self):
"""List of files that were created."""
return self._files_created
@property
def files_deleted(self):
"""List of files that were deleted."""
return self._files_deleted
@property
def files_modified(self):
"""List of files that were modified."""
return self._files_modified
@property
def files_moved(self):
"""
List of files that were moved.
        Each event is a two-tuple: the first item is the original path,
        which has been renamed to the second item.
"""
return self._files_moved
@property
def dirs_modified(self):
"""
List of directories that were modified.
"""
return self._dirs_modified
@property
def dirs_moved(self):
"""
List of directories that were moved.
        Each event is a two-tuple: the first item is the original path,
        which has been renamed to the second item.
"""
return self._dirs_moved
@property
def dirs_deleted(self):
"""
List of directories that were deleted.
"""
return self._dirs_deleted
@property
def dirs_created(self):
"""
List of directories that were created.
"""
return self._dirs_created
class DirectorySnapshot(object):
"""
A snapshot of stat information of files in a directory.
:param path:
The directory path for which a snapshot should be taken.
:type path:
``str``
:param recursive:
``True`` if the entire directory tree should be included in the
snapshot; ``False`` otherwise.
:type recursive:
``bool``
:param stat:
Use custom stat function that returns a stat structure for path.
Currently only st_dev, st_ino, st_mode and st_mtime are needed.
A function taking a ``path`` as argument which will be called
for every entry in the directory tree.
:param listdir:
Use custom listdir function. For details see ``os.scandir`` if available, else ``os.listdir``.
"""
def __init__(self, path, recursive=True,
stat=default_stat,
listdir=scandir):
self.recursive = recursive
self.stat = stat
self.listdir = listdir
self._stat_info = {}
self._inode_to_path = {}
st = self.stat(path)
self._stat_info[path] = st
self._inode_to_path[(st.st_ino, st.st_dev)] = path
for p, st in self.walk(path):
i = (st.st_ino, st.st_dev)
self._inode_to_path[i] = p
self._stat_info[p] = st
def walk(self, root):
try:
paths = [os.path.join(root, entry if isinstance(entry, str) else entry.name)
for entry in self.listdir(root)]
except OSError as e:
            # Directory may have been deleted between finding it in the directory
            # list of its parent and trying to list its contents. If this
            # happens we treat it as empty. Likewise if the directory was replaced
# with a file of the same name (less likely, but possible).
if e.errno in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
return
else:
raise
entries = []
for p in paths:
try:
entry = (p, self.stat(p))
entries.append(entry)
yield entry
except OSError:
continue
if self.recursive:
for path, st in entries:
try:
if S_ISDIR(st.st_mode):
for entry in self.walk(path):
yield entry
except (IOError, OSError) as e:
# IOError for Python 2
# OSError for Python 3
# (should be only PermissionError when dropping Python 2 support)
if e.errno != errno.EACCES:
raise
@property
def paths(self):
"""
Set of file/directory paths in the snapshot.
"""
return set(self._stat_info.keys())
def path(self, id):
"""
Returns path for id. None if id is unknown to this snapshot.
"""
return self._inode_to_path.get(id)
def inode(self, path):
""" Returns an id for path. """
st = self._stat_info[path]
return (st.st_ino, st.st_dev)
def isdir(self, path):
return S_ISDIR(self._stat_info[path].st_mode)
def mtime(self, path):
return self._stat_info[path].st_mtime
def size(self, path):
return self._stat_info[path].st_size
def stat_info(self, path):
"""
Returns a stat information object for the specified path from
the snapshot.
Attached information is subject to change. Do not use unless
you specify `stat` in constructor. Use :func:`inode`, :func:`mtime`,
:func:`isdir` instead.
:param path:
The path for which stat information should be obtained
from a snapshot.
"""
return self._stat_info[path]
def __sub__(self, previous_dirsnap):
"""Allow subtracting a DirectorySnapshot object instance from
another.
:returns:
A :class:`DirectorySnapshotDiff` object.
"""
return DirectorySnapshotDiff(previous_dirsnap, self)
def __str__(self):
return self.__repr__()
def __repr__(self):
return str(self._stat_info)
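# A minimal usage sketch (hypothetical path): take two snapshots of the same
# directory and subtract them to obtain a DirectorySnapshotDiff.
#
#     before = DirectorySnapshot('/tmp/watched')
#     # ... file system activity happens here ...
#     after = DirectorySnapshot('/tmp/watched')
#     diff = after - before          # invokes __sub__ above
#     print(diff.files_created, diff.dirs_moved)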
class EmptyDirectorySnapshot(object):
"""Class to implement an empty snapshot. This is used together with
DirectorySnapshot and DirectorySnapshotDiff in order to get all the files/folders
in the directory as created.
"""
@staticmethod
def path(_):
"""Mock up method to return the path of the received inode. As the snapshot
is intended to be empty, it always returns None.
:returns:
None.
"""
return None
@property
def paths(self):
"""Mock up method to return a set of file/directory paths in the snapshot. As
the snapshot is intended to be empty, it always returns an empty set.
:returns:
An empty set.
"""
return set()
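# A minimal sketch of the intended use: diffing a real snapshot against an
# empty one reports every existing file/directory as created (path assumed
# for illustration).
#
#     empty = EmptyDirectorySnapshot()
#     current = DirectorySnapshot('/tmp/watched')
#     diff = DirectorySnapshotDiff(empty, current)
#     assert not diff.files_deleted and not diff.dirs_deleted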
| 12,229 | Python | 31.099737 | 112 | 0.593098 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/utils/platform.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
PLATFORM_WINDOWS = 'windows'
PLATFORM_LINUX = 'linux'
PLATFORM_BSD = 'bsd'
PLATFORM_DARWIN = 'darwin'
PLATFORM_UNKNOWN = 'unknown'
def get_platform_name():
if sys.platform.startswith("win"):
return PLATFORM_WINDOWS
elif sys.platform.startswith('darwin'):
return PLATFORM_DARWIN
elif sys.platform.startswith('linux'):
return PLATFORM_LINUX
elif sys.platform.startswith(('dragonfly', 'freebsd', 'netbsd', 'openbsd', 'bsd')):
return PLATFORM_BSD
else:
return PLATFORM_UNKNOWN
__platform__ = get_platform_name()
def is_linux():
return __platform__ == PLATFORM_LINUX
def is_bsd():
return __platform__ == PLATFORM_BSD
def is_darwin():
return __platform__ == PLATFORM_DARWIN
def is_windows():
return __platform__ == PLATFORM_WINDOWS
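# A minimal usage sketch: these predicates are typically used to select a
# platform-specific observer implementation, e.g.:
#
#     from watchdog.utils import platform
#     if platform.is_linux():
#         from watchdog.observers.inotify import InotifyObserver as Observer
#     elif platform.is_darwin():
#         from watchdog.observers.fsevents import FSEventsObserver as Observer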
| 1,512 | Python | 24.644067 | 87 | 0.69709 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/observers/fsevents.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.observers.fsevents
:synopsis: FSEvents based emitter implementation.
:author: [email protected] (Yesudeep Mangalapilly)
:platforms: Mac OS X
"""
from __future__ import with_statement
import os
import sys
import threading
import unicodedata
import _watchdog_fsevents as _fsevents
from watchdog.events import (
FileDeletedEvent,
FileModifiedEvent,
FileCreatedEvent,
FileMovedEvent,
DirDeletedEvent,
DirModifiedEvent,
DirCreatedEvent,
DirMovedEvent
)
from watchdog.observers.api import (
BaseObserver,
EventEmitter,
DEFAULT_EMITTER_TIMEOUT,
DEFAULT_OBSERVER_TIMEOUT
)
class FSEventsEmitter(EventEmitter):
"""
Mac OS X FSEvents Emitter class.
:param event_queue:
The event queue to fill with events.
:param watch:
A watch object representing the directory to monitor.
:type watch:
:class:`watchdog.observers.api.ObservedWatch`
:param timeout:
Read events blocking timeout (in seconds).
:type timeout:
``float``
"""
def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT):
EventEmitter.__init__(self, event_queue, watch, timeout)
self._lock = threading.Lock()
def on_thread_stop(self):
if self.watch:
_fsevents.remove_watch(self.watch)
_fsevents.stop(self)
self._watch = None
def queue_events(self, timeout):
with self._lock:
events = self.native_events
i = 0
while i < len(events):
event = events[i]
# For some reason the create and remove flags are sometimes also
# set for rename and modify type events, so let those take
# precedence.
if event.is_renamed:
                    # Internal moves appear to always be consecutive in the same
                    # buffer and have IDs that differ by exactly one (while others
                    # don't), making it possible to pair up the two events coming
                    # from a single move operation. (None of this is documented!)
# Otherwise, guess whether file was moved in or out.
# TODO: handle id wrapping
if (i + 1 < len(events) and events[i + 1].is_renamed
and events[i + 1].event_id == event.event_id + 1):
cls = DirMovedEvent if event.is_directory else FileMovedEvent
self.queue_event(cls(event.path, events[i + 1].path))
self.queue_event(DirModifiedEvent(os.path.dirname(event.path)))
self.queue_event(DirModifiedEvent(os.path.dirname(events[i + 1].path)))
i += 1
elif os.path.exists(event.path):
cls = DirCreatedEvent if event.is_directory else FileCreatedEvent
self.queue_event(cls(event.path))
self.queue_event(DirModifiedEvent(os.path.dirname(event.path)))
else:
cls = DirDeletedEvent if event.is_directory else FileDeletedEvent
self.queue_event(cls(event.path))
self.queue_event(DirModifiedEvent(os.path.dirname(event.path)))
# TODO: generate events for tree
                elif event.is_modified or event.is_inode_meta_mod or event.is_xattr_mod:
cls = DirModifiedEvent if event.is_directory else FileModifiedEvent
self.queue_event(cls(event.path))
elif event.is_created:
cls = DirCreatedEvent if event.is_directory else FileCreatedEvent
self.queue_event(cls(event.path))
self.queue_event(DirModifiedEvent(os.path.dirname(event.path)))
elif event.is_removed:
cls = DirDeletedEvent if event.is_directory else FileDeletedEvent
self.queue_event(cls(event.path))
self.queue_event(DirModifiedEvent(os.path.dirname(event.path)))
i += 1
def run(self):
try:
def callback(pathnames, flags, ids, emitter=self):
with emitter._lock:
emitter.native_events = [
_fsevents.NativeEvent(event_path, event_flags, event_id)
for event_path, event_flags, event_id in zip(pathnames, flags, ids)
]
emitter.queue_events(emitter.timeout)
# for pathname, flag in zip(pathnames, flags):
# if emitter.watch.is_recursive: # and pathname != emitter.watch.path:
# new_sub_snapshot = DirectorySnapshot(pathname, True)
# old_sub_snapshot = self.snapshot.copy(pathname)
# diff = new_sub_snapshot - old_sub_snapshot
# self.snapshot += new_subsnapshot
# else:
# new_snapshot = DirectorySnapshot(emitter.watch.path, False)
# diff = new_snapshot - emitter.snapshot
# emitter.snapshot = new_snapshot
# INFO: FSEvents reports directory notifications recursively
# by default, so we do not need to add subdirectory paths.
# pathnames = set([self.watch.path])
# if self.watch.is_recursive:
# for root, directory_names, _ in os.walk(self.watch.path):
# for directory_name in directory_names:
# full_path = absolute_path(
# os.path.join(root, directory_name))
# pathnames.add(full_path)
self.pathnames = [self.watch.path]
_fsevents.add_watch(self,
self.watch,
callback,
self.pathnames)
_fsevents.read_events(self)
except Exception:
pass
class FSEventsObserver(BaseObserver):
def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT):
BaseObserver.__init__(self, emitter_class=FSEventsEmitter,
timeout=timeout)
def schedule(self, event_handler, path, recursive=False):
# Python 2/3 compat
try:
str_class = unicode
except NameError:
str_class = str
# Fix for issue #26: Trace/BPT error when given a unicode path
# string. https://github.com/gorakhargosh/watchdog/issues#issue/26
if isinstance(path, str_class):
# path = unicode(path, 'utf-8')
path = unicodedata.normalize('NFC', path)
# We only encode the path in Python 2 for backwards compatibility.
# On Python 3 we want the path to stay as unicode if possible for
# the sake of path matching not having to be rewritten to use the
# bytes API instead of strings. The _watchdog_fsevent.so code for
# Python 3 can handle both str and bytes paths, which is why we
# do not HAVE to encode it with Python 3. The Python 2 code in
# _watchdog_fsevents.so was not changed for the sake of backwards
# compatibility.
if sys.version_info < (3,):
path = path.encode('utf-8')
return BaseObserver.schedule(self, event_handler, path, recursive)
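# A minimal usage sketch (hypothetical path), following the generic watchdog
# observer API:
#
#     from watchdog.events import LoggingEventHandler
#     observer = FSEventsObserver()
#     observer.schedule(LoggingEventHandler(), u'/path/to/watch', recursive=True)
#     observer.start()
#     # ... later ...
#     observer.stop()
#     observer.join()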
| 8,156 | Python | 40.196969 | 95 | 0.584723 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/observers/winapi.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# winapi.py: Windows API-Python interface (removes dependency on pywin32)
#
# Copyright (C) 2007 Thomas Heller <[email protected]>
# Copyright (C) 2010 Will McGugan <[email protected]>
# Copyright (C) 2010 Ryan Kelly <[email protected]>
# Copyright (C) 2010 Yesudeep Mangalapilly <[email protected]>
# Copyright (C) 2014 Thomas Amland
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and / or other materials provided with the distribution.
# * Neither the name of the organization nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Portions of this code were taken from pyfilesystem, which uses the above
# new BSD license.
import ctypes.wintypes
from functools import reduce
LPVOID = ctypes.wintypes.LPVOID
# Invalid handle value.
INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value
# File notification constants.
FILE_NOTIFY_CHANGE_FILE_NAME = 0x01
FILE_NOTIFY_CHANGE_DIR_NAME = 0x02
FILE_NOTIFY_CHANGE_ATTRIBUTES = 0x04
FILE_NOTIFY_CHANGE_SIZE = 0x08
FILE_NOTIFY_CHANGE_LAST_WRITE = 0x010
FILE_NOTIFY_CHANGE_LAST_ACCESS = 0x020
FILE_NOTIFY_CHANGE_CREATION = 0x040
FILE_NOTIFY_CHANGE_SECURITY = 0x0100
FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
FILE_FLAG_OVERLAPPED = 0x40000000
FILE_LIST_DIRECTORY = 1
FILE_SHARE_READ = 0x01
FILE_SHARE_WRITE = 0x02
FILE_SHARE_DELETE = 0x04
OPEN_EXISTING = 3
VOLUME_NAME_NT = 0x02
# File action constants.
FILE_ACTION_CREATED = 1
FILE_ACTION_DELETED = 2
FILE_ACTION_MODIFIED = 3
FILE_ACTION_RENAMED_OLD_NAME = 4
FILE_ACTION_RENAMED_NEW_NAME = 5
FILE_ACTION_DELETED_SELF = 0xFFFE
FILE_ACTION_OVERFLOW = 0xFFFF
# Aliases
FILE_ACTION_ADDED = FILE_ACTION_CREATED
FILE_ACTION_REMOVED = FILE_ACTION_DELETED
FILE_ACTION_REMOVED_SELF = FILE_ACTION_DELETED_SELF
THREAD_TERMINATE = 0x0001
# IO waiting constants.
WAIT_ABANDONED = 0x00000080
WAIT_IO_COMPLETION = 0x000000C0
WAIT_OBJECT_0 = 0x00000000
WAIT_TIMEOUT = 0x00000102
# Error codes
ERROR_OPERATION_ABORTED = 995
class OVERLAPPED(ctypes.Structure):
_fields_ = [('Internal', LPVOID),
('InternalHigh', LPVOID),
('Offset', ctypes.wintypes.DWORD),
('OffsetHigh', ctypes.wintypes.DWORD),
('Pointer', LPVOID),
('hEvent', ctypes.wintypes.HANDLE),
]
def _errcheck_bool(value, func, args):
if not value:
raise ctypes.WinError()
return args
def _errcheck_handle(value, func, args):
if not value:
raise ctypes.WinError()
if value == INVALID_HANDLE_VALUE:
raise ctypes.WinError()
return args
def _errcheck_dword(value, func, args):
if value == 0xFFFFFFFF:
raise ctypes.WinError()
return args
kernel32 = ctypes.WinDLL("kernel32")
ReadDirectoryChangesW = kernel32.ReadDirectoryChangesW
ReadDirectoryChangesW.restype = ctypes.wintypes.BOOL
ReadDirectoryChangesW.errcheck = _errcheck_bool
ReadDirectoryChangesW.argtypes = (
ctypes.wintypes.HANDLE, # hDirectory
LPVOID, # lpBuffer
ctypes.wintypes.DWORD, # nBufferLength
ctypes.wintypes.BOOL, # bWatchSubtree
ctypes.wintypes.DWORD, # dwNotifyFilter
ctypes.POINTER(ctypes.wintypes.DWORD), # lpBytesReturned
ctypes.POINTER(OVERLAPPED), # lpOverlapped
LPVOID # FileIOCompletionRoutine # lpCompletionRoutine
)
CreateFileW = kernel32.CreateFileW
CreateFileW.restype = ctypes.wintypes.HANDLE
CreateFileW.errcheck = _errcheck_handle
CreateFileW.argtypes = (
ctypes.wintypes.LPCWSTR, # lpFileName
ctypes.wintypes.DWORD, # dwDesiredAccess
ctypes.wintypes.DWORD, # dwShareMode
LPVOID, # lpSecurityAttributes
ctypes.wintypes.DWORD, # dwCreationDisposition
ctypes.wintypes.DWORD, # dwFlagsAndAttributes
ctypes.wintypes.HANDLE # hTemplateFile
)
CloseHandle = kernel32.CloseHandle
CloseHandle.restype = ctypes.wintypes.BOOL
CloseHandle.argtypes = (
ctypes.wintypes.HANDLE, # hObject
)
CancelIoEx = kernel32.CancelIoEx
CancelIoEx.restype = ctypes.wintypes.BOOL
CancelIoEx.errcheck = _errcheck_bool
CancelIoEx.argtypes = (
ctypes.wintypes.HANDLE, # hObject
ctypes.POINTER(OVERLAPPED) # lpOverlapped
)
CreateEvent = kernel32.CreateEventW
CreateEvent.restype = ctypes.wintypes.HANDLE
CreateEvent.errcheck = _errcheck_handle
CreateEvent.argtypes = (
LPVOID, # lpEventAttributes
ctypes.wintypes.BOOL, # bManualReset
ctypes.wintypes.BOOL, # bInitialState
ctypes.wintypes.LPCWSTR, # lpName
)
SetEvent = kernel32.SetEvent
SetEvent.restype = ctypes.wintypes.BOOL
SetEvent.errcheck = _errcheck_bool
SetEvent.argtypes = (
ctypes.wintypes.HANDLE, # hEvent
)
WaitForSingleObjectEx = kernel32.WaitForSingleObjectEx
WaitForSingleObjectEx.restype = ctypes.wintypes.DWORD
WaitForSingleObjectEx.errcheck = _errcheck_dword
WaitForSingleObjectEx.argtypes = (
ctypes.wintypes.HANDLE, # hObject
ctypes.wintypes.DWORD, # dwMilliseconds
ctypes.wintypes.BOOL, # bAlertable
)
CreateIoCompletionPort = kernel32.CreateIoCompletionPort
CreateIoCompletionPort.restype = ctypes.wintypes.HANDLE
CreateIoCompletionPort.errcheck = _errcheck_handle
CreateIoCompletionPort.argtypes = (
ctypes.wintypes.HANDLE, # FileHandle
ctypes.wintypes.HANDLE, # ExistingCompletionPort
LPVOID, # CompletionKey
ctypes.wintypes.DWORD, # NumberOfConcurrentThreads
)
GetQueuedCompletionStatus = kernel32.GetQueuedCompletionStatus
GetQueuedCompletionStatus.restype = ctypes.wintypes.BOOL
GetQueuedCompletionStatus.errcheck = _errcheck_bool
GetQueuedCompletionStatus.argtypes = (
ctypes.wintypes.HANDLE, # CompletionPort
LPVOID, # lpNumberOfBytesTransferred
LPVOID, # lpCompletionKey
ctypes.POINTER(OVERLAPPED), # lpOverlapped
ctypes.wintypes.DWORD, # dwMilliseconds
)
PostQueuedCompletionStatus = kernel32.PostQueuedCompletionStatus
PostQueuedCompletionStatus.restype = ctypes.wintypes.BOOL
PostQueuedCompletionStatus.errcheck = _errcheck_bool
PostQueuedCompletionStatus.argtypes = (
ctypes.wintypes.HANDLE, # CompletionPort
ctypes.wintypes.DWORD, # lpNumberOfBytesTransferred
ctypes.wintypes.DWORD, # lpCompletionKey
ctypes.POINTER(OVERLAPPED), # lpOverlapped
)
GetFinalPathNameByHandleW = kernel32.GetFinalPathNameByHandleW
GetFinalPathNameByHandleW.restype = ctypes.wintypes.DWORD
GetFinalPathNameByHandleW.errcheck = _errcheck_dword
GetFinalPathNameByHandleW.argtypes = (
ctypes.wintypes.HANDLE, # hFile
ctypes.wintypes.LPWSTR, # lpszFilePath
ctypes.wintypes.DWORD, # cchFilePath
ctypes.wintypes.DWORD, # DWORD
)
class FILE_NOTIFY_INFORMATION(ctypes.Structure):
_fields_ = [("NextEntryOffset", ctypes.wintypes.DWORD),
("Action", ctypes.wintypes.DWORD),
("FileNameLength", ctypes.wintypes.DWORD),
# ("FileName", (ctypes.wintypes.WCHAR * 1))]
("FileName", (ctypes.c_char * 1))]
LPFNI = ctypes.POINTER(FILE_NOTIFY_INFORMATION)
# We don't need to recalculate these flags every time a call is made to
# the win32 API functions.
WATCHDOG_FILE_FLAGS = FILE_FLAG_BACKUP_SEMANTICS
WATCHDOG_FILE_SHARE_FLAGS = reduce(
lambda x, y: x | y, [
FILE_SHARE_READ,
FILE_SHARE_WRITE,
FILE_SHARE_DELETE,
])
WATCHDOG_FILE_NOTIFY_FLAGS = reduce(
lambda x, y: x | y, [
FILE_NOTIFY_CHANGE_FILE_NAME,
FILE_NOTIFY_CHANGE_DIR_NAME,
FILE_NOTIFY_CHANGE_ATTRIBUTES,
FILE_NOTIFY_CHANGE_SIZE,
FILE_NOTIFY_CHANGE_LAST_WRITE,
FILE_NOTIFY_CHANGE_SECURITY,
FILE_NOTIFY_CHANGE_LAST_ACCESS,
FILE_NOTIFY_CHANGE_CREATION,
])
BUFFER_SIZE = 2048
def _parse_event_buffer(readBuffer, nBytes):
results = []
while nBytes > 0:
fni = ctypes.cast(readBuffer, LPFNI)[0]
ptr = ctypes.addressof(fni) + FILE_NOTIFY_INFORMATION.FileName.offset
# filename = ctypes.wstring_at(ptr, fni.FileNameLength)
filename = ctypes.string_at(ptr, fni.FileNameLength)
results.append((fni.Action, filename.decode('utf-16')))
numToSkip = fni.NextEntryOffset
if numToSkip <= 0:
break
readBuffer = readBuffer[numToSkip:]
nBytes -= numToSkip # numToSkip is long. nBytes should be long too.
return results
def _is_observed_path_deleted(handle, path):
    # Compare the observed path with the actual path returned by
    # GetFinalPathNameByHandleW. If the directory was moved to the trash bin
    # or deleted, the actual path will no longer equal the observed path.
buff = ctypes.create_unicode_buffer(BUFFER_SIZE)
GetFinalPathNameByHandleW(handle, buff, BUFFER_SIZE, VOLUME_NAME_NT)
return buff.value != path
def _generate_observed_path_deleted_event():
    # Create a synthetic event to notify that the observed directory was deleted
path = ctypes.create_unicode_buffer('.')
event = FILE_NOTIFY_INFORMATION(0, FILE_ACTION_DELETED_SELF, len(path), path.value.encode("utf-8"))
event_size = ctypes.sizeof(event)
buff = ctypes.create_string_buffer(BUFFER_SIZE)
ctypes.memmove(buff, ctypes.addressof(event), event_size)
return buff, event_size
def get_directory_handle(path):
"""Returns a Windows handle to the specified directory path."""
return CreateFileW(path, FILE_LIST_DIRECTORY, WATCHDOG_FILE_SHARE_FLAGS,
None, OPEN_EXISTING, WATCHDOG_FILE_FLAGS, None)
def close_directory_handle(handle):
try:
CancelIoEx(handle, None) # force ReadDirectoryChangesW to return
CloseHandle(handle) # close directory handle
except WindowsError:
try:
CloseHandle(handle) # close directory handle
except Exception:
return
def read_directory_changes(handle, path, recursive):
"""Read changes to the directory using the specified directory handle.
http://timgolden.me.uk/pywin32-docs/win32file__ReadDirectoryChangesW_meth.html
"""
event_buffer = ctypes.create_string_buffer(BUFFER_SIZE)
nbytes = ctypes.wintypes.DWORD()
try:
ReadDirectoryChangesW(handle, ctypes.byref(event_buffer),
len(event_buffer), recursive,
WATCHDOG_FILE_NOTIFY_FLAGS,
ctypes.byref(nbytes), None, None)
except WindowsError as e:
if e.winerror == ERROR_OPERATION_ABORTED:
return [], 0
# Handle the case when the root path is deleted
if _is_observed_path_deleted(handle, path):
return _generate_observed_path_deleted_event()
raise e
# Python 2/3 compat
try:
int_class = long
except NameError:
int_class = int
return event_buffer.raw, int_class(nbytes.value)
class WinAPINativeEvent(object):
def __init__(self, action, src_path):
self.action = action
self.src_path = src_path
@property
def is_added(self):
return self.action == FILE_ACTION_CREATED
@property
def is_removed(self):
return self.action == FILE_ACTION_REMOVED
@property
def is_modified(self):
return self.action == FILE_ACTION_MODIFIED
@property
def is_renamed_old(self):
return self.action == FILE_ACTION_RENAMED_OLD_NAME
@property
def is_renamed_new(self):
return self.action == FILE_ACTION_RENAMED_NEW_NAME
@property
def is_removed_self(self):
return self.action == FILE_ACTION_REMOVED_SELF
def __repr__(self):
return ("<%s: action=%d, src_path=%r>" % (
type(self).__name__, self.action, self.src_path))
def read_events(handle, path, recursive):
buf, nbytes = read_directory_changes(handle, path, recursive)
events = _parse_event_buffer(buf, nbytes)
return [WinAPINativeEvent(action, src_path) for action, src_path in events]
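# A minimal usage sketch (hypothetical path): one blocking poll of a watched
# directory.
#
#     handle = get_directory_handle(u'C:\\watched')
#     for event in read_events(handle, u'C:\\watched', recursive=True):
#         print(event.action, event.src_path)
#     close_directory_handle(handle)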
| 13,063 | Python | 32.497436 | 103 | 0.714384 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/observers/polling.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.observers.polling
:synopsis: Polling emitter implementation.
:author: [email protected] (Yesudeep Mangalapilly)
Classes
-------
.. autoclass:: PollingObserver
:members:
:show-inheritance:
.. autoclass:: PollingObserverVFS
:members:
:show-inheritance:
:special-members:
"""
from __future__ import with_statement
import threading
from functools import partial
from watchdog.utils import stat as default_stat
from watchdog.utils.dirsnapshot import DirectorySnapshot, DirectorySnapshotDiff
from watchdog.observers.api import (
EventEmitter,
BaseObserver,
DEFAULT_OBSERVER_TIMEOUT,
DEFAULT_EMITTER_TIMEOUT
)
from watchdog.events import (
DirMovedEvent,
DirDeletedEvent,
DirCreatedEvent,
DirModifiedEvent,
FileMovedEvent,
FileDeletedEvent,
FileCreatedEvent,
FileModifiedEvent
)
try:
from os import scandir
except ImportError:
from os import listdir as scandir
class PollingEmitter(EventEmitter):
"""
Platform-independent emitter that polls a directory to detect file
system changes.
"""
def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT,
stat=default_stat, listdir=scandir):
EventEmitter.__init__(self, event_queue, watch, timeout)
self._snapshot = None
self._lock = threading.Lock()
self._take_snapshot = lambda: DirectorySnapshot(
self.watch.path, self.watch.is_recursive, stat=stat, listdir=listdir)
def on_thread_start(self):
self._snapshot = self._take_snapshot()
def queue_events(self, timeout):
# We don't want to hit the disk continuously.
# timeout behaves like an interval for polling emitters.
if self.stopped_event.wait(timeout):
return
with self._lock:
if not self.should_keep_running():
return
# Get event diff between fresh snapshot and previous snapshot.
# Update snapshot.
try:
new_snapshot = self._take_snapshot()
except OSError:
self.queue_event(DirDeletedEvent(self.watch.path))
self.stop()
return
events = DirectorySnapshotDiff(self._snapshot, new_snapshot)
self._snapshot = new_snapshot
# Files.
for src_path in events.files_deleted:
self.queue_event(FileDeletedEvent(src_path))
for src_path in events.files_modified:
self.queue_event(FileModifiedEvent(src_path))
for src_path in events.files_created:
self.queue_event(FileCreatedEvent(src_path))
for src_path, dest_path in events.files_moved:
self.queue_event(FileMovedEvent(src_path, dest_path))
# Directories.
for src_path in events.dirs_deleted:
self.queue_event(DirDeletedEvent(src_path))
for src_path in events.dirs_modified:
self.queue_event(DirModifiedEvent(src_path))
for src_path in events.dirs_created:
self.queue_event(DirCreatedEvent(src_path))
for src_path, dest_path in events.dirs_moved:
self.queue_event(DirMovedEvent(src_path, dest_path))
class PollingObserver(BaseObserver):
"""
Platform-independent observer that polls a directory to detect file
system changes.
"""
def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT):
BaseObserver.__init__(self, emitter_class=PollingEmitter, timeout=timeout)
class PollingObserverVFS(BaseObserver):
"""
File system independent observer that polls a directory to detect changes.
"""
def __init__(self, stat, listdir, polling_interval=1):
"""
:param stat: stat function. See ``os.stat`` for details.
:param listdir: listdir function. See ``os.listdir`` for details.
:type polling_interval: float
:param polling_interval: interval in seconds between polling the file system.
"""
emitter_cls = partial(PollingEmitter, stat=stat, listdir=listdir)
BaseObserver.__init__(self, emitter_class=emitter_cls, timeout=polling_interval)
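# A minimal usage sketch: back the VFS observer with the ordinary os
# functions (any stat/listdir pair with compatible signatures would do).
#
#     import os
#     observer = PollingObserverVFS(stat=os.stat, listdir=os.scandir,
#                                   polling_interval=2)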
| 4,929 | Python | 31.866666 | 88 | 0.661392 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/observers/inotify.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.observers.inotify
:synopsis: ``inotify(7)`` based emitter implementation.
:author: Sebastien Martini <[email protected]>
:author: Luke McCarthy <[email protected]>
:author: [email protected] (Yesudeep Mangalapilly)
:author: Tim Cuthbertson <[email protected]>
:platforms: Linux 2.6.13+.
.. ADMONITION:: About system requirements
Recommended minimum kernel version: 2.6.25.
Quote from the inotify(7) man page:
"Inotify was merged into the 2.6.13 Linux kernel. The required library
interfaces were added to glibc in version 2.4. (IN_DONT_FOLLOW,
IN_MASK_ADD, and IN_ONLYDIR were only added in version 2.5.)"
    Therefore, you must ensure the system is running at least these versions
    of the libraries and the kernel.
.. ADMONITION:: About recursiveness, event order, and event coalescing
Quote from the inotify(7) man page:
If successive output inotify events produced on the inotify file
descriptor are identical (same wd, mask, cookie, and name) then they
are coalesced into a single event if the older event has not yet been
read (but see BUGS).
The events returned by reading from an inotify file descriptor form
an ordered queue. Thus, for example, it is guaranteed that when
renaming from one directory to another, events will be produced in
the correct order on the inotify file descriptor.
...
Inotify monitoring of directories is not recursive: to monitor
subdirectories under a directory, additional watches must be created.
This emitter implementation therefore automatically adds watches for
sub-directories if running in recursive mode.
Some extremely useful articles and documentation:
.. _inotify FAQ: http://inotify.aiken.cz/?section=inotify&page=faq&lang=en
.. _intro to inotify: http://www.linuxjournal.com/article/8478
"""
from __future__ import with_statement
import os
import threading
from .inotify_buffer import InotifyBuffer
from watchdog.observers.api import (
EventEmitter,
BaseObserver,
DEFAULT_EMITTER_TIMEOUT,
DEFAULT_OBSERVER_TIMEOUT
)
from watchdog.events import (
DirDeletedEvent,
DirModifiedEvent,
DirMovedEvent,
DirCreatedEvent,
FileDeletedEvent,
FileModifiedEvent,
FileMovedEvent,
FileCreatedEvent,
generate_sub_moved_events,
generate_sub_created_events,
)
from watchdog.utils import unicode_paths
class InotifyEmitter(EventEmitter):
"""
inotify(7)-based event emitter.
:param event_queue:
The event queue to fill with events.
:param watch:
A watch object representing the directory to monitor.
:type watch:
:class:`watchdog.observers.api.ObservedWatch`
:param timeout:
Read events blocking timeout (in seconds).
:type timeout:
``float``
"""
def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT):
EventEmitter.__init__(self, event_queue, watch, timeout)
self._lock = threading.Lock()
self._inotify = None
def on_thread_start(self):
path = unicode_paths.encode(self.watch.path)
self._inotify = InotifyBuffer(path, self.watch.is_recursive)
def on_thread_stop(self):
if self._inotify:
self._inotify.close()
def queue_events(self, timeout, full_events=False):
# If "full_events" is true, then the method will report unmatched move events as separate events
# This behavior is by default only called by a InotifyFullEmitter
with self._lock:
event = self._inotify.read_event()
if event is None:
return
if isinstance(event, tuple):
move_from, move_to = event
src_path = self._decode_path(move_from.src_path)
dest_path = self._decode_path(move_to.src_path)
cls = DirMovedEvent if move_from.is_directory else FileMovedEvent
self.queue_event(cls(src_path, dest_path))
self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
self.queue_event(DirModifiedEvent(os.path.dirname(dest_path)))
if move_from.is_directory and self.watch.is_recursive:
for sub_event in generate_sub_moved_events(src_path, dest_path):
self.queue_event(sub_event)
return
src_path = self._decode_path(event.src_path)
if event.is_moved_to:
if full_events:
cls = DirMovedEvent if event.is_directory else FileMovedEvent
self.queue_event(cls(None, src_path))
else:
cls = DirCreatedEvent if event.is_directory else FileCreatedEvent
self.queue_event(cls(src_path))
self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
if event.is_directory and self.watch.is_recursive:
for sub_event in generate_sub_created_events(src_path):
self.queue_event(sub_event)
elif event.is_attrib:
cls = DirModifiedEvent if event.is_directory else FileModifiedEvent
self.queue_event(cls(src_path))
elif event.is_modify:
cls = DirModifiedEvent if event.is_directory else FileModifiedEvent
self.queue_event(cls(src_path))
elif event.is_delete or (event.is_moved_from and not full_events):
cls = DirDeletedEvent if event.is_directory else FileDeletedEvent
self.queue_event(cls(src_path))
self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
elif event.is_moved_from and full_events:
cls = DirMovedEvent if event.is_directory else FileMovedEvent
self.queue_event(cls(src_path, None))
self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
elif event.is_create:
cls = DirCreatedEvent if event.is_directory else FileCreatedEvent
self.queue_event(cls(src_path))
self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
def _decode_path(self, path):
""" Decode path only if unicode string was passed to this emitter. """
if isinstance(self.watch.path, bytes):
return path
return unicode_paths.decode(path)
class InotifyFullEmitter(InotifyEmitter):
"""
    inotify(7)-based event emitter. By default this class produces move
    events even if they are not matched.
    Such move events will have a ``None`` value for the unmatched part.
:param event_queue:
The event queue to fill with events.
:param watch:
A watch object representing the directory to monitor.
:type watch:
:class:`watchdog.observers.api.ObservedWatch`
:param timeout:
Read events blocking timeout (in seconds).
:type timeout:
``float``
"""
def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT):
InotifyEmitter.__init__(self, event_queue, watch, timeout)
def queue_events(self, timeout, events=True):
InotifyEmitter.queue_events(self, timeout, full_events=events)
class InotifyObserver(BaseObserver):
"""
Observer thread that schedules watching directories and dispatches
calls to event handlers.
"""
def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT, generate_full_events=False):
        if generate_full_events:
BaseObserver.__init__(self, emitter_class=InotifyFullEmitter, timeout=timeout)
else:
BaseObserver.__init__(self, emitter_class=InotifyEmitter,
timeout=timeout)
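# A minimal usage sketch (handler and path assumed for illustration): with
# generate_full_events=True, unmatched moves are emitted as move events with
# None on the unknown side instead of being rewritten as create/delete events.
#
#     observer = InotifyObserver(generate_full_events=True)
#     observer.schedule(my_handler, '/path/to/watch', recursive=True)
#     observer.start()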
| 8,525 | Python | 37.754545 | 107 | 0.658534 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/observers/fsevents2.py | # -*- coding: utf-8 -*-
#
# Copyright 2014 Thomas Amland <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.observers.fsevents2
:synopsis: FSEvents based emitter implementation.
:platforms: Mac OS X
"""
import os
import logging
import unicodedata
from threading import Thread
from watchdog.utils.compat import queue
from watchdog.events import (
FileDeletedEvent,
FileModifiedEvent,
FileCreatedEvent,
FileMovedEvent,
DirDeletedEvent,
DirModifiedEvent,
DirCreatedEvent,
DirMovedEvent
)
from watchdog.observers.api import (
BaseObserver,
EventEmitter,
DEFAULT_EMITTER_TIMEOUT,
DEFAULT_OBSERVER_TIMEOUT,
)
# pyobjc
import AppKit
from FSEvents import (
FSEventStreamCreate,
CFRunLoopGetCurrent,
FSEventStreamScheduleWithRunLoop,
FSEventStreamStart,
CFRunLoopRun,
CFRunLoopStop,
FSEventStreamStop,
FSEventStreamInvalidate,
FSEventStreamRelease,
)
from FSEvents import (
kCFAllocatorDefault,
kCFRunLoopDefaultMode,
kFSEventStreamEventIdSinceNow,
kFSEventStreamCreateFlagNoDefer,
kFSEventStreamCreateFlagFileEvents,
kFSEventStreamEventFlagItemCreated,
kFSEventStreamEventFlagItemRemoved,
kFSEventStreamEventFlagItemInodeMetaMod,
kFSEventStreamEventFlagItemRenamed,
kFSEventStreamEventFlagItemModified,
kFSEventStreamEventFlagItemFinderInfoMod,
kFSEventStreamEventFlagItemChangeOwner,
kFSEventStreamEventFlagItemXattrMod,
kFSEventStreamEventFlagItemIsDir,
kFSEventStreamEventFlagItemIsSymlink,
)
logger = logging.getLogger(__name__)
class FSEventsQueue(Thread):
""" Low level FSEvents client. """
def __init__(self, path):
Thread.__init__(self)
self._queue = queue.Queue()
self._run_loop = None
if isinstance(path, bytes):
path = path.decode('utf-8')
self._path = unicodedata.normalize('NFC', path)
context = None
latency = 1.0
self._stream_ref = FSEventStreamCreate(
kCFAllocatorDefault, self._callback, context, [self._path],
kFSEventStreamEventIdSinceNow, latency,
kFSEventStreamCreateFlagNoDefer | kFSEventStreamCreateFlagFileEvents)
if self._stream_ref is None:
raise IOError("FSEvents. Could not create stream.")
def run(self):
pool = AppKit.NSAutoreleasePool.alloc().init()
self._run_loop = CFRunLoopGetCurrent()
FSEventStreamScheduleWithRunLoop(
self._stream_ref, self._run_loop, kCFRunLoopDefaultMode)
if not FSEventStreamStart(self._stream_ref):
FSEventStreamInvalidate(self._stream_ref)
FSEventStreamRelease(self._stream_ref)
raise IOError("FSEvents. Could not start stream.")
CFRunLoopRun()
FSEventStreamStop(self._stream_ref)
FSEventStreamInvalidate(self._stream_ref)
FSEventStreamRelease(self._stream_ref)
del pool
# Make sure waiting thread is notified
self._queue.put(None)
def stop(self):
if self._run_loop is not None:
CFRunLoopStop(self._run_loop)
def _callback(self, streamRef, clientCallBackInfo, numEvents, eventPaths, eventFlags, eventIDs):
events = [NativeEvent(path, flags, _id) for path, flags, _id in
zip(eventPaths, eventFlags, eventIDs)]
logger.debug("FSEvents callback. Got %d events:" % numEvents)
for e in events:
logger.debug(e)
self._queue.put(events)
def read_events(self):
"""
        Returns a list of one or more events, or None if there are no more
events to be read.
"""
if not self.is_alive():
return None
return self._queue.get()
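# A minimal sketch of driving the low-level client directly (macOS only,
# hypothetical path):
#
#     q = FSEventsQueue(u'/path/to/watch')
#     q.start()                  # FSEventsQueue is a Thread
#     batch = q.read_events()    # blocks; returns None once the thread exits
#     q.stop()
#     q.join()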
class NativeEvent(object):
def __init__(self, path, flags, event_id):
self.path = path
self.flags = flags
self.event_id = event_id
self.is_created = bool(flags & kFSEventStreamEventFlagItemCreated)
self.is_removed = bool(flags & kFSEventStreamEventFlagItemRemoved)
self.is_renamed = bool(flags & kFSEventStreamEventFlagItemRenamed)
self.is_modified = bool(flags & kFSEventStreamEventFlagItemModified)
self.is_change_owner = bool(flags & kFSEventStreamEventFlagItemChangeOwner)
self.is_inode_meta_mod = bool(flags & kFSEventStreamEventFlagItemInodeMetaMod)
self.is_finder_info_mod = bool(flags & kFSEventStreamEventFlagItemFinderInfoMod)
self.is_xattr_mod = bool(flags & kFSEventStreamEventFlagItemXattrMod)
self.is_symlink = bool(flags & kFSEventStreamEventFlagItemIsSymlink)
self.is_directory = bool(flags & kFSEventStreamEventFlagItemIsDir)
@property
def _event_type(self):
if self.is_created:
return "Created"
if self.is_removed:
return "Removed"
if self.is_renamed:
return "Renamed"
if self.is_modified:
return "Modified"
if self.is_inode_meta_mod:
return "InodeMetaMod"
if self.is_xattr_mod:
return "XattrMod"
return "Unknown"
def __repr__(self):
s = "<%s: path=%s, type=%s, is_dir=%s, flags=%s, id=%s>"
return s % (type(self).__name__, repr(self.path), self._event_type,
self.is_directory, hex(self.flags), self.event_id)
class FSEventsEmitter(EventEmitter):
"""
FSEvents based event emitter. Handles conversion of native events.
"""
def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT):
EventEmitter.__init__(self, event_queue, watch, timeout)
self._fsevents = FSEventsQueue(watch.path)
self._fsevents.start()
def on_thread_stop(self):
self._fsevents.stop()
def queue_events(self, timeout):
events = self._fsevents.read_events()
if events is None:
return
i = 0
while i < len(events):
event = events[i]
# For some reason the create and remove flags are sometimes also
# set for rename and modify type events, so let those take
# precedence.
if event.is_renamed:
                # Internal moves appear to always be consecutive in the same
                # buffer and have IDs that differ by exactly one (while others
                # don't), making it possible to pair up the two events coming
                # from a single move operation. (None of this is documented!)
# Otherwise, guess whether file was moved in or out.
# TODO: handle id wrapping
if (i + 1 < len(events) and events[i + 1].is_renamed
and events[i + 1].event_id == event.event_id + 1):
cls = DirMovedEvent if event.is_directory else FileMovedEvent
self.queue_event(cls(event.path, events[i + 1].path))
self.queue_event(DirModifiedEvent(os.path.dirname(event.path)))
self.queue_event(DirModifiedEvent(os.path.dirname(events[i + 1].path)))
i += 1
elif os.path.exists(event.path):
cls = DirCreatedEvent if event.is_directory else FileCreatedEvent
self.queue_event(cls(event.path))
self.queue_event(DirModifiedEvent(os.path.dirname(event.path)))
else:
cls = DirDeletedEvent if event.is_directory else FileDeletedEvent
self.queue_event(cls(event.path))
self.queue_event(DirModifiedEvent(os.path.dirname(event.path)))
# TODO: generate events for tree
            elif event.is_modified or event.is_inode_meta_mod or event.is_xattr_mod:
cls = DirModifiedEvent if event.is_directory else FileModifiedEvent
self.queue_event(cls(event.path))
elif event.is_created:
cls = DirCreatedEvent if event.is_directory else FileCreatedEvent
self.queue_event(cls(event.path))
self.queue_event(DirModifiedEvent(os.path.dirname(event.path)))
elif event.is_removed:
cls = DirDeletedEvent if event.is_directory else FileDeletedEvent
self.queue_event(cls(event.path))
self.queue_event(DirModifiedEvent(os.path.dirname(event.path)))
i += 1
class FSEventsObserver2(BaseObserver):
def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT):
BaseObserver.__init__(self, emitter_class=FSEventsEmitter, timeout=timeout)
| 9,142 | Python | 36.016194 | 100 | 0.646467 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/observers/kqueue.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.observers.kqueue
:synopsis: ``kqueue(2)`` based emitter implementation.
:author: [email protected] (Yesudeep Mangalapilly)
:platforms: Mac OS X and BSD with kqueue(2).
.. WARNING:: kqueue is a very heavyweight way to monitor file systems.
Each kqueue-detected directory modification triggers
a full directory scan. Traversing the entire directory tree
and opening file descriptors for all files will create
performance problems. We need to find a way to re-scan
only those directories which report changes and do a diff
between two sub-DirectorySnapshots perhaps.
.. ADMONITION:: About OS X performance guidelines
Quote from the `Mac OS X File System Performance Guidelines`_:
"When you only want to track changes on a file or directory, be sure to
open it using the ``O_EVTONLY`` flag. This flag prevents the file or
directory from being marked as open or in use. This is important
if you are tracking files on a removable volume and the user tries to
unmount the volume. With this flag in place, the system knows it can
dismiss the volume. If you had opened the files or directories without
this flag, the volume would be marked as busy and would not be
unmounted."
``O_EVTONLY`` is defined as ``0x8000`` in the OS X header files.
More information here: http://www.mlsite.net/blog/?p=2312
Classes
-------
.. autoclass:: KqueueEmitter
:members:
:show-inheritance:
Collections and Utility Classes
-------------------------------
.. autoclass:: KeventDescriptor
:members:
:show-inheritance:
.. autoclass:: KeventDescriptorSet
:members:
:show-inheritance:
.. _Mac OS X File System Performance Guidelines:
http://developer.apple.com/library/ios/#documentation/Performance/Conceptual/FileSystem/Articles/TrackingChanges.html#//apple_ref/doc/uid/20001993-CJBJFIDD
"""
from __future__ import with_statement
from watchdog.utils import platform
import threading
import errno
from stat import S_ISDIR
import os
import os.path
import select
from pathtools.path import absolute_path
from watchdog.observers.api import (
BaseObserver,
EventEmitter,
DEFAULT_OBSERVER_TIMEOUT,
DEFAULT_EMITTER_TIMEOUT
)
from watchdog.utils import stat as default_stat
from watchdog.utils.dirsnapshot import DirectorySnapshot
from watchdog.events import (
DirMovedEvent,
DirDeletedEvent,
DirCreatedEvent,
DirModifiedEvent,
FileMovedEvent,
FileDeletedEvent,
FileCreatedEvent,
FileModifiedEvent,
EVENT_TYPE_MOVED,
EVENT_TYPE_DELETED,
EVENT_TYPE_CREATED,
generate_sub_moved_events,
)
# Maximum number of events to process.
MAX_EVENTS = 4096
# O_EVTONLY value from the header files for OS X only.
O_EVTONLY = 0x8000
# Pre-calculated values for the kevent filter, flags, and fflags attributes.
if platform.is_darwin():
WATCHDOG_OS_OPEN_FLAGS = O_EVTONLY
else:
WATCHDOG_OS_OPEN_FLAGS = os.O_RDONLY | os.O_NONBLOCK
WATCHDOG_KQ_FILTER = select.KQ_FILTER_VNODE
WATCHDOG_KQ_EV_FLAGS = select.KQ_EV_ADD | select.KQ_EV_ENABLE | select.KQ_EV_CLEAR
WATCHDOG_KQ_FFLAGS = (
select.KQ_NOTE_DELETE
| select.KQ_NOTE_WRITE
| select.KQ_NOTE_EXTEND
| select.KQ_NOTE_ATTRIB
| select.KQ_NOTE_LINK
| select.KQ_NOTE_RENAME
| select.KQ_NOTE_REVOKE
)
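# A minimal sketch of how these constants feed the standard-library kqueue
# API (essentially what KeventDescriptor and KqueueEmitter below do):
#
#     fd = os.open('/path/to/entry', WATCHDOG_OS_OPEN_FLAGS)
#     kev = select.kevent(fd, filter=WATCHDOG_KQ_FILTER,
#                         flags=WATCHDOG_KQ_EV_FLAGS,
#                         fflags=WATCHDOG_KQ_FFLAGS)
#     kq = select.kqueue()
#     events = kq.control([kev], MAX_EVENTS, 1.0)   # 1 second timeout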
# Flag tests.
def is_deleted(kev):
"""Determines whether the given kevent represents deletion."""
return kev.fflags & select.KQ_NOTE_DELETE
def is_modified(kev):
"""Determines whether the given kevent represents modification."""
fflags = kev.fflags
return (fflags & select.KQ_NOTE_EXTEND) or (fflags & select.KQ_NOTE_WRITE)
def is_attrib_modified(kev):
"""Determines whether the given kevent represents attribute modification."""
return kev.fflags & select.KQ_NOTE_ATTRIB
def is_renamed(kev):
"""Determines whether the given kevent represents movement."""
return kev.fflags & select.KQ_NOTE_RENAME
class KeventDescriptorSet(object):
"""
Thread-safe kevent descriptor collection.
"""
def __init__(self):
# Set of KeventDescriptor
self._descriptors = set()
# Descriptor for a given path.
self._descriptor_for_path = dict()
# Descriptor for a given fd.
self._descriptor_for_fd = dict()
# List of kevent objects.
self._kevents = list()
self._lock = threading.Lock()
@property
def kevents(self):
"""
List of kevents monitored.
"""
with self._lock:
return self._kevents
@property
def paths(self):
"""
List of paths for which kevents have been created.
"""
with self._lock:
return list(self._descriptor_for_path.keys())
def get_for_fd(self, fd):
"""
Given a file descriptor, returns the kevent descriptor object
for it.
:param fd:
OS file descriptor.
:type fd:
``int``
:returns:
A :class:`KeventDescriptor` object.
"""
with self._lock:
return self._descriptor_for_fd[fd]
def get(self, path):
"""
Obtains a :class:`KeventDescriptor` object for the specified path.
:param path:
Path for which the descriptor will be obtained.
"""
with self._lock:
path = absolute_path(path)
return self._get(path)
def __contains__(self, path):
"""
        Determines whether a :class:`KeventDescriptor` has been registered
for the specified path.
:param path:
Path for which the descriptor will be obtained.
"""
with self._lock:
path = absolute_path(path)
return self._has_path(path)
def add(self, path, is_directory):
"""
Adds a :class:`KeventDescriptor` to the collection for the given
path.
:param path:
The path for which a :class:`KeventDescriptor` object will be
added.
:param is_directory:
``True`` if the path refers to a directory; ``False`` otherwise.
:type is_directory:
``bool``
"""
with self._lock:
path = absolute_path(path)
if not self._has_path(path):
self._add_descriptor(KeventDescriptor(path, is_directory))
def remove(self, path):
"""
Removes the :class:`KeventDescriptor` object for the given path
if it already exists.
:param path:
Path for which the :class:`KeventDescriptor` object will be
removed.
"""
with self._lock:
path = absolute_path(path)
if self._has_path(path):
self._remove_descriptor(self._get(path))
def clear(self):
"""
Clears the collection and closes all open descriptors.
"""
with self._lock:
for descriptor in self._descriptors:
descriptor.close()
self._descriptors.clear()
self._descriptor_for_fd.clear()
self._descriptor_for_path.clear()
self._kevents = []
# Thread-unsafe methods. Locking is provided at a higher level.
def _get(self, path):
"""Returns a kevent descriptor for a given path."""
return self._descriptor_for_path[path]
def _has_path(self, path):
"""Determines whether a :class:`KeventDescriptor` for the specified
path exists already in the collection."""
return path in self._descriptor_for_path
def _add_descriptor(self, descriptor):
"""
Adds a descriptor to the collection.
:param descriptor:
An instance of :class:`KeventDescriptor` to be added.
"""
self._descriptors.add(descriptor)
self._kevents.append(descriptor.kevent)
self._descriptor_for_path[descriptor.path] = descriptor
self._descriptor_for_fd[descriptor.fd] = descriptor
def _remove_descriptor(self, descriptor):
"""
Removes a descriptor from the collection.
:param descriptor:
An instance of :class:`KeventDescriptor` to be removed.
"""
self._descriptors.remove(descriptor)
del self._descriptor_for_fd[descriptor.fd]
del self._descriptor_for_path[descriptor.path]
self._kevents.remove(descriptor.kevent)
descriptor.close()
class KeventDescriptor(object):
"""
A kevent descriptor convenience data structure to keep together:
* kevent
* directory status
* path
* file descriptor
:param path:
Path string for which a kevent descriptor will be created.
:param is_directory:
``True`` if the path refers to a directory; ``False`` otherwise.
:type is_directory:
``bool``
"""
def __init__(self, path, is_directory):
self._path = absolute_path(path)
self._is_directory = is_directory
self._fd = os.open(path, WATCHDOG_OS_OPEN_FLAGS)
self._kev = select.kevent(self._fd,
filter=WATCHDOG_KQ_FILTER,
flags=WATCHDOG_KQ_EV_FLAGS,
fflags=WATCHDOG_KQ_FFLAGS)
@property
def fd(self):
"""OS file descriptor for the kevent descriptor."""
return self._fd
@property
def path(self):
"""The path associated with the kevent descriptor."""
return self._path
@property
def kevent(self):
"""The kevent object associated with the kevent descriptor."""
return self._kev
@property
def is_directory(self):
"""Determines whether the kevent descriptor refers to a directory.
:returns:
``True`` or ``False``
"""
return self._is_directory
def close(self):
"""
Closes the file descriptor associated with a kevent descriptor.
"""
try:
os.close(self.fd)
except OSError:
pass
@property
def key(self):
return (self.path, self.is_directory)
def __eq__(self, descriptor):
return self.key == descriptor.key
def __ne__(self, descriptor):
return self.key != descriptor.key
def __hash__(self):
return hash(self.key)
def __repr__(self):
return "<%s: path=%s, is_directory=%s>"\
% (type(self).__name__, self.path, self.is_directory)
class KqueueEmitter(EventEmitter):
"""
kqueue(2)-based event emitter.
.. ADMONITION:: About ``kqueue(2)`` behavior and this implementation
``kqueue(2)`` monitors file system events only for
        open descriptors, which means this emitter does a lot of
book-keeping behind the scenes to keep track of open
descriptors for every entry in the monitored directory tree.
This also means the number of maximum open file descriptors
on your system must be increased **manually**.
Usually, issuing a call to ``ulimit`` should suffice::
ulimit -n 1024
Ensure that you pick a number that is larger than the
number of files you expect to be monitored.
``kqueue(2)`` does not provide enough information about the
following things:
* The destination path of a file or directory that is renamed.
* Creation of a file or directory within a directory; in this
case, ``kqueue(2)`` only indicates a modified event on the
parent directory.
Therefore, this emitter takes a snapshot of the directory
tree when ``kqueue(2)`` detects a change on the file system
to be able to determine the above information.
:param event_queue:
The event queue to fill with events.
:param watch:
A watch object representing the directory to monitor.
:type watch:
:class:`watchdog.observers.api.ObservedWatch`
:param timeout:
Read events blocking timeout (in seconds).
:type timeout:
``float``
:param stat: stat function. See ``os.stat`` for details.
"""
def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT,
stat=default_stat):
EventEmitter.__init__(self, event_queue, watch, timeout)
self._kq = select.kqueue()
self._lock = threading.RLock()
# A collection of KeventDescriptor.
self._descriptors = KeventDescriptorSet()
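        # Wrapping the stat function lets the initial DirectorySnapshot
        # walk double as registration: every path it stat()s gets a
        # kevent descriptor, seeding the watch with the existing tree.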
def custom_stat(path, self=self):
stat_info = stat(path)
self._register_kevent(path, S_ISDIR(stat_info.st_mode))
return stat_info
self._snapshot = DirectorySnapshot(watch.path,
recursive=watch.is_recursive,
stat=custom_stat)
def _register_kevent(self, path, is_directory):
"""
Registers a kevent descriptor for the given path.
:param path:
Path for which a kevent descriptor will be created.
:param is_directory:
``True`` if the path refers to a directory; ``False`` otherwise.
:type is_directory:
``bool``
"""
try:
self._descriptors.add(path, is_directory)
except OSError as e:
if e.errno == errno.ENOENT:
# Probably dealing with a temporary file that was created
# and then quickly deleted before we could open
# a descriptor for it. Therefore, simply queue a sequence
# of created and deleted events for the path.
# path = absolute_path(path)
# if is_directory:
# self.queue_event(DirCreatedEvent(path))
# self.queue_event(DirDeletedEvent(path))
# else:
# self.queue_event(FileCreatedEvent(path))
# self.queue_event(FileDeletedEvent(path))
# TODO: We could simply ignore these files.
# Locked files cause the python process to die with
# a bus error when we handle temporary files.
# eg. .git/index.lock when running tig operations.
# I don't fully understand this at the moment.
pass
elif e.errno == errno.EOPNOTSUPP:
                # Probably dealing with a socket or a special file
                # mounted through a file system that does not support
                # access to it (e.g. NFS). On BSD systems, look up
                # EOPNOTSUPP in man 2 open.
pass
else:
# All other errors are propagated.
raise
def _unregister_kevent(self, path):
"""
Convenience function to close the kevent descriptor for a
specified kqueue-monitored path.
:param path:
Path for which the kevent descriptor will be closed.
"""
self._descriptors.remove(path)
def queue_event(self, event):
"""
Handles queueing a single event object.
:param event:
An instance of :class:`watchdog.events.FileSystemEvent`
or a subclass.
"""
# Handles all the book keeping for queued events.
# We do not need to fire moved/deleted events for all subitems in
# a directory tree here, because this function is called by kqueue
# for all those events anyway.
EventEmitter.queue_event(self, event)
if event.event_type == EVENT_TYPE_CREATED:
self._register_kevent(event.src_path, event.is_directory)
elif event.event_type == EVENT_TYPE_MOVED:
self._unregister_kevent(event.src_path)
self._register_kevent(event.dest_path, event.is_directory)
elif event.event_type == EVENT_TYPE_DELETED:
self._unregister_kevent(event.src_path)
def _gen_kqueue_events(self,
kev,
ref_snapshot,
new_snapshot):
"""
Generate events from the kevent list returned from the call to
:meth:`select.kqueue.control`.
.. NOTE:: kqueue only tells us about deletions, file modifications,
attribute modifications. The other events, namely,
file creation, directory modification, file rename,
directory rename, directory creation, etc. are
determined by comparing directory snapshots.
"""
descriptor = self._descriptors.get_for_fd(kev.ident)
src_path = descriptor.path
if is_renamed(kev):
            # kqueue does not tell us the destination name of a rename,
            # so we have to determine it by comparing snapshots of the
            # directory taken before and after the event.
for event in self._gen_renamed_events(src_path,
descriptor.is_directory,
ref_snapshot,
new_snapshot):
yield event
elif is_attrib_modified(kev):
if descriptor.is_directory:
yield DirModifiedEvent(src_path)
else:
yield FileModifiedEvent(src_path)
elif is_modified(kev):
if descriptor.is_directory:
if self.watch.is_recursive or self.watch.path == src_path:
# When a directory is modified, it may be due to
# sub-file/directory renames or new file/directory
# creation. We determine all this by comparing
# snapshots later.
yield DirModifiedEvent(src_path)
else:
yield FileModifiedEvent(src_path)
elif is_deleted(kev):
if descriptor.is_directory:
yield DirDeletedEvent(src_path)
else:
yield FileDeletedEvent(src_path)
def _parent_dir_modified(self, src_path):
"""
Helper to generate a DirModifiedEvent on the parent of src_path.
"""
return DirModifiedEvent(os.path.dirname(src_path))
def _gen_renamed_events(self,
src_path,
is_directory,
ref_snapshot,
new_snapshot):
"""
Compares information from two directory snapshots (one taken before
the rename operation and another taken right after) to determine the
destination path of the file system object renamed, and yields
the appropriate events to be queued.
"""
try:
f_inode = ref_snapshot.inode(src_path)
except KeyError:
# Probably caught a temporary file/directory that was renamed
# and deleted. Fires a sequence of created and deleted events
# for the path.
if is_directory:
yield DirCreatedEvent(src_path)
yield DirDeletedEvent(src_path)
else:
yield FileCreatedEvent(src_path)
yield FileDeletedEvent(src_path)
# We don't process any further and bail out assuming
# the event represents deletion/creation instead of movement.
return
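        # Find the path that owns the same inode in the new snapshot; a
        # match means the object was moved rather than deleted.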
dest_path = new_snapshot.path(f_inode)
if dest_path is not None:
dest_path = absolute_path(dest_path)
if is_directory:
event = DirMovedEvent(src_path, dest_path)
yield event
else:
yield FileMovedEvent(src_path, dest_path)
yield self._parent_dir_modified(src_path)
yield self._parent_dir_modified(dest_path)
if is_directory:
# TODO: Do we need to fire moved events for the items
                # inside the directory tree? Does kqueue do this
# all by itself? Check this and then enable this code
# only if it doesn't already.
# A: It doesn't. So I've enabled this block.
if self.watch.is_recursive:
for sub_event in generate_sub_moved_events(src_path, dest_path):
yield sub_event
else:
# If the new snapshot does not have an inode for the
# old path, we haven't found the new name. Therefore,
            # we mark it as deleted and unregister the path.
if is_directory:
yield DirDeletedEvent(src_path)
else:
yield FileDeletedEvent(src_path)
yield self._parent_dir_modified(src_path)
def _read_events(self, timeout=None):
"""
Reads events from a call to the blocking
:meth:`select.kqueue.control()` method.
:param timeout:
Blocking timeout for reading events.
:type timeout:
``float`` (seconds)
"""
return self._kq.control(self._descriptors.kevents,
MAX_EVENTS,
timeout)
def queue_events(self, timeout):
"""
Queues events by reading them from a call to the blocking
:meth:`select.kqueue.control()` method.
:param timeout:
Blocking timeout for reading events.
:type timeout:
``float`` (seconds)
"""
with self._lock:
try:
event_list = self._read_events(timeout)
# TODO: investigate why order appears to be reversed
event_list.reverse()
# Take a fresh snapshot of the directory and update the
# saved snapshot.
new_snapshot = DirectorySnapshot(self.watch.path,
self.watch.is_recursive)
ref_snapshot = self._snapshot
self._snapshot = new_snapshot
diff_events = new_snapshot - ref_snapshot
# Process events
for directory_created in diff_events.dirs_created:
self.queue_event(DirCreatedEvent(directory_created))
for file_created in diff_events.files_created:
self.queue_event(FileCreatedEvent(file_created))
for file_modified in diff_events.files_modified:
self.queue_event(FileModifiedEvent(file_modified))
for kev in event_list:
for event in self._gen_kqueue_events(kev,
ref_snapshot,
new_snapshot):
self.queue_event(event)
except OSError as e:
if e.errno != errno.EBADF:
raise
def on_thread_stop(self):
# Clean up.
with self._lock:
self._descriptors.clear()
self._kq.close()
class KqueueObserver(BaseObserver):
"""
Observer thread that schedules watching directories and dispatches
calls to event handlers.
"""
def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT):
BaseObserver.__init__(self, emitter_class=KqueueEmitter, timeout=timeout)
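# A minimal usage sketch, assuming a macOS/BSD host where kqueue(2) is
# available: it watches the current directory and prints every event.
# The watched path and the poll interval are arbitrary choices.
if __name__ == "__main__":
    import time as _time
    from watchdog.events import FileSystemEventHandler
    class _PrintHandler(FileSystemEventHandler):
        def on_any_event(self, event):
            # Report the event type and the affected path.
            print(event.event_type, event.src_path)
    observer = KqueueObserver()
    observer.schedule(_PrintHandler(), ".", recursive=True)
    observer.start()
    try:
        while True:
            _time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()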
| 24,355 | Python | 33.449788 | 159 | 0.585424 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/observers/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.observers
:synopsis: Observer that picks a native implementation if available.
:author: [email protected] (Yesudeep Mangalapilly)
Classes
=======
.. autoclass:: Observer
:members:
:show-inheritance:
:inherited-members:
Observer thread that schedules watching directories and dispatches
calls to event handlers.
You can also import platform specific classes directly and use them
instead of :class:`Observer`. Here is a list of implemented observer
classes:
============== ================================ ==============================
Class Platforms Note
============== ================================ ==============================
|Inotify| Linux 2.6.13+ ``inotify(7)`` based observer
|FSEvents| Mac OS X FSEvents based observer
|Kqueue| Mac OS X and BSD with kqueue(2) ``kqueue(2)`` based observer
|WinApi| MS Windows Windows API-based observer
|Polling| Any fallback implementation
============== ================================ ==============================
.. |Inotify| replace:: :class:`.inotify.InotifyObserver`
.. |FSEvents| replace:: :class:`.fsevents.FSEventsObserver`
.. |Kqueue| replace:: :class:`.kqueue.KqueueObserver`
.. |WinApi| replace:: :class:`.read_directory_changes.WindowsApiObserver`
.. |WinApiAsync| replace:: :class:`.read_directory_changes_async.WindowsApiAsyncObserver`
.. |Polling| replace:: :class:`.polling.PollingObserver`
"""
import warnings
from watchdog.utils import platform
from watchdog.utils import UnsupportedLibc
if platform.is_linux():
try:
from .inotify import InotifyObserver as Observer
except UnsupportedLibc:
from .polling import PollingObserver as Observer
elif platform.is_darwin():
try:
from .fsevents import FSEventsObserver as Observer
except Exception:
try:
from .kqueue import KqueueObserver as Observer
warnings.warn("Failed to import fsevents. Fall back to kqueue")
except Exception:
from .polling import PollingObserver as Observer
warnings.warn("Failed to import fsevents and kqueue. Fall back to polling.")
elif platform.is_bsd():
from .kqueue import KqueueObserver as Observer
elif platform.is_windows():
# TODO: find a reliable way of checking Windows version and import
# polling explicitly for Windows XP
try:
from .read_directory_changes import WindowsApiObserver as Observer
except Exception:
from .polling import PollingObserver as Observer
warnings.warn("Failed to import read_directory_changes. Fall back to polling.")
else:
from .polling import PollingObserver as Observer
__all__ = ["Observer"]
| 3,528 | Python | 36.542553 | 89 | 0.649093 |
omniverse-code/kit/exts/omni.kit.pip_archive/pip_prebundle/watchdog/observers/read_directory_changes.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <[email protected]>
# Copyright 2012 Google, Inc.
# Copyright 2014 Thomas Amland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
import os.path
import time
from watchdog.events import (
DirCreatedEvent,
DirMovedEvent,
DirModifiedEvent,
FileCreatedEvent,
FileDeletedEvent,
FileMovedEvent,
FileModifiedEvent,
generate_sub_moved_events,
generate_sub_created_events,
)
from watchdog.observers.api import (
EventEmitter,
BaseObserver,
DEFAULT_OBSERVER_TIMEOUT,
DEFAULT_EMITTER_TIMEOUT
)
from watchdog.observers.winapi import (
read_events,
get_directory_handle,
close_directory_handle,
)
# HACK: forced delay (in seconds) before traversing a moved directory
# tree; see the rename handling in WindowsApiEmitter.queue_events below.
WATCHDOG_TRAVERSE_MOVED_DIR_DELAY = 1 # seconds
class WindowsApiEmitter(EventEmitter):
"""
Windows API-based emitter that uses ReadDirectoryChangesW
to detect file system changes for a watch.
"""
def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT):
EventEmitter.__init__(self, event_queue, watch, timeout)
self._lock = threading.Lock()
self._handle = None
def on_thread_start(self):
self._handle = get_directory_handle(self.watch.path)
def on_thread_stop(self):
if self._handle:
close_directory_handle(self._handle)
def _read_events(self):
return read_events(self._handle, self.watch.path, self.watch.is_recursive)
def queue_events(self, timeout):
winapi_events = self._read_events()
with self._lock:
last_renamed_src_path = ""
for winapi_event in winapi_events:
src_path = os.path.join(self.watch.path, winapi_event.src_path)
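                # ReadDirectoryChangesW reports a rename as a pair of
                # records: an "old name" record carrying the source path
                # followed by a "new name" record carrying the destination,
                # so remember the former to stitch the pair into one move.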
if winapi_event.is_renamed_old:
last_renamed_src_path = src_path
elif winapi_event.is_renamed_new:
dest_path = src_path
src_path = last_renamed_src_path
if os.path.isdir(dest_path):
event = DirMovedEvent(src_path, dest_path)
if self.watch.is_recursive:
# HACK: We introduce a forced delay before
# traversing the moved directory. This will read
# only file movement that finishes within this
# delay time.
time.sleep(WATCHDOG_TRAVERSE_MOVED_DIR_DELAY)
# The following block of code may not
# obtain moved events for the entire tree if
# the I/O is not completed within the above
# delay time. So, it's not guaranteed to work.
# TODO: Come up with a better solution, possibly
# a way to wait for I/O to complete before
# queuing events.
for sub_moved_event in generate_sub_moved_events(src_path, dest_path):
self.queue_event(sub_moved_event)
self.queue_event(event)
else:
self.queue_event(FileMovedEvent(src_path, dest_path))
elif winapi_event.is_modified:
cls = DirModifiedEvent if os.path.isdir(src_path) else FileModifiedEvent
self.queue_event(cls(src_path))
elif winapi_event.is_added:
isdir = os.path.isdir(src_path)
cls = DirCreatedEvent if isdir else FileCreatedEvent
self.queue_event(cls(src_path))
if isdir and self.watch.is_recursive:
# If a directory is moved from outside the watched folder to inside it
# we only get a created directory event out of it, not any events for its children
# so use the same hack as for file moves to get the child events
time.sleep(WATCHDOG_TRAVERSE_MOVED_DIR_DELAY)
sub_events = generate_sub_created_events(src_path)
for sub_created_event in sub_events:
self.queue_event(sub_created_event)
elif winapi_event.is_removed:
self.queue_event(FileDeletedEvent(src_path))
elif winapi_event.is_removed_self:
self.stop()
class WindowsApiObserver(BaseObserver):
"""
Observer thread that schedules watching directories and dispatches
calls to event handlers.
"""
def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT):
BaseObserver.__init__(self, emitter_class=WindowsApiEmitter,
timeout=timeout)
| 5,381 | Python | 38.284671 | 106 | 0.586508 |