from .master_async import MasterAsync, trans_int_to_float
import time
import json
import asyncio
import aioredis
slave_id = 1
class DeviceAsync:
    def __init__(self, *, modbus_ip: str = "127.0.0.1", modbus_port: int = 502, device_id, address, length) -> None:
self.device_id = device_id
self.master = MasterAsync(modbus_ip, modbus_port)
self.address = address
self.length = length
        self.redis = None  # Redis connection
    async def connect_redis(self, redis_ip: str = "127.0.0.1", redis_port: int = 6379, redis_db: int = 1):
        """
        Connect to Redis
        """
        if self.redis is None:
            address = f"redis://{redis_ip}:{redis_port}/{redis_db}"
            self.redis = await aioredis.from_url(address)
    async def write_to_redis(self, *, redis_ip: str = "127.0.0.1", redis_port: int = 6379, redis_db: int = 1, controls, freeq_seconds: float = 0.04, debug: bool = False):
        """
        Read data from Modbus and write it to Redis
        controls: {control name: the number of values this variable occupies in the decoded sequence}
        """
await self.connect_redis(redis_ip=redis_ip, redis_port=redis_port, redis_db=redis_db)
start = time.time()
        # read data from Modbus
        await asyncio.sleep(freeq_seconds)
        data = []
        # print("Collecting:", 1, 3, self.address, self.length)
        try:
            data = await self.master.execute(1, 3, self.address, self.length)
        except Exception as e:
            print(e)
        # print("Raw data read:", data)
        values = trans_int_to_float(data)
        self.raw = {}  # raw values (high-frequency data)
        self.data_ = {}  # averaged values
        def mean(value_list):
            """return the arithmetic mean of the list"""
            if value_list is None or len(value_list) == 0:
                return 0
            total = 0
            for i in value_list:
                total += i
            return total / len(value_list)
count = 0
for k, v in controls.items():
            # slice this control's values out of the decoded sequence
            self.raw[k] = values[count: count + v]
            self.data_[k] = mean(values[count: count + v])
            count += v  # advance the cursor past this control's values
        # store the results in Redis
await self.redis.set(f"{self.device_id}_control_raw", json.dumps(self.raw))
await self.redis.set(f"{self.device_id}_control_data", json.dumps(self.data_))
        if debug:
            end = time.time()
            print("Single read took: ", end - start)
            print(f"Data collected by device {self.device_id}:", self.data_)
            print(f"High-frequency data collected by device {self.device_id}:", self.raw)
        return self.raw, self.data_

# --- end of zdpapi_modbus/device_async.py (zdpapi-modbus 1.7.1) ---
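A usage sketch, not part of the package, showing one way DeviceAsync might be driven end to end. It assumes a Modbus slave on 127.0.0.1:502 exposing 8 registers (4 floats) at address 0, a local Redis, and hypothetical control names; trans_int_to_float is assumed to turn two registers into one float.

import asyncio
from zdpapi_modbus.device_async import DeviceAsync

async def main():
    # 8 registers starting at address 0 -> 4 float values (assumed layout)
    device = DeviceAsync(device_id=1, address=0, length=8)
    # each control averages 2 consecutive floats (hypothetical names)
    controls = {"temperature": 2, "pressure": 2}
    raw, data = await device.write_to_redis(controls=controls, debug=True)
    print(raw, data)

asyncio.run(main())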
import socket
import select
import struct
from .hooks import call_hooks
from .modbus import (
Databank, Master, Query, Server,
InvalidArgumentError, ModbusInvalidResponseError, ModbusInvalidRequestError
)
from .utils import threadsafe_function, flush_socket, to_data, LOGGER
# -------------------------------------------------------------------------------
class ModbusInvalidMbapError(Exception):
"""Exception raised when the modbus TCP header doesn't correspond to what is expected"""
def __init__(self, value):
Exception.__init__(self, value)
# -------------------------------------------------------------------------------
class TcpMbap(object):
"""Defines the information added by the Modbus TCP layer"""
def __init__(self):
"""Constructor: initializes with 0"""
self.transaction_id = 0
self.protocol_id = 0
self.length = 0
self.unit_id = 0
def clone(self, mbap):
"""Set the value of each fields from another TcpMbap instance"""
self.transaction_id = mbap.transaction_id
self.protocol_id = mbap.protocol_id
self.length = mbap.length
self.unit_id = mbap.unit_id
def _check_ids(self, request_mbap):
"""
Check that the ids in the request and the response are similar.
if not returns a string describing the error
"""
error_str = ""
if request_mbap.transaction_id != self.transaction_id:
error_str += "Invalid transaction id: request={0} - response={1}. ".format(
request_mbap.transaction_id, self.transaction_id)
if request_mbap.protocol_id != self.protocol_id:
error_str += "Invalid protocol id: request={0} - response={1}. ".format(
request_mbap.protocol_id, self.protocol_id
)
if request_mbap.unit_id != self.unit_id:
error_str += "Invalid unit id: request={0} - response={1}. ".format(request_mbap.unit_id, self.unit_id)
return error_str
    def check_length(self, pdu_length):
        """Check that the length field is valid. If not, return an error string describing the problem"""
following_bytes_length = pdu_length + 1
if self.length != following_bytes_length:
return "Response length is {0} while receiving {1} bytes. ".format(self.length, following_bytes_length)
return ""
def check_response(self, request_mbap, response_pdu_length):
"""Check that the MBAP of the response is valid. If not raise an exception"""
error_str = self._check_ids(request_mbap)
error_str += self.check_length(response_pdu_length)
if len(error_str) > 0:
raise ModbusInvalidMbapError(error_str)
def pack(self):
"""convert the TCP mbap into a string"""
return struct.pack(">HHHB", self.transaction_id, self.protocol_id, self.length, self.unit_id)
def unpack(self, value):
"""extract the TCP mbap from a string"""
(self.transaction_id, self.protocol_id, self.length, self.unit_id) = struct.unpack(">HHHB", value)
class TcpQuery(Query):
"""Subclass of a Query. Adds the Modbus TCP specific part of the protocol"""
# static variable for giving a unique id to each query
_last_transaction_id = 0
def __init__(self):
"""Constructor"""
super(TcpQuery, self).__init__()
self._request_mbap = TcpMbap()
self._response_mbap = TcpMbap()
@threadsafe_function
def _get_transaction_id(self):
"""returns an identifier for the query"""
if TcpQuery._last_transaction_id < 0xffff:
TcpQuery._last_transaction_id += 1
else:
TcpQuery._last_transaction_id = 0
return TcpQuery._last_transaction_id
def build_request(self, pdu, slave):
"""Add the Modbus TCP part to the request"""
if (slave < 0) or (slave > 255):
raise InvalidArgumentError("{0} Invalid value for slave id".format(slave))
self._request_mbap.length = len(pdu) + 1
self._request_mbap.transaction_id = self._get_transaction_id()
self._request_mbap.unit_id = slave
mbap = self._request_mbap.pack()
return mbap + pdu
def parse_response(self, response):
"""Extract the pdu from the Modbus TCP response"""
if len(response) > 6:
mbap, pdu = response[:7], response[7:]
self._response_mbap.unpack(mbap)
self._response_mbap.check_response(self._request_mbap, len(pdu))
return pdu
else:
raise ModbusInvalidResponseError("Response length is only {0} bytes. ".format(len(response)))
def parse_request(self, request):
"""Extract the pdu from a modbus request"""
if len(request) > 6:
mbap, pdu = request[:7], request[7:]
self._request_mbap.unpack(mbap)
error_str = self._request_mbap.check_length(len(pdu))
if len(error_str) > 0:
raise ModbusInvalidMbapError(error_str)
return self._request_mbap.unit_id, pdu
else:
raise ModbusInvalidRequestError("Request length is only {0} bytes. ".format(len(request)))
def build_response(self, response_pdu):
"""Build the response"""
self._response_mbap.clone(self._request_mbap)
self._response_mbap.length = len(response_pdu) + 1
return self._response_mbap.pack() + response_pdu
class TcpMaster(Master):
"""Subclass of Master. Implements the Modbus TCP MAC layer"""
def __init__(self, host="127.0.0.1", port=502, timeout_in_sec=5.0):
"""Constructor. Set the communication settings"""
super(TcpMaster, self).__init__(timeout_in_sec)
self._host = host
self._port = port
self._sock = None
def _do_open(self):
"""Connect to the Modbus slave"""
if self._sock:
self._sock.close()
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.set_timeout(self.get_timeout())
self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
call_hooks("modbus_tcp.TcpMaster.before_connect", (self,))
self._sock.connect((self._host, self._port))
call_hooks("modbus_tcp.TcpMaster.after_connect", (self,))
def _do_close(self):
"""Close the connection with the Modbus Slave"""
if self._sock:
call_hooks("modbus_tcp.TcpMaster.before_close", (self,))
self._sock.close()
call_hooks("modbus_tcp.TcpMaster.after_close", (self,))
self._sock = None
return True
def set_timeout(self, timeout_in_sec):
"""Change the timeout value"""
super(TcpMaster, self).set_timeout(timeout_in_sec)
if self._sock:
self._sock.setblocking(timeout_in_sec > 0)
if timeout_in_sec:
self._sock.settimeout(timeout_in_sec)
def _send(self, request):
"""Send request to the slave"""
retval = call_hooks("modbus_tcp.TcpMaster.before_send", (self, request))
if retval is not None:
request = retval
try:
flush_socket(self._sock, 3)
except Exception as msg:
            # if we can't flush the socket successfully, a disconnection may have happened:
            # try to reconnect
LOGGER.error('Error while flushing the socket: {0}'.format(msg))
self._do_open()
self._sock.send(request)
def _recv(self, expected_length=-1):
response = to_data('')
length = 255
while len(response) < length:
rcv_byte = self._sock.recv(1)
if rcv_byte:
response += rcv_byte
if len(response) == 6:
to_be_recv_length = struct.unpack(">HHH", response)[2]
length = to_be_recv_length + 6
else:
break
retval = call_hooks("modbus_tcp.TcpMaster.after_recv", (self, response))
if retval is not None:
return retval
return response
def _make_query(self):
return TcpQuery()
class TcpServer(Server):
def __init__(self, port=502, address='', timeout_in_sec=1, databank=None, error_on_missing_slave=True):
databank = databank if databank else Databank(error_on_missing_slave=error_on_missing_slave)
super(TcpServer, self).__init__(databank)
self._sock = None
self._sa = (address, port)
self._timeout_in_sec = timeout_in_sec
self._sockets = []
def _make_query(self):
return TcpQuery()
def _get_request_length(self, mbap):
if len(mbap) < 6:
            raise ModbusInvalidRequestError("The mbap is only %d bytes long" % len(mbap))
length = struct.unpack(">HHH", mbap[:6])[2]
return length
def _do_init(self):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if self._timeout_in_sec:
self._sock.settimeout(self._timeout_in_sec)
self._sock.setblocking(0)
self._sock.bind(self._sa)
self._sock.listen(10)
self._sockets.append(self._sock)
    def _do_exit(self):
        # iterate over a copy: removing items from the list while iterating would skip sockets
        for sock in list(self._sockets):
            try:
                sock.close()
                self._sockets.remove(sock)
            except Exception as msg:
                LOGGER.warning("Error while closing socket, Exception occurred: %s", msg)
self._sock.close()
self._sock = None
def _do_run(self):
inputready = select.select(self._sockets, [], [], 1.0)[0]
        # handle data on each socket
for sock in inputready:
try:
if sock == self._sock:
# handle the server socket
client, address = self._sock.accept()
client.setblocking(0)
LOGGER.info("%s is connected with socket %d...", str(address), client.fileno())
self._sockets.append(client)
call_hooks("modbus_tcp.TcpServer.on_connect", (self, client, address))
else:
if len(sock.recv(1, socket.MSG_PEEK)) == 0:
# socket is disconnected
LOGGER.info("%d is disconnected" % (sock.fileno()))
call_hooks("modbus_tcp.TcpServer.on_disconnect", (self, sock))
sock.close()
self._sockets.remove(sock)
break
# handle all other sockets
sock.settimeout(1.0)
request = to_data("")
is_ok = True
# read the 7 bytes of the mbap
while (len(request) < 7) and is_ok:
new_byte = sock.recv(1)
if len(new_byte) == 0:
is_ok = False
else:
request += new_byte
retval = call_hooks("modbus_tcp.TcpServer.after_recv", (self, sock, request))
if retval is not None:
request = retval
if is_ok:
# read the rest of the request
length = self._get_request_length(request)
while (len(request) < (length + 6)) and is_ok:
new_byte = sock.recv(1)
if len(new_byte) == 0:
is_ok = False
else:
request += new_byte
if is_ok:
response = ""
# parse the request
try:
response = self._handle(request)
except Exception as msg:
LOGGER.error("Error while handling a request, Exception occurred: %s", msg)
# send back the response
if response:
try:
retval = call_hooks("modbus_tcp.TcpServer.before_send", (self, sock, response))
if retval is not None:
response = retval
sock.send(response)
call_hooks("modbus_tcp.TcpServer.after_send", (self, sock, response))
except Exception as msg:
is_ok = False
LOGGER.error(
"Error while sending on socket %d, Exception occurred: %s", sock.fileno(), msg
)
except Exception as excpt:
LOGGER.warning("Error while processing data on socket %d: %s", sock.fileno(), excpt)
call_hooks("modbus_tcp.TcpServer.on_error", (self, sock, excpt))
sock.close()
                self._sockets.remove(sock)

# --- end of zdpapi_modbus/libs/modbus_tk/modbus_tcp.py (zdpapi-modbus 1.7.1) ---
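A minimal sketch of the TcpMaster above (assumptions: a Modbus TCP slave is listening on 127.0.0.1:502, and the standard function-code constants live in this package's defines module):

from zdpapi_modbus.libs.modbus_tk import modbus_tcp, defines

master = modbus_tcp.TcpMaster(host="127.0.0.1", port=502, timeout_in_sec=5.0)
# read 10 holding registers of slave 1, starting at address 0
registers = master.execute(1, defines.READ_HOLDING_REGISTERS, 0, 10)
print(registers)
master.close()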
import struct
import time
from . import LOGGER
from .modbus import (
Databank, Query, Master, Server,
InvalidArgumentError, ModbusInvalidResponseError, ModbusInvalidRequestError
)
from .hooks import call_hooks
from . import utils
class RtuQuery(Query):
"""Subclass of a Query. Adds the Modbus RTU specific part of the protocol"""
def __init__(self):
"""Constructor"""
super(RtuQuery, self).__init__()
self._request_address = 0
self._response_address = 0
def build_request(self, pdu, slave):
"""Add the Modbus RTU part to the request"""
self._request_address = slave
if (self._request_address < 0) or (self._request_address > 255):
raise InvalidArgumentError("Invalid address {0}".format(self._request_address))
data = struct.pack(">B", self._request_address) + pdu
crc = struct.pack(">H", utils.calculate_crc(data))
return data + crc
def parse_response(self, response):
"""Extract the pdu from the Modbus RTU response"""
if len(response) < 3:
raise ModbusInvalidResponseError("Response length is invalid {0}".format(len(response)))
(self._response_address,) = struct.unpack(">B", response[0:1])
if self._request_address != self._response_address:
raise ModbusInvalidResponseError(
"Response address {0} is different from request address {1}".format(
self._response_address, self._request_address
)
)
(crc,) = struct.unpack(">H", response[-2:])
if crc != utils.calculate_crc(response[:-2]):
raise ModbusInvalidResponseError("Invalid CRC in response")
return response[1:-2]
def parse_request(self, request):
"""Extract the pdu from the Modbus RTU request"""
if len(request) < 3:
raise ModbusInvalidRequestError("Request length is invalid {0}".format(len(request)))
(self._request_address,) = struct.unpack(">B", request[0:1])
(crc,) = struct.unpack(">H", request[-2:])
if crc != utils.calculate_crc(request[:-2]):
raise ModbusInvalidRequestError("Invalid CRC in request")
return self._request_address, request[1:-2]
def build_response(self, response_pdu):
"""Build the response"""
self._response_address = self._request_address
data = struct.pack(">B", self._response_address) + response_pdu
crc = struct.pack(">H", utils.calculate_crc(data))
return data + crc
class RtuMaster(Master):
"""Subclass of Master. Implements the Modbus RTU MAC layer"""
def __init__(self, serial, interchar_multiplier=1.5, interframe_multiplier=3.5, t0=None):
"""Constructor. Pass the pyserial.Serial object"""
self._serial = serial
self.use_sw_timeout = False
LOGGER.info("RtuMaster %s is %s", self._serial.name, "opened" if self._serial.is_open else "closed")
super(RtuMaster, self).__init__(self._serial.timeout)
if t0:
self._t0 = t0
else:
self._t0 = utils.calculate_rtu_inter_char(self._serial.baudrate)
self._serial.inter_byte_timeout = interchar_multiplier * self._t0
self.set_timeout(interframe_multiplier * self._t0)
        # For some RS-485 adapters, the sent data (echo data) appears before the modbus response.
        # So read the echo data and discard it. By [email protected]
self.handle_local_echo = False
def _do_open(self):
"""Open the given serial port if not already opened"""
if not self._serial.is_open:
call_hooks("modbus_rtu.RtuMaster.before_open", (self,))
self._serial.open()
def _do_close(self):
"""Close the serial port if still opened"""
if self._serial.is_open:
self._serial.close()
call_hooks("modbus_rtu.RtuMaster.after_close", (self,))
return True
def set_timeout(self, timeout_in_sec, use_sw_timeout=False):
"""Change the timeout value"""
Master.set_timeout(self, timeout_in_sec)
self._serial.timeout = timeout_in_sec
# Use software based timeout in case the timeout functionality provided by the serial port is unreliable
self.use_sw_timeout = use_sw_timeout
def _send(self, request):
"""Send request to the slave"""
retval = call_hooks("modbus_rtu.RtuMaster.before_send", (self, request))
if retval is not None:
request = retval
self._serial.reset_input_buffer()
self._serial.reset_output_buffer()
self._serial.write(request)
self._serial.flush()
# Read the echo data, and discard it
if self.handle_local_echo:
self._serial.read(len(request))
def _recv(self, expected_length=-1):
"""Receive the response from the slave"""
response = utils.to_data("")
start_time = time.time() if self.use_sw_timeout else 0
while True:
read_bytes = self._serial.read(expected_length if expected_length > 0 else 1)
if self.use_sw_timeout:
read_duration = time.time() - start_time
else:
read_duration = 0
if (not read_bytes) or (read_duration > self._serial.timeout):
break
response += read_bytes
if expected_length >= 0 and len(response) >= expected_length:
                # if the expected number of bytes is received consider that the response is done
# improve performance by avoiding end-of-response detection by timeout
break
retval = call_hooks("modbus_rtu.RtuMaster.after_recv", (self, response))
if retval is not None:
return retval
return response
def _make_query(self):
"""Returns an instance of a Query subclass implementing the modbus RTU protocol"""
return RtuQuery()
class RtuServer(Server):
"""This class implements a simple and mono-threaded modbus rtu server"""
_timeout = 0
def __init__(self, serial, databank=None, error_on_missing_slave=True, **kwargs):
"""
Constructor: initializes the server settings
serial: a pyserial object
databank: the data to access
interframe_multiplier: 3.5 by default
interchar_multiplier: 1.5 by default
"""
interframe_multiplier = kwargs.pop('interframe_multiplier', 3.5)
interchar_multiplier = kwargs.pop('interchar_multiplier', 1.5)
databank = databank if databank else Databank(error_on_missing_slave=error_on_missing_slave)
super(RtuServer, self).__init__(databank)
self._serial = serial
LOGGER.info("RtuServer %s is %s", self._serial.name, "opened" if self._serial.is_open else "closed")
self._t0 = utils.calculate_rtu_inter_char(self._serial.baudrate)
self._serial.inter_byte_timeout = interchar_multiplier * self._t0
self.set_timeout(interframe_multiplier * self._t0)
self._block_on_first_byte = False
def close(self):
"""close the serial communication"""
if self._serial.is_open:
call_hooks("modbus_rtu.RtuServer.before_close", (self,))
self._serial.close()
call_hooks("modbus_rtu.RtuServer.after_close", (self,))
def set_timeout(self, timeout):
self._timeout = timeout
self._serial.timeout = timeout
def get_timeout(self):
return self._timeout
def __del__(self):
"""Destructor"""
self.close()
def _make_query(self):
"""Returns an instance of a Query subclass implementing the modbus RTU protocol"""
return RtuQuery()
def start(self):
"""Allow the server thread to block on first byte"""
self._block_on_first_byte = True
super(RtuServer, self).start()
def stop(self):
"""Force the server thread to exit"""
# Prevent blocking on first byte in server thread.
# Without the _block_on_first_byte following problem could happen:
# 1. Current blocking read(1) is cancelled
# 2. Server thread resumes and start next read(1)
# 3. RtuServer clears go event and waits for thread to finish
# 4. Server thread finishes only when a byte is received
# Thanks to _block_on_first_byte, if server thread does start new read
# it will timeout as it won't be blocking.
self._block_on_first_byte = False
if self._serial.is_open:
# cancel any pending read from server thread, it most likely is
# blocking read(1) call
self._serial.cancel_read()
super(RtuServer, self).stop()
def _do_init(self):
"""initialize the serial connection"""
if not self._serial.is_open:
call_hooks("modbus_rtu.RtuServer.before_open", (self,))
self._serial.open()
call_hooks("modbus_rtu.RtuServer.after_open", (self,))
def _do_exit(self):
"""close the serial connection"""
self.close()
def _do_run(self):
"""main function of the server"""
try:
# check the status of every socket
request = utils.to_data('')
if self._block_on_first_byte:
# do a blocking read for first byte
self._serial.timeout = None
try:
read_bytes = self._serial.read(1)
request += read_bytes
except Exception as e:
self._serial.close()
self._serial.open()
self._serial.timeout = self._timeout
# Read rest of the request
while True:
try:
read_bytes = self._serial.read(128)
if not read_bytes:
break
except Exception as e:
self._serial.close()
self._serial.open()
break
request += read_bytes
# parse the request
if request:
retval = call_hooks("modbus_rtu.RtuServer.after_read", (self, request))
if retval is not None:
request = retval
response = self._handle(request)
# send back the response
retval = call_hooks("modbus_rtu.RtuServer.before_write", (self, response))
if retval is not None:
response = retval
if response:
                    if self._serial.in_waiting > 0:
                        # Most likely the master timed out on this request and started a new
                        # one, for which we already received at least 1 byte
                        LOGGER.warning("Not sending response because there is a new request pending")
else:
self._serial.write(response)
self._serial.flush()
time.sleep(self.get_timeout())
call_hooks("modbus_rtu.RtuServer.after_write", (self, response))
except Exception as excpt:
LOGGER.error("Error while handling request, Exception occurred: %s", excpt)
call_hooks("modbus_rtu.RtuServer.on_error", (self, excpt)) | zdpapi-modbus | /zdpapi_modbus-1.7.1.tar.gz/zdpapi_modbus-1.7.1/zdpapi_modbus/libs/modbus_tk/modbus_rtu.py | modbus_rtu.py |
from __future__ import print_function
import ctypes
import os
import sys
import select
import serial
import threading
import time
from . import utils
from . import hooks
from . import modbus
from . import modbus_tcp
from . import modbus_rtu
if utils.PY2:
import Queue as queue
import SocketServer
else:
import queue
import socketserver as SocketServer
# add logging capability
LOGGER = utils.create_logger(name="console", record_format="%(message)s")
# The communication between the server and the user interfaces (console or rpc) are done through queues
# command received from the interfaces
INPUT_QUEUE = queue.Queue()
# response to be sent back by the interfaces
OUTPUT_QUEUE = queue.Queue()
class CompositeServer(modbus.Server):
"""make possible to have several servers sharing the same databank"""
def __init__(self, list_of_server_classes, list_of_server_args, databank=None):
"""Constructor"""
super(CompositeServer, self).__init__(databank)
self._servers = [
the_class(*the_args, **{"databank": self.get_db()})
for the_class, the_args in zip(list_of_server_classes, list_of_server_args)
if issubclass(the_class, modbus.Server)
]
def set_verbose(self, verbose):
"""if verbose is true the sent and received packets will be logged"""
for srv in self._servers:
srv.set_verbose(verbose)
def _make_thread(self):
"""should initialize the main thread of the server. You don't need it here"""
pass
def _make_query(self):
"""Returns an instance of a Query subclass implementing the MAC layer protocol"""
raise NotImplementedError()
def start(self):
"""Start the server. It will handle request"""
for srv in self._servers:
srv.start()
def stop(self):
"""stop the server. It doesn't handle request anymore"""
for srv in self._servers:
srv.stop()
class RpcHandler(SocketServer.BaseRequestHandler):
"""An instance of this class is created every time an RPC call is received by the server"""
def handle(self):
"""This function is called automatically by the SocketServer"""
# self.request is the TCP socket connected to the client
# read the incoming command
request = self.request.recv(1024).strip()
# write to the queue waiting to be processed by the server
INPUT_QUEUE.put(request)
# wait for the server answer in the output queue
response = OUTPUT_QUEUE.get(timeout=5.0)
# send back the answer
self.request.send(response)
class RpcInterface(threading.Thread):
"""Manage RPC call over TCP/IP thanks to the SocketServer module"""
def __init__(self):
"""Constructor"""
super(RpcInterface, self).__init__()
self.rpc_server = SocketServer.TCPServer(("", 2711), RpcHandler)
def run(self):
"""run the server and wait that it returns"""
self.rpc_server.serve_forever(0.5)
def close(self):
"""force the socket server to exit"""
try:
self.rpc_server.shutdown()
self.join(1.0)
except Exception:
LOGGER.warning("An error occurred while closing RPC interface")
class ConsoleInterface(threading.Thread):
"""Manage user actions from the console"""
def __init__(self):
"""constructor: initialize communication with the console"""
super(ConsoleInterface, self).__init__()
self.inq = INPUT_QUEUE
self.outq = OUTPUT_QUEUE
if os.name == "nt":
ctypes.windll.Kernel32.GetStdHandle.restype = ctypes.c_ulong
self.console_handle = ctypes.windll.Kernel32.GetStdHandle(ctypes.c_ulong(0xfffffff5))
ctypes.windll.Kernel32.WaitForSingleObject.restype = ctypes.c_ulong
elif os.name == "posix":
# select already imported
pass
else:
raise Exception("%s platform is not supported yet" % os.name)
self._go = threading.Event()
self._go.set()
def _check_console_input(self):
"""test if there is something to read on the console"""
if os.name == "nt":
if 0 == ctypes.windll.Kernel32.WaitForSingleObject(self.console_handle, 500):
return True
elif os.name == "posix":
(inputready, abcd, efgh) = select.select([sys.stdin], [], [], 0.5)
if len(inputready) > 0:
return True
else:
raise Exception("%s platform is not supported yet" % os.name)
return False
def run(self):
"""read from the console, transfer to the server and write the answer"""
        while self._go.is_set():  # while app is running
            if self._check_console_input():  # if something to read on the console
                cmd = sys.stdin.readline()  # read it
                self.inq.put(cmd)  # dispatch it to the server
                response = self.outq.get(timeout=2.0)  # wait for an answer
                sys.stdout.write(response)  # write the answer on the console
def close(self):
"""terminates the thread"""
self._go.clear()
self.join(1.0)
class Simulator(object):
"""The main class of the app in charge of running everything"""
def __init__(self, server=None):
"""Constructor"""
if server is None:
self.server = CompositeServer([modbus_rtu.RtuServer, modbus_tcp.TcpServer], [(serial.Serial(0),), ()])
else:
self.server = server
self.rpc = RpcInterface()
self.console = ConsoleInterface()
self.inq, self.outq = INPUT_QUEUE, OUTPUT_QUEUE
self._hooks_fct = {}
self.cmds = {
"add_slave": self._do_add_slave,
"has_slave": self._do_has_slave,
"remove_slave": self._do_remove_slave,
"remove_all_slaves": self._do_remove_all_slaves,
"add_block": self._do_add_block,
"remove_block": self._do_remove_block,
"remove_all_blocks": self._do_remove_all_blocks,
"set_values": self._do_set_values,
"get_values": self._do_get_values,
"install_hook": self._do_install_hook,
"uninstall_hook": self._do_uninstall_hook,
"set_verbose": self._do_set_verbose,
}
def add_command(self, name, fct):
"""add a custom command"""
self.cmds[name] = fct
def start(self):
"""run the servers"""
self.server.start()
self.console.start()
self.rpc.start()
LOGGER.info("modbus_tk.simulator is running...")
self._handle()
def declare_hook(self, fct_name, fct):
"""declare a hook function by its name. It must be installed by an install hook command"""
self._hooks_fct[fct_name] = fct
def _tuple_to_str(self, the_tuple):
"""convert a tuple to a string"""
ret = ""
for item in the_tuple:
ret += (" " + str(item))
return ret[1:]
def _do_add_slave(self, args):
"""execute the add_slave command"""
slave_id = int(args[1])
self.server.add_slave(slave_id)
return "{0}".format(slave_id)
def _do_has_slave(self, args):
"""execute the has_slave command"""
slave_id = int(args[1])
try:
self.server.get_slave(slave_id)
except Exception:
return "0"
return "1"
def _do_remove_slave(self, args):
"""execute the remove_slave command"""
slave_id = int(args[1])
self.server.remove_slave(slave_id)
return ""
    def _do_remove_all_slaves(self, args):
        """execute the remove_all_slaves command"""
self.server.remove_all_slaves()
return ""
def _do_add_block(self, args):
"""execute the add_block command"""
slave_id = int(args[1])
name = args[2]
block_type = int(args[3])
starting_address = int(args[4])
length = int(args[5])
slave = self.server.get_slave(slave_id)
slave.add_block(name, block_type, starting_address, length)
return name
def _do_remove_block(self, args):
"""execute the remove_block command"""
slave_id = int(args[1])
name = args[2]
slave = self.server.get_slave(slave_id)
slave.remove_block(name)
def _do_remove_all_blocks(self, args):
"""execute the remove_all_blocks command"""
slave_id = int(args[1])
slave = self.server.get_slave(slave_id)
slave.remove_all_blocks()
def _do_set_values(self, args):
"""execute the set_values command"""
slave_id = int(args[1])
name = args[2]
address = int(args[3])
values = []
for val in args[4:]:
values.append(int(val))
slave = self.server.get_slave(slave_id)
slave.set_values(name, address, values)
values = slave.get_values(name, address, len(values))
return self._tuple_to_str(values)
def _do_get_values(self, args):
"""execute the get_values command"""
slave_id = int(args[1])
name = args[2]
address = int(args[3])
length = int(args[4])
slave = self.server.get_slave(slave_id)
values = slave.get_values(name, address, length)
return self._tuple_to_str(values)
def _do_install_hook(self, args):
"""install a function as a hook"""
hook_name = args[1]
fct_name = args[2]
hooks.install_hook(hook_name, self._hooks_fct[fct_name])
def _do_uninstall_hook(self, args):
"""
uninstall a function as a hook.
If no function is given, uninstall all functions
"""
hook_name = args[1]
try:
hooks.uninstall_hook(hook_name)
except KeyError as exception:
LOGGER.error(str(exception))
def _do_set_verbose(self, args):
"""change the verbosity of the server"""
verbose = int(args[1])
self.server.set_verbose(verbose)
return "%d" % verbose
def _handle(self):
"""almost-for-ever loop in charge of listening for command and executing it"""
while True:
cmd = self.inq.get()
args = cmd.strip('\r\n').split(' ')
if cmd.find('quit') == 0:
self.outq.put('bye-bye\r\n')
break
elif args[0] in self.cmds:
try:
answer = self.cmds[args[0]](args)
self.outq.put("%s done: %s\r\n" % (args[0], answer))
except Exception as msg:
self.outq.put("%s error: %s\r\n" % (args[0], msg))
else:
self.outq.put("error: unknown command %s\r\n" % (args[0]))
def close(self):
"""close every server"""
self.console.close()
self.rpc.close()
self.server.stop()
def print_me(args):
"""hook function example"""
request = args[1]
print("print_me: len = ", len(request))
def run_simulator():
"""run simulator"""
simulator = Simulator()
try:
LOGGER.info("'quit' for closing the server")
simulator.declare_hook("print_me", print_me)
simulator.start()
except Exception as exception:
print(exception)
finally:
simulator.close()
LOGGER.info("modbus_tk.simulator has stopped!")
# In python 2.5, the SocketServer shutdown is not working Ok
# The 2 lines below are an ugly temporary workaround
time.sleep(1.0)
sys.exit()
if __name__ == "__main__":
    run_simulator()

# --- end of zdpapi_modbus/libs/modbus_tk/simulator.py (zdpapi-modbus 1.7.1) ---
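For reference, a sketch of a console session with the simulator. The command grammar follows the _do_* handlers above; <block_type> stands for the integer value of one of the block-type constants in the defines module (e.g. defines.HOLDING_REGISTERS):

add_slave 12
add_block 12 block0 <block_type> 0 100
set_values 12 block0 0 5 8 7 6
get_values 12 block0 0 4
quit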
from __future__ import print_function
import sys
import threading
import socket
import select
import logging
LOGGER = logging.getLogger("modbus_tk")
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
def threadsafe_function(fcn):
"""decorator making sure that the decorated function is thread safe"""
lock = threading.RLock()
def new(*args, **kwargs):
"""Lock and call the decorated function
Unless kwargs['threadsafe'] == False
"""
threadsafe = kwargs.pop('threadsafe', True)
if threadsafe:
lock.acquire()
try:
ret = fcn(*args, **kwargs)
except Exception as excpt:
raise excpt
finally:
if threadsafe:
lock.release()
return ret
return new
def flush_socket(socks, lim=0):
"""remove the data present on the socket"""
input_socks = [socks]
cnt = 0
while True:
i_socks = select.select(input_socks, input_socks, input_socks, 0.0)[0]
if len(i_socks) == 0:
break
for sock in i_socks:
sock.recv(1024)
if lim > 0:
cnt += 1
if cnt >= lim:
# avoid infinite loop due to loss of connection
raise Exception("flush_socket: maximum number of iterations reached")
def get_log_buffer(prefix, buff):
"""Format binary data into a string for debug purpose"""
log = prefix
for i in buff:
log += str(ord(i) if PY2 else i) + "-"
return log[:-1]
class ConsoleHandler(logging.Handler):
"""This class is a logger handler. It prints on the console"""
def __init__(self):
"""Constructor"""
logging.Handler.__init__(self)
def emit(self, record):
"""format and print the record on the console"""
print(self.format(record))
class LogitHandler(logging.Handler):
"""This class is a logger handler. It send to a udp socket"""
def __init__(self, dest):
"""Constructor"""
logging.Handler.__init__(self)
self._dest = dest
self._sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
def emit(self, record):
"""format and send the record over udp"""
data = self.format(record) + "\r\n"
if PY3:
data = to_data(data)
self._sock.sendto(data, self._dest)
class DummyHandler(logging.Handler):
"""This class is a logger handler. It doesn't do anything"""
def __init__(self):
"""Constructor"""
super(DummyHandler, self).__init__()
def emit(self, record):
"""do nothing with the given record"""
pass
def create_logger(name="dummy", level=logging.DEBUG, record_format=None):
"""Create a logger according to the given settings"""
if record_format is None:
record_format = "%(asctime)s\t%(levelname)s\t%(module)s.%(funcName)s\t%(threadName)s\t%(message)s"
logger = logging.getLogger("modbus_tk")
logger.setLevel(level)
formatter = logging.Formatter(record_format)
if name == "udp":
log_handler = LogitHandler(("127.0.0.1", 1975))
elif name == "console":
log_handler = ConsoleHandler()
elif name == "dummy":
log_handler = DummyHandler()
else:
raise Exception("Unknown handler %s" % name)
log_handler.setFormatter(formatter)
logger.addHandler(log_handler)
return logger
def swap_bytes(word_val):
"""swap lsb and msb of a word"""
msb = (word_val >> 8) & 0xFF
lsb = word_val & 0xFF
return (lsb << 8) + msb
def calculate_crc(data):
"""Calculate the CRC16 of a datagram"""
CRC16table = (
0x0000, 0xC0C1, 0xC181, 0x0140, 0xC301, 0x03C0, 0x0280, 0xC241,
0xC601, 0x06C0, 0x0780, 0xC741, 0x0500, 0xC5C1, 0xC481, 0x0440,
0xCC01, 0x0CC0, 0x0D80, 0xCD41, 0x0F00, 0xCFC1, 0xCE81, 0x0E40,
0x0A00, 0xCAC1, 0xCB81, 0x0B40, 0xC901, 0x09C0, 0x0880, 0xC841,
0xD801, 0x18C0, 0x1980, 0xD941, 0x1B00, 0xDBC1, 0xDA81, 0x1A40,
0x1E00, 0xDEC1, 0xDF81, 0x1F40, 0xDD01, 0x1DC0, 0x1C80, 0xDC41,
0x1400, 0xD4C1, 0xD581, 0x1540, 0xD701, 0x17C0, 0x1680, 0xD641,
0xD201, 0x12C0, 0x1380, 0xD341, 0x1100, 0xD1C1, 0xD081, 0x1040,
0xF001, 0x30C0, 0x3180, 0xF141, 0x3300, 0xF3C1, 0xF281, 0x3240,
0x3600, 0xF6C1, 0xF781, 0x3740, 0xF501, 0x35C0, 0x3480, 0xF441,
0x3C00, 0xFCC1, 0xFD81, 0x3D40, 0xFF01, 0x3FC0, 0x3E80, 0xFE41,
0xFA01, 0x3AC0, 0x3B80, 0xFB41, 0x3900, 0xF9C1, 0xF881, 0x3840,
0x2800, 0xE8C1, 0xE981, 0x2940, 0xEB01, 0x2BC0, 0x2A80, 0xEA41,
0xEE01, 0x2EC0, 0x2F80, 0xEF41, 0x2D00, 0xEDC1, 0xEC81, 0x2C40,
0xE401, 0x24C0, 0x2580, 0xE541, 0x2700, 0xE7C1, 0xE681, 0x2640,
0x2200, 0xE2C1, 0xE381, 0x2340, 0xE101, 0x21C0, 0x2080, 0xE041,
0xA001, 0x60C0, 0x6180, 0xA141, 0x6300, 0xA3C1, 0xA281, 0x6240,
0x6600, 0xA6C1, 0xA781, 0x6740, 0xA501, 0x65C0, 0x6480, 0xA441,
0x6C00, 0xACC1, 0xAD81, 0x6D40, 0xAF01, 0x6FC0, 0x6E80, 0xAE41,
0xAA01, 0x6AC0, 0x6B80, 0xAB41, 0x6900, 0xA9C1, 0xA881, 0x6840,
0x7800, 0xB8C1, 0xB981, 0x7940, 0xBB01, 0x7BC0, 0x7A80, 0xBA41,
0xBE01, 0x7EC0, 0x7F80, 0xBF41, 0x7D00, 0xBDC1, 0xBC81, 0x7C40,
0xB401, 0x74C0, 0x7580, 0xB541, 0x7700, 0xB7C1, 0xB681, 0x7640,
0x7200, 0xB2C1, 0xB381, 0x7340, 0xB101, 0x71C0, 0x7080, 0xB041,
0x5000, 0x90C1, 0x9181, 0x5140, 0x9301, 0x53C0, 0x5280, 0x9241,
0x9601, 0x56C0, 0x5780, 0x9741, 0x5500, 0x95C1, 0x9481, 0x5440,
0x9C01, 0x5CC0, 0x5D80, 0x9D41, 0x5F00, 0x9FC1, 0x9E81, 0x5E40,
0x5A00, 0x9AC1, 0x9B81, 0x5B40, 0x9901, 0x59C0, 0x5880, 0x9841,
0x8801, 0x48C0, 0x4980, 0x8941, 0x4B00, 0x8BC1, 0x8A81, 0x4A40,
0x4E00, 0x8EC1, 0x8F81, 0x4F40, 0x8D01, 0x4DC0, 0x4C80, 0x8C41,
0x4400, 0x84C1, 0x8581, 0x4540, 0x8701, 0x47C0, 0x4680, 0x8641,
0x8201, 0x42C0, 0x4380, 0x8341, 0x4100, 0x81C1, 0x8081, 0x4040
)
crc = 0xFFFF
if PY2:
for c in data:
crc = (crc >> 8) ^ CRC16table[(ord(c) ^ crc) & 0xFF]
else:
for c in data:
crc = (crc >> 8) ^ CRC16table[((c) ^ crc) & 0xFF]
return swap_bytes(crc)
def calculate_rtu_inter_char(baudrate):
"""calculates the interchar delay from the baudrate"""
if baudrate <= 19200:
return 11.0 / baudrate
else:
return 0.0005
class WorkerThread(object):
"""
A thread which is running an almost-ever loop
It can be stopped by calling the stop function
"""
def __init__(self, main_fct, args=(), init_fct=None, exit_fct=None):
"""Constructor"""
self._fcts = [init_fct, main_fct, exit_fct]
self._args = args
self._thread = threading.Thread(target=WorkerThread._run, args=(self,))
self._go = threading.Event()
def start(self):
"""Start the thread"""
self._go.set()
self._thread.start()
def stop(self):
"""stop the thread"""
if self._thread.is_alive():
self._go.clear()
self._thread.join()
def _run(self):
"""main function of the thread execute _main_fct until stop is called"""
# pylint: disable=broad-except
try:
if self._fcts[0]:
self._fcts[0](*self._args)
            while self._go.is_set():
self._fcts[1](*self._args)
except Exception as excpt:
LOGGER.error("error: %s", str(excpt))
finally:
if self._fcts[2]:
self._fcts[2](*self._args)
def to_data(string_data):
if PY2:
return string_data
else:
        return bytearray(string_data, 'ascii')

# --- end of zdpapi_modbus/libs/modbus_tk/utils.py (zdpapi-modbus 1.7.1) ---
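A quick sanity check (sketch) of the CRC helper: building the trailer of an RTU frame the same way RtuQuery.build_request does in modbus_rtu.py above.

import struct
from zdpapi_modbus.libs.modbus_tk.utils import calculate_crc, get_log_buffer

# RTU request: slave 1, function 3 (read holding registers), address 0, count 1
data = struct.pack(">B", 1) + struct.pack(">BHH", 3, 0, 1)
frame = data + struct.pack(">H", calculate_crc(data))
print(get_log_buffer("frame: ", frame))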
from __future__ import print_function
import socket
from . import defines
class SimulatorRpcClient(object):
    """Makes it possible to send commands to the modbus_tk Simulator through remote procedure calls"""
    def __init__(self, host="127.0.0.1", port=2711, timeout=0.5):
        """Constructor"""
        self.host = host
        self.port = port
        self.timeout = timeout
    def _rpc_call(self, query):
        """send a rpc call and return the result"""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(self.timeout)
        sock.connect((self.host, self.port))
        # encode/decode so the call also works on Python 3, where sockets carry bytes
        sock.send(query.encode("ascii"))
        response = sock.recv(1024).decode("ascii")
        sock.close()
        return self._response_to_values(response.strip("\r\n"), query.split(" ")[0])
def _response_to_values(self, response, command):
"""extract the return value from the response"""
prefix = command + " done: "
if response.find(prefix) == 0:
return response[len(prefix):]
else:
raise Exception(response)
def add_slave(self, slave_id):
"""add a new slave with the given id"""
query = "add_slave %d" % (slave_id)
return self._rpc_call(query)
    def remove_slave(self, slave_id):
        """remove the slave with the given id"""
        query = "remove_slave %d" % (slave_id)
        return self._rpc_call(query)
    def remove_all_slaves(self):
        """remove all the slaves"""
        query = "remove_all_slaves"
        self._rpc_call(query)
    def has_slave(self, slave_id):
        """return True if a slave with the given id exists"""
        query = "has_slave %d" % (slave_id)
        if "1" == self._rpc_call(query):
            return True
        return False
def add_block(self, slave_id, block_name, block_type, starting_address, length):
"""add a new modbus block into the slave"""
query = "add_block %d %s %d %d %d" % (slave_id, block_name, block_type, starting_address, length)
return self._rpc_call(query)
def remove_block(self, slave_id, block_name):
"""remove the modbus block with the given name and slave"""
query = "remove_block %d %s" % (slave_id, block_name)
self._rpc_call(query)
    def remove_all_blocks(self, slave_id):
        """remove all modbus blocks of the given slave"""
query = "remove_all_blocks %d" % (slave_id)
self._rpc_call(query)
def set_values(self, slave_id, block_name, address, values):
"""set the values of registers"""
query = "set_values %d %s %d" % (slave_id, block_name, address)
for val in values:
query += (" " + str(val))
return self._rpc_call(query)
def get_values(self, slave_id, block_name, address, length):
"""get the values of some registers"""
query = "get_values %d %s %d %d" % (slave_id, block_name, address, length)
ret_values = self._rpc_call(query)
return tuple([int(val) for val in ret_values.split(' ')])
def install_hook(self, hook_name, fct_name):
"""add a hook"""
query = "install_hook %s %s" % (hook_name, fct_name)
self._rpc_call(query)
def uninstall_hook(self, hook_name, fct_name=""):
"""remove a hook"""
query = "uninstall_hook %s %s" % (hook_name, fct_name)
self._rpc_call(query)
if __name__ == "__main__":
modbus_simu = SimulatorRpcClient()
modbus_simu.remove_all_slaves()
print(modbus_simu.add_slave(12))
print(modbus_simu.add_block(12, "toto", defines.COILS, 0, 100))
print(modbus_simu.set_values(12, "toto", 0, [5, 8, 7, 6, 41]))
print(modbus_simu.get_values(12, "toto", 0, 5))
print(modbus_simu.set_values(12, "toto", 2, [9]))
print(modbus_simu.get_values(12, "toto", 0, 5))
print(modbus_simu.has_slave(12))
print(modbus_simu.add_block(12, "titi", defines.COILS, 100, 100))
print(modbus_simu.remove_block(12, "titi"))
print(modbus_simu.add_slave(25))
print(modbus_simu.has_slave(25))
print(modbus_simu.add_slave(28))
modbus_simu.remove_slave(25)
print(modbus_simu.has_slave(25))
print(modbus_simu.has_slave(28))
modbus_simu.remove_all_blocks(12)
modbus_simu.remove_all_slaves()
print(modbus_simu.has_slave(28))
print(modbus_simu.has_slave(12))
    modbus_simu.install_hook("modbus.Server.before_handle_request", "print_me")

# --- end of zdpapi_modbus/libs/modbus_tk/simulator_rpc_client.py (zdpapi-modbus 1.7.1) ---
from __future__ import with_statement
import threading
_LOCK = threading.RLock()
_HOOKS = {}
def install_hook(name, fct):
"""
Install one of the following hook
modbus_rtu.RtuMaster.before_open((master,))
    modbus_rtu.RtuMaster.after_close((master,))
modbus_rtu.RtuMaster.before_send((master, request)) returns modified request or None
modbus_rtu.RtuMaster.after_recv((master, response)) returns modified response or None
modbus_rtu.RtuServer.before_close((server, ))
modbus_rtu.RtuServer.after_close((server, ))
modbus_rtu.RtuServer.before_open((server, ))
    modbus_rtu.RtuServer.after_open((server, ))
modbus_rtu.RtuServer.after_read((server, request)) returns modified request or None
modbus_rtu.RtuServer.before_write((server, response)) returns modified response or None
modbus_rtu.RtuServer.after_write((server, response))
modbus_rtu.RtuServer.on_error((server, excpt))
modbus_tcp.TcpMaster.before_connect((master, ))
modbus_tcp.TcpMaster.after_connect((master, ))
modbus_tcp.TcpMaster.before_close((master, ))
modbus_tcp.TcpMaster.after_close((master, ))
modbus_tcp.TcpMaster.before_send((master, request))
    modbus_tcp.TcpServer.after_send((server, sock, response))
modbus_tcp.TcpMaster.after_recv((master, response))
modbus_tcp.TcpServer.on_connect((server, client, address))
modbus_tcp.TcpServer.on_disconnect((server, sock))
modbus_tcp.TcpServer.after_recv((server, sock, request)) returns modified request or None
modbus_tcp.TcpServer.before_send((server, sock, response)) returns modified response or None
modbus_tcp.TcpServer.on_error((server, sock, excpt))
modbus_rtu_over_tcp.RtuOverTcpMaster.after_recv((master, response))
modbus.Master.before_send((master, request)) returns modified request or None
modbus.Master.after_send((master))
modbus.Master.after_recv((master, response)) returns modified response or None
modbus.Slave.handle_request((slave, request_pdu)) returns modified response or None
modbus.Slave.handle_write_multiple_coils_request((slave, request_pdu))
modbus.Slave.handle_write_multiple_registers_request((slave, request_pdu)) returns modified response or None
modbus.Slave.handle_write_single_register_request((slave, request_pdu)) returns modified response or None
modbus.Slave.handle_write_single_coil_request((slave, request_pdu)) returns modified response or None
modbus.Slave.handle_read_input_registers_request((slave, request_pdu)) returns modified response or None
modbus.Slave.handle_read_holding_registers_request((slave, request_pdu)) returns modified response or None
modbus.Slave.handle_read_discrete_inputs_request((slave, request_pdu)) returns modified response or None
modbus.Slave.handle_read_coils_request((slave, request_pdu)) returns modified response or None
modbus.Slave.on_handle_broadcast((slave, response_pdu)) returns modified response or None
modbus.Slave.on_exception((slave, function_code, excpt))
modbus.Databank.on_error((db, excpt, request_pdu))
modbus.ModbusBlock.setitem((self, slice, value))
modbus.Server.before_handle_request((server, request)) returns modified request or None
modbus.Server.after_handle_request((server, response)) returns modified response or None
"""
with _LOCK:
try:
_HOOKS[name].append(fct)
except KeyError:
_HOOKS[name] = [fct]
def uninstall_hook(name, fct=None):
"""remove the function from the hooks"""
with _LOCK:
if fct:
_HOOKS[name].remove(fct)
else:
del _HOOKS[name][:]
def call_hooks(name, args):
"""call the function associated with the hook and pass the given args"""
with _LOCK:
try:
for fct in _HOOKS[name]:
retval = fct(args)
if retval is not None:
return retval
except KeyError:
pass
    return None

# --- end of zdpapi_modbus/libs/modbus_tk/hooks.py (zdpapi-modbus 1.7.1) ---
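A sketch of installing a hook; the hook name is taken from the list documented above and the body is illustrative:

from zdpapi_modbus.libs.modbus_tk import hooks

def log_request(args):
    """print the size of every request a master is about to send"""
    master, request = args
    print("sending %d bytes" % len(request))
    return None  # returning None leaves the request unchanged

hooks.install_hook("modbus.Master.before_send", log_request)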
from __future__ import with_statement
import struct
import threading
from . import defines
from .exceptions import (
ModbusError, ModbusFunctionNotSupportedError, DuplicatedKeyError, MissingKeyError, InvalidModbusBlockError,
InvalidArgumentError, OverlapModbusBlockError, OutOfModbusBlockError, ModbusInvalidResponseError,
ModbusInvalidRequestError
)
from .hooks import call_hooks
from .utils import threadsafe_function, get_log_buffer, LOGGER
# modbus_tk is using the python logging mechanism
# you can define this logger in your app in order to see its prints logs
class Query(object):
"""
Interface to be implemented in subclass for every specific modbus MAC layer
"""
def __init__(self):
"""Constructor"""
pass
def build_request(self, pdu, slave):
"""
Get the modbus application protocol request pdu and slave id
Encapsulate with MAC layer information
Returns a string
"""
raise NotImplementedError()
def parse_response(self, response):
"""
Get the full response and extract the modbus application protocol
response pdu
Returns a string
"""
raise NotImplementedError()
def parse_request(self, request):
"""
Get the full request and extract the modbus application protocol
request pdu
Returns a string and the slave id
"""
raise NotImplementedError()
def build_response(self, response_pdu):
"""
Get the modbus application protocol response pdu and encapsulate with
MAC layer information
Returns a string
"""
raise NotImplementedError()
class Master(object):
"""
This class implements the Modbus Application protocol for a master
To be subclassed with a class implementing the MAC layer
"""
def __init__(self, timeout_in_sec, hooks=None):
"""Constructor: can define a timeout"""
self._timeout = timeout_in_sec
self._verbose = False
self._is_opened = False
def __del__(self):
"""Destructor: close the connection"""
self.close()
def set_verbose(self, verbose):
"""print some more log prints for debug purpose"""
self._verbose = verbose
def open(self):
"""open the communication with the slave"""
if not self._is_opened:
self._do_open()
self._is_opened = True
def close(self):
"""close the communication with the slave"""
if self._is_opened:
ret = self._do_close()
if ret:
self._is_opened = False
def _do_open(self):
"""Open the MAC layer"""
raise NotImplementedError()
def _do_close(self):
"""Close the MAC layer"""
raise NotImplementedError()
def _send(self, buf):
"""Send data to a slave on the MAC layer"""
raise NotImplementedError()
def _recv(self, expected_length):
"""
Receive data from a slave on the MAC layer
if expected_length is >=0 then consider that the response is done when this
number of bytes is received
"""
raise NotImplementedError()
def _make_query(self):
"""
Returns an instance of a Query subclass implementing
the MAC layer protocol
"""
raise NotImplementedError()
@threadsafe_function
def execute(
self, slave, function_code, starting_address, quantity_of_x=0, output_value=0, data_format="",
expected_length=-1, write_starting_address_FC23=0):
"""
Execute a modbus query and returns the data part of the answer as a tuple
The returned tuple depends on the query function code. see modbus protocol
specification for details
        data_format makes it possible to extract the data as defined in the
        struct python module documentation
"""
pdu = ""
is_read_function = False
nb_of_digits = 0
# open the connection if it is not already done
self.open()
# Build the modbus pdu and the format of the expected data.
# It depends of function code. see modbus specifications for details.
if function_code == defines.READ_COILS or function_code == defines.READ_DISCRETE_INPUTS:
is_read_function = True
pdu = struct.pack(">BHH", function_code, starting_address, quantity_of_x)
byte_count = quantity_of_x // 8
if (quantity_of_x % 8) > 0:
byte_count += 1
nb_of_digits = quantity_of_x
if not data_format:
data_format = ">" + (byte_count * "B")
if expected_length < 0:
# No length was specified and calculated length can be used:
                # slave + func + byte count + data bytes + crc1 + crc2
expected_length = byte_count + 5
elif function_code == defines.READ_INPUT_REGISTERS or function_code == defines.READ_HOLDING_REGISTERS:
is_read_function = True
pdu = struct.pack(">BHH", function_code, starting_address, quantity_of_x)
if not data_format:
data_format = ">" + (quantity_of_x * "H")
if expected_length < 0:
# No length was specified and calculated length can be used:
                # slave + func + byte count + 2 bytes per register + crc1 + crc2
expected_length = 2 * quantity_of_x + 5
elif (function_code == defines.WRITE_SINGLE_COIL) or (function_code == defines.WRITE_SINGLE_REGISTER):
if function_code == defines.WRITE_SINGLE_COIL:
if output_value != 0:
output_value = 0xff00
fmt = ">BHH"
else:
fmt = ">BH" + ("H" if output_value >= 0 else "h")
pdu = struct.pack(fmt, function_code, starting_address, output_value)
if not data_format:
data_format = ">HH"
if expected_length < 0:
# No length was specified and calculated length can be used:
                # slave + func + address(2 bytes) + value(2 bytes) + crc1 + crc2
expected_length = 8
elif function_code == defines.WRITE_MULTIPLE_COILS:
byte_count = len(output_value) // 8
if (len(output_value) % 8) > 0:
byte_count += 1
pdu = struct.pack(">BHHB", function_code, starting_address, len(output_value), byte_count)
i, byte_value = 0, 0
for j in output_value:
if j > 0:
byte_value += pow(2, i)
if i == 7:
pdu += struct.pack(">B", byte_value)
i, byte_value = 0, 0
else:
i += 1
if i > 0:
pdu += struct.pack(">B", byte_value)
if not data_format:
data_format = ">HH"
if expected_length < 0:
# No length was specified and calculated length can be used:
                # slave + func + address(2 bytes) + quantity(2 bytes) + crc1 + crc2
expected_length = 8
elif function_code == defines.WRITE_MULTIPLE_REGISTERS:
if output_value and data_format:
byte_count = struct.calcsize(data_format)
else:
byte_count = 2 * len(output_value)
pdu = struct.pack(">BHHB", function_code, starting_address, byte_count // 2, byte_count)
if output_value and data_format:
pdu += struct.pack(data_format, *output_value)
else:
for j in output_value:
fmt = "H" if j >= 0 else "h"
pdu += struct.pack(">" + fmt, j)
# data_format is now used to process response which is always 2 registers:
# 1) data address of first register, 2) number of registers written
data_format = ">HH"
if expected_length < 0:
# No length was specified and calculated length can be used:
                # slave + func + address(2 bytes) + quantity(2 bytes) + crc1 + crc2
expected_length = 8
elif function_code == defines.READ_EXCEPTION_STATUS:
pdu = struct.pack(">B", function_code)
data_format = ">B"
if expected_length < 0:
# No length was specified and calculated length can be used:
expected_length = 5
elif function_code == defines.DIAGNOSTIC:
            # the diagnostic sub-function code is passed in starting_address
pdu = struct.pack(">BH", function_code, starting_address)
if len(output_value) > 0:
for j in output_value:
# copy data in pdu
pdu += struct.pack(">B", j)
if not data_format:
data_format = ">" + (len(output_value) * "B")
if expected_length < 0:
# No length was specified and calculated length can be used:
# slave + func + SubFunc1 + SubFunc2 + Data + crc1 + crc2
expected_length = len(output_value) + 6
elif function_code == defines.READ_WRITE_MULTIPLE_REGISTERS:
is_read_function = True
byte_count = 2 * len(output_value)
pdu = struct.pack(
">BHHHHB",
function_code, starting_address, quantity_of_x, write_starting_address_FC23,
len(output_value), byte_count
)
for j in output_value:
fmt = "H" if j >= 0 else "h"
# copy data in pdu
pdu += struct.pack(">" + fmt, j)
if not data_format:
data_format = ">" + (quantity_of_x * "H")
if expected_length < 0:
                # No length was specified and calculated length can be used:
                # slave + func + byte count + 2 bytes per register + crc1 + crc2
expected_length = 2 * quantity_of_x + 5
else:
raise ModbusFunctionNotSupportedError("The {0} function code is not supported. ".format(function_code))
# instantiate a query which implements the MAC (TCP or RTU) part of the protocol
query = self._make_query()
# add the mac part of the protocol to the request
request = query.build_request(pdu, slave)
# send the request to the slave
retval = call_hooks("modbus.Master.before_send", (self, request))
if retval is not None:
request = retval
if self._verbose:
LOGGER.debug(get_log_buffer("-> ", request))
self._send(request)
call_hooks("modbus.Master.after_send", (self,))
if slave != 0:
# receive the data from the slave
response = self._recv(expected_length)
retval = call_hooks("modbus.Master.after_recv", (self, response))
if retval is not None:
response = retval
if self._verbose:
LOGGER.debug(get_log_buffer("<- ", response))
# extract the pdu part of the response
response_pdu = query.parse_response(response)
# analyze the received data
(return_code, byte_2) = struct.unpack(">BB", response_pdu[0:2])
if return_code > 0x80:
# the slave has returned an error
exception_code = byte_2
raise ModbusError(exception_code)
else:
if is_read_function:
# get the values returned by the reading function
byte_count = byte_2
data = response_pdu[2:]
if byte_count != len(data):
# the byte count in the pdu is invalid
raise ModbusInvalidResponseError(
"Byte count is {0} while actual number of bytes is {1}. ".format(byte_count, len(data))
)
else:
# returns what is returned by the slave after a writing function
data = response_pdu[1:]
# returns the data as a tuple according to the data_format
# (calculated based on the function or user-defined)
result = struct.unpack(data_format, data)
if nb_of_digits > 0:
digits = []
for byte_val in result:
for i in range(8):
if len(digits) >= nb_of_digits:
break
digits.append(byte_val % 2)
byte_val = byte_val >> 1
result = tuple(digits)
return result
def set_timeout(self, timeout_in_sec):
"""Defines a timeout on the MAC layer"""
self._timeout = timeout_in_sec
def get_timeout(self):
"""Gets the current value of the MAC layer timeout"""
return self._timeout
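# Sketch (not part of this module): how a concrete Master subclass is typically
# driven. `master` stands for a TcpMaster or RtuMaster instance; the register
# layout is hypothetical.
#
#   regs = master.execute(1, defines.READ_HOLDING_REGISTERS, 0, 4)
#   # -> a 4-tuple of ints, one per register
#   floats = master.execute(1, defines.READ_HOLDING_REGISTERS, 0, 4, data_format=">ff")
#   # -> the same 8 data bytes decoded as two big-endian floats via struct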
class ModbusBlock(object):
"""This class represents the values for a range of addresses"""
def __init__(self, starting_address, size, name=''):
"""
        Constructor: defines the address range and creates the array of values
"""
self.starting_address = starting_address
self._data = [0] * size
self.size = len(self._data)
def is_in(self, starting_address, size):
"""
Returns true if a block with the given address and size
would overlap this block
"""
if starting_address > self.starting_address:
return (self.starting_address + self.size) > starting_address
elif starting_address < self.starting_address:
return (starting_address + size) > self.starting_address
return True
def __getitem__(self, item):
""""""
return self._data.__getitem__(item)
def __setitem__(self, item, value):
""""""
call_hooks("modbus.ModbusBlock.setitem", (self, item, value))
return self._data.__setitem__(item, value)
class Slave(object):
"""
This class define a modbus slave which is in charge of making the action
asked by a modbus query
"""
def __init__(self, slave_id, unsigned=True, memory=None):
"""Constructor"""
self._id = slave_id
# treat every value written to/read from register as an unsigned value
self.unsigned = unsigned
        # the map registering all blocks of the slave
self._blocks = {}
# a shortcut to find blocks per type
if memory is None:
self._memory = {
defines.COILS: [],
defines.DISCRETE_INPUTS: [],
defines.HOLDING_REGISTERS: [],
defines.ANALOG_INPUTS: [],
}
else:
self._memory = memory
# a lock for mutual access to the _blocks and _memory maps
self._data_lock = threading.RLock()
# map modbus function code to a function:
self._fn_code_map = {
defines.READ_COILS: self._read_coils,
defines.READ_DISCRETE_INPUTS: self._read_discrete_inputs,
defines.READ_INPUT_REGISTERS: self._read_input_registers,
defines.READ_HOLDING_REGISTERS: self._read_holding_registers,
defines.WRITE_SINGLE_COIL: self._write_single_coil,
defines.WRITE_SINGLE_REGISTER: self._write_single_register,
defines.WRITE_MULTIPLE_COILS: self._write_multiple_coils,
defines.WRITE_MULTIPLE_REGISTERS: self._write_multiple_registers,
}
def _get_block_and_offset(self, block_type, address, length):
"""returns the block and offset corresponding to the given address"""
for block in self._memory[block_type]:
if address >= block.starting_address:
offset = address - block.starting_address
if block.size >= offset + length:
return block, offset
raise ModbusError(defines.ILLEGAL_DATA_ADDRESS)
def _read_digital(self, block_type, request_pdu):
"""read the value of coils and discrete inputs"""
(starting_address, quantity_of_x) = struct.unpack(">HH", request_pdu[1:5])
if (quantity_of_x <= 0) or (quantity_of_x > 2000):
# maximum allowed size is 2000 bits in one reading
raise ModbusError(defines.ILLEGAL_DATA_VALUE)
block, offset = self._get_block_and_offset(block_type, starting_address, quantity_of_x)
values = block[offset:offset + quantity_of_x]
# pack bits in bytes
byte_count = quantity_of_x // 8
if (quantity_of_x % 8) > 0:
byte_count += 1
# write the response header
response = struct.pack(">B", byte_count)
i, byte_value = 0, 0
for coil in values:
if coil:
byte_value += (1 << i)
if i >= 7:
# write the values of 8 bits in a byte
response += struct.pack(">B", byte_value)
# reset the counters
i, byte_value = 0, 0
else:
i += 1
# if there is remaining bits: add one more byte with their values
if i > 0:
fmt = "B" if self.unsigned else "b"
response += struct.pack(">" + fmt, byte_value)
return response
def _read_coils(self, request_pdu):
"""handle read coils modbus function"""
call_hooks("modbus.Slave.handle_read_coils_request", (self, request_pdu))
return self._read_digital(defines.COILS, request_pdu)
def _read_discrete_inputs(self, request_pdu):
"""handle read discrete inputs modbus function"""
call_hooks("modbus.Slave.handle_read_discrete_inputs_request", (self, request_pdu))
return self._read_digital(defines.DISCRETE_INPUTS, request_pdu)
def _read_registers(self, block_type, request_pdu):
"""read the value of holding and input registers"""
(starting_address, quantity_of_x) = struct.unpack(">HH", request_pdu[1:5])
if (quantity_of_x <= 0) or (quantity_of_x > 125):
# maximum allowed size is 125 registers in one reading
LOGGER.debug("quantity_of_x is %d", quantity_of_x)
raise ModbusError(defines.ILLEGAL_DATA_VALUE)
# look for the block corresponding to the request
block, offset = self._get_block_and_offset(block_type, starting_address, quantity_of_x)
# get the values
values = block[offset:offset + quantity_of_x]
# write the response header
response = struct.pack(">B", 2 * quantity_of_x)
# add the values of every register on 2 bytes
for reg in values:
fmt = "H" if self.unsigned else "h"
response += struct.pack(">" + fmt, reg)
return response
def _read_holding_registers(self, request_pdu):
"""handle read coils modbus function"""
call_hooks("modbus.Slave.handle_read_holding_registers_request", (self, request_pdu))
return self._read_registers(defines.HOLDING_REGISTERS, request_pdu)
def _read_input_registers(self, request_pdu):
"""handle read coils modbus function"""
call_hooks("modbus.Slave.handle_read_input_registers_request", (self, request_pdu))
return self._read_registers(defines.ANALOG_INPUTS, request_pdu)
def _write_multiple_registers(self, request_pdu):
"""execute modbus function 16"""
call_hooks("modbus.Slave.handle_write_multiple_registers_request", (self, request_pdu))
# get the starting address and the number of items from the request pdu
(starting_address, quantity_of_x, byte_count) = struct.unpack(">HHB", request_pdu[1:6])
if (quantity_of_x <= 0) or (quantity_of_x > 123) or (byte_count != (quantity_of_x * 2)):
# maximum allowed size is 123 registers in one reading
raise ModbusError(defines.ILLEGAL_DATA_VALUE)
# look for the block corresponding to the request
block, offset = self._get_block_and_offset(defines.HOLDING_REGISTERS, starting_address, quantity_of_x)
count = 0
for i in range(quantity_of_x):
count += 1
fmt = "H" if self.unsigned else "h"
block[offset + i] = struct.unpack(">" + fmt, request_pdu[6 + 2 * i:8 + 2 * i])[0]
return struct.pack(">HH", starting_address, count)
def _write_multiple_coils(self, request_pdu):
"""execute modbus function 15"""
call_hooks("modbus.Slave.handle_write_multiple_coils_request", (self, request_pdu))
# get the starting address and the number of items from the request pdu
(starting_address, quantity_of_x, byte_count) = struct.unpack(">HHB", request_pdu[1:6])
expected_byte_count = quantity_of_x // 8
if (quantity_of_x % 8) > 0:
expected_byte_count += 1
if (quantity_of_x <= 0) or (quantity_of_x > 1968) or (byte_count != expected_byte_count):
# maximum allowed size is 1968 coils
raise ModbusError(defines.ILLEGAL_DATA_VALUE)
# look for the block corresponding to the request
block, offset = self._get_block_and_offset(defines.COILS, starting_address, quantity_of_x)
count = 0
for i in range(byte_count):
if count >= quantity_of_x:
break
fmt = "B" if self.unsigned else "b"
(byte_value,) = struct.unpack(">" + fmt, request_pdu[6 + i:7 + i])
for j in range(8):
if count >= quantity_of_x:
break
if byte_value & (1 << j):
block[offset + i * 8 + j] = 1
else:
block[offset + i * 8 + j] = 0
count += 1
return struct.pack(">HH", starting_address, count)
def _write_single_register(self, request_pdu):
"""execute modbus function 6"""
call_hooks("modbus.Slave.handle_write_single_register_request", (self, request_pdu))
fmt = "H" if self.unsigned else "h"
(data_address, value) = struct.unpack(">H" + fmt, request_pdu[1:5])
block, offset = self._get_block_and_offset(defines.HOLDING_REGISTERS, data_address, 1)
block[offset] = value
# returns echo of the command
return request_pdu[1:]
def _write_single_coil(self, request_pdu):
"""execute modbus function 5"""
call_hooks("modbus.Slave.handle_write_single_coil_request", (self, request_pdu))
(data_address, value) = struct.unpack(">HH", request_pdu[1:5])
block, offset = self._get_block_and_offset(defines.COILS, data_address, 1)
if value == 0:
block[offset] = 0
elif value == 0xff00:
block[offset] = 1
else:
raise ModbusError(defines.ILLEGAL_DATA_VALUE)
# returns echo of the command
return request_pdu[1:]
def handle_request(self, request_pdu, broadcast=False):
"""
parse the request pdu, makes the corresponding action
and returns the response pdu
"""
# thread-safe
with self._data_lock:
try:
retval = call_hooks("modbus.Slave.handle_request", (self, request_pdu))
if retval is not None:
return retval
# get the function code
(function_code,) = struct.unpack(">B", request_pdu[0:1])
# check if the function code is valid. If not returns error response
if function_code not in self._fn_code_map:
raise ModbusError(defines.ILLEGAL_FUNCTION)
# if read query is broadcasted raises an error
cant_be_broadcasted = (
defines.READ_COILS,
defines.READ_DISCRETE_INPUTS,
defines.READ_INPUT_REGISTERS,
defines.READ_HOLDING_REGISTERS
)
if broadcast and (function_code in cant_be_broadcasted):
raise ModbusInvalidRequestError("Function %d can not be broadcasted" % function_code)
# execute the corresponding function
response_pdu = self._fn_code_map[function_code](request_pdu)
if response_pdu:
if broadcast:
call_hooks("modbus.Slave.on_handle_broadcast", (self, response_pdu))
LOGGER.debug("broadcast: %s", get_log_buffer("!!", response_pdu))
return ""
else:
return struct.pack(">B", function_code) + response_pdu
raise Exception("No response for function %d" % function_code)
except ModbusError as excpt:
LOGGER.debug(str(excpt))
call_hooks("modbus.Slave.on_exception", (self, function_code, excpt))
return struct.pack(">BB", function_code + 128, excpt.get_exception_code())
def add_block(self, block_name: str, block_type: int, starting_address: int, size: int) -> None:
"""
通过名字添加一个新的唯一的block
@param block_name: block名称
@param block_type: block类型
@param starting_address: 起始地址
@param size: 数据长度
"""
# 进程安全
with self._data_lock:
if size <= 0:
raise InvalidArgumentError("size must be a positive number")
if starting_address < 0:
raise InvalidArgumentError("starting address must be zero or positive number")
if block_name in self._blocks:
raise DuplicatedKeyError("Block {0} already exists. ".format(block_name))
if block_type not in self._memory:
raise InvalidModbusBlockError("Invalid block type {0}".format(block_type))
# check that the new block doesn't overlap an existing block
# it means that only 1 block per type must correspond to a given address
# for example: it must not have 2 holding registers at address 100
index = 0
for i in range(len(self._memory[block_type])):
block = self._memory[block_type][i]
if block.is_in(starting_address, size):
raise OverlapModbusBlockError(
"Overlap block at {0} size {1}".format(block.starting_address, block.size)
)
if block.starting_address > starting_address:
index = i
break
# if the block is ok: register it
self._blocks[block_name] = (block_type, starting_address)
# add it in the 'per type' shortcut
self._memory[block_type].insert(index, ModbusBlock(starting_address, size, block_name))
def remove_block(self, block_name):
"""
Remove the block with the given name.
Raise an exception if not found
"""
# thread safe
with self._data_lock:
block = self._get_block(block_name)
# the block has been found: remove it from the shortcut
block_type = self._blocks.pop(block_name)[0]
self._memory[block_type].remove(block)
def remove_all_blocks(self):
"""
Remove all the blocks
"""
# thread safe
with self._data_lock:
self._blocks.clear()
for key in self._memory:
self._memory[key] = []
def _get_block(self, block_name):
"""Find a block by its name and raise and exception if not found"""
if block_name not in self._blocks:
raise MissingKeyError("block {0} not found".format(block_name))
(block_type, starting_address) = self._blocks[block_name]
for block in self._memory[block_type]:
if block.starting_address == starting_address:
return block
raise Exception("Bug?: the block {0} is not registered properly in memory".format(block_name))
def set_values(self, block_name, address, values):
"""
在指定的地址添加一个值,如果values是列表或者元组,每一个元素都会被写入,如果是一个值,只写入值
"""
# 进程安全
with self._data_lock:
block = self._get_block(block_name)
# the block has been found
# check that it doesn't write out of the block
offset = address - block.starting_address
size = 1
if isinstance(values, list) or isinstance(values, tuple):
size = len(values)
if (offset < 0) or ((offset + size) > block.size):
raise OutOfModbusBlockError(
"address {0} size {1} is out of block {2}".format(address, size, block_name)
)
# if Ok: write the values
if isinstance(values, list) or isinstance(values, tuple):
block[offset:offset + len(values)] = values
else:
block[offset] = values
def get_values(self, block_name, address, size=1):
"""
return the values of n items at the given address of the given block
"""
# thread safe
with self._data_lock:
block = self._get_block(block_name)
# the block has been found
# check that it doesn't write out of the block
offset = address - block.starting_address
if (offset < 0) or ((offset + size) > block.size):
raise OutOfModbusBlockError(
"address {0} size {1} is out of block {2}".format(address, size, block_name)
)
# returns the values
if size == 1:
return tuple([block[offset], ])
else:
return tuple(block[offset:offset + size])
class Databank(object):
"""A databank is a shared place containing the data of all slaves"""
def __init__(self, error_on_missing_slave=True):
"""Constructor"""
# the map of slaves by ids
self._slaves = {}
# protect access to the map of slaves
self._lock = threading.RLock()
self.error_on_missing_slave = error_on_missing_slave
def add_slave(self, slave_id, unsigned=True, memory=None):
"""Add a new slave with the given id"""
with self._lock:
if (slave_id <= 0) or (slave_id > 255):
raise Exception("Invalid slave id {0}".format(slave_id))
if slave_id not in self._slaves:
self._slaves[slave_id] = Slave(slave_id, unsigned, memory)
return self._slaves[slave_id]
else:
raise DuplicatedKeyError("Slave {0} already exists".format(slave_id))
def get_slave(self, slave_id):
"""Get the slave with the given id"""
with self._lock:
if slave_id in self._slaves:
return self._slaves[slave_id]
else:
raise MissingKeyError("Slave {0} doesn't exist".format(slave_id))
def remove_slave(self, slave_id):
"""Remove the slave with the given id"""
with self._lock:
if slave_id in self._slaves:
self._slaves.pop(slave_id)
else:
raise MissingKeyError("Slave {0} already exists".format(slave_id))
def remove_all_slaves(self):
"""clean the list of slaves"""
with self._lock:
self._slaves.clear()
def handle_request(self, query, request):
"""
when a request is received, handle it and returns the response pdu
"""
request_pdu = ""
try:
# extract the pdu and the slave id
(slave_id, request_pdu) = query.parse_request(request)
# get the slave and let him executes the action
if slave_id == 0:
# broadcast
for key in self._slaves:
self._slaves[key].handle_request(request_pdu, broadcast=True)
return
else:
try:
slave = self.get_slave(slave_id)
except MissingKeyError:
if self.error_on_missing_slave:
raise
else:
return ""
response_pdu = slave.handle_request(request_pdu)
# make the full response
response = query.build_response(response_pdu)
return response
except ModbusInvalidRequestError as excpt:
# Request is invalid, do not send any response
LOGGER.error("invalid request: " + str(excpt))
return ""
except MissingKeyError as excpt:
# No slave with this ID in server, do not send any response
LOGGER.error("handle request failed: " + str(excpt))
return ""
except Exception as excpt:
call_hooks("modbus.Databank.on_error", (self, excpt, request_pdu))
LOGGER.error("handle request failed: " + str(excpt))
# If the request was not handled correctly, return a server error response
func_code = 1
if len(request_pdu) > 0:
(func_code,) = struct.unpack(">B", request_pdu[0:1])
return struct.pack(">BB", func_code + 0x80, defines.SLAVE_DEVICE_FAILURE)
class Server(object):
"""
This class owns several slaves and defines an interface
to be implemented for a TCP or RTU server
"""
def __init__(self, databank=None):
"""Constructor"""
# never use a mutable type as default argument
self._databank = databank if databank else Databank()
self._verbose = False
self._thread = None
self._go = None
self._make_thread()
def _do_init(self):
"""executed before the server starts: to be overridden"""
pass
def _do_exit(self):
"""executed after the server stops: to be overridden"""
pass
def _do_run(self):
"""main function of the server: to be overridden"""
pass
def _make_thread(self):
"""create the main thread of the server"""
self._thread = threading.Thread(target=Server._run_server, args=(self,))
self._go = threading.Event()
def set_verbose(self, verbose):
"""if verbose is true the sent and received packets will be logged"""
self._verbose = verbose
def get_db(self):
"""returns the databank"""
return self._databank
def add_slave(self, slave_id, unsigned=True, memory=None):
"""
向server添加一个slave
"""
return self._databank.add_slave(slave_id, unsigned, memory)
def get_slave(self, slave_id):
"""
通过给出的slave_id获取slave
"""
return self._databank.get_slave(slave_id)
def remove_slave(self, slave_id):
"""remove the slave with the given id"""
self._databank.remove_slave(slave_id)
def remove_all_slaves(self):
"""remove the slave with the given id"""
self._databank.remove_all_slaves()
def _make_query(self):
"""
Returns an instance of a Query subclass implementing
the MAC layer protocol
"""
raise NotImplementedError()
def start(self):
"""Start the server. It will handle request"""
self._go.set()
self._thread.start()
def stop(self):
"""stop the server. It doesn't handle request anymore"""
if self._thread.is_alive():
self._go.clear()
self._thread.join()
def _run_server(self):
"""main function of the main thread"""
try:
self._do_init()
while self._go.isSet():
self._do_run()
LOGGER.info("%s has stopped", self.__class__)
self._do_exit()
except Exception as excpt:
LOGGER.error("server error: %s", str(excpt))
# make possible to rerun in future
self._make_thread()
def _handle(self, request):
"""handle a received sentence"""
if self._verbose:
LOGGER.debug(get_log_buffer("-->", request))
# gets a query for analyzing the request
query = self._make_query()
retval = call_hooks("modbus.Server.before_handle_request", (self, request))
if retval:
request = retval
response = self._databank.handle_request(query, request)
retval = call_hooks("modbus.Server.after_handle_request", (self, response))
if retval:
response = retval
if response and self._verbose:
LOGGER.debug(get_log_buffer("<--", response))
return response | zdpapi-modbus | /zdpapi_modbus-1.7.1.tar.gz/zdpapi_modbus-1.7.1/zdpapi_modbus/libs/modbus_tk/modbus.py | modbus.py |
import re
import json
import warnings
import contextlib
from .pymysql.err import (
Warning, Error, InterfaceError, DataError,
DatabaseError, OperationalError, IntegrityError, InternalError,
NotSupportedError, ProgrammingError)
from .log import logger
from .connection import FIELD_TYPE
# https://github.com/PyMySQL/PyMySQL/blob/master/pymysql/cursors.py#L11-L18
#: Regular expression for :meth:`Cursor.executemany`.
#: executemany only supports simple bulk insert.
#: You can use it to load a large dataset.
RE_INSERT_VALUES = re.compile(
r"\s*((?:INSERT|REPLACE)\s.+\sVALUES?\s+)" +
r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))" +
r"(\s*(?:ON DUPLICATE.*)?);?\s*\Z",
re.IGNORECASE | re.DOTALL)
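# Illustration (comment only) of what RE_INSERT_VALUES captures; the VALUES
# group is isolated so that executemany() can repeat it once per row:
#
#     m = RE_INSERT_VALUES.match(
#         "INSERT INTO t (a, b) VALUES (%s, %s) ON DUPLICATE KEY UPDATE a=a")
#     m.group(1)  # "INSERT INTO t (a, b) VALUES "
#     m.group(2)  # "(%s, %s)"
#     m.group(3)  # " ON DUPLICATE KEY UPDATE a=a"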
class Cursor:
"""Cursor is used to interact with the database."""
#: Max statement size which :meth:`executemany` generates.
#:
#: Max size of allowed statement is max_allowed_packet -
#: packet_header_size.
#: Default value of max_allowed_packet is 1048576.
max_stmt_length = 1024000
def __init__(self, connection, echo=False):
"""Do not create an instance of a Cursor yourself. Call
connections.Connection.cursor().
"""
self._connection = connection
self._loop = self._connection.loop
self._description = None
self._rownumber = 0
self._rowcount = -1
self._arraysize = 1
self._executed = None
self._result = None
self._rows = None
self._lastrowid = None
self._echo = echo
@property
def connection(self):
"""This read-only attribute return a reference to the Connection
object on which the cursor was created."""
return self._connection
@property
def description(self):
"""This read-only attribute is a sequence of 7-item sequences.
Each of these sequences is a collections.namedtuple containing
information describing one result column:
0. name: the name of the column returned.
1. type_code: the type of the column.
2. display_size: the actual length of the column in bytes.
3. internal_size: the size in bytes of the column associated to
this column on the server.
4. precision: total number of significant digits in columns of
type NUMERIC. None for other types.
5. scale: count of decimal digits in the fractional part in
columns of type NUMERIC. None for other types.
6. null_ok: always None as it is not easy to retrieve from the protocol.
This attribute will be None for operations that do not
return rows or if the cursor has not had an operation invoked
via the execute() method yet.
"""
return self._description
@property
def rowcount(self):
"""Returns the number of rows that has been produced of affected.
This read-only attribute specifies the number of rows that the
last :meth:`execute` produced (for Data Query Language
statements like SELECT) or affected (for Data Manipulation
Language statements like UPDATE or INSERT).
The attribute is -1 in case no .execute() has been performed
on the cursor or the row count of the last operation if it
can't be determined by the interface.
"""
return self._rowcount
@property
def rownumber(self):
"""Row index.
This read-only attribute provides the current 0-based index of the
cursor in the result set or ``None`` if the index cannot be
determined.
"""
return self._rownumber
@property
def arraysize(self):
"""How many rows will be returned by fetchmany() call.
This read/write attribute specifies the number of rows to
fetch at a time with fetchmany(). It defaults to
1 meaning to fetch a single row at a time.
"""
return self._arraysize
@arraysize.setter
def arraysize(self, val):
"""How many rows will be returned by fetchmany() call.
This read/write attribute specifies the number of rows to
fetch at a time with fetchmany(). It defaults to
1 meaning to fetch a single row at a time.
"""
self._arraysize = val
@property
def lastrowid(self):
"""This read-only property returns the value generated for an
AUTO_INCREMENT column by the previous INSERT or UPDATE statement
or None when there is no such value available. For example,
if you perform an INSERT into a table that contains an AUTO_INCREMENT
column, lastrowid returns the AUTO_INCREMENT value for the new row.
"""
return self._lastrowid
@property
def echo(self):
"""Return echo mode status."""
return self._echo
@property
def closed(self):
"""The readonly property that returns ``True`` if connections was
detached from current cursor
"""
return True if not self._connection else False
async def close(self):
"""Closing a cursor just exhausts all remaining data."""
conn = self._connection
if conn is None:
return
try:
while (await self.nextset()):
pass
finally:
self._connection = None
def _get_db(self):
if not self._connection:
raise ProgrammingError("Cursor closed")
return self._connection
def _check_executed(self):
if not self._executed:
raise ProgrammingError("execute() first")
def _conv_row(self, row):
return row
def setinputsizes(self, *args):
"""Does nothing, required by DB API."""
def setoutputsizes(self, *args):
"""Does nothing, required by DB API."""
async def nextset(self):
"""Get the next query set"""
conn = self._get_db()
current_result = self._result
if current_result is None or current_result is not conn._result:
return
if not current_result.has_next:
return
self._result = None
self._clear_result()
await conn.next_result()
await self._do_get_result()
return True
def _escape_args(self, args, conn):
if isinstance(args, (tuple, list)):
return tuple(conn.escape(arg) for arg in args)
elif isinstance(args, dict):
return dict((key, conn.escape(val)) for (key, val) in args.items())
else:
# If it's not a dictionary let's try escaping it anyway.
# Worst case it will throw a ValueError
return conn.escape(args)
def mogrify(self, query, args=None):
""" Returns the exact string that is sent to the database by calling
the execute() method. This method follows the extension to the DB
API 2.0 followed by Psycopg.
:param query: ``str`` sql statement
:param args: ``tuple`` or ``list`` of arguments for sql query
"""
conn = self._get_db()
if args is not None:
query = query % self._escape_args(args, conn)
return query
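# For example (assuming ``cur`` is a cursor from an open connection),
# mogrify renders the statement client-side without sending anything to
# the server:
#
#     sql = cur.mogrify("SELECT * FROM t1 WHERE id = %s", (5,))
#     # sql == "SELECT * FROM t1 WHERE id = 5"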
async def execute(self, query, args=None):
"""Executes the given operation
Executes the given operation substituting any markers with
the given parameters.
For example, getting all rows where id is 5:
cursor.execute("SELECT * FROM t1 WHERE id = %s", (5,))
:param query: ``str`` sql statement
:param args: ``tuple`` or ``list`` of arguments for sql query
:returns: ``int``, number of rows that have been produced or affected
"""
conn = self._get_db()
while (await self.nextset()):
pass
if args is not None:
query = query % self._escape_args(args, conn)
await self._query(query)
self._executed = query
if self._echo:
logger.info(query)
logger.info("%r", args)
return self._rowcount
async def executemany(self, query, args):
"""Execute the given operation multiple times
The executemany() method will execute the operation iterating
over the sequence of parameters in args.
Example: Inserting 3 new employees and their phone numbers
data = [
    ('Jane', '555-001'),
    ('Joe', '555-002'),
    ('John', '555-003')
]
stmt = "INSERT INTO employees (name, phone) VALUES (%s, %s)"
await cursor.executemany(stmt, data)
INSERT or REPLACE statements are optimized by batching the data,
that is using the MySQL multiple rows syntax.
:param query: `str`, sql statement
:param args: ``tuple`` or ``list`` of arguments for sql query
"""
if not args:
return
if self._echo:
logger.info("CALL %s", query)
logger.info("%r", args)
m = RE_INSERT_VALUES.match(query)
if m:
q_prefix = m.group(1)
q_values = m.group(2).rstrip()
q_postfix = m.group(3) or ''
assert q_values[0] == '(' and q_values[-1] == ')'
return (await self._do_execute_many(
q_prefix, q_values, q_postfix, args, self.max_stmt_length,
self._get_db().encoding))
else:
rows = 0
for arg in args:
await self.execute(query, arg)
rows += self._rowcount
self._rowcount = rows
return self._rowcount
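# Sketch of the batching described above: a matching INSERT is rewritten
# into the MySQL multi-row syntax, so the rows below travel in a single
# statement instead of three round trips (``cur`` is assumed to be an
# open cursor):
#
#     await cur.executemany(
#         "INSERT INTO employees (name, phone) VALUES (%s, %s)",
#         [('Jane', '555-001'), ('Joe', '555-002'), ('John', '555-003')])
#     # sends: INSERT INTO employees (name, phone)
#     #        VALUES ('Jane','555-001'),('Joe','555-002'),('John','555-003')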
async def _do_execute_many(self, prefix, values, postfix, args,
max_stmt_length, encoding):
conn = self._get_db()
escape = self._escape_args
if isinstance(prefix, str):
prefix = prefix.encode(encoding)
if isinstance(postfix, str):
postfix = postfix.encode(encoding)
sql = bytearray(prefix)
args = iter(args)
v = values % escape(next(args), conn)
if isinstance(v, str):
v = v.encode(encoding, 'surrogateescape')
sql += v
rows = 0
for arg in args:
v = values % escape(arg, conn)
if isinstance(v, str):
v = v.encode(encoding, 'surrogateescape')
if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length:
r = await self.execute(sql + postfix)
rows += r
sql = bytearray(prefix)
else:
sql += b','
sql += v
r = await self.execute(sql + postfix)
rows += r
self._rowcount = rows
return rows
async def callproc(self, procname, args=()):
"""Execute stored procedure procname with args
Compatibility warning: PEP-249 specifies that any modified
parameters must be returned. This is currently impossible
as they are only available by storing them in a server
variable and then retrieved by a query. Since stored
procedures return zero or more result sets, there is no
reliable way to get at OUT or INOUT parameters via callproc.
The server variables are named @_procname_n, where procname
is the parameter above and n is the position of the parameter
(from zero). Once all result sets generated by the procedure
have been fetched, you can issue a SELECT @_procname_0, ...
query using .execute() to get any OUT or INOUT values.
Compatibility warning: The act of calling a stored procedure
itself creates an empty result set. This appears after any
result sets generated by the procedure. This is non-standard
behavior with respect to the DB-API. Be sure to use nextset()
to advance through all result sets; otherwise you may get
disconnected.
:param procname: ``str``, name of procedure to execute on server
:param args: sequence of parameters to use with the procedure
:returns: the original args.
"""
conn = self._get_db()
if self._echo:
logger.info("CALL %s", procname)
logger.info("%r", args)
for index, arg in enumerate(args):
q = "SET @_%s_%d=%s" % (procname, index, conn.escape(arg))
await self._query(q)
await self.nextset()
_args = ','.join('@_%s_%d' % (procname, i) for i in range(len(args)))
q = "CALL %s(%s)" % (procname, _args)
await self._query(q)
self._executed = q
return args
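# Example for the OUT-parameter caveat documented above, assuming ``cur``
# is an open cursor and a hypothetical procedure
# ``multiply(IN a INT, IN b INT, OUT r INT)`` exists on the server:
#
#     await cur.callproc('multiply', (5, 6, 0))
#     while (await cur.nextset()):   # drain the procedure's result sets
#         pass
#     await cur.execute('SELECT @_multiply_2')
#     print(await cur.fetchone())    # (30,)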
def fetchone(self):
"""Fetch the next row """
self._check_executed()
fut = self._loop.create_future()
if self._rows is None or self._rownumber >= len(self._rows):
fut.set_result(None)
return fut
result = self._rows[self._rownumber]
self._rownumber += 1
fut.set_result(result)
return fut
def fetchmany(self, size=None):
"""Returns the next set of rows of a query result, returning a
list of tuples. When no more rows are available, it returns an
empty list.
The number of rows returned can be specified using the size argument,
which defaults to the current ``arraysize`` (one row unless changed)
:param size: ``int`` number of rows to return
:returns: ``list`` of fetched rows
"""
self._check_executed()
fut = self._loop.create_future()
if self._rows is None:
fut.set_result([])
return fut
end = self._rownumber + (size or self._arraysize)
result = self._rows[self._rownumber:end]
self._rownumber = min(end, len(self._rows))
fut.set_result(result)
return fut
def fetchall(self):
"""Returns all rows of a query result set
:returns: ``list`` of fetched rows
"""
self._check_executed()
fut = self._loop.create_future()
if self._rows is None:
fut.set_result([])
return fut
if self._rownumber:
result = self._rows[self._rownumber:]
else:
result = self._rows
self._rownumber = len(self._rows)
fut.set_result(result)
return fut
def scroll(self, value, mode='relative'):
"""Scroll the cursor in the result set to a new position according
to mode.
If mode is relative (default), value is taken as offset to the
current position in the result set, if set to absolute, value
states an absolute target position. An IndexError should be raised in
case a scroll operation would leave the result set. In this case,
the cursor position is left undefined (ideal would be to
not move the cursor at all).
:param int value: move cursor to next position according to mode.
:param str mode: scroll mode, possible modes: `relative` and `absolute`
"""
self._check_executed()
if mode == 'relative':
r = self._rownumber + value
elif mode == 'absolute':
r = value
else:
raise ProgrammingError("unknown scroll mode %s" % mode)
if not (0 <= r < len(self._rows)):
raise IndexError("out of range")
self._rownumber = r
fut = self._loop.create_future()
fut.set_result(None)
return fut
async def _query(self, q):
conn = self._get_db()
self._last_executed = q
self._clear_result()
await conn.query(q)
await self._do_get_result()
def _clear_result(self):
self._rownumber = 0
self._result = None
self._rowcount = 0
self._description = None
self._lastrowid = None
self._rows = None
async def _do_get_result(self):
conn = self._get_db()
self._rownumber = 0
self._result = result = conn._result
self._rowcount = result.affected_rows
self._description = result.description
self._lastrowid = result.insert_id
self._rows = result.rows
if result.warning_count > 0:
await self._show_warnings(conn)
async def _show_warnings(self, conn):
if self._result and self._result.has_next:
return
ws = await conn.show_warnings()
if ws is None:
return
for w in ws:
msg = w[-1]
warnings.warn(str(msg), Warning, 4)
Warning = Warning
Error = Error
InterfaceError = InterfaceError
DatabaseError = DatabaseError
DataError = DataError
OperationalError = OperationalError
IntegrityError = IntegrityError
InternalError = InternalError
ProgrammingError = ProgrammingError
NotSupportedError = NotSupportedError
def __aiter__(self):
return self
async def __anext__(self):
ret = await self.fetchone()
if ret is not None:
return ret
else:
raise StopAsyncIteration # noqa
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self.close()
return
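# The protocol methods above make the cursor usable both as an async
# context manager and as an async iterator; a minimal sketch, assuming
# ``conn`` is an open Connection:
#
#     async with conn.cursor() as cur:
#         await cur.execute("SELECT id, name FROM t1")
#         async for row in cur:
#             print(row)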
class _DeserializationCursorMixin:
async def _do_get_result(self):
await super()._do_get_result()
if self._rows:
self._rows = [self._deserialization_row(r) for r in self._rows]
def _deserialization_row(self, row):
if row is None:
return None
if isinstance(row, dict):
dict_flag = True
else:
row = list(row)
dict_flag = False
for index, (name, field_type, *n) in enumerate(self._description):
if field_type == FIELD_TYPE.JSON:
point = name if dict_flag else index
with contextlib.suppress(ValueError, TypeError):
row[point] = json.loads(row[point])
if dict_flag:
return row
else:
return tuple(row)
def _conv_row(self, row):
if row is None:
return None
row = super()._conv_row(row)
return self._deserialization_row(row)
class DeserializationCursor(_DeserializationCursorMixin, Cursor):
"""A cursor automatic deserialization of json type fields"""
class _DictCursorMixin:
# You can override this to use OrderedDict or other dict-like types.
dict_type = dict
async def _do_get_result(self):
await super()._do_get_result()
fields = []
if self._description:
for f in self._result.fields:
name = f.name
if name in fields:
name = f.table_name + '.' + name
fields.append(name)
self._fields = fields
if fields and self._rows:
self._rows = [self._conv_row(r) for r in self._rows]
def _conv_row(self, row):
if row is None:
return None
row = super()._conv_row(row)
return self.dict_type(zip(self._fields, row))
class DictCursor(_DictCursorMixin, Cursor):
"""A cursor which returns results as a dictionary"""
class SSCursor(Cursor):
"""Unbuffered Cursor, mainly useful for queries that return a lot of
data, or for connections to remote servers over a slow network.
Instead of copying every row of data into a buffer, this will fetch
rows as needed. The upside of this is that the client uses much less memory,
and rows are returned much faster when traveling over a slow network,
or if the result set is very big.
There are limitations, though. The MySQL protocol doesn't support
returning the total number of rows, so the only way to tell how many rows
there are is to iterate over every row returned. Also, it currently isn't
possible to scroll backwards, as only the current row is held in memory.
"""
async def close(self):
conn = self._connection
if conn is None:
return
if self._result is not None and self._result is conn._result:
await self._result._finish_unbuffered_query()
try:
while (await self.nextset()):
pass
finally:
self._connection = None
async def _query(self, q):
conn = self._get_db()
self._last_executed = q
await conn.query(q, unbuffered=True)
await self._do_get_result()
return self._rowcount
async def _read_next(self):
"""Read next row """
row = await self._result._read_rowdata_packet_unbuffered()
row = self._conv_row(row)
return row
async def fetchone(self):
""" Fetch next row """
self._check_executed()
row = await self._read_next()
if row is None:
return
self._rownumber += 1
return row
async def fetchall(self):
"""Fetch all, as per MySQLdb. Pretty useless for large queries, as
it is buffered.
"""
rows = []
while True:
row = await self.fetchone()
if row is None:
break
rows.append(row)
return rows
async def fetchmany(self, size=None):
"""Returns the next set of rows of a query result, returning a
list of tuples. When no more rows are available, it returns an
empty list.
The number of rows returned can be specified using the size argument,
which defaults to the current ``arraysize`` (one row unless changed)
:param size: ``int`` number of rows to return
:returns: ``list`` of fetched rows
"""
self._check_executed()
if size is None:
size = self._arraysize
rows = []
for i in range(size):
row = await self._read_next()
if row is None:
break
rows.append(row)
self._rownumber += 1
return rows
async def scroll(self, value, mode='relative'):
"""Scroll the cursor in the result set to a new position
according to mode. Same as :meth:`Cursor.scroll`, but moves the cursor
on the server side one row at a time. If you want to move 20 rows
forward, scroll will read and discard 20 rows. Currently only forward
scrolling is supported.
:param int value: move cursor to next position according to mode.
:param str mode: scroll mode, possible modes: `relative` and `absolute`
"""
self._check_executed()
if mode == 'relative':
if value < 0:
raise NotSupportedError("Backwards scrolling not supported "
"by this cursor")
for _ in range(value):
await self._read_next()
self._rownumber += value
elif mode == 'absolute':
if value < self._rownumber:
raise NotSupportedError(
"Backwards scrolling not supported by this cursor")
end = value - self._rownumber
for _ in range(end):
await self._read_next()
self._rownumber = value
else:
raise ProgrammingError("unknown scroll mode %s" % mode)
class SSDictCursor(_DictCursorMixin, SSCursor):
"""An unbuffered cursor, which returns results as a dictionary """ | zdpapi-mysql | /zdpapi_mysql-1.0.2-py3-none-any.whl/zdpapi_mysql/cursors.py | cursors.py |
import asyncio
import os
import socket
import struct
import sys
import warnings
import configparser
import getpass
from functools import partial
from .pymysql.charset import charset_by_name, charset_by_id
from .pymysql.constants import SERVER_STATUS
from .pymysql.constants import CLIENT
from .pymysql.constants import COMMAND
from .pymysql.constants import FIELD_TYPE
from .pymysql.util import byte2int, int2byte
from .pymysql.converters import (escape_item, encoders, decoders,
escape_string, escape_bytes_prefixed, through)
from .pymysql.err import (Warning, Error,
InterfaceError, DataError, DatabaseError,
OperationalError,
IntegrityError, InternalError, NotSupportedError,
ProgrammingError)
from .pymysql.connections import TEXT_TYPES, MAX_PACKET_LEN, DEFAULT_CHARSET
from .pymysql.connections import _auth
from .pymysql.connections import pack_int24
from .pymysql.connections import MysqlPacket
from .pymysql.connections import FieldDescriptorPacket
from .pymysql.connections import EOFPacketWrapper
from .pymysql.connections import OKPacketWrapper
from .pymysql.connections import LoadLocalPacketWrapper
from .pymysql.connections import lenenc_int
# from aiomysql.utils import _convert_to_str
from .cursors import Cursor
from .utils import _ConnectionContextManager, _ContextManager
from .log import logger
DEFAULT_USER = getpass.getuser()
def connect(host="localhost", user=None, password="",
db=None, port=3306, unix_socket=None,
charset='', sql_mode=None,
read_default_file=None, conv=decoders, use_unicode=None,
client_flag=0, cursorclass=Cursor, init_command=None,
connect_timeout=None, read_default_group=None,
no_delay=None, autocommit=False, echo=False,
local_infile=False, loop=None, ssl=None, auth_plugin='',
program_name='', server_public_key=None):
"""See connections.Connection.__init__() for information about
defaults."""
coro = _connect(host=host, user=user, password=password, db=db,
port=port, unix_socket=unix_socket, charset=charset,
sql_mode=sql_mode, read_default_file=read_default_file,
conv=conv, use_unicode=use_unicode,
client_flag=client_flag, cursorclass=cursorclass,
init_command=init_command,
connect_timeout=connect_timeout,
read_default_group=read_default_group,
no_delay=no_delay, autocommit=autocommit, echo=echo,
local_infile=local_infile, loop=loop, ssl=ssl,
auth_plugin=auth_plugin, program_name=program_name,
server_public_key=server_public_key)
return _ConnectionContextManager(coro)
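# Minimal usage sketch (host and credentials below are placeholders):
#
#     async def main():
#         conn = await connect(host='127.0.0.1', port=3306,
#                              user='root', password='', db='test')
#         async with conn.cursor() as cur:
#             await cur.execute("SELECT 42")
#             print(await cur.fetchall())   # [(42,)]
#         await conn.ensure_closed()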
async def _connect(*args, **kwargs):
conn = Connection(*args, **kwargs)
await conn._connect()
return conn
class Connection:
"""Representation of a socket with a mysql server.
The proper way to get an instance of this class is to call
connect().
"""
def __init__(self, host="localhost", user=None, password="",
db=None, port=3306, unix_socket=None,
charset='', sql_mode=None,
read_default_file=None, conv=decoders, use_unicode=None,
client_flag=0, cursorclass=Cursor, init_command=None,
connect_timeout=None, read_default_group=None,
no_delay=None, autocommit=False, echo=False,
local_infile=False, loop=None, ssl=None, auth_plugin='',
program_name='', server_public_key=None):
"""
Establish a connection to the MySQL database. Accepts several
arguments:
:param host: Host where the database server is located
:param user: Username to log in as
:param password: Password to use.
:param db: Database to use, None to not use a particular one.
:param port: MySQL port to use, default is usually OK.
:param unix_socket: Optionally, you can use a unix socket rather
than TCP/IP.
:param charset: Charset you want to use.
:param sql_mode: Default SQL_MODE to use.
:param read_default_file: Specifies my.cnf file to read these
parameters from under the [client] section.
:param conv: Decoders dictionary to use instead of the default one.
This is used to provide custom marshalling of types.
See converters.
:param use_unicode: Whether or not to default to unicode strings.
:param client_flag: Custom flags to send to MySQL. Find
potential values in constants.CLIENT.
:param cursorclass: Custom cursor class to use.
:param init_command: Initial SQL statement to run when connection is
established.
:param connect_timeout: Timeout before throwing an exception
when connecting.
:param read_default_group: Group to read from in the configuration
file.
:param no_delay: Disable Nagle's algorithm on the socket
:param autocommit: Autocommit mode. None means use server default.
(default: False)
:param local_infile: boolean to enable the use of LOAD DATA LOCAL
command. (default: False)
:param ssl: Optional SSL Context to force SSL
:param auth_plugin: String to manually specify the authentication
plugin to use, e.g. you will want to use mysql_clear_password
when using IAM authentication with Amazon RDS.
(default: Server Default)
:param program_name: Program name string to provide when
handshaking with MySQL. (default: sys.argv[0])
:param server_public_key: SHA256 authentication plugin public
key value.
:param loop: asyncio loop
"""
self._loop = loop or asyncio.get_event_loop()
if use_unicode is None and sys.version_info[0] > 2:
use_unicode = True
if read_default_file:
if not read_default_group:
read_default_group = "client"
cfg = configparser.RawConfigParser()
cfg.read(os.path.expanduser(read_default_file))
_config = partial(cfg.get, read_default_group)
user = _config("user", fallback=user)
password = _config("password", fallback=password)
host = _config("host", fallback=host)
db = _config("database", fallback=db)
unix_socket = _config("socket", fallback=unix_socket)
port = int(_config("port", fallback=port))
charset = _config("default-character-set", fallback=charset)
# pymysql port
if no_delay is not None:
warnings.warn("no_delay option is deprecated", DeprecationWarning)
no_delay = bool(no_delay)
else:
no_delay = True
self._host = host
self._port = port
self._user = user or DEFAULT_USER
self._password = password or ""
self._db = db
self._no_delay = no_delay
self._echo = echo
self._last_usage = self._loop.time()
self._client_auth_plugin = auth_plugin
self._server_auth_plugin = ""
self._auth_plugin_used = ""
self.server_public_key = server_public_key
self.salt = None
from . import __version__
self._connect_attrs = {
'_client_name': 'aiomysql',
'_pid': str(os.getpid()),
'_client_version': __version__,
}
if program_name:
self._connect_attrs["program_name"] = program_name
elif sys.argv:
self._connect_attrs["program_name"] = sys.argv[0]
self._unix_socket = unix_socket
if charset:
self._charset = charset
self.use_unicode = True
else:
self._charset = DEFAULT_CHARSET
self.use_unicode = False
if use_unicode is not None:
self.use_unicode = use_unicode
self._ssl_context = ssl
if ssl:
client_flag |= CLIENT.SSL
self._encoding = charset_by_name(self._charset).encoding
if local_infile:
client_flag |= CLIENT.LOCAL_FILES
client_flag |= CLIENT.CAPABILITIES
client_flag |= CLIENT.MULTI_STATEMENTS
if self._db:
client_flag |= CLIENT.CONNECT_WITH_DB
self.client_flag = client_flag
self.cursorclass = cursorclass
self.connect_timeout = connect_timeout
self._result = None
self._affected_rows = 0
self.host_info = "Not connected"
#: specified autocommit mode. None means use server default.
self.autocommit_mode = autocommit
self.encoders = encoders # Need for MySQLdb compatibility.
self.decoders = conv
self.sql_mode = sql_mode
self.init_command = init_command
# asyncio StreamReader, StreamWriter
self._reader = None
self._writer = None
# If connection was closed for specific reason, we should show that to
# user
self._close_reason = None
@property
def host(self):
"""MySQL server IP address or name"""
return self._host
@property
def port(self):
"""MySQL server TCP/IP port"""
return self._port
@property
def unix_socket(self):
"""MySQL Unix socket file location"""
return self._unix_socket
@property
def db(self):
"""Current database name."""
return self._db
@property
def user(self):
"""User used while connecting to MySQL"""
return self._user
@property
def echo(self):
"""Return echo mode status."""
return self._echo
@property
def last_usage(self):
"""Return time() when connection was used."""
return self._last_usage
@property
def loop(self):
return self._loop
@property
def closed(self):
"""The readonly property that returns ``True`` if connections is
closed.
"""
return self._writer is None
@property
def encoding(self):
"""Encoding employed for this connection."""
return self._encoding
@property
def charset(self):
"""Returns the character set for current connection."""
return self._charset
def close(self):
"""Close socket connection"""
if self._writer:
self._writer.transport.close()
self._writer = None
self._reader = None
async def ensure_closed(self):
"""Send quit command and then close socket connection"""
if self._writer is None:
# connection has been closed
return
send_data = struct.pack('<i', 1) + int2byte(COMMAND.COM_QUIT)
self._writer.write(send_data)
await self._writer.drain()
self.close()
async def autocommit(self, value):
"""Enable/disable autocommit mode for current MySQL session.
:param value: ``bool``, toggle autocommit
"""
self.autocommit_mode = bool(value)
current = self.get_autocommit()
if value != current:
await self._send_autocommit_mode()
def get_autocommit(self):
"""Returns autocommit status for current MySQL session.
:returns bool: current autocommit status."""
status = self.server_status & SERVER_STATUS.SERVER_STATUS_AUTOCOMMIT
return bool(status)
async def _read_ok_packet(self):
pkt = await self._read_packet()
if not pkt.is_ok_packet():
raise OperationalError(2014, "Command Out of Sync")
ok = OKPacketWrapper(pkt)
self.server_status = ok.server_status
return True
async def _send_autocommit_mode(self):
"""Set whether or not to commit after every execute() """
await self._execute_command(
COMMAND.COM_QUERY,
"SET AUTOCOMMIT = %s" % self.escape(self.autocommit_mode))
await self._read_ok_packet()
async def begin(self):
"""Begin transaction."""
await self._execute_command(COMMAND.COM_QUERY, "BEGIN")
await self._read_ok_packet()
async def commit(self):
"""Commit changes to stable storage."""
await self._execute_command(COMMAND.COM_QUERY, "COMMIT")
await self._read_ok_packet()
async def rollback(self):
"""Roll back the current transaction."""
await self._execute_command(COMMAND.COM_QUERY, "ROLLBACK")
await self._read_ok_packet()
async def select_db(self, db):
"""Set current db"""
await self._execute_command(COMMAND.COM_INIT_DB, db)
await self._read_ok_packet()
async def show_warnings(self):
"""SHOW WARNINGS"""
await self._execute_command(COMMAND.COM_QUERY, "SHOW WARNINGS")
result = MySQLResult(self)
await result.read()
return result.rows
def escape(self, obj):
""" Escape whatever value you pass to it"""
if isinstance(obj, str):
return "'" + self.escape_string(obj) + "'"
if isinstance(obj, bytes):
return escape_bytes_prefixed(obj)
return escape_item(obj, self._charset)
def literal(self, obj):
"""Alias for escape()"""
return self.escape(obj)
def escape_string(self, s):
if (self.server_status &
SERVER_STATUS.SERVER_STATUS_NO_BACKSLASH_ESCAPES):
return s.replace("'", "''")
return escape_string(s)
def cursor(self, *cursors):
"""Instantiates and returns a cursor
By default, :class:`Cursor` is returned. It is possible to also give a
custom cursor through the cursor_class parameter, but it needs to
be a subclass of :class:`Cursor`
:param cursor: custom cursor class.
:returns: instance of cursor, by default :class:`Cursor`
:raises TypeError: cursor_class is not a subclass of Cursor.
"""
self._ensure_alive()
self._last_usage = self._loop.time()
try:
if cursors and \
any(not issubclass(cursor, Cursor) for cursor in cursors):
raise TypeError('Custom cursor must be subclass of Cursor')
except TypeError:
raise TypeError('Custom cursor must be subclass of Cursor')
if cursors and len(cursors) == 1:
cur = cursors[0](self, self._echo)
elif cursors:
cursor_name = ''.join(map(lambda x: x.__name__, cursors)) \
.replace('Cursor', '') + 'Cursor'
cursor_class = type(cursor_name, cursors, {})
cur = cursor_class(self, self._echo)
else:
cur = self.cursorclass(self, self._echo)
fut = self._loop.create_future()
fut.set_result(cur)
return _ContextManager(fut)
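# Mixin composition sketch: passing several Cursor subclasses builds a
# combined cursor class on the fly (DeserializationCursor and DictCursor
# here are assumed to be imported from the cursors module):
#
#     cur = await conn.cursor(DeserializationCursor, DictCursor)
#     await cur.execute("SELECT id, payload FROM events")
#     row = await cur.fetchone()   # dict with JSON columns deserialized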
# The following methods are INTERNAL USE ONLY (called from Cursor)
async def query(self, sql, unbuffered=False):
# logger.debug("DEBUG: sending query: %s", _convert_to_str(sql))
if isinstance(sql, str):
sql = sql.encode(self.encoding, 'surrogateescape')
await self._execute_command(COMMAND.COM_QUERY, sql)
await self._read_query_result(unbuffered=unbuffered)
return self._affected_rows
async def next_result(self):
await self._read_query_result()
return self._affected_rows
def affected_rows(self):
return self._affected_rows
async def kill(self, thread_id):
arg = struct.pack('<I', thread_id)
await self._execute_command(COMMAND.COM_PROCESS_KILL, arg)
await self._read_ok_packet()
async def ping(self, reconnect=True):
"""Check if the server is alive"""
if self._writer is None and self._reader is None:
if reconnect:
await self._connect()
reconnect = False
else:
raise Error("Already closed")
try:
await self._execute_command(COMMAND.COM_PING, "")
await self._read_ok_packet()
except Exception:
if reconnect:
await self._connect()
await self.ping(False)
else:
raise
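# Keep-alive sketch for long-lived connections:
#
#     await conn.ping()                 # reconnects transparently if needed
#     await conn.ping(reconnect=False)  # raises instead of reconnecting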
async def set_charset(self, charset):
"""Sets the character set for the current connection"""
# Make sure charset is supported.
encoding = charset_by_name(charset).encoding
await self._execute_command(COMMAND.COM_QUERY, "SET NAMES %s"
% self.escape(charset))
await self._read_packet()
self._charset = charset
self._encoding = encoding
async def _connect(self):
# TODO: Set close callback
# raise OperationalError(2006,
# "MySQL server has gone away (%r)" % (e,))
try:
if self._unix_socket and self._host in ('localhost', '127.0.0.1'):
self._reader, self._writer = await \
asyncio.wait_for(
asyncio.open_unix_connection(
self._unix_socket),
timeout=self.connect_timeout)
self.host_info = "Localhost via UNIX socket: " + \
self._unix_socket
else:
self._reader, self._writer = await \
asyncio.wait_for(
asyncio.open_connection(
self._host,
self._port),
timeout=self.connect_timeout)
self._set_keep_alive()
self.host_info = "socket %s:%d" % (self._host, self._port)
# do not set no delay in case of unix_socket
if self._no_delay and not self._unix_socket:
self._set_nodelay(True)
self._next_seq_id = 0
await self._get_server_information()
await self._request_authentication()
self.connected_time = self._loop.time()
if self.sql_mode is not None:
await self.query("SET sql_mode=%s" % (self.sql_mode,))
if self.init_command is not None:
await self.query(self.init_command)
await self.commit()
if self.autocommit_mode is not None:
await self.autocommit(self.autocommit_mode)
except Exception as e:
if self._writer:
self._writer.transport.close()
self._reader = None
self._writer = None
raise OperationalError(2003,
"Can't connect to MySQL server on %r" %
self._host) from e
def _set_keep_alive(self):
transport = self._writer.transport
transport.pause_reading()
raw_sock = transport.get_extra_info('socket', default=None)
if raw_sock is None:
raise RuntimeError("Transport does not expose socket instance")
raw_sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
transport.resume_reading()
def _set_nodelay(self, value):
flag = int(bool(value))
transport = self._writer.transport
transport.pause_reading()
raw_sock = transport.get_extra_info('socket', default=None)
if raw_sock is None:
raise RuntimeError("Transport does not expose socket instance")
raw_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, flag)
transport.resume_reading()
def write_packet(self, payload):
"""Writes an entire "mysql packet" in its entirety to the network
adding its length and sequence number.
"""
# Internal note: when you build packet manually and calls
# _write_bytes() directly, you should set self._next_seq_id properly.
data = pack_int24(len(payload)) + int2byte(self._next_seq_id) + payload
self._write_bytes(data)
self._next_seq_id = (self._next_seq_id + 1) % 256
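# Wire layout produced above, for reference: a little-endian 3-byte payload
# length, a 1-byte sequence number, then the payload itself.
#
#     +--------------+------------+-------------+
#     | length (3 B) | seq (1 B)  | payload ... |
#     +--------------+------------+-------------+
#
# e.g. pack_int24(1) + int2byte(0) + int2byte(COMMAND.COM_PING) is a
# complete COM_PING packet.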
async def _read_packet(self, packet_type=MysqlPacket):
"""Read an entire "mysql packet" in its entirety from the network
and return a MysqlPacket type that represents the results.
"""
buff = b''
while True:
try:
packet_header = await self._read_bytes(4)
except asyncio.CancelledError:
self._close_on_cancel()
raise
btrl, btrh, packet_number = struct.unpack(
'<HBB', packet_header)
bytes_to_read = btrl + (btrh << 16)
# Outbound and inbound packets are numbered sequentially, so
# we increment in both write_packet and read_packet. The count
# is reset at new COMMAND PHASE.
if packet_number != self._next_seq_id:
raise InternalError(
"Packet sequence number wrong - got %d expected %d" %
(packet_number, self._next_seq_id))
self._next_seq_id = (self._next_seq_id + 1) % 256
try:
recv_data = await self._read_bytes(bytes_to_read)
except asyncio.CancelledError:
self._close_on_cancel()
raise
buff += recv_data
# https://dev.mysql.com/doc/internals/en/sending-more-than-16mbyte.html
if bytes_to_read == 0xffffff:
continue
if bytes_to_read < MAX_PACKET_LEN:
break
packet = packet_type(buff, self._encoding)
packet.check_error()
return packet
async def _read_bytes(self, num_bytes):
try:
data = await self._reader.readexactly(num_bytes)
except asyncio.IncompleteReadError as e:
msg = "Lost connection to MySQL server during query"
raise OperationalError(2013, msg) from e
except (IOError, OSError) as e:
msg = "Lost connection to MySQL server during query (%s)" % (e,)
raise OperationalError(2013, msg) from e
return data
def _write_bytes(self, data):
return self._writer.write(data)
async def _read_query_result(self, unbuffered=False):
self._result = None
if unbuffered:
try:
result = MySQLResult(self)
await result.init_unbuffered_query()
except BaseException:
result.unbuffered_active = False
result.connection = None
raise
else:
result = MySQLResult(self)
await result.read()
self._result = result
self._affected_rows = result.affected_rows
if result.server_status is not None:
self.server_status = result.server_status
def insert_id(self):
if self._result:
return self._result.insert_id
else:
return 0
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
if exc_type:
self.close()
else:
await self.ensure_closed()
return
async def _execute_command(self, command, sql):
self._ensure_alive()
# If the last query was unbuffered, make sure it finishes before
# sending new commands
if self._result is not None:
if self._result.unbuffered_active:
warnings.warn("Previous unbuffered result was left incomplete")
await self._result._finish_unbuffered_query()
while self._result.has_next:
await self.next_result()
self._result = None
if isinstance(sql, str):
sql = sql.encode(self._encoding)
chunk_size = min(MAX_PACKET_LEN, len(sql) + 1) # +1 is for command
prelude = struct.pack('<iB', chunk_size, command)
self._write_bytes(prelude + sql[:chunk_size - 1])
# logger.debug(dump_packet(prelude + sql))
self._next_seq_id = 1
if chunk_size < MAX_PACKET_LEN:
return
sql = sql[chunk_size - 1:]
while True:
chunk_size = min(MAX_PACKET_LEN, len(sql))
self.write_packet(sql[:chunk_size])
sql = sql[chunk_size:]
if not sql and chunk_size < MAX_PACKET_LEN:
break
async def _request_authentication(self):
# https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::HandshakeResponse
if int(self.server_version.split('.', 1)[0]) >= 5:
self.client_flag |= CLIENT.MULTI_RESULTS
if self.user is None:
raise ValueError("Did not specify a username")
if self._ssl_context:
# capabilities, max packet, charset
data = struct.pack('<IIB', self.client_flag, 16777216, 33)
data += b'\x00' * (32 - len(data))
self.write_packet(data)
# Stop sending events to data_received
self._writer.transport.pause_reading()
# Get the raw socket from the transport
raw_sock = self._writer.transport.get_extra_info('socket',
default=None)
if raw_sock is None:
raise RuntimeError("Transport does not expose socket instance")
raw_sock = raw_sock.dup()
self._writer.transport.close()
# MySQL expects TLS negotiation to happen in the middle of a
# TCP connection not at start. Passing in a socket to
# open_connection will cause it to negotiate TLS on an existing
# connection not initiate a new one.
self._reader, self._writer = await asyncio.open_connection(
sock=raw_sock, ssl=self._ssl_context,
server_hostname=self._host
)
charset_id = charset_by_name(self.charset).id
if isinstance(self.user, str):
_user = self.user.encode(self.encoding)
else:
_user = self.user
data_init = struct.pack('<iIB23s', self.client_flag, MAX_PACKET_LEN,
charset_id, b'')
data = data_init + _user + b'\0'
authresp = b''
auth_plugin = self._client_auth_plugin
if not self._client_auth_plugin:
# Contains the auth plugin from handshake
auth_plugin = self._server_auth_plugin
if auth_plugin in ('', 'mysql_native_password'):
authresp = _auth.scramble_native_password(
self._password.encode('latin1'), self.salt)
elif auth_plugin == 'caching_sha2_password':
if self._password:
authresp = _auth.scramble_caching_sha2(
self._password.encode('latin1'), self.salt
)
# Else: empty password
elif auth_plugin == 'sha256_password':
if self._ssl_context and self.server_capabilities & CLIENT.SSL:
authresp = self._password.encode('latin1') + b'\0'
elif self._password:
authresp = b'\1' # request public key
else:
authresp = b'\0' # empty password
elif auth_plugin == 'mysql_clear_password':
authresp = self._password.encode('latin1') + b'\0'
if self.server_capabilities & CLIENT.PLUGIN_AUTH_LENENC_CLIENT_DATA:
data += lenenc_int(len(authresp)) + authresp
elif self.server_capabilities & CLIENT.SECURE_CONNECTION:
data += struct.pack('B', len(authresp)) + authresp
else: # pragma: no cover
# not testing against servers without secure auth (>=5.0)
data += authresp + b'\0'
if self._db and self.server_capabilities & CLIENT.CONNECT_WITH_DB:
if isinstance(self._db, str):
db = self._db.encode(self.encoding)
else:
db = self._db
data += db + b'\0'
if self.server_capabilities & CLIENT.PLUGIN_AUTH:
name = auth_plugin
if isinstance(name, str):
name = name.encode('ascii')
data += name + b'\0'
self._auth_plugin_used = auth_plugin
# Sends the server a few pieces of client info
if self.server_capabilities & CLIENT.CONNECT_ATTRS:
connect_attrs = b''
for k, v in self._connect_attrs.items():
k, v = k.encode('utf8'), v.encode('utf8')
connect_attrs += struct.pack('B', len(k)) + k
connect_attrs += struct.pack('B', len(v)) + v
data += struct.pack('B', len(connect_attrs)) + connect_attrs
self.write_packet(data)
auth_packet = await self._read_packet()
        # if the authentication method isn't accepted, the first byte
        # of the response will be 0xfe (254)
if auth_packet.is_auth_switch_request():
# https://dev.mysql.com/doc/internals/en/
# connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
auth_packet.read_uint8() # 0xfe packet identifier
plugin_name = auth_packet.read_string()
if (self.server_capabilities & CLIENT.PLUGIN_AUTH and
plugin_name is not None):
await self._process_auth(plugin_name, auth_packet)
else:
# send legacy handshake
data = _auth.scramble_old_password(
self._password.encode('latin1'),
auth_packet.read_all()) + b'\0'
self.write_packet(data)
await self._read_packet()
elif auth_packet.is_extra_auth_data():
if auth_plugin == "caching_sha2_password":
await self.caching_sha2_password_auth(auth_packet)
elif auth_plugin == "sha256_password":
await self.sha256_password_auth(auth_packet)
else:
raise OperationalError("Received extra packet "
"for auth method %r", auth_plugin)
async def _process_auth(self, plugin_name, auth_packet):
# These auth plugins do their own packet handling
if plugin_name == b"caching_sha2_password":
await self.caching_sha2_password_auth(auth_packet)
self._auth_plugin_used = plugin_name.decode()
elif plugin_name == b"sha256_password":
await self.sha256_password_auth(auth_packet)
self._auth_plugin_used = plugin_name.decode()
else:
if plugin_name == b"mysql_native_password":
# https://dev.mysql.com/doc/internals/en/
# secure-password-authentication.html#packet-Authentication::
# Native41
data = _auth.scramble_native_password(
self._password.encode('latin1'),
auth_packet.read_all())
elif plugin_name == b"mysql_old_password":
# https://dev.mysql.com/doc/internals/en/
# old-password-authentication.html
data = _auth.scramble_old_password(
self._password.encode('latin1'),
auth_packet.read_all()
) + b'\0'
elif plugin_name == b"mysql_clear_password":
# https://dev.mysql.com/doc/internals/en/
# clear-text-authentication.html
data = self._password.encode('latin1') + b'\0'
else:
raise OperationalError(
2059, "Authentication plugin '{0}'"
" not configured".format(plugin_name)
)
self.write_packet(data)
pkt = await self._read_packet()
pkt.check_error()
self._auth_plugin_used = plugin_name.decode()
return pkt
async def caching_sha2_password_auth(self, pkt):
# No password fast path
if not self._password:
self.write_packet(b'')
pkt = await self._read_packet()
pkt.check_error()
return pkt
if pkt.is_auth_switch_request():
# Try from fast auth
logger.debug("caching sha2: Trying fast path")
self.salt = pkt.read_all()
scrambled = _auth.scramble_caching_sha2(
self._password.encode('latin1'), self.salt
)
self.write_packet(scrambled)
pkt = await self._read_packet()
pkt.check_error()
# else: fast auth is tried in initial handshake
if not pkt.is_extra_auth_data():
raise OperationalError(
"caching sha2: Unknown packet "
"for fast auth: {0}".format(pkt._data[:1])
)
# magic numbers:
# 2 - request public key
# 3 - fast auth succeeded
# 4 - need full auth
pkt.advance(1)
n = pkt.read_uint8()
if n == 3:
logger.debug("caching sha2: succeeded by fast path.")
pkt = await self._read_packet()
pkt.check_error() # pkt must be OK packet
return pkt
if n != 4:
raise OperationalError("caching sha2: Unknown "
"result for fast auth: {0}".format(n))
logger.debug("caching sha2: Trying full auth...")
if self._ssl_context:
logger.debug("caching sha2: Sending plain "
"password via secure connection")
self.write_packet(self._password.encode('latin1') + b'\0')
pkt = await self._read_packet()
pkt.check_error()
return pkt
if not self.server_public_key:
self.write_packet(b'\x02')
pkt = await self._read_packet() # Request public key
pkt.check_error()
if not pkt.is_extra_auth_data():
raise OperationalError(
"caching sha2: Unknown packet "
"for public key: {0}".format(pkt._data[:1])
)
self.server_public_key = pkt._data[1:]
logger.debug(self.server_public_key.decode('ascii'))
data = _auth.sha2_rsa_encrypt(
self._password.encode('latin1'), self.salt,
self.server_public_key
)
self.write_packet(data)
pkt = await self._read_packet()
pkt.check_error()
async def sha256_password_auth(self, pkt):
if self._ssl_context:
logger.debug("sha256: Sending plain password")
data = self._password.encode('latin1') + b'\0'
self.write_packet(data)
pkt = await self._read_packet()
pkt.check_error()
return pkt
if pkt.is_auth_switch_request():
self.salt = pkt.read_all()
if not self.server_public_key and self._password:
# Request server public key
logger.debug("sha256: Requesting server public key")
self.write_packet(b'\1')
pkt = await self._read_packet()
pkt.check_error()
if pkt.is_extra_auth_data():
self.server_public_key = pkt._data[1:]
                logger.debug(
                    "Received public key:\n%s",
                    self.server_public_key.decode('ascii')
                )
if self._password:
if not self.server_public_key:
raise OperationalError("Couldn't receive server's public key")
data = _auth.sha2_rsa_encrypt(
self._password.encode('latin1'), self.salt,
self.server_public_key
)
else:
data = b''
self.write_packet(data)
pkt = await self._read_packet()
pkt.check_error()
return pkt
# _mysql support
def thread_id(self):
return self.server_thread_id[0]
def character_set_name(self):
return self._charset
def get_host_info(self):
return self.host_info
def get_proto_info(self):
return self.protocol_version
async def _get_server_information(self):
i = 0
packet = await self._read_packet()
data = packet.get_all_data()
# logger.debug(dump_packet(data))
self.protocol_version = byte2int(data[i:i + 1])
i += 1
server_end = data.find(b'\0', i)
self.server_version = data[i:server_end].decode('latin1')
i = server_end + 1
self.server_thread_id = struct.unpack('<I', data[i:i + 4])
i += 4
self.salt = data[i:i + 8]
i += 9 # 8 + 1(filler)
self.server_capabilities = struct.unpack('<H', data[i:i + 2])[0]
i += 2
if len(data) >= i + 6:
lang, stat, cap_h, salt_len = struct.unpack('<BHHB', data[i:i + 6])
i += 6
self.server_language = lang
try:
self.server_charset = charset_by_id(lang).name
except KeyError:
# unknown collation
self.server_charset = None
self.server_status = stat
# logger.debug("server_status: %s" % _convert_to_str(stat))
self.server_capabilities |= cap_h << 16
# logger.debug("salt_len: %s" % _convert_to_str(salt_len))
salt_len = max(12, salt_len - 9)
# reserved
i += 10
if len(data) >= i + salt_len:
# salt_len includes auth_plugin_data_part_1 and filler
self.salt += data[i:i + salt_len]
i += salt_len
i += 1
# AUTH PLUGIN NAME may appear here.
if self.server_capabilities & CLIENT.PLUGIN_AUTH and len(data) >= i:
# Due to Bug#59453 the auth-plugin-name is missing the terminating
# NUL-char in versions prior to 5.5.10 and 5.6.2.
# ref: https://dev.mysql.com/doc/internals/en/
# connection-phase-packets.html#packet-Protocol::Handshake
# didn't use version checks as mariadb is corrected and reports
# earlier than those two.
server_end = data.find(b'\0', i)
if server_end < 0: # pragma: no cover - very specific upstream bug
# not found \0 and last field so take it all
self._server_auth_plugin = data[i:].decode('latin1')
else:
self._server_auth_plugin = data[i:server_end].decode('latin1')
def get_transaction_status(self):
return bool(self.server_status & SERVER_STATUS.SERVER_STATUS_IN_TRANS)
def get_server_info(self):
return self.server_version
# Just to always have consistent errors 2 helpers
def _close_on_cancel(self):
self.close()
self._close_reason = "Cancelled during execution"
def _ensure_alive(self):
if not self._writer:
if self._close_reason is None:
raise InterfaceError("(0, 'Not connected')")
else:
raise InterfaceError(self._close_reason)
def __del__(self):
if self._writer:
warnings.warn("Unclosed connection {!r}".format(self),
ResourceWarning)
self.close()
Warning = Warning
Error = Error
InterfaceError = InterfaceError
DatabaseError = DatabaseError
DataError = DataError
OperationalError = OperationalError
IntegrityError = IntegrityError
InternalError = InternalError
ProgrammingError = ProgrammingError
NotSupportedError = NotSupportedError
# TODO: move OK and EOF packet parsing/logic into a proper subclass
# of MysqlPacket like has been done with FieldDescriptorPacket.
class MySQLResult:
def __init__(self, connection):
self.connection = connection
self.affected_rows = None
self.insert_id = None
self.server_status = None
self.warning_count = 0
self.message = None
self.field_count = 0
self.description = None
self.rows = None
self.has_next = None
self.unbuffered_active = False
async def read(self):
try:
first_packet = await self.connection._read_packet()
# TODO: use classes for different packet types?
if first_packet.is_ok_packet():
self._read_ok_packet(first_packet)
elif first_packet.is_load_local_packet():
await self._read_load_local_packet(first_packet)
else:
await self._read_result_packet(first_packet)
finally:
self.connection = None
async def init_unbuffered_query(self):
self.unbuffered_active = True
first_packet = await self.connection._read_packet()
if first_packet.is_ok_packet():
self._read_ok_packet(first_packet)
self.unbuffered_active = False
self.connection = None
elif first_packet.is_load_local_packet():
await self._read_load_local_packet(first_packet)
self.unbuffered_active = False
self.connection = None
else:
self.field_count = first_packet.read_length_encoded_integer()
await self._get_descriptions()
# Apparently, MySQLdb picks this number because it's the maximum
# value of a 64bit unsigned integer. Since we're emulating MySQLdb,
# we set it to this instead of None, which would be preferred.
self.affected_rows = 18446744073709551615
def _read_ok_packet(self, first_packet):
ok_packet = OKPacketWrapper(first_packet)
self.affected_rows = ok_packet.affected_rows
self.insert_id = ok_packet.insert_id
self.server_status = ok_packet.server_status
self.warning_count = ok_packet.warning_count
self.message = ok_packet.message
self.has_next = ok_packet.has_next
async def _read_load_local_packet(self, first_packet):
load_packet = LoadLocalPacketWrapper(first_packet)
sender = LoadLocalFile(load_packet.filename, self.connection)
try:
await sender.send_data()
except Exception:
# Skip ok packet
await self.connection._read_packet()
raise
ok_packet = await self.connection._read_packet()
if not ok_packet.is_ok_packet():
raise OperationalError(2014, "Commands Out of Sync")
self._read_ok_packet(ok_packet)
def _check_packet_is_eof(self, packet):
if packet.is_eof_packet():
eof_packet = EOFPacketWrapper(packet)
self.warning_count = eof_packet.warning_count
self.has_next = eof_packet.has_next
return True
return False
async def _read_result_packet(self, first_packet):
self.field_count = first_packet.read_length_encoded_integer()
await self._get_descriptions()
await self._read_rowdata_packet()
async def _read_rowdata_packet_unbuffered(self):
# Check if in an active query
if not self.unbuffered_active:
return
packet = await self.connection._read_packet()
if self._check_packet_is_eof(packet):
self.unbuffered_active = False
self.connection = None
self.rows = None
return
row = self._read_row_from_packet(packet)
self.affected_rows = 1
        # rows should be a tuple of rows for MySQL-python compatibility.
self.rows = (row,)
return row
async def _finish_unbuffered_query(self):
# After much reading on the MySQL protocol, it appears that there is,
# in fact, no way to stop MySQL from sending all the data after
# executing a query, so we just spin, and wait for an EOF packet.
while self.unbuffered_active:
packet = await self.connection._read_packet()
if self._check_packet_is_eof(packet):
self.unbuffered_active = False
# release reference to kill cyclic reference.
self.connection = None
async def _read_rowdata_packet(self):
"""Read a rowdata packet for each data row in the result set."""
rows = []
while True:
packet = await self.connection._read_packet()
if self._check_packet_is_eof(packet):
# release reference to kill cyclic reference.
self.connection = None
break
rows.append(self._read_row_from_packet(packet))
self.affected_rows = len(rows)
self.rows = tuple(rows)
def _read_row_from_packet(self, packet):
row = []
for encoding, converter in self.converters:
try:
data = packet.read_length_coded_string()
except IndexError:
# No more columns in this row
# See https://github.com/PyMySQL/PyMySQL/pull/434
break
if data is not None:
if encoding is not None:
data = data.decode(encoding)
if converter is not None:
data = converter(data)
row.append(data)
return tuple(row)
async def _get_descriptions(self):
"""Read a column descriptor packet for each column in the result."""
self.fields = []
self.converters = []
use_unicode = self.connection.use_unicode
conn_encoding = self.connection.encoding
description = []
for i in range(self.field_count):
field = await self.connection._read_packet(
FieldDescriptorPacket)
self.fields.append(field)
description.append(field.description())
field_type = field.type_code
if use_unicode:
if field_type == FIELD_TYPE.JSON:
# When SELECT from JSON column: charset = binary
# When SELECT CAST(... AS JSON): charset = connection
# encoding
# This behavior is different from TEXT / BLOB.
# We should decode result by connection encoding
# regardless charsetnr.
# See https://github.com/PyMySQL/PyMySQL/issues/488
encoding = conn_encoding # SELECT CAST(... AS JSON)
elif field_type in TEXT_TYPES:
if field.charsetnr == 63: # binary
# TEXTs with charset=binary means BINARY types.
encoding = None
else:
encoding = conn_encoding
else:
# Integers, Dates and Times, and other basic data
# is encoded in ascii
encoding = 'ascii'
else:
encoding = None
converter = self.connection.decoders.get(field_type)
if converter is through:
converter = None
self.converters.append((encoding, converter))
eof_packet = await self.connection._read_packet()
assert eof_packet.is_eof_packet(), 'Protocol error, expecting EOF'
self.description = tuple(description)
class LoadLocalFile(object):
def __init__(self, filename, connection):
self.filename = filename
self.connection = connection
self._loop = connection.loop
self._file_object = None
self._executor = None # means use default executor
def _open_file(self):
def opener(filename):
try:
self._file_object = open(filename, 'rb')
except IOError as e:
msg = "Can't find file '{0}'".format(filename)
raise OperationalError(1017, msg) from e
fut = self._loop.run_in_executor(self._executor, opener, self.filename)
return fut
def _file_read(self, chunk_size):
def freader(chunk_size):
try:
chunk = self._file_object.read(chunk_size)
if not chunk:
self._file_object.close()
self._file_object = None
except Exception as e:
self._file_object.close()
self._file_object = None
msg = "Error reading file {}".format(self.filename)
raise OperationalError(1024, msg) from e
return chunk
fut = self._loop.run_in_executor(self._executor, freader, chunk_size)
return fut
async def send_data(self):
"""Send data packets from the local file to the server"""
self.connection._ensure_alive()
conn = self.connection
try:
await self._open_file()
with self._file_object:
chunk_size = MAX_PACKET_LEN
while True:
chunk = await self._file_read(chunk_size)
if not chunk:
break
# TODO: consider drain data
conn.write_packet(chunk)
except asyncio.CancelledError:
self.connection._close_on_cancel()
raise
finally:
# send the empty packet to signify we are done sending data
            conn.write_packet(b"")


# ---------------------------------------------------------------------------
# zdpapi_mysql/crud.py
# ---------------------------------------------------------------------------
from .mysql import Mysql
from typing import Dict, List, Any, Tuple
class Crud:
def __init__(self,
db: Mysql,
table: str,
columns: List[Any]) -> None:
self.db = db
self.table = table
self.columns = columns
def _get_columns_str(self):
"""
根据列名自动生成names和values,用于动态拼接SQL语句
"""
names = ", ".join(self.columns)
values_ = ["%s" for i in range(len(self.columns))]
values = ", ".join(values_)
return names, values
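    # Example (illustrative): with columns ["name", "age"] the helper above
    # returns names == "name, age" and values == "%s, %s", which are spliced
    # into the statement built by _get_insert_sql().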
def _get_columns_id_str(self, columns: List) -> str:
"""
返回字段字符串,用跟在select后面做列筛选
"""
# 字段
columns_ = []
if "id" not in columns:
columns_.append("id")
columns_.extend(columns)
# 字段字符串
columns_str = ", ".join(columns_)
return columns_str
def _get_insert_sql(self):
"""
获取新增数据的SQL
"""
names, values = self._get_columns_str()
sql = f"INSERT INTO {self.table} ({names}) VALUES ({values})"
return sql
def _to_dict(self, data: Tuple) -> Dict:
"""
将tuple类型的结果转换为dict类型的结果
"""
# 获取字段
columns_ = []
if "id" not in self.columns:
columns_.append("id")
columns_.extend(self.columns)
# 转换数据
return dict(zip(columns_, data))
def _to_list_dict(self, data: Tuple) -> List[Dict]:
"""
将tuple类型列表的结果转换为dict类型列表的结果
"""
result = []
for i in data:
result.append(self._to_dict(i))
return result
async def add(self, data_dict: Dict):
"""
添加单条数据
"""
name_str = ", ".join(data_dict.keys())
value_str = ", ".join(["%s" for _ in data_dict.values()])
sql = f"INSERT INTO {self.table}({name_str}) values ({value_str})"
await self.db.execute(sql, values=tuple(data_dict.values()))
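    # A minimal usage sketch (table and column names are illustrative):
    #
    #     crud = Crud(db, "person", ["name", "age"])
    #     await crud.add({"name": "Tom", "age": 30})
    #     # executes: INSERT INTO person(name, age) values (%s, %s)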
async def add_many(self, data: List[Dict]):
"""
添加多条数据
"""
if len(data) == 0:
return
# 准备SQL语句
data_dict = data[0]
name_str = ", ".join(data_dict.keys())
value_str = ", ".join(["%s" for _ in data_dict.values()])
sql = f"INSERT INTO {self.table}({name_str}) values ({value_str})"
# 提取数据
values = [tuple(item.values()) for item in data]
sql = self._get_insert_sql()
# 执行SQL语句
await self.db.execute(sql, data=values)
async def delete(self, id: int):
"""
删除单条数据
"""
sql = f"DELETE FROM {self.table} WHERE id = %s;"
await self.db.execute(sql, values=[id])
async def delete_ids(self, ids: Tuple[int]):
"""
根据ID列表删除多条数据
"""
ids_ = ["%s" for _ in range(len(ids))]
ids_str = ", ".join(ids_)
sql = f"DELETE FROM {self.table} WHERE id IN({ids_str});"
await self.db.execute(sql, values=ids)
async def update(self, id: int, update_dict: Dict):
"""
更新单条数据
"""
update_ = []
values = []
# 组合参数
for k, v in update_dict.items():
update_.append(f"{k} = %s")
values.append(v)
update_str = ", ".join(update_)
# 生成SQL语句
sql = f"UPDATE {self.table} SET {update_str} WHERE id = %s;"
values.append(id)
# 执行SQL语句
await self.db.execute(sql, tuple(values))
async def update_many(self, updates: List[Dict]):
"""
更新多条数据
"""
# 数组为空或者字典没有id字段,则直接返回
if not updates or updates[0].get("id") is None:
return
# 更新多条数据
for data in updates:
# 取出ID
id_ = data.get("id")
del data["id"]
update_ = []
values = []
# 组合参数
for k, v in data.items():
update_.append(f"{k} = %s")
values.append(v)
update_str = ", ".join(update_)
# 生成SQL语句
sql = f"UPDATE {self.table} SET {update_str} WHERE id = %s;"
values.append(id_)
await self.db.execute(sql, tuple(values))
    async def find(self, id: int) -> Dict:
        """
        Find a single row by id and return it as a dict.
        """
        # Column string
        columns_str = self._get_columns_id_str(self.columns)
        # SQL statement
        sql = f"SELECT {columns_str} FROM {self.table} WHERE id = %s;"
result = await self.db.execute(sql, values=(id,), return_all=False)
return self._to_dict(result)
async def find_page(self, page: int = 1, size: int = 20, order_column: str = "id", order_type: str = "DESC") -> List[Dict]:
"""
分页查找多条数据
"""
# 字段字符串
columns_str = self._get_columns_id_str(self.columns)
# 分页查询
offset = (page - 1) * size
# 这里采用“延迟关联”大大提高查询效率:《高性能MySQL》 第3版 242页
sql = f"""
SELECT {columns_str} FROM {self.table}
INNER JOIN(
SELECT id FROM {self.table}
ORDER BY id LIMIT {offset}, {size}
) AS t1 USING(id)
ORDER BY {order_column} {order_type};
"""
result = await self.db.execute(sql)
return self._to_list_dict(result)
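    # For example (illustrative), page=2, size=20 on a table "person" with
    # columns ["name", "age"] generates:
    #
    #     SELECT id, name, age FROM person
    #     INNER JOIN(
    #         SELECT id FROM person ORDER BY id LIMIT 20, 20
    #     ) AS t1 USING(id)
    #     ORDER BY id DESC;
    #
    # The inner query scans only the primary-key index before the join
    # fetches the full rows, which keeps deep pages cheap.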
async def find_total(self) -> int:
"""
查询数据总数
"""
sql = f"SELECT COUNT(*) FROM {self.table};"
result = await self.db.execute(sql)
total = 0
try:
total = result[0][0]
except:
pass
return total
async def find_ids(self, ids: List[int]) -> List[Dict]:
"""
根据ID列表查找多条数据
"""
# 字段字符串
columns_str = self._get_columns_id_str(self.columns)
# 查询字符串
query_ = ["%s" for _ in range(len(ids))]
query_str = ", ".join(query_)
# SQL语句
sql = f"SELECT {columns_str} FROM {self.table} WHERE id in ({query_str});"
result = await self.db.execute(sql, values=tuple(ids))
        return self._to_list_dict(result)


# ---------------------------------------------------------------------------
# zdpapi_mysql/utils.py
# ---------------------------------------------------------------------------
from collections.abc import Coroutine
class _ContextManager(Coroutine):
__slots__ = ('_coro', '_obj')
def __init__(self, coro):
self._coro = coro
self._obj = None
def send(self, value):
return self._coro.send(value)
def throw(self, typ, val=None, tb=None):
if val is None:
return self._coro.throw(typ)
elif tb is None:
return self._coro.throw(typ, val)
else:
return self._coro.throw(typ, val, tb)
def close(self):
return self._coro.close()
@property
def gi_frame(self):
return self._coro.gi_frame
@property
def gi_running(self):
return self._coro.gi_running
@property
def gi_code(self):
return self._coro.gi_code
def __next__(self):
return self.send(None)
def __iter__(self):
return self._coro.__await__()
def __await__(self):
return self._coro.__await__()
async def __aenter__(self):
self._obj = await self._coro
return self._obj
async def __aexit__(self, exc_type, exc, tb):
await self._obj.close()
self._obj = None
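# A minimal usage sketch (assuming, as in this package, that connect()
# returns its coroutine wrapped in _ConnectionContextManager):
#
#     conn = await connect(host="127.0.0.1")           # plain await
#     async with connect(host="127.0.0.1") as conn:    # or as a context
#         ...                                          # closed on exit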
class _ConnectionContextManager(_ContextManager):
async def __aexit__(self, exc_type, exc, tb):
if exc_type is not None:
self._obj.close()
else:
await self._obj.ensure_closed()
self._obj = None
class _PoolContextManager(_ContextManager):
async def __aexit__(self, exc_type, exc, tb):
self._obj.close()
await self._obj.wait_closed()
self._obj = None
class _SAConnectionContextManager(_ContextManager):
async def __aiter__(self):
result = await self._coro
return result
class _TransactionContextManager(_ContextManager):
async def __aexit__(self, exc_type, exc, tb):
if exc_type:
await self._obj.rollback()
else:
if self._obj.is_active:
await self._obj.commit()
self._obj = None
class _PoolAcquireContextManager(_ContextManager):
__slots__ = ('_coro', '_conn', '_pool')
def __init__(self, coro, pool):
self._coro = coro
self._conn = None
self._pool = pool
async def __aenter__(self):
self._conn = await self._coro
return self._conn
async def __aexit__(self, exc_type, exc, tb):
try:
await self._pool.release(self._conn)
finally:
self._pool = None
self._conn = None
class _PoolConnectionContextManager:
"""Context manager.
This enables the following idiom for acquiring and releasing a
connection around a block:
with (yield from pool) as conn:
cur = yield from conn.cursor()
while failing loudly when accidentally using:
with pool:
<block>
"""
__slots__ = ('_pool', '_conn')
def __init__(self, pool, conn):
self._pool = pool
self._conn = conn
def __enter__(self):
assert self._conn
return self._conn
def __exit__(self, exc_type, exc_val, exc_tb):
try:
self._pool.release(self._conn)
finally:
self._pool = None
self._conn = None
async def __aenter__(self):
assert not self._conn
self._conn = await self._pool.acquire()
return self._conn
async def __aexit__(self, exc_type, exc_val, exc_tb):
try:
await self._pool.release(self._conn)
finally:
self._pool = None
            self._conn = None


# ---------------------------------------------------------------------------
# zdpapi_mysql/mysql.py
# ---------------------------------------------------------------------------
import asyncio
from typing import List, Tuple, Union, Any
from .connection import connect
class Mysql:
def __init__(self,
host='127.0.0.1',
port=3306,
user='root',
password='root',
db='test') -> None:
self.loop = asyncio.get_event_loop()
self.host = host
self.port = port
self.user = user
self.password = password
self.db = db
self.pool = None
self.conn = None
async def connect(self):
"""
建立数据库连接
"""
# self.conn = await aiomysql.connect(
self.conn = await connect(
host=self.host,
port=self.port,
user=self.user,
password=self.password,
db=self.db,
loop=self.loop)
async def execute(self,
sql: str = None,
values: Tuple[Any] = None,
data: List[Tuple] = None,
return_all=True) -> Union[Tuple[Tuple], Tuple[Any]]:
"""
执行SQL语句
"""
# 自动建立连接
if self.conn is None:
await self.connect()
async with self.conn.cursor() as cur:
            # Bulk insert
if data is not None:
try:
await cur.executemany(sql, data)
await self.conn.commit()
except Exception as e:
await self.conn.rollback()
raise e
elif values is not None:
                # Execute a parameterized SQL statement
try:
await cur.execute(sql, values)
await self.conn.commit()
except Exception as e:
await self.conn.rollback()
raise e
else:
                # Execute a plain SQL statement
try:
await cur.execute(sql)
await self.conn.commit()
except Exception as e:
await self.conn.rollback()
raise e
            # Return the result
if return_all:
return await cur.fetchall()
return await cur.fetchone()
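    # A minimal usage sketch (database and table names are illustrative):
    #
    #     db = Mysql(host="127.0.0.1", db="test")
    #     rows = await db.execute("SELECT * FROM person WHERE age > %s",
    #                             values=(18,))
    #     one = await db.execute("SELECT COUNT(*) FROM person",
    #                            return_all=False)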
async def create_table(self, sql: str):
"""
创建数据库表
"""
await self.execute(sql)
async def close(self):
        self.conn.close()


# ---------------------------------------------------------------------------
# zdpapi_mysql/pymysql/converters.py
# ---------------------------------------------------------------------------
from ._compat import PY2, text_type, long_type, JYTHON, IRONPYTHON, unichr
import datetime
from decimal import Decimal
import re
import time
from .constants import FIELD_TYPE, FLAG
from .charset import charset_by_id, charset_to_encoding
def escape_item(val, charset, mapping=None):
if mapping is None:
mapping = encoders
encoder = mapping.get(type(val))
# Fallback to default when no encoder found
if not encoder:
try:
encoder = mapping[text_type]
except KeyError:
raise TypeError("no default type converter defined")
if encoder in (escape_dict, escape_sequence):
val = encoder(val, charset, mapping)
else:
val = encoder(val, mapping)
return val
def escape_dict(val, charset, mapping=None):
n = {}
for k, v in val.items():
quoted = escape_item(v, charset, mapping)
n[k] = quoted
return n
def escape_sequence(val, charset, mapping=None):
n = []
for item in val:
quoted = escape_item(item, charset, mapping)
n.append(quoted)
return "(" + ",".join(n) + ")"
def escape_set(val, charset, mapping=None):
return ','.join([escape_item(x, charset, mapping) for x in val])
def escape_bool(value, mapping=None):
return str(int(value))
def escape_object(value, mapping=None):
return str(value)
def escape_int(value, mapping=None):
return str(value)
def escape_float(value, mapping=None):
return ('%.15g' % value)
_escape_table = [unichr(x) for x in range(128)]
_escape_table[0] = u'\\0'
_escape_table[ord('\\')] = u'\\\\'
_escape_table[ord('\n')] = u'\\n'
_escape_table[ord('\r')] = u'\\r'
_escape_table[ord('\032')] = u'\\Z'
_escape_table[ord('"')] = u'\\"'
_escape_table[ord("'")] = u"\\'"
def _escape_unicode(value, mapping=None):
"""escapes *value* without adding quote.
Value should be unicode
"""
return value.translate(_escape_table)
if PY2:
def escape_string(value, mapping=None):
"""escape_string escapes *value* but not surround it with quotes.
Value should be bytes or unicode.
"""
if isinstance(value, unicode):
return _escape_unicode(value)
assert isinstance(value, (bytes, bytearray))
value = value.replace('\\', '\\\\')
value = value.replace('\0', '\\0')
value = value.replace('\n', '\\n')
value = value.replace('\r', '\\r')
value = value.replace('\032', '\\Z')
value = value.replace("'", "\\'")
value = value.replace('"', '\\"')
return value
def escape_bytes_prefixed(value, mapping=None):
assert isinstance(value, (bytes, bytearray))
return b"_binary'%s'" % escape_string(value)
def escape_bytes(value, mapping=None):
assert isinstance(value, (bytes, bytearray))
return b"'%s'" % escape_string(value)
else:
escape_string = _escape_unicode
# On Python ~3.5, str.decode('ascii', 'surrogateescape') is slow.
# (fixed in Python 3.6, http://bugs.python.org/issue24870)
# Workaround is str.decode('latin1') then translate 0x80-0xff into 0udc80-0udcff.
# We can escape special chars and surrogateescape at once.
_escape_bytes_table = _escape_table + [chr(i) for i in range(0xdc80, 0xdd00)]
def escape_bytes_prefixed(value, mapping=None):
return "_binary'%s'" % value.decode('latin1').translate(_escape_bytes_table)
def escape_bytes(value, mapping=None):
return "'%s'" % value.decode('latin1').translate(_escape_bytes_table)
def escape_unicode(value, mapping=None):
return u"'%s'" % _escape_unicode(value)
def escape_str(value, mapping=None):
return "'%s'" % escape_string(str(value), mapping)
def escape_None(value, mapping=None):
return 'NULL'
def escape_timedelta(obj, mapping=None):
seconds = int(obj.seconds) % 60
minutes = int(obj.seconds // 60) % 60
hours = int(obj.seconds // 3600) % 24 + int(obj.days) * 24
if obj.microseconds:
fmt = "'{0:02d}:{1:02d}:{2:02d}.{3:06d}'"
else:
fmt = "'{0:02d}:{1:02d}:{2:02d}'"
return fmt.format(hours, minutes, seconds, obj.microseconds)
def escape_time(obj, mapping=None):
if obj.microsecond:
fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'"
else:
fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}'"
return fmt.format(obj)
def escape_datetime(obj, mapping=None):
if obj.microsecond:
fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'"
else:
fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}'"
return fmt.format(obj)
def escape_date(obj, mapping=None):
fmt = "'{0.year:04}-{0.month:02}-{0.day:02}'"
return fmt.format(obj)
def escape_struct_time(obj, mapping=None):
return escape_datetime(datetime.datetime(*obj[:6]))
def _convert_second_fraction(s):
if not s:
return 0
# Pad zeros to ensure the fraction length in microseconds
s = s.ljust(6, '0')
return int(s[:6])
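# Worked example: _convert_second_fraction('12') returns 120000, because the
# fraction ".12" is right-padded to six digits ('120000') so it can be used
# directly as the microsecond argument of a datetime object.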
DATETIME_RE = re.compile(r"(\d{1,4})-(\d{1,2})-(\d{1,2})[T ](\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?")
def convert_datetime(obj):
"""Returns a DATETIME or TIMESTAMP column value as a datetime object:
>>> datetime_or_None('2007-02-25 23:06:20')
datetime.datetime(2007, 2, 25, 23, 6, 20)
>>> datetime_or_None('2007-02-25T23:06:20')
datetime.datetime(2007, 2, 25, 23, 6, 20)
    Illegal values are passed through unchanged:
    >>> datetime_or_None('2007-02-31T23:06:20')
    '2007-02-31T23:06:20'
    >>> datetime_or_None('0000-00-00 00:00:00')
    '0000-00-00 00:00:00'
    """
if not PY2 and isinstance(obj, (bytes, bytearray)):
obj = obj.decode('ascii')
m = DATETIME_RE.match(obj)
if not m:
return convert_date(obj)
try:
groups = list(m.groups())
groups[-1] = _convert_second_fraction(groups[-1])
return datetime.datetime(*[ int(x) for x in groups ])
except ValueError:
return convert_date(obj)
TIMEDELTA_RE = re.compile(r"(-)?(\d{1,3}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?")
def convert_timedelta(obj):
"""Returns a TIME column as a timedelta object:
>>> timedelta_or_None('25:06:17')
datetime.timedelta(1, 3977)
    >>> timedelta_or_None('-25:06:17')
    datetime.timedelta(-2, 82423)
    Illegal values are passed through unchanged:
    >>> timedelta_or_None('random crap')
    'random crap'
Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
can accept values as (+|-)DD HH:MM:SS. The latter format will not
be parsed correctly by this function.
"""
if not PY2 and isinstance(obj, (bytes, bytearray)):
obj = obj.decode('ascii')
m = TIMEDELTA_RE.match(obj)
if not m:
return obj
try:
groups = list(m.groups())
groups[-1] = _convert_second_fraction(groups[-1])
negate = -1 if groups[0] else 1
hours, minutes, seconds, microseconds = groups[1:]
tdelta = datetime.timedelta(
hours = int(hours),
minutes = int(minutes),
seconds = int(seconds),
microseconds = int(microseconds)
) * negate
return tdelta
except ValueError:
return obj
TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?")
def convert_time(obj):
"""Returns a TIME column as a time object:
>>> time_or_None('15:06:17')
datetime.time(15, 6, 17)
    Illegal values are passed through unchanged:
    >>> time_or_None('-25:06:17')
    '-25:06:17'
    >>> time_or_None('random crap')
    'random crap'
Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
can accept values as (+|-)DD HH:MM:SS. The latter format will not
be parsed correctly by this function.
Also note that MySQL's TIME column corresponds more closely to
Python's timedelta and not time. However if you want TIME columns
to be treated as time-of-day and not a time offset, then you can
use set this function as the converter for FIELD_TYPE.TIME.
"""
if not PY2 and isinstance(obj, (bytes, bytearray)):
obj = obj.decode('ascii')
m = TIME_RE.match(obj)
if not m:
return obj
try:
groups = list(m.groups())
groups[-1] = _convert_second_fraction(groups[-1])
hours, minutes, seconds, microseconds = groups
return datetime.time(hour=int(hours), minute=int(minutes),
second=int(seconds), microsecond=int(microseconds))
except ValueError:
return obj
def convert_date(obj):
"""Returns a DATE column as a date object:
>>> date_or_None('2007-02-26')
datetime.date(2007, 2, 26)
    Illegal values are passed through unchanged:
    >>> date_or_None('2007-02-31')
    '2007-02-31'
    >>> date_or_None('0000-00-00')
    '0000-00-00'
"""
if not PY2 and isinstance(obj, (bytes, bytearray)):
obj = obj.decode('ascii')
try:
return datetime.date(*[ int(x) for x in obj.split('-', 2) ])
except ValueError:
return obj
def convert_mysql_timestamp(timestamp):
"""Convert a MySQL TIMESTAMP to a Timestamp object.
MySQL >= 4.1 returns TIMESTAMP in the same format as DATETIME:
>>> mysql_timestamp_converter('2007-02-25 22:32:17')
datetime.datetime(2007, 2, 25, 22, 32, 17)
MySQL < 4.1 uses a big string of numbers:
>>> mysql_timestamp_converter('20070225223217')
datetime.datetime(2007, 2, 25, 22, 32, 17)
    Illegal values are passed through unchanged:
    >>> mysql_timestamp_converter('2007-02-31 22:32:17')
    '2007-02-31 22:32:17'
    >>> mysql_timestamp_converter('00000000000000')
    '00000000000000'
"""
if not PY2 and isinstance(timestamp, (bytes, bytearray)):
timestamp = timestamp.decode('ascii')
if timestamp[4] == '-':
return convert_datetime(timestamp)
timestamp += "0"*(14-len(timestamp)) # padding
year, month, day, hour, minute, second = \
int(timestamp[:4]), int(timestamp[4:6]), int(timestamp[6:8]), \
int(timestamp[8:10]), int(timestamp[10:12]), int(timestamp[12:14])
try:
return datetime.datetime(year, month, day, hour, minute, second)
except ValueError:
return timestamp
def convert_set(s):
if isinstance(s, (bytes, bytearray)):
return set(s.split(b","))
return set(s.split(","))
def through(x):
return x
#def convert_bit(b):
# b = "\x00" * (8 - len(b)) + b # pad w/ zeroes
# return struct.unpack(">Q", b)[0]
#
# the snippet above is right, but MySQLdb doesn't process bits,
# so we shouldn't either
convert_bit = through
encoders = {
bool: escape_bool,
int: escape_int,
long_type: escape_int,
float: escape_float,
str: escape_str,
text_type: escape_unicode,
tuple: escape_sequence,
list: escape_sequence,
set: escape_sequence,
frozenset: escape_sequence,
dict: escape_dict,
type(None): escape_None,
datetime.date: escape_date,
datetime.datetime: escape_datetime,
datetime.timedelta: escape_timedelta,
datetime.time: escape_time,
time.struct_time: escape_struct_time,
Decimal: escape_object,
}
if not PY2 or JYTHON or IRONPYTHON:
encoders[bytes] = escape_bytes
decoders = {
FIELD_TYPE.BIT: convert_bit,
FIELD_TYPE.TINY: int,
FIELD_TYPE.SHORT: int,
FIELD_TYPE.LONG: int,
FIELD_TYPE.FLOAT: float,
FIELD_TYPE.DOUBLE: float,
FIELD_TYPE.LONGLONG: int,
FIELD_TYPE.INT24: int,
FIELD_TYPE.YEAR: int,
FIELD_TYPE.TIMESTAMP: convert_mysql_timestamp,
FIELD_TYPE.DATETIME: convert_datetime,
FIELD_TYPE.TIME: convert_timedelta,
FIELD_TYPE.DATE: convert_date,
FIELD_TYPE.SET: convert_set,
FIELD_TYPE.BLOB: through,
FIELD_TYPE.TINY_BLOB: through,
FIELD_TYPE.MEDIUM_BLOB: through,
FIELD_TYPE.LONG_BLOB: through,
FIELD_TYPE.STRING: through,
FIELD_TYPE.VAR_STRING: through,
FIELD_TYPE.VARCHAR: through,
FIELD_TYPE.DECIMAL: Decimal,
FIELD_TYPE.NEWDECIMAL: Decimal,
}
# for MySQLdb compatibility
conversions = encoders.copy()
conversions.update(decoders)
Thing2Literal = escape_str


# ---------------------------------------------------------------------------
# zdpapi_mysql/pymysql/cursors.py
# ---------------------------------------------------------------------------
from __future__ import print_function, absolute_import
from functools import partial
import re
import warnings
from ._compat import range_type, text_type, PY2
from . import err
#: Regular expression for :meth:`Cursor.executemany`.
#: executemany only supports simple bulk insert.
#: You can use it to load a large dataset.
RE_INSERT_VALUES = re.compile(
r"\s*((?:INSERT|REPLACE)\b.+\bVALUES?\s*)" +
r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))" +
r"(\s*(?:ON DUPLICATE.*)?);?\s*\Z",
re.IGNORECASE | re.DOTALL)
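# Example (illustrative) of what the pattern captures: for
#     "INSERT INTO t (a, b) VALUES (%s, %s) ON DUPLICATE KEY UPDATE b=b"
# group(1) is "INSERT INTO t (a, b) VALUES ", group(2) is "(%s, %s)" and
# group(3) holds the trailing "ON DUPLICATE KEY UPDATE b=b" clause;
# executemany() uses these pieces to pack many value tuples into one
# statement.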
class Cursor(object):
"""
This is the object you use to interact with the database.
Do not create an instance of a Cursor yourself. Call
connections.Connection.cursor().
See `Cursor <https://www.python.org/dev/peps/pep-0249/#cursor-objects>`_ in
the specification.
"""
#: Max statement size which :meth:`executemany` generates.
#:
#: Max size of allowed statement is max_allowed_packet - packet_header_size.
#: Default value of max_allowed_packet is 1048576.
max_stmt_length = 1024000
_defer_warnings = False
def __init__(self, connection):
self.connection = connection
self.description = None
self.rownumber = 0
self.rowcount = -1
self.arraysize = 1
self._executed = None
self._result = None
self._rows = None
self._warnings_handled = False
def close(self):
"""
Closing a cursor just exhausts all remaining data.
"""
conn = self.connection
if conn is None:
return
try:
while self.nextset():
pass
finally:
self.connection = None
def __enter__(self):
return self
def __exit__(self, *exc_info):
del exc_info
self.close()
def _get_db(self):
if not self.connection:
raise err.ProgrammingError("Cursor closed")
return self.connection
def _check_executed(self):
if not self._executed:
raise err.ProgrammingError("execute() first")
def _conv_row(self, row):
return row
def setinputsizes(self, *args):
"""Does nothing, required by DB API."""
def setoutputsizes(self, *args):
"""Does nothing, required by DB API."""
def _nextset(self, unbuffered=False):
"""Get the next query set"""
conn = self._get_db()
current_result = self._result
        # for unbuffered queries, warnings are only available once the
        # whole result has been read
if unbuffered:
self._show_warnings()
if current_result is None or current_result is not conn._result:
return None
if not current_result.has_next:
return None
self._result = None
self._clear_result()
conn.next_result(unbuffered=unbuffered)
self._do_get_result()
return True
def nextset(self):
return self._nextset(False)
def _ensure_bytes(self, x, encoding=None):
if isinstance(x, text_type):
x = x.encode(encoding)
elif isinstance(x, (tuple, list)):
x = type(x)(self._ensure_bytes(v, encoding=encoding) for v in x)
return x
def _escape_args(self, args, conn):
ensure_bytes = partial(self._ensure_bytes, encoding=conn.encoding)
if isinstance(args, (tuple, list)):
if PY2:
args = tuple(map(ensure_bytes, args))
return tuple(conn.literal(arg) for arg in args)
elif isinstance(args, dict):
if PY2:
args = {ensure_bytes(key): ensure_bytes(val) for
(key, val) in args.items()}
return {key: conn.literal(val) for (key, val) in args.items()}
else:
# If it's not a dictionary let's try escaping it anyways.
# Worst case it will throw a Value error
if PY2:
args = ensure_bytes(args)
return conn.escape(args)
def mogrify(self, query, args=None):
"""
Returns the exact string that is sent to the database by calling the
execute() method.
This method follows the extension to the DB API 2.0 followed by Psycopg.
"""
conn = self._get_db()
if PY2: # Use bytes on Python 2 always
query = self._ensure_bytes(query, encoding=conn.encoding)
if args is not None:
query = query % self._escape_args(args, conn)
return query
def execute(self, query, args=None):
"""Execute a query
:param str query: Query to execute.
:param args: parameters used with query. (optional)
:type args: tuple, list or dict
:return: Number of affected rows
:rtype: int
If args is a list or tuple, %s can be used as a placeholder in the query.
If args is a dict, %(name)s can be used as a placeholder in the query.
"""
while self.nextset():
pass
query = self.mogrify(query, args)
result = self._query(query)
self._executed = query
return result
def executemany(self, query, args):
# type: (str, list) -> int
"""Run several data against one query
:param query: query to execute on server
        :param args: Sequence of sequences or mappings, used as the parameters.
:return: Number of rows affected, if any.
This method improves performance on multiple-row INSERT and
REPLACE. Otherwise it is equivalent to looping over args with
execute().
"""
if not args:
return
m = RE_INSERT_VALUES.match(query)
if m:
q_prefix = m.group(1) % ()
q_values = m.group(2).rstrip()
q_postfix = m.group(3) or ''
assert q_values[0] == '(' and q_values[-1] == ')'
return self._do_execute_many(q_prefix, q_values, q_postfix, args,
self.max_stmt_length,
self._get_db().encoding)
self.rowcount = sum(self.execute(query, arg) for arg in args)
return self.rowcount
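    # A minimal usage sketch (table and data are illustrative):
    #
    #     cur.executemany(
    #         "INSERT INTO person (name, age) VALUES (%s, %s)",
    #         [("Tom", 30), ("Ann", 25)])
    #
    # Both rows are packed into a single "INSERT ... VALUES (...),(...)"
    # statement; a new statement is started only when max_stmt_length
    # would be exceeded.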
def _do_execute_many(self, prefix, values, postfix, args, max_stmt_length, encoding):
conn = self._get_db()
escape = self._escape_args
if isinstance(prefix, text_type):
prefix = prefix.encode(encoding)
if PY2 and isinstance(values, text_type):
values = values.encode(encoding)
if isinstance(postfix, text_type):
postfix = postfix.encode(encoding)
sql = bytearray(prefix)
args = iter(args)
v = values % escape(next(args), conn)
if isinstance(v, text_type):
if PY2:
v = v.encode(encoding)
else:
v = v.encode(encoding, 'surrogateescape')
sql += v
rows = 0
for arg in args:
v = values % escape(arg, conn)
if isinstance(v, text_type):
if PY2:
v = v.encode(encoding)
else:
v = v.encode(encoding, 'surrogateescape')
if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length:
rows += self.execute(sql + postfix)
sql = bytearray(prefix)
else:
sql += b','
sql += v
rows += self.execute(sql + postfix)
self.rowcount = rows
return rows
def callproc(self, procname, args=()):
"""Execute stored procedure procname with args
procname -- string, name of procedure to execute on server
args -- Sequence of parameters to use with procedure
Returns the original args.
Compatibility warning: PEP-249 specifies that any modified
parameters must be returned. This is currently impossible
as they are only available by storing them in a server
variable and then retrieved by a query. Since stored
procedures return zero or more result sets, there is no
reliable way to get at OUT or INOUT parameters via callproc.
The server variables are named @_procname_n, where procname
is the parameter above and n is the position of the parameter
(from zero). Once all result sets generated by the procedure
have been fetched, you can issue a SELECT @_procname_0, ...
query using .execute() to get any OUT or INOUT values.
Compatibility warning: The act of calling a stored procedure
itself creates an empty result set. This appears after any
result sets generated by the procedure. This is non-standard
behavior with respect to the DB-API. Be sure to use nextset()
to advance through all result sets; otherwise you may get
disconnected.
"""
conn = self._get_db()
if args:
fmt = '@_{0}_%d=%s'.format(procname)
self._query('SET %s' % ','.join(fmt % (index, conn.escape(arg))
for index, arg in enumerate(args)))
self.nextset()
q = "CALL %s(%s)" % (procname,
','.join(['@_%s_%d' % (procname, i)
for i in range_type(len(args))]))
self._query(q)
self._executed = q
return args
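    # Example (illustrative), following the compatibility note above:
    #
    #     cur.callproc("multiply", (5, 6))
    #     # ...advance through the procedure's result sets, then:
    #     cur.execute("SELECT @_multiply_0, @_multiply_1")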
def fetchone(self):
"""Fetch the next row"""
self._check_executed()
if self._rows is None or self.rownumber >= len(self._rows):
return None
result = self._rows[self.rownumber]
self.rownumber += 1
return result
def fetchmany(self, size=None):
"""Fetch several rows"""
self._check_executed()
if self._rows is None:
return ()
end = self.rownumber + (size or self.arraysize)
result = self._rows[self.rownumber:end]
self.rownumber = min(end, len(self._rows))
return result
def fetchall(self):
"""Fetch all the rows"""
self._check_executed()
if self._rows is None:
return ()
if self.rownumber:
result = self._rows[self.rownumber:]
else:
result = self._rows
self.rownumber = len(self._rows)
return result
def scroll(self, value, mode='relative'):
self._check_executed()
if mode == 'relative':
r = self.rownumber + value
elif mode == 'absolute':
r = value
else:
raise err.ProgrammingError("unknown scroll mode %s" % mode)
if not (0 <= r < len(self._rows)):
raise IndexError("out of range")
self.rownumber = r
def _query(self, q):
conn = self._get_db()
self._last_executed = q
self._clear_result()
conn.query(q)
self._do_get_result()
return self.rowcount
def _clear_result(self):
self.rownumber = 0
self._result = None
self.rowcount = 0
self.description = None
self.lastrowid = None
self._rows = None
def _do_get_result(self):
conn = self._get_db()
self._result = result = conn._result
self.rowcount = result.affected_rows
self.description = result.description
self.lastrowid = result.insert_id
self._rows = result.rows
self._warnings_handled = False
if not self._defer_warnings:
self._show_warnings()
def _show_warnings(self):
if self._warnings_handled:
return
self._warnings_handled = True
if self._result and (self._result.has_next or not self._result.warning_count):
return
ws = self._get_db().show_warnings()
if ws is None:
return
for w in ws:
msg = w[-1]
if PY2:
if isinstance(msg, unicode):
msg = msg.encode('utf-8', 'replace')
warnings.warn(err.Warning(*w[1:3]), stacklevel=4)
def __iter__(self):
return iter(self.fetchone, None)
Warning = err.Warning
Error = err.Error
InterfaceError = err.InterfaceError
DatabaseError = err.DatabaseError
DataError = err.DataError
OperationalError = err.OperationalError
IntegrityError = err.IntegrityError
InternalError = err.InternalError
ProgrammingError = err.ProgrammingError
NotSupportedError = err.NotSupportedError
class DictCursorMixin(object):
# You can override this to use OrderedDict or other dict-like types.
dict_type = dict
def _do_get_result(self):
super(DictCursorMixin, self)._do_get_result()
fields = []
if self.description:
for f in self._result.fields:
name = f.name
if name in fields:
name = f.table_name + '.' + name
fields.append(name)
self._fields = fields
if fields and self._rows:
self._rows = [self._conv_row(r) for r in self._rows]
def _conv_row(self, row):
if row is None:
return None
return self.dict_type(zip(self._fields, row))
class DictCursor(DictCursorMixin, Cursor):
"""A cursor which returns results as a dictionary"""
class SSCursor(Cursor):
"""
Unbuffered Cursor, mainly useful for queries that return a lot of data,
or for connections to remote servers over a slow network.
Instead of copying every row of data into a buffer, this will fetch
rows as needed. The upside of this is the client uses much less memory,
and rows are returned much faster when traveling over a slow network
or if the result set is very big.
There are limitations, though. The MySQL protocol doesn't support
returning the total number of rows, so the only way to tell how many rows
there are is to iterate over every row returned. Also, it currently isn't
possible to scroll backwards, as only the current row is held in memory.
"""
_defer_warnings = True
def _conv_row(self, row):
return row
def close(self):
conn = self.connection
if conn is None:
return
if self._result is not None and self._result is conn._result:
self._result._finish_unbuffered_query()
try:
while self.nextset():
pass
finally:
self.connection = None
__del__ = close
def _query(self, q):
conn = self._get_db()
self._last_executed = q
self._clear_result()
conn.query(q, unbuffered=True)
self._do_get_result()
return self.rowcount
def nextset(self):
return self._nextset(unbuffered=True)
def read_next(self):
"""Read next row"""
return self._conv_row(self._result._read_rowdata_packet_unbuffered())
def fetchone(self):
"""Fetch next row"""
self._check_executed()
row = self.read_next()
if row is None:
self._show_warnings()
return None
self.rownumber += 1
return row
def fetchall(self):
"""
Fetch all, as per MySQLdb. Pretty useless for large queries, as
it is buffered. See fetchall_unbuffered(), if you want an unbuffered
generator version of this method.
"""
return list(self.fetchall_unbuffered())
def fetchall_unbuffered(self):
"""
Fetch all, implemented as a generator, which isn't to standard,
however, it doesn't make sense to return everything in a list, as that
would use ridiculous memory for large result sets.
"""
return iter(self.fetchone, None)
def __iter__(self):
return self.fetchall_unbuffered()
def fetchmany(self, size=None):
"""Fetch many"""
self._check_executed()
if size is None:
size = self.arraysize
rows = []
for i in range_type(size):
row = self.read_next()
if row is None:
self._show_warnings()
break
rows.append(row)
self.rownumber += 1
return rows
def scroll(self, value, mode='relative'):
self._check_executed()
if mode == 'relative':
if value < 0:
raise err.NotSupportedError(
"Backwards scrolling not supported by this cursor")
for _ in range_type(value):
self.read_next()
self.rownumber += value
elif mode == 'absolute':
if value < self.rownumber:
raise err.NotSupportedError(
"Backwards scrolling not supported by this cursor")
end = value - self.rownumber
for _ in range_type(end):
self.read_next()
self.rownumber = value
else:
raise err.ProgrammingError("unknown scroll mode %s" % mode)
class SSDictCursor(DictCursorMixin, SSCursor):
"""An unbuffered cursor, which returns results as a dictionary""" | zdpapi-mysql | /zdpapi_mysql-1.0.2-py3-none-any.whl/zdpapi_mysql/pymysql/cursors.py | cursors.py |
MBLENGTH = {
8:1,
33:3,
88:2,
91:2
}
class Charset(object):
def __init__(self, id, name, collation, is_default):
self.id, self.name, self.collation = id, name, collation
self.is_default = is_default == 'Yes'
def __repr__(self):
return "Charset(id=%s, name=%r, collation=%r)" % (
self.id, self.name, self.collation)
@property
def encoding(self):
name = self.name
if name in ('utf8mb4', 'utf8mb3'):
return 'utf8'
return name
@property
def is_binary(self):
return self.id == 63
class Charsets:
def __init__(self):
self._by_id = {}
self._by_name = {}
def add(self, c):
self._by_id[c.id] = c
if c.is_default:
self._by_name[c.name] = c
def by_id(self, id):
return self._by_id[id]
def by_name(self, name):
return self._by_name.get(name.lower())
_charsets = Charsets()
"""
Generated with:
mysql -N -s -e "select id, character_set_name, collation_name, is_default
from information_schema.collations order by id;" | python -c "import sys
for l in sys.stdin.readlines():
id, name, collation, is_default = l.split(chr(9))
print '_charsets.add(Charset(%s, \'%s\', \'%s\', \'%s\'))' \
% (id, name, collation, is_default.strip())
"
"""
_charsets.add(Charset(1, 'big5', 'big5_chinese_ci', 'Yes'))
_charsets.add(Charset(2, 'latin2', 'latin2_czech_cs', ''))
_charsets.add(Charset(3, 'dec8', 'dec8_swedish_ci', 'Yes'))
_charsets.add(Charset(4, 'cp850', 'cp850_general_ci', 'Yes'))
_charsets.add(Charset(5, 'latin1', 'latin1_german1_ci', ''))
_charsets.add(Charset(6, 'hp8', 'hp8_english_ci', 'Yes'))
_charsets.add(Charset(7, 'koi8r', 'koi8r_general_ci', 'Yes'))
_charsets.add(Charset(8, 'latin1', 'latin1_swedish_ci', 'Yes'))
_charsets.add(Charset(9, 'latin2', 'latin2_general_ci', 'Yes'))
_charsets.add(Charset(10, 'swe7', 'swe7_swedish_ci', 'Yes'))
_charsets.add(Charset(11, 'ascii', 'ascii_general_ci', 'Yes'))
_charsets.add(Charset(12, 'ujis', 'ujis_japanese_ci', 'Yes'))
_charsets.add(Charset(13, 'sjis', 'sjis_japanese_ci', 'Yes'))
_charsets.add(Charset(14, 'cp1251', 'cp1251_bulgarian_ci', ''))
_charsets.add(Charset(15, 'latin1', 'latin1_danish_ci', ''))
_charsets.add(Charset(16, 'hebrew', 'hebrew_general_ci', 'Yes'))
_charsets.add(Charset(18, 'tis620', 'tis620_thai_ci', 'Yes'))
_charsets.add(Charset(19, 'euckr', 'euckr_korean_ci', 'Yes'))
_charsets.add(Charset(20, 'latin7', 'latin7_estonian_cs', ''))
_charsets.add(Charset(21, 'latin2', 'latin2_hungarian_ci', ''))
_charsets.add(Charset(22, 'koi8u', 'koi8u_general_ci', 'Yes'))
_charsets.add(Charset(23, 'cp1251', 'cp1251_ukrainian_ci', ''))
_charsets.add(Charset(24, 'gb2312', 'gb2312_chinese_ci', 'Yes'))
_charsets.add(Charset(25, 'greek', 'greek_general_ci', 'Yes'))
_charsets.add(Charset(26, 'cp1250', 'cp1250_general_ci', 'Yes'))
_charsets.add(Charset(27, 'latin2', 'latin2_croatian_ci', ''))
_charsets.add(Charset(28, 'gbk', 'gbk_chinese_ci', 'Yes'))
_charsets.add(Charset(29, 'cp1257', 'cp1257_lithuanian_ci', ''))
_charsets.add(Charset(30, 'latin5', 'latin5_turkish_ci', 'Yes'))
_charsets.add(Charset(31, 'latin1', 'latin1_german2_ci', ''))
_charsets.add(Charset(32, 'armscii8', 'armscii8_general_ci', 'Yes'))
_charsets.add(Charset(33, 'utf8', 'utf8_general_ci', 'Yes'))
_charsets.add(Charset(34, 'cp1250', 'cp1250_czech_cs', ''))
_charsets.add(Charset(36, 'cp866', 'cp866_general_ci', 'Yes'))
_charsets.add(Charset(37, 'keybcs2', 'keybcs2_general_ci', 'Yes'))
_charsets.add(Charset(38, 'macce', 'macce_general_ci', 'Yes'))
_charsets.add(Charset(39, 'macroman', 'macroman_general_ci', 'Yes'))
_charsets.add(Charset(40, 'cp852', 'cp852_general_ci', 'Yes'))
_charsets.add(Charset(41, 'latin7', 'latin7_general_ci', 'Yes'))
_charsets.add(Charset(42, 'latin7', 'latin7_general_cs', ''))
_charsets.add(Charset(43, 'macce', 'macce_bin', ''))
_charsets.add(Charset(44, 'cp1250', 'cp1250_croatian_ci', ''))
_charsets.add(Charset(45, 'utf8mb4', 'utf8mb4_general_ci', 'Yes'))
_charsets.add(Charset(46, 'utf8mb4', 'utf8mb4_bin', ''))
_charsets.add(Charset(47, 'latin1', 'latin1_bin', ''))
_charsets.add(Charset(48, 'latin1', 'latin1_general_ci', ''))
_charsets.add(Charset(49, 'latin1', 'latin1_general_cs', ''))
_charsets.add(Charset(50, 'cp1251', 'cp1251_bin', ''))
_charsets.add(Charset(51, 'cp1251', 'cp1251_general_ci', 'Yes'))
_charsets.add(Charset(52, 'cp1251', 'cp1251_general_cs', ''))
_charsets.add(Charset(53, 'macroman', 'macroman_bin', ''))
_charsets.add(Charset(57, 'cp1256', 'cp1256_general_ci', 'Yes'))
_charsets.add(Charset(58, 'cp1257', 'cp1257_bin', ''))
_charsets.add(Charset(59, 'cp1257', 'cp1257_general_ci', 'Yes'))
_charsets.add(Charset(63, 'binary', 'binary', 'Yes'))
_charsets.add(Charset(64, 'armscii8', 'armscii8_bin', ''))
_charsets.add(Charset(65, 'ascii', 'ascii_bin', ''))
_charsets.add(Charset(66, 'cp1250', 'cp1250_bin', ''))
_charsets.add(Charset(67, 'cp1256', 'cp1256_bin', ''))
_charsets.add(Charset(68, 'cp866', 'cp866_bin', ''))
_charsets.add(Charset(69, 'dec8', 'dec8_bin', ''))
_charsets.add(Charset(70, 'greek', 'greek_bin', ''))
_charsets.add(Charset(71, 'hebrew', 'hebrew_bin', ''))
_charsets.add(Charset(72, 'hp8', 'hp8_bin', ''))
_charsets.add(Charset(73, 'keybcs2', 'keybcs2_bin', ''))
_charsets.add(Charset(74, 'koi8r', 'koi8r_bin', ''))
_charsets.add(Charset(75, 'koi8u', 'koi8u_bin', ''))
_charsets.add(Charset(76, 'utf8', 'utf8_tolower_ci', ''))
_charsets.add(Charset(77, 'latin2', 'latin2_bin', ''))
_charsets.add(Charset(78, 'latin5', 'latin5_bin', ''))
_charsets.add(Charset(79, 'latin7', 'latin7_bin', ''))
_charsets.add(Charset(80, 'cp850', 'cp850_bin', ''))
_charsets.add(Charset(81, 'cp852', 'cp852_bin', ''))
_charsets.add(Charset(82, 'swe7', 'swe7_bin', ''))
_charsets.add(Charset(83, 'utf8', 'utf8_bin', ''))
_charsets.add(Charset(84, 'big5', 'big5_bin', ''))
_charsets.add(Charset(85, 'euckr', 'euckr_bin', ''))
_charsets.add(Charset(86, 'gb2312', 'gb2312_bin', ''))
_charsets.add(Charset(87, 'gbk', 'gbk_bin', ''))
_charsets.add(Charset(88, 'sjis', 'sjis_bin', ''))
_charsets.add(Charset(89, 'tis620', 'tis620_bin', ''))
_charsets.add(Charset(91, 'ujis', 'ujis_bin', ''))
_charsets.add(Charset(92, 'geostd8', 'geostd8_general_ci', 'Yes'))
_charsets.add(Charset(93, 'geostd8', 'geostd8_bin', ''))
_charsets.add(Charset(94, 'latin1', 'latin1_spanish_ci', ''))
_charsets.add(Charset(95, 'cp932', 'cp932_japanese_ci', 'Yes'))
_charsets.add(Charset(96, 'cp932', 'cp932_bin', ''))
_charsets.add(Charset(97, 'eucjpms', 'eucjpms_japanese_ci', 'Yes'))
_charsets.add(Charset(98, 'eucjpms', 'eucjpms_bin', ''))
_charsets.add(Charset(99, 'cp1250', 'cp1250_polish_ci', ''))
_charsets.add(Charset(192, 'utf8', 'utf8_unicode_ci', ''))
_charsets.add(Charset(193, 'utf8', 'utf8_icelandic_ci', ''))
_charsets.add(Charset(194, 'utf8', 'utf8_latvian_ci', ''))
_charsets.add(Charset(195, 'utf8', 'utf8_romanian_ci', ''))
_charsets.add(Charset(196, 'utf8', 'utf8_slovenian_ci', ''))
_charsets.add(Charset(197, 'utf8', 'utf8_polish_ci', ''))
_charsets.add(Charset(198, 'utf8', 'utf8_estonian_ci', ''))
_charsets.add(Charset(199, 'utf8', 'utf8_spanish_ci', ''))
_charsets.add(Charset(200, 'utf8', 'utf8_swedish_ci', ''))
_charsets.add(Charset(201, 'utf8', 'utf8_turkish_ci', ''))
_charsets.add(Charset(202, 'utf8', 'utf8_czech_ci', ''))
_charsets.add(Charset(203, 'utf8', 'utf8_danish_ci', ''))
_charsets.add(Charset(204, 'utf8', 'utf8_lithuanian_ci', ''))
_charsets.add(Charset(205, 'utf8', 'utf8_slovak_ci', ''))
_charsets.add(Charset(206, 'utf8', 'utf8_spanish2_ci', ''))
_charsets.add(Charset(207, 'utf8', 'utf8_roman_ci', ''))
_charsets.add(Charset(208, 'utf8', 'utf8_persian_ci', ''))
_charsets.add(Charset(209, 'utf8', 'utf8_esperanto_ci', ''))
_charsets.add(Charset(210, 'utf8', 'utf8_hungarian_ci', ''))
_charsets.add(Charset(211, 'utf8', 'utf8_sinhala_ci', ''))
_charsets.add(Charset(212, 'utf8', 'utf8_german2_ci', ''))
_charsets.add(Charset(213, 'utf8', 'utf8_croatian_ci', ''))
_charsets.add(Charset(214, 'utf8', 'utf8_unicode_520_ci', ''))
_charsets.add(Charset(215, 'utf8', 'utf8_vietnamese_ci', ''))
_charsets.add(Charset(223, 'utf8', 'utf8_general_mysql500_ci', ''))
_charsets.add(Charset(224, 'utf8mb4', 'utf8mb4_unicode_ci', ''))
_charsets.add(Charset(225, 'utf8mb4', 'utf8mb4_icelandic_ci', ''))
_charsets.add(Charset(226, 'utf8mb4', 'utf8mb4_latvian_ci', ''))
_charsets.add(Charset(227, 'utf8mb4', 'utf8mb4_romanian_ci', ''))
_charsets.add(Charset(228, 'utf8mb4', 'utf8mb4_slovenian_ci', ''))
_charsets.add(Charset(229, 'utf8mb4', 'utf8mb4_polish_ci', ''))
_charsets.add(Charset(230, 'utf8mb4', 'utf8mb4_estonian_ci', ''))
_charsets.add(Charset(231, 'utf8mb4', 'utf8mb4_spanish_ci', ''))
_charsets.add(Charset(232, 'utf8mb4', 'utf8mb4_swedish_ci', ''))
_charsets.add(Charset(233, 'utf8mb4', 'utf8mb4_turkish_ci', ''))
_charsets.add(Charset(234, 'utf8mb4', 'utf8mb4_czech_ci', ''))
_charsets.add(Charset(235, 'utf8mb4', 'utf8mb4_danish_ci', ''))
_charsets.add(Charset(236, 'utf8mb4', 'utf8mb4_lithuanian_ci', ''))
_charsets.add(Charset(237, 'utf8mb4', 'utf8mb4_slovak_ci', ''))
_charsets.add(Charset(238, 'utf8mb4', 'utf8mb4_spanish2_ci', ''))
_charsets.add(Charset(239, 'utf8mb4', 'utf8mb4_roman_ci', ''))
_charsets.add(Charset(240, 'utf8mb4', 'utf8mb4_persian_ci', ''))
_charsets.add(Charset(241, 'utf8mb4', 'utf8mb4_esperanto_ci', ''))
_charsets.add(Charset(242, 'utf8mb4', 'utf8mb4_hungarian_ci', ''))
_charsets.add(Charset(243, 'utf8mb4', 'utf8mb4_sinhala_ci', ''))
_charsets.add(Charset(244, 'utf8mb4', 'utf8mb4_german2_ci', ''))
_charsets.add(Charset(245, 'utf8mb4', 'utf8mb4_croatian_ci', ''))
_charsets.add(Charset(246, 'utf8mb4', 'utf8mb4_unicode_520_ci', ''))
_charsets.add(Charset(247, 'utf8mb4', 'utf8mb4_vietnamese_ci', ''))
_charsets.add(Charset(248, 'gb18030', 'gb18030_chinese_ci', 'Yes'))
_charsets.add(Charset(249, 'gb18030', 'gb18030_bin', ''))
_charsets.add(Charset(250, 'gb18030', 'gb18030_unicode_520_ci', ''))
_charsets.add(Charset(255, 'utf8mb4', 'utf8mb4_0900_ai_ci', ''))
charset_by_name = _charsets.by_name
charset_by_id = _charsets.by_id
#TODO: remove this
def charset_to_encoding(name):
"""Convert MySQL's charset name to Python's codec name"""
if name in ('utf8mb4', 'utf8mb3'):
return 'utf8'
return name | zdpapi-mysql | /zdpapi_mysql-1.0.2-py3-none-any.whl/zdpapi_mysql/pymysql/charset.py | charset.py |
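# A minimal usage sketch (illustrative; runs only when this module is executed
# directly): look up charset metadata by collation id or by name, and map a
# MySQL charset name to a Python codec name via the helpers above.
if __name__ == "__main__":
    print(charset_by_id(63).name)                # 'binary' (id 63 in the table above)
    print(charset_by_name('utf8mb4').encoding)   # the Python codec used for utf8mb4
    print(charset_to_encoding('utf8mb4'))        # 'utf8'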
import struct
from .constants import ER
class MySQLError(Exception):
"""Exception related to operation with MySQL."""
class Warning(Warning, MySQLError):
"""Exception raised for important warnings like data truncations
while inserting, etc."""
class Error(MySQLError):
"""Exception that is the base class of all other error exceptions
(not Warning)."""
class InterfaceError(Error):
"""Exception raised for errors that are related to the database
interface rather than the database itself."""
class DatabaseError(Error):
"""Exception raised for errors that are related to the
database."""
class DataError(DatabaseError):
"""Exception raised for errors that are due to problems with the
processed data like division by zero, numeric value out of range,
etc."""
class OperationalError(DatabaseError):
"""Exception raised for errors that are related to the database's
operation and not necessarily under the control of the programmer,
e.g. an unexpected disconnect occurs, the data source name is not
found, a transaction could not be processed, a memory allocation
error occurred during processing, etc."""
class IntegrityError(DatabaseError):
"""Exception raised when the relational integrity of the database
is affected, e.g. a foreign key check fails, duplicate key,
etc."""
class InternalError(DatabaseError):
"""Exception raised when the database encounters an internal
error, e.g. the cursor is not valid anymore, the transaction is
out of sync, etc."""
class ProgrammingError(DatabaseError):
"""Exception raised for programming errors, e.g. table not found
or already exists, syntax error in the SQL statement, wrong number
of parameters specified, etc."""
class NotSupportedError(DatabaseError):
"""Exception raised in case a method or database API was used
which is not supported by the database, e.g. requesting a
.rollback() on a connection that does not support transaction or
has transactions turned off."""
error_map = {}
def _map_error(exc, *errors):
for error in errors:
error_map[error] = exc
_map_error(ProgrammingError, ER.DB_CREATE_EXISTS, ER.SYNTAX_ERROR,
ER.PARSE_ERROR, ER.NO_SUCH_TABLE, ER.WRONG_DB_NAME,
ER.WRONG_TABLE_NAME, ER.FIELD_SPECIFIED_TWICE,
ER.INVALID_GROUP_FUNC_USE, ER.UNSUPPORTED_EXTENSION,
ER.TABLE_MUST_HAVE_COLUMNS, ER.CANT_DO_THIS_DURING_AN_TRANSACTION,
ER.WRONG_DB_NAME, ER.WRONG_COLUMN_NAME,
)
_map_error(DataError, ER.WARN_DATA_TRUNCATED, ER.WARN_NULL_TO_NOTNULL,
ER.WARN_DATA_OUT_OF_RANGE, ER.NO_DEFAULT, ER.PRIMARY_CANT_HAVE_NULL,
ER.DATA_TOO_LONG, ER.DATETIME_FUNCTION_OVERFLOW)
_map_error(IntegrityError, ER.DUP_ENTRY, ER.NO_REFERENCED_ROW,
ER.NO_REFERENCED_ROW_2, ER.ROW_IS_REFERENCED, ER.ROW_IS_REFERENCED_2,
ER.CANNOT_ADD_FOREIGN, ER.BAD_NULL_ERROR)
_map_error(NotSupportedError, ER.WARNING_NOT_COMPLETE_ROLLBACK,
ER.NOT_SUPPORTED_YET, ER.FEATURE_DISABLED, ER.UNKNOWN_STORAGE_ENGINE)
_map_error(OperationalError, ER.DBACCESS_DENIED_ERROR, ER.ACCESS_DENIED_ERROR,
ER.CON_COUNT_ERROR, ER.TABLEACCESS_DENIED_ERROR,
ER.COLUMNACCESS_DENIED_ERROR, ER.CONSTRAINT_FAILED, ER.LOCK_DEADLOCK)
del _map_error, ER
def raise_mysql_exception(data):
errno = struct.unpack('<h', data[1:3])[0]
is_41 = data[3:4] == b"#"
if is_41:
# client protocol 4.1
errval = data[9:].decode('utf-8', 'replace')
else:
errval = data[3:].decode('utf-8', 'replace')
errorclass = error_map.get(errno, InternalError)
raise errorclass(errno, errval) | zdpapi-mysql | /zdpapi_mysql-1.0.2-py3-none-any.whl/zdpapi_mysql/pymysql/err.py | err.py |
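# An illustrative sketch (hand-built packet, not real server traffic): an error
# packet starts with 0xff, then a little-endian 2-byte errno, then for protocol
# 4.1 a '#' plus a 5-char sqlstate, then the message. errno 1064 is assumed to
# be ER.PARSE_ERROR, which the mapping above routes to ProgrammingError.
if __name__ == "__main__":
    packet = b'\xff' + struct.pack('<h', 1064) + b'#42000' + b'syntax error'
    try:
        raise_mysql_exception(packet)
    except ProgrammingError as e:
        print(e.args)  # (1064, 'syntax error')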
from ._compat import text_type, PY2
from .constants import CLIENT
from .err import OperationalError
from .util import byte2int, int2byte
try:
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.hazmat.primitives.asymmetric import padding
_have_cryptography = True
except ImportError:
_have_cryptography = False
from functools import partial
import hashlib
import io
import struct
import warnings
DEBUG = False
SCRAMBLE_LENGTH = 20
sha1_new = partial(hashlib.new, 'sha1')
# mysql_native_password
# https://dev.mysql.com/doc/internals/en/secure-password-authentication.html#packet-Authentication::Native41
def scramble_native_password(password, message):
"""Scramble used for mysql_native_password"""
if not password:
return b''
stage1 = sha1_new(password).digest()
stage2 = sha1_new(stage1).digest()
s = sha1_new()
s.update(message[:SCRAMBLE_LENGTH])
s.update(stage2)
result = s.digest()
return _my_crypt(result, stage1)
def _my_crypt(message1, message2):
result = bytearray(message1)
if PY2:
message2 = bytearray(message2)
for i in range(len(result)):
result[i] ^= message2[i]
return bytes(result)
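# A small self-check sketch (fake 20-byte salt, hypothetical password): the
# native scramble is SHA1(password) XOR SHA1(salt + SHA1(SHA1(password))), so
# the token is always a 20-byte SHA1 digest, and an empty password
# short-circuits to b''.
if __name__ == "__main__":
    salt = b'\x01' * SCRAMBLE_LENGTH
    token = scramble_native_password(b'secret', salt)
    assert len(token) == 20
    assert scramble_native_password(b'', salt) == b''
    print(repr(token))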
# old_passwords support ported from libmysql/password.c
# https://dev.mysql.com/doc/internals/en/old-password-authentication.html
SCRAMBLE_LENGTH_323 = 8
class RandStruct_323(object):
def __init__(self, seed1, seed2):
self.max_value = 0x3FFFFFFF
self.seed1 = seed1 % self.max_value
self.seed2 = seed2 % self.max_value
def my_rnd(self):
self.seed1 = (self.seed1 * 3 + self.seed2) % self.max_value
self.seed2 = (self.seed1 + self.seed2 + 33) % self.max_value
return float(self.seed1) / float(self.max_value)
def scramble_old_password(password, message):
"""Scramble for old_password"""
warnings.warn("old password (for MySQL <4.1) is used. Upgrade your password with newer auth method.\n"
"old password support will be removed in future PyMySQL version")
hash_pass = _hash_password_323(password)
hash_message = _hash_password_323(message[:SCRAMBLE_LENGTH_323])
hash_pass_n = struct.unpack(">LL", hash_pass)
hash_message_n = struct.unpack(">LL", hash_message)
rand_st = RandStruct_323(
hash_pass_n[0] ^ hash_message_n[0], hash_pass_n[1] ^ hash_message_n[1]
)
outbuf = io.BytesIO()
for _ in range(min(SCRAMBLE_LENGTH_323, len(message))):
outbuf.write(int2byte(int(rand_st.my_rnd() * 31) + 64))
extra = int2byte(int(rand_st.my_rnd() * 31))
out = outbuf.getvalue()
outbuf = io.BytesIO()
for c in out:
outbuf.write(int2byte(byte2int(c) ^ byte2int(extra)))
return outbuf.getvalue()
def _hash_password_323(password):
nr = 1345345333
add = 7
nr2 = 0x12345671
    # x in py3 is an int; in py2 it is a 1-char str
for c in [byte2int(x) for x in password if x not in (' ', '\t', 32, 9)]:
nr ^= (((nr & 63) + add) * c) + (nr << 8) & 0xFFFFFFFF
nr2 = (nr2 + ((nr2 << 8) ^ nr)) & 0xFFFFFFFF
add = (add + c) & 0xFFFFFFFF
r1 = nr & ((1 << 31) - 1) # kill sign bits
r2 = nr2 & ((1 << 31) - 1)
return struct.pack(">LL", r1, r2)
# sha256_password
def _roundtrip(conn, send_data):
conn.write_packet(send_data)
pkt = conn._read_packet()
pkt.check_error()
return pkt
def _xor_password(password, salt):
password_bytes = bytearray(password)
salt = bytearray(salt) # for PY2 compat.
salt_len = len(salt)
for i in range(len(password_bytes)):
password_bytes[i] ^= salt[i % salt_len]
return bytes(password_bytes)
def sha2_rsa_encrypt(password, salt, public_key):
"""Encrypt password with salt and public_key.
Used for sha256_password and caching_sha2_password.
"""
if not _have_cryptography:
raise RuntimeError("cryptography is required for sha256_password or caching_sha2_password")
message = _xor_password(password + b'\0', salt)
rsa_key = serialization.load_pem_public_key(public_key, default_backend())
return rsa_key.encrypt(
message,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None,
),
)
def sha256_password_auth(conn, pkt):
if conn._secure:
if DEBUG:
print("sha256: Sending plain password")
data = conn.password + b'\0'
return _roundtrip(conn, data)
if pkt.is_auth_switch_request():
conn.salt = pkt.read_all()
if not conn.server_public_key and conn.password:
# Request server public key
if DEBUG:
print("sha256: Requesting server public key")
pkt = _roundtrip(conn, b'\1')
if pkt.is_extra_auth_data():
conn.server_public_key = pkt._data[1:]
if DEBUG:
print("Received public key:\n", conn.server_public_key.decode('ascii'))
if conn.password:
if not conn.server_public_key:
raise OperationalError("Couldn't receive server's public key")
data = sha2_rsa_encrypt(conn.password, conn.salt, conn.server_public_key)
else:
data = b''
return _roundtrip(conn, data)
def scramble_caching_sha2(password, nonce):
# (bytes, bytes) -> bytes
"""Scramble algorithm used in cached_sha2_password fast path.
    XOR(SHA256(password), SHA256(SHA256(SHA256(password)) + nonce))
"""
if not password:
return b''
p1 = hashlib.sha256(password).digest()
p2 = hashlib.sha256(p1).digest()
p3 = hashlib.sha256(p2 + nonce).digest()
res = bytearray(p1)
if PY2:
p3 = bytearray(p3)
for i in range(len(p3)):
res[i] ^= p3[i]
return bytes(res)
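# A minimal sketch (fake nonce, hypothetical password) re-deriving the formula
# in the docstring above: XOR(SHA256(password), SHA256(SHA256(SHA256(password)) + nonce)).
if __name__ == "__main__":
    password, nonce = b'secret', b'\x02' * 20
    token = scramble_caching_sha2(password, nonce)
    p1 = hashlib.sha256(password).digest()
    p3 = hashlib.sha256(hashlib.sha256(p1).digest() + nonce).digest()
    assert token == bytes(bytearray(a ^ b for a, b in zip(bytearray(p1), bytearray(p3))))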
def caching_sha2_password_auth(conn, pkt):
# No password fast path
if not conn.password:
return _roundtrip(conn, b'')
if pkt.is_auth_switch_request():
# Try from fast auth
if DEBUG:
print("caching sha2: Trying fast path")
conn.salt = pkt.read_all()
scrambled = scramble_caching_sha2(conn.password, conn.salt)
pkt = _roundtrip(conn, scrambled)
# else: fast auth is tried in initial handshake
if not pkt.is_extra_auth_data():
raise OperationalError(
"caching sha2: Unknown packet for fast auth: %s" % pkt._data[:1]
)
# magic numbers:
# 2 - request public key
# 3 - fast auth succeeded
# 4 - need full auth
pkt.advance(1)
n = pkt.read_uint8()
if n == 3:
if DEBUG:
print("caching sha2: succeeded by fast path.")
pkt = conn._read_packet()
pkt.check_error() # pkt must be OK packet
return pkt
if n != 4:
raise OperationalError("caching sha2: Unknwon result for fast auth: %s" % n)
if DEBUG:
print("caching sha2: Trying full auth...")
if conn._secure:
if DEBUG:
print("caching sha2: Sending plain password via secure connection")
return _roundtrip(conn, conn.password + b'\0')
if not conn.server_public_key:
pkt = _roundtrip(conn, b'\x02') # Request public key
if not pkt.is_extra_auth_data():
raise OperationalError(
"caching sha2: Unknown packet for public key: %s" % pkt._data[:1]
)
conn.server_public_key = pkt._data[1:]
if DEBUG:
print(conn.server_public_key.decode('ascii'))
data = sha2_rsa_encrypt(conn.password, conn.salt, conn.server_public_key)
    pkt = _roundtrip(conn, data)
    return pkt
from __future__ import print_function
from ._compat import PY2, range_type, text_type, str_type, JYTHON, IRONPYTHON
import errno
import io
import os
import socket
import struct
import sys
import traceback
import warnings
from . import _auth
from .charset import charset_by_name, charset_by_id
from .constants import CLIENT, COMMAND, CR, FIELD_TYPE, SERVER_STATUS
from . import converters
from .cursors import Cursor
from .optionfile import Parser
from .protocol import (
dump_packet, MysqlPacket, FieldDescriptorPacket, OKPacketWrapper,
EOFPacketWrapper, LoadLocalPacketWrapper
)
from .util import byte2int, int2byte
from . import err, VERSION_STRING
try:
import ssl
SSL_ENABLED = True
except ImportError:
ssl = None
SSL_ENABLED = False
try:
import getpass
DEFAULT_USER = getpass.getuser()
del getpass
except (ImportError, KeyError):
    # KeyError occurs when there's no entry in the OS database for the current user.
DEFAULT_USER = None
DEBUG = False
_py_version = sys.version_info[:2]
if PY2:
pass
elif _py_version < (3, 6):
# See http://bugs.python.org/issue24870
_surrogateescape_table = [chr(i) if i < 0x80 else chr(i + 0xdc00) for i in range(256)]
def _fast_surrogateescape(s):
return s.decode('latin1').translate(_surrogateescape_table)
else:
def _fast_surrogateescape(s):
return s.decode('ascii', 'surrogateescape')
# socket.makefile() in Python 2 is not usable because it is very inefficient
# and has bad timeout behavior.
# XXX: ._socketio doesn't work under IronPython.
if PY2 and not IRONPYTHON:
    # The read method of the file-like object returned by sock.makefile() is
    # very slow, so we copy the io-based implementation from Python 3.
from ._socketio import SocketIO
def _makefile(sock, mode):
return io.BufferedReader(SocketIO(sock, mode))
else:
# socket.makefile in Python 3 is nice.
def _makefile(sock, mode):
return sock.makefile(mode)
TEXT_TYPES = {
FIELD_TYPE.BIT,
FIELD_TYPE.BLOB,
FIELD_TYPE.LONG_BLOB,
FIELD_TYPE.MEDIUM_BLOB,
FIELD_TYPE.STRING,
FIELD_TYPE.TINY_BLOB,
FIELD_TYPE.VAR_STRING,
FIELD_TYPE.VARCHAR,
FIELD_TYPE.GEOMETRY,
}
DEFAULT_CHARSET = 'utf8mb4'
MAX_PACKET_LEN = 2**24-1
def pack_int24(n):
return struct.pack('<I', n)[:3]
# https://dev.mysql.com/doc/internals/en/integer.html#packet-Protocol::LengthEncodedInteger
def lenenc_int(i):
if (i < 0):
raise ValueError("Encoding %d is less than 0 - no representation in LengthEncodedInteger" % i)
elif (i < 0xfb):
return int2byte(i)
elif (i < (1 << 16)):
return b'\xfc' + struct.pack('<H', i)
elif (i < (1 << 24)):
return b'\xfd' + struct.pack('<I', i)[:3]
elif (i < (1 << 64)):
return b'\xfe' + struct.pack('<Q', i)
else:
raise ValueError("Encoding %x is larger than %x - no representation in LengthEncodedInteger" % (i, (1 << 64)))
class Connection(object):
"""
Representation of a socket with a mysql server.
The proper way to get an instance of this class is to call
connect().
Establish a connection to the MySQL database. Accepts several
arguments:
:param host: Host where the database server is located
:param user: Username to log in as
:param password: Password to use.
:param database: Database to use, None to not use a particular one.
:param port: MySQL port to use, default is usually OK. (default: 3306)
:param bind_address: When the client has multiple network interfaces, specify
the interface from which to connect to the host. Argument can be
a hostname or an IP address.
:param unix_socket: Optionally, you can use a unix socket rather than TCP/IP.
:param read_timeout: The timeout for reading from the connection in seconds (default: None - no timeout)
:param write_timeout: The timeout for writing to the connection in seconds (default: None - no timeout)
:param charset: Charset you want to use.
:param sql_mode: Default SQL_MODE to use.
:param read_default_file:
Specifies my.cnf file to read these parameters from under the [client] section.
:param conv:
Conversion dictionary to use instead of the default one.
        This is used to provide custom marshalling and unmarshalling of types.
See converters.
:param use_unicode:
Whether or not to default to unicode strings.
This option defaults to true for Py3k.
:param client_flag: Custom flags to send to MySQL. Find potential values in constants.CLIENT.
:param cursorclass: Custom cursor class to use.
:param init_command: Initial SQL statement to run when connection is established.
:param connect_timeout: Timeout before throwing an exception when connecting.
(default: 10, min: 1, max: 31536000)
:param ssl:
A dict of arguments similar to mysql_ssl_set()'s parameters.
:param read_default_group: Group to read from in the configuration file.
:param compress: Not supported
:param named_pipe: Not supported
:param autocommit: Autocommit mode. None means use server default. (default: False)
:param local_infile: Boolean to enable the use of LOAD DATA LOCAL command. (default: False)
:param max_allowed_packet: Max size of packet sent to server in bytes. (default: 16MB)
        Only used to limit the size of "LOAD LOCAL INFILE" data packets to smaller than the default (16KB).
    :param defer_connect: Don't explicitly connect on construction - wait for a connect call.
(default: False)
:param auth_plugin_map: A dict of plugin names to a class that processes that plugin.
The class will take the Connection object as the argument to the constructor.
The class needs an authenticate method taking an authentication packet as
an argument. For the dialog plugin, a prompt(echo, prompt) method can be used
(if no authenticate method) for returning a string from the user. (experimental)
    :param server_public_key: SHA256 authentication plugin public key value. (default: None)
:param db: Alias for database. (for compatibility to MySQLdb)
:param passwd: Alias for password. (for compatibility to MySQLdb)
:param binary_prefix: Add _binary prefix on bytes and bytearray. (default: False)
See `Connection <https://www.python.org/dev/peps/pep-0249/#connection-objects>`_ in the
specification.
"""
_sock = None
_auth_plugin_name = ''
_closed = False
_secure = False
def __init__(self, host=None, user=None, password="",
database=None, port=0, unix_socket=None,
charset='', sql_mode=None,
read_default_file=None, conv=None, use_unicode=None,
client_flag=0, cursorclass=Cursor, init_command=None,
connect_timeout=10, ssl=None, read_default_group=None,
compress=None, named_pipe=None,
autocommit=False, db=None, passwd=None, local_infile=False,
max_allowed_packet=16*1024*1024, defer_connect=False,
auth_plugin_map=None, read_timeout=None, write_timeout=None,
bind_address=None, binary_prefix=False, program_name=None,
server_public_key=None):
if use_unicode is None and sys.version_info[0] > 2:
use_unicode = True
if db is not None and database is None:
database = db
if passwd is not None and not password:
password = passwd
if compress or named_pipe:
raise NotImplementedError("compress and named_pipe arguments are not supported")
self._local_infile = bool(local_infile)
if self._local_infile:
client_flag |= CLIENT.LOCAL_FILES
if read_default_group and not read_default_file:
if sys.platform.startswith("win"):
read_default_file = "c:\\my.ini"
else:
read_default_file = "/etc/my.cnf"
if read_default_file:
if not read_default_group:
read_default_group = "client"
cfg = Parser()
cfg.read(os.path.expanduser(read_default_file))
def _config(key, arg):
if arg:
return arg
try:
return cfg.get(read_default_group, key)
except Exception:
return arg
user = _config("user", user)
password = _config("password", password)
host = _config("host", host)
database = _config("database", database)
unix_socket = _config("socket", unix_socket)
port = int(_config("port", port))
bind_address = _config("bind-address", bind_address)
charset = _config("default-character-set", charset)
if not ssl:
ssl = {}
if isinstance(ssl, dict):
for key in ["ca", "capath", "cert", "key", "cipher"]:
value = _config("ssl-" + key, ssl.get(key))
if value:
ssl[key] = value
self.ssl = False
if ssl:
if not SSL_ENABLED:
raise NotImplementedError("ssl module not found")
self.ssl = True
client_flag |= CLIENT.SSL
self.ctx = self._create_ssl_ctx(ssl)
self.host = host or "localhost"
self.port = port or 3306
self.user = user or DEFAULT_USER
self.password = password or b""
if isinstance(self.password, text_type):
self.password = self.password.encode('latin1')
self.db = database
self.unix_socket = unix_socket
self.bind_address = bind_address
if not (0 < connect_timeout <= 31536000):
raise ValueError("connect_timeout should be >0 and <=31536000")
self.connect_timeout = connect_timeout or None
if read_timeout is not None and read_timeout <= 0:
raise ValueError("read_timeout should be >= 0")
self._read_timeout = read_timeout
if write_timeout is not None and write_timeout <= 0:
raise ValueError("write_timeout should be >= 0")
self._write_timeout = write_timeout
if charset:
self.charset = charset
self.use_unicode = True
else:
self.charset = DEFAULT_CHARSET
self.use_unicode = False
if use_unicode is not None:
self.use_unicode = use_unicode
self.encoding = charset_by_name(self.charset).encoding
client_flag |= CLIENT.CAPABILITIES
if self.db:
client_flag |= CLIENT.CONNECT_WITH_DB
self.client_flag = client_flag
self.cursorclass = cursorclass
self._result = None
self._affected_rows = 0
self.host_info = "Not connected"
# specified autocommit mode. None means use server default.
self.autocommit_mode = autocommit
if conv is None:
conv = converters.conversions
# Need for MySQLdb compatibility.
self.encoders = {k: v for (k, v) in conv.items() if type(k) is not int}
self.decoders = {k: v for (k, v) in conv.items() if type(k) is int}
self.sql_mode = sql_mode
self.init_command = init_command
self.max_allowed_packet = max_allowed_packet
self._auth_plugin_map = auth_plugin_map or {}
self._binary_prefix = binary_prefix
self.server_public_key = server_public_key
self._connect_attrs = {
'_client_name': 'pymysql',
'_pid': str(os.getpid()),
'_client_version': VERSION_STRING,
}
if program_name:
self._connect_attrs["program_name"] = program_name
if defer_connect:
self._sock = None
else:
self.connect()
def _create_ssl_ctx(self, sslp):
if isinstance(sslp, ssl.SSLContext):
return sslp
ca = sslp.get('ca')
capath = sslp.get('capath')
hasnoca = ca is None and capath is None
ctx = ssl.create_default_context(cafile=ca, capath=capath)
ctx.check_hostname = not hasnoca and sslp.get('check_hostname', True)
ctx.verify_mode = ssl.CERT_NONE if hasnoca else ssl.CERT_REQUIRED
if 'cert' in sslp:
ctx.load_cert_chain(sslp['cert'], keyfile=sslp.get('key'))
if 'cipher' in sslp:
ctx.set_ciphers(sslp['cipher'])
ctx.options |= ssl.OP_NO_SSLv2
ctx.options |= ssl.OP_NO_SSLv3
return ctx
def close(self):
"""
Send the quit message and close the socket.
See `Connection.close() <https://www.python.org/dev/peps/pep-0249/#Connection.close>`_
in the specification.
:raise Error: If the connection is already closed.
"""
if self._closed:
raise err.Error("Already closed")
self._closed = True
if self._sock is None:
return
send_data = struct.pack('<iB', 1, COMMAND.COM_QUIT)
try:
self._write_bytes(send_data)
except Exception:
pass
finally:
self._force_close()
@property
def open(self):
"""Return True if the connection is open"""
return self._sock is not None
def _force_close(self):
"""Close connection without QUIT message"""
if self._sock:
try:
self._sock.close()
except: # noqa
pass
self._sock = None
self._rfile = None
__del__ = _force_close
def autocommit(self, value):
self.autocommit_mode = bool(value)
current = self.get_autocommit()
if value != current:
self._send_autocommit_mode()
def get_autocommit(self):
return bool(self.server_status &
SERVER_STATUS.SERVER_STATUS_AUTOCOMMIT)
def _read_ok_packet(self):
pkt = self._read_packet()
if not pkt.is_ok_packet():
raise err.OperationalError(2014, "Command Out of Sync")
ok = OKPacketWrapper(pkt)
self.server_status = ok.server_status
return ok
def _send_autocommit_mode(self):
"""Set whether or not to commit after every execute()"""
self._execute_command(COMMAND.COM_QUERY, "SET AUTOCOMMIT = %s" %
self.escape(self.autocommit_mode))
self._read_ok_packet()
def begin(self):
"""Begin transaction."""
self._execute_command(COMMAND.COM_QUERY, "BEGIN")
self._read_ok_packet()
def commit(self):
"""
Commit changes to stable storage.
See `Connection.commit() <https://www.python.org/dev/peps/pep-0249/#commit>`_
in the specification.
"""
self._execute_command(COMMAND.COM_QUERY, "COMMIT")
self._read_ok_packet()
def rollback(self):
"""
Roll back the current transaction.
See `Connection.rollback() <https://www.python.org/dev/peps/pep-0249/#rollback>`_
in the specification.
"""
self._execute_command(COMMAND.COM_QUERY, "ROLLBACK")
self._read_ok_packet()
def show_warnings(self):
"""Send the "SHOW WARNINGS" SQL command."""
self._execute_command(COMMAND.COM_QUERY, "SHOW WARNINGS")
result = MySQLResult(self)
result.read()
return result.rows
def select_db(self, db):
"""
Set current db.
:param db: The name of the db.
"""
self._execute_command(COMMAND.COM_INIT_DB, db)
self._read_ok_packet()
def escape(self, obj, mapping=None):
"""Escape whatever value you pass to it.
Non-standard, for internal use; do not use this in your applications.
"""
if isinstance(obj, str_type):
return "'" + self.escape_string(obj) + "'"
if isinstance(obj, (bytes, bytearray)):
ret = self._quote_bytes(obj)
if self._binary_prefix:
ret = "_binary" + ret
return ret
return converters.escape_item(obj, self.charset, mapping=mapping)
def literal(self, obj):
"""Alias for escape()
Non-standard, for internal use; do not use this in your applications.
"""
return self.escape(obj, self.encoders)
def escape_string(self, s):
if (self.server_status &
SERVER_STATUS.SERVER_STATUS_NO_BACKSLASH_ESCAPES):
return s.replace("'", "''")
return converters.escape_string(s)
def _quote_bytes(self, s):
if (self.server_status &
SERVER_STATUS.SERVER_STATUS_NO_BACKSLASH_ESCAPES):
return "'%s'" % (_fast_surrogateescape(s.replace(b"'", b"''")),)
return converters.escape_bytes(s)
def cursor(self, cursor=None):
"""
Create a new cursor to execute queries with.
:param cursor: The type of cursor to create; one of :py:class:`Cursor`,
:py:class:`SSCursor`, :py:class:`DictCursor`, or :py:class:`SSDictCursor`.
None means use Cursor.
"""
if cursor:
return cursor(self)
return self.cursorclass(self)
def __enter__(self):
"""Context manager that returns a Cursor"""
warnings.warn(
"Context manager API of Connection object is deprecated; Use conn.begin()",
DeprecationWarning)
return self.cursor()
def __exit__(self, exc, value, traceback):
"""On successful exit, commit. On exception, rollback"""
if exc:
self.rollback()
else:
self.commit()
# The following methods are INTERNAL USE ONLY (called from Cursor)
def query(self, sql, unbuffered=False):
# if DEBUG:
# print("DEBUG: sending query:", sql)
if isinstance(sql, text_type) and not (JYTHON or IRONPYTHON):
if PY2:
sql = sql.encode(self.encoding)
else:
sql = sql.encode(self.encoding, 'surrogateescape')
self._execute_command(COMMAND.COM_QUERY, sql)
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
return self._affected_rows
def next_result(self, unbuffered=False):
self._affected_rows = self._read_query_result(unbuffered=unbuffered)
return self._affected_rows
def affected_rows(self):
return self._affected_rows
def kill(self, thread_id):
arg = struct.pack('<I', thread_id)
self._execute_command(COMMAND.COM_PROCESS_KILL, arg)
return self._read_ok_packet()
def ping(self, reconnect=True):
"""
Check if the server is alive.
:param reconnect: If the connection is closed, reconnect.
:raise Error: If the connection is closed and reconnect=False.
"""
if self._sock is None:
if reconnect:
self.connect()
reconnect = False
else:
raise err.Error("Already closed")
try:
self._execute_command(COMMAND.COM_PING, "")
self._read_ok_packet()
except Exception:
if reconnect:
self.connect()
self.ping(False)
else:
raise
def set_charset(self, charset):
# Make sure charset is supported.
encoding = charset_by_name(charset).encoding
self._execute_command(COMMAND.COM_QUERY, "SET NAMES %s" % self.escape(charset))
self._read_packet()
self.charset = charset
self.encoding = encoding
def connect(self, sock=None):
self._closed = False
try:
if sock is None:
if self.unix_socket:
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.settimeout(self.connect_timeout)
sock.connect(self.unix_socket)
self.host_info = "Localhost via UNIX socket"
self._secure = True
if DEBUG: print('connected using unix_socket')
else:
kwargs = {}
if self.bind_address is not None:
kwargs['source_address'] = (self.bind_address, 0)
while True:
try:
sock = socket.create_connection(
(self.host, self.port), self.connect_timeout,
**kwargs)
break
except (OSError, IOError) as e:
if e.errno == errno.EINTR:
continue
raise
self.host_info = "socket %s:%d" % (self.host, self.port)
if DEBUG: print('connected using socket')
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sock.settimeout(None)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
self._sock = sock
self._rfile = _makefile(sock, 'rb')
self._next_seq_id = 0
self._get_server_information()
self._request_authentication()
if self.sql_mode is not None:
c = self.cursor()
c.execute("SET sql_mode=%s", (self.sql_mode,))
if self.init_command is not None:
c = self.cursor()
c.execute(self.init_command)
c.close()
self.commit()
if self.autocommit_mode is not None:
self.autocommit(self.autocommit_mode)
except BaseException as e:
self._rfile = None
if sock is not None:
try:
sock.close()
except: # noqa
pass
if isinstance(e, (OSError, IOError, socket.error)):
exc = err.OperationalError(
2003,
"Can't connect to MySQL server on %r (%s)" % (
self.host, e))
# Keep original exception and traceback to investigate error.
exc.original_exception = e
exc.traceback = traceback.format_exc()
if DEBUG: print(exc.traceback)
raise exc
# If e is neither DatabaseError or IOError, It's a bug.
# But raising AssertionError hides original error.
# So just reraise it.
raise
def write_packet(self, payload):
"""Writes an entire "mysql packet" in its entirety to the network
addings its length and sequence number.
"""
# Internal note: when you build packet manualy and calls _write_bytes()
# directly, you should set self._next_seq_id properly.
data = pack_int24(len(payload)) + int2byte(self._next_seq_id) + payload
if DEBUG: dump_packet(data)
self._write_bytes(data)
self._next_seq_id = (self._next_seq_id + 1) % 256
def _read_packet(self, packet_type=MysqlPacket):
"""Read an entire "mysql packet" in its entirety from the network
and return a MysqlPacket type that represents the results.
:raise OperationalError: If the connection to the MySQL server is lost.
:raise InternalError: If the packet sequence number is wrong.
"""
buff = b''
while True:
packet_header = self._read_bytes(4)
#if DEBUG: dump_packet(packet_header)
btrl, btrh, packet_number = struct.unpack('<HBB', packet_header)
bytes_to_read = btrl + (btrh << 16)
if packet_number != self._next_seq_id:
self._force_close()
if packet_number == 0:
# MariaDB sends error packet with seqno==0 when shutdown
raise err.OperationalError(
CR.CR_SERVER_LOST,
"Lost connection to MySQL server during query")
raise err.InternalError(
"Packet sequence number wrong - got %d expected %d"
% (packet_number, self._next_seq_id))
self._next_seq_id = (self._next_seq_id + 1) % 256
recv_data = self._read_bytes(bytes_to_read)
if DEBUG: dump_packet(recv_data)
buff += recv_data
# https://dev.mysql.com/doc/internals/en/sending-more-than-16mbyte.html
if bytes_to_read == 0xffffff:
continue
if bytes_to_read < MAX_PACKET_LEN:
break
packet = packet_type(buff, self.encoding)
packet.check_error()
return packet
def _read_bytes(self, num_bytes):
self._sock.settimeout(self._read_timeout)
while True:
try:
data = self._rfile.read(num_bytes)
break
except (IOError, OSError) as e:
if e.errno == errno.EINTR:
continue
self._force_close()
raise err.OperationalError(
CR.CR_SERVER_LOST,
"Lost connection to MySQL server during query (%s)" % (e,))
except BaseException:
# Don't convert unknown exception to MySQLError.
self._force_close()
raise
if len(data) < num_bytes:
self._force_close()
raise err.OperationalError(
CR.CR_SERVER_LOST, "Lost connection to MySQL server during query")
return data
def _write_bytes(self, data):
self._sock.settimeout(self._write_timeout)
try:
self._sock.sendall(data)
except IOError as e:
self._force_close()
raise err.OperationalError(
CR.CR_SERVER_GONE_ERROR,
"MySQL server has gone away (%r)" % (e,))
def _read_query_result(self, unbuffered=False):
self._result = None
if unbuffered:
try:
result = MySQLResult(self)
result.init_unbuffered_query()
except:
result.unbuffered_active = False
result.connection = None
raise
else:
result = MySQLResult(self)
result.read()
self._result = result
if result.server_status is not None:
self.server_status = result.server_status
return result.affected_rows
def insert_id(self):
if self._result:
return self._result.insert_id
else:
return 0
def _execute_command(self, command, sql):
"""
:raise InterfaceError: If the connection is closed.
:raise ValueError: If no username was specified.
"""
if not self._sock:
raise err.InterfaceError("(0, '')")
# If the last query was unbuffered, make sure it finishes before
# sending new commands
if self._result is not None:
if self._result.unbuffered_active:
warnings.warn("Previous unbuffered result was left incomplete")
self._result._finish_unbuffered_query()
while self._result.has_next:
self.next_result()
self._result = None
if isinstance(sql, text_type):
sql = sql.encode(self.encoding)
packet_size = min(MAX_PACKET_LEN, len(sql) + 1) # +1 is for command
        # tiny optimization: build the first packet manually instead of
        # calling self.write_packet()
prelude = struct.pack('<iB', packet_size, command)
packet = prelude + sql[:packet_size-1]
self._write_bytes(packet)
if DEBUG: dump_packet(packet)
self._next_seq_id = 1
if packet_size < MAX_PACKET_LEN:
return
sql = sql[packet_size-1:]
while True:
packet_size = min(MAX_PACKET_LEN, len(sql))
self.write_packet(sql[:packet_size])
sql = sql[packet_size:]
if not sql and packet_size < MAX_PACKET_LEN:
break
def _request_authentication(self):
# https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::HandshakeResponse
if int(self.server_version.split('.', 1)[0]) >= 5:
self.client_flag |= CLIENT.MULTI_RESULTS
if self.user is None:
raise ValueError("Did not specify a username")
charset_id = charset_by_name(self.charset).id
if isinstance(self.user, text_type):
self.user = self.user.encode(self.encoding)
data_init = struct.pack('<iIB23s', self.client_flag, MAX_PACKET_LEN, charset_id, b'')
if self.ssl and self.server_capabilities & CLIENT.SSL:
self.write_packet(data_init)
self._sock = self.ctx.wrap_socket(self._sock, server_hostname=self.host)
self._rfile = _makefile(self._sock, 'rb')
self._secure = True
data = data_init + self.user + b'\0'
authresp = b''
plugin_name = None
if self._auth_plugin_name == '':
plugin_name = b''
authresp = _auth.scramble_native_password(self.password, self.salt)
elif self._auth_plugin_name == 'mysql_native_password':
plugin_name = b'mysql_native_password'
authresp = _auth.scramble_native_password(self.password, self.salt)
elif self._auth_plugin_name == 'caching_sha2_password':
plugin_name = b'caching_sha2_password'
if self.password:
if DEBUG:
print("caching_sha2: trying fast path")
authresp = _auth.scramble_caching_sha2(self.password, self.salt)
else:
if DEBUG:
print("caching_sha2: empty password")
elif self._auth_plugin_name == 'sha256_password':
plugin_name = b'sha256_password'
if self.ssl and self.server_capabilities & CLIENT.SSL:
authresp = self.password + b'\0'
elif self.password:
authresp = b'\1' # request public key
else:
authresp = b'\0' # empty password
if self.server_capabilities & CLIENT.PLUGIN_AUTH_LENENC_CLIENT_DATA:
data += lenenc_int(len(authresp)) + authresp
elif self.server_capabilities & CLIENT.SECURE_CONNECTION:
data += struct.pack('B', len(authresp)) + authresp
else: # pragma: no cover - not testing against servers without secure auth (>=5.0)
data += authresp + b'\0'
if self.db and self.server_capabilities & CLIENT.CONNECT_WITH_DB:
if isinstance(self.db, text_type):
self.db = self.db.encode(self.encoding)
data += self.db + b'\0'
if self.server_capabilities & CLIENT.PLUGIN_AUTH:
data += (plugin_name or b'') + b'\0'
if self.server_capabilities & CLIENT.CONNECT_ATTRS:
connect_attrs = b''
for k, v in self._connect_attrs.items():
k = k.encode('utf-8')
connect_attrs += struct.pack('B', len(k)) + k
v = v.encode('utf-8')
connect_attrs += struct.pack('B', len(v)) + v
data += struct.pack('B', len(connect_attrs)) + connect_attrs
self.write_packet(data)
auth_packet = self._read_packet()
# if authentication method isn't accepted the first byte
# will have the octet 254
if auth_packet.is_auth_switch_request():
if DEBUG: print("received auth switch")
# https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
auth_packet.read_uint8() # 0xfe packet identifier
plugin_name = auth_packet.read_string()
if self.server_capabilities & CLIENT.PLUGIN_AUTH and plugin_name is not None:
auth_packet = self._process_auth(plugin_name, auth_packet)
else:
# send legacy handshake
data = _auth.scramble_old_password(self.password, self.salt) + b'\0'
self.write_packet(data)
auth_packet = self._read_packet()
elif auth_packet.is_extra_auth_data():
if DEBUG:
print("received extra data")
# https://dev.mysql.com/doc/internals/en/successful-authentication.html
if self._auth_plugin_name == "caching_sha2_password":
auth_packet = _auth.caching_sha2_password_auth(self, auth_packet)
elif self._auth_plugin_name == "sha256_password":
auth_packet = _auth.sha256_password_auth(self, auth_packet)
else:
raise err.OperationalError("Received extra packet for auth method %r", self._auth_plugin_name)
if DEBUG: print("Succeed to auth")
def _process_auth(self, plugin_name, auth_packet):
handler = self._get_auth_plugin_handler(plugin_name)
if handler:
try:
return handler.authenticate(auth_packet)
except AttributeError:
if plugin_name != b'dialog':
raise err.OperationalError(2059, "Authentication plugin '%s'"
" not loaded: - %r missing authenticate method" % (plugin_name, type(handler)))
if plugin_name == b"caching_sha2_password":
return _auth.caching_sha2_password_auth(self, auth_packet)
elif plugin_name == b"sha256_password":
return _auth.sha256_password_auth(self, auth_packet)
elif plugin_name == b"mysql_native_password":
data = _auth.scramble_native_password(self.password, auth_packet.read_all())
elif plugin_name == b"mysql_old_password":
data = _auth.scramble_old_password(self.password, auth_packet.read_all()) + b'\0'
elif plugin_name == b"mysql_clear_password":
# https://dev.mysql.com/doc/internals/en/clear-text-authentication.html
data = self.password + b'\0'
elif plugin_name == b"dialog":
pkt = auth_packet
while True:
flag = pkt.read_uint8()
echo = (flag & 0x06) == 0x02
last = (flag & 0x01) == 0x01
prompt = pkt.read_all()
if prompt == b"Password: ":
self.write_packet(self.password + b'\0')
elif handler:
resp = 'no response - TypeError within plugin.prompt method'
try:
resp = handler.prompt(echo, prompt)
self.write_packet(resp + b'\0')
except AttributeError:
raise err.OperationalError(2059, "Authentication plugin '%s'" \
" not loaded: - %r missing prompt method" % (plugin_name, handler))
except TypeError:
raise err.OperationalError(2061, "Authentication plugin '%s'" \
" %r didn't respond with string. Returned '%r' to prompt %r" % (plugin_name, handler, resp, prompt))
else:
raise err.OperationalError(2059, "Authentication plugin '%s' (%r) not configured" % (plugin_name, handler))
pkt = self._read_packet()
pkt.check_error()
if pkt.is_ok_packet() or last:
break
return pkt
else:
raise err.OperationalError(2059, "Authentication plugin '%s' not configured" % plugin_name)
self.write_packet(data)
pkt = self._read_packet()
pkt.check_error()
return pkt
def _get_auth_plugin_handler(self, plugin_name):
plugin_class = self._auth_plugin_map.get(plugin_name)
if not plugin_class and isinstance(plugin_name, bytes):
plugin_class = self._auth_plugin_map.get(plugin_name.decode('ascii'))
if plugin_class:
try:
handler = plugin_class(self)
except TypeError:
raise err.OperationalError(2059, "Authentication plugin '%s'"
" not loaded: - %r cannot be constructed with connection object" % (plugin_name, plugin_class))
else:
handler = None
return handler
# _mysql support
def thread_id(self):
return self.server_thread_id[0]
def character_set_name(self):
return self.charset
def get_host_info(self):
return self.host_info
def get_proto_info(self):
return self.protocol_version
def _get_server_information(self):
i = 0
packet = self._read_packet()
data = packet.get_all_data()
self.protocol_version = byte2int(data[i:i+1])
i += 1
server_end = data.find(b'\0', i)
self.server_version = data[i:server_end].decode('latin1')
i = server_end + 1
self.server_thread_id = struct.unpack('<I', data[i:i+4])
i += 4
self.salt = data[i:i+8]
i += 9 # 8 + 1(filler)
self.server_capabilities = struct.unpack('<H', data[i:i+2])[0]
i += 2
if len(data) >= i + 6:
lang, stat, cap_h, salt_len = struct.unpack('<BHHB', data[i:i+6])
i += 6
# TODO: deprecate server_language and server_charset.
# mysqlclient-python doesn't provide it.
self.server_language = lang
try:
self.server_charset = charset_by_id(lang).name
except KeyError:
# unknown collation
self.server_charset = None
self.server_status = stat
if DEBUG: print("server_status: %x" % stat)
self.server_capabilities |= cap_h << 16
if DEBUG: print("salt_len:", salt_len)
salt_len = max(12, salt_len - 9)
# reserved
i += 10
if len(data) >= i + salt_len:
# salt_len includes auth_plugin_data_part_1 and filler
self.salt += data[i:i+salt_len]
i += salt_len
            i += 1
# AUTH PLUGIN NAME may appear here.
if self.server_capabilities & CLIENT.PLUGIN_AUTH and len(data) >= i:
            # Due to Bug#59453, the auth-plugin-name is missing the terminating
            # NUL char in versions prior to 5.5.10 and 5.6.2.
            # ref: https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::Handshake
            # We don't use version checks here because MariaDB includes the fix
            # but reports a version earlier than those two.
server_end = data.find(b'\0', i)
if server_end < 0: # pragma: no cover - very specific upstream bug
# not found \0 and last field so take it all
self._auth_plugin_name = data[i:].decode('utf-8')
else:
self._auth_plugin_name = data[i:server_end].decode('utf-8')
def get_server_info(self):
return self.server_version
Warning = err.Warning
Error = err.Error
InterfaceError = err.InterfaceError
DatabaseError = err.DatabaseError
DataError = err.DataError
OperationalError = err.OperationalError
IntegrityError = err.IntegrityError
InternalError = err.InternalError
ProgrammingError = err.ProgrammingError
NotSupportedError = err.NotSupportedError
class MySQLResult(object):
def __init__(self, connection):
"""
:type connection: Connection
"""
self.connection = connection
self.affected_rows = None
self.insert_id = None
self.server_status = None
self.warning_count = 0
self.message = None
self.field_count = 0
self.description = None
self.rows = None
self.has_next = None
self.unbuffered_active = False
def __del__(self):
if self.unbuffered_active:
self._finish_unbuffered_query()
def read(self):
try:
first_packet = self.connection._read_packet()
if first_packet.is_ok_packet():
self._read_ok_packet(first_packet)
elif first_packet.is_load_local_packet():
self._read_load_local_packet(first_packet)
else:
self._read_result_packet(first_packet)
finally:
self.connection = None
def init_unbuffered_query(self):
"""
:raise OperationalError: If the connection to the MySQL server is lost.
:raise InternalError:
"""
self.unbuffered_active = True
first_packet = self.connection._read_packet()
if first_packet.is_ok_packet():
self._read_ok_packet(first_packet)
self.unbuffered_active = False
self.connection = None
elif first_packet.is_load_local_packet():
self._read_load_local_packet(first_packet)
self.unbuffered_active = False
self.connection = None
else:
self.field_count = first_packet.read_length_encoded_integer()
self._get_descriptions()
# Apparently, MySQLdb picks this number because it's the maximum
# value of a 64bit unsigned integer. Since we're emulating MySQLdb,
# we set it to this instead of None, which would be preferred.
self.affected_rows = 18446744073709551615
def _read_ok_packet(self, first_packet):
ok_packet = OKPacketWrapper(first_packet)
self.affected_rows = ok_packet.affected_rows
self.insert_id = ok_packet.insert_id
self.server_status = ok_packet.server_status
self.warning_count = ok_packet.warning_count
self.message = ok_packet.message
self.has_next = ok_packet.has_next
def _read_load_local_packet(self, first_packet):
if not self.connection._local_infile:
raise RuntimeError(
"**WARN**: Received LOAD_LOCAL packet but local_infile option is false.")
load_packet = LoadLocalPacketWrapper(first_packet)
sender = LoadLocalFile(load_packet.filename, self.connection)
try:
sender.send_data()
except:
self.connection._read_packet() # skip ok packet
raise
ok_packet = self.connection._read_packet()
if not ok_packet.is_ok_packet(): # pragma: no cover - upstream induced protocol error
raise err.OperationalError(2014, "Commands Out of Sync")
self._read_ok_packet(ok_packet)
def _check_packet_is_eof(self, packet):
if not packet.is_eof_packet():
return False
#TODO: Support CLIENT.DEPRECATE_EOF
# 1) Add DEPRECATE_EOF to CAPABILITIES
# 2) Mask CAPABILITIES with server_capabilities
# 3) if server_capabilities & CLIENT.DEPRECATE_EOF: use OKPacketWrapper instead of EOFPacketWrapper
wp = EOFPacketWrapper(packet)
self.warning_count = wp.warning_count
self.has_next = wp.has_next
return True
def _read_result_packet(self, first_packet):
self.field_count = first_packet.read_length_encoded_integer()
self._get_descriptions()
self._read_rowdata_packet()
def _read_rowdata_packet_unbuffered(self):
# Check if in an active query
if not self.unbuffered_active:
return
# EOF
packet = self.connection._read_packet()
if self._check_packet_is_eof(packet):
self.unbuffered_active = False
self.connection = None
self.rows = None
return
row = self._read_row_from_packet(packet)
self.affected_rows = 1
        self.rows = (row,)  # rows should be a tuple of rows for MySQL-python compatibility.
return row
def _finish_unbuffered_query(self):
# After much reading on the MySQL protocol, it appears that there is,
# in fact, no way to stop MySQL from sending all the data after
# executing a query, so we just spin, and wait for an EOF packet.
while self.unbuffered_active:
packet = self.connection._read_packet()
if self._check_packet_is_eof(packet):
self.unbuffered_active = False
self.connection = None # release reference to kill cyclic reference.
def _read_rowdata_packet(self):
"""Read a rowdata packet for each data row in the result set."""
rows = []
while True:
packet = self.connection._read_packet()
if self._check_packet_is_eof(packet):
self.connection = None # release reference to kill cyclic reference.
break
rows.append(self._read_row_from_packet(packet))
self.affected_rows = len(rows)
self.rows = tuple(rows)
def _read_row_from_packet(self, packet):
row = []
for encoding, converter in self.converters:
try:
data = packet.read_length_coded_string()
except IndexError:
# No more columns in this row
# See https://github.com/PyMySQL/PyMySQL/pull/434
break
if data is not None:
if encoding is not None:
data = data.decode(encoding)
if DEBUG: print("DEBUG: DATA = ", data)
if converter is not None:
data = converter(data)
row.append(data)
return tuple(row)
def _get_descriptions(self):
"""Read a column descriptor packet for each column in the result."""
self.fields = []
self.converters = []
use_unicode = self.connection.use_unicode
conn_encoding = self.connection.encoding
description = []
for i in range_type(self.field_count):
field = self.connection._read_packet(FieldDescriptorPacket)
self.fields.append(field)
description.append(field.description())
field_type = field.type_code
if use_unicode:
if field_type == FIELD_TYPE.JSON:
# When SELECT from JSON column: charset = binary
# When SELECT CAST(... AS JSON): charset = connection encoding
# This behavior is different from TEXT / BLOB.
# We should decode result by connection encoding regardless charsetnr.
# See https://github.com/PyMySQL/PyMySQL/issues/488
encoding = conn_encoding # SELECT CAST(... AS JSON)
elif field_type in TEXT_TYPES:
if field.charsetnr == 63: # binary
# TEXTs with charset=binary means BINARY types.
encoding = None
else:
encoding = conn_encoding
else:
                    # Integers, Dates and Times, and other basic data are encoded in ascii
encoding = 'ascii'
else:
encoding = None
converter = self.connection.decoders.get(field_type)
if converter is converters.through:
converter = None
if DEBUG: print("DEBUG: field={}, converter={}".format(field, converter))
self.converters.append((encoding, converter))
eof_packet = self.connection._read_packet()
assert eof_packet.is_eof_packet(), 'Protocol error, expecting EOF'
self.description = tuple(description)
class LoadLocalFile(object):
def __init__(self, filename, connection):
self.filename = filename
self.connection = connection
def send_data(self):
"""Send data packets from the local file to the server"""
if not self.connection._sock:
raise err.InterfaceError("(0, '')")
conn = self.connection
try:
with open(self.filename, 'rb') as open_file:
packet_size = min(conn.max_allowed_packet, 16*1024) # 16KB is efficient enough
while True:
chunk = open_file.read(packet_size)
if not chunk:
break
conn.write_packet(chunk)
except IOError:
raise err.OperationalError(1017, "Can't find file '{0}'".format(self.filename))
finally:
# send the empty packet to signify we are done sending data
conn.write_packet(b'') | zdpapi-mysql | /zdpapi_mysql-1.0.2-py3-none-any.whl/zdpapi_mysql/pymysql/connections.py | connections.py |
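# A minimal end-to-end sketch (hypothetical host and credentials; it needs a
# reachable MySQL server, so this is illustrative only): construct a Connection
# directly, run a query through a cursor, and close cleanly. The parameter
# names match the constructor documented above.
if __name__ == "__main__":
    conn = Connection(host='127.0.0.1', user='root', password='secret',
                      database='test', charset='utf8mb4')
    try:
        cur = conn.cursor()
        cur.execute("SELECT VERSION()")
        print(cur.fetchone())
    finally:
        conn.close()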
from __future__ import print_function
from .charset import MBLENGTH
from ._compat import PY2, range_type
from .constants import FIELD_TYPE, SERVER_STATUS
from . import err
from .util import byte2int
import struct
import sys
DEBUG = False
NULL_COLUMN = 251
UNSIGNED_CHAR_COLUMN = 251
UNSIGNED_SHORT_COLUMN = 252
UNSIGNED_INT24_COLUMN = 253
UNSIGNED_INT64_COLUMN = 254
def dump_packet(data): # pragma: no cover
def printable(data):
if 32 <= byte2int(data) < 127:
if isinstance(data, int):
return chr(data)
return data
return '.'
try:
print("packet length:", len(data))
for i in range(1, 7):
f = sys._getframe(i)
print("call[%d]: %s (line %d)" % (i, f.f_code.co_name, f.f_lineno))
print("-" * 66)
except ValueError:
pass
dump_data = [data[i:i+16] for i in range_type(0, min(len(data), 256), 16)]
for d in dump_data:
print(' '.join("{:02X}".format(byte2int(x)) for x in d) +
' ' * (16 - len(d)) + ' ' * 2 +
''.join(printable(x) for x in d))
print("-" * 66)
print()
class MysqlPacket(object):
"""Representation of a MySQL response packet.
Provides an interface for reading/parsing the packet results.
"""
__slots__ = ('_position', '_data')
def __init__(self, data, encoding):
self._position = 0
self._data = data
def get_all_data(self):
return self._data
def read(self, size):
"""Read the first 'size' bytes in packet and advance cursor past them."""
result = self._data[self._position:(self._position+size)]
if len(result) != size:
            error = ('Result length not equal to requested length:\n'
'Expected=%s. Actual=%s. Position: %s. Data Length: %s'
% (size, len(result), self._position, len(self._data)))
if DEBUG:
print(error)
self.dump()
raise AssertionError(error)
self._position += size
return result
def read_all(self):
"""Read all remaining data in the packet.
(Subsequent read() will return errors.)
"""
result = self._data[self._position:]
self._position = None # ensure no subsequent read()
return result
def advance(self, length):
"""Advance the cursor in data buffer 'length' bytes."""
new_position = self._position + length
if new_position < 0 or new_position > len(self._data):
raise Exception('Invalid advance amount (%s) for cursor. '
'Position=%s' % (length, new_position))
self._position = new_position
def rewind(self, position=0):
"""Set the position of the data buffer cursor to 'position'."""
if position < 0 or position > len(self._data):
raise Exception("Invalid position to rewind cursor to: %s." % position)
self._position = position
def get_bytes(self, position, length=1):
"""Get 'length' bytes starting at 'position'.
Position is start of payload (first four packet header bytes are not
included) starting at index '0'.
No error checking is done. If requesting outside end of buffer
an empty string (or string shorter than 'length') may be returned!
"""
return self._data[position:(position+length)]
if PY2:
def read_uint8(self):
result = ord(self._data[self._position])
self._position += 1
return result
else:
def read_uint8(self):
result = self._data[self._position]
self._position += 1
return result
def read_uint16(self):
result = struct.unpack_from('<H', self._data, self._position)[0]
self._position += 2
return result
def read_uint24(self):
low, high = struct.unpack_from('<HB', self._data, self._position)
self._position += 3
return low + (high << 16)
def read_uint32(self):
result = struct.unpack_from('<I', self._data, self._position)[0]
self._position += 4
return result
def read_uint64(self):
result = struct.unpack_from('<Q', self._data, self._position)[0]
self._position += 8
return result
def read_string(self):
end_pos = self._data.find(b'\0', self._position)
if end_pos < 0:
return None
result = self._data[self._position:end_pos]
self._position = end_pos + 1
return result
def read_length_encoded_integer(self):
"""Read a 'Length Coded Binary' number from the data buffer.
Length coded numbers can be anywhere from 1 to 9 bytes depending
on the value of the first byte.
"""
c = self.read_uint8()
if c == NULL_COLUMN:
return None
if c < UNSIGNED_CHAR_COLUMN:
return c
elif c == UNSIGNED_SHORT_COLUMN:
return self.read_uint16()
elif c == UNSIGNED_INT24_COLUMN:
return self.read_uint24()
elif c == UNSIGNED_INT64_COLUMN:
return self.read_uint64()
def read_length_coded_string(self):
"""Read a 'Length Coded String' from the data buffer.
A 'Length Coded String' consists first of a length coded
(unsigned, positive) integer represented in 1-9 bytes followed by
that many bytes of binary data. (For example "cat" would be "3cat".)
"""
length = self.read_length_encoded_integer()
if length is None:
return None
return self.read(length)
def read_struct(self, fmt):
s = struct.Struct(fmt)
result = s.unpack_from(self._data, self._position)
self._position += s.size
return result
def is_ok_packet(self):
# https://dev.mysql.com/doc/internals/en/packet-OK_Packet.html
return self._data[0:1] == b'\0' and len(self._data) >= 7
def is_eof_packet(self):
# http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-EOF_Packet
# Caution: \xFE may be LengthEncodedInteger.
# If \xFE is LengthEncodedInteger header, 8bytes followed.
return self._data[0:1] == b'\xfe' and len(self._data) < 9
def is_auth_switch_request(self):
# http://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
return self._data[0:1] == b'\xfe'
def is_extra_auth_data(self):
# https://dev.mysql.com/doc/internals/en/successful-authentication.html
return self._data[0:1] == b'\x01'
def is_resultset_packet(self):
field_count = ord(self._data[0:1])
return 1 <= field_count <= 250
def is_load_local_packet(self):
return self._data[0:1] == b'\xfb'
def is_error_packet(self):
return self._data[0:1] == b'\xff'
def check_error(self):
if self.is_error_packet():
self.rewind()
self.advance(1) # field_count == error (we already know that)
errno = self.read_uint16()
if DEBUG: print("errno =", errno)
err.raise_mysql_exception(self._data)
def dump(self):
dump_packet(self._data)
class FieldDescriptorPacket(MysqlPacket):
"""A MysqlPacket that represents a specific column's metadata in the result.
Parsing is automatically done and the results are exported via public
attributes on the class such as: db, table_name, name, length, type_code.
"""
def __init__(self, data, encoding):
MysqlPacket.__init__(self, data, encoding)
self._parse_field_descriptor(encoding)
def _parse_field_descriptor(self, encoding):
"""Parse the 'Field Descriptor' (Metadata) packet.
This is compatible with MySQL 4.1+ (not compatible with MySQL 4.0).
"""
self.catalog = self.read_length_coded_string()
self.db = self.read_length_coded_string()
self.table_name = self.read_length_coded_string().decode(encoding)
self.org_table = self.read_length_coded_string().decode(encoding)
self.name = self.read_length_coded_string().decode(encoding)
self.org_name = self.read_length_coded_string().decode(encoding)
self.charsetnr, self.length, self.type_code, self.flags, self.scale = (
self.read_struct('<xHIBHBxx'))
# 'default' is a length coded binary and is still in the buffer?
# not used for normal result sets...
def description(self):
"""Provides a 7-item tuple compatible with the Python PEP249 DB Spec."""
return (
self.name,
self.type_code,
None, # TODO: display_length; should this be self.length?
self.get_column_length(), # 'internal_size'
self.get_column_length(), # 'precision' # TODO: why!?!?
self.scale,
self.flags % 2 == 0)
def get_column_length(self):
if self.type_code == FIELD_TYPE.VAR_STRING:
mblen = MBLENGTH.get(self.charsetnr, 1)
return self.length // mblen
return self.length
def __str__(self):
return ('%s %r.%r.%r, type=%s, flags=%x'
% (self.__class__, self.db, self.table_name, self.name,
self.type_code, self.flags))
class OKPacketWrapper(object):
"""
OK Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet objects variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_ok_packet():
raise ValueError('Cannot create ' + str(self.__class__.__name__) +
' object from invalid packet type')
self.packet = from_packet
self.packet.advance(1)
self.affected_rows = self.packet.read_length_encoded_integer()
self.insert_id = self.packet.read_length_encoded_integer()
self.server_status, self.warning_count = self.read_struct('<HH')
self.message = self.packet.read_all()
self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
def __getattr__(self, key):
return getattr(self.packet, key)
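# Illustrative sketch (not part of the upstream module): a minimal hand-built
# OK packet and the fields the wrapper exposes. Wire layout: 0x00 header,
# affected_rows (length-encoded), insert_id (length-encoded),
# server_status (uint16), warning_count (uint16), optional message.
def _demo_ok_packet():
    ok = OKPacketWrapper(MysqlPacket(b'\x00\x01\x00\x02\x00\x00\x00', 'utf-8'))
    return ok.affected_rows, ok.insert_id, ok.server_status  # -> (1, 0, 2)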
class EOFPacketWrapper(object):
"""
EOF Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet objects variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_eof_packet():
raise ValueError(
"Cannot create '{0}' object from invalid packet type".format(
self.__class__))
self.packet = from_packet
self.warning_count, self.server_status = self.packet.read_struct('<xhh')
if DEBUG: print("server_status=", self.server_status)
self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
def __getattr__(self, key):
return getattr(self.packet, key)
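# Illustrative sketch (not part of the upstream module): a minimal EOF packet
# (0xFE header, total length < 9 bytes) and the fields the wrapper reads
# via '<xhh' (skip header, warnings, server status).
def _demo_eof_packet():
    eof = EOFPacketWrapper(MysqlPacket(b'\xfe\x00\x00\x02\x00', 'utf-8'))
    return eof.warning_count, eof.server_status  # -> (0, 2)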
class LoadLocalPacketWrapper(object):
"""
Load Local Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet objects variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_load_local_packet():
raise ValueError(
"Cannot create '{0}' object from invalid packet type".format(
self.__class__))
self.packet = from_packet
self.filename = self.packet.get_all_data()[1:]
if DEBUG: print("filename=", self.filename)
def __getattr__(self, key):
return getattr(self.packet, key) | zdpapi-mysql | /zdpapi_mysql-1.0.2-py3-none-any.whl/zdpapi_mysql/pymysql/protocol.py | protocol.py |
import sys
from ._compat import PY2
from .constants import FIELD_TYPE
from .converters import escape_dict, escape_sequence, escape_string
from .err import (
Warning, Error, InterfaceError, DataError,
DatabaseError, OperationalError, IntegrityError, InternalError,
NotSupportedError, ProgrammingError, MySQLError)
from .times import (
Date, Time, Timestamp,
DateFromTicks, TimeFromTicks, TimestampFromTicks)
VERSION = (0, 9, 3, None)
if VERSION[3] is not None:
VERSION_STRING = "%d.%d.%d_%s" % VERSION
else:
VERSION_STRING = "%d.%d.%d" % VERSION[:3]
threadsafety = 1
apilevel = "2.0"
paramstyle = "pyformat"
class DBAPISet(frozenset):
def __ne__(self, other):
if isinstance(other, set):
return frozenset.__ne__(self, other)
else:
return other not in self
def __eq__(self, other):
if isinstance(other, frozenset):
return frozenset.__eq__(self, other)
else:
return other in self
def __hash__(self):
return frozenset.__hash__(self)
STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING,
FIELD_TYPE.VAR_STRING])
BINARY = DBAPISet([FIELD_TYPE.BLOB, FIELD_TYPE.LONG_BLOB,
FIELD_TYPE.MEDIUM_BLOB, FIELD_TYPE.TINY_BLOB])
NUMBER = DBAPISet([FIELD_TYPE.DECIMAL, FIELD_TYPE.DOUBLE, FIELD_TYPE.FLOAT,
FIELD_TYPE.INT24, FIELD_TYPE.LONG, FIELD_TYPE.LONGLONG,
FIELD_TYPE.TINY, FIELD_TYPE.YEAR])
DATE = DBAPISet([FIELD_TYPE.DATE, FIELD_TYPE.NEWDATE])
TIME = DBAPISet([FIELD_TYPE.TIME])
TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME])
DATETIME = TIMESTAMP
ROWID = DBAPISet()
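# Illustrative sketch (not part of the upstream module): DBAPISet gives the
# PEP 249 type objects above their loose equality, so a single FIELD_TYPE
# code compares equal to the whole group it belongs to.
def _demo_type_objects():
    assert FIELD_TYPE.BLOB == BINARY   # member code matches the group
    assert FIELD_TYPE.LONG == NUMBER
    assert FIELD_TYPE.LONG != BINARY   # non-member code does not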
def Binary(x):
"""Return x as a binary type."""
if PY2:
return bytearray(x)
else:
return bytes(x)
def Connect(*args, **kwargs):
"""
Connect to the database; see connections.Connection.__init__() for
more information.
"""
from .connections import Connection
return Connection(*args, **kwargs)
from . import connections as _orig_conn
if _orig_conn.Connection.__init__.__doc__ is not None:
Connect.__doc__ = _orig_conn.Connection.__init__.__doc__
del _orig_conn
def get_client_info(): # for MySQLdb compatibility
version = VERSION
if VERSION[3] is None:
version = VERSION[:3]
return '.'.join(map(str, version))
connect = Connection = Connect
# we include a doctored version_info here for MySQLdb compatibility
version_info = (1, 3, 12, "final", 0)
NULL = "NULL"
__version__ = get_client_info()
def thread_safe():
return True # match MySQLdb.thread_safe()
def install_as_MySQLdb():
"""
After this function is called, any application that imports MySQLdb or
_mysql will unwittingly actually use pymysql.
"""
sys.modules["MySQLdb"] = sys.modules["_mysql"] = sys.modules["pymysql"]
__all__ = [
'BINARY', 'Binary', 'Connect', 'Connection', 'DATE', 'Date',
'Time', 'Timestamp', 'DateFromTicks', 'TimeFromTicks', 'TimestampFromTicks',
'DataError', 'DatabaseError', 'Error', 'FIELD_TYPE', 'IntegrityError',
'InterfaceError', 'InternalError', 'MySQLError', 'NULL', 'NUMBER',
'NotSupportedError', 'DBAPISet', 'OperationalError', 'ProgrammingError',
'ROWID', 'STRING', 'TIME', 'TIMESTAMP', 'Warning', 'apilevel', 'connect',
'connections', 'constants', 'converters', 'cursors',
'escape_dict', 'escape_sequence', 'escape_string', 'get_client_info',
'paramstyle', 'threadsafety', 'version_info',
"install_as_MySQLdb",
"NULL", "__version__",
] | zdpapi-mysql | /zdpapi_mysql-1.0.2-py3-none-any.whl/zdpapi_mysql/pymysql/__init__.py | __init__.py |
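# Usage sketch (illustrative; host and credentials below are placeholders):
#   import pymysql
#   conn = pymysql.connect(host="127.0.0.1", user="root", password="...", db="test")
#   with conn.cursor() as cur:
#       cur.execute("SELECT VERSION()")
#       print(cur.fetchone())
#   conn.close()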
from socket import *
import io
import errno
__all__ = ['SocketIO']
EINTR = errno.EINTR
_blocking_errnos = (errno.EAGAIN, errno.EWOULDBLOCK)
class SocketIO(io.RawIOBase):
"""Raw I/O implementation for stream sockets.
This class supports the makefile() method on sockets. It provides
the raw I/O interface on top of a socket object.
"""
# One might wonder why not let FileIO do the job instead. There are two
# main reasons why FileIO is not adapted:
# - it wouldn't work under Windows (where you can't used read() and
# write() on a socket handle)
# - it wouldn't work with socket timeouts (FileIO would ignore the
# timeout and consider the socket non-blocking)
# XXX More docs
def __init__(self, sock, mode):
if mode not in ("r", "w", "rw", "rb", "wb", "rwb"):
raise ValueError("invalid mode: %r" % mode)
io.RawIOBase.__init__(self)
self._sock = sock
if "b" not in mode:
mode += "b"
self._mode = mode
self._reading = "r" in mode
self._writing = "w" in mode
self._timeout_occurred = False
def readinto(self, b):
"""Read up to len(b) bytes into the writable buffer *b* and return
the number of bytes read. If the socket is non-blocking and no bytes
are available, None is returned.
If *b* is non-empty, a 0 return value indicates that the connection
was shutdown at the other end.
"""
self._checkClosed()
self._checkReadable()
if self._timeout_occurred:
raise IOError("cannot read from timed out object")
while True:
try:
return self._sock.recv_into(b)
except timeout:
self._timeout_occurred = True
raise
except error as e:
n = e.args[0]
if n == EINTR:
continue
if n in _blocking_errnos:
return None
raise
def write(self, b):
"""Write the given bytes or bytearray object *b* to the socket
and return the number of bytes written. This can be less than
len(b) if not all data could be written. If the socket is
non-blocking and no bytes could be written None is returned.
"""
self._checkClosed()
self._checkWritable()
try:
return self._sock.send(b)
except error as e:
# XXX what about EINTR?
if e.args[0] in _blocking_errnos:
return None
raise
def readable(self):
"""True if the SocketIO is open for reading.
"""
if self.closed:
raise ValueError("I/O operation on closed socket.")
return self._reading
def writable(self):
"""True if the SocketIO is open for writing.
"""
if self.closed:
raise ValueError("I/O operation on closed socket.")
return self._writing
def seekable(self):
"""True if the SocketIO is open for seeking.
"""
if self.closed:
raise ValueError("I/O operation on closed socket.")
return super().seekable()
def fileno(self):
"""Return the file descriptor of the underlying socket.
"""
self._checkClosed()
return self._sock.fileno()
@property
def name(self):
if not self.closed:
return self.fileno()
else:
return -1
@property
def mode(self):
return self._mode
def close(self):
"""Close the SocketIO object. This doesn't close the underlying
socket, except if all references to it have disappeared.
"""
if self.closed:
return
io.RawIOBase.close(self)
self._sock._decref_socketios()
self._sock = None | zdpapi-mysql | /zdpapi_mysql-1.0.2-py3-none-any.whl/zdpapi_mysql/pymysql/_socketio.py | _socketio.py |
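# Illustrative sketch (not part of the upstream module): SocketIO is the raw
# layer behind socket.makefile(); wrapping it in a BufferedReader yields
# ordinary file-like reads over a connected socket.
def _demo_buffered_reads(sock):
    raw = SocketIO(sock, "rb")
    reader = io.BufferedReader(raw)
    return reader.read(16)  # blocks until up to 16 bytes arrive (or EOF)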
ERROR_FIRST = 1000
HASHCHK = 1000
NISAMCHK = 1001
NO = 1002
YES = 1003
CANT_CREATE_FILE = 1004
CANT_CREATE_TABLE = 1005
CANT_CREATE_DB = 1006
DB_CREATE_EXISTS = 1007
DB_DROP_EXISTS = 1008
DB_DROP_DELETE = 1009
DB_DROP_RMDIR = 1010
CANT_DELETE_FILE = 1011
CANT_FIND_SYSTEM_REC = 1012
CANT_GET_STAT = 1013
CANT_GET_WD = 1014
CANT_LOCK = 1015
CANT_OPEN_FILE = 1016
FILE_NOT_FOUND = 1017
CANT_READ_DIR = 1018
CANT_SET_WD = 1019
CHECKREAD = 1020
DISK_FULL = 1021
DUP_KEY = 1022
ERROR_ON_CLOSE = 1023
ERROR_ON_READ = 1024
ERROR_ON_RENAME = 1025
ERROR_ON_WRITE = 1026
FILE_USED = 1027
FILSORT_ABORT = 1028
FORM_NOT_FOUND = 1029
GET_ERRNO = 1030
ILLEGAL_HA = 1031
KEY_NOT_FOUND = 1032
NOT_FORM_FILE = 1033
NOT_KEYFILE = 1034
OLD_KEYFILE = 1035
OPEN_AS_READONLY = 1036
OUTOFMEMORY = 1037
OUT_OF_SORTMEMORY = 1038
UNEXPECTED_EOF = 1039
CON_COUNT_ERROR = 1040
OUT_OF_RESOURCES = 1041
BAD_HOST_ERROR = 1042
HANDSHAKE_ERROR = 1043
DBACCESS_DENIED_ERROR = 1044
ACCESS_DENIED_ERROR = 1045
NO_DB_ERROR = 1046
UNKNOWN_COM_ERROR = 1047
BAD_NULL_ERROR = 1048
BAD_DB_ERROR = 1049
TABLE_EXISTS_ERROR = 1050
BAD_TABLE_ERROR = 1051
NON_UNIQ_ERROR = 1052
SERVER_SHUTDOWN = 1053
BAD_FIELD_ERROR = 1054
WRONG_FIELD_WITH_GROUP = 1055
WRONG_GROUP_FIELD = 1056
WRONG_SUM_SELECT = 1057
WRONG_VALUE_COUNT = 1058
TOO_LONG_IDENT = 1059
DUP_FIELDNAME = 1060
DUP_KEYNAME = 1061
DUP_ENTRY = 1062
WRONG_FIELD_SPEC = 1063
PARSE_ERROR = 1064
EMPTY_QUERY = 1065
NONUNIQ_TABLE = 1066
INVALID_DEFAULT = 1067
MULTIPLE_PRI_KEY = 1068
TOO_MANY_KEYS = 1069
TOO_MANY_KEY_PARTS = 1070
TOO_LONG_KEY = 1071
KEY_COLUMN_DOES_NOT_EXITS = 1072
BLOB_USED_AS_KEY = 1073
TOO_BIG_FIELDLENGTH = 1074
WRONG_AUTO_KEY = 1075
READY = 1076
NORMAL_SHUTDOWN = 1077
GOT_SIGNAL = 1078
SHUTDOWN_COMPLETE = 1079
FORCING_CLOSE = 1080
IPSOCK_ERROR = 1081
NO_SUCH_INDEX = 1082
WRONG_FIELD_TERMINATORS = 1083
BLOBS_AND_NO_TERMINATED = 1084
TEXTFILE_NOT_READABLE = 1085
FILE_EXISTS_ERROR = 1086
LOAD_INFO = 1087
ALTER_INFO = 1088
WRONG_SUB_KEY = 1089
CANT_REMOVE_ALL_FIELDS = 1090
CANT_DROP_FIELD_OR_KEY = 1091
INSERT_INFO = 1092
UPDATE_TABLE_USED = 1093
NO_SUCH_THREAD = 1094
KILL_DENIED_ERROR = 1095
NO_TABLES_USED = 1096
TOO_BIG_SET = 1097
NO_UNIQUE_LOGFILE = 1098
TABLE_NOT_LOCKED_FOR_WRITE = 1099
TABLE_NOT_LOCKED = 1100
BLOB_CANT_HAVE_DEFAULT = 1101
WRONG_DB_NAME = 1102
WRONG_TABLE_NAME = 1103
TOO_BIG_SELECT = 1104
UNKNOWN_ERROR = 1105
UNKNOWN_PROCEDURE = 1106
WRONG_PARAMCOUNT_TO_PROCEDURE = 1107
WRONG_PARAMETERS_TO_PROCEDURE = 1108
UNKNOWN_TABLE = 1109
FIELD_SPECIFIED_TWICE = 1110
INVALID_GROUP_FUNC_USE = 1111
UNSUPPORTED_EXTENSION = 1112
TABLE_MUST_HAVE_COLUMNS = 1113
RECORD_FILE_FULL = 1114
UNKNOWN_CHARACTER_SET = 1115
TOO_MANY_TABLES = 1116
TOO_MANY_FIELDS = 1117
TOO_BIG_ROWSIZE = 1118
STACK_OVERRUN = 1119
WRONG_OUTER_JOIN = 1120
NULL_COLUMN_IN_INDEX = 1121
CANT_FIND_UDF = 1122
CANT_INITIALIZE_UDF = 1123
UDF_NO_PATHS = 1124
UDF_EXISTS = 1125
CANT_OPEN_LIBRARY = 1126
CANT_FIND_DL_ENTRY = 1127
FUNCTION_NOT_DEFINED = 1128
HOST_IS_BLOCKED = 1129
HOST_NOT_PRIVILEGED = 1130
PASSWORD_ANONYMOUS_USER = 1131
PASSWORD_NOT_ALLOWED = 1132
PASSWORD_NO_MATCH = 1133
UPDATE_INFO = 1134
CANT_CREATE_THREAD = 1135
WRONG_VALUE_COUNT_ON_ROW = 1136
CANT_REOPEN_TABLE = 1137
INVALID_USE_OF_NULL = 1138
REGEXP_ERROR = 1139
MIX_OF_GROUP_FUNC_AND_FIELDS = 1140
NONEXISTING_GRANT = 1141
TABLEACCESS_DENIED_ERROR = 1142
COLUMNACCESS_DENIED_ERROR = 1143
ILLEGAL_GRANT_FOR_TABLE = 1144
GRANT_WRONG_HOST_OR_USER = 1145
NO_SUCH_TABLE = 1146
NONEXISTING_TABLE_GRANT = 1147
NOT_ALLOWED_COMMAND = 1148
SYNTAX_ERROR = 1149
DELAYED_CANT_CHANGE_LOCK = 1150
TOO_MANY_DELAYED_THREADS = 1151
ABORTING_CONNECTION = 1152
NET_PACKET_TOO_LARGE = 1153
NET_READ_ERROR_FROM_PIPE = 1154
NET_FCNTL_ERROR = 1155
NET_PACKETS_OUT_OF_ORDER = 1156
NET_UNCOMPRESS_ERROR = 1157
NET_READ_ERROR = 1158
NET_READ_INTERRUPTED = 1159
NET_ERROR_ON_WRITE = 1160
NET_WRITE_INTERRUPTED = 1161
TOO_LONG_STRING = 1162
TABLE_CANT_HANDLE_BLOB = 1163
TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164
DELAYED_INSERT_TABLE_LOCKED = 1165
WRONG_COLUMN_NAME = 1166
WRONG_KEY_COLUMN = 1167
WRONG_MRG_TABLE = 1168
DUP_UNIQUE = 1169
BLOB_KEY_WITHOUT_LENGTH = 1170
PRIMARY_CANT_HAVE_NULL = 1171
TOO_MANY_ROWS = 1172
REQUIRES_PRIMARY_KEY = 1173
NO_RAID_COMPILED = 1174
UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175
KEY_DOES_NOT_EXITS = 1176
CHECK_NO_SUCH_TABLE = 1177
CHECK_NOT_IMPLEMENTED = 1178
CANT_DO_THIS_DURING_AN_TRANSACTION = 1179
ERROR_DURING_COMMIT = 1180
ERROR_DURING_ROLLBACK = 1181
ERROR_DURING_FLUSH_LOGS = 1182
ERROR_DURING_CHECKPOINT = 1183
NEW_ABORTING_CONNECTION = 1184
DUMP_NOT_IMPLEMENTED = 1185
FLUSH_MASTER_BINLOG_CLOSED = 1186
INDEX_REBUILD = 1187
MASTER = 1188
MASTER_NET_READ = 1189
MASTER_NET_WRITE = 1190
FT_MATCHING_KEY_NOT_FOUND = 1191
LOCK_OR_ACTIVE_TRANSACTION = 1192
UNKNOWN_SYSTEM_VARIABLE = 1193
CRASHED_ON_USAGE = 1194
CRASHED_ON_REPAIR = 1195
WARNING_NOT_COMPLETE_ROLLBACK = 1196
TRANS_CACHE_FULL = 1197
SLAVE_MUST_STOP = 1198
SLAVE_NOT_RUNNING = 1199
BAD_SLAVE = 1200
MASTER_INFO = 1201
SLAVE_THREAD = 1202
TOO_MANY_USER_CONNECTIONS = 1203
SET_CONSTANTS_ONLY = 1204
LOCK_WAIT_TIMEOUT = 1205
LOCK_TABLE_FULL = 1206
READ_ONLY_TRANSACTION = 1207
DROP_DB_WITH_READ_LOCK = 1208
CREATE_DB_WITH_READ_LOCK = 1209
WRONG_ARGUMENTS = 1210
NO_PERMISSION_TO_CREATE_USER = 1211
UNION_TABLES_IN_DIFFERENT_DIR = 1212
LOCK_DEADLOCK = 1213
TABLE_CANT_HANDLE_FT = 1214
CANNOT_ADD_FOREIGN = 1215
NO_REFERENCED_ROW = 1216
ROW_IS_REFERENCED = 1217
CONNECT_TO_MASTER = 1218
QUERY_ON_MASTER = 1219
ERROR_WHEN_EXECUTING_COMMAND = 1220
WRONG_USAGE = 1221
WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222
CANT_UPDATE_WITH_READLOCK = 1223
MIXING_NOT_ALLOWED = 1224
DUP_ARGUMENT = 1225
USER_LIMIT_REACHED = 1226
SPECIFIC_ACCESS_DENIED_ERROR = 1227
LOCAL_VARIABLE = 1228
GLOBAL_VARIABLE = 1229
NO_DEFAULT = 1230
WRONG_VALUE_FOR_VAR = 1231
WRONG_TYPE_FOR_VAR = 1232
VAR_CANT_BE_READ = 1233
CANT_USE_OPTION_HERE = 1234
NOT_SUPPORTED_YET = 1235
MASTER_FATAL_ERROR_READING_BINLOG = 1236
SLAVE_IGNORED_TABLE = 1237
INCORRECT_GLOBAL_LOCAL_VAR = 1238
WRONG_FK_DEF = 1239
KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240
OPERAND_COLUMNS = 1241
SUBQUERY_NO_1_ROW = 1242
UNKNOWN_STMT_HANDLER = 1243
CORRUPT_HELP_DB = 1244
CYCLIC_REFERENCE = 1245
AUTO_CONVERT = 1246
ILLEGAL_REFERENCE = 1247
DERIVED_MUST_HAVE_ALIAS = 1248
SELECT_REDUCED = 1249
TABLENAME_NOT_ALLOWED_HERE = 1250
NOT_SUPPORTED_AUTH_MODE = 1251
SPATIAL_CANT_HAVE_NULL = 1252
COLLATION_CHARSET_MISMATCH = 1253
SLAVE_WAS_RUNNING = 1254
SLAVE_WAS_NOT_RUNNING = 1255
TOO_BIG_FOR_UNCOMPRESS = 1256
ZLIB_Z_MEM_ERROR = 1257
ZLIB_Z_BUF_ERROR = 1258
ZLIB_Z_DATA_ERROR = 1259
CUT_VALUE_GROUP_CONCAT = 1260
WARN_TOO_FEW_RECORDS = 1261
WARN_TOO_MANY_RECORDS = 1262
WARN_NULL_TO_NOTNULL = 1263
WARN_DATA_OUT_OF_RANGE = 1264
WARN_DATA_TRUNCATED = 1265
WARN_USING_OTHER_HANDLER = 1266
CANT_AGGREGATE_2COLLATIONS = 1267
DROP_USER = 1268
REVOKE_GRANTS = 1269
CANT_AGGREGATE_3COLLATIONS = 1270
CANT_AGGREGATE_NCOLLATIONS = 1271
VARIABLE_IS_NOT_STRUCT = 1272
UNKNOWN_COLLATION = 1273
SLAVE_IGNORED_SSL_PARAMS = 1274
SERVER_IS_IN_SECURE_AUTH_MODE = 1275
WARN_FIELD_RESOLVED = 1276
BAD_SLAVE_UNTIL_COND = 1277
MISSING_SKIP_SLAVE = 1278
UNTIL_COND_IGNORED = 1279
WRONG_NAME_FOR_INDEX = 1280
WRONG_NAME_FOR_CATALOG = 1281
WARN_QC_RESIZE = 1282
BAD_FT_COLUMN = 1283
UNKNOWN_KEY_CACHE = 1284
WARN_HOSTNAME_WONT_WORK = 1285
UNKNOWN_STORAGE_ENGINE = 1286
WARN_DEPRECATED_SYNTAX = 1287
NON_UPDATABLE_TABLE = 1288
FEATURE_DISABLED = 1289
OPTION_PREVENTS_STATEMENT = 1290
DUPLICATED_VALUE_IN_TYPE = 1291
TRUNCATED_WRONG_VALUE = 1292
TOO_MUCH_AUTO_TIMESTAMP_COLS = 1293
INVALID_ON_UPDATE = 1294
UNSUPPORTED_PS = 1295
GET_ERRMSG = 1296
GET_TEMPORARY_ERRMSG = 1297
UNKNOWN_TIME_ZONE = 1298
WARN_INVALID_TIMESTAMP = 1299
INVALID_CHARACTER_STRING = 1300
WARN_ALLOWED_PACKET_OVERFLOWED = 1301
CONFLICTING_DECLARATIONS = 1302
SP_NO_RECURSIVE_CREATE = 1303
SP_ALREADY_EXISTS = 1304
SP_DOES_NOT_EXIST = 1305
SP_DROP_FAILED = 1306
SP_STORE_FAILED = 1307
SP_LILABEL_MISMATCH = 1308
SP_LABEL_REDEFINE = 1309
SP_LABEL_MISMATCH = 1310
SP_UNINIT_VAR = 1311
SP_BADSELECT = 1312
SP_BADRETURN = 1313
SP_BADSTATEMENT = 1314
UPDATE_LOG_DEPRECATED_IGNORED = 1315
UPDATE_LOG_DEPRECATED_TRANSLATED = 1316
QUERY_INTERRUPTED = 1317
SP_WRONG_NO_OF_ARGS = 1318
SP_COND_MISMATCH = 1319
SP_NORETURN = 1320
SP_NORETURNEND = 1321
SP_BAD_CURSOR_QUERY = 1322
SP_BAD_CURSOR_SELECT = 1323
SP_CURSOR_MISMATCH = 1324
SP_CURSOR_ALREADY_OPEN = 1325
SP_CURSOR_NOT_OPEN = 1326
SP_UNDECLARED_VAR = 1327
SP_WRONG_NO_OF_FETCH_ARGS = 1328
SP_FETCH_NO_DATA = 1329
SP_DUP_PARAM = 1330
SP_DUP_VAR = 1331
SP_DUP_COND = 1332
SP_DUP_CURS = 1333
SP_CANT_ALTER = 1334
SP_SUBSELECT_NYI = 1335
STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336
SP_VARCOND_AFTER_CURSHNDLR = 1337
SP_CURSOR_AFTER_HANDLER = 1338
SP_CASE_NOT_FOUND = 1339
FPARSER_TOO_BIG_FILE = 1340
FPARSER_BAD_HEADER = 1341
FPARSER_EOF_IN_COMMENT = 1342
FPARSER_ERROR_IN_PARAMETER = 1343
FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344
VIEW_NO_EXPLAIN = 1345
FRM_UNKNOWN_TYPE = 1346
WRONG_OBJECT = 1347
NONUPDATEABLE_COLUMN = 1348
VIEW_SELECT_DERIVED = 1349
VIEW_SELECT_CLAUSE = 1350
VIEW_SELECT_VARIABLE = 1351
VIEW_SELECT_TMPTABLE = 1352
VIEW_WRONG_LIST = 1353
WARN_VIEW_MERGE = 1354
WARN_VIEW_WITHOUT_KEY = 1355
VIEW_INVALID = 1356
SP_NO_DROP_SP = 1357
SP_GOTO_IN_HNDLR = 1358
TRG_ALREADY_EXISTS = 1359
TRG_DOES_NOT_EXIST = 1360
TRG_ON_VIEW_OR_TEMP_TABLE = 1361
TRG_CANT_CHANGE_ROW = 1362
TRG_NO_SUCH_ROW_IN_TRG = 1363
NO_DEFAULT_FOR_FIELD = 1364
DIVISION_BY_ZERO = 1365
TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366
ILLEGAL_VALUE_FOR_TYPE = 1367
VIEW_NONUPD_CHECK = 1368
VIEW_CHECK_FAILED = 1369
PROCACCESS_DENIED_ERROR = 1370
RELAY_LOG_FAIL = 1371
PASSWD_LENGTH = 1372
UNKNOWN_TARGET_BINLOG = 1373
IO_ERR_LOG_INDEX_READ = 1374
BINLOG_PURGE_PROHIBITED = 1375
FSEEK_FAIL = 1376
BINLOG_PURGE_FATAL_ERR = 1377
LOG_IN_USE = 1378
LOG_PURGE_UNKNOWN_ERR = 1379
RELAY_LOG_INIT = 1380
NO_BINARY_LOGGING = 1381
RESERVED_SYNTAX = 1382
WSAS_FAILED = 1383
DIFF_GROUPS_PROC = 1384
NO_GROUP_FOR_PROC = 1385
ORDER_WITH_PROC = 1386
LOGGING_PROHIBIT_CHANGING_OF = 1387
NO_FILE_MAPPING = 1388
WRONG_MAGIC = 1389
PS_MANY_PARAM = 1390
KEY_PART_0 = 1391
VIEW_CHECKSUM = 1392
VIEW_MULTIUPDATE = 1393
VIEW_NO_INSERT_FIELD_LIST = 1394
VIEW_DELETE_MERGE_VIEW = 1395
CANNOT_USER = 1396
XAER_NOTA = 1397
XAER_INVAL = 1398
XAER_RMFAIL = 1399
XAER_OUTSIDE = 1400
XAER_RMERR = 1401
XA_RBROLLBACK = 1402
NONEXISTING_PROC_GRANT = 1403
PROC_AUTO_GRANT_FAIL = 1404
PROC_AUTO_REVOKE_FAIL = 1405
DATA_TOO_LONG = 1406
SP_BAD_SQLSTATE = 1407
STARTUP = 1408
LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409
CANT_CREATE_USER_WITH_GRANT = 1410
WRONG_VALUE_FOR_TYPE = 1411
TABLE_DEF_CHANGED = 1412
SP_DUP_HANDLER = 1413
SP_NOT_VAR_ARG = 1414
SP_NO_RETSET = 1415
CANT_CREATE_GEOMETRY_OBJECT = 1416
FAILED_ROUTINE_BREAK_BINLOG = 1417
BINLOG_UNSAFE_ROUTINE = 1418
BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419
EXEC_STMT_WITH_OPEN_CURSOR = 1420
STMT_HAS_NO_OPEN_CURSOR = 1421
COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422
NO_DEFAULT_FOR_VIEW_FIELD = 1423
SP_NO_RECURSION = 1424
TOO_BIG_SCALE = 1425
TOO_BIG_PRECISION = 1426
M_BIGGER_THAN_D = 1427
WRONG_LOCK_OF_SYSTEM_TABLE = 1428
CONNECT_TO_FOREIGN_DATA_SOURCE = 1429
QUERY_ON_FOREIGN_DATA_SOURCE = 1430
FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431
FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432
FOREIGN_DATA_STRING_INVALID = 1433
CANT_CREATE_FEDERATED_TABLE = 1434
TRG_IN_WRONG_SCHEMA = 1435
STACK_OVERRUN_NEED_MORE = 1436
TOO_LONG_BODY = 1437
WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438
TOO_BIG_DISPLAYWIDTH = 1439
XAER_DUPID = 1440
DATETIME_FUNCTION_OVERFLOW = 1441
CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442
VIEW_PREVENT_UPDATE = 1443
PS_NO_RECURSION = 1444
SP_CANT_SET_AUTOCOMMIT = 1445
MALFORMED_DEFINER = 1446
VIEW_FRM_NO_USER = 1447
VIEW_OTHER_USER = 1448
NO_SUCH_USER = 1449
FORBID_SCHEMA_CHANGE = 1450
ROW_IS_REFERENCED_2 = 1451
NO_REFERENCED_ROW_2 = 1452
SP_BAD_VAR_SHADOW = 1453
TRG_NO_DEFINER = 1454
OLD_FILE_FORMAT = 1455
SP_RECURSION_LIMIT = 1456
SP_PROC_TABLE_CORRUPT = 1457
SP_WRONG_NAME = 1458
TABLE_NEEDS_UPGRADE = 1459
SP_NO_AGGREGATE = 1460
MAX_PREPARED_STMT_COUNT_REACHED = 1461
VIEW_RECURSIVE = 1462
NON_GROUPING_FIELD_USED = 1463
TABLE_CANT_HANDLE_SPKEYS = 1464
NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465
USERNAME = 1466
HOSTNAME = 1467
WRONG_STRING_LENGTH = 1468
ERROR_LAST = 1468
# https://github.com/PyMySQL/PyMySQL/issues/607
CONSTRAINT_FAILED = 4025 | zdpapi-mysql | /zdpapi_mysql-1.0.2-py3-none-any.whl/zdpapi_mysql/pymysql/constants/ER.py | ER.py |
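# Usage sketch (illustrative): server errors surface through pymysql
# exceptions whose args[0] carries one of the codes above, e.g. treating a
# duplicate key as benign (IntegrityError comes from pymysql.err):
#   try:
#       cursor.execute("INSERT INTO t (id) VALUES (1)")
#   except IntegrityError as exc:
#       if exc.args[0] == DUP_ENTRY:
#           pass  # row already present
#       else:
#           raise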
CR_ERROR_FIRST = 2000
CR_UNKNOWN_ERROR = 2000
CR_SOCKET_CREATE_ERROR = 2001
CR_CONNECTION_ERROR = 2002
CR_CONN_HOST_ERROR = 2003
CR_IPSOCK_ERROR = 2004
CR_UNKNOWN_HOST = 2005
CR_SERVER_GONE_ERROR = 2006
CR_VERSION_ERROR = 2007
CR_OUT_OF_MEMORY = 2008
CR_WRONG_HOST_INFO = 2009
CR_LOCALHOST_CONNECTION = 2010
CR_TCP_CONNECTION = 2011
CR_SERVER_HANDSHAKE_ERR = 2012
CR_SERVER_LOST = 2013
CR_COMMANDS_OUT_OF_SYNC = 2014
CR_NAMEDPIPE_CONNECTION = 2015
CR_NAMEDPIPEWAIT_ERROR = 2016
CR_NAMEDPIPEOPEN_ERROR = 2017
CR_NAMEDPIPESETSTATE_ERROR = 2018
CR_CANT_READ_CHARSET = 2019
CR_NET_PACKET_TOO_LARGE = 2020
CR_EMBEDDED_CONNECTION = 2021
CR_PROBE_SLAVE_STATUS = 2022
CR_PROBE_SLAVE_HOSTS = 2023
CR_PROBE_SLAVE_CONNECT = 2024
CR_PROBE_MASTER_CONNECT = 2025
CR_SSL_CONNECTION_ERROR = 2026
CR_MALFORMED_PACKET = 2027
CR_WRONG_LICENSE = 2028
CR_NULL_POINTER = 2029
CR_NO_PREPARE_STMT = 2030
CR_PARAMS_NOT_BOUND = 2031
CR_DATA_TRUNCATED = 2032
CR_NO_PARAMETERS_EXISTS = 2033
CR_INVALID_PARAMETER_NO = 2034
CR_INVALID_BUFFER_USE = 2035
CR_UNSUPPORTED_PARAM_TYPE = 2036
CR_SHARED_MEMORY_CONNECTION = 2037
CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038
CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039
CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040
CR_SHARED_MEMORY_CONNECT_MAP_ERROR = 2041
CR_SHARED_MEMORY_FILE_MAP_ERROR = 2042
CR_SHARED_MEMORY_MAP_ERROR = 2043
CR_SHARED_MEMORY_EVENT_ERROR = 2044
CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045
CR_SHARED_MEMORY_CONNECT_SET_ERROR = 2046
CR_CONN_UNKNOW_PROTOCOL = 2047
CR_INVALID_CONN_HANDLE = 2048
CR_SECURE_AUTH = 2049
CR_FETCH_CANCELED = 2050
CR_NO_DATA = 2051
CR_NO_STMT_METADATA = 2052
CR_NO_RESULT_SET = 2053
CR_NOT_IMPLEMENTED = 2054
CR_SERVER_LOST_EXTENDED = 2055
CR_STMT_CLOSED = 2056
CR_NEW_STMT_METADATA = 2057
CR_ALREADY_CONNECTED = 2058
CR_AUTH_PLUGIN_CANNOT_LOAD = 2059
CR_DUPLICATE_CONNECTION_ATTR = 2060
CR_AUTH_PLUGIN_ERR = 2061
CR_ERROR_LAST = 2061 | zdpapi-mysql | /zdpapi_mysql-1.0.2-py3-none-any.whl/zdpapi_mysql/pymysql/constants/CR.py | CR.py |
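# Usage sketch (illustrative): the 2xxx codes in this module are client-side
# errors raised by the connector itself, e.g. reconnecting after the server
# went away (OperationalError comes from pymysql.err):
#   except OperationalError as exc:
#       if exc.args[0] in (CR_SERVER_GONE_ERROR, CR_SERVER_LOST):
#           connection.ping(reconnect=True)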
from typing import Dict, List
from zdpapi_mysql import Mysql, Crud
from zdpapi.libs.fastapi import APIRouter
from .response import response_bool, response_bool_data
from .request_schema import IdsSchema
from .response_schema import ResponseBool, ResponseBoolData, ResponseBoolListData
# default MySQL configuration
default_mysql_config = {
"host": '127.0.0.1',
"port": 3306,
"user": 'root',
"password": 'root',
"db": 'test'
}
class CRUDRouter:
def __init__(self,
                 table_name: str,  # required
                 columns: List[str],  # required
                 schema,  # required
chinese_name: str = None,
tags: List[str] = None,
mysql_config: Dict = default_mysql_config,
) -> None:
self.db = Mysql(**mysql_config)
self.table = table_name
self.crud = Crud(self.db, table_name, columns)
self.router = APIRouter()
self.schema = schema
self.chinese_name = chinese_name
if self.chinese_name is None:
self.chinese_name = table_name
self.tags = tags
if self.tags is None:
self.tags = [f"{self.chinese_name}管理"]
def register(self, app):
"""
        Register all CRUD routes on the given app.
"""
self.add(self.router)
self.add_many(self.router)
self.delete(self.router)
self.delete_ids(self.router)
self.update(self.router)
self.update_many(self.router)
self.find_one(self.router)
self.find_ids(self.router)
self.find_page(self.router)
app.include_router(self.router)
return app
def add(self, router):
"""
添加路由
"""
# @self.post(path, tags=tags, summary=f"新增单条{model_.__cname__}数据")
@router.post(f"/{self.table}", tags=self.tags, summary=f"新增单条{self.chinese_name}数据")
async def add_router(schema: self.schema):
await self.crud.add(schema.dict())
return response_bool
def add_many(self, router):
"""
添加多条数据的路由
"""
@router.post(f"/{self.table}s", tags=self.tags, summary=f"新增多条{self.chinese_name}数据")
async def add_many_router(data: List[self.schema]):
data = [item.dict() for item in data]
await self.crud.add_many(data)
return response_bool
def delete(self, router):
"""
根据ID删除数据的路由
"""
@router.delete(f"/{self.table}/"+"{id}", tags=self.tags, summary=f"删除单条{self.chinese_name}数据")
async def delete(id: int):
# 根据ID删除数据
await self.crud.delete(id)
return response_bool
def delete_ids(self, router):
"""
根据ID列表删除数据的路由
"""
@router.delete(f"/{self.table}s", tags=self.tags, summary=f"删除多条{self.chinese_name}数据")
async def delete_ids(ids: IdsSchema):
# 根据ID列表删除数据
await self.crud.delete_ids(tuple(ids.ids))
return response_bool
def update(self, router):
"""
根据ID更新数据的路由
"""
@router.put(f"/{self.table}/"+"{id}", tags=self.tags, summary=f"更新单条{self.chinese_name}数据")
async def update(id: int, data: self.schema):
data_ = data.dict()
if data_.get("id") is not None:
del data_["id"]
await self.crud.update(id, data_)
return response_bool
def update_many(self, router):
"""
根据ID批量更新数据的路由
"""
@router.put(f"/{self.table}s", tags=self.tags, summary=f"更新多条{self.chinese_name}数据")
async def update_many(data: List[self.schema]):
data_ = [item.dict() for item in data]
await self.crud.update_many(data_)
return response_bool
def find_one(self, router):
"""
根据ID查询单条数据
"""
@router.get(f"/{self.table}/"+"{id}", tags=self.tags, summary=f"查找单条{self.chinese_name}数据")
async def find_one(id: int):
result = await self.crud.find(id)
response = ResponseBoolData()
response.data = result
return response
def find_ids(self, router):
"""
根据ID列表查询多条数据
"""
@router.put(f"/{self.table}_ids", tags=self.tags, summary=f"查找多条{self.chinese_name}数据")
async def find_ids(ids: IdsSchema):
result = await self.crud.find_ids(ids.ids)
response = ResponseBoolListData()
response.data = result
return response
def find_page(self, router):
"""
根据ID列表查询多条数据
"""
@router.get(f"/{self.table}s", tags=self.tags, summary=f"分页查找多条{self.chinese_name}数据")
async def find_page(page: int = 1, size: int = 20, order_column: str = "-id"):
# 排序方式 id正序 -id逆序
order_type: str = "ASC"
if order_column.startswith("-"):
order_type = "DESC"
                order_column = order_column[1:]  # strip the leading "-"
response = ResponseBoolListData()
            # query the total count first
            total = await self.crud.find_total()
            if total == 0:  # no data, so skip the page query and save a round trip
return response
response.total = total
            # paginated query
result = await self.crud.find_page(page, size, order_column, order_type)
response.data = result
return response | zdpapi-router | /zdpapi_router-1.0.0-py3-none-any.whl/zdpapi_router/crud.py | crud.py |
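# Usage sketch (illustrative; "User" is a hypothetical pydantic schema and the
# MySQL table/columns must already exist; "app" is the application instance):
#   from pydantic import BaseModel
#   class User(BaseModel):
#       id: int = None
#       name: str = None
#   router = CRUDRouter(table_name="user", columns=["id", "name"], schema=User)
#   app = router.register(app)  # mounts the POST/PUT/DELETE/GET /user routes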
import paramiko
class SSH:
def __init__(self, hostname: str = '192.168.18.11', port: int = 22,
username: str = 'zhangdapeng', password: str = 'zhangdapeng') -> None:
        # create the SSH client objects
        self.ssh = paramiko.SSHClient()
        self.ssh_trans = paramiko.SSHClient()
        # trust unknown hosts automatically; must be set before connect()
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.hostname = hostname
        self.port = port
        self.username = username
        self.password = password
        # connect to the server
        self.connect()
        # transport configuration registry
        self.trans = {
            hostname: {  # key is the server's IP address
                "hostname": hostname,  # host address
                "port": port,  # port number
                "username": username,  # user name
                "password": password  # password
            }
        }
    def add_trans(self, hostname: str, port: int, username: str, password: str):
        """
        Register a transport config so commands can be switched between multiple SSH hosts.
        """
        self.trans[hostname] = {  # key is the server's IP address
            "hostname": hostname,  # host address
            "port": port,  # port number
            "username": username,  # user name
            "password": password  # password
        }
    def __set_trans(self, hostname: str):
        """
        Activate the transport for the given host, switching the SSH target.
        """
        trans_dict = self.trans.get(hostname)
        if trans_dict is None:
            raise Exception("no transport config registered for this host")
        # connect through the transport
        self.trans_server = paramiko.Transport((trans_dict.get("hostname"), trans_dict.get("port")))
        self.trans_server.connect(username=trans_dict.get("username"), password=trans_dict.get("password"))
        self.ssh_trans._transport = self.trans_server
    def execute_trans(self, hostname: str, command: str):
        """
        Execute an SSH command through a registered transport.
        """
        self.__set_trans(hostname)
        # run the command, same as with the plain client
        stdin, stdout, stderr = self.ssh_trans.exec_command(command)
        print(stdout.read().decode())
        # close the connection
        self.ssh_trans.close()
    def __ftp_execute(self, hostname: str, local_path: str, remote_path: str, execute: str):
        """
        Run a file transfer over SFTP.
        local_path: local file path
        remote_path: remote file path
        execute: operation name, "upload" or "download"
        """
        # look up the transport config
        trans_dict = self.trans.get(hostname)
        if trans_dict is None:
            raise Exception("no transport config registered for this host")
        # instantiate a Transport object
        trans = paramiko.Transport(
            (trans_dict.get("hostname"),
             trans_dict.get("port")))
        # establish the connection
        trans.connect(
            username=trans_dict.get("username"),
            password=trans_dict.get("password"))
        # build an SFTP client over this transport channel
        sftp = paramiko.SFTPClient.from_transport(trans)
        if execute == "upload":
            # upload the file
            sftp.put(localpath=local_path, remotepath=remote_path)
        elif execute == "download":
            # download the file
            sftp.get(localpath=local_path, remotepath=remote_path)
        else:
            raise Exception("unsupported operation: execute only accepts 'upload' or 'download'")
        # close the transport
        trans.close()
    def ftp_download(self, hostname: str, local_path: str, remote_path: str):
        """
        Download a file over SFTP.
        hostname: host address
        local_path: local file path
        remote_path: remote file path
        """
        self.__ftp_execute(hostname, local_path, remote_path, "download")
    def ftp_upload(self, hostname: str, local_path: str, remote_path: str):
        """
        Upload a file over SFTP.
        hostname: host address
        local_path: local file path
        remote_path: remote file path
        """
        self.__ftp_execute(hostname, local_path, remote_path, "upload")
    def connect(self):
        """
        Establish the SSH connection.
        """
        self.ssh.connect(hostname=self.hostname, port=self.port,
                         username=self.username, password=self.password)
    def execute(self, command: str):
        """
        Execute a command on the default host.
        """
        self.connect()
        # run the command
        stdin, stdout, stderr = self.ssh.exec_command(command)
        # normal output lands in stdout; errors land in stderr
        print(stdout.read().decode())
        # close the connection
        self.close()
    def close(self):
        """
        Close both SSH connections.
        """
        self.ssh.close()
        self.ssh_trans.close()
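if __name__ == "__main__":
    # Usage sketch (illustrative): hosts and credentials below are placeholders.
    ssh = SSH(hostname="192.168.18.11", username="user", password="secret")
    ssh.execute("ls -la")  # run on the default host
    # register a second host and run a command through its transport
    ssh.add_trans("192.168.18.12", 22, "user", "secret")
    ssh.execute_trans("192.168.18.12", "uname -a")
    # copy a local file to the second host over SFTP
    ssh.ftp_upload("192.168.18.12", "local.txt", "/tmp/remote.txt")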
from typing import Dict, List
from pydantic import BaseModel
class ResponseSuccess(BaseModel):
    """
    Successful response, covering successful creation, update, deletion, etc.
    """
    status: bool = True  # status flag
    msg: str = "success"  # message
    code: int = 10000  # status code: common HTTP codes are avoided on purpose so they are harder to guess
class ResponseSuccessData(ResponseSuccess):
    """
    Successful response that also carries data as a dict.
    """
    data: Dict = None
class ResponseSuccessListData(ResponseSuccess):
    """
    Successful response that also carries data as a list of dicts.
    """
    data: List[Dict] = None
class ResponseParamError(BaseModel):
    """
    Parameter error.
    """
    status: bool = False  # status flag
    msg: str = "parameter error"  # message
    code: int = 10001  # status code
class ResponseServerError(BaseModel):
    """
    Internal server error.
    """
    status: bool = False  # status flag
    msg: str = "internal server error"  # message
    code: int = 10002  # status code
class ResponseNotFound(BaseModel):
    """
    Resource not found.
    """
    status: bool = False  # status flag
    msg: str = "resource not found"  # message
    code: int = 10003  # status code
class ResponseGrpcCanNotUse(BaseModel):
    """
    gRPC service unavailable.
    """
    status: bool = False  # status flag
    msg: str = "gRPC service unavailable"  # message
    code: int = 10004  # status code
class ResponseExistsError(BaseModel):
    """
    Record already exists.
    """
    status: bool = False  # status flag
    msg: str = "record already exists"  # message
    code: int = 10005  # status code
class ResponseUnAuth(BaseModel):
    """
    No access permission.
    """
    status: bool = False  # status flag
    msg: str = "no access permission"  # message
    code: int = 10006  # status code
class ResponseTokenExpired(BaseModel):
    """
    Token expired.
    """
    status: bool = False  # status flag
    msg: str = "token expired"  # message
    code: int = 10007  # status code
class ResponseTimeout(BaseModel):
    """
    Service connection timed out.
    """
    status: bool = False  # status flag
    msg: str = "service connection timed out"  # message
    code: int = 10008  # status code
class ResponseCorsError(BaseModel):
    """
    Cross-origin request failed.
    """
    status: bool = False  # status flag
    msg: str = "cross-origin request failed"  # message
    code: int = 10009  # status code
class ResponseRequestLimitError(BaseModel):
    """
    Request rate limited.
    """
    status: bool = False  # status flag
    msg: str = "requests too frequent"  # message
    code: int = 10010  # status code
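# Usage sketch (illustrative): handlers return one of these models so every
# endpoint shares the same {status, msg, code[, data]} envelope, e.g.:
#   @app.get("/users/{id}")
#   async def get_user(id: int):
#       return ResponseSuccessData(data={"id": id, "name": "demo"})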
from enum import Enum
from typing import Any, Callable, Dict, Optional, Sequence
from pydantic.fields import FieldInfo, Undefined
class ParamTypes(Enum):
query = "query"
header = "header"
path = "path"
cookie = "cookie"
class Param(FieldInfo):
in_: ParamTypes
def __init__(
self,
default: Any = Undefined,
*,
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
**extra: Any,
):
self.deprecated = deprecated
self.example = example
self.examples = examples
self.include_in_schema = include_in_schema
super().__init__(
default=default,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
**extra,
)
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self.default})"
class Path(Param):
in_ = ParamTypes.path
def __init__(
self,
default: Any = Undefined,
*,
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
**extra: Any,
):
self.in_ = self.in_
super().__init__(
default=...,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
deprecated=deprecated,
example=example,
examples=examples,
include_in_schema=include_in_schema,
**extra,
)
class Query(Param):
in_ = ParamTypes.query
def __init__(
self,
default: Any = Undefined,
*,
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
**extra: Any,
):
super().__init__(
default=default,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
deprecated=deprecated,
example=example,
examples=examples,
include_in_schema=include_in_schema,
**extra,
)
class Header(Param):
in_ = ParamTypes.header
def __init__(
self,
default: Any = Undefined,
*,
alias: Optional[str] = None,
convert_underscores: bool = True,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
**extra: Any,
):
self.convert_underscores = convert_underscores
super().__init__(
default=default,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
deprecated=deprecated,
example=example,
examples=examples,
include_in_schema=include_in_schema,
**extra,
)
class Cookie(Param):
in_ = ParamTypes.cookie
def __init__(
self,
default: Any = Undefined,
*,
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
**extra: Any,
):
super().__init__(
default=default,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
deprecated=deprecated,
example=example,
examples=examples,
include_in_schema=include_in_schema,
**extra,
)
class Body(FieldInfo):
def __init__(
self,
default: Any = Undefined,
*,
embed: bool = False,
media_type: str = "application/json",
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
**extra: Any,
):
self.embed = embed
self.media_type = media_type
self.example = example
self.examples = examples
super().__init__(
default=default,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
**extra,
)
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self.default})"
class Form(Body):
def __init__(
self,
default: Any,
*,
media_type: str = "application/x-www-form-urlencoded",
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
**extra: Any,
):
super().__init__(
default=default,
embed=True,
media_type=media_type,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
example=example,
examples=examples,
**extra,
)
class File(Form):
def __init__(
self,
default: Any,
*,
media_type: str = "multipart/form-data",
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
**extra: Any,
):
super().__init__(
default=default,
media_type=media_type,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
example=example,
examples=examples,
**extra,
)
class Depends:
def __init__(
self, dependency: Optional[Callable[..., Any]] = None, *, use_cache: bool = True
):
self.dependency = dependency
self.use_cache = use_cache
def __repr__(self) -> str:
attr = getattr(self.dependency, "__name__", type(self.dependency).__name__)
cache = "" if self.use_cache else ", use_cache=False"
return f"{self.__class__.__name__}({attr}{cache})"
class Security(Depends):
def __init__(
self,
dependency: Optional[Callable[..., Any]] = None,
*,
scopes: Optional[Sequence[str]] = None,
use_cache: bool = True,
):
super().__init__(dependency=dependency, use_cache=use_cache)
self.scopes = scopes or [] | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/params.py | params.py |
import dataclasses
from collections import defaultdict
from enum import Enum
from pathlib import PurePath
from types import GeneratorType
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
from pydantic import BaseModel
from pydantic.json import ENCODERS_BY_TYPE
SetIntStr = Set[Union[int, str]]
DictIntStrAny = Dict[Union[int, str], Any]
def generate_encoders_by_class_tuples(
type_encoder_map: Dict[Any, Callable[[Any], Any]]
) -> Dict[Callable[[Any], Any], Tuple[Any, ...]]:
encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(
tuple
)
for type_, encoder in type_encoder_map.items():
encoders_by_class_tuples[encoder] += (type_,)
return encoders_by_class_tuples
encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE)
def jsonable_encoder(
obj: Any,
include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None,
sqlalchemy_safe: bool = True,
) -> Any:
custom_encoder = custom_encoder or {}
if custom_encoder:
if type(obj) in custom_encoder:
return custom_encoder[type(obj)](obj)
else:
for encoder_type, encoder_instance in custom_encoder.items():
if isinstance(obj, encoder_type):
return encoder_instance(obj)
if include is not None and not isinstance(include, (set, dict)):
include = set(include)
if exclude is not None and not isinstance(exclude, (set, dict)):
exclude = set(exclude)
if isinstance(obj, BaseModel):
encoder = getattr(obj.__config__, "json_encoders", {})
if custom_encoder:
encoder.update(custom_encoder)
obj_dict = obj.dict(
include=include, # type: ignore # in Pydantic
exclude=exclude, # type: ignore # in Pydantic
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_none=exclude_none,
exclude_defaults=exclude_defaults,
)
if "__root__" in obj_dict:
obj_dict = obj_dict["__root__"]
return jsonable_encoder(
obj_dict,
exclude_none=exclude_none,
exclude_defaults=exclude_defaults,
custom_encoder=encoder,
sqlalchemy_safe=sqlalchemy_safe,
)
if dataclasses.is_dataclass(obj):
return dataclasses.asdict(obj)
if isinstance(obj, Enum):
return obj.value
if isinstance(obj, PurePath):
return str(obj)
if isinstance(obj, (str, int, float, type(None))):
return obj
if isinstance(obj, dict):
encoded_dict = {}
for key, value in obj.items():
if (
(
not sqlalchemy_safe
or (not isinstance(key, str))
or (not key.startswith("_sa"))
)
and (value is not None or not exclude_none)
and ((include and key in include) or not exclude or key not in exclude)
):
encoded_key = jsonable_encoder(
key,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_none=exclude_none,
custom_encoder=custom_encoder,
sqlalchemy_safe=sqlalchemy_safe,
)
encoded_value = jsonable_encoder(
value,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_none=exclude_none,
custom_encoder=custom_encoder,
sqlalchemy_safe=sqlalchemy_safe,
)
encoded_dict[encoded_key] = encoded_value
return encoded_dict
if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)):
encoded_list = []
for item in obj:
encoded_list.append(
jsonable_encoder(
item,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
custom_encoder=custom_encoder,
sqlalchemy_safe=sqlalchemy_safe,
)
)
return encoded_list
if type(obj) in ENCODERS_BY_TYPE:
return ENCODERS_BY_TYPE[type(obj)](obj)
for encoder, classes_tuple in encoders_by_class_tuples.items():
if isinstance(obj, classes_tuple):
return encoder(obj)
errors: List[Exception] = []
try:
data = dict(obj)
except Exception as e:
errors.append(e)
try:
data = vars(obj)
except Exception as e:
errors.append(e)
raise ValueError(errors)
return jsonable_encoder(
data,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
custom_encoder=custom_encoder,
sqlalchemy_safe=sqlalchemy_safe,
) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/encoders.py | encoders.py |
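# Illustrative sketch (not part of the original module): pydantic models,
# datetimes and sets all collapse to JSON-compatible builtins.
def _demo_jsonable_encoder():
    import datetime
    class Item(BaseModel):
        name: str
        when: datetime.datetime
    item = Item(name="a", when=datetime.datetime(2022, 1, 1))
    # -> {'item': {'name': 'a', 'when': '2022-01-01T00:00:00'}, 'tags': ['x']}
    return jsonable_encoder({"item": item, "tags": {"x"}})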
import os.path
from enum import Enum
from typing import (
Any,
Awaitable,
Callable,
Coroutine,
Dict,
List,
Optional,
Sequence,
Type,
Union,
)
from pydantic import BaseModel
from . import routing
from .datastructures import Default, DefaultPlaceholder, UploadFile
from .encoders import DictIntStrAny, SetIntStr
from .exception_handlers import (
http_exception_handler,
request_validation_exception_handler,
)
from .exceptions import RequestValidationError
from .logger import logger
from .middleware.asyncexitstack import AsyncExitStackMiddleware
from .openapi.docs import (
get_redoc_html,
get_swagger_ui_html,
get_swagger_ui_oauth2_redirect_html,
)
from .openapi.utils import get_openapi
from .params import Depends, File
from .types import DecoratedCallable
from .utils import generate_unique_id
from zdppy_api.starlette.applications import Starlette
from zdppy_api.starlette.datastructures import State
from zdppy_api.starlette.exceptions import ExceptionMiddleware, HTTPException
from zdppy_api.starlette.middleware import Middleware
from zdppy_api.starlette.middleware.errors import ServerErrorMiddleware
from zdppy_api.starlette.requests import Request
from zdppy_api.starlette.responses import HTMLResponse, JSONResponse, Response
from .starlette.routing import BaseRoute
from zdppy_api.starlette.types import ASGIApp, Receive, Scope, Send
from .middleware.cors import CORSMiddleware  # CORS middleware
class ResponseResult(BaseModel):
"""
统一响应模型
"""
status: bool = True # 状态
msg: str = "成功" # 信息
code: int = 10000 # 状态码:不采用常见HTTP状态码的原因,是为了避免容易被猜测
data: Any = None
class Api(Starlette):
def __init__(self, *,
debug: bool = False,
routes: Optional[List[BaseRoute]] = None,
title: str = "ZDP-Py-Api",
description: str = "基于FastAPI二次开发,基于异步的Python RESTFUL API 快速开发框架",
version: str = "0.1.0",
openapi_url: Optional[str] = "/openapi.json", openapi_tags: Optional[List[Dict[str, Any]]] = None,
servers: Optional[List[Dict[str, Union[str, Any]]]] = None,
dependencies: Optional[Sequence[Depends]] = None,
default_response_class: Type[Response] = Default(JSONResponse), docs_url: Optional[str] = "/docs",
redoc_url: Optional[str] = "/redoc",
swagger_ui_oauth2_redirect_url: Optional[str] = "/docs/oauth2-redirect",
swagger_ui_init_oauth: Optional[Dict[str, Any]] = None,
middleware: Optional[Sequence[Middleware]] = None, exception_handlers: Optional[
Dict[
Union[int, Type[Exception]],
Callable[[Request, Any], Coroutine[Any, Any, Response]],
]
] = None,
on_startup: Optional[Sequence[Callable[[], Any]]] = None,
on_shutdown: Optional[Sequence[Callable[[], Any]]] = None, terms_of_service: Optional[str] = None,
contact: Optional[Dict[str, Union[str, Any]]] = None,
license_info: Optional[Dict[str, Union[str, Any]]] = None, openapi_prefix: str = "",
root_path: str = "", root_path_in_servers: bool = True,
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
callbacks: Optional[List[BaseRoute]] = None, deprecated: Optional[bool] = None,
include_in_schema: bool = True, swagger_ui_parameters: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[routing.APIRoute], str] = Default(
generate_unique_id
),
                 init_routers: List[str] = None,  # built-in routers to register at startup
                 upload_directory: str = "uploads",  # directory for uploaded files
**extra: Any) -> None:
super().__init__(debug, routes, middleware, exception_handlers, on_startup, on_shutdown)
self._debug: bool = debug
self.title = title
self.description = description
self.version = version
self.terms_of_service = terms_of_service
self.contact = contact
self.license_info = license_info
self.openapi_url = openapi_url
self.openapi_tags = openapi_tags
self.root_path_in_servers = root_path_in_servers
self.docs_url = docs_url
self.redoc_url = redoc_url
self.swagger_ui_oauth2_redirect_url = swagger_ui_oauth2_redirect_url
self.swagger_ui_init_oauth = swagger_ui_init_oauth
self.swagger_ui_parameters = swagger_ui_parameters
self.servers = servers or []
self.extra = extra
self.openapi_version = "3.0.2"
self.openapi_schema: Optional[Dict[str, Any]] = None
if self.openapi_url:
assert self.title, "A title must be provided for OpenAPI, e.g.: 'My API'"
assert self.version, "A version must be provided for OpenAPI, e.g.: '2.1.0'"
if openapi_prefix:
logger.warning(
'"openapi_prefix" has been deprecated in favor of "root_path", which '
"follows more closely the ASGI standard, is simpler, and more "
"automatic. Check the docs at "
"https://fastapi.tiangolo.com/advanced/sub-applications/"
)
self.root_path = root_path or openapi_prefix
self.state: State = State()
self.dependency_overrides: Dict[Callable[..., Any], Callable[..., Any]] = {}
self.router: routing.APIRouter = routing.APIRouter(
routes=routes,
dependency_overrides_provider=self,
on_startup=on_startup,
on_shutdown=on_shutdown,
default_response_class=default_response_class,
dependencies=dependencies,
callbacks=callbacks,
deprecated=deprecated,
include_in_schema=include_in_schema,
responses=responses,
generate_unique_id_function=generate_unique_id_function,
)
self.exception_handlers: Dict[
Any, Callable[[Request, Any], Union[Response, Awaitable[Response]]]
] = ({} if exception_handlers is None else dict(exception_handlers))
self.exception_handlers.setdefault(HTTPException, http_exception_handler)
self.exception_handlers.setdefault(
RequestValidationError, request_validation_exception_handler
)
self.user_middleware: List[Middleware] = (
[] if middleware is None else list(middleware)
)
self.middleware_stack: ASGIApp = self.build_middleware_stack()
self.setup()
        # directory for uploaded files
        self.upload_directory = upload_directory
        if not os.path.exists(upload_directory):
            os.makedirs(upload_directory)
        # initialize the built-in routers
self.init_router(init_routers)
    def init_router(self, routers: List[str]):
        """
        Initialize the built-in routers.
        """
        if routers is not None:
            for router in routers:
                if router == "health":  # register the health-check router
                    self.add_health_router()
                if router == "upload":  # register the single-file upload router
                    self.add_upload_router()
                if router == "uploads":  # register the multi-file upload router
                    self.add_uploads_router()
    def add_middleware_cors(self, origins: List[str] = ["*"]) -> None:
        """
        Add the CORS middleware.
        @param origins list of allowed origins; defaults to allowing every origin
        """
self.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
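    # Usage sketch (illustrative): enable the built-in routers and open CORS
    # for a single origin.
    #   app = Api(init_routers=["health", "upload", "uploads"])
    #   app.add_middleware_cors(["https://example.com"])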
def build_middleware_stack(self) -> ASGIApp:
# Duplicate/override from zdppy_api.starlette to add AsyncExitStackMiddleware
# inside of ExceptionMiddleware, inside of custom user middlewares
debug = self.debug
error_handler = None
exception_handlers = {}
for key, value in self.exception_handlers.items():
if key in (500, Exception):
error_handler = value
else:
exception_handlers[key] = value
middleware = (
[Middleware(ServerErrorMiddleware, handler=error_handler, debug=debug)]
+ self.user_middleware
+ [
Middleware(
ExceptionMiddleware, handlers=exception_handlers, debug=debug
),
# Add FastAPI-specific AsyncExitStackMiddleware for dependencies with
# contextvars.
# This needs to happen after user middlewares because those create a
# new contextvars context copy by using a new AnyIO task group.
# The initial part of dependencies with yield is executed in the
# FastAPI code, inside all the middlewares, but the teardown part
# (after yield) is executed in the AsyncExitStack in this middleware,
# if the AsyncExitStack lived outside of the custom middlewares and
# contextvars were set in a dependency with yield in that internal
# contextvars context, the values would not be available in the
# outside context of the AsyncExitStack.
# By putting the middleware and the AsyncExitStack here, inside all
# user middlewares, the code before and after yield in dependencies
# with yield is executed in the same contextvars context, so all values
# set in contextvars before yield is still available after yield as
# would be expected.
# Additionally, by having this AsyncExitStack here, after the
# ExceptionMiddleware, now dependencies can catch handled exceptions,
# e.g. HTTPException, to customize the teardown code (e.g. DB session
# rollback).
Middleware(AsyncExitStackMiddleware),
]
)
app = self.router
for cls, options in reversed(middleware):
app = cls(app=app, **options)
return app
def openapi(self) -> Dict[str, Any]:
if not self.openapi_schema:
self.openapi_schema = get_openapi(
title=self.title,
version=self.version,
openapi_version=self.openapi_version,
description=self.description,
terms_of_service=self.terms_of_service,
contact=self.contact,
license_info=self.license_info,
routes=self.routes,
tags=self.openapi_tags,
servers=self.servers,
)
return self.openapi_schema
def setup(self) -> None:
if self.openapi_url:
urls = (server_data.get("url") for server_data in self.servers)
server_urls = {url for url in urls if url}
async def openapi(req: Request) -> JSONResponse:
root_path = req.scope.get("root_path", "").rstrip("/")
if root_path not in server_urls:
if root_path and self.root_path_in_servers:
self.servers.insert(0, {"url": root_path})
server_urls.add(root_path)
return JSONResponse(self.openapi())
self.add_route(self.openapi_url, openapi, include_in_schema=False)
if self.openapi_url and self.docs_url:
async def swagger_ui_html(req: Request) -> HTMLResponse:
root_path = req.scope.get("root_path", "").rstrip("/")
openapi_url = root_path + self.openapi_url
oauth2_redirect_url = self.swagger_ui_oauth2_redirect_url
if oauth2_redirect_url:
oauth2_redirect_url = root_path + oauth2_redirect_url
return get_swagger_ui_html(
openapi_url=openapi_url,
title=self.title + " - Swagger UI",
oauth2_redirect_url=oauth2_redirect_url,
init_oauth=self.swagger_ui_init_oauth,
swagger_ui_parameters=self.swagger_ui_parameters,
)
self.add_route(self.docs_url, swagger_ui_html, include_in_schema=False)
if self.swagger_ui_oauth2_redirect_url:
async def swagger_ui_redirect(req: Request) -> HTMLResponse:
return get_swagger_ui_oauth2_redirect_html()
self.add_route(
self.swagger_ui_oauth2_redirect_url,
swagger_ui_redirect,
include_in_schema=False,
)
if self.openapi_url and self.redoc_url:
async def redoc_html(req: Request) -> HTMLResponse:
root_path = req.scope.get("root_path", "").rstrip("/")
openapi_url = root_path + self.openapi_url
return get_redoc_html(
openapi_url=openapi_url, title=self.title + " - ReDoc"
)
self.add_route(self.redoc_url, redoc_html, include_in_schema=False)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if self.root_path:
scope["root_path"] = self.root_path
await super().__call__(scope, receive, send)
def add_api_route(
self,
path: str,
endpoint: Callable[..., Coroutine[Any, Any, Response]],
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
methods: Optional[List[str]] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Union[Type[Response], DefaultPlaceholder] = Default(
JSONResponse
),
name: Optional[str] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[routing.APIRoute], str] = Default(
generate_unique_id
),
) -> None:
self.router.add_api_route(
path,
endpoint=endpoint,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=methods,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def api_route(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
methods: Optional[List[str]] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[routing.APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
def decorator(func: DecoratedCallable) -> DecoratedCallable:
self.router.add_api_route(
path,
func,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=methods,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
return func
return decorator
def add_api_websocket_route(
self, path: str, endpoint: Callable[..., Any], name: Optional[str] = None
) -> None:
self.router.add_api_websocket_route(path, endpoint, name=name)
def websocket(
self, path: str, name: Optional[str] = None
) -> Callable[[DecoratedCallable], DecoratedCallable]:
def decorator(func: DecoratedCallable) -> DecoratedCallable:
self.add_api_websocket_route(path, func, name=name)
return func
return decorator
def include_router(
self,
router: routing.APIRouter,
*,
prefix: str = "",
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[Depends]] = None,
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
default_response_class: Type[Response] = Default(JSONResponse),
callbacks: Optional[List[BaseRoute]] = None,
generate_unique_id_function: Callable[[routing.APIRoute], str] = Default(
generate_unique_id
),
) -> None:
self.router.include_router(
router,
prefix=prefix,
tags=tags,
dependencies=dependencies,
responses=responses,
deprecated=deprecated,
include_in_schema=include_in_schema,
default_response_class=default_response_class,
callbacks=callbacks,
generate_unique_id_function=generate_unique_id_function,
)
def get(
self,
path: str,
*,
        response_model: Optional[Type[Any]] = ResponseResult,  # default response model
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[routing.APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.router.get(
path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def put(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[routing.APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.router.put(
path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def post(
self,
path: str,
*,
response_model: Optional[Type[Any]] = ResponseResult,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[routing.APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.router.post(
path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def delete(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[routing.APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.router.delete(
path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
            operation_id=operation_id,
            response_model_include=response_model_include,
            response_model_exclude=response_model_exclude,
            response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def options(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[routing.APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.router.options(
path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def head(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[routing.APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.router.head(
path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def patch(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[routing.APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.router.patch(
path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def trace(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[routing.APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.router.trace(
path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
    def add_health_router(self):
        """
        Add the health-check route.
        """
        @self.get("/health", summary="Health check")
        async def health():
            return ResponseResult()
    def add_upload_router(self):
        """
        Add the single-file upload route.
        """
        @self.post("/upload", summary="Upload a single file")
        async def upload(file: UploadFile):
            content = await file.read()
            data = {
                "filename": file.filename,
                "content-type": file.content_type,
                "size": len(content)
            }
            with open(f"{self.upload_directory}/{file.filename}", "wb") as f:
                f.write(content)
            await file.close()
            return ResponseResult(data=data)
    def add_uploads_router(self):
        """
        Add the multi-file upload route.
        """
        @self.post("/uploads", summary="Upload multiple files")
        async def uploads(files: List[UploadFile]):
            datas = []
            for file in files:
                content = await file.read()
                data = {
                    "filename": file.filename,
                    "content-type": file.content_type,
                    "size": len(content)
                }
                datas.append(data)
                with open(f"{self.upload_directory}/{file.filename}", "wb") as f:
                    f.write(content)
                await file.close()
            return ResponseResult(data=datas)
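# Usage sketch for the convenience routers above (illustrative only: the
# application class name `Api` and its constructor argument are assumptions,
# not guaranteed by this file; the handlers do rely on `upload_directory`
# being set on the instance):
#
#     app = Api(upload_directory="uploads")
#     app.add_health_router()    # GET  /health  -> ResponseResult()
#     app.add_upload_router()    # POST /upload  (single multipart file)
#     app.add_uploads_router()   # POST /uploads (list of multipart files)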
from typing import Any, Callable, Dict, Optional, Sequence
from . import params
from pydantic.fields import Undefined
def Path( # noqa: N802
default: Any = Undefined,
*,
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
**extra: Any,
) -> Any:
return params.Path(
default=default,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
example=example,
examples=examples,
deprecated=deprecated,
include_in_schema=include_in_schema,
**extra,
)
def Query( # noqa: N802
default: Any = Undefined,
*,
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
**extra: Any,
) -> Any:
return params.Query(
default=default,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
example=example,
examples=examples,
deprecated=deprecated,
include_in_schema=include_in_schema,
**extra,
)
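# Minimal usage sketch for Path() and Query() (illustrative; `app` is assumed
# to be an application or router instance from this package, and Optional
# comes from typing):
#
#     @app.get("/items/{item_id}")
#     async def read_item(
#         item_id: int = Path(..., ge=1, description="The item ID"),
#         q: Optional[str] = Query(None, max_length=50),
#     ):
#         return {"item_id": item_id, "q": q}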
def Header( # noqa: N802
default: Any = Undefined,
*,
alias: Optional[str] = None,
convert_underscores: bool = True,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
**extra: Any,
) -> Any:
return params.Header(
default=default,
alias=alias,
convert_underscores=convert_underscores,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
example=example,
examples=examples,
deprecated=deprecated,
include_in_schema=include_in_schema,
**extra,
)
def Cookie( # noqa: N802
default: Any = Undefined,
*,
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
**extra: Any,
) -> Any:
return params.Cookie(
default=default,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
example=example,
examples=examples,
deprecated=deprecated,
include_in_schema=include_in_schema,
**extra,
)
def Body( # noqa: N802
default: Any = Undefined,
*,
embed: bool = False,
media_type: str = "application/json",
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
**extra: Any,
) -> Any:
return params.Body(
default=default,
embed=embed,
media_type=media_type,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
example=example,
examples=examples,
**extra,
)
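# Sketch of Body(embed=True) (illustrative; `Item` is a hypothetical pydantic
# model): with embed, the parameter is expected under its own key in the JSON
# body instead of being the whole body.
#
#     @app.post("/items/")
#     async def create_item(item: Item = Body(..., embed=True)):
#         ...
#     # client sends: {"item": {...}} rather than {...}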
def Form( # noqa: N802
default: Any = Undefined,
*,
media_type: str = "application/x-www-form-urlencoded",
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
**extra: Any,
) -> Any:
return params.Form(
default=default,
media_type=media_type,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
example=example,
examples=examples,
**extra,
)
def File( # noqa: N802
default: Any = Undefined,
*,
media_type: str = "multipart/form-data",
alias: Optional[str] = None,
title: Optional[str] = None,
description: Optional[str] = None,
gt: Optional[float] = None,
ge: Optional[float] = None,
lt: Optional[float] = None,
le: Optional[float] = None,
min_length: Optional[int] = None,
max_length: Optional[int] = None,
regex: Optional[str] = None,
example: Any = Undefined,
examples: Optional[Dict[str, Any]] = None,
**extra: Any,
) -> Any:
return params.File(
default=default,
media_type=media_type,
alias=alias,
title=title,
description=description,
gt=gt,
ge=ge,
lt=lt,
le=le,
min_length=min_length,
max_length=max_length,
regex=regex,
example=example,
examples=examples,
**extra,
)
def Depends( # noqa: N802
dependency: Optional[Callable[..., Any]] = None, *, use_cache: bool = True
) -> Any:
return params.Depends(dependency=dependency, use_cache=use_cache)
def Security( # noqa: N802
dependency: Optional[Callable[..., Any]] = None,
*,
scopes: Optional[Sequence[str]] = None,
use_cache: bool = True,
) -> Any:
return params.Security(dependency=dependency, scopes=scopes, use_cache=use_cache) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/param_functions.py | param_functions.py |
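# Sketch of Depends() and Security() (illustrative; `get_db` and
# `get_current_user` are hypothetical dependency callables):
#
#     @app.get("/users/me")
#     async def me(
#         db=Depends(get_db),
#         user=Security(get_current_user, scopes=["me"]),
#     ):
#         return user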
import functools
import re
import warnings
from dataclasses import is_dataclass
from enum import Enum
from typing import TYPE_CHECKING, Any, Dict, Optional, Set, Type, Union, cast
from . import exceptions
from .datastructures import DefaultPlaceholder, DefaultType
from .openapi.constants import REF_PREFIX
from pydantic import BaseConfig, BaseModel, create_model
from pydantic.class_validators import Validator
from pydantic.fields import FieldInfo, ModelField, UndefinedType
from pydantic.schema import model_process_schema
from pydantic.utils import lenient_issubclass
if TYPE_CHECKING: # pragma: nocover
from .routing import APIRoute
def get_model_definitions(
*,
flat_models: Set[Union[Type[BaseModel], Type[Enum]]],
model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str],
) -> Dict[str, Any]:
definitions: Dict[str, Dict[str, Any]] = {}
for model in flat_models:
m_schema, m_definitions, m_nested_models = model_process_schema(
model, model_name_map=model_name_map, ref_prefix=REF_PREFIX
)
definitions.update(m_definitions)
model_name = model_name_map[model]
definitions[model_name] = m_schema
return definitions
def get_path_param_names(path: str) -> Set[str]:
return set(re.findall("{(.*?)}", path))
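# Example (quick sanity check, not part of the original source):
#     get_path_param_names("/items/{item_id}/tags/{tag}")
#     -> {"item_id", "tag"}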
def create_response_field(
name: str,
type_: Type[Any],
class_validators: Optional[Dict[str, Validator]] = None,
default: Optional[Any] = None,
required: Union[bool, UndefinedType] = False,
model_config: Type[BaseConfig] = BaseConfig,
field_info: Optional[FieldInfo] = None,
alias: Optional[str] = None,
) -> ModelField:
"""
Create a new response field. Raises if type_ is invalid.
"""
class_validators = class_validators or {}
field_info = field_info or FieldInfo()
response_field = functools.partial(
ModelField,
name=name,
type_=type_,
class_validators=class_validators,
default=default,
required=required,
model_config=model_config,
alias=alias,
)
try:
return response_field(field_info=field_info)
except RuntimeError:
raise exceptions.FastAPIError(
f"Invalid args for response field! Hint: check that {type_} is a valid pydantic field type"
)
def create_cloned_field(
field: ModelField,
*,
cloned_types: Optional[Dict[Type[BaseModel], Type[BaseModel]]] = None,
) -> ModelField:
    # cloned_types caches already-cloned types, to support recursive models
if cloned_types is None:
cloned_types = dict()
original_type = field.type_
if is_dataclass(original_type) and hasattr(original_type, "__pydantic_model__"):
original_type = original_type.__pydantic_model__
use_type = original_type
if lenient_issubclass(original_type, BaseModel):
original_type = cast(Type[BaseModel], original_type)
use_type = cloned_types.get(original_type)
if use_type is None:
use_type = create_model(original_type.__name__, __base__=original_type)
cloned_types[original_type] = use_type
for f in original_type.__fields__.values():
use_type.__fields__[f.name] = create_cloned_field(
f, cloned_types=cloned_types
)
new_field = create_response_field(name=field.name, type_=use_type)
new_field.has_alias = field.has_alias
new_field.alias = field.alias
new_field.class_validators = field.class_validators
new_field.default = field.default
new_field.required = field.required
new_field.model_config = field.model_config
new_field.field_info = field.field_info
new_field.allow_none = field.allow_none
new_field.validate_always = field.validate_always
if field.sub_fields:
new_field.sub_fields = [
create_cloned_field(sub_field, cloned_types=cloned_types)
for sub_field in field.sub_fields
]
if field.key_field:
new_field.key_field = create_cloned_field(
field.key_field, cloned_types=cloned_types
)
new_field.validators = field.validators
new_field.pre_validators = field.pre_validators
new_field.post_validators = field.post_validators
new_field.parse_json = field.parse_json
new_field.shape = field.shape
new_field.populate_validators()
return new_field
def generate_operation_id_for_path(
*, name: str, path: str, method: str
) -> str: # pragma: nocover
warnings.warn(
"fastapi.utils.generate_operation_id_for_path() was deprecated, "
"it is not used internally, and will be removed soon",
DeprecationWarning,
stacklevel=2,
)
operation_id = name + path
operation_id = re.sub("[^0-9a-zA-Z_]", "_", operation_id)
operation_id = operation_id + "_" + method.lower()
return operation_id
def generate_unique_id(route: "APIRoute") -> str:
operation_id = route.name + route.path_format
operation_id = re.sub("[^0-9a-zA-Z_]", "_", operation_id)
assert route.methods
operation_id = operation_id + "_" + list(route.methods)[0].lower()
return operation_id
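# Example (not in the original source): a route named "read_item" on
# "/items/{item_id}" with method GET yields
# "read_item_items__item_id__get".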
def deep_dict_update(main_dict: Dict[Any, Any], update_dict: Dict[Any, Any]) -> None:
for key, value in update_dict.items():
if (
key in main_dict
and isinstance(main_dict[key], dict)
and isinstance(value, dict)
):
deep_dict_update(main_dict[key], value)
else:
main_dict[key] = value
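# Example (not in the original source): nested dicts merge, other values are
# overwritten.
#     main = {"a": {"x": 1}, "b": 1}
#     deep_dict_update(main, {"a": {"y": 2}, "b": 2})
#     # main == {"a": {"x": 1, "y": 2}, "b": 2}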
def get_value_or_default(
first_item: Union[DefaultPlaceholder, DefaultType],
*extra_items: Union[DefaultPlaceholder, DefaultType],
) -> Union[DefaultPlaceholder, DefaultType]:
"""
Pass items or `DefaultPlaceholder`s by descending priority.
The first one to _not_ be a `DefaultPlaceholder` will be returned.
Otherwise, the first item (a `DefaultPlaceholder`) will be returned.
"""
items = (first_item,) + extra_items
for item in items:
if not isinstance(item, DefaultPlaceholder):
return item
return first_item | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/utils.py | utils.py |
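# Example (not in the original source; Default() is the DefaultPlaceholder
# constructor from .datastructures):
#     get_value_or_default(Default(1), 2, Default(3))  -> 2
#     get_value_or_default(Default(1), Default(2))     -> the Default(1) placeholder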
import asyncio
import dataclasses
import email.message
import inspect
import json
from enum import Enum, IntEnum
from typing import (
Any,
Callable,
Coroutine,
Dict,
List,
Optional,
Sequence,
Set,
Tuple,
Type,
Union,
)
from . import params
from .datastructures import Default, DefaultPlaceholder
from .dependencies.models import Dependant
from .dependencies.utils import (
get_body_field,
get_dependant,
get_parameterless_sub_dependant,
solve_dependencies,
)
from .encoders import DictIntStrAny, SetIntStr, jsonable_encoder
from .exceptions import RequestValidationError, WebSocketRequestValidationError
from .openapi.constants import STATUS_CODES_WITH_NO_BODY
from .types import DecoratedCallable
from .utils import (
create_cloned_field,
create_response_field,
generate_unique_id,
get_value_or_default,
)
from pydantic import BaseModel
from pydantic.error_wrappers import ErrorWrapper, ValidationError
from pydantic.fields import ModelField, Undefined
from zdppy_api.starlette import routing
from zdppy_api.starlette.concurrency import run_in_threadpool
from zdppy_api.starlette.exceptions import HTTPException
from zdppy_api.starlette.requests import Request
from zdppy_api.starlette.responses import JSONResponse, Response
from zdppy_api.starlette.routing import BaseRoute, Match
from zdppy_api.starlette.routing import Mount as Mount # noqa
from zdppy_api.starlette.routing import (
compile_path,
get_name,
request_response,
websocket_session,
)
from zdppy_api.starlette.status import WS_1008_POLICY_VIOLATION
from zdppy_api.starlette.types import ASGIApp, Scope
from zdppy_api.starlette.websockets import WebSocket
def _prepare_response_content(
res: Any,
*,
exclude_unset: bool,
exclude_defaults: bool = False,
exclude_none: bool = False,
) -> Any:
if isinstance(res, BaseModel):
read_with_orm_mode = getattr(res.__config__, "read_with_orm_mode", None)
if read_with_orm_mode:
# Let from_orm extract the data from this model instead of converting
# it now to a dict.
# Otherwise there's no way to extract lazy data that requires attribute
# access instead of dict iteration, e.g. lazy relationships.
return res
return res.dict(
by_alias=True,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
elif isinstance(res, list):
return [
_prepare_response_content(
item,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
for item in res
]
elif isinstance(res, dict):
return {
k: _prepare_response_content(
v,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
for k, v in res.items()
}
elif dataclasses.is_dataclass(res):
return dataclasses.asdict(res)
return res
async def serialize_response(
*,
field: Optional[ModelField] = None,
response_content: Any,
include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
by_alias: bool = True,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
is_coroutine: bool = True,
) -> Any:
if field:
errors = []
response_content = _prepare_response_content(
response_content,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
if is_coroutine:
value, errors_ = field.validate(response_content, {}, loc=("response",))
else:
value, errors_ = await run_in_threadpool( # type: ignore[misc]
field.validate, response_content, {}, loc=("response",)
)
if isinstance(errors_, ErrorWrapper):
errors.append(errors_)
elif isinstance(errors_, list):
errors.extend(errors_)
if errors:
raise ValidationError(errors, field.type_)
return jsonable_encoder(
value,
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
else:
return jsonable_encoder(response_content)
async def run_endpoint_function(
*, dependant: Dependant, values: Dict[str, Any], is_coroutine: bool
) -> Any:
# Only called by get_request_handler. Has been split into its own function to
# facilitate profiling endpoints, since inner functions are harder to profile.
assert dependant.call is not None, "dependant.call must be a function"
if is_coroutine:
return await dependant.call(**values)
else:
return await run_in_threadpool(dependant.call, **values)
def get_request_handler(
dependant: Dependant,
body_field: Optional[ModelField] = None,
status_code: Optional[int] = None,
response_class: Union[Type[Response], DefaultPlaceholder] = Default(JSONResponse),
response_field: Optional[ModelField] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
dependency_overrides_provider: Optional[Any] = None,
) -> Callable[[Request], Coroutine[Any, Any, Response]]:
assert dependant.call is not None, "dependant.call must be a function"
is_coroutine = asyncio.iscoroutinefunction(dependant.call)
is_body_form = body_field and isinstance(body_field.field_info, params.Form)
if isinstance(response_class, DefaultPlaceholder):
actual_response_class: Type[Response] = response_class.value
else:
actual_response_class = response_class
async def app(request: Request) -> Response:
try:
body: Any = None
if body_field:
if is_body_form:
body = await request.form()
else:
body_bytes = await request.body()
if body_bytes:
json_body: Any = Undefined
content_type_value = request.headers.get("content-type")
if not content_type_value:
json_body = await request.json()
else:
message = email.message.Message()
message["content-type"] = content_type_value
if message.get_content_maintype() == "application":
subtype = message.get_content_subtype()
if subtype == "json" or subtype.endswith("+json"):
json_body = await request.json()
if json_body != Undefined:
body = json_body
else:
body = body_bytes
except json.JSONDecodeError as e:
raise RequestValidationError([ErrorWrapper(e, ("body", e.pos))], body=e.doc)
except Exception as e:
raise HTTPException(
status_code=400, detail="There was an error parsing the body"
) from e
solved_result = await solve_dependencies(
request=request,
dependant=dependant,
body=body,
dependency_overrides_provider=dependency_overrides_provider,
)
values, errors, background_tasks, sub_response, _ = solved_result
if errors:
raise RequestValidationError(errors, body=body)
else:
raw_response = await run_endpoint_function(
dependant=dependant, values=values, is_coroutine=is_coroutine
)
if isinstance(raw_response, Response):
if raw_response.background is None:
raw_response.background = background_tasks
return raw_response
response_data = await serialize_response(
field=response_field,
response_content=raw_response,
include=response_model_include,
exclude=response_model_exclude,
by_alias=response_model_by_alias,
exclude_unset=response_model_exclude_unset,
exclude_defaults=response_model_exclude_defaults,
exclude_none=response_model_exclude_none,
is_coroutine=is_coroutine,
)
response_args: Dict[str, Any] = {"background": background_tasks}
# If status_code was set, use it, otherwise use the default from the
# response class, in the case of redirect it's 307
if status_code is not None:
response_args["status_code"] = status_code
response = actual_response_class(response_data, **response_args)
response.headers.raw.extend(sub_response.headers.raw)
if sub_response.status_code:
response.status_code = sub_response.status_code
return response
return app
def get_websocket_app(
dependant: Dependant, dependency_overrides_provider: Optional[Any] = None
) -> Callable[[WebSocket], Coroutine[Any, Any, Any]]:
async def app(websocket: WebSocket) -> None:
solved_result = await solve_dependencies(
request=websocket,
dependant=dependant,
dependency_overrides_provider=dependency_overrides_provider,
)
values, errors, _, _2, _3 = solved_result
if errors:
await websocket.close(code=WS_1008_POLICY_VIOLATION)
raise WebSocketRequestValidationError(errors)
assert dependant.call is not None, "dependant.call must be a function"
await dependant.call(**values)
return app
class APIWebSocketRoute(routing.WebSocketRoute):
def __init__(
self,
path: str,
endpoint: Callable[..., Any],
*,
name: Optional[str] = None,
dependency_overrides_provider: Optional[Any] = None,
) -> None:
self.path = path
self.endpoint = endpoint
self.name = get_name(endpoint) if name is None else name
self.dependant = get_dependant(path=path, call=self.endpoint)
self.app = websocket_session(
get_websocket_app(
dependant=self.dependant,
dependency_overrides_provider=dependency_overrides_provider,
)
)
self.path_regex, self.path_format, self.param_convertors = compile_path(path)
def matches(self, scope: Scope) -> Tuple[Match, Scope]:
match, child_scope = super().matches(scope)
if match != Match.NONE:
child_scope["route"] = self
return match, child_scope
class APIRoute(routing.Route):
def __init__(
self,
path: str,
endpoint: Callable[..., Any],
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
name: Optional[str] = None,
methods: Optional[Union[Set[str], List[str]]] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Union[Type[Response], DefaultPlaceholder] = Default(
JSONResponse
),
dependency_overrides_provider: Optional[Any] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Union[
Callable[["APIRoute"], str], DefaultPlaceholder
] = Default(generate_unique_id),
) -> None:
self.path = path
self.endpoint = endpoint
self.response_model = response_model
self.summary = summary
self.response_description = response_description
self.deprecated = deprecated
self.operation_id = operation_id
self.response_model_include = response_model_include
self.response_model_exclude = response_model_exclude
self.response_model_by_alias = response_model_by_alias
self.response_model_exclude_unset = response_model_exclude_unset
self.response_model_exclude_defaults = response_model_exclude_defaults
self.response_model_exclude_none = response_model_exclude_none
self.include_in_schema = include_in_schema
self.response_class = response_class
self.dependency_overrides_provider = dependency_overrides_provider
self.callbacks = callbacks
self.openapi_extra = openapi_extra
self.generate_unique_id_function = generate_unique_id_function
self.tags = tags or []
self.responses = responses or {}
self.name = get_name(endpoint) if name is None else name
self.path_regex, self.path_format, self.param_convertors = compile_path(path)
if methods is None:
methods = ["GET"]
self.methods: Set[str] = {method.upper() for method in methods}
if isinstance(generate_unique_id_function, DefaultPlaceholder):
current_generate_unique_id: Callable[
["APIRoute"], str
] = generate_unique_id_function.value
else:
current_generate_unique_id = generate_unique_id_function
self.unique_id = self.operation_id or current_generate_unique_id(self)
# normalize enums e.g. http.HTTPStatus
if isinstance(status_code, IntEnum):
status_code = int(status_code)
self.status_code = status_code
if self.response_model:
assert (
status_code not in STATUS_CODES_WITH_NO_BODY
), f"Status code {status_code} must not have a response body"
response_name = "Response_" + self.unique_id
self.response_field = create_response_field(
name=response_name, type_=self.response_model
)
# Create a clone of the field, so that a Pydantic submodel is not returned
# as is just because it's an instance of a subclass of a more limited class
# e.g. UserInDB (containing hashed_password) could be a subclass of User
# that doesn't have the hashed_password. But because it's a subclass, it
# would pass the validation and be returned as is.
# By being a new field, no inheritance will be passed as is. A new model
# will be always created.
self.secure_cloned_response_field: Optional[
ModelField
] = create_cloned_field(self.response_field)
else:
self.response_field = None # type: ignore
self.secure_cloned_response_field = None
if dependencies:
self.dependencies = list(dependencies)
else:
self.dependencies = []
self.description = description or inspect.cleandoc(self.endpoint.__doc__ or "")
# if a "form feed" character (page break) is found in the description text,
# truncate description text to the content preceding the first "form feed"
self.description = self.description.split("\f")[0]
response_fields = {}
for additional_status_code, response in self.responses.items():
assert isinstance(response, dict), "An additional response must be a dict"
model = response.get("model")
if model:
assert (
additional_status_code not in STATUS_CODES_WITH_NO_BODY
), f"Status code {additional_status_code} must not have a response body"
response_name = f"Response_{additional_status_code}_{self.unique_id}"
response_field = create_response_field(name=response_name, type_=model)
response_fields[additional_status_code] = response_field
if response_fields:
self.response_fields: Dict[Union[int, str], ModelField] = response_fields
else:
self.response_fields = {}
assert callable(endpoint), "An endpoint must be a callable"
self.dependant = get_dependant(path=self.path_format, call=self.endpoint)
for depends in self.dependencies[::-1]:
self.dependant.dependencies.insert(
0,
get_parameterless_sub_dependant(depends=depends, path=self.path_format),
)
self.body_field = get_body_field(dependant=self.dependant, name=self.unique_id)
self.app = request_response(self.get_route_handler())
def get_route_handler(self) -> Callable[[Request], Coroutine[Any, Any, Response]]:
return get_request_handler(
dependant=self.dependant,
body_field=self.body_field,
status_code=self.status_code,
response_class=self.response_class,
response_field=self.secure_cloned_response_field,
response_model_include=self.response_model_include,
response_model_exclude=self.response_model_exclude,
response_model_by_alias=self.response_model_by_alias,
response_model_exclude_unset=self.response_model_exclude_unset,
response_model_exclude_defaults=self.response_model_exclude_defaults,
response_model_exclude_none=self.response_model_exclude_none,
dependency_overrides_provider=self.dependency_overrides_provider,
)
def matches(self, scope: Scope) -> Tuple[Match, Scope]:
match, child_scope = super().matches(scope)
if match != Match.NONE:
child_scope["route"] = self
return match, child_scope
class APIRouter(routing.Router):
def __init__(
self,
*,
prefix: str = "",
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
default_response_class: Type[Response] = Default(JSONResponse),
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
callbacks: Optional[List[BaseRoute]] = None,
routes: Optional[List[routing.BaseRoute]] = None,
redirect_slashes: bool = True,
default: Optional[ASGIApp] = None,
dependency_overrides_provider: Optional[Any] = None,
route_class: Type[APIRoute] = APIRoute,
on_startup: Optional[Sequence[Callable[[], Any]]] = None,
on_shutdown: Optional[Sequence[Callable[[], Any]]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
generate_unique_id_function: Callable[[APIRoute], str] = Default(
generate_unique_id
),
) -> None:
super().__init__(
routes=routes,
redirect_slashes=redirect_slashes,
default=default,
on_startup=on_startup,
on_shutdown=on_shutdown,
)
if prefix:
assert prefix.startswith("/"), "A path prefix must start with '/'"
assert not prefix.endswith(
"/"
), "A path prefix must not end with '/', as the routes will start with '/'"
self.prefix = prefix
self.tags: List[Union[str, Enum]] = tags or []
self.dependencies = list(dependencies or []) or []
self.deprecated = deprecated
self.include_in_schema = include_in_schema
self.responses = responses or {}
self.callbacks = callbacks or []
self.dependency_overrides_provider = dependency_overrides_provider
self.route_class = route_class
self.default_response_class = default_response_class
self.generate_unique_id_function = generate_unique_id_function
def add_api_route(
self,
path: str,
endpoint: Callable[..., Any],
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
methods: Optional[Union[Set[str], List[str]]] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Union[Type[Response], DefaultPlaceholder] = Default(
JSONResponse
),
name: Optional[str] = None,
route_class_override: Optional[Type[APIRoute]] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Union[
Callable[[APIRoute], str], DefaultPlaceholder
] = Default(generate_unique_id),
) -> None:
route_class = route_class_override or self.route_class
responses = responses or {}
combined_responses = {**self.responses, **responses}
current_response_class = get_value_or_default(
response_class, self.default_response_class
)
current_tags = self.tags.copy()
if tags:
current_tags.extend(tags)
current_dependencies = self.dependencies.copy()
if dependencies:
current_dependencies.extend(dependencies)
current_callbacks = self.callbacks.copy()
if callbacks:
current_callbacks.extend(callbacks)
current_generate_unique_id = get_value_or_default(
generate_unique_id_function, self.generate_unique_id_function
)
route = route_class(
self.prefix + path,
endpoint=endpoint,
response_model=response_model,
status_code=status_code,
tags=current_tags,
dependencies=current_dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=combined_responses,
deprecated=deprecated or self.deprecated,
methods=methods,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema and self.include_in_schema,
response_class=current_response_class,
name=name,
dependency_overrides_provider=self.dependency_overrides_provider,
callbacks=current_callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=current_generate_unique_id,
)
self.routes.append(route)
def api_route(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
methods: Optional[List[str]] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
def decorator(func: DecoratedCallable) -> DecoratedCallable:
self.add_api_route(
path,
func,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=methods,
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
return func
return decorator
def add_api_websocket_route(
self, path: str, endpoint: Callable[..., Any], name: Optional[str] = None
) -> None:
route = APIWebSocketRoute(
self.prefix + path,
endpoint=endpoint,
name=name,
dependency_overrides_provider=self.dependency_overrides_provider,
)
self.routes.append(route)
def websocket(
self, path: str, name: Optional[str] = None
) -> Callable[[DecoratedCallable], DecoratedCallable]:
def decorator(func: DecoratedCallable) -> DecoratedCallable:
self.add_api_websocket_route(path, func, name=name)
return func
return decorator
def include_router(
self,
router: "APIRouter",
*,
prefix: str = "",
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
default_response_class: Type[Response] = Default(JSONResponse),
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
callbacks: Optional[List[BaseRoute]] = None,
deprecated: Optional[bool] = None,
include_in_schema: bool = True,
generate_unique_id_function: Callable[[APIRoute], str] = Default(
generate_unique_id
),
) -> None:
if prefix:
assert prefix.startswith("/"), "A path prefix must start with '/'"
assert not prefix.endswith(
"/"
), "A path prefix must not end with '/', as the routes will start with '/'"
else:
for r in router.routes:
path = getattr(r, "path")
name = getattr(r, "name", "unknown")
if path is not None and not path:
raise Exception(
f"Prefix and path cannot be both empty (path operation: {name})"
)
if responses is None:
responses = {}
for route in router.routes:
if isinstance(route, APIRoute):
combined_responses = {**responses, **route.responses}
use_response_class = get_value_or_default(
route.response_class,
router.default_response_class,
default_response_class,
self.default_response_class,
)
current_tags = []
if tags:
current_tags.extend(tags)
if route.tags:
current_tags.extend(route.tags)
current_dependencies: List[params.Depends] = []
if dependencies:
current_dependencies.extend(dependencies)
if route.dependencies:
current_dependencies.extend(route.dependencies)
current_callbacks = []
if callbacks:
current_callbacks.extend(callbacks)
if route.callbacks:
current_callbacks.extend(route.callbacks)
current_generate_unique_id = get_value_or_default(
route.generate_unique_id_function,
router.generate_unique_id_function,
generate_unique_id_function,
self.generate_unique_id_function,
)
self.add_api_route(
prefix + route.path,
route.endpoint,
response_model=route.response_model,
status_code=route.status_code,
tags=current_tags,
dependencies=current_dependencies,
summary=route.summary,
description=route.description,
response_description=route.response_description,
responses=combined_responses,
deprecated=route.deprecated or deprecated or self.deprecated,
methods=route.methods,
operation_id=route.operation_id,
response_model_include=route.response_model_include,
response_model_exclude=route.response_model_exclude,
response_model_by_alias=route.response_model_by_alias,
response_model_exclude_unset=route.response_model_exclude_unset,
response_model_exclude_defaults=route.response_model_exclude_defaults,
response_model_exclude_none=route.response_model_exclude_none,
include_in_schema=route.include_in_schema
and self.include_in_schema
and include_in_schema,
response_class=use_response_class,
name=route.name,
route_class_override=type(route),
callbacks=current_callbacks,
openapi_extra=route.openapi_extra,
generate_unique_id_function=current_generate_unique_id,
)
elif isinstance(route, routing.Route):
methods = list(route.methods or [])
self.add_route(
prefix + route.path,
route.endpoint,
methods=methods,
include_in_schema=route.include_in_schema,
name=route.name,
)
elif isinstance(route, APIWebSocketRoute):
self.add_api_websocket_route(
prefix + route.path, route.endpoint, name=route.name
)
elif isinstance(route, routing.WebSocketRoute):
self.add_websocket_route(
prefix + route.path, route.endpoint, name=route.name
)
for handler in router.on_startup:
self.add_event_handler("startup", handler)
for handler in router.on_shutdown:
self.add_event_handler("shutdown", handler)
def get(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["GET"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
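    # The put/post/delete/options/head/patch/trace helpers below mirror `get`,
    # differing only in the HTTP method they pass to api_route. Usage sketch
    # (illustrative; `Item` is an assumed pydantic model):
    #
    #     @router.get("/items/{item_id}", response_model=Item, tags=["items"])
    #     async def read_item(item_id: int):
    #         return {"id": item_id, "name": "example"}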
def put(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["PUT"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def post(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["POST"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def delete(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["DELETE"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def options(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["OPTIONS"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def head(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["HEAD"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def patch(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["PATCH"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
)
def trace(
self,
path: str,
*,
response_model: Optional[Type[Any]] = None,
status_code: Optional[int] = None,
tags: Optional[List[Union[str, Enum]]] = None,
dependencies: Optional[Sequence[params.Depends]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
response_description: str = "Successful Response",
responses: Optional[Dict[Union[int, str], Dict[str, Any]]] = None,
deprecated: Optional[bool] = None,
operation_id: Optional[str] = None,
response_model_include: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_exclude: Optional[Union[SetIntStr, DictIntStrAny]] = None,
response_model_by_alias: bool = True,
response_model_exclude_unset: bool = False,
response_model_exclude_defaults: bool = False,
response_model_exclude_none: bool = False,
include_in_schema: bool = True,
response_class: Type[Response] = Default(JSONResponse),
name: Optional[str] = None,
callbacks: Optional[List[BaseRoute]] = None,
openapi_extra: Optional[Dict[str, Any]] = None,
generate_unique_id_function: Callable[[APIRoute], str] = Default(
generate_unique_id
),
) -> Callable[[DecoratedCallable], DecoratedCallable]:
return self.api_route(
path=path,
response_model=response_model,
status_code=status_code,
tags=tags,
dependencies=dependencies,
summary=summary,
description=description,
response_description=response_description,
responses=responses,
deprecated=deprecated,
methods=["TRACE"],
operation_id=operation_id,
response_model_include=response_model_include,
response_model_exclude=response_model_exclude,
response_model_by_alias=response_model_by_alias,
response_model_exclude_unset=response_model_exclude_unset,
response_model_exclude_defaults=response_model_exclude_defaults,
response_model_exclude_none=response_model_exclude_none,
include_in_schema=include_in_schema,
response_class=response_class,
name=name,
callbacks=callbacks,
openapi_extra=openapi_extra,
generate_unique_id_function=generate_unique_id_function,
        )

# ---- end of zdppy_api/routing.py ----
import json
from typing import Any, Dict, Optional
from ..encoders import jsonable_encoder
from zdppy_api.starlette.responses import HTMLResponse
swagger_ui_default_parameters = {
"dom_id": "#swagger-ui",
"layout": "BaseLayout",
"deepLinking": True,
"showExtensions": True,
"showCommonExtensions": True,
}
def get_swagger_ui_html(
*,
openapi_url: str,
title: str,
swagger_js_url: str = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@4/swagger-ui-bundle.js",
swagger_css_url: str = "https://cdn.jsdelivr.net/npm/swagger-ui-dist@4/swagger-ui.css",
swagger_favicon_url: str = "https://fastapi.tiangolo.com/img/favicon.png",
oauth2_redirect_url: Optional[str] = None,
init_oauth: Optional[Dict[str, Any]] = None,
swagger_ui_parameters: Optional[Dict[str, Any]] = None,
) -> HTMLResponse:
current_swagger_ui_parameters = swagger_ui_default_parameters.copy()
if swagger_ui_parameters:
current_swagger_ui_parameters.update(swagger_ui_parameters)
html = f"""
<!DOCTYPE html>
<html>
<head>
<link type="text/css" rel="stylesheet" href="{swagger_css_url}">
<link rel="shortcut icon" href="{swagger_favicon_url}">
<title>{title}</title>
</head>
<body>
<div id="swagger-ui">
</div>
<script src="{swagger_js_url}"></script>
<!-- `SwaggerUIBundle` is now available on the page -->
<script>
const ui = SwaggerUIBundle({{
url: '{openapi_url}',
"""
for key, value in current_swagger_ui_parameters.items():
html += f"{json.dumps(key)}: {json.dumps(jsonable_encoder(value))},\n"
if oauth2_redirect_url:
html += f"oauth2RedirectUrl: window.location.origin + '{oauth2_redirect_url}',"
html += """
presets: [
SwaggerUIBundle.presets.apis,
SwaggerUIBundle.SwaggerUIStandalonePreset
],
})"""
if init_oauth:
html += f"""
ui.initOAuth({json.dumps(jsonable_encoder(init_oauth))})
"""
html += """
</script>
</body>
</html>
"""
return HTMLResponse(html)
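# Usage sketch for get_swagger_ui_html (illustrative; assumes an app created
# with docs_url=None so this custom route serves the UI instead):
#
#     app = FastAPI(docs_url=None)
#
#     @app.get("/docs", include_in_schema=False)
#     async def custom_swagger_ui():
#         return get_swagger_ui_html(
#             openapi_url="/openapi.json",
#             title="My API - Swagger UI",
#             swagger_ui_parameters={"defaultModelsExpandDepth": -1},
#         )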
def get_redoc_html(
*,
openapi_url: str,
title: str,
redoc_js_url: str = "https://cdn.jsdelivr.net/npm/redoc@next/bundles/redoc.standalone.js",
redoc_favicon_url: str = "https://fastapi.tiangolo.com/img/favicon.png",
with_google_fonts: bool = True,
) -> HTMLResponse:
html = f"""
<!DOCTYPE html>
<html>
<head>
<title>{title}</title>
<!-- needed for adaptive design -->
<meta charset="utf-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1">
"""
if with_google_fonts:
html += """
<link href="https://fonts.googleapis.com/css?family=Montserrat:300,400,700|Roboto:300,400,700" rel="stylesheet">
"""
html += f"""
<link rel="shortcut icon" href="{redoc_favicon_url}">
<!--
ReDoc doesn't change outer page styles
-->
<style>
body {{
margin: 0;
padding: 0;
}}
</style>
</head>
<body>
<redoc spec-url="{openapi_url}"></redoc>
<script src="{redoc_js_url}"> </script>
</body>
</html>
"""
return HTMLResponse(html)
def get_swagger_ui_oauth2_redirect_html() -> HTMLResponse:
html = """
<!DOCTYPE html>
<html lang="en-US">
<body onload="run()">
<script>
'use strict';
function run () {
var oauth2 = window.opener.swaggerUIRedirectOauth2;
var sentState = oauth2.state;
var redirectUrl = oauth2.redirectUrl;
var isValid, qp, arr;
if (/code|token|error/.test(window.location.hash)) {
qp = window.location.hash.substring(1);
} else {
qp = location.search.substring(1);
}
arr = qp.split("&")
arr.forEach(function (v,i,_arr) { _arr[i] = '"' + v.replace('=', '":"') + '"';})
qp = qp ? JSON.parse('{' + arr.join() + '}',
function (key, value) {
return key === "" ? value : decodeURIComponent(value)
}
) : {}
isValid = qp.state === sentState
if ((
oauth2.auth.schema.get("flow") === "accessCode"||
oauth2.auth.schema.get("flow") === "authorizationCode"
) && !oauth2.auth.code) {
if (!isValid) {
oauth2.errCb({
authId: oauth2.auth.name,
source: "auth",
level: "warning",
message: "Authorization may be unsafe, passed state was changed in server Passed state wasn't returned from auth server"
});
}
if (qp.code) {
delete oauth2.state;
oauth2.auth.code = qp.code;
oauth2.callback({auth: oauth2.auth, redirectUrl: redirectUrl});
} else {
let oauthErrorMsg
if (qp.error) {
oauthErrorMsg = "["+qp.error+"]: " +
(qp.error_description ? qp.error_description+ ". " : "no accessCode received from the server. ") +
(qp.error_uri ? "More info: "+qp.error_uri : "");
}
oauth2.errCb({
authId: oauth2.auth.name,
source: "auth",
level: "error",
message: oauthErrorMsg || "[Authorization failed]: no accessCode received from the server"
});
}
} else {
oauth2.callback({auth: oauth2.auth, token: qp, isValid: isValid, redirectUrl: redirectUrl});
}
window.close();
}
</script>
</body>
</html>
"""
    return HTMLResponse(content=html)

# ---- end of zdppy_api/openapi/docs.py ----
import http.client
import inspect
import warnings
from enum import Enum
from typing import Any, Dict, List, Optional, Sequence, Set, Tuple, Type, Union, cast
from .. import routing
from ..datastructures import DefaultPlaceholder
from ..dependencies.models import Dependant
from ..dependencies.utils import get_flat_dependant, get_flat_params
from ..encoders import jsonable_encoder
from ..openapi.constants import (
METHODS_WITH_BODY,
REF_PREFIX,
STATUS_CODES_WITH_NO_BODY,
)
from ..openapi.models import OpenAPI
from ..params import Body, Param
from ..responses import Response
from ..utils import (
deep_dict_update,
generate_operation_id_for_path,
get_model_definitions,
)
from pydantic import BaseModel
from pydantic.fields import ModelField, Undefined
from pydantic.schema import (
field_schema,
get_flat_models_from_fields,
get_model_name_map,
)
from pydantic.utils import lenient_issubclass
from zdppy_api.starlette.responses import JSONResponse
from zdppy_api.starlette.routing import BaseRoute
from zdppy_api.starlette.status import HTTP_422_UNPROCESSABLE_ENTITY
validation_error_definition = {
"title": "ValidationError",
"type": "object",
"properties": {
"loc": {
"title": "Location",
"type": "array",
"items": {"anyOf": [{"type": "string"}, {"type": "integer"}]},
},
"msg": {"title": "Message", "type": "string"},
"type": {"title": "Error Type", "type": "string"},
},
"required": ["loc", "msg", "type"],
}
validation_error_response_definition = {
"title": "HTTPValidationError",
"type": "object",
"properties": {
"detail": {
"title": "Detail",
"type": "array",
"items": {"$ref": REF_PREFIX + "ValidationError"},
}
},
}
status_code_ranges: Dict[str, str] = {
"1XX": "Information",
"2XX": "Success",
"3XX": "Redirection",
"4XX": "Client Error",
"5XX": "Server Error",
"DEFAULT": "Default Response",
}
def get_openapi_security_definitions(
flat_dependant: Dependant,
) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]:
security_definitions = {}
operation_security = []
for security_requirement in flat_dependant.security_requirements:
security_definition = jsonable_encoder(
security_requirement.security_scheme.model,
by_alias=True,
exclude_none=True,
)
security_name = security_requirement.security_scheme.scheme_name
security_definitions[security_name] = security_definition
operation_security.append({security_name: security_requirement.scopes})
return security_definitions, operation_security
def get_openapi_operation_parameters(
*,
all_route_params: Sequence[ModelField],
model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str],
) -> List[Dict[str, Any]]:
parameters = []
for param in all_route_params:
field_info = param.field_info
field_info = cast(Param, field_info)
if not field_info.include_in_schema:
continue
parameter = {
"name": param.alias,
"in": field_info.in_.value,
"required": param.required,
"schema": field_schema(
param, model_name_map=model_name_map, ref_prefix=REF_PREFIX
)[0],
}
if field_info.description:
parameter["description"] = field_info.description
if field_info.examples:
parameter["examples"] = jsonable_encoder(field_info.examples)
elif field_info.example != Undefined:
parameter["example"] = jsonable_encoder(field_info.example)
if field_info.deprecated:
parameter["deprecated"] = field_info.deprecated
parameters.append(parameter)
return parameters
def get_openapi_operation_request_body(
*,
body_field: Optional[ModelField],
model_name_map: Dict[Union[Type[BaseModel], Type[Enum]], str],
) -> Optional[Dict[str, Any]]:
if not body_field:
return None
assert isinstance(body_field, ModelField)
body_schema, _, _ = field_schema(
body_field, model_name_map=model_name_map, ref_prefix=REF_PREFIX
)
field_info = cast(Body, body_field.field_info)
request_media_type = field_info.media_type
required = body_field.required
request_body_oai: Dict[str, Any] = {}
if required:
request_body_oai["required"] = required
request_media_content: Dict[str, Any] = {"schema": body_schema}
if field_info.examples:
request_media_content["examples"] = jsonable_encoder(field_info.examples)
elif field_info.example != Undefined:
request_media_content["example"] = jsonable_encoder(field_info.example)
request_body_oai["content"] = {request_media_type: request_media_content}
return request_body_oai
def generate_operation_id(
*, route: routing.APIRoute, method: str
) -> str: # pragma: nocover
warnings.warn(
"fastapi.openapi.utils.generate_operation_id() was deprecated, "
"it is not used internally, and will be removed soon",
DeprecationWarning,
stacklevel=2,
)
if route.operation_id:
return route.operation_id
path: str = route.path_format
return generate_operation_id_for_path(name=route.name, path=path, method=method)
def generate_operation_summary(*, route: routing.APIRoute, method: str) -> str:
if route.summary:
return route.summary
return route.name.replace("_", " ").title()
def get_openapi_operation_metadata(
*, route: routing.APIRoute, method: str, operation_ids: Set[str]
) -> Dict[str, Any]:
operation: Dict[str, Any] = {}
if route.tags:
operation["tags"] = route.tags
operation["summary"] = generate_operation_summary(route=route, method=method)
if route.description:
operation["description"] = route.description
operation_id = route.operation_id or route.unique_id
if operation_id in operation_ids:
message = (
f"Duplicate Operation ID {operation_id} for function "
+ f"{route.endpoint.__name__}"
)
file_name = getattr(route.endpoint, "__globals__", {}).get("__file__")
if file_name:
message += f" at {file_name}"
warnings.warn(message)
operation_ids.add(operation_id)
operation["operationId"] = operation_id
if route.deprecated:
operation["deprecated"] = route.deprecated
return operation
def get_openapi_path(
*, route: routing.APIRoute, model_name_map: Dict[type, str], operation_ids: Set[str]
) -> Tuple[Dict[str, Any], Dict[str, Any], Dict[str, Any]]:
path = {}
security_schemes: Dict[str, Any] = {}
definitions: Dict[str, Any] = {}
assert route.methods is not None, "Methods must be a list"
if isinstance(route.response_class, DefaultPlaceholder):
current_response_class: Type[Response] = route.response_class.value
else:
current_response_class = route.response_class
assert current_response_class, "A response class is needed to generate OpenAPI"
route_response_media_type: Optional[str] = current_response_class.media_type
if route.include_in_schema:
for method in route.methods:
operation = get_openapi_operation_metadata(
route=route, method=method, operation_ids=operation_ids
)
parameters: List[Dict[str, Any]] = []
flat_dependant = get_flat_dependant(route.dependant, skip_repeats=True)
security_definitions, operation_security = get_openapi_security_definitions(
flat_dependant=flat_dependant
)
if operation_security:
operation.setdefault("security", []).extend(operation_security)
if security_definitions:
security_schemes.update(security_definitions)
all_route_params = get_flat_params(route.dependant)
operation_parameters = get_openapi_operation_parameters(
all_route_params=all_route_params, model_name_map=model_name_map
)
parameters.extend(operation_parameters)
if parameters:
operation["parameters"] = list(
{param["name"]: param for param in parameters}.values()
)
if method in METHODS_WITH_BODY:
request_body_oai = get_openapi_operation_request_body(
body_field=route.body_field, model_name_map=model_name_map
)
if request_body_oai:
operation["requestBody"] = request_body_oai
if route.callbacks:
callbacks = {}
for callback in route.callbacks:
if isinstance(callback, routing.APIRoute):
(
cb_path,
cb_security_schemes,
cb_definitions,
) = get_openapi_path(
route=callback,
model_name_map=model_name_map,
operation_ids=operation_ids,
)
callbacks[callback.name] = {callback.path: cb_path}
operation["callbacks"] = callbacks
if route.status_code is not None:
status_code = str(route.status_code)
else:
                # It would probably make more sense for all response classes to have an
                # explicit default status_code, and to extract it from them, instead of
                # doing these inspection tricks; that should probably change in the future.
                # TODO: probably make status_code a default class attribute for all
                # responses in Starlette
response_signature = inspect.signature(current_response_class.__init__)
status_code_param = response_signature.parameters.get("status_code")
if status_code_param is not None:
if isinstance(status_code_param.default, int):
status_code = str(status_code_param.default)
operation.setdefault("responses", {}).setdefault(status_code, {})[
"description"
] = route.response_description
if (
route_response_media_type
and route.status_code not in STATUS_CODES_WITH_NO_BODY
):
response_schema = {"type": "string"}
if lenient_issubclass(current_response_class, JSONResponse):
if route.response_field:
response_schema, _, _ = field_schema(
route.response_field,
model_name_map=model_name_map,
ref_prefix=REF_PREFIX,
)
else:
response_schema = {}
operation.setdefault("responses", {}).setdefault(
status_code, {}
).setdefault("content", {}).setdefault(route_response_media_type, {})[
"schema"
] = response_schema
if route.responses:
operation_responses = operation.setdefault("responses", {})
for (
additional_status_code,
additional_response,
) in route.responses.items():
process_response = additional_response.copy()
process_response.pop("model", None)
status_code_key = str(additional_status_code).upper()
if status_code_key == "DEFAULT":
status_code_key = "default"
openapi_response = operation_responses.setdefault(
status_code_key, {}
)
assert isinstance(
process_response, dict
), "An additional response must be a dict"
field = route.response_fields.get(additional_status_code)
additional_field_schema: Optional[Dict[str, Any]] = None
if field:
additional_field_schema, _, _ = field_schema(
field, model_name_map=model_name_map, ref_prefix=REF_PREFIX
)
media_type = route_response_media_type or "application/json"
additional_schema = (
process_response.setdefault("content", {})
.setdefault(media_type, {})
.setdefault("schema", {})
)
deep_dict_update(additional_schema, additional_field_schema)
status_text: Optional[str] = status_code_ranges.get(
str(additional_status_code).upper()
) or http.client.responses.get(int(additional_status_code))
description = (
process_response.get("description")
or openapi_response.get("description")
or status_text
or "Additional Response"
)
deep_dict_update(openapi_response, process_response)
openapi_response["description"] = description
http422 = str(HTTP_422_UNPROCESSABLE_ENTITY)
if (all_route_params or route.body_field) and not any(
[
status in operation["responses"]
for status in [http422, "4XX", "default"]
]
):
operation["responses"][http422] = {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {"$ref": REF_PREFIX + "HTTPValidationError"}
}
},
}
if "ValidationError" not in definitions:
definitions.update(
{
"ValidationError": validation_error_definition,
"HTTPValidationError": validation_error_response_definition,
}
)
if route.openapi_extra:
deep_dict_update(operation, route.openapi_extra)
path[method.lower()] = operation
return path, security_schemes, definitions
def get_flat_models_from_routes(
routes: Sequence[BaseRoute],
) -> Set[Union[Type[BaseModel], Type[Enum]]]:
body_fields_from_routes: List[ModelField] = []
responses_from_routes: List[ModelField] = []
request_fields_from_routes: List[ModelField] = []
callback_flat_models: Set[Union[Type[BaseModel], Type[Enum]]] = set()
for route in routes:
if getattr(route, "include_in_schema", None) and isinstance(
route, routing.APIRoute
):
if route.body_field:
assert isinstance(
route.body_field, ModelField
), "A request body must be a Pydantic Field"
body_fields_from_routes.append(route.body_field)
if route.response_field:
responses_from_routes.append(route.response_field)
if route.response_fields:
responses_from_routes.extend(route.response_fields.values())
if route.callbacks:
callback_flat_models |= get_flat_models_from_routes(route.callbacks)
params = get_flat_params(route.dependant)
request_fields_from_routes.extend(params)
flat_models = callback_flat_models | get_flat_models_from_fields(
body_fields_from_routes + responses_from_routes + request_fields_from_routes,
known_models=set(),
)
return flat_models
def get_openapi(
*,
title: str,
version: str,
openapi_version: str = "3.0.2",
description: Optional[str] = None,
routes: Sequence[BaseRoute],
tags: Optional[List[Dict[str, Any]]] = None,
servers: Optional[List[Dict[str, Union[str, Any]]]] = None,
terms_of_service: Optional[str] = None,
contact: Optional[Dict[str, Union[str, Any]]] = None,
license_info: Optional[Dict[str, Union[str, Any]]] = None,
) -> Dict[str, Any]:
info: Dict[str, Any] = {"title": title, "version": version}
if description:
info["description"] = description
if terms_of_service:
info["termsOfService"] = terms_of_service
if contact:
info["contact"] = contact
if license_info:
info["license"] = license_info
output: Dict[str, Any] = {"openapi": openapi_version, "info": info}
if servers:
output["servers"] = servers
components: Dict[str, Dict[str, Any]] = {}
paths: Dict[str, Dict[str, Any]] = {}
operation_ids: Set[str] = set()
flat_models = get_flat_models_from_routes(routes)
model_name_map = get_model_name_map(flat_models)
definitions = get_model_definitions(
flat_models=flat_models, model_name_map=model_name_map
)
for route in routes:
if isinstance(route, routing.APIRoute):
result = get_openapi_path(
route=route, model_name_map=model_name_map, operation_ids=operation_ids
)
if result:
path, security_schemes, path_definitions = result
if path:
paths.setdefault(route.path_format, {}).update(path)
if security_schemes:
components.setdefault("securitySchemes", {}).update(
security_schemes
)
if path_definitions:
definitions.update(path_definitions)
if definitions:
components["schemas"] = {k: definitions[k] for k in sorted(definitions)}
if components:
output["components"] = components
output["paths"] = paths
if tags:
output["tags"] = tags
    return jsonable_encoder(OpenAPI(**output), by_alias=True, exclude_none=True)  # type: ignore

# ---- end of zdppy_api/openapi/utils.py ----
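# Usage sketch for get_openapi above (illustrative; assumes the package exposes
# a FastAPI-style `app` whose routes can be passed in):
#
#     schema = get_openapi(
#         title="My API",
#         version="1.0.0",
#         description="Hand-generated schema",
#         routes=app.routes,
#     )
#     print(schema["info"]["title"])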
from enum import Enum
from typing import Any, Callable, Dict, Iterable, List, Optional, Union
from ..logger import logger
from pydantic import AnyUrl, BaseModel, Field
try:
import email_validator # type: ignore
assert email_validator # make autoflake ignore the unused import
from pydantic import EmailStr
except ImportError: # pragma: no cover
class EmailStr(str): # type: ignore
@classmethod
def __get_validators__(cls) -> Iterable[Callable[..., Any]]:
yield cls.validate
@classmethod
def validate(cls, v: Any) -> str:
logger.warning(
"email-validator not installed, email fields will be treated as str.\n"
"To install, run: pip install email-validator"
)
return str(v)
class Contact(BaseModel):
name: Optional[str] = None
url: Optional[AnyUrl] = None
email: Optional[EmailStr] = None
class Config:
extra = "allow"
class License(BaseModel):
name: str
url: Optional[AnyUrl] = None
class Config:
extra = "allow"
class Info(BaseModel):
title: str
description: Optional[str] = None
termsOfService: Optional[str] = None
contact: Optional[Contact] = None
license: Optional[License] = None
version: str
class Config:
extra = "allow"
class ServerVariable(BaseModel):
enum: Optional[List[str]] = None
default: str
description: Optional[str] = None
class Config:
extra = "allow"
class Server(BaseModel):
url: Union[AnyUrl, str]
description: Optional[str] = None
variables: Optional[Dict[str, ServerVariable]] = None
class Config:
extra = "allow"
class Reference(BaseModel):
ref: str = Field(alias="$ref")
class Discriminator(BaseModel):
propertyName: str
mapping: Optional[Dict[str, str]] = None
class XML(BaseModel):
name: Optional[str] = None
namespace: Optional[str] = None
prefix: Optional[str] = None
attribute: Optional[bool] = None
wrapped: Optional[bool] = None
class Config:
extra = "allow"
class ExternalDocumentation(BaseModel):
description: Optional[str] = None
url: AnyUrl
class Config:
extra = "allow"
class Schema(BaseModel):
ref: Optional[str] = Field(default=None, alias="$ref")
title: Optional[str] = None
multipleOf: Optional[float] = None
maximum: Optional[float] = None
exclusiveMaximum: Optional[float] = None
minimum: Optional[float] = None
exclusiveMinimum: Optional[float] = None
    maxLength: Optional[int] = Field(default=None, ge=0)
    minLength: Optional[int] = Field(default=None, ge=0)
    pattern: Optional[str] = None
    maxItems: Optional[int] = Field(default=None, ge=0)
    minItems: Optional[int] = Field(default=None, ge=0)
    uniqueItems: Optional[bool] = None
    maxProperties: Optional[int] = Field(default=None, ge=0)
    minProperties: Optional[int] = Field(default=None, ge=0)
required: Optional[List[str]] = None
enum: Optional[List[Any]] = None
type: Optional[str] = None
allOf: Optional[List["Schema"]] = None
oneOf: Optional[List["Schema"]] = None
anyOf: Optional[List["Schema"]] = None
not_: Optional["Schema"] = Field(default=None, alias="not")
items: Optional[Union["Schema", List["Schema"]]] = None
properties: Optional[Dict[str, "Schema"]] = None
additionalProperties: Optional[Union["Schema", Reference, bool]] = None
description: Optional[str] = None
format: Optional[str] = None
default: Optional[Any] = None
nullable: Optional[bool] = None
discriminator: Optional[Discriminator] = None
readOnly: Optional[bool] = None
writeOnly: Optional[bool] = None
xml: Optional[XML] = None
externalDocs: Optional[ExternalDocumentation] = None
example: Optional[Any] = None
deprecated: Optional[bool] = None
class Config:
extra: str = "allow"
class Example(BaseModel):
summary: Optional[str] = None
description: Optional[str] = None
value: Optional[Any] = None
externalValue: Optional[AnyUrl] = None
class Config:
extra = "allow"
class ParameterInType(Enum):
query = "query"
header = "header"
path = "path"
cookie = "cookie"
class Encoding(BaseModel):
contentType: Optional[str] = None
headers: Optional[Dict[str, Union["Header", Reference]]] = None
style: Optional[str] = None
explode: Optional[bool] = None
allowReserved: Optional[bool] = None
class Config:
extra = "allow"
class MediaType(BaseModel):
schema_: Optional[Union[Schema, Reference]] = Field(default=None, alias="schema")
example: Optional[Any] = None
examples: Optional[Dict[str, Union[Example, Reference]]] = None
encoding: Optional[Dict[str, Encoding]] = None
class Config:
extra = "allow"
class ParameterBase(BaseModel):
description: Optional[str] = None
required: Optional[bool] = None
deprecated: Optional[bool] = None
# Serialization rules for simple scenarios
style: Optional[str] = None
explode: Optional[bool] = None
allowReserved: Optional[bool] = None
schema_: Optional[Union[Schema, Reference]] = Field(default=None, alias="schema")
example: Optional[Any] = None
examples: Optional[Dict[str, Union[Example, Reference]]] = None
# Serialization rules for more complex scenarios
content: Optional[Dict[str, MediaType]] = None
class Config:
extra = "allow"
class Parameter(ParameterBase):
name: str
in_: ParameterInType = Field(alias="in")
class Header(ParameterBase):
pass
class RequestBody(BaseModel):
description: Optional[str] = None
content: Dict[str, MediaType]
required: Optional[bool] = None
class Config:
extra = "allow"
class Link(BaseModel):
operationRef: Optional[str] = None
operationId: Optional[str] = None
parameters: Optional[Dict[str, Union[Any, str]]] = None
requestBody: Optional[Union[Any, str]] = None
description: Optional[str] = None
server: Optional[Server] = None
class Config:
extra = "allow"
class Response(BaseModel):
description: str
headers: Optional[Dict[str, Union[Header, Reference]]] = None
content: Optional[Dict[str, MediaType]] = None
links: Optional[Dict[str, Union[Link, Reference]]] = None
class Config:
extra = "allow"
class Operation(BaseModel):
tags: Optional[List[str]] = None
summary: Optional[str] = None
description: Optional[str] = None
externalDocs: Optional[ExternalDocumentation] = None
operationId: Optional[str] = None
parameters: Optional[List[Union[Parameter, Reference]]] = None
requestBody: Optional[Union[RequestBody, Reference]] = None
# Using Any for Specification Extensions
responses: Dict[str, Union[Response, Any]]
callbacks: Optional[Dict[str, Union[Dict[str, "PathItem"], Reference]]] = None
deprecated: Optional[bool] = None
security: Optional[List[Dict[str, List[str]]]] = None
servers: Optional[List[Server]] = None
class Config:
extra = "allow"
class PathItem(BaseModel):
ref: Optional[str] = Field(default=None, alias="$ref")
summary: Optional[str] = None
description: Optional[str] = None
get: Optional[Operation] = None
put: Optional[Operation] = None
post: Optional[Operation] = None
delete: Optional[Operation] = None
options: Optional[Operation] = None
head: Optional[Operation] = None
patch: Optional[Operation] = None
trace: Optional[Operation] = None
servers: Optional[List[Server]] = None
parameters: Optional[List[Union[Parameter, Reference]]] = None
class Config:
extra = "allow"
class SecuritySchemeType(Enum):
apiKey = "apiKey"
http = "http"
oauth2 = "oauth2"
openIdConnect = "openIdConnect"
class SecurityBase(BaseModel):
type_: SecuritySchemeType = Field(alias="type")
description: Optional[str] = None
class Config:
extra = "allow"
class APIKeyIn(Enum):
query = "query"
header = "header"
cookie = "cookie"
class APIKey(SecurityBase):
type_ = Field(SecuritySchemeType.apiKey, alias="type")
in_: APIKeyIn = Field(alias="in")
name: str
class HTTPBase(SecurityBase):
type_ = Field(SecuritySchemeType.http, alias="type")
scheme: str
class HTTPBearer(HTTPBase):
scheme = "bearer"
bearerFormat: Optional[str] = None
class OAuthFlow(BaseModel):
refreshUrl: Optional[str] = None
scopes: Dict[str, str] = {}
class Config:
extra = "allow"
class OAuthFlowImplicit(OAuthFlow):
authorizationUrl: str
class OAuthFlowPassword(OAuthFlow):
tokenUrl: str
class OAuthFlowClientCredentials(OAuthFlow):
tokenUrl: str
class OAuthFlowAuthorizationCode(OAuthFlow):
authorizationUrl: str
tokenUrl: str
class OAuthFlows(BaseModel):
implicit: Optional[OAuthFlowImplicit] = None
password: Optional[OAuthFlowPassword] = None
clientCredentials: Optional[OAuthFlowClientCredentials] = None
authorizationCode: Optional[OAuthFlowAuthorizationCode] = None
class Config:
extra = "allow"
class OAuth2(SecurityBase):
type_ = Field(SecuritySchemeType.oauth2, alias="type")
flows: OAuthFlows
class OpenIdConnect(SecurityBase):
type_ = Field(SecuritySchemeType.openIdConnect, alias="type")
openIdConnectUrl: str
SecurityScheme = Union[APIKey, HTTPBase, OAuth2, OpenIdConnect, HTTPBearer]
class Components(BaseModel):
schemas: Optional[Dict[str, Union[Schema, Reference]]] = None
responses: Optional[Dict[str, Union[Response, Reference]]] = None
parameters: Optional[Dict[str, Union[Parameter, Reference]]] = None
examples: Optional[Dict[str, Union[Example, Reference]]] = None
requestBodies: Optional[Dict[str, Union[RequestBody, Reference]]] = None
headers: Optional[Dict[str, Union[Header, Reference]]] = None
securitySchemes: Optional[Dict[str, Union[SecurityScheme, Reference]]] = None
links: Optional[Dict[str, Union[Link, Reference]]] = None
# Using Any for Specification Extensions
callbacks: Optional[Dict[str, Union[Dict[str, PathItem], Reference, Any]]] = None
class Config:
extra = "allow"
class Tag(BaseModel):
name: str
description: Optional[str] = None
externalDocs: Optional[ExternalDocumentation] = None
class Config:
extra = "allow"
class OpenAPI(BaseModel):
openapi: str
info: Info
servers: Optional[List[Server]] = None
# Using Any for Specification Extensions
paths: Dict[str, Union[PathItem, Any]]
components: Optional[Components] = None
security: Optional[List[Dict[str, List[str]]]] = None
tags: Optional[List[Tag]] = None
externalDocs: Optional[ExternalDocumentation] = None
class Config:
extra = "allow"
Schema.update_forward_refs()
Operation.update_forward_refs()
Encoding.update_forward_refs()

# ---- end of zdppy_api/openapi/models.py ----
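# Minimal sketch of building a document from the models above (illustrative):
#
#     doc = OpenAPI(
#         openapi="3.0.2",
#         info=Info(title="Demo", version="0.1.0"),
#         paths={
#             "/ping": PathItem(
#                 get=Operation(responses={"200": Response(description="OK")})
#             )
#         },
#     )
#     print(doc.dict(by_alias=True, exclude_none=True)["paths"]["/ping"]["get"])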
from typing import Any, Dict, List, Optional, Union
from ..exceptions import HTTPException
from ..openapi.models import OAuth2 as OAuth2Model
from ..openapi.models import OAuthFlows as OAuthFlowsModel
from ..param_functions import Form
from ..security.base import SecurityBase
from ..security.utils import get_authorization_scheme_param
from zdppy_api.starlette.requests import Request
from zdppy_api.starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN
class OAuth2PasswordRequestForm:
"""
This is a dependency class, use it like:
@app.post("/login")
def login(form_data: OAuth2PasswordRequestForm = Depends()):
data = form_data.parse()
print(data.username)
print(data.password)
for scope in data.scopes:
print(scope)
if data.client_id:
print(data.client_id)
if data.client_secret:
print(data.client_secret)
return data
It creates the following Form request parameters in your endpoint:
grant_type: the OAuth2 spec says it is required and MUST be the fixed string "password".
Nevertheless, this dependency class is permissive and allows not passing it. If you want to enforce it,
use instead the OAuth2PasswordRequestFormStrict dependency.
username: username string. The OAuth2 spec requires the exact field name "username".
password: password string. The OAuth2 spec requires the exact field name "password".
scope: Optional string. Several scopes (each one a string) separated by spaces. E.g.
"items:read items:write users:read profile openid"
client_id: optional string. OAuth2 recommends sending the client_id and client_secret (if any)
using HTTP Basic auth, as: client_id:client_secret
client_secret: optional string. OAuth2 recommends sending the client_id and client_secret (if any)
using HTTP Basic auth, as: client_id:client_secret
"""
def __init__(
self,
grant_type: str = Form(default=None, regex="password"),
username: str = Form(),
password: str = Form(),
scope: str = Form(default=""),
client_id: Optional[str] = Form(default=None),
client_secret: Optional[str] = Form(default=None),
):
self.grant_type = grant_type
self.username = username
self.password = password
self.scopes = scope.split()
self.client_id = client_id
self.client_secret = client_secret
class OAuth2PasswordRequestFormStrict(OAuth2PasswordRequestForm):
"""
This is a dependency class, use it like:
@app.post("/login")
def login(form_data: OAuth2PasswordRequestFormStrict = Depends()):
data = form_data.parse()
print(data.username)
print(data.password)
for scope in data.scopes:
print(scope)
if data.client_id:
print(data.client_id)
if data.client_secret:
print(data.client_secret)
return data
It creates the following Form request parameters in your endpoint:
grant_type: the OAuth2 spec says it is required and MUST be the fixed string "password".
This dependency is strict about it. If you want to be permissive, use instead the
OAuth2PasswordRequestForm dependency class.
username: username string. The OAuth2 spec requires the exact field name "username".
password: password string. The OAuth2 spec requires the exact field name "password".
scope: Optional string. Several scopes (each one a string) separated by spaces. E.g.
"items:read items:write users:read profile openid"
client_id: optional string. OAuth2 recommends sending the client_id and client_secret (if any)
using HTTP Basic auth, as: client_id:client_secret
client_secret: optional string. OAuth2 recommends sending the client_id and client_secret (if any)
using HTTP Basic auth, as: client_id:client_secret
"""
def __init__(
self,
grant_type: str = Form(regex="password"),
username: str = Form(),
password: str = Form(),
scope: str = Form(default=""),
client_id: Optional[str] = Form(default=None),
client_secret: Optional[str] = Form(default=None),
):
super().__init__(
grant_type=grant_type,
username=username,
password=password,
scope=scope,
client_id=client_id,
client_secret=client_secret,
)
class OAuth2(SecurityBase):
def __init__(
self,
*,
flows: Union[OAuthFlowsModel, Dict[str, Dict[str, Any]]] = OAuthFlowsModel(),
scheme_name: Optional[str] = None,
description: Optional[str] = None,
auto_error: Optional[bool] = True
):
self.model = OAuth2Model(flows=flows, description=description)
self.scheme_name = scheme_name or self.__class__.__name__
self.auto_error = auto_error
async def __call__(self, request: Request) -> Optional[str]:
authorization: str = request.headers.get("Authorization")
if not authorization:
if self.auto_error:
raise HTTPException(
status_code=HTTP_403_FORBIDDEN, detail="Not authenticated"
)
else:
return None
return authorization
class OAuth2PasswordBearer(OAuth2):
def __init__(
self,
tokenUrl: str,
scheme_name: Optional[str] = None,
scopes: Optional[Dict[str, str]] = None,
description: Optional[str] = None,
auto_error: bool = True,
):
if not scopes:
scopes = {}
flows = OAuthFlowsModel(password={"tokenUrl": tokenUrl, "scopes": scopes})
super().__init__(
flows=flows,
scheme_name=scheme_name,
description=description,
auto_error=auto_error,
)
async def __call__(self, request: Request) -> Optional[str]:
authorization: str = request.headers.get("Authorization")
scheme, param = get_authorization_scheme_param(authorization)
if not authorization or scheme.lower() != "bearer":
if self.auto_error:
raise HTTPException(
status_code=HTTP_401_UNAUTHORIZED,
detail="Not authenticated",
headers={"WWW-Authenticate": "Bearer"},
)
else:
return None
return param
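# Usage sketch for OAuth2PasswordBearer (illustrative; `app`, `Depends`, and
# the token handling are assumptions, not part of this module):
#
#     oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")
#
#     @app.get("/users/me")
#     async def read_me(token: str = Depends(oauth2_scheme)):
#         # `token` is the raw bearer string; decode/verify it yourself
#         return {"token": token}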
class OAuth2AuthorizationCodeBearer(OAuth2):
def __init__(
self,
authorizationUrl: str,
tokenUrl: str,
refreshUrl: Optional[str] = None,
scheme_name: Optional[str] = None,
scopes: Optional[Dict[str, str]] = None,
description: Optional[str] = None,
auto_error: bool = True,
):
if not scopes:
scopes = {}
flows = OAuthFlowsModel(
authorizationCode={
"authorizationUrl": authorizationUrl,
"tokenUrl": tokenUrl,
"refreshUrl": refreshUrl,
"scopes": scopes,
}
)
super().__init__(
flows=flows,
scheme_name=scheme_name,
description=description,
auto_error=auto_error,
)
async def __call__(self, request: Request) -> Optional[str]:
authorization: str = request.headers.get("Authorization")
scheme, param = get_authorization_scheme_param(authorization)
if not authorization or scheme.lower() != "bearer":
if self.auto_error:
raise HTTPException(
status_code=HTTP_401_UNAUTHORIZED,
detail="Not authenticated",
headers={"WWW-Authenticate": "Bearer"},
)
else:
return None # pragma: nocover
return param
class SecurityScopes:
def __init__(self, scopes: Optional[List[str]] = None):
self.scopes = scopes or []
        self.scope_str = " ".join(self.scopes)

# ---- end of zdppy_api/security/oauth2.py ----
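# Usage sketch for SecurityScopes (illustrative; `Security`, `oauth2_scheme`,
# and the scope check are assumptions, not part of this module):
#
#     async def get_current_user(
#         security_scopes: SecurityScopes,
#         token: str = Depends(oauth2_scheme),
#     ):
#         required = security_scopes.scope_str  # e.g. "items:read items:write"
#         ...  # decode the token and verify it grants every required scope
#
#     @app.get("/items/")
#     async def read_items(user=Security(get_current_user, scopes=["items:read"])):
#         return []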
import binascii
from base64 import b64decode
from typing import Optional
from ..exceptions import HTTPException
from ..openapi.models import HTTPBase as HTTPBaseModel
from ..openapi.models import HTTPBearer as HTTPBearerModel
from ..security.base import SecurityBase
from ..security.utils import get_authorization_scheme_param
from pydantic import BaseModel
from zdppy_api.starlette.requests import Request
from zdppy_api.starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN
class HTTPBasicCredentials(BaseModel):
username: str
password: str
class HTTPAuthorizationCredentials(BaseModel):
scheme: str
credentials: str
class HTTPBase(SecurityBase):
def __init__(
self,
*,
scheme: str,
scheme_name: Optional[str] = None,
description: Optional[str] = None,
auto_error: bool = True,
):
self.model = HTTPBaseModel(scheme=scheme, description=description)
self.scheme_name = scheme_name or self.__class__.__name__
self.auto_error = auto_error
async def __call__(
self, request: Request
) -> Optional[HTTPAuthorizationCredentials]:
authorization: str = request.headers.get("Authorization")
scheme, credentials = get_authorization_scheme_param(authorization)
if not (authorization and scheme and credentials):
if self.auto_error:
raise HTTPException(
status_code=HTTP_403_FORBIDDEN, detail="Not authenticated"
)
else:
return None
return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials)
class HTTPBasic(HTTPBase):
def __init__(
self,
*,
scheme_name: Optional[str] = None,
realm: Optional[str] = None,
description: Optional[str] = None,
auto_error: bool = True,
):
self.model = HTTPBaseModel(scheme="basic", description=description)
self.scheme_name = scheme_name or self.__class__.__name__
self.realm = realm
self.auto_error = auto_error
async def __call__( # type: ignore
self, request: Request
) -> Optional[HTTPBasicCredentials]:
authorization: str = request.headers.get("Authorization")
scheme, param = get_authorization_scheme_param(authorization)
if self.realm:
unauthorized_headers = {"WWW-Authenticate": f'Basic realm="{self.realm}"'}
else:
unauthorized_headers = {"WWW-Authenticate": "Basic"}
invalid_user_credentials_exc = HTTPException(
status_code=HTTP_401_UNAUTHORIZED,
detail="Invalid authentication credentials",
headers=unauthorized_headers,
)
if not authorization or scheme.lower() != "basic":
if self.auto_error:
raise HTTPException(
status_code=HTTP_401_UNAUTHORIZED,
detail="Not authenticated",
headers=unauthorized_headers,
)
else:
return None
try:
data = b64decode(param).decode("ascii")
except (ValueError, UnicodeDecodeError, binascii.Error):
raise invalid_user_credentials_exc
username, separator, password = data.partition(":")
if not separator:
raise invalid_user_credentials_exc
return HTTPBasicCredentials(username=username, password=password)
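# Usage sketch for HTTPBasic (illustrative; `app`, `Depends`, and the expected
# credentials are assumptions; compare_digest avoids timing leaks):
#
#     import secrets
#
#     security = HTTPBasic()
#
#     @app.get("/admin")
#     def admin(credentials: HTTPBasicCredentials = Depends(security)):
#         ok_user = secrets.compare_digest(credentials.username, "admin")
#         ok_pass = secrets.compare_digest(credentials.password, "s3cret")
#         if not (ok_user and ok_pass):
#             raise HTTPException(status_code=HTTP_401_UNAUTHORIZED,
#                                 detail="Invalid credentials")
#         return {"user": credentials.username}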
class HTTPBearer(HTTPBase):
def __init__(
self,
*,
bearerFormat: Optional[str] = None,
scheme_name: Optional[str] = None,
description: Optional[str] = None,
auto_error: bool = True,
):
self.model = HTTPBearerModel(bearerFormat=bearerFormat, description=description)
self.scheme_name = scheme_name or self.__class__.__name__
self.auto_error = auto_error
async def __call__(
self, request: Request
) -> Optional[HTTPAuthorizationCredentials]:
authorization: str = request.headers.get("Authorization")
scheme, credentials = get_authorization_scheme_param(authorization)
if not (authorization and scheme and credentials):
if self.auto_error:
raise HTTPException(
status_code=HTTP_403_FORBIDDEN, detail="Not authenticated"
)
else:
return None
if scheme.lower() != "bearer":
if self.auto_error:
raise HTTPException(
status_code=HTTP_403_FORBIDDEN,
detail="Invalid authentication credentials",
)
else:
return None
return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials)
class HTTPDigest(HTTPBase):
def __init__(
self,
*,
scheme_name: Optional[str] = None,
description: Optional[str] = None,
auto_error: bool = True,
):
self.model = HTTPBaseModel(scheme="digest", description=description)
self.scheme_name = scheme_name or self.__class__.__name__
self.auto_error = auto_error
async def __call__(
self, request: Request
) -> Optional[HTTPAuthorizationCredentials]:
authorization: str = request.headers.get("Authorization")
scheme, credentials = get_authorization_scheme_param(authorization)
if not (authorization and scheme and credentials):
if self.auto_error:
raise HTTPException(
status_code=HTTP_403_FORBIDDEN, detail="Not authenticated"
)
else:
return None
if scheme.lower() != "digest":
raise HTTPException(
status_code=HTTP_403_FORBIDDEN,
detail="Invalid authentication credentials",
)
        return HTTPAuthorizationCredentials(scheme=scheme, credentials=credentials)

# ---- end of zdppy_api/security/http.py ----
from typing import Optional
from ..openapi.models import APIKey, APIKeyIn
from ..security.base import SecurityBase
from zdppy_api.starlette.exceptions import HTTPException
from zdppy_api.starlette.requests import Request
from zdppy_api.starlette.status import HTTP_403_FORBIDDEN
class APIKeyBase(SecurityBase):
pass
class APIKeyQuery(APIKeyBase):
def __init__(
self,
*,
name: str,
scheme_name: Optional[str] = None,
description: Optional[str] = None,
auto_error: bool = True
):
self.model: APIKey = APIKey(
**{"in": APIKeyIn.query}, name=name, description=description
)
self.scheme_name = scheme_name or self.__class__.__name__
self.auto_error = auto_error
async def __call__(self, request: Request) -> Optional[str]:
api_key: str = request.query_params.get(self.model.name)
if not api_key:
if self.auto_error:
raise HTTPException(
status_code=HTTP_403_FORBIDDEN, detail="Not authenticated"
)
else:
return None
return api_key
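# Usage sketch (illustrative; the parameter name "api_key" is an assumption):
#
#     api_key_query = APIKeyQuery(name="api_key")
#
#     @app.get("/data")
#     def read_data(api_key: str = Depends(api_key_query)):
#         # Called as GET /data?api_key=...; a missing key raises 403 unless
#         # auto_error=False, in which case the dependency resolves to None.
#         return {"api_key": api_key}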
class APIKeyHeader(APIKeyBase):
def __init__(
self,
*,
name: str,
scheme_name: Optional[str] = None,
description: Optional[str] = None,
auto_error: bool = True
):
self.model: APIKey = APIKey(
**{"in": APIKeyIn.header}, name=name, description=description
)
self.scheme_name = scheme_name or self.__class__.__name__
self.auto_error = auto_error
async def __call__(self, request: Request) -> Optional[str]:
api_key: str = request.headers.get(self.model.name)
if not api_key:
if self.auto_error:
raise HTTPException(
status_code=HTTP_403_FORBIDDEN, detail="Not authenticated"
)
else:
return None
return api_key
class APIKeyCookie(APIKeyBase):
def __init__(
self,
*,
name: str,
scheme_name: Optional[str] = None,
description: Optional[str] = None,
auto_error: bool = True
):
self.model: APIKey = APIKey(
**{"in": APIKeyIn.cookie}, name=name, description=description
)
self.scheme_name = scheme_name or self.__class__.__name__
self.auto_error = auto_error
async def __call__(self, request: Request) -> Optional[str]:
api_key = request.cookies.get(self.model.name)
if not api_key:
if self.auto_error:
raise HTTPException(
status_code=HTTP_403_FORBIDDEN, detail="Not authenticated"
)
else:
return None
return api_key | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/security/api_key.py | api_key.py |
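# Usage sketch for the header and cookie variants (names are assumptions):
#
#     api_key_header = APIKeyHeader(name="X-API-Key")
#     api_key_cookie = APIKeyCookie(name="session_key", auto_error=False)
#
#     @app.get("/profile")
#     def profile(
#         header_key: str = Depends(api_key_header),
#         cookie_key: Optional[str] = Depends(api_key_cookie),
#     ):
#         return {"header": header_key, "cookie": cookie_key}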
import enum
import json
import typing
from zdppy_api.starlette.requests import HTTPConnection
from zdppy_api.starlette.types import Message, Receive, Scope, Send
class WebSocketState(enum.Enum):
CONNECTING = 0
CONNECTED = 1
DISCONNECTED = 2
class WebSocketDisconnect(Exception):
def __init__(self, code: int = 1000, reason: typing.Optional[str] = None) -> None:
self.code = code
self.reason = reason or ""
class WebSocket(HTTPConnection):
def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:
super().__init__(scope)
assert scope["type"] == "websocket"
self._receive = receive
self._send = send
self.client_state = WebSocketState.CONNECTING
self.application_state = WebSocketState.CONNECTING
async def receive(self) -> Message:
"""
Receive ASGI websocket messages, ensuring valid state transitions.
"""
if self.client_state == WebSocketState.CONNECTING:
message = await self._receive()
message_type = message["type"]
if message_type != "websocket.connect":
raise RuntimeError(
'Expected ASGI message "websocket.connect", '
f"but got {message_type!r}"
)
self.client_state = WebSocketState.CONNECTED
return message
elif self.client_state == WebSocketState.CONNECTED:
message = await self._receive()
message_type = message["type"]
if message_type not in {"websocket.receive", "websocket.disconnect"}:
raise RuntimeError(
'Expected ASGI message "websocket.receive" or '
f'"websocket.disconnect", but got {message_type!r}'
)
if message_type == "websocket.disconnect":
self.client_state = WebSocketState.DISCONNECTED
return message
else:
raise RuntimeError(
'Cannot call "receive" once a disconnect message has been received.'
)
async def send(self, message: Message) -> None:
"""
Send ASGI websocket messages, ensuring valid state transitions.
"""
if self.application_state == WebSocketState.CONNECTING:
message_type = message["type"]
if message_type not in {"websocket.accept", "websocket.close"}:
raise RuntimeError(
'Expected ASGI message "websocket.connect", '
f"but got {message_type!r}"
)
if message_type == "websocket.close":
self.application_state = WebSocketState.DISCONNECTED
else:
self.application_state = WebSocketState.CONNECTED
await self._send(message)
elif self.application_state == WebSocketState.CONNECTED:
message_type = message["type"]
if message_type not in {"websocket.send", "websocket.close"}:
raise RuntimeError(
'Expected ASGI message "websocket.send" or "websocket.close", '
f"but got {message_type!r}"
)
if message_type == "websocket.close":
self.application_state = WebSocketState.DISCONNECTED
await self._send(message)
else:
raise RuntimeError('Cannot call "send" once a close message has been sent.')
async def accept(
self,
subprotocol: typing.Optional[str] = None,
headers: typing.Optional[typing.Iterable[typing.Tuple[bytes, bytes]]] = None,
) -> None:
headers = headers or []
if self.client_state == WebSocketState.CONNECTING:
# If we haven't yet seen the 'connect' message, then wait for it first.
await self.receive()
await self.send(
{"type": "websocket.accept", "subprotocol": subprotocol, "headers": headers}
)
def _raise_on_disconnect(self, message: Message) -> None:
if message["type"] == "websocket.disconnect":
raise WebSocketDisconnect(message["code"])
async def receive_text(self) -> str:
if self.application_state != WebSocketState.CONNECTED:
raise RuntimeError(
'WebSocket is not connected. Need to call "accept" first.'
)
message = await self.receive()
self._raise_on_disconnect(message)
return message["text"]
async def receive_bytes(self) -> bytes:
if self.application_state != WebSocketState.CONNECTED:
raise RuntimeError(
'WebSocket is not connected. Need to call "accept" first.'
)
message = await self.receive()
self._raise_on_disconnect(message)
return message["bytes"]
async def receive_json(self, mode: str = "text") -> typing.Any:
if mode not in {"text", "binary"}:
raise RuntimeError('The "mode" argument should be "text" or "binary".')
if self.application_state != WebSocketState.CONNECTED:
raise RuntimeError(
'WebSocket is not connected. Need to call "accept" first.'
)
message = await self.receive()
self._raise_on_disconnect(message)
if mode == "text":
text = message["text"]
else:
text = message["bytes"].decode("utf-8")
return json.loads(text)
async def iter_text(self) -> typing.AsyncIterator[str]:
try:
while True:
yield await self.receive_text()
except WebSocketDisconnect:
pass
async def iter_bytes(self) -> typing.AsyncIterator[bytes]:
try:
while True:
yield await self.receive_bytes()
except WebSocketDisconnect:
pass
async def iter_json(self) -> typing.AsyncIterator[typing.Any]:
try:
while True:
yield await self.receive_json()
except WebSocketDisconnect:
pass
async def send_text(self, data: str) -> None:
await self.send({"type": "websocket.send", "text": data})
async def send_bytes(self, data: bytes) -> None:
await self.send({"type": "websocket.send", "bytes": data})
async def send_json(self, data: typing.Any, mode: str = "text") -> None:
if mode not in {"text", "binary"}:
raise RuntimeError('The "mode" argument should be "text" or "binary".')
text = json.dumps(data)
if mode == "text":
await self.send({"type": "websocket.send", "text": text})
else:
await self.send({"type": "websocket.send", "bytes": text.encode("utf-8")})
async def close(
self, code: int = 1000, reason: typing.Optional[str] = None
) -> None:
await self.send(
{"type": "websocket.close", "code": code, "reason": reason or ""}
)
class WebSocketClose:
def __init__(self, code: int = 1000, reason: typing.Optional[str] = None) -> None:
self.code = code
self.reason = reason or ""
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await send(
{"type": "websocket.close", "code": self.code, "reason": self.reason}
) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/websockets.py | websockets.py |
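# Usage sketch: a minimal echo endpoint driving the state machine above.
# How the endpoint is registered depends on the host application and is
# assumed here.
#
#     async def echo(websocket: WebSocket) -> None:
#         await websocket.accept()
#         # iter_text() exits cleanly when the client disconnects.
#         async for message in websocket.iter_text():
#             await websocket.send_text(f"echo: {message}")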
import importlib.util
import os
import stat
import typing
from email.utils import parsedate
from zdppy_api import anyio
from zdppy_api.starlette.datastructures import URL, Headers
from zdppy_api.starlette.exceptions import HTTPException
from zdppy_api.starlette.responses import FileResponse, RedirectResponse, Response
from zdppy_api.starlette.types import Receive, Scope, Send
PathLike = typing.Union[str, "os.PathLike[str]"]
class NotModifiedResponse(Response):
NOT_MODIFIED_HEADERS = (
"cache-control",
"content-location",
"date",
"etag",
"expires",
"vary",
)
def __init__(self, headers: Headers):
super().__init__(
status_code=304,
headers={
name: value
for name, value in headers.items()
if name in self.NOT_MODIFIED_HEADERS
},
)
class StaticFiles:
def __init__(
self,
*,
directory: typing.Optional[PathLike] = None,
packages: typing.Optional[
typing.List[typing.Union[str, typing.Tuple[str, str]]]
] = None,
html: bool = False,
check_dir: bool = True,
) -> None:
self.directory = directory
self.packages = packages
self.all_directories = self.get_directories(directory, packages)
self.html = html
self.config_checked = False
if check_dir and directory is not None and not os.path.isdir(directory):
raise RuntimeError(f"Directory '{directory}' does not exist")
def get_directories(
self,
directory: typing.Optional[PathLike] = None,
packages: typing.Optional[
typing.List[typing.Union[str, typing.Tuple[str, str]]]
] = None,
) -> typing.List[PathLike]:
"""
Given `directory` and `packages` arguments, return a list of all the
directories that should be used for serving static files from.
"""
directories = []
if directory is not None:
directories.append(directory)
for package in packages or []:
if isinstance(package, tuple):
package, statics_dir = package
else:
statics_dir = "statics"
spec = importlib.util.find_spec(package)
assert spec is not None, f"Package {package!r} could not be found."
assert spec.origin is not None, f"Package {package!r} could not be found."
package_directory = os.path.normpath(
os.path.join(spec.origin, "..", statics_dir)
)
assert os.path.isdir(
package_directory
), f"Directory '{statics_dir!r}' in package {package!r} could not be found."
directories.append(package_directory)
return directories
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
"""
The ASGI entry point.
"""
assert scope["type"] == "http"
if not self.config_checked:
await self.check_config()
self.config_checked = True
path = self.get_path(scope)
response = await self.get_response(path, scope)
await response(scope, receive, send)
def get_path(self, scope: Scope) -> str:
"""
Given the ASGI scope, return the `path` string to serve up,
with OS specific path separators, and any '..', '.' components removed.
"""
return os.path.normpath(os.path.join(*scope["path"].split("/")))
async def get_response(self, path: str, scope: Scope) -> Response:
"""
Returns an HTTP response, given the incoming path, method and request headers.
"""
if scope["method"] not in ("GET", "HEAD"):
raise HTTPException(status_code=405)
try:
full_path, stat_result = await anyio.to_thread.run_sync(
self.lookup_path, path
)
except PermissionError:
raise HTTPException(status_code=401)
except OSError:
raise
if stat_result and stat.S_ISREG(stat_result.st_mode):
# We have a static file to serve.
return self.file_response(full_path, stat_result, scope)
elif stat_result and stat.S_ISDIR(stat_result.st_mode) and self.html:
# We're in HTML mode, and have got a directory URL.
# Check if we have 'index.html' file to serve.
index_path = os.path.join(path, "index.html")
full_path, stat_result = await anyio.to_thread.run_sync(
self.lookup_path, index_path
)
if stat_result is not None and stat.S_ISREG(stat_result.st_mode):
if not scope["path"].endswith("/"):
# Directory URLs should redirect to always end in "/".
url = URL(scope=scope)
url = url.replace(path=url.path + "/")
return RedirectResponse(url=url)
return self.file_response(full_path, stat_result, scope)
if self.html:
# Check for '404.html' if we're in HTML mode.
full_path, stat_result = await anyio.to_thread.run_sync(
self.lookup_path, "404.html"
)
if stat_result and stat.S_ISREG(stat_result.st_mode):
return FileResponse(
full_path,
stat_result=stat_result,
method=scope["method"],
status_code=404,
)
raise HTTPException(status_code=404)
def lookup_path(
self, path: str
) -> typing.Tuple[str, typing.Optional[os.stat_result]]:
for directory in self.all_directories:
full_path = os.path.realpath(os.path.join(directory, path))
directory = os.path.realpath(directory)
if os.path.commonprefix([full_path, directory]) != directory:
# Don't allow misbehaving clients to break out of the static files
# directory.
continue
try:
return full_path, os.stat(full_path)
except (FileNotFoundError, NotADirectoryError):
continue
return "", None
def file_response(
self,
full_path: PathLike,
stat_result: os.stat_result,
scope: Scope,
status_code: int = 200,
) -> Response:
method = scope["method"]
request_headers = Headers(scope=scope)
response = FileResponse(
full_path, status_code=status_code, stat_result=stat_result, method=method
)
if self.is_not_modified(response.headers, request_headers):
return NotModifiedResponse(response.headers)
return response
async def check_config(self) -> None:
"""
Perform a one-off configuration check that StaticFiles is actually
pointed at a directory, so that we can raise loud errors rather than
just returning 404 responses.
"""
if self.directory is None:
return
try:
stat_result = await anyio.to_thread.run_sync(os.stat, self.directory)
except FileNotFoundError:
raise RuntimeError(
f"StaticFiles directory '{self.directory}' does not exist."
)
if not (stat.S_ISDIR(stat_result.st_mode) or stat.S_ISLNK(stat_result.st_mode)):
raise RuntimeError(
f"StaticFiles path '{self.directory}' is not a directory."
)
def is_not_modified(
self, response_headers: Headers, request_headers: Headers
) -> bool:
"""
Given the request and response headers, return `True` if an HTTP
"Not Modified" response could be returned instead.
"""
try:
if_none_match = request_headers["if-none-match"]
etag = response_headers["etag"]
if if_none_match == etag:
return True
except KeyError:
pass
try:
if_modified_since = parsedate(request_headers["if-modified-since"])
last_modified = parsedate(response_headers["last-modified"])
if (
if_modified_since is not None
and last_modified is not None
and if_modified_since >= last_modified
):
return True
except KeyError:
pass
return False | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/staticfiles.py | staticfiles.py |
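# Usage sketch: mounting StaticFiles under /static (directory name and mount
# path are assumptions):
#
#     from zdppy_api.starlette.applications import Starlette
#     from zdppy_api.starlette.routing import Mount
#
#     routes = [
#         Mount("/static", app=StaticFiles(directory="static"), name="static"),
#     ]
#     app = Starlette(routes=routes)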
import typing
from enum import Enum
from urllib.parse import unquote_plus
from zdppy_api.starlette.datastructures import FormData, Headers, UploadFile
try:
import multipart
from multipart.multipart import parse_options_header
except ImportError: # pragma: nocover
parse_options_header = None
multipart = None
class FormMessage(Enum):
FIELD_START = 1
FIELD_NAME = 2
FIELD_DATA = 3
FIELD_END = 4
END = 5
class MultiPartMessage(Enum):
PART_BEGIN = 1
PART_DATA = 2
PART_END = 3
HEADER_FIELD = 4
HEADER_VALUE = 5
HEADER_END = 6
HEADERS_FINISHED = 7
END = 8
def _user_safe_decode(src: bytes, codec: str) -> str:
try:
return src.decode(codec)
except (UnicodeDecodeError, LookupError):
return src.decode("latin-1")
class FormParser:
def __init__(
self, headers: Headers, stream: typing.AsyncGenerator[bytes, None]
) -> None:
assert (
multipart is not None
), "The `python-multipart` library must be installed to use form parsing."
self.headers = headers
self.stream = stream
self.messages: typing.List[typing.Tuple[FormMessage, bytes]] = []
def on_field_start(self) -> None:
message = (FormMessage.FIELD_START, b"")
self.messages.append(message)
def on_field_name(self, data: bytes, start: int, end: int) -> None:
message = (FormMessage.FIELD_NAME, data[start:end])
self.messages.append(message)
def on_field_data(self, data: bytes, start: int, end: int) -> None:
message = (FormMessage.FIELD_DATA, data[start:end])
self.messages.append(message)
def on_field_end(self) -> None:
message = (FormMessage.FIELD_END, b"")
self.messages.append(message)
def on_end(self) -> None:
message = (FormMessage.END, b"")
self.messages.append(message)
async def parse(self) -> FormData:
# Callbacks dictionary.
callbacks = {
"on_field_start": self.on_field_start,
"on_field_name": self.on_field_name,
"on_field_data": self.on_field_data,
"on_field_end": self.on_field_end,
"on_end": self.on_end,
}
# Create the parser.
parser = multipart.QuerystringParser(callbacks)
field_name = b""
field_value = b""
items: typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]] = []
# Feed the parser with data from the request.
async for chunk in self.stream:
if chunk:
parser.write(chunk)
else:
parser.finalize()
messages = list(self.messages)
self.messages.clear()
for message_type, message_bytes in messages:
if message_type == FormMessage.FIELD_START:
field_name = b""
field_value = b""
elif message_type == FormMessage.FIELD_NAME:
field_name += message_bytes
elif message_type == FormMessage.FIELD_DATA:
field_value += message_bytes
elif message_type == FormMessage.FIELD_END:
name = unquote_plus(field_name.decode("latin-1"))
value = unquote_plus(field_value.decode("latin-1"))
items.append((name, value))
return FormData(items)
class MultiPartParser:
def __init__(
self, headers: Headers, stream: typing.AsyncGenerator[bytes, None]
) -> None:
assert (
multipart is not None
), "The `python-multipart` library must be installed to use form parsing."
self.headers = headers
self.stream = stream
self.messages: typing.List[typing.Tuple[MultiPartMessage, bytes]] = []
def on_part_begin(self) -> None:
message = (MultiPartMessage.PART_BEGIN, b"")
self.messages.append(message)
def on_part_data(self, data: bytes, start: int, end: int) -> None:
message = (MultiPartMessage.PART_DATA, data[start:end])
self.messages.append(message)
def on_part_end(self) -> None:
message = (MultiPartMessage.PART_END, b"")
self.messages.append(message)
def on_header_field(self, data: bytes, start: int, end: int) -> None:
message = (MultiPartMessage.HEADER_FIELD, data[start:end])
self.messages.append(message)
def on_header_value(self, data: bytes, start: int, end: int) -> None:
message = (MultiPartMessage.HEADER_VALUE, data[start:end])
self.messages.append(message)
def on_header_end(self) -> None:
message = (MultiPartMessage.HEADER_END, b"")
self.messages.append(message)
def on_headers_finished(self) -> None:
message = (MultiPartMessage.HEADERS_FINISHED, b"")
self.messages.append(message)
def on_end(self) -> None:
message = (MultiPartMessage.END, b"")
self.messages.append(message)
async def parse(self) -> FormData:
# Parse the Content-Type header to get the multipart boundary.
content_type, params = parse_options_header(self.headers["Content-Type"])
charset = params.get(b"charset", "utf-8")
        if isinstance(charset, bytes):
            charset = charset.decode("latin-1")
boundary = params[b"boundary"]
# Callbacks dictionary.
callbacks = {
"on_part_begin": self.on_part_begin,
"on_part_data": self.on_part_data,
"on_part_end": self.on_part_end,
"on_header_field": self.on_header_field,
"on_header_value": self.on_header_value,
"on_header_end": self.on_header_end,
"on_headers_finished": self.on_headers_finished,
"on_end": self.on_end,
}
# Create the parser.
parser = multipart.MultipartParser(boundary, callbacks)
header_field = b""
header_value = b""
content_disposition = None
content_type = b""
field_name = ""
data = b""
file: typing.Optional[UploadFile] = None
items: typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]] = []
item_headers: typing.List[typing.Tuple[bytes, bytes]] = []
# Feed the parser with data from the request.
async for chunk in self.stream:
parser.write(chunk)
messages = list(self.messages)
self.messages.clear()
for message_type, message_bytes in messages:
if message_type == MultiPartMessage.PART_BEGIN:
content_disposition = None
content_type = b""
data = b""
item_headers = []
elif message_type == MultiPartMessage.HEADER_FIELD:
header_field += message_bytes
elif message_type == MultiPartMessage.HEADER_VALUE:
header_value += message_bytes
elif message_type == MultiPartMessage.HEADER_END:
field = header_field.lower()
if field == b"content-disposition":
content_disposition = header_value
elif field == b"content-type":
content_type = header_value
item_headers.append((field, header_value))
header_field = b""
header_value = b""
elif message_type == MultiPartMessage.HEADERS_FINISHED:
disposition, options = parse_options_header(content_disposition)
field_name = _user_safe_decode(options[b"name"], charset)
if b"filename" in options:
filename = _user_safe_decode(options[b"filename"], charset)
file = UploadFile(
filename=filename,
content_type=content_type.decode("latin-1"),
headers=Headers(raw=item_headers),
)
else:
file = None
elif message_type == MultiPartMessage.PART_DATA:
if file is None:
data += message_bytes
else:
await file.write(message_bytes)
elif message_type == MultiPartMessage.PART_END:
if file is None:
items.append((field_name, _user_safe_decode(data, charset)))
else:
await file.seek(0)
items.append((field_name, file))
parser.finalize()
return FormData(items) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/formparsers.py | formparsers.py |
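# Note: these parsers are rarely instantiated directly; `Request.form()`
# picks FormParser or MultiPartParser based on the Content-Type header.
# A direct-usage sketch (the `request` object is an assumption):
#
#     form = await MultiPartParser(request.headers, request.stream()).parse()
#     upload = form["file"]  # an UploadFile for multipart file fields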
import os
import typing
from collections.abc import MutableMapping
from pathlib import Path
class undefined:
pass
class EnvironError(Exception):
pass
class Environ(MutableMapping):
def __init__(self, environ: typing.MutableMapping = os.environ):
self._environ = environ
self._has_been_read: typing.Set[typing.Any] = set()
def __getitem__(self, key: typing.Any) -> typing.Any:
self._has_been_read.add(key)
return self._environ.__getitem__(key)
def __setitem__(self, key: typing.Any, value: typing.Any) -> None:
if key in self._has_been_read:
raise EnvironError(
f"Attempting to set environ['{key}'], but the value has already been "
"read."
)
self._environ.__setitem__(key, value)
def __delitem__(self, key: typing.Any) -> None:
if key in self._has_been_read:
raise EnvironError(
f"Attempting to delete environ['{key}'], but the value has already "
"been read."
)
self._environ.__delitem__(key)
def __iter__(self) -> typing.Iterator:
return iter(self._environ)
def __len__(self) -> int:
return len(self._environ)
environ = Environ()
T = typing.TypeVar("T")
class Config:
def __init__(
self,
env_file: typing.Optional[typing.Union[str, Path]] = None,
environ: typing.Mapping[str, str] = environ,
) -> None:
self.environ = environ
self.file_values: typing.Dict[str, str] = {}
if env_file is not None and os.path.isfile(env_file):
self.file_values = self._read_file(env_file)
@typing.overload
def __call__(
self, key: str, cast: typing.Type[T], default: T = ...
) -> T: # pragma: no cover
...
@typing.overload
def __call__(
self, key: str, cast: typing.Type[str] = ..., default: str = ...
) -> str: # pragma: no cover
...
@typing.overload
def __call__(
self,
key: str,
cast: typing.Callable[[typing.Any], T] = ...,
default: typing.Any = ...,
) -> T: # pragma: no cover
...
@typing.overload
def __call__(
self, key: str, cast: typing.Type[str] = ..., default: T = ...
) -> typing.Union[T, str]: # pragma: no cover
...
def __call__(
self,
key: str,
cast: typing.Optional[typing.Callable] = None,
default: typing.Any = undefined,
) -> typing.Any:
return self.get(key, cast, default)
def get(
self,
key: str,
cast: typing.Optional[typing.Callable] = None,
default: typing.Any = undefined,
) -> typing.Any:
if key in self.environ:
value = self.environ[key]
return self._perform_cast(key, value, cast)
if key in self.file_values:
value = self.file_values[key]
return self._perform_cast(key, value, cast)
if default is not undefined:
return self._perform_cast(key, default, cast)
raise KeyError(f"Config '{key}' is missing, and has no default.")
def _read_file(self, file_name: typing.Union[str, Path]) -> typing.Dict[str, str]:
file_values: typing.Dict[str, str] = {}
with open(file_name) as input_file:
for line in input_file.readlines():
line = line.strip()
if "=" in line and not line.startswith("#"):
key, value = line.split("=", 1)
key = key.strip()
value = value.strip().strip("\"'")
file_values[key] = value
return file_values
def _perform_cast(
self, key: str, value: typing.Any, cast: typing.Optional[typing.Callable] = None
) -> typing.Any:
if cast is None or value is None:
return value
elif cast is bool and isinstance(value, str):
mapping = {"true": True, "1": True, "false": False, "0": False}
value = value.lower()
if value not in mapping:
raise ValueError(
f"Config '{key}' has value '{value}'. Not a valid bool."
)
return mapping[value]
try:
return cast(value)
except (TypeError, ValueError):
raise ValueError(
f"Config '{key}' has value '{value}'. Not a valid {cast.__name__}."
) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/config.py | config.py |
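# Usage sketch (the ".env" file and the keys below are assumptions):
#
#     config = Config(".env")
#     DEBUG = config("DEBUG", cast=bool, default=False)
#     DATABASE_URL = config("DATABASE_URL")  # raises KeyError if unset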
import math
import typing
import uuid
T = typing.TypeVar("T")
class Convertor(typing.Generic[T]):
regex: typing.ClassVar[str] = ""
def convert(self, value: str) -> T:
raise NotImplementedError() # pragma: no cover
def to_string(self, value: T) -> str:
raise NotImplementedError() # pragma: no cover
class StringConvertor(Convertor):
regex = "[^/]+"
def convert(self, value: str) -> str:
return value
def to_string(self, value: str) -> str:
value = str(value)
assert "/" not in value, "May not contain path separators"
assert value, "Must not be empty"
return value
class PathConvertor(Convertor):
regex = ".*"
def convert(self, value: str) -> str:
return str(value)
def to_string(self, value: str) -> str:
return str(value)
class IntegerConvertor(Convertor):
regex = "[0-9]+"
def convert(self, value: str) -> int:
return int(value)
def to_string(self, value: int) -> str:
value = int(value)
assert value >= 0, "Negative integers are not supported"
return str(value)
class FloatConvertor(Convertor):
regex = "[0-9]+(.[0-9]+)?"
def convert(self, value: str) -> float:
return float(value)
def to_string(self, value: float) -> str:
value = float(value)
assert value >= 0.0, "Negative floats are not supported"
assert not math.isnan(value), "NaN values are not supported"
assert not math.isinf(value), "Infinite values are not supported"
return ("%0.20f" % value).rstrip("0").rstrip(".")
class UUIDConvertor(Convertor):
regex = "[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}"
def convert(self, value: str) -> uuid.UUID:
return uuid.UUID(value)
def to_string(self, value: uuid.UUID) -> str:
return str(value)
CONVERTOR_TYPES = {
"str": StringConvertor(),
"path": PathConvertor(),
"int": IntegerConvertor(),
"float": FloatConvertor(),
"uuid": UUIDConvertor(),
}
def register_url_convertor(key: str, convertor: Convertor) -> None:
CONVERTOR_TYPES[key] = convertor | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/convertors.py | convertors.py |
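# Usage sketch: registering a custom convertor so routes can declare
# "/{when:datetime}". The "datetime" key and timestamp format are assumptions.
#
#     import datetime
#
#     class DateTimeConvertor(Convertor):
#         regex = "[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}"
#
#         def convert(self, value: str) -> datetime.datetime:
#             return datetime.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S")
#
#         def to_string(self, value: datetime.datetime) -> str:
#             return value.strftime("%Y-%m-%dT%H:%M:%S")
#
#     register_url_convertor("datetime", DateTimeConvertor())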
import inspect
import typing
from zdppy_api.starlette.requests import Request
from zdppy_api.starlette.responses import Response
from zdppy_api.starlette.routing import BaseRoute, Mount, Route
try:
import yaml
except ImportError: # pragma: nocover
yaml = None # type: ignore
class OpenAPIResponse(Response):
media_type = "application/vnd.oai.openapi"
def render(self, content: typing.Any) -> bytes:
assert yaml is not None, "`pyyaml` must be installed to use OpenAPIResponse."
assert isinstance(
content, dict
), "The schema passed to OpenAPIResponse should be a dictionary."
return yaml.dump(content, default_flow_style=False).encode("utf-8")
class EndpointInfo(typing.NamedTuple):
path: str
http_method: str
func: typing.Callable
class BaseSchemaGenerator:
def get_schema(self, routes: typing.List[BaseRoute]) -> dict:
raise NotImplementedError() # pragma: no cover
def get_endpoints(
self, routes: typing.List[BaseRoute]
) -> typing.List[EndpointInfo]:
"""
Given the routes, yields the following information:
- path
eg: /users/
- http_method
one of 'get', 'post', 'put', 'patch', 'delete', 'options'
- func
            the function or method from which the docstring is extracted
"""
endpoints_info: list = []
for route in routes:
if isinstance(route, Mount):
                sub_routes = route.routes or []
                sub_endpoints = [
                    EndpointInfo(
                        path="".join((route.path, sub_endpoint.path)),
                        http_method=sub_endpoint.http_method,
                        func=sub_endpoint.func,
                    )
                    for sub_endpoint in self.get_endpoints(sub_routes)
]
endpoints_info.extend(sub_endpoints)
elif not isinstance(route, Route) or not route.include_in_schema:
continue
elif inspect.isfunction(route.endpoint) or inspect.ismethod(route.endpoint):
for method in route.methods or ["GET"]:
if method == "HEAD":
continue
endpoints_info.append(
EndpointInfo(route.path, method.lower(), route.endpoint)
)
else:
for method in ["get", "post", "put", "patch", "delete", "options"]:
if not hasattr(route.endpoint, method):
continue
func = getattr(route.endpoint, method)
endpoints_info.append(
EndpointInfo(route.path, method.lower(), func)
)
return endpoints_info
def parse_docstring(self, func_or_method: typing.Callable) -> dict:
"""
Given a function, parse the docstring as YAML and return a dictionary of info.
"""
docstring = func_or_method.__doc__
if not docstring:
return {}
assert yaml is not None, "`pyyaml` must be installed to use parse_docstring."
# We support having regular docstrings before the schema
# definition. Here we return just the schema part from
# the docstring.
docstring = docstring.split("---")[-1]
parsed = yaml.safe_load(docstring)
if not isinstance(parsed, dict):
# A regular docstring (not yaml formatted) can return
# a simple string here, which wouldn't follow the schema.
return {}
return parsed
def OpenAPIResponse(self, request: Request) -> Response:
routes = request.app.routes
schema = self.get_schema(routes=routes)
return OpenAPIResponse(schema)
class SchemaGenerator(BaseSchemaGenerator):
def __init__(self, base_schema: dict) -> None:
self.base_schema = base_schema
def get_schema(self, routes: typing.List[BaseRoute]) -> dict:
schema = dict(self.base_schema)
schema.setdefault("paths", {})
endpoints_info = self.get_endpoints(routes)
for endpoint in endpoints_info:
parsed = self.parse_docstring(endpoint.func)
if not parsed:
continue
if endpoint.path not in schema["paths"]:
schema["paths"][endpoint.path] = {}
schema["paths"][endpoint.path][endpoint.http_method] = parsed
return schema | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/schemas.py | schemas.py |
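# Usage sketch (base-schema values and the endpoint wiring are assumptions):
#
#     schemas = SchemaGenerator(
#         {"openapi": "3.0.0", "info": {"title": "Example API", "version": "1.0"}}
#     )
#
#     def openapi_schema(request):
#         # Serves the schema assembled from the YAML found after "---" in
#         # endpoint docstrings (see parse_docstring above).
#         return schemas.OpenAPIResponse(request=request)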
import http.cookies
import json
import os
import stat
import sys
import typing
from email.utils import formatdate
from functools import partial
from mimetypes import guess_type as mimetypes_guess_type
from urllib.parse import quote
from zdppy_api import anyio
from zdppy_api.starlette._compat import md5_hexdigest
from zdppy_api.starlette.background import BackgroundTask
from zdppy_api.starlette.concurrency import iterate_in_threadpool
from zdppy_api.starlette.datastructures import URL, MutableHeaders
from zdppy_api.starlette.types import Receive, Scope, Send
# Workaround for adding samesite support to pre 3.8 python
http.cookies.Morsel._reserved["samesite"] = "SameSite" # type: ignore
# Compatibility wrapper for `mimetypes.guess_type` to support `os.PathLike` on <py3.8
def guess_type(
url: typing.Union[str, "os.PathLike[str]"], strict: bool = True
) -> typing.Tuple[typing.Optional[str], typing.Optional[str]]:
if sys.version_info < (3, 8): # pragma: no cover
url = os.fspath(url)
return mimetypes_guess_type(url, strict)
class Response:
media_type = None
charset = "utf-8"
def __init__(
self,
content: typing.Any = None,
status_code: int = 200,
headers: typing.Optional[typing.Mapping[str, str]] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
) -> None:
self.status_code = status_code
if media_type is not None:
self.media_type = media_type
self.background = background
self.body = self.render(content)
self.init_headers(headers)
def render(self, content: typing.Any) -> bytes:
if content is None:
return b""
if isinstance(content, bytes):
return content
return content.encode(self.charset)
def init_headers(
self, headers: typing.Optional[typing.Mapping[str, str]] = None
) -> None:
if headers is None:
raw_headers: typing.List[typing.Tuple[bytes, bytes]] = []
populate_content_length = True
populate_content_type = True
else:
raw_headers = [
(k.lower().encode("latin-1"), v.encode("latin-1"))
for k, v in headers.items()
]
keys = [h[0] for h in raw_headers]
populate_content_length = b"content-length" not in keys
populate_content_type = b"content-type" not in keys
body = getattr(self, "body", None)
if (
body is not None
and populate_content_length
and not (self.status_code < 200 or self.status_code in (204, 304))
):
content_length = str(len(body))
raw_headers.append((b"content-length", content_length.encode("latin-1")))
content_type = self.media_type
if content_type is not None and populate_content_type:
if content_type.startswith("text/"):
content_type += "; charset=" + self.charset
raw_headers.append((b"content-type", content_type.encode("latin-1")))
self.raw_headers = raw_headers
@property
def headers(self) -> MutableHeaders:
if not hasattr(self, "_headers"):
self._headers = MutableHeaders(raw=self.raw_headers)
return self._headers
def set_cookie(
self,
key: str,
value: str = "",
max_age: typing.Optional[int] = None,
expires: typing.Optional[int] = None,
path: str = "/",
domain: typing.Optional[str] = None,
secure: bool = False,
httponly: bool = False,
samesite: str = "lax",
) -> None:
cookie: http.cookies.BaseCookie = http.cookies.SimpleCookie()
cookie[key] = value
if max_age is not None:
cookie[key]["max-age"] = max_age
if expires is not None:
cookie[key]["expires"] = expires
if path is not None:
cookie[key]["path"] = path
if domain is not None:
cookie[key]["domain"] = domain
if secure:
cookie[key]["secure"] = True
if httponly:
cookie[key]["httponly"] = True
if samesite is not None:
assert samesite.lower() in [
"strict",
"lax",
"none",
], "samesite must be either 'strict', 'lax' or 'none'"
cookie[key]["samesite"] = samesite
cookie_val = cookie.output(header="").strip()
self.raw_headers.append((b"set-cookie", cookie_val.encode("latin-1")))
def delete_cookie(
self,
key: str,
path: str = "/",
domain: typing.Optional[str] = None,
secure: bool = False,
httponly: bool = False,
samesite: str = "lax",
) -> None:
self.set_cookie(
key,
max_age=0,
expires=0,
path=path,
domain=domain,
secure=secure,
httponly=httponly,
samesite=samesite,
)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await send(
{
"type": "http.response.start",
"status": self.status_code,
"headers": self.raw_headers,
}
)
await send({"type": "http.response.body", "body": self.body})
if self.background is not None:
await self.background()
class HTMLResponse(Response):
media_type = "text/html"
class PlainTextResponse(Response):
media_type = "text/plain"
class JSONResponse(Response):
media_type = "application/json"
def __init__(
self,
content: typing.Any,
status_code: int = 200,
headers: typing.Optional[dict] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
) -> None:
super().__init__(content, status_code, headers, media_type, background)
def render(self, content: typing.Any) -> bytes:
return json.dumps(
content,
ensure_ascii=False,
allow_nan=False,
indent=None,
separators=(",", ":"),
).encode("utf-8")
class RedirectResponse(Response):
def __init__(
self,
url: typing.Union[str, URL],
status_code: int = 307,
headers: typing.Optional[typing.Mapping[str, str]] = None,
background: typing.Optional[BackgroundTask] = None,
) -> None:
super().__init__(
content=b"", status_code=status_code, headers=headers, background=background
)
self.headers["location"] = quote(str(url), safe=":/%#?=@[]!$&'()*+,;")
class StreamingResponse(Response):
def __init__(
self,
content: typing.Any,
status_code: int = 200,
headers: typing.Optional[typing.Mapping[str, str]] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
) -> None:
if isinstance(content, typing.AsyncIterable):
self.body_iterator = content
else:
self.body_iterator = iterate_in_threadpool(content)
self.status_code = status_code
self.media_type = self.media_type if media_type is None else media_type
self.background = background
self.init_headers(headers)
async def listen_for_disconnect(self, receive: Receive) -> None:
while True:
message = await receive()
if message["type"] == "http.disconnect":
break
async def stream_response(self, send: Send) -> None:
await send(
{
"type": "http.response.start",
"status": self.status_code,
"headers": self.raw_headers,
}
)
async for chunk in self.body_iterator:
if not isinstance(chunk, bytes):
chunk = chunk.encode(self.charset)
await send({"type": "http.response.body", "body": chunk, "more_body": True})
await send({"type": "http.response.body", "body": b"", "more_body": False})
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
async with anyio.create_task_group() as task_group:
async def wrap(func: typing.Callable[[], typing.Coroutine]) -> None:
await func()
task_group.cancel_scope.cancel()
task_group.start_soon(wrap, partial(self.stream_response, send))
await wrap(partial(self.listen_for_disconnect, receive))
if self.background is not None:
await self.background()
class FileResponse(Response):
chunk_size = 64 * 1024
def __init__(
self,
path: typing.Union[str, "os.PathLike[str]"],
status_code: int = 200,
headers: typing.Optional[typing.Mapping[str, str]] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
filename: typing.Optional[str] = None,
stat_result: typing.Optional[os.stat_result] = None,
method: typing.Optional[str] = None,
content_disposition_type: str = "attachment",
) -> None:
self.path = path
self.status_code = status_code
self.filename = filename
self.send_header_only = method is not None and method.upper() == "HEAD"
if media_type is None:
media_type = guess_type(filename or path)[0] or "text/plain"
self.media_type = media_type
self.background = background
self.init_headers(headers)
if self.filename is not None:
content_disposition_filename = quote(self.filename)
if content_disposition_filename != self.filename:
content_disposition = "{}; filename*=utf-8''{}".format(
content_disposition_type, content_disposition_filename
)
else:
content_disposition = '{}; filename="{}"'.format(
content_disposition_type, self.filename
)
self.headers.setdefault("content-disposition", content_disposition)
self.stat_result = stat_result
if stat_result is not None:
self.set_stat_headers(stat_result)
def set_stat_headers(self, stat_result: os.stat_result) -> None:
content_length = str(stat_result.st_size)
last_modified = formatdate(stat_result.st_mtime, usegmt=True)
etag_base = str(stat_result.st_mtime) + "-" + str(stat_result.st_size)
etag = md5_hexdigest(etag_base.encode(), usedforsecurity=False)
self.headers.setdefault("content-length", content_length)
self.headers.setdefault("last-modified", last_modified)
self.headers.setdefault("etag", etag)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if self.stat_result is None:
try:
stat_result = await anyio.to_thread.run_sync(os.stat, self.path)
self.set_stat_headers(stat_result)
except FileNotFoundError:
raise RuntimeError(f"File at path {self.path} does not exist.")
else:
mode = stat_result.st_mode
if not stat.S_ISREG(mode):
raise RuntimeError(f"File at path {self.path} is not a file.")
await send(
{
"type": "http.response.start",
"status": self.status_code,
"headers": self.raw_headers,
}
)
if self.send_header_only:
await send({"type": "http.response.body", "body": b"", "more_body": False})
else:
async with await anyio.open_file(self.path, mode="rb") as file:
more_body = True
while more_body:
chunk = await file.read(self.chunk_size)
more_body = len(chunk) == self.chunk_size
await send(
{
"type": "http.response.body",
"body": chunk,
"more_body": more_body,
}
)
if self.background is not None:
await self.background() | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/responses.py | responses.py |
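# Usage sketch of the response types above (endpoint wiring and file names
# are assumptions):
#
#     async def numbers(request):
#         async def stream():
#             for i in range(3):
#                 yield f"{i}\n".encode()
#         return StreamingResponse(stream(), media_type="text/plain")
#
#     async def report(request):
#         return FileResponse("report.pdf", filename="report.pdf")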
import typing
from zdppy_api.starlette.datastructures import State, URLPath
from zdppy_api.starlette.exceptions import ExceptionMiddleware
from zdppy_api.starlette.middleware import Middleware
from zdppy_api.starlette.middleware.base import BaseHTTPMiddleware
from zdppy_api.starlette.middleware.errors import ServerErrorMiddleware
from zdppy_api.starlette.requests import Request
from zdppy_api.starlette.responses import Response
from zdppy_api.starlette.routing import BaseRoute, Router
from zdppy_api.starlette.types import ASGIApp, Receive, Scope, Send
class Starlette:
"""
Creates an application instance.
**Parameters:**
* **debug** - Boolean indicating if debug tracebacks should be returned on errors.
* **routes** - A list of routes to serve incoming HTTP and WebSocket requests.
* **middleware** - A list of middleware to run for every request. A starlette
application will always automatically include two middleware classes.
`ServerErrorMiddleware` is added as the very outermost middleware, to handle
any uncaught errors occurring anywhere in the entire stack.
`ExceptionMiddleware` is added as the very innermost middleware, to deal
with handled exception cases occurring in the routing or endpoints.
* **exception_handlers** - A mapping of either integer status codes,
or exception class types onto callables which handle the exceptions.
Exception handler callables should be of the form
      `handler(request, exc) -> response` and may be either standard functions, or
      async functions.
    * **on_startup** - A list of callables to run on application startup.
      Startup handler callables do not take any arguments, and may be either
      standard functions, or async functions.
    * **on_shutdown** - A list of callables to run on application shutdown.
      Shutdown handler callables do not take any arguments, and may be either
      standard functions, or async functions.
"""
def __init__(
self,
debug: bool = False,
routes: typing.Optional[typing.Sequence[BaseRoute]] = None,
middleware: typing.Optional[typing.Sequence[Middleware]] = None,
exception_handlers: typing.Optional[
typing.Mapping[
typing.Any,
typing.Callable[
[Request, Exception],
typing.Union[Response, typing.Awaitable[Response]],
],
]
] = None,
on_startup: typing.Optional[typing.Sequence[typing.Callable]] = None,
on_shutdown: typing.Optional[typing.Sequence[typing.Callable]] = None,
lifespan: typing.Optional[
typing.Callable[["Starlette"], typing.AsyncContextManager]
] = None,
) -> None:
# The lifespan context function is a newer style that replaces
# on_startup / on_shutdown handlers. Use one or the other, not both.
assert lifespan is None or (
on_startup is None and on_shutdown is None
), "Use either 'lifespan' or 'on_startup'/'on_shutdown', not both."
self._debug = debug
self.state = State()
self.router = Router(
routes, on_startup=on_startup, on_shutdown=on_shutdown, lifespan=lifespan
)
self.exception_handlers = (
{} if exception_handlers is None else dict(exception_handlers)
)
self.user_middleware = [] if middleware is None else list(middleware)
self.middleware_stack = self.build_middleware_stack()
def build_middleware_stack(self) -> ASGIApp:
debug = self.debug
error_handler = None
exception_handlers: typing.Dict[
typing.Any, typing.Callable[[Request, Exception], Response]
] = {}
for key, value in self.exception_handlers.items():
if key in (500, Exception):
error_handler = value
else:
exception_handlers[key] = value
middleware = (
[Middleware(ServerErrorMiddleware, handler=error_handler, debug=debug)]
+ self.user_middleware
+ [
Middleware(
ExceptionMiddleware, handlers=exception_handlers, debug=debug
)
]
)
app = self.router
for cls, options in reversed(middleware):
app = cls(app=app, **options)
return app
@property
def routes(self) -> typing.List[BaseRoute]:
return self.router.routes
@property
def debug(self) -> bool:
return self._debug
@debug.setter
def debug(self, value: bool) -> None:
self._debug = value
self.middleware_stack = self.build_middleware_stack()
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
return self.router.url_path_for(name, **path_params)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
scope["app"] = self
await self.middleware_stack(scope, receive, send)
# The following usages are now discouraged in favour of configuration
# during Starlette.__init__(...)
def on_event(self, event_type: str) -> typing.Callable: # pragma: nocover
return self.router.on_event(event_type)
def mount(
self, path: str, app: ASGIApp, name: typing.Optional[str] = None
) -> None: # pragma: nocover
"""
We no longer document this API, and its usage is discouraged.
Instead you should use the following approach:
routes = [
Mount(path, ...),
...
]
app = Starlette(routes=routes)
"""
self.router.mount(path, app=app, name=name)
def host(
self, host: str, app: ASGIApp, name: typing.Optional[str] = None
) -> None: # pragma: no cover
"""
We no longer document this API, and its usage is discouraged.
Instead you should use the following approach:
routes = [
Host(path, ...),
...
]
app = Starlette(routes=routes)
"""
self.router.host(host, app=app, name=name)
def add_middleware(
self, middleware_class: type, **options: typing.Any
) -> None: # pragma: no cover
self.user_middleware.insert(0, Middleware(middleware_class, **options))
self.middleware_stack = self.build_middleware_stack()
def add_exception_handler(
self,
exc_class_or_status_code: typing.Union[int, typing.Type[Exception]],
handler: typing.Callable,
) -> None: # pragma: no cover
self.exception_handlers[exc_class_or_status_code] = handler
self.middleware_stack = self.build_middleware_stack()
def add_event_handler(
self, event_type: str, func: typing.Callable
) -> None: # pragma: no cover
self.router.add_event_handler(event_type, func)
def add_route(
self,
path: str,
route: typing.Callable,
methods: typing.Optional[typing.List[str]] = None,
name: typing.Optional[str] = None,
include_in_schema: bool = True,
) -> None: # pragma: no cover
self.router.add_route(
path, route, methods=methods, name=name, include_in_schema=include_in_schema
)
def add_websocket_route(
self, path: str, route: typing.Callable, name: typing.Optional[str] = None
) -> None: # pragma: no cover
self.router.add_websocket_route(path, route, name=name)
def exception_handler(
self, exc_class_or_status_code: typing.Union[int, typing.Type[Exception]]
) -> typing.Callable: # pragma: nocover
def decorator(func: typing.Callable) -> typing.Callable:
self.add_exception_handler(exc_class_or_status_code, func)
return func
return decorator
def route(
self,
path: str,
methods: typing.Optional[typing.List[str]] = None,
name: typing.Optional[str] = None,
include_in_schema: bool = True,
) -> typing.Callable: # pragma: nocover
"""
We no longer document this decorator style API, and its usage is discouraged.
Instead you should use the following approach:
routes = [
Route(path, endpoint=..., ...),
...
]
app = Starlette(routes=routes)
"""
def decorator(func: typing.Callable) -> typing.Callable:
self.router.add_route(
path,
func,
methods=methods,
name=name,
include_in_schema=include_in_schema,
)
return func
return decorator
def websocket_route(
self, path: str, name: typing.Optional[str] = None
) -> typing.Callable: # pragma: nocover
"""
We no longer document this decorator style API, and its usage is discouraged.
Instead you should use the following approach:
routes = [
WebSocketRoute(path, endpoint=..., ...),
...
]
app = Starlette(routes=routes)
"""
def decorator(func: typing.Callable) -> typing.Callable:
self.router.add_websocket_route(path, func, name=name)
return func
return decorator
def middleware(self, middleware_type: str) -> typing.Callable: # pragma: nocover
"""
We no longer document this decorator style API, and its usage is discouraged.
Instead you should use the following approach:
middleware = [
Middleware(...),
...
]
app = Starlette(middleware=middleware)
"""
assert (
middleware_type == "http"
), 'Currently only middleware("http") is supported.'
def decorator(func: typing.Callable) -> typing.Callable:
self.add_middleware(BaseHTTPMiddleware, dispatch=func)
return func
return decorator | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/applications.py | applications.py |
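# Usage sketch: assembling an application with the constructor-style API that
# the docstrings above recommend (handler and path are assumptions):
#
#     from zdppy_api.starlette.responses import PlainTextResponse
#     from zdppy_api.starlette.routing import Route
#
#     async def homepage(request):
#         return PlainTextResponse("Hello, world!")
#
#     app = Starlette(routes=[Route("/", homepage)])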
import json
import typing
from collections.abc import Mapping
from http import cookies as http_cookies
from zdppy_api import anyio
from zdppy_api.starlette.datastructures import URL, Address, FormData, Headers, QueryParams, State
from zdppy_api.starlette.formparsers import FormParser, MultiPartParser
from zdppy_api.starlette.types import Message, Receive, Scope, Send
try:
from multipart.multipart import parse_options_header
except ImportError: # pragma: nocover
parse_options_header = None
if typing.TYPE_CHECKING:
from zdppy_api.starlette.routing import Router
SERVER_PUSH_HEADERS_TO_COPY = {
"accept",
"accept-encoding",
"accept-language",
"cache-control",
"user-agent",
}
def cookie_parser(cookie_string: str) -> typing.Dict[str, str]:
"""
This function parses a ``Cookie`` HTTP header into a dict of key/value pairs.
It attempts to mimic browser cookie parsing behavior: browsers and web servers
frequently disregard the spec (RFC 6265) when setting and reading cookies,
so we attempt to suit the common scenarios here.
This function has been adapted from Django 3.1.0.
Note: we are explicitly _NOT_ using `SimpleCookie.load` because it is based
on an outdated spec and will fail on lots of input we want to support
"""
cookie_dict: typing.Dict[str, str] = {}
for chunk in cookie_string.split(";"):
if "=" in chunk:
key, val = chunk.split("=", 1)
else:
# Assume an empty name per
# https://bugzilla.mozilla.org/show_bug.cgi?id=169091
key, val = "", chunk
key, val = key.strip(), val.strip()
if key or val:
# unquote using Python's algorithm.
cookie_dict[key] = http_cookies._unquote(val)
return cookie_dict
class ClientDisconnect(Exception):
pass
class HTTPConnection(Mapping):
"""
A base class for incoming HTTP connections, that is used to provide
any functionality that is common to both `Request` and `WebSocket`.
"""
def __init__(self, scope: Scope, receive: typing.Optional[Receive] = None) -> None:
assert scope["type"] in ("http", "websocket")
self.scope = scope
def __getitem__(self, key: str) -> typing.Any:
return self.scope[key]
def __iter__(self) -> typing.Iterator[str]:
return iter(self.scope)
def __len__(self) -> int:
return len(self.scope)
# Don't use the `abc.Mapping.__eq__` implementation.
# Connection instances should never be considered equal
# unless `self is other`.
__eq__ = object.__eq__
__hash__ = object.__hash__
@property
def app(self) -> typing.Any:
return self.scope["app"]
@property
def url(self) -> URL:
if not hasattr(self, "_url"):
self._url = URL(scope=self.scope)
return self._url
@property
def base_url(self) -> URL:
if not hasattr(self, "_base_url"):
base_url_scope = dict(self.scope)
base_url_scope["path"] = "/"
base_url_scope["query_string"] = b""
base_url_scope["root_path"] = base_url_scope.get(
"app_root_path", base_url_scope.get("root_path", "")
)
self._base_url = URL(scope=base_url_scope)
return self._base_url
@property
def headers(self) -> Headers:
if not hasattr(self, "_headers"):
self._headers = Headers(scope=self.scope)
return self._headers
@property
def query_params(self) -> QueryParams:
if not hasattr(self, "_query_params"):
self._query_params = QueryParams(self.scope["query_string"])
return self._query_params
@property
def path_params(self) -> typing.Dict[str, typing.Any]:
return self.scope.get("path_params", {})
@property
def cookies(self) -> typing.Dict[str, str]:
if not hasattr(self, "_cookies"):
cookies: typing.Dict[str, str] = {}
cookie_header = self.headers.get("cookie")
if cookie_header:
cookies = cookie_parser(cookie_header)
self._cookies = cookies
return self._cookies
@property
def client(self) -> typing.Optional[Address]:
# client is a 2 item tuple of (host, port), None or missing
host_port = self.scope.get("client")
if host_port is not None:
return Address(*host_port)
return None
@property
def session(self) -> dict:
assert (
"session" in self.scope
), "SessionMiddleware must be installed to access request.session"
return self.scope["session"]
@property
def auth(self) -> typing.Any:
assert (
"auth" in self.scope
), "AuthenticationMiddleware must be installed to access request.auth"
return self.scope["auth"]
@property
def user(self) -> typing.Any:
assert (
"user" in self.scope
), "AuthenticationMiddleware must be installed to access request.user"
return self.scope["user"]
@property
def state(self) -> State:
if not hasattr(self, "_state"):
# Ensure 'state' has an empty dict if it's not already populated.
self.scope.setdefault("state", {})
# Create a state instance with a reference to the dict in which it should
# store info
self._state = State(self.scope["state"])
return self._state
def url_for(self, name: str, **path_params: typing.Any) -> str:
router: Router = self.scope["router"]
url_path = router.url_path_for(name, **path_params)
return url_path.make_absolute_url(base_url=self.base_url)
async def empty_receive() -> typing.NoReturn:
raise RuntimeError("Receive channel has not been made available")
async def empty_send(message: Message) -> typing.NoReturn:
raise RuntimeError("Send channel has not been made available")
class Request(HTTPConnection):
def __init__(
self, scope: Scope, receive: Receive = empty_receive, send: Send = empty_send
):
super().__init__(scope)
assert scope["type"] == "http"
self._receive = receive
self._send = send
self._stream_consumed = False
self._is_disconnected = False
@property
def method(self) -> str:
return self.scope["method"]
@property
def receive(self) -> Receive:
return self._receive
async def stream(self) -> typing.AsyncGenerator[bytes, None]:
if hasattr(self, "_body"):
yield self._body
yield b""
return
if self._stream_consumed:
raise RuntimeError("Stream consumed")
self._stream_consumed = True
while True:
message = await self._receive()
if message["type"] == "http.request":
body = message.get("body", b"")
if body:
yield body
if not message.get("more_body", False):
break
elif message["type"] == "http.disconnect":
self._is_disconnected = True
raise ClientDisconnect()
yield b""
async def body(self) -> bytes:
if not hasattr(self, "_body"):
chunks = []
async for chunk in self.stream():
chunks.append(chunk)
self._body = b"".join(chunks)
return self._body
async def json(self) -> typing.Any:
if not hasattr(self, "_json"):
body = await self.body()
self._json = json.loads(body)
return self._json
async def form(self) -> FormData:
if not hasattr(self, "_form"):
assert (
parse_options_header is not None
), "The `python-multipart` library must be installed to use form parsing."
content_type_header = self.headers.get("Content-Type")
content_type, options = parse_options_header(content_type_header)
if content_type == b"multipart/form-data":
multipart_parser = MultiPartParser(self.headers, self.stream())
self._form = await multipart_parser.parse()
elif content_type == b"application/x-www-form-urlencoded":
form_parser = FormParser(self.headers, self.stream())
self._form = await form_parser.parse()
else:
self._form = FormData()
return self._form
async def close(self) -> None:
if hasattr(self, "_form"):
await self._form.close()
async def is_disconnected(self) -> bool:
if not self._is_disconnected:
message: Message = {}
# If message isn't immediately available, move on
with anyio.CancelScope() as cs:
cs.cancel()
message = await self._receive()
if message.get("type") == "http.disconnect":
self._is_disconnected = True
return self._is_disconnected
async def send_push_promise(self, path: str) -> None:
if "http.response.push" in self.scope.get("extensions", {}):
raw_headers = []
for name in SERVER_PUSH_HEADERS_TO_COPY:
for value in self.headers.getlist(name):
raw_headers.append(
(name.encode("latin-1"), value.encode("latin-1"))
)
await self._send(
{"type": "http.response.push", "path": path, "headers": raw_headers}
) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/requests.py | requests.py |
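# Usage sketch: reading the body inside an endpoint (the endpoint itself is
# an assumption):
#
#     async def create_item(request: Request):
#         payload = await request.json()  # the body is read once, then cached
#         ...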
import tempfile
import typing
from collections.abc import Sequence
from shlex import shlex
from urllib.parse import SplitResult, parse_qsl, urlencode, urlsplit
from zdppy_api.starlette.concurrency import run_in_threadpool
from zdppy_api.starlette.types import Scope
class Address(typing.NamedTuple):
host: str
port: int
class URL:
def __init__(
self,
url: str = "",
scope: typing.Optional[Scope] = None,
**components: typing.Any,
) -> None:
if scope is not None:
assert not url, 'Cannot set both "url" and "scope".'
assert not components, 'Cannot set both "scope" and "**components".'
scheme = scope.get("scheme", "http")
server = scope.get("server", None)
path = scope.get("root_path", "") + scope["path"]
query_string = scope.get("query_string", b"")
host_header = None
for key, value in scope["headers"]:
if key == b"host":
host_header = value.decode("latin-1")
break
if host_header is not None:
url = f"{scheme}://{host_header}{path}"
elif server is None:
url = path
else:
host, port = server
default_port = {"http": 80, "https": 443, "ws": 80, "wss": 443}[scheme]
if port == default_port:
url = f"{scheme}://{host}{path}"
else:
url = f"{scheme}://{host}:{port}{path}"
if query_string:
url += "?" + query_string.decode()
elif components:
assert not url, 'Cannot set both "url" and "**components".'
url = URL("").replace(**components).components.geturl()
self._url = url
@property
def components(self) -> SplitResult:
if not hasattr(self, "_components"):
self._components = urlsplit(self._url)
return self._components
@property
def scheme(self) -> str:
return self.components.scheme
@property
def netloc(self) -> str:
return self.components.netloc
@property
def path(self) -> str:
return self.components.path
@property
def query(self) -> str:
return self.components.query
@property
def fragment(self) -> str:
return self.components.fragment
@property
def username(self) -> typing.Union[None, str]:
return self.components.username
@property
def password(self) -> typing.Union[None, str]:
return self.components.password
@property
def hostname(self) -> typing.Union[None, str]:
return self.components.hostname
@property
def port(self) -> typing.Optional[int]:
return self.components.port
@property
def is_secure(self) -> bool:
return self.scheme in ("https", "wss")
def replace(self, **kwargs: typing.Any) -> "URL":
if (
"username" in kwargs
or "password" in kwargs
or "hostname" in kwargs
or "port" in kwargs
):
hostname = kwargs.pop("hostname", self.hostname)
port = kwargs.pop("port", self.port)
username = kwargs.pop("username", self.username)
password = kwargs.pop("password", self.password)
netloc = hostname
if port is not None:
netloc += f":{port}"
if username is not None:
userpass = username
if password is not None:
userpass += f":{password}"
netloc = f"{userpass}@{netloc}"
kwargs["netloc"] = netloc
components = self.components._replace(**kwargs)
return self.__class__(components.geturl())
def include_query_params(self, **kwargs: typing.Any) -> "URL":
params = MultiDict(parse_qsl(self.query, keep_blank_values=True))
params.update({str(key): str(value) for key, value in kwargs.items()})
query = urlencode(params.multi_items())
return self.replace(query=query)
def replace_query_params(self, **kwargs: typing.Any) -> "URL":
query = urlencode([(str(key), str(value)) for key, value in kwargs.items()])
return self.replace(query=query)
def remove_query_params(
self, keys: typing.Union[str, typing.Sequence[str]]
) -> "URL":
if isinstance(keys, str):
keys = [keys]
params = MultiDict(parse_qsl(self.query, keep_blank_values=True))
for key in keys:
params.pop(key, None)
query = urlencode(params.multi_items())
return self.replace(query=query)
def __eq__(self, other: typing.Any) -> bool:
return str(self) == str(other)
def __str__(self) -> str:
return self._url
def __repr__(self) -> str:
url = str(self)
if self.password:
url = str(self.replace(password="********"))
return f"{self.__class__.__name__}({repr(url)})"
class URLPath(str):
"""
A URL path string that may also hold an associated protocol and/or host.
Used by the routing to return `url_path_for` matches.
"""
def __new__(cls, path: str, protocol: str = "", host: str = "") -> "URLPath":
assert protocol in ("http", "websocket", "")
return str.__new__(cls, path)
def __init__(self, path: str, protocol: str = "", host: str = "") -> None:
self.protocol = protocol
self.host = host
def make_absolute_url(self, base_url: typing.Union[str, URL]) -> str:
if isinstance(base_url, str):
base_url = URL(base_url)
if self.protocol:
scheme = {
"http": {True: "https", False: "http"},
"websocket": {True: "wss", False: "ws"},
}[self.protocol][base_url.is_secure]
else:
scheme = base_url.scheme
netloc = self.host or base_url.netloc
path = base_url.path.rstrip("/") + str(self)
return str(URL(scheme=scheme, netloc=netloc, path=path))
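# Sketch of URLPath resolving against a base URL (values are assumptions);
# note how an "http" protocol is upgraded to "https" for a secure base:
#
#     path = URLPath("/users/42", protocol="http")
#     path.make_absolute_url("https://example.com")   # "https://example.com/users/42"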
class Secret:
"""
Holds a string value that should not be revealed in tracebacks etc.
You should cast the value to `str` at the point it is required.
"""
def __init__(self, value: str):
self._value = value
def __repr__(self) -> str:
class_name = self.__class__.__name__
return f"{class_name}('**********')"
def __str__(self) -> str:
return self._value
class CommaSeparatedStrings(Sequence):
def __init__(self, value: typing.Union[str, typing.Sequence[str]]):
if isinstance(value, str):
splitter = shlex(value, posix=True)
splitter.whitespace = ","
splitter.whitespace_split = True
self._items = [item.strip() for item in splitter]
else:
self._items = list(value)
def __len__(self) -> int:
return len(self._items)
def __getitem__(self, index: typing.Union[int, slice]) -> typing.Any:
return self._items[index]
def __iter__(self) -> typing.Iterator[str]:
return iter(self._items)
def __repr__(self) -> str:
class_name = self.__class__.__name__
items = [item for item in self]
return f"{class_name}({items!r})"
def __str__(self) -> str:
return ", ".join(repr(item) for item in self)
class ImmutableMultiDict(typing.Mapping):
def __init__(
self,
*args: typing.Union[
"ImmutableMultiDict",
typing.Mapping,
typing.List[typing.Tuple[typing.Any, typing.Any]],
],
**kwargs: typing.Any,
) -> None:
assert len(args) < 2, "Too many arguments."
value = args[0] if args else []
if kwargs:
value = (
ImmutableMultiDict(value).multi_items()
+ ImmutableMultiDict(kwargs).multi_items()
)
if not value:
_items: typing.List[typing.Tuple[typing.Any, typing.Any]] = []
elif hasattr(value, "multi_items"):
value = typing.cast(ImmutableMultiDict, value)
_items = list(value.multi_items())
elif hasattr(value, "items"):
value = typing.cast(typing.Mapping, value)
_items = list(value.items())
else:
value = typing.cast(
typing.List[typing.Tuple[typing.Any, typing.Any]], value
)
_items = list(value)
self._dict = {k: v for k, v in _items}
self._list = _items
def getlist(self, key: typing.Any) -> typing.List[typing.Any]:
return [item_value for item_key, item_value in self._list if item_key == key]
def keys(self) -> typing.KeysView:
return self._dict.keys()
def values(self) -> typing.ValuesView:
return self._dict.values()
def items(self) -> typing.ItemsView:
return self._dict.items()
def multi_items(self) -> typing.List[typing.Tuple[str, str]]:
return list(self._list)
def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any:
if key in self._dict:
return self._dict[key]
return default
def __getitem__(self, key: typing.Any) -> str:
return self._dict[key]
def __contains__(self, key: typing.Any) -> bool:
return key in self._dict
def __iter__(self) -> typing.Iterator[typing.Any]:
return iter(self.keys())
def __len__(self) -> int:
return len(self._dict)
def __eq__(self, other: typing.Any) -> bool:
if not isinstance(other, self.__class__):
return False
return sorted(self._list) == sorted(other._list)
def __repr__(self) -> str:
class_name = self.__class__.__name__
items = self.multi_items()
return f"{class_name}({items!r})"
class MultiDict(ImmutableMultiDict):
def __setitem__(self, key: typing.Any, value: typing.Any) -> None:
self.setlist(key, [value])
def __delitem__(self, key: typing.Any) -> None:
self._list = [(k, v) for k, v in self._list if k != key]
del self._dict[key]
def pop(self, key: typing.Any, default: typing.Any = None) -> typing.Any:
self._list = [(k, v) for k, v in self._list if k != key]
return self._dict.pop(key, default)
def popitem(self) -> typing.Tuple:
key, value = self._dict.popitem()
self._list = [(k, v) for k, v in self._list if k != key]
return key, value
def poplist(self, key: typing.Any) -> typing.List:
values = [v for k, v in self._list if k == key]
self.pop(key)
return values
def clear(self) -> None:
self._dict.clear()
self._list.clear()
def setdefault(self, key: typing.Any, default: typing.Any = None) -> typing.Any:
if key not in self:
self._dict[key] = default
self._list.append((key, default))
return self[key]
def setlist(self, key: typing.Any, values: typing.List) -> None:
if not values:
self.pop(key, None)
else:
existing_items = [(k, v) for (k, v) in self._list if k != key]
self._list = existing_items + [(key, value) for value in values]
self._dict[key] = values[-1]
def append(self, key: typing.Any, value: typing.Any) -> None:
self._list.append((key, value))
self._dict[key] = value
def update(
self,
*args: typing.Union[
"MultiDict",
typing.Mapping,
typing.List[typing.Tuple[typing.Any, typing.Any]],
],
**kwargs: typing.Any,
) -> None:
value = MultiDict(*args, **kwargs)
existing_items = [(k, v) for (k, v) in self._list if k not in value.keys()]
self._list = existing_items + value.multi_items()
self._dict.update(value)
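# Sketch of the mutable variant; append() and setlist() keep the internal
# list and dict views in sync:
#
#     d = MultiDict([("a", "1")])
#     d.append("a", "2")
#     d.getlist("a")         # ["1", "2"]
#     d.setlist("a", ["9"])
#     d.getlist("a")         # ["9"]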
class QueryParams(ImmutableMultiDict):
"""
An immutable multidict.
"""
def __init__(
self,
*args: typing.Union[
"ImmutableMultiDict",
typing.Mapping,
typing.List[typing.Tuple[typing.Any, typing.Any]],
str,
bytes,
],
**kwargs: typing.Any,
) -> None:
assert len(args) < 2, "Too many arguments."
value = args[0] if args else []
if isinstance(value, str):
super().__init__(parse_qsl(value, keep_blank_values=True), **kwargs)
elif isinstance(value, bytes):
super().__init__(
parse_qsl(value.decode("latin-1"), keep_blank_values=True), **kwargs
)
else:
super().__init__(*args, **kwargs) # type: ignore
self._list = [(str(k), str(v)) for k, v in self._list]
self._dict = {str(k): str(v) for k, v in self._dict.items()}
def __str__(self) -> str:
return urlencode(self._list)
def __repr__(self) -> str:
class_name = self.__class__.__name__
query_string = str(self)
return f"{class_name}({query_string!r})"
class UploadFile:
"""
An uploaded file included as part of the request data.
"""
spool_max_size = 1024 * 1024
file: typing.BinaryIO
headers: "Headers"
def __init__(
self,
filename: str,
file: typing.Optional[typing.BinaryIO] = None,
content_type: str = "",
*,
headers: "typing.Optional[Headers]" = None,
) -> None:
self.filename = filename
self.content_type = content_type
if file is None:
self.file = tempfile.SpooledTemporaryFile(max_size=self.spool_max_size) # type: ignore # noqa: E501
else:
self.file = file
self.headers = headers or Headers()
@property
def _in_memory(self) -> bool:
rolled_to_disk = getattr(self.file, "_rolled", True)
return not rolled_to_disk
async def write(self, data: bytes) -> None:
if self._in_memory:
self.file.write(data)
else:
await run_in_threadpool(self.file.write, data)
async def read(self, size: int = -1) -> bytes:
if self._in_memory:
return self.file.read(size)
return await run_in_threadpool(self.file.read, size)
async def seek(self, offset: int) -> None:
if self._in_memory:
self.file.seek(offset)
else:
await run_in_threadpool(self.file.seek, offset)
async def close(self) -> None:
if self._in_memory:
self.file.close()
else:
await run_in_threadpool(self.file.close)
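# UploadFile buffers in memory up to spool_max_size (1 MiB here) and rolls to
# disk beyond that; once rolled, file operations are offloaded to a thread
# pool. A minimal sketch (filename and content are assumptions):
#
#     upload = UploadFile(filename="report.csv")
#     await upload.write(b"col1,col2\n")
#     await upload.seek(0)
#     data = await upload.read()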
class FormData(ImmutableMultiDict):
"""
An immutable multidict, containing both file uploads and text input.
"""
def __init__(
self,
*args: typing.Union[
"FormData",
typing.Mapping[str, typing.Union[str, UploadFile]],
typing.List[typing.Tuple[str, typing.Union[str, UploadFile]]],
],
**kwargs: typing.Union[str, UploadFile],
) -> None:
super().__init__(*args, **kwargs)
async def close(self) -> None:
for key, value in self.multi_items():
if isinstance(value, UploadFile):
await value.close()
class Headers(typing.Mapping[str, str]):
"""
An immutable, case-insensitive multidict.
"""
def __init__(
self,
headers: typing.Optional[typing.Mapping[str, str]] = None,
raw: typing.Optional[typing.List[typing.Tuple[bytes, bytes]]] = None,
scope: typing.Optional[typing.Mapping[str, typing.Any]] = None,
) -> None:
self._list: typing.List[typing.Tuple[bytes, bytes]] = []
if headers is not None:
assert raw is None, 'Cannot set both "headers" and "raw".'
assert scope is None, 'Cannot set both "headers" and "scope".'
self._list = [
(key.lower().encode("latin-1"), value.encode("latin-1"))
for key, value in headers.items()
]
elif raw is not None:
assert scope is None, 'Cannot set both "raw" and "scope".'
self._list = raw
elif scope is not None:
self._list = scope["headers"]
@property
def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]:
return list(self._list)
def keys(self) -> typing.List[str]: # type: ignore
return [key.decode("latin-1") for key, value in self._list]
def values(self) -> typing.List[str]: # type: ignore
return [value.decode("latin-1") for key, value in self._list]
def items(self) -> typing.List[typing.Tuple[str, str]]: # type: ignore
return [
(key.decode("latin-1"), value.decode("latin-1"))
for key, value in self._list
]
def get(self, key: str, default: typing.Any = None) -> typing.Any:
try:
return self[key]
except KeyError:
return default
def getlist(self, key: str) -> typing.List[str]:
get_header_key = key.lower().encode("latin-1")
return [
item_value.decode("latin-1")
for item_key, item_value in self._list
if item_key == get_header_key
]
def mutablecopy(self) -> "MutableHeaders":
return MutableHeaders(raw=self._list[:])
def __getitem__(self, key: str) -> str:
get_header_key = key.lower().encode("latin-1")
for header_key, header_value in self._list:
if header_key == get_header_key:
return header_value.decode("latin-1")
raise KeyError(key)
def __contains__(self, key: typing.Any) -> bool:
get_header_key = key.lower().encode("latin-1")
for header_key, header_value in self._list:
if header_key == get_header_key:
return True
return False
def __iter__(self) -> typing.Iterator[typing.Any]:
return iter(self.keys())
def __len__(self) -> int:
return len(self._list)
def __eq__(self, other: typing.Any) -> bool:
if not isinstance(other, Headers):
return False
return sorted(self._list) == sorted(other._list)
def __repr__(self) -> str:
class_name = self.__class__.__name__
as_dict = dict(self.items())
if len(as_dict) == len(self):
return f"{class_name}({as_dict!r})"
return f"{class_name}(raw={self.raw!r})"
class MutableHeaders(Headers):
def __setitem__(self, key: str, value: str) -> None:
"""
Set the header `key` to `value`, removing any duplicate entries.
Retains insertion order.
"""
set_key = key.lower().encode("latin-1")
set_value = value.encode("latin-1")
found_indexes = []
for idx, (item_key, item_value) in enumerate(self._list):
if item_key == set_key:
found_indexes.append(idx)
for idx in reversed(found_indexes[1:]):
del self._list[idx]
if found_indexes:
idx = found_indexes[0]
self._list[idx] = (set_key, set_value)
else:
self._list.append((set_key, set_value))
def __delitem__(self, key: str) -> None:
"""
Remove the header `key`.
"""
del_key = key.lower().encode("latin-1")
pop_indexes = []
for idx, (item_key, item_value) in enumerate(self._list):
if item_key == del_key:
pop_indexes.append(idx)
for idx in reversed(pop_indexes):
del self._list[idx]
def __ior__(self, other: typing.Mapping) -> "MutableHeaders":
if not isinstance(other, typing.Mapping):
raise TypeError(f"Expected a mapping but got {other.__class__.__name__}")
self.update(other)
return self
def __or__(self, other: typing.Mapping) -> "MutableHeaders":
if not isinstance(other, typing.Mapping):
raise TypeError(f"Expected a mapping but got {other.__class__.__name__}")
new = self.mutablecopy()
new.update(other)
return new
@property
def raw(self) -> typing.List[typing.Tuple[bytes, bytes]]:
return self._list
def setdefault(self, key: str, value: str) -> str:
"""
If the header `key` does not exist, then set it to `value`.
Returns the header value.
"""
set_key = key.lower().encode("latin-1")
set_value = value.encode("latin-1")
for idx, (item_key, item_value) in enumerate(self._list):
if item_key == set_key:
return item_value.decode("latin-1")
self._list.append((set_key, set_value))
return value
def update(self, other: typing.Mapping) -> None:
for key, val in other.items():
self[key] = val
def append(self, key: str, value: str) -> None:
"""
Append a header, preserving any duplicate entries.
"""
append_key = key.lower().encode("latin-1")
append_value = value.encode("latin-1")
self._list.append((append_key, append_value))
def add_vary_header(self, vary: str) -> None:
existing = self.get("vary")
if existing is not None:
vary = ", ".join([existing, vary])
self["vary"] = vary
class State:
"""
An object that can be used to store arbitrary state.
Used for `request.state` and `app.state`.
"""
_state: typing.Dict[str, typing.Any]
def __init__(self, state: typing.Optional[typing.Dict[str, typing.Any]] = None):
if state is None:
state = {}
super().__setattr__("_state", state)
def __setattr__(self, key: typing.Any, value: typing.Any) -> None:
self._state[key] = value
def __getattr__(self, key: typing.Any) -> typing.Any:
try:
return self._state[key]
except KeyError:
message = "'{}' object has no attribute '{}'"
raise AttributeError(message.format(self.__class__.__name__, key))
def __delattr__(self, key: typing.Any) -> None:
del self._state[key] | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/datastructures.py | datastructures.py |
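# State is a thin attribute wrapper over a plain dict, used to attach
# arbitrary values to an app or request (create_pool is a hypothetical
# helper, shown only for illustration):
#
#     state = State()
#     state.db_pool = create_pool()   # hypothetical helper
#     state.db_pool                   # returns the stored object
#     del state.db_pool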
import asyncio
import contextlib
import functools
import inspect
import re
import sys
import traceback
import types
import typing
import warnings
from enum import Enum
from zdppy_api.starlette.concurrency import run_in_threadpool
from zdppy_api.starlette.convertors import CONVERTOR_TYPES, Convertor
from zdppy_api.starlette.datastructures import URL, Headers, URLPath
from zdppy_api.starlette.exceptions import HTTPException
from zdppy_api.starlette.requests import Request
from zdppy_api.starlette.responses import PlainTextResponse, RedirectResponse
from zdppy_api.starlette.types import ASGIApp, Receive, Scope, Send
from zdppy_api.starlette.websockets import WebSocket, WebSocketClose
if sys.version_info >= (3, 7):
from contextlib import asynccontextmanager # pragma: no cover
else:
from contextlib2 import asynccontextmanager # pragma: no cover
class NoMatchFound(Exception):
"""
Raised by `.url_for(name, **path_params)` and `.url_path_for(name, **path_params)`
if no matching route exists.
"""
def __init__(self, name: str, path_params: typing.Dict[str, typing.Any]) -> None:
params = ", ".join(list(path_params.keys()))
super().__init__(f'No route exists for name "{name}" and params "{params}".')
class Match(Enum):
NONE = 0
PARTIAL = 1
FULL = 2
def iscoroutinefunction_or_partial(obj: typing.Any) -> bool:
"""
Correctly determines if an object is a coroutine function,
including those wrapped in functools.partial objects.
"""
while isinstance(obj, functools.partial):
obj = obj.func
return inspect.iscoroutinefunction(obj)
def request_response(func: typing.Callable) -> ASGIApp:
"""
Takes a function or coroutine `func(request) -> response`,
and returns an ASGI application.
"""
is_coroutine = iscoroutinefunction_or_partial(func)
async def app(scope: Scope, receive: Receive, send: Send) -> None:
request = Request(scope, receive=receive, send=send)
if is_coroutine:
response = await func(request)
else:
response = await run_in_threadpool(func, request)
await response(scope, receive, send)
return app
def websocket_session(func: typing.Callable) -> ASGIApp:
"""
Takes a coroutine `func(session)`, and returns an ASGI application.
"""
# assert asyncio.iscoroutinefunction(func), "WebSocket endpoints must be async"
async def app(scope: Scope, receive: Receive, send: Send) -> None:
session = WebSocket(scope, receive=receive, send=send)
await func(session)
return app
def get_name(endpoint: typing.Callable) -> str:
if inspect.isroutine(endpoint) or inspect.isclass(endpoint):
return endpoint.__name__
return endpoint.__class__.__name__
def replace_params(
path: str,
param_convertors: typing.Dict[str, Convertor],
path_params: typing.Dict[str, str],
) -> typing.Tuple[str, dict]:
for key, value in list(path_params.items()):
if "{" + key + "}" in path:
convertor = param_convertors[key]
value = convertor.to_string(value)
path = path.replace("{" + key + "}", value)
path_params.pop(key)
return path, path_params
# Match parameters in URL paths, e.g. '{param}' and '{param:int}'
PARAM_REGEX = re.compile("{([a-zA-Z_][a-zA-Z0-9_]*)(:[a-zA-Z_][a-zA-Z0-9_]*)?}")
def compile_path(
path: str,
) -> typing.Tuple[typing.Pattern, str, typing.Dict[str, Convertor]]:
"""
Given a path string, like: "/{username:str}", return a three-tuple
of (regex, format, {param_name:convertor}).
regex: "/(?P<username>[^/]+)"
format: "/{username}"
convertors: {"username": StringConvertor()}
"""
path_regex = "^"
path_format = ""
duplicated_params = set()
idx = 0
param_convertors = {}
for match in PARAM_REGEX.finditer(path):
param_name, convertor_type = match.groups("str")
convertor_type = convertor_type.lstrip(":")
assert (
convertor_type in CONVERTOR_TYPES
), f"Unknown path convertor '{convertor_type}'"
convertor = CONVERTOR_TYPES[convertor_type]
path_regex += re.escape(path[idx : match.start()])
path_regex += f"(?P<{param_name}>{convertor.regex})"
path_format += path[idx : match.start()]
path_format += "{%s}" % param_name
if param_name in param_convertors:
duplicated_params.add(param_name)
param_convertors[param_name] = convertor
idx = match.end()
if duplicated_params:
names = ", ".join(sorted(duplicated_params))
ending = "s" if len(duplicated_params) > 1 else ""
raise ValueError(f"Duplicated param name{ending} {names} at path {path}")
    # Strip any ":port" suffix from the trailing literal; Host patterns are
    # matched against the port-stripped Host header, so the port must not
    # appear in the compiled regex.
    path_regex += re.escape(path[idx:].split(":")[0]) + "$"
path_format += path[idx:]
return re.compile(path_regex), path_format, param_convertors
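# What compile_path() returns for the docstring's example, as a sketch:
#
#     regex, fmt, convertors = compile_path("/{username:str}")
#     regex.pattern                      # "^/(?P<username>[^/]+)$"
#     regex.match("/john").groupdict()   # {"username": "john"}
#     fmt                                # "/{username}"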
class BaseRoute:
def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]:
raise NotImplementedError() # pragma: no cover
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
raise NotImplementedError() # pragma: no cover
async def handle(self, scope: Scope, receive: Receive, send: Send) -> None:
raise NotImplementedError() # pragma: no cover
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
"""
A route may be used in isolation as a stand-alone ASGI app.
This is a somewhat contrived case, as they'll almost always be used
within a Router, but could be useful for some tooling and minimal apps.
"""
match, child_scope = self.matches(scope)
if match == Match.NONE:
if scope["type"] == "http":
response = PlainTextResponse("Not Found", status_code=404)
await response(scope, receive, send)
elif scope["type"] == "websocket":
websocket_close = WebSocketClose()
await websocket_close(scope, receive, send)
return
scope.update(child_scope)
await self.handle(scope, receive, send)
class Route(BaseRoute):
def __init__(
self,
path: str,
endpoint: typing.Callable,
*,
methods: typing.Optional[typing.List[str]] = None,
name: typing.Optional[str] = None,
include_in_schema: bool = True,
) -> None:
assert path.startswith("/"), "Routed paths must start with '/'"
self.path = path
self.endpoint = endpoint
self.name = get_name(endpoint) if name is None else name
self.include_in_schema = include_in_schema
endpoint_handler = endpoint
while isinstance(endpoint_handler, functools.partial):
endpoint_handler = endpoint_handler.func
if inspect.isfunction(endpoint_handler) or inspect.ismethod(endpoint_handler):
# Endpoint is function or method. Treat it as `func(request) -> response`.
self.app = request_response(endpoint)
if methods is None:
methods = ["GET"]
else:
# Endpoint is a class. Treat it as ASGI.
self.app = endpoint
if methods is None:
self.methods = None
else:
self.methods = {method.upper() for method in methods}
if "GET" in self.methods:
self.methods.add("HEAD")
self.path_regex, self.path_format, self.param_convertors = compile_path(path)
def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]:
if scope["type"] == "http":
match = self.path_regex.match(scope["path"])
if match:
matched_params = match.groupdict()
for key, value in matched_params.items():
matched_params[key] = self.param_convertors[key].convert(value)
path_params = dict(scope.get("path_params", {}))
path_params.update(matched_params)
child_scope = {"endpoint": self.endpoint, "path_params": path_params}
if self.methods and scope["method"] not in self.methods:
return Match.PARTIAL, child_scope
else:
return Match.FULL, child_scope
return Match.NONE, {}
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
seen_params = set(path_params.keys())
expected_params = set(self.param_convertors.keys())
if name != self.name or seen_params != expected_params:
raise NoMatchFound(name, path_params)
path, remaining_params = replace_params(
self.path_format, self.param_convertors, path_params
)
assert not remaining_params
return URLPath(path=path, protocol="http")
async def handle(self, scope: Scope, receive: Receive, send: Send) -> None:
if self.methods and scope["method"] not in self.methods:
headers = {"Allow": ", ".join(self.methods)}
if "app" in scope:
raise HTTPException(status_code=405, headers=headers)
else:
response = PlainTextResponse(
"Method Not Allowed", status_code=405, headers=headers
)
await response(scope, receive, send)
else:
await self.app(scope, receive, send)
def __eq__(self, other: typing.Any) -> bool:
return (
isinstance(other, Route)
and self.path == other.path
and self.endpoint == other.endpoint
and self.methods == other.methods
)
class WebSocketRoute(BaseRoute):
def __init__(
self, path: str, endpoint: typing.Callable, *, name: typing.Optional[str] = None
) -> None:
assert path.startswith("/"), "Routed paths must start with '/'"
self.path = path
self.endpoint = endpoint
self.name = get_name(endpoint) if name is None else name
endpoint_handler = endpoint
while isinstance(endpoint_handler, functools.partial):
endpoint_handler = endpoint_handler.func
if inspect.isfunction(endpoint_handler) or inspect.ismethod(endpoint_handler):
# Endpoint is function or method. Treat it as `func(websocket)`.
self.app = websocket_session(endpoint)
else:
# Endpoint is a class. Treat it as ASGI.
self.app = endpoint
self.path_regex, self.path_format, self.param_convertors = compile_path(path)
def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]:
if scope["type"] == "websocket":
match = self.path_regex.match(scope["path"])
if match:
matched_params = match.groupdict()
for key, value in matched_params.items():
matched_params[key] = self.param_convertors[key].convert(value)
path_params = dict(scope.get("path_params", {}))
path_params.update(matched_params)
child_scope = {"endpoint": self.endpoint, "path_params": path_params}
return Match.FULL, child_scope
return Match.NONE, {}
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
seen_params = set(path_params.keys())
expected_params = set(self.param_convertors.keys())
if name != self.name or seen_params != expected_params:
raise NoMatchFound(name, path_params)
path, remaining_params = replace_params(
self.path_format, self.param_convertors, path_params
)
assert not remaining_params
return URLPath(path=path, protocol="websocket")
async def handle(self, scope: Scope, receive: Receive, send: Send) -> None:
await self.app(scope, receive, send)
def __eq__(self, other: typing.Any) -> bool:
return (
isinstance(other, WebSocketRoute)
and self.path == other.path
and self.endpoint == other.endpoint
)
class Mount(BaseRoute):
def __init__(
self,
path: str,
app: typing.Optional[ASGIApp] = None,
routes: typing.Optional[typing.Sequence[BaseRoute]] = None,
name: typing.Optional[str] = None,
) -> None:
assert path == "" or path.startswith("/"), "Routed paths must start with '/'"
assert (
app is not None or routes is not None
), "Either 'app=...', or 'routes=' must be specified"
self.path = path.rstrip("/")
if app is not None:
self.app: ASGIApp = app
else:
self.app = Router(routes=routes)
self.name = name
self.path_regex, self.path_format, self.param_convertors = compile_path(
self.path + "/{path:path}"
)
@property
def routes(self) -> typing.List[BaseRoute]:
return getattr(self.app, "routes", [])
def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]:
if scope["type"] in ("http", "websocket"):
path = scope["path"]
match = self.path_regex.match(path)
if match:
matched_params = match.groupdict()
for key, value in matched_params.items():
matched_params[key] = self.param_convertors[key].convert(value)
remaining_path = "/" + matched_params.pop("path")
matched_path = path[: -len(remaining_path)]
path_params = dict(scope.get("path_params", {}))
path_params.update(matched_params)
root_path = scope.get("root_path", "")
child_scope = {
"path_params": path_params,
"app_root_path": scope.get("app_root_path", root_path),
"root_path": root_path + matched_path,
"path": remaining_path,
"endpoint": self.app,
}
return Match.FULL, child_scope
return Match.NONE, {}
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
if self.name is not None and name == self.name and "path" in path_params:
# 'name' matches "<mount_name>".
path_params["path"] = path_params["path"].lstrip("/")
path, remaining_params = replace_params(
self.path_format, self.param_convertors, path_params
)
if not remaining_params:
return URLPath(path=path)
elif self.name is None or name.startswith(self.name + ":"):
if self.name is None:
# No mount name.
remaining_name = name
else:
# 'name' matches "<mount_name>:<child_name>".
remaining_name = name[len(self.name) + 1 :]
path_kwarg = path_params.get("path")
path_params["path"] = ""
path_prefix, remaining_params = replace_params(
self.path_format, self.param_convertors, path_params
)
if path_kwarg is not None:
remaining_params["path"] = path_kwarg
for route in self.routes or []:
try:
url = route.url_path_for(remaining_name, **remaining_params)
return URLPath(
path=path_prefix.rstrip("/") + str(url), protocol=url.protocol
)
except NoMatchFound:
pass
raise NoMatchFound(name, path_params)
async def handle(self, scope: Scope, receive: Receive, send: Send) -> None:
await self.app(scope, receive, send)
def __eq__(self, other: typing.Any) -> bool:
return (
isinstance(other, Mount)
and self.path == other.path
and self.app == other.app
)
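# Sketch of mounting a sub-application under a prefix (names are assumed):
#
#     api = Router(routes=[Route("/users", endpoint=list_users)])
#     routes = [Mount("/api", app=api, name="api")]
#
# A request for "/api/users" moves "/api" into root_path and hands the inner
# router a scope whose path is "/users".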
class Host(BaseRoute):
def __init__(
self, host: str, app: ASGIApp, name: typing.Optional[str] = None
) -> None:
self.host = host
self.app = app
self.name = name
self.host_regex, self.host_format, self.param_convertors = compile_path(host)
@property
def routes(self) -> typing.List[BaseRoute]:
return getattr(self.app, "routes", [])
def matches(self, scope: Scope) -> typing.Tuple[Match, Scope]:
if scope["type"] in ("http", "websocket"):
headers = Headers(scope=scope)
host = headers.get("host", "").split(":")[0]
match = self.host_regex.match(host)
if match:
matched_params = match.groupdict()
for key, value in matched_params.items():
matched_params[key] = self.param_convertors[key].convert(value)
path_params = dict(scope.get("path_params", {}))
path_params.update(matched_params)
child_scope = {"path_params": path_params, "endpoint": self.app}
return Match.FULL, child_scope
return Match.NONE, {}
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
if self.name is not None and name == self.name and "path" in path_params:
# 'name' matches "<mount_name>".
path = path_params.pop("path")
host, remaining_params = replace_params(
self.host_format, self.param_convertors, path_params
)
if not remaining_params:
return URLPath(path=path, host=host)
elif self.name is None or name.startswith(self.name + ":"):
if self.name is None:
# No mount name.
remaining_name = name
else:
# 'name' matches "<mount_name>:<child_name>".
remaining_name = name[len(self.name) + 1 :]
host, remaining_params = replace_params(
self.host_format, self.param_convertors, path_params
)
for route in self.routes or []:
try:
url = route.url_path_for(remaining_name, **remaining_params)
return URLPath(path=str(url), protocol=url.protocol, host=host)
except NoMatchFound:
pass
raise NoMatchFound(name, path_params)
async def handle(self, scope: Scope, receive: Receive, send: Send) -> None:
await self.app(scope, receive, send)
def __eq__(self, other: typing.Any) -> bool:
return (
isinstance(other, Host)
and self.host == other.host
and self.app == other.app
)
_T = typing.TypeVar("_T")
class _AsyncLiftContextManager(typing.AsyncContextManager[_T]):
def __init__(self, cm: typing.ContextManager[_T]):
self._cm = cm
async def __aenter__(self) -> _T:
return self._cm.__enter__()
async def __aexit__(
self,
exc_type: typing.Optional[typing.Type[BaseException]],
exc_value: typing.Optional[BaseException],
traceback: typing.Optional[types.TracebackType],
) -> typing.Optional[bool]:
return self._cm.__exit__(exc_type, exc_value, traceback)
def _wrap_gen_lifespan_context(
lifespan_context: typing.Callable[[typing.Any], typing.Generator]
) -> typing.Callable[[typing.Any], typing.AsyncContextManager]:
cmgr = contextlib.contextmanager(lifespan_context)
@functools.wraps(cmgr)
def wrapper(app: typing.Any) -> _AsyncLiftContextManager:
return _AsyncLiftContextManager(cmgr(app))
return wrapper
class _DefaultLifespan:
def __init__(self, router: "Router"):
self._router = router
async def __aenter__(self) -> None:
await self._router.startup()
async def __aexit__(self, *exc_info: object) -> None:
await self._router.shutdown()
def __call__(self: _T, app: object) -> _T:
return self
class Router:
def __init__(
self,
routes: typing.Optional[typing.Sequence[BaseRoute]] = None,
redirect_slashes: bool = True,
default: typing.Optional[ASGIApp] = None,
on_startup: typing.Optional[typing.Sequence[typing.Callable]] = None,
on_shutdown: typing.Optional[typing.Sequence[typing.Callable]] = None,
lifespan: typing.Optional[
typing.Callable[[typing.Any], typing.AsyncContextManager]
] = None,
) -> None:
self.routes = [] if routes is None else list(routes)
self.redirect_slashes = redirect_slashes
self.default = self.not_found if default is None else default
self.on_startup = [] if on_startup is None else list(on_startup)
self.on_shutdown = [] if on_shutdown is None else list(on_shutdown)
if lifespan is None:
self.lifespan_context: typing.Callable[
[typing.Any], typing.AsyncContextManager
] = _DefaultLifespan(self)
elif inspect.isasyncgenfunction(lifespan):
warnings.warn(
"async generator function lifespans are deprecated, "
"use an @contextlib.asynccontextmanager function instead",
DeprecationWarning,
)
self.lifespan_context = asynccontextmanager(
lifespan, # type: ignore[arg-type]
)
elif inspect.isgeneratorfunction(lifespan):
warnings.warn(
"generator function lifespans are deprecated, "
"use an @contextlib.asynccontextmanager function instead",
DeprecationWarning,
)
self.lifespan_context = _wrap_gen_lifespan_context(
lifespan, # type: ignore[arg-type]
)
else:
self.lifespan_context = lifespan
async def not_found(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] == "websocket":
websocket_close = WebSocketClose()
await websocket_close(scope, receive, send)
return
# If we're running inside a starlette application then raise an
# exception, so that the configurable exception handler can deal with
# returning the response. For plain ASGI apps, just return the response.
if "app" in scope:
raise HTTPException(status_code=404)
else:
response = PlainTextResponse("Not Found", status_code=404)
await response(scope, receive, send)
def url_path_for(self, name: str, **path_params: typing.Any) -> URLPath:
for route in self.routes:
try:
return route.url_path_for(name, **path_params)
except NoMatchFound:
pass
raise NoMatchFound(name, path_params)
async def startup(self) -> None:
"""
Run any `.on_startup` event handlers.
"""
for handler in self.on_startup:
if asyncio.iscoroutinefunction(handler):
await handler()
else:
handler()
async def shutdown(self) -> None:
"""
Run any `.on_shutdown` event handlers.
"""
for handler in self.on_shutdown:
if asyncio.iscoroutinefunction(handler):
await handler()
else:
handler()
async def lifespan(self, scope: Scope, receive: Receive, send: Send) -> None:
"""
Handle ASGI lifespan messages, which allows us to manage application
startup and shutdown events.
"""
started = False
app = scope.get("app")
await receive()
try:
async with self.lifespan_context(app):
await send({"type": "lifespan.startup.complete"})
started = True
await receive()
except BaseException:
exc_text = traceback.format_exc()
if started:
await send({"type": "lifespan.shutdown.failed", "message": exc_text})
else:
await send({"type": "lifespan.startup.failed", "message": exc_text})
raise
else:
await send({"type": "lifespan.shutdown.complete"})
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
"""
The main entry point to the Router class.
"""
assert scope["type"] in ("http", "websocket", "lifespan")
if "router" not in scope:
scope["router"] = self
if scope["type"] == "lifespan":
await self.lifespan(scope, receive, send)
return
partial = None
for route in self.routes:
# Determine if any route matches the incoming scope,
# and hand over to the matching route if found.
match, child_scope = route.matches(scope)
if match == Match.FULL:
scope.update(child_scope)
await route.handle(scope, receive, send)
return
elif match == Match.PARTIAL and partial is None:
partial = route
partial_scope = child_scope
if partial is not None:
# Handle partial matches. These are cases where an endpoint is
# able to handle the request, but is not a preferred option.
# We use this in particular to deal with "405 Method Not Allowed".
scope.update(partial_scope)
await partial.handle(scope, receive, send)
return
if scope["type"] == "http" and self.redirect_slashes and scope["path"] != "/":
redirect_scope = dict(scope)
if scope["path"].endswith("/"):
redirect_scope["path"] = redirect_scope["path"].rstrip("/")
else:
redirect_scope["path"] = redirect_scope["path"] + "/"
for route in self.routes:
match, child_scope = route.matches(redirect_scope)
if match != Match.NONE:
redirect_url = URL(scope=redirect_scope)
response = RedirectResponse(url=str(redirect_url))
await response(scope, receive, send)
return
await self.default(scope, receive, send)
def __eq__(self, other: typing.Any) -> bool:
return isinstance(other, Router) and self.routes == other.routes
# The following usages are now discouraged in favour of configuration
# during Router.__init__(...)
def mount(
self, path: str, app: ASGIApp, name: typing.Optional[str] = None
) -> None: # pragma: nocover
"""
We no longer document this API, and its usage is discouraged.
Instead you should use the following approach:
routes = [
Mount(path, ...),
...
]
app = Starlette(routes=routes)
"""
route = Mount(path, app=app, name=name)
self.routes.append(route)
def host(
self, host: str, app: ASGIApp, name: typing.Optional[str] = None
) -> None: # pragma: no cover
"""
We no longer document this API, and its usage is discouraged.
Instead you should use the following approach:
routes = [
Host(path, ...),
...
]
app = Starlette(routes=routes)
"""
route = Host(host, app=app, name=name)
self.routes.append(route)
def add_route(
self,
path: str,
endpoint: typing.Callable,
methods: typing.Optional[typing.List[str]] = None,
name: typing.Optional[str] = None,
include_in_schema: bool = True,
) -> None: # pragma: nocover
route = Route(
path,
endpoint=endpoint,
methods=methods,
name=name,
include_in_schema=include_in_schema,
)
self.routes.append(route)
def add_websocket_route(
self, path: str, endpoint: typing.Callable, name: typing.Optional[str] = None
) -> None: # pragma: no cover
route = WebSocketRoute(path, endpoint=endpoint, name=name)
self.routes.append(route)
def route(
self,
path: str,
methods: typing.Optional[typing.List[str]] = None,
name: typing.Optional[str] = None,
include_in_schema: bool = True,
) -> typing.Callable: # pragma: nocover
"""
We no longer document this decorator style API, and its usage is discouraged.
Instead you should use the following approach:
routes = [
Route(path, endpoint=..., ...),
...
]
app = Starlette(routes=routes)
"""
def decorator(func: typing.Callable) -> typing.Callable:
self.add_route(
path,
func,
methods=methods,
name=name,
include_in_schema=include_in_schema,
)
return func
return decorator
def websocket_route(
self, path: str, name: typing.Optional[str] = None
) -> typing.Callable: # pragma: nocover
"""
We no longer document this decorator style API, and its usage is discouraged.
Instead you should use the following approach:
routes = [
WebSocketRoute(path, endpoint=..., ...),
...
]
app = Starlette(routes=routes)
"""
def decorator(func: typing.Callable) -> typing.Callable:
self.add_websocket_route(path, func, name=name)
return func
return decorator
def add_event_handler(
self, event_type: str, func: typing.Callable
) -> None: # pragma: no cover
assert event_type in ("startup", "shutdown")
if event_type == "startup":
self.on_startup.append(func)
else:
self.on_shutdown.append(func)
def on_event(self, event_type: str) -> typing.Callable: # pragma: nocover
def decorator(func: typing.Callable) -> typing.Callable:
self.add_event_handler(event_type, func)
return func
return decorator | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/routing.py | routing.py |
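# A minimal Router wiring sketch (path and endpoint are assumptions):
#
#     async def homepage(request):
#         return PlainTextResponse("Hello")
#
#     router = Router(routes=[Route("/", endpoint=homepage)])
#
# As an ASGI app the router dispatches "http" and "websocket" scopes to the
# first fully matching route, falls back to partial matches for 405 handling,
# and services "lifespan" scopes itself.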
import asyncio
import functools
import inspect
import typing
from urllib.parse import urlencode
from zdppy_api.starlette.exceptions import HTTPException
from zdppy_api.starlette.requests import HTTPConnection, Request
from zdppy_api.starlette.responses import RedirectResponse, Response
from zdppy_api.starlette.websockets import WebSocket
def has_required_scope(conn: HTTPConnection, scopes: typing.Sequence[str]) -> bool:
for scope in scopes:
if scope not in conn.auth.scopes:
return False
return True
def requires(
scopes: typing.Union[str, typing.Sequence[str]],
status_code: int = 403,
redirect: typing.Optional[str] = None,
) -> typing.Callable:
scopes_list = [scopes] if isinstance(scopes, str) else list(scopes)
def decorator(func: typing.Callable) -> typing.Callable:
sig = inspect.signature(func)
for idx, parameter in enumerate(sig.parameters.values()):
if parameter.name == "request" or parameter.name == "websocket":
type_ = parameter.name
break
else:
raise Exception(
f'No "request" or "websocket" argument on function "{func}"'
)
if type_ == "websocket":
# Handle websocket functions. (Always async)
@functools.wraps(func)
async def websocket_wrapper(
*args: typing.Any, **kwargs: typing.Any
) -> None:
websocket = kwargs.get(
"websocket", args[idx] if idx < len(args) else None
)
assert isinstance(websocket, WebSocket)
if not has_required_scope(websocket, scopes_list):
await websocket.close()
else:
await func(*args, **kwargs)
return websocket_wrapper
elif asyncio.iscoroutinefunction(func):
# Handle async request/response functions.
@functools.wraps(func)
async def async_wrapper(
*args: typing.Any, **kwargs: typing.Any
) -> Response:
request = kwargs.get("request", args[idx] if idx < len(args) else None)
assert isinstance(request, Request)
if not has_required_scope(request, scopes_list):
if redirect is not None:
orig_request_qparam = urlencode({"next": str(request.url)})
next_url = "{redirect_path}?{orig_request}".format(
redirect_path=request.url_for(redirect),
orig_request=orig_request_qparam,
)
return RedirectResponse(url=next_url, status_code=303)
raise HTTPException(status_code=status_code)
return await func(*args, **kwargs)
return async_wrapper
else:
# Handle sync request/response functions.
@functools.wraps(func)
def sync_wrapper(*args: typing.Any, **kwargs: typing.Any) -> Response:
request = kwargs.get("request", args[idx] if idx < len(args) else None)
assert isinstance(request, Request)
if not has_required_scope(request, scopes_list):
if redirect is not None:
orig_request_qparam = urlencode({"next": str(request.url)})
next_url = "{redirect_path}?{orig_request}".format(
redirect_path=request.url_for(redirect),
orig_request=orig_request_qparam,
)
return RedirectResponse(url=next_url, status_code=303)
raise HTTPException(status_code=status_code)
return func(*args, **kwargs)
return sync_wrapper
return decorator
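# Sketch of the decorator in use (scope name and redirect target are
# assumptions):
#
#     @requires("authenticated", redirect="login")
#     async def dashboard(request):
#         ...
#
# Requests lacking the scope are redirected (303) to the "login" route with a
# "?next=" parameter pointing back at the original URL; without `redirect`,
# an HTTPException with `status_code` (403 by default) is raised instead.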
class AuthenticationError(Exception):
pass
class AuthenticationBackend:
async def authenticate(
self, conn: HTTPConnection
) -> typing.Optional[typing.Tuple["AuthCredentials", "BaseUser"]]:
raise NotImplementedError() # pragma: no cover
class AuthCredentials:
def __init__(self, scopes: typing.Optional[typing.Sequence[str]] = None):
self.scopes = [] if scopes is None else list(scopes)
class BaseUser:
@property
def is_authenticated(self) -> bool:
raise NotImplementedError() # pragma: no cover
@property
def display_name(self) -> str:
raise NotImplementedError() # pragma: no cover
@property
def identity(self) -> str:
raise NotImplementedError() # pragma: no cover
class SimpleUser(BaseUser):
def __init__(self, username: str) -> None:
self.username = username
@property
def is_authenticated(self) -> bool:
return True
@property
def display_name(self) -> str:
return self.username
class UnauthenticatedUser(BaseUser):
@property
def is_authenticated(self) -> bool:
return False
@property
def display_name(self) -> str:
return "" | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/authentication.py | authentication.py |
import sys
import warnings
from typing import List
from zdppy_api.starlette._pep562 import pep562
__all__ = (
"HTTP_100_CONTINUE",
"HTTP_101_SWITCHING_PROTOCOLS",
"HTTP_102_PROCESSING",
"HTTP_103_EARLY_HINTS",
"HTTP_200_OK",
"HTTP_201_CREATED",
"HTTP_202_ACCEPTED",
"HTTP_203_NON_AUTHORITATIVE_INFORMATION",
"HTTP_204_NO_CONTENT",
"HTTP_205_RESET_CONTENT",
"HTTP_206_PARTIAL_CONTENT",
"HTTP_207_MULTI_STATUS",
"HTTP_208_ALREADY_REPORTED",
"HTTP_226_IM_USED",
"HTTP_300_MULTIPLE_CHOICES",
"HTTP_301_MOVED_PERMANENTLY",
"HTTP_302_FOUND",
"HTTP_303_SEE_OTHER",
"HTTP_304_NOT_MODIFIED",
"HTTP_305_USE_PROXY",
"HTTP_306_RESERVED",
"HTTP_307_TEMPORARY_REDIRECT",
"HTTP_308_PERMANENT_REDIRECT",
"HTTP_400_BAD_REQUEST",
"HTTP_401_UNAUTHORIZED",
"HTTP_402_PAYMENT_REQUIRED",
"HTTP_403_FORBIDDEN",
"HTTP_404_NOT_FOUND",
"HTTP_405_METHOD_NOT_ALLOWED",
"HTTP_406_NOT_ACCEPTABLE",
"HTTP_407_PROXY_AUTHENTICATION_REQUIRED",
"HTTP_408_REQUEST_TIMEOUT",
"HTTP_409_CONFLICT",
"HTTP_410_GONE",
"HTTP_411_LENGTH_REQUIRED",
"HTTP_412_PRECONDITION_FAILED",
"HTTP_413_REQUEST_ENTITY_TOO_LARGE",
"HTTP_414_REQUEST_URI_TOO_LONG",
"HTTP_415_UNSUPPORTED_MEDIA_TYPE",
"HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE",
"HTTP_417_EXPECTATION_FAILED",
"HTTP_418_IM_A_TEAPOT",
"HTTP_421_MISDIRECTED_REQUEST",
"HTTP_422_UNPROCESSABLE_ENTITY",
"HTTP_423_LOCKED",
"HTTP_424_FAILED_DEPENDENCY",
"HTTP_425_TOO_EARLY",
"HTTP_426_UPGRADE_REQUIRED",
"HTTP_428_PRECONDITION_REQUIRED",
"HTTP_429_TOO_MANY_REQUESTS",
"HTTP_431_REQUEST_HEADER_FIELDS_TOO_LARGE",
"HTTP_451_UNAVAILABLE_FOR_LEGAL_REASONS",
"HTTP_500_INTERNAL_SERVER_ERROR",
"HTTP_501_NOT_IMPLEMENTED",
"HTTP_502_BAD_GATEWAY",
"HTTP_503_SERVICE_UNAVAILABLE",
"HTTP_504_GATEWAY_TIMEOUT",
"HTTP_505_HTTP_VERSION_NOT_SUPPORTED",
"HTTP_506_VARIANT_ALSO_NEGOTIATES",
"HTTP_507_INSUFFICIENT_STORAGE",
"HTTP_508_LOOP_DETECTED",
"HTTP_510_NOT_EXTENDED",
"HTTP_511_NETWORK_AUTHENTICATION_REQUIRED",
"WS_1000_NORMAL_CLOSURE",
"WS_1001_GOING_AWAY",
"WS_1002_PROTOCOL_ERROR",
"WS_1003_UNSUPPORTED_DATA",
"WS_1005_NO_STATUS_RCVD",
"WS_1006_ABNORMAL_CLOSURE",
"WS_1007_INVALID_FRAME_PAYLOAD_DATA",
"WS_1008_POLICY_VIOLATION",
"WS_1009_MESSAGE_TOO_BIG",
"WS_1010_MANDATORY_EXT",
"WS_1011_INTERNAL_ERROR",
"WS_1012_SERVICE_RESTART",
"WS_1013_TRY_AGAIN_LATER",
"WS_1014_BAD_GATEWAY",
"WS_1015_TLS_HANDSHAKE",
)
HTTP_100_CONTINUE = 100
HTTP_101_SWITCHING_PROTOCOLS = 101
HTTP_102_PROCESSING = 102
HTTP_103_EARLY_HINTS = 103
HTTP_200_OK = 200
HTTP_201_CREATED = 201
HTTP_202_ACCEPTED = 202
HTTP_203_NON_AUTHORITATIVE_INFORMATION = 203
HTTP_204_NO_CONTENT = 204
HTTP_205_RESET_CONTENT = 205
HTTP_206_PARTIAL_CONTENT = 206
HTTP_207_MULTI_STATUS = 207
HTTP_208_ALREADY_REPORTED = 208
HTTP_226_IM_USED = 226
HTTP_300_MULTIPLE_CHOICES = 300
HTTP_301_MOVED_PERMANENTLY = 301
HTTP_302_FOUND = 302
HTTP_303_SEE_OTHER = 303
HTTP_304_NOT_MODIFIED = 304
HTTP_305_USE_PROXY = 305
HTTP_306_RESERVED = 306
HTTP_307_TEMPORARY_REDIRECT = 307
HTTP_308_PERMANENT_REDIRECT = 308
HTTP_400_BAD_REQUEST = 400
HTTP_401_UNAUTHORIZED = 401
HTTP_402_PAYMENT_REQUIRED = 402
HTTP_403_FORBIDDEN = 403
HTTP_404_NOT_FOUND = 404
HTTP_405_METHOD_NOT_ALLOWED = 405
HTTP_406_NOT_ACCEPTABLE = 406
HTTP_407_PROXY_AUTHENTICATION_REQUIRED = 407
HTTP_408_REQUEST_TIMEOUT = 408
HTTP_409_CONFLICT = 409
HTTP_410_GONE = 410
HTTP_411_LENGTH_REQUIRED = 411
HTTP_412_PRECONDITION_FAILED = 412
HTTP_413_REQUEST_ENTITY_TOO_LARGE = 413
HTTP_414_REQUEST_URI_TOO_LONG = 414
HTTP_415_UNSUPPORTED_MEDIA_TYPE = 415
HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE = 416
HTTP_417_EXPECTATION_FAILED = 417
HTTP_418_IM_A_TEAPOT = 418
HTTP_421_MISDIRECTED_REQUEST = 421
HTTP_422_UNPROCESSABLE_ENTITY = 422
HTTP_423_LOCKED = 423
HTTP_424_FAILED_DEPENDENCY = 424
HTTP_425_TOO_EARLY = 425
HTTP_426_UPGRADE_REQUIRED = 426
HTTP_428_PRECONDITION_REQUIRED = 428
HTTP_429_TOO_MANY_REQUESTS = 429
HTTP_431_REQUEST_HEADER_FIELDS_TOO_LARGE = 431
HTTP_451_UNAVAILABLE_FOR_LEGAL_REASONS = 451
HTTP_500_INTERNAL_SERVER_ERROR = 500
HTTP_501_NOT_IMPLEMENTED = 501
HTTP_502_BAD_GATEWAY = 502
HTTP_503_SERVICE_UNAVAILABLE = 503
HTTP_504_GATEWAY_TIMEOUT = 504
HTTP_505_HTTP_VERSION_NOT_SUPPORTED = 505
HTTP_506_VARIANT_ALSO_NEGOTIATES = 506
HTTP_507_INSUFFICIENT_STORAGE = 507
HTTP_508_LOOP_DETECTED = 508
HTTP_510_NOT_EXTENDED = 510
HTTP_511_NETWORK_AUTHENTICATION_REQUIRED = 511
"""
WebSocket codes
https://www.iana.org/assignments/websocket/websocket.xml#close-code-number
https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent
"""
WS_1000_NORMAL_CLOSURE = 1000
WS_1001_GOING_AWAY = 1001
WS_1002_PROTOCOL_ERROR = 1002
WS_1003_UNSUPPORTED_DATA = 1003
WS_1005_NO_STATUS_RCVD = 1005
WS_1006_ABNORMAL_CLOSURE = 1006
WS_1007_INVALID_FRAME_PAYLOAD_DATA = 1007
WS_1008_POLICY_VIOLATION = 1008
WS_1009_MESSAGE_TOO_BIG = 1009
WS_1010_MANDATORY_EXT = 1010
WS_1011_INTERNAL_ERROR = 1011
WS_1012_SERVICE_RESTART = 1012
WS_1013_TRY_AGAIN_LATER = 1013
WS_1014_BAD_GATEWAY = 1014
WS_1015_TLS_HANDSHAKE = 1015
__deprecated__ = {"WS_1004_NO_STATUS_RCVD": 1004, "WS_1005_ABNORMAL_CLOSURE": 1005}
def __getattr__(name: str) -> int:
deprecation_changes = {
"WS_1004_NO_STATUS_RCVD": "WS_1005_NO_STATUS_RCVD",
"WS_1005_ABNORMAL_CLOSURE": "WS_1006_ABNORMAL_CLOSURE",
}
deprecated = __deprecated__.get(name)
if deprecated:
stacklevel = 3 if sys.version_info >= (3, 7) else 4
warnings.warn(
f"'{name}' is deprecated. Use '{deprecation_changes[name]}' instead.",
category=DeprecationWarning,
stacklevel=stacklevel,
)
return deprecated
raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
def __dir__() -> List[str]:
return sorted(list(__all__) + list(__deprecated__.keys())) # pragma: no cover
pep562(__name__) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/status.py | status.py |
import asyncio
import json
import typing
from zdppy_api.starlette import status
from zdppy_api.starlette.concurrency import run_in_threadpool
from zdppy_api.starlette.exceptions import HTTPException
from zdppy_api.starlette.requests import Request
from zdppy_api.starlette.responses import PlainTextResponse, Response
from zdppy_api.starlette.types import Message, Receive, Scope, Send
from zdppy_api.starlette.websockets import WebSocket
class HTTPEndpoint:
def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:
assert scope["type"] == "http"
self.scope = scope
self.receive = receive
self.send = send
self._allowed_methods = [
method
for method in ("GET", "HEAD", "POST", "PUT", "PATCH", "DELETE", "OPTIONS")
if getattr(self, method.lower(), None) is not None
]
def __await__(self) -> typing.Generator:
return self.dispatch().__await__()
async def dispatch(self) -> None:
request = Request(self.scope, receive=self.receive)
handler_name = (
"get"
if request.method == "HEAD" and not hasattr(self, "head")
else request.method.lower()
)
handler: typing.Callable[[Request], typing.Any] = getattr(
self, handler_name, self.method_not_allowed
)
is_async = asyncio.iscoroutinefunction(handler)
if is_async:
response = await handler(request)
else:
response = await run_in_threadpool(handler, request)
await response(self.scope, self.receive, self.send)
async def method_not_allowed(self, request: Request) -> Response:
# If we're running inside a starlette application then raise an
# exception, so that the configurable exception handler can deal with
# returning the response. For plain ASGI apps, just return the response.
headers = {"Allow": ", ".join(self._allowed_methods)}
if "app" in self.scope:
raise HTTPException(status_code=405, headers=headers)
return PlainTextResponse("Method Not Allowed", status_code=405, headers=headers)
class WebSocketEndpoint:
encoding: typing.Optional[str] = None # May be "text", "bytes", or "json".
def __init__(self, scope: Scope, receive: Receive, send: Send) -> None:
assert scope["type"] == "websocket"
self.scope = scope
self.receive = receive
self.send = send
def __await__(self) -> typing.Generator:
return self.dispatch().__await__()
async def dispatch(self) -> None:
websocket = WebSocket(self.scope, receive=self.receive, send=self.send)
await self.on_connect(websocket)
close_code = status.WS_1000_NORMAL_CLOSURE
try:
while True:
message = await websocket.receive()
if message["type"] == "websocket.receive":
data = await self.decode(websocket, message)
await self.on_receive(websocket, data)
elif message["type"] == "websocket.disconnect":
close_code = int(
message.get("code") or status.WS_1000_NORMAL_CLOSURE
)
break
except Exception as exc:
close_code = status.WS_1011_INTERNAL_ERROR
raise exc
finally:
await self.on_disconnect(websocket, close_code)
async def decode(self, websocket: WebSocket, message: Message) -> typing.Any:
if self.encoding == "text":
if "text" not in message:
await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA)
raise RuntimeError("Expected text websocket messages, but got bytes")
return message["text"]
elif self.encoding == "bytes":
if "bytes" not in message:
await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA)
raise RuntimeError("Expected bytes websocket messages, but got text")
return message["bytes"]
elif self.encoding == "json":
if message.get("text") is not None:
text = message["text"]
else:
text = message["bytes"].decode("utf-8")
try:
return json.loads(text)
except json.decoder.JSONDecodeError:
await websocket.close(code=status.WS_1003_UNSUPPORTED_DATA)
raise RuntimeError("Malformed JSON data received.")
assert (
self.encoding is None
), f"Unsupported 'encoding' attribute {self.encoding}"
return message["text"] if message.get("text") else message["bytes"]
async def on_connect(self, websocket: WebSocket) -> None:
"""Override to handle an incoming websocket connection"""
await websocket.accept()
async def on_receive(self, websocket: WebSocket, data: typing.Any) -> None:
"""Override to handle an incoming websocket message"""
async def on_disconnect(self, websocket: WebSocket, close_code: int) -> None:
"""Override to handle a disconnecting websocket""" | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/endpoints.py | endpoints.py |
import typing
from os import PathLike
from zdppy_api.starlette.background import BackgroundTask
from zdppy_api.starlette.responses import Response
from zdppy_api.starlette.types import Receive, Scope, Send
try:
import jinja2
# @contextfunction was renamed to @pass_context in Jinja 3.0, and was removed in 3.1
# hence we try to get pass_context (most installs will be >=3.1)
# and fall back to contextfunction,
# adding a type ignore for mypy to let us access an attribute that may not exist
if hasattr(jinja2, "pass_context"):
pass_context = jinja2.pass_context
else: # pragma: nocover
pass_context = jinja2.contextfunction # type: ignore[attr-defined]
except ImportError: # pragma: nocover
jinja2 = None # type: ignore
class _TemplateResponse(Response):
media_type = "text/html"
def __init__(
self,
template: typing.Any,
context: dict,
status_code: int = 200,
headers: typing.Optional[typing.Mapping[str, str]] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
):
self.template = template
self.context = context
content = template.render(context)
super().__init__(content, status_code, headers, media_type, background)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
request = self.context.get("request", {})
extensions = request.get("extensions", {})
if "http.response.template" in extensions:
await send(
{
"type": "http.response.template",
"template": self.template,
"context": self.context,
}
)
await super().__call__(scope, receive, send)
class Jinja2Templates:
"""
templates = Jinja2Templates("templates")
return templates.TemplateResponse("index.html", {"request": request})
"""
def __init__(
self, directory: typing.Union[str, PathLike], **env_options: typing.Any
) -> None:
assert jinja2 is not None, "jinja2 must be installed to use Jinja2Templates"
self.env = self._create_env(directory, **env_options)
def _create_env(
self, directory: typing.Union[str, PathLike], **env_options: typing.Any
) -> "jinja2.Environment":
@pass_context
def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
request = context["request"]
return request.url_for(name, **path_params)
loader = jinja2.FileSystemLoader(directory)
env_options.setdefault("loader", loader)
env_options.setdefault("autoescape", True)
env = jinja2.Environment(**env_options)
env.globals["url_for"] = url_for
return env
def get_template(self, name: str) -> "jinja2.Template":
return self.env.get_template(name)
def TemplateResponse(
self,
name: str,
context: dict,
status_code: int = 200,
headers: typing.Optional[typing.Mapping[str, str]] = None,
media_type: typing.Optional[str] = None,
background: typing.Optional[BackgroundTask] = None,
) -> _TemplateResponse:
if "request" not in context:
raise ValueError('context must include a "request" key')
template = self.get_template(name)
return _TemplateResponse(
template,
context,
status_code=status_code,
headers=headers,
media_type=media_type,
background=background,
) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/templating.py | templating.py |
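# Sketch: within a rendered template, the `url_for` global registered in
# _create_env() resolves application routes through the request in the context,
# e.g. (assuming a route named "homepage" exists on the app):
#
#     <a href="{{ url_for('homepage') }}">Home</a>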
import asyncio
import http
import typing
from zdppy_api.starlette.concurrency import run_in_threadpool
from zdppy_api.starlette.requests import Request
from zdppy_api.starlette.responses import PlainTextResponse, Response
from zdppy_api.starlette.types import ASGIApp, Message, Receive, Scope, Send
class HTTPException(Exception):
def __init__(
self,
status_code: int,
detail: typing.Optional[str] = None,
headers: typing.Optional[dict] = None,
) -> None:
if detail is None:
detail = http.HTTPStatus(status_code).phrase
self.status_code = status_code
self.detail = detail
self.headers = headers
def __repr__(self) -> str:
class_name = self.__class__.__name__
return f"{class_name}(status_code={self.status_code!r}, detail={self.detail!r})"
class ExceptionMiddleware:
def __init__(
self,
app: ASGIApp,
handlers: typing.Optional[
typing.Mapping[typing.Any, typing.Callable[[Request, Exception], Response]]
] = None,
debug: bool = False,
) -> None:
self.app = app
self.debug = debug # TODO: We ought to handle 404 cases if debug is set.
self._status_handlers: typing.Dict[int, typing.Callable] = {}
self._exception_handlers: typing.Dict[
typing.Type[Exception], typing.Callable
] = {HTTPException: self.http_exception}
if handlers is not None:
for key, value in handlers.items():
self.add_exception_handler(key, value)
def add_exception_handler(
self,
exc_class_or_status_code: typing.Union[int, typing.Type[Exception]],
handler: typing.Callable[[Request, Exception], Response],
) -> None:
if isinstance(exc_class_or_status_code, int):
self._status_handlers[exc_class_or_status_code] = handler
else:
assert issubclass(exc_class_or_status_code, Exception)
self._exception_handlers[exc_class_or_status_code] = handler
def _lookup_exception_handler(
self, exc: Exception
) -> typing.Optional[typing.Callable]:
for cls in type(exc).__mro__:
if cls in self._exception_handlers:
return self._exception_handlers[cls]
return None
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] != "http":
await self.app(scope, receive, send)
return
response_started = False
async def sender(message: Message) -> None:
nonlocal response_started
if message["type"] == "http.response.start":
response_started = True
await send(message)
try:
await self.app(scope, receive, sender)
except Exception as exc:
handler = None
if isinstance(exc, HTTPException):
handler = self._status_handlers.get(exc.status_code)
if handler is None:
handler = self._lookup_exception_handler(exc)
if handler is None:
raise exc
if response_started:
msg = "Caught handled exception, but response already started."
raise RuntimeError(msg) from exc
request = Request(scope, receive=receive)
if asyncio.iscoroutinefunction(handler):
response = await handler(request, exc)
else:
response = await run_in_threadpool(handler, request, exc)
await response(scope, receive, sender)
def http_exception(self, request: Request, exc: HTTPException) -> Response:
if exc.status_code in {204, 304}:
return Response(status_code=exc.status_code, headers=exc.headers)
return PlainTextResponse(
exc.detail, status_code=exc.status_code, headers=exc.headers
) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/exceptions.py | exceptions.py |
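# Sketch of installing a custom handler (`not_found` is a hypothetical example):
#
#     def not_found(request, exc):
#         return PlainTextResponse("Not here", status_code=exc.status_code)
#
#     app = ExceptionMiddleware(app, handlers={404: not_found})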
import gzip
import io
import typing
from zdppy_api.starlette.datastructures import Headers, MutableHeaders
from zdppy_api.starlette.types import ASGIApp, Message, Receive, Scope, Send
class GZipMiddleware:
def __init__(
self, app: ASGIApp, minimum_size: int = 500, compresslevel: int = 9
) -> None:
self.app = app
self.minimum_size = minimum_size
self.compresslevel = compresslevel
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] == "http":
headers = Headers(scope=scope)
if "gzip" in headers.get("Accept-Encoding", ""):
responder = GZipResponder(
self.app, self.minimum_size, compresslevel=self.compresslevel
)
await responder(scope, receive, send)
return
await self.app(scope, receive, send)
class GZipResponder:
def __init__(self, app: ASGIApp, minimum_size: int, compresslevel: int = 9) -> None:
self.app = app
self.minimum_size = minimum_size
self.send: Send = unattached_send
self.initial_message: Message = {}
self.started = False
self.gzip_buffer = io.BytesIO()
self.gzip_file = gzip.GzipFile(
mode="wb", fileobj=self.gzip_buffer, compresslevel=compresslevel
)
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
self.send = send
await self.app(scope, receive, self.send_with_gzip)
async def send_with_gzip(self, message: Message) -> None:
message_type = message["type"]
if message_type == "http.response.start":
# Don't send the initial message until we've determined how to
# modify the outgoing headers correctly.
self.initial_message = message
elif message_type == "http.response.body" and not self.started:
self.started = True
body = message.get("body", b"")
more_body = message.get("more_body", False)
if len(body) < self.minimum_size and not more_body:
# Don't apply GZip to small outgoing responses.
await self.send(self.initial_message)
await self.send(message)
elif not more_body:
# Standard GZip response.
self.gzip_file.write(body)
self.gzip_file.close()
body = self.gzip_buffer.getvalue()
headers = MutableHeaders(raw=self.initial_message["headers"])
headers["Content-Encoding"] = "gzip"
headers["Content-Length"] = str(len(body))
headers.add_vary_header("Accept-Encoding")
message["body"] = body
await self.send(self.initial_message)
await self.send(message)
else:
# Initial body in streaming GZip response.
headers = MutableHeaders(raw=self.initial_message["headers"])
headers["Content-Encoding"] = "gzip"
headers.add_vary_header("Accept-Encoding")
del headers["Content-Length"]
self.gzip_file.write(body)
message["body"] = self.gzip_buffer.getvalue()
self.gzip_buffer.seek(0)
self.gzip_buffer.truncate()
await self.send(self.initial_message)
await self.send(message)
elif message_type == "http.response.body":
# Remaining body in streaming GZip response.
body = message.get("body", b"")
more_body = message.get("more_body", False)
self.gzip_file.write(body)
if not more_body:
self.gzip_file.close()
message["body"] = self.gzip_buffer.getvalue()
self.gzip_buffer.seek(0)
self.gzip_buffer.truncate()
await self.send(message)
async def unattached_send(message: Message) -> typing.NoReturn:
raise RuntimeError("send awaitable not set") # pragma: no cover | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/middleware/gzip.py | gzip.py |
import io
import math
import sys
import typing
import warnings
from zdppy_api import anyio
from zdppy_api.starlette.types import Receive, Scope, Send
warnings.warn(
"starlette.middleware.wsgi is deprecated and will be removed in a future release. "
"Please refer to https://github.com/abersheeran/a2wsgi as a replacement.",
DeprecationWarning,
)
def build_environ(scope: Scope, body: bytes) -> dict:
"""
Builds a scope and request body into a WSGI environ object.
"""
environ = {
"REQUEST_METHOD": scope["method"],
"SCRIPT_NAME": scope.get("root_path", "").encode("utf8").decode("latin1"),
"PATH_INFO": scope["path"].encode("utf8").decode("latin1"),
"QUERY_STRING": scope["query_string"].decode("ascii"),
"SERVER_PROTOCOL": f"HTTP/{scope['http_version']}",
"wsgi.version": (1, 0),
"wsgi.url_scheme": scope.get("scheme", "http"),
"wsgi.input": io.BytesIO(body),
"wsgi.errors": sys.stdout,
"wsgi.multithread": True,
"wsgi.multiprocess": True,
"wsgi.run_once": False,
}
# Get server name and port - required in WSGI, not in ASGI
server = scope.get("server") or ("localhost", 80)
environ["SERVER_NAME"] = server[0]
environ["SERVER_PORT"] = server[1]
# Get client IP address
if scope.get("client"):
environ["REMOTE_ADDR"] = scope["client"][0]
# Go through headers and make them into environ entries
for name, value in scope.get("headers", []):
name = name.decode("latin1")
if name == "content-length":
corrected_name = "CONTENT_LENGTH"
elif name == "content-type":
corrected_name = "CONTENT_TYPE"
else:
corrected_name = f"HTTP_{name}".upper().replace("-", "_")
# HTTPbis says only ASCII chars are allowed in headers, but we decode as
# latin-1 just in case
value = value.decode("latin1")
if corrected_name in environ:
value = environ[corrected_name] + "," + value
environ[corrected_name] = value
return environ
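# For example (a sketch), a scope such as
#     {"method": "GET", "path": "/items", "query_string": b"q=1",
#      "http_version": "1.1", "headers": [(b"host", b"example.com")]}
# yields an environ containing REQUEST_METHOD="GET", PATH_INFO="/items",
# QUERY_STRING="q=1" and HTTP_HOST="example.com".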
class WSGIMiddleware:
def __init__(self, app: typing.Callable) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
assert scope["type"] == "http"
responder = WSGIResponder(self.app, scope)
await responder(receive, send)
class WSGIResponder:
def __init__(self, app: typing.Callable, scope: Scope) -> None:
self.app = app
self.scope = scope
self.status = None
self.response_headers = None
self.stream_send, self.stream_receive = anyio.create_memory_object_stream(
math.inf
)
self.response_started = False
self.exc_info: typing.Any = None
async def __call__(self, receive: Receive, send: Send) -> None:
body = b""
more_body = True
while more_body:
message = await receive()
body += message.get("body", b"")
more_body = message.get("more_body", False)
environ = build_environ(self.scope, body)
async with anyio.create_task_group() as task_group:
task_group.start_soon(self.sender, send)
async with self.stream_send:
await anyio.to_thread.run_sync(self.wsgi, environ, self.start_response)
if self.exc_info is not None:
raise self.exc_info[0].with_traceback(self.exc_info[1], self.exc_info[2])
async def sender(self, send: Send) -> None:
async with self.stream_receive:
async for message in self.stream_receive:
await send(message)
def start_response(
self,
status: str,
response_headers: typing.List[typing.Tuple[str, str]],
exc_info: typing.Any = None,
) -> None:
self.exc_info = exc_info
if not self.response_started:
self.response_started = True
status_code_string, _ = status.split(" ", 1)
status_code = int(status_code_string)
headers = [
(name.strip().encode("ascii").lower(), value.strip().encode("ascii"))
for name, value in response_headers
]
anyio.from_thread.run(
self.stream_send.send,
{
"type": "http.response.start",
"status": status_code,
"headers": headers,
},
)
def wsgi(self, environ: dict, start_response: typing.Callable) -> None:
for chunk in self.app(environ, start_response):
anyio.from_thread.run(
self.stream_send.send,
{"type": "http.response.body", "body": chunk, "more_body": True},
)
anyio.from_thread.run(
self.stream_send.send, {"type": "http.response.body", "body": b""}
) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/middleware/wsgi.py | wsgi.py |
import typing
from zdppy_api.starlette.datastructures import URL, Headers
from zdppy_api.starlette.responses import PlainTextResponse, RedirectResponse, Response
from zdppy_api.starlette.types import ASGIApp, Receive, Scope, Send
ENFORCE_DOMAIN_WILDCARD = "Domain wildcard patterns must be like '*.example.com'."
class TrustedHostMiddleware:
def __init__(
self,
app: ASGIApp,
allowed_hosts: typing.Optional[typing.Sequence[str]] = None,
www_redirect: bool = True,
) -> None:
if allowed_hosts is None:
allowed_hosts = ["*"]
for pattern in allowed_hosts:
assert "*" not in pattern[1:], ENFORCE_DOMAIN_WILDCARD
if pattern.startswith("*") and pattern != "*":
assert pattern.startswith("*."), ENFORCE_DOMAIN_WILDCARD
self.app = app
self.allowed_hosts = list(allowed_hosts)
self.allow_any = "*" in allowed_hosts
self.www_redirect = www_redirect
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if self.allow_any or scope["type"] not in (
"http",
"websocket",
): # pragma: no cover
await self.app(scope, receive, send)
return
headers = Headers(scope=scope)
host = headers.get("host", "").split(":")[0]
is_valid_host = False
found_www_redirect = False
for pattern in self.allowed_hosts:
if host == pattern or (
pattern.startswith("*") and host.endswith(pattern[1:])
):
is_valid_host = True
break
elif "www." + host == pattern:
found_www_redirect = True
if is_valid_host:
await self.app(scope, receive, send)
else:
response: Response
if found_www_redirect and self.www_redirect:
url = URL(scope=scope)
redirect_url = url.replace(netloc="www." + url.netloc)
response = RedirectResponse(url=str(redirect_url))
else:
response = PlainTextResponse("Invalid host header", status_code=400)
await response(scope, receive, send) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/middleware/trustedhost.py | trustedhost.py |
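# Usage sketch:
#
#     app = TrustedHostMiddleware(
#         app, allowed_hosts=["example.com", "*.example.com"]
#     )
#
# Requests whose Host header matches neither pattern receive a 400 response.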
import typing
from zdppy_api import anyio
from zdppy_api.starlette.requests import Request
from zdppy_api.starlette.responses import Response, StreamingResponse
from zdppy_api.starlette.types import ASGIApp, Receive, Scope, Send
RequestResponseEndpoint = typing.Callable[[Request], typing.Awaitable[Response]]
DispatchFunction = typing.Callable[
[Request, RequestResponseEndpoint], typing.Awaitable[Response]
]
class BaseHTTPMiddleware:
def __init__(
self, app: ASGIApp, dispatch: typing.Optional[DispatchFunction] = None
) -> None:
self.app = app
self.dispatch_func = self.dispatch if dispatch is None else dispatch
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] != "http":
await self.app(scope, receive, send)
return
async def call_next(request: Request) -> Response:
app_exc: typing.Optional[Exception] = None
send_stream, recv_stream = anyio.create_memory_object_stream()
async def coro() -> None:
nonlocal app_exc
async with send_stream:
try:
await self.app(scope, request.receive, send_stream.send)
except Exception as exc:
app_exc = exc
task_group.start_soon(coro)
try:
message = await recv_stream.receive()
except anyio.EndOfStream:
if app_exc is not None:
raise app_exc
raise RuntimeError("No response returned.")
assert message["type"] == "http.response.start"
async def body_stream() -> typing.AsyncGenerator[bytes, None]:
async with recv_stream:
async for message in recv_stream:
assert message["type"] == "http.response.body"
yield message.get("body", b"")
if app_exc is not None:
raise app_exc
response = StreamingResponse(
status_code=message["status"], content=body_stream()
)
response.raw_headers = message["headers"]
return response
async with anyio.create_task_group() as task_group:
request = Request(scope, receive=receive)
response = await self.dispatch_func(request, call_next)
await response(scope, receive, send)
task_group.cancel_scope.cancel()
async def dispatch(
self, request: Request, call_next: RequestResponseEndpoint
) -> Response:
raise NotImplementedError() # pragma: no cover | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/middleware/base.py | base.py |
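# Usage sketch: subclass and override dispatch() to run code before and after
# each request (header name and value are illustrative):
#
#     class CustomHeaderMiddleware(BaseHTTPMiddleware):
#         async def dispatch(self, request, call_next):
#             response = await call_next(request)
#             response.headers["X-Custom"] = "example"
#             return response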
import json
import typing
from base64 import b64decode, b64encode
import itsdangerous
from itsdangerous.exc import BadSignature
from zdppy_api.starlette.datastructures import MutableHeaders, Secret
from zdppy_api.starlette.requests import HTTPConnection
from zdppy_api.starlette.types import ASGIApp, Message, Receive, Scope, Send
class SessionMiddleware:
def __init__(
self,
app: ASGIApp,
secret_key: typing.Union[str, Secret],
session_cookie: str = "session",
max_age: typing.Optional[int] = 14 * 24 * 60 * 60, # 14 days, in seconds
path: str = "/",
same_site: str = "lax",
https_only: bool = False,
) -> None:
self.app = app
self.signer = itsdangerous.TimestampSigner(str(secret_key))
self.session_cookie = session_cookie
self.max_age = max_age
self.path = path
self.security_flags = "httponly; samesite=" + same_site
if https_only: # Secure flag can be used with HTTPS only
self.security_flags += "; secure"
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] not in ("http", "websocket"): # pragma: no cover
await self.app(scope, receive, send)
return
connection = HTTPConnection(scope)
initial_session_was_empty = True
if self.session_cookie in connection.cookies:
data = connection.cookies[self.session_cookie].encode("utf-8")
try:
data = self.signer.unsign(data, max_age=self.max_age)
scope["session"] = json.loads(b64decode(data))
initial_session_was_empty = False
except BadSignature:
scope["session"] = {}
else:
scope["session"] = {}
async def send_wrapper(message: Message) -> None:
if message["type"] == "http.response.start":
if scope["session"]:
# We have session data to persist.
data = b64encode(json.dumps(scope["session"]).encode("utf-8"))
data = self.signer.sign(data)
headers = MutableHeaders(scope=message)
header_value = "{session_cookie}={data}; path={path}; {max_age}{security_flags}".format( # noqa E501
session_cookie=self.session_cookie,
data=data.decode("utf-8"),
path=self.path,
max_age=f"Max-Age={self.max_age}; " if self.max_age else "",
security_flags=self.security_flags,
)
headers.append("Set-Cookie", header_value)
elif not initial_session_was_empty:
# The session has been cleared.
headers = MutableHeaders(scope=message)
header_value = "{session_cookie}={data}; path={path}; {expires}{security_flags}".format( # noqa E501
session_cookie=self.session_cookie,
data="null",
path=self.path,
expires="expires=Thu, 01 Jan 1970 00:00:00 GMT; ",
security_flags=self.security_flags,
)
headers.append("Set-Cookie", header_value)
await send(message)
await self.app(scope, receive, send_wrapper) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/middleware/sessions.py | sessions.py |
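# Usage sketch: once installed, handlers read and write `request.session`,
# which round-trips through the signed cookie built above (the secret key
# shown is a placeholder):
#
#     app = SessionMiddleware(app, secret_key="change-me")
#
#     async def login(request):
#         request.session["user_id"] = 42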
import asyncio
import html
import inspect
import traceback
import typing
from zdppy_api.starlette.concurrency import run_in_threadpool
from zdppy_api.starlette.requests import Request
from zdppy_api.starlette.responses import HTMLResponse, PlainTextResponse, Response
from zdppy_api.starlette.types import ASGIApp, Message, Receive, Scope, Send
STYLES = """
p {
color: #211c1c;
}
.traceback-container {
border: 1px solid #038BB8;
}
.traceback-title {
background-color: #038BB8;
color: lemonchiffon;
padding: 12px;
font-size: 20px;
margin-top: 0px;
}
.frame-line {
padding-left: 10px;
font-family: monospace;
}
.frame-filename {
font-family: monospace;
}
.center-line {
background-color: #038BB8;
color: #f9f6e1;
padding: 5px 0px 5px 5px;
}
.lineno {
margin-right: 5px;
}
.frame-title {
font-weight: unset;
padding: 10px 10px 10px 10px;
background-color: #E4F4FD;
margin-right: 10px;
color: #191f21;
font-size: 17px;
border: 1px solid #c7dce8;
}
.collapse-btn {
float: right;
padding: 0px 5px 1px 5px;
border: solid 1px #96aebb;
cursor: pointer;
}
.collapsed {
display: none;
}
.source-code {
font-family: courier;
font-size: small;
padding-bottom: 10px;
}
"""
JS = """
<script type="text/javascript">
function collapse(element){
const frameId = element.getAttribute("data-frame-id");
const frame = document.getElementById(frameId);
if (frame.classList.contains("collapsed")){
element.innerHTML = "‒";
frame.classList.remove("collapsed");
} else {
element.innerHTML = "+";
frame.classList.add("collapsed");
}
}
</script>
"""
TEMPLATE = """
<html>
<head>
<style type='text/css'>
{styles}
</style>
<title>Starlette Debugger</title>
</head>
<body>
<h1>500 Server Error</h1>
<h2>{error}</h2>
<div class="traceback-container">
<p class="traceback-title">Traceback</p>
<div>{exc_html}</div>
</div>
{js}
</body>
</html>
"""
FRAME_TEMPLATE = """
<div>
<p class="frame-title">File <span class="frame-filename">{frame_filename}</span>,
line <i>{frame_lineno}</i>,
in <b>{frame_name}</b>
<span class="collapse-btn" data-frame-id="{frame_filename}-{frame_lineno}" onclick="collapse(this)">{collapse_button}</span>
</p>
<div id="{frame_filename}-{frame_lineno}" class="source-code {collapsed}">{code_context}</div>
</div>
""" # noqa: E501
LINE = """
<p><span class="frame-line">
<span class="lineno">{lineno}.</span> {line}</span></p>
"""
CENTER_LINE = """
<p class="center-line"><span class="frame-line center-line">
<span class="lineno">{lineno}.</span> {line}</span></p>
"""
class ServerErrorMiddleware:
"""
Handles returning 500 responses when a server error occurs.
If 'debug' is set, then traceback responses will be returned,
otherwise the designated 'handler' will be called.
This middleware class should generally be used to wrap *everything*
else up, so that unhandled exceptions anywhere in the stack
always result in an appropriate 500 response.
"""
def __init__(
self,
app: ASGIApp,
handler: typing.Optional[typing.Callable] = None,
debug: bool = False,
) -> None:
self.app = app
self.handler = handler
self.debug = debug
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] != "http":
await self.app(scope, receive, send)
return
response_started = False
async def _send(message: Message) -> None:
nonlocal response_started, send
if message["type"] == "http.response.start":
response_started = True
await send(message)
try:
await self.app(scope, receive, _send)
except Exception as exc:
request = Request(scope)
if self.debug:
# In debug mode, return traceback responses.
response = self.debug_response(request, exc)
elif self.handler is None:
# Use our default 500 error handler.
response = self.error_response(request, exc)
else:
# Use an installed 500 error handler.
if asyncio.iscoroutinefunction(self.handler):
response = await self.handler(request, exc)
else:
response = await run_in_threadpool(self.handler, request, exc)
if not response_started:
await response(scope, receive, send)
# We always continue to raise the exception.
# This allows servers to log the error, or allows test clients
# to optionally raise the error within the test case.
raise exc
def format_line(
self, index: int, line: str, frame_lineno: int, frame_index: int
) -> str:
values = {
# HTML escape - line could contain < or >
"line": html.escape(line).replace(" ", " "),
"lineno": (frame_lineno - frame_index) + index,
}
if index != frame_index:
return LINE.format(**values)
return CENTER_LINE.format(**values)
def generate_frame_html(self, frame: inspect.FrameInfo, is_collapsed: bool) -> str:
code_context = "".join(
self.format_line(index, line, frame.lineno, frame.index) # type: ignore
for index, line in enumerate(frame.code_context or [])
)
values = {
# HTML escape - filename could contain < or >, especially if it's a virtual
# file e.g. <stdin> in the REPL
"frame_filename": html.escape(frame.filename),
"frame_lineno": frame.lineno,
# HTML escape - if you try very hard it's possible to name a function with <
# or >
"frame_name": html.escape(frame.function),
"code_context": code_context,
"collapsed": "collapsed" if is_collapsed else "",
"collapse_button": "+" if is_collapsed else "‒",
}
return FRAME_TEMPLATE.format(**values)
def generate_html(self, exc: Exception, limit: int = 7) -> str:
traceback_obj = traceback.TracebackException.from_exception(
exc, capture_locals=True
)
exc_html = ""
is_collapsed = False
exc_traceback = exc.__traceback__
if exc_traceback is not None:
frames = inspect.getinnerframes(exc_traceback, limit)
for frame in reversed(frames):
exc_html += self.generate_frame_html(frame, is_collapsed)
is_collapsed = True
# escape error class and text
error = (
f"{html.escape(traceback_obj.exc_type.__name__)}: "
f"{html.escape(str(traceback_obj))}"
)
return TEMPLATE.format(styles=STYLES, js=JS, error=error, exc_html=exc_html)
def generate_plain_text(self, exc: Exception) -> str:
return "".join(traceback.format_exception(type(exc), exc, exc.__traceback__))
def debug_response(self, request: Request, exc: Exception) -> Response:
accept = request.headers.get("accept", "")
if "text/html" in accept:
content = self.generate_html(exc)
return HTMLResponse(content, status_code=500)
content = self.generate_plain_text(exc)
return PlainTextResponse(content, status_code=500)
def error_response(self, request: Request, exc: Exception) -> Response:
return PlainTextResponse("Internal Server Error", status_code=500) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/middleware/errors.py | errors.py |
import functools
import re
import typing
from zdppy_api.starlette.datastructures import Headers, MutableHeaders
from zdppy_api.starlette.responses import PlainTextResponse, Response
from zdppy_api.starlette.types import ASGIApp, Message, Receive, Scope, Send
ALL_METHODS = ("DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT")
SAFELISTED_HEADERS = {"Accept", "Accept-Language", "Content-Language", "Content-Type"}
class CORSMiddleware:
def __init__(
self,
app: ASGIApp,
allow_origins: typing.Sequence[str] = (),
allow_methods: typing.Sequence[str] = ("GET",),
allow_headers: typing.Sequence[str] = (),
allow_credentials: bool = False,
allow_origin_regex: typing.Optional[str] = None,
expose_headers: typing.Sequence[str] = (),
max_age: int = 600,
) -> None:
if "*" in allow_methods:
allow_methods = ALL_METHODS
compiled_allow_origin_regex = None
if allow_origin_regex is not None:
compiled_allow_origin_regex = re.compile(allow_origin_regex)
allow_all_origins = "*" in allow_origins
allow_all_headers = "*" in allow_headers
preflight_explicit_allow_origin = not allow_all_origins or allow_credentials
simple_headers = {}
if allow_all_origins:
simple_headers["Access-Control-Allow-Origin"] = "*"
if allow_credentials:
simple_headers["Access-Control-Allow-Credentials"] = "true"
if expose_headers:
simple_headers["Access-Control-Expose-Headers"] = ", ".join(expose_headers)
preflight_headers = {}
if preflight_explicit_allow_origin:
# The origin value will be set in preflight_response() if it is allowed.
preflight_headers["Vary"] = "Origin"
else:
preflight_headers["Access-Control-Allow-Origin"] = "*"
preflight_headers.update(
{
"Access-Control-Allow-Methods": ", ".join(allow_methods),
"Access-Control-Max-Age": str(max_age),
}
)
allow_headers = sorted(SAFELISTED_HEADERS | set(allow_headers))
if allow_headers and not allow_all_headers:
preflight_headers["Access-Control-Allow-Headers"] = ", ".join(allow_headers)
if allow_credentials:
preflight_headers["Access-Control-Allow-Credentials"] = "true"
self.app = app
self.allow_origins = allow_origins
self.allow_methods = allow_methods
self.allow_headers = [h.lower() for h in allow_headers]
self.allow_all_origins = allow_all_origins
self.allow_all_headers = allow_all_headers
self.preflight_explicit_allow_origin = preflight_explicit_allow_origin
self.allow_origin_regex = compiled_allow_origin_regex
self.simple_headers = simple_headers
self.preflight_headers = preflight_headers
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
if scope["type"] != "http": # pragma: no cover
await self.app(scope, receive, send)
return
method = scope["method"]
headers = Headers(scope=scope)
origin = headers.get("origin")
if origin is None:
await self.app(scope, receive, send)
return
if method == "OPTIONS" and "access-control-request-method" in headers:
response = self.preflight_response(request_headers=headers)
await response(scope, receive, send)
return
await self.simple_response(scope, receive, send, request_headers=headers)
def is_allowed_origin(self, origin: str) -> bool:
if self.allow_all_origins:
return True
if self.allow_origin_regex is not None and self.allow_origin_regex.fullmatch(
origin
):
return True
return origin in self.allow_origins
def preflight_response(self, request_headers: Headers) -> Response:
requested_origin = request_headers["origin"]
requested_method = request_headers["access-control-request-method"]
requested_headers = request_headers.get("access-control-request-headers")
headers = dict(self.preflight_headers)
failures = []
if self.is_allowed_origin(origin=requested_origin):
if self.preflight_explicit_allow_origin:
# The "else" case is already accounted for in self.preflight_headers
# and the value would be "*".
headers["Access-Control-Allow-Origin"] = requested_origin
else:
failures.append("origin")
if requested_method not in self.allow_methods:
failures.append("method")
# If we allow all headers, then we have to mirror back any requested
# headers in the response.
if self.allow_all_headers and requested_headers is not None:
headers["Access-Control-Allow-Headers"] = requested_headers
elif requested_headers is not None:
for header in [h.lower() for h in requested_headers.split(",")]:
if header.strip() not in self.allow_headers:
failures.append("headers")
break
# We don't strictly need to use 400 responses here, since it's up to
# the browser to enforce the CORS policy, but it's more informative
# if we do.
if failures:
failure_text = "Disallowed CORS " + ", ".join(failures)
return PlainTextResponse(failure_text, status_code=400, headers=headers)
return PlainTextResponse("OK", status_code=200, headers=headers)
async def simple_response(
self, scope: Scope, receive: Receive, send: Send, request_headers: Headers
) -> None:
send = functools.partial(self.send, send=send, request_headers=request_headers)
await self.app(scope, receive, send)
async def send(
self, message: Message, send: Send, request_headers: Headers
) -> None:
if message["type"] != "http.response.start":
await send(message)
return
message.setdefault("headers", [])
headers = MutableHeaders(scope=message)
headers.update(self.simple_headers)
origin = request_headers["Origin"]
has_cookie = "cookie" in request_headers
# If request includes any cookie headers, then we must respond
# with the specific origin instead of '*'.
if self.allow_all_origins and has_cookie:
self.allow_explicit_origin(headers, origin)
# If we only allow specific origins, then we have to mirror back
# the Origin header in the response.
elif not self.allow_all_origins and self.is_allowed_origin(origin=origin):
self.allow_explicit_origin(headers, origin)
await send(message)
@staticmethod
def allow_explicit_origin(headers: MutableHeaders, origin: str) -> None:
headers["Access-Control-Allow-Origin"] = origin
headers.add_vary_header("Origin") | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/starlette/middleware/cors.py | cors.py |
from __future__ import with_statement, absolute_import, print_function
from six import (
binary_type,
text_type,
PY3,
)
from .decoders import *
from .exceptions import *
try:
from urlparse import parse_qs
except ImportError:
from urllib.parse import parse_qs
import os
import re
import sys
import shutil
import logging
import tempfile
from io import BytesIO
from numbers import Number
# Unique missing object.
_missing = object()
# States for the querystring parser.
STATE_BEFORE_FIELD = 0
STATE_FIELD_NAME = 1
STATE_FIELD_DATA = 2
# States for the multipart parser
STATE_START = 0
STATE_START_BOUNDARY = 1
STATE_HEADER_FIELD_START = 2
STATE_HEADER_FIELD = 3
STATE_HEADER_VALUE_START = 4
STATE_HEADER_VALUE = 5
STATE_HEADER_VALUE_ALMOST_DONE = 6
STATE_HEADERS_ALMOST_DONE = 7
STATE_PART_DATA_START = 8
STATE_PART_DATA = 9
STATE_PART_DATA_END = 10
STATE_END = 11
STATES = [
"START",
"START_BOUNDARY", "HEADER_FEILD_START", "HEADER_FIELD", "HEADER_VALUE_START", "HEADER_VALUE",
"HEADER_VALUE_ALMOST_DONE", "HEADRES_ALMOST_DONE", "PART_DATA_START", "PART_DATA", "PART_DATA_END", "END"
]
# Flags for the multipart parser.
FLAG_PART_BOUNDARY = 1
FLAG_LAST_BOUNDARY = 2
# Get constants. Since iterating over a str on Python 2 gives you a 1-length
# string, but iterating over a bytes object on Python 3 gives you an integer,
# we need to save these constants.
CR = b'\r'[0]
LF = b'\n'[0]
COLON = b':'[0]
SPACE = b' '[0]
HYPHEN = b'-'[0]
AMPERSAND = b'&'[0]
SEMICOLON = b';'[0]
LOWER_A = b'a'[0]
LOWER_Z = b'z'[0]
NULL = b'\x00'[0]
# Lower-casing a character is different, because of the difference between
# str on Py2, and bytes on Py3. Same with getting the ordinal value of a byte,
# and joining a list of bytes together.
# These functions abstract that.
if PY3: # pragma: no cover
lower_char = lambda c: c | 0x20
ord_char = lambda c: c
join_bytes = lambda b: bytes(list(b))
else: # pragma: no cover
lower_char = lambda c: c.lower()
ord_char = lambda c: ord(c)
join_bytes = lambda b: b''.join(list(b))
# These are regexes for parsing header values.
SPECIAL_CHARS = re.escape(b'()<>@,;:\\"/[]?={} \t')
QUOTED_STR = br'"(?:\\.|[^"])*"'
VALUE_STR = br'(?:[^' + SPECIAL_CHARS + br']+|' + QUOTED_STR + br')'
OPTION_RE_STR = (
br'(?:;|^)\s*([^' + SPECIAL_CHARS + br']+)\s*=\s*(' + VALUE_STR + br')'
)
OPTION_RE = re.compile(OPTION_RE_STR)
QUOTE = b'"'[0]
def parse_options_header(value):
"""
Parses a Content-Type header into a value in the following format:
(content_type, {parameters})
"""
if not value:
return (b'', {})
# If we are passed a string, we assume that it conforms to WSGI and does
# not contain any code point that's not in latin-1.
if isinstance(value, text_type): # pragma: no cover
value = value.encode('latin-1')
# If we have no options, return the string as-is.
if b';' not in value:
return (value.lower().strip(), {})
# Split at the first semicolon, to get our value and then options.
ctype, rest = value.split(b';', 1)
options = {}
# Parse the options.
for match in OPTION_RE.finditer(rest):
key = match.group(1).lower()
value = match.group(2)
if value[0] == QUOTE and value[-1] == QUOTE:
# Unquote the value.
value = value[1:-1]
value = value.replace(b'\\\\', b'\\').replace(b'\\"', b'"')
# If the value is a filename, we need to fix a bug on IE6 that sends
# the full file path instead of the filename.
if key == b'filename':
if value[1:3] == b':\\' or value[:2] == b'\\\\':
value = value.split(b'\\')[-1]
options[key] = value
return ctype, options
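# For example:
#     parse_options_header(b'form-data; name="file"; filename="cat.png"')
# returns (b'form-data', {b'name': b'file', b'filename': b'cat.png'}).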
class Field(object):
"""A Field object represents a (parsed) form field. It represents a single
field with a corresponding name and value.
The name that a :class:`Field` will be instantiated with is the same name
that would be found in the following HTML::
<input name="name_goes_here" type="text"/>
This class defines two methods, :meth:`on_data` and :meth:`on_end`, that
will be called when data is written to the Field, and when the Field is
finalized, respectively.
:param name: the name of the form field
"""
def __init__(self, name):
self._name = name
self._value = []
# We cache the joined version of _value for speed.
self._cache = _missing
@classmethod
def from_value(klass, name, value):
"""Create an instance of a :class:`Field`, and set the corresponding
value - either None or an actual value. This method will also
finalize the Field itself.
:param name: the name of the form field
:param value: the value of the form field - either a bytestring or
None
"""
f = klass(name)
if value is None:
f.set_none()
else:
f.write(value)
f.finalize()
return f
def write(self, data):
"""Write some data into the form field.
:param data: a bytestring
"""
return self.on_data(data)
def on_data(self, data):
"""This method is a callback that will be called whenever data is
written to the Field.
:param data: a bytestring
"""
self._value.append(data)
self._cache = _missing
return len(data)
def on_end(self):
"""This method is called whenever the Field is finalized.
"""
if self._cache is _missing:
self._cache = b''.join(self._value)
def finalize(self):
"""Finalize the form field.
"""
self.on_end()
def close(self):
"""Close the Field object. This will free any underlying cache.
"""
# Free our value array.
if self._cache is _missing:
self._cache = b''.join(self._value)
del self._value
def set_none(self):
"""Some fields in a querystring can possibly have a value of None - for
example, the string "foo&bar=&baz=asdf" will have a field with the
name "foo" and value None, one with name "bar" and value "", and one
with name "baz" and value "asdf". Since the write() interface doesn't
support writing None, this function will set the field value to None.
"""
self._cache = None
@property
def field_name(self):
"""This property returns the name of the field."""
return self._name
@property
def value(self):
"""This property returns the value of the form field."""
if self._cache is _missing:
self._cache = b''.join(self._value)
return self._cache
def __eq__(self, other):
if isinstance(other, Field):
return (
self.field_name == other.field_name and
self.value == other.value
)
else:
return NotImplemented
def __repr__(self):
if len(self.value) > 97:
# We get the repr, and then insert three dots before the final
# quote.
v = repr(self.value[:97])[:-1] + "...'"
else:
v = repr(self.value)
return "%s(field_name=%r, value=%s)" % (
self.__class__.__name__,
self.field_name,
v
)
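# Usage sketch:
#
#     f = Field.from_value(b'name', b'Alice')
#     assert f.field_name == b'name' and f.value == b'Alice'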
class File(object):
"""This class represents an uploaded file. It handles writing file data to
either an in-memory file or a temporary file on-disk, if the optional
threshold is passed.
There are some options that can be passed to the File to change behavior
of the class. Valid options are as follows:
.. list-table::
:widths: 15 5 5 30
:header-rows: 1
* - Name
- Type
- Default
- Description
* - UPLOAD_DIR
- `str`
- None
- The directory to store uploaded files in. If this is None, a
temporary file will be created in the system's standard location.
* - UPLOAD_DELETE_TMP
- `bool`
- True
- Delete automatically created TMP file
* - UPLOAD_KEEP_FILENAME
- `bool`
- False
- Whether or not to keep the filename of the uploaded file. If True,
then the filename will be converted to a safe representation (e.g.
by removing any invalid path segments), and then saved with the
same name. Otherwise, a temporary name will be used.
* - UPLOAD_KEEP_EXTENSIONS
- `bool`
- False
- Whether or not to keep the uploaded file's extension. If False, the
file will be saved with the default temporary extension (usually
".tmp"). Otherwise, the file's extension will be maintained. Note
that this will properly combine with the UPLOAD_KEEP_FILENAME
setting.
* - MAX_MEMORY_FILE_SIZE
- `int`
- 1 MiB
- The maximum number of bytes of a File to keep in memory. By
default, the contents of a File are kept in memory until a certain
limit is reached, after which the contents of the File are written
to a temporary file. This behavior can be disabled by setting this
value to an appropriately large value (or, for example, infinity,
such as `float('inf')`).
:param file_name: The name of the file that this :class:`File` represents
:param field_name: The field name that uploaded this file. Note that this
can be None, if, for example, the file was uploaded
with Content-Type application/octet-stream
:param config: The configuration for this File. See above for valid
configuration keys and their corresponding values.
"""
def __init__(self, file_name, field_name=None, config={}):
# Save configuration, set other variables default.
self.logger = logging.getLogger(__name__)
self._config = config
self._in_memory = True
self._bytes_written = 0
self._fileobj = BytesIO()
# Save the provided field/file name.
self._field_name = field_name
self._file_name = file_name
# Our actual file name is None by default, since, depending on our
# config, we may not actually use the provided name.
self._actual_file_name = None
# Split the extension from the filename.
if file_name is not None:
base, ext = os.path.splitext(file_name)
self._file_base = base
self._ext = ext
@property
def field_name(self):
"""The form field associated with this file. May be None if there isn't
one, for example when we have an application/octet-stream upload.
"""
return self._field_name
@property
def file_name(self):
"""The file name given in the upload request.
"""
return self._file_name
@property
def actual_file_name(self):
"""The file name that this file is saved as. Will be None if it's not
currently saved on disk.
"""
return self._actual_file_name
@property
def file_object(self):
"""The file object that we're currently writing to. Note that this
will either be an instance of a :class:`io.BytesIO`, or a regular file
object.
"""
return self._fileobj
@property
def size(self):
"""The total size of this file, counted as the number of bytes that
currently have been written to the file.
"""
return self._bytes_written
@property
def in_memory(self):
"""A boolean representing whether or not this file object is currently
stored in-memory or on-disk.
"""
return self._in_memory
def flush_to_disk(self):
"""If the file is already on-disk, do nothing. Otherwise, copy from
the in-memory buffer to a disk file, and then reassign our internal
file object to this new disk file.
Note that if you attempt to flush a file that is already on-disk, a
warning will be logged to this module's logger.
"""
if not self._in_memory:
self.logger.warning(
"Trying to flush to disk when we're not in memory"
)
return
# Go back to the start of our file.
self._fileobj.seek(0)
# Open a new file.
new_file = self._get_disk_file()
# Copy the file objects.
shutil.copyfileobj(self._fileobj, new_file)
# Seek to the new position in our new file.
new_file.seek(self._bytes_written)
# Reassign the fileobject.
old_fileobj = self._fileobj
self._fileobj = new_file
# We're no longer in memory.
self._in_memory = False
# Close the old file object.
old_fileobj.close()
def _get_disk_file(self):
"""This function is responsible for getting a file object on-disk for us.
"""
self.logger.info("Opening a file on disk")
file_dir = self._config.get('UPLOAD_DIR')
keep_filename = self._config.get('UPLOAD_KEEP_FILENAME', False)
keep_extensions = self._config.get('UPLOAD_KEEP_EXTENSIONS', False)
delete_tmp = self._config.get('UPLOAD_DELETE_TMP', True)
# If we have a directory and are to keep the filename...
if file_dir is not None and keep_filename:
self.logger.info("Saving with filename in: %r", file_dir)
# Build our filename.
# TODO: what happens if we don't have a filename?
fname = self._file_base
if keep_extensions:
fname = fname + self._ext
path = os.path.join(file_dir, fname)
try:
self.logger.info("Opening file: %r", path)
tmp_file = open(path, 'w+b')
except (IOError, OSError):
self.logger.exception("Error opening temporary file")
raise FileError("Error opening temporary file: %r" % path)
else:
# Build the options dict.
# Note that on Python 3, tempfile doesn't support byte names. We
# decode our paths using the default filesystem encoding.
options = {}
if keep_extensions:
ext = self._ext
if isinstance(ext, binary_type):
ext = ext.decode(sys.getfilesystemencoding())
options['suffix'] = ext
if file_dir is not None:
d = file_dir
if isinstance(d, binary_type):
d = d.decode(sys.getfilesystemencoding())
options['dir'] = d
options['delete'] = delete_tmp
# Create a temporary (named) file with the appropriate settings.
self.logger.info("Creating a temporary file with options: %r",
options)
try:
tmp_file = tempfile.NamedTemporaryFile(**options)
except (IOError, OSError):
self.logger.exception("Error creating named temporary file")
raise FileError("Error creating named temporary file")
fname = tmp_file.name
# Encode filename as bytes.
if isinstance(fname, text_type):
fname = fname.encode(sys.getfilesystemencoding())
self._actual_file_name = fname
return tmp_file
def write(self, data):
"""Write some data to the File.
:param data: a bytestring
"""
return self.on_data(data)
def on_data(self, data):
"""This method is a callback that will be called whenever data is
written to the File.
:param data: a bytestring
"""
pos = self._fileobj.tell()
bwritten = self._fileobj.write(data)
# A true file object's write() returns None, so compute the count ourselves.
if bwritten is None:
bwritten = self._fileobj.tell() - pos
# If the number of bytes written doesn't match the data length, warn and return early.
if bwritten != len(data):
self.logger.warning("bwritten != len(data) (%d != %d)", bwritten,
len(data))
return bwritten
# Keep track of how many bytes we've written.
self._bytes_written += bwritten
# If we're in-memory and are over our limit, we create a file.
if (self._in_memory and
self._config.get('MAX_MEMORY_FILE_SIZE') is not None and
(self._bytes_written >
self._config.get('MAX_MEMORY_FILE_SIZE'))):
self.logger.info("Flushing to disk")
self.flush_to_disk()
# Return the number of bytes written.
return bwritten
def on_end(self):
"""This method is called whenever the Field is finalized.
"""
# Flush the underlying file object
self._fileobj.flush()
def finalize(self):
"""Finalize the form file. This will not close the underlying file,
but simply signal that we are finished writing to the File.
"""
self.on_end()
def close(self):
"""Close the File object. This will actually close the underlying
file object (whether it's a :class:`io.BytesIO` or an actual file
object).
"""
self._fileobj.close()
def __repr__(self):
return "%s(file_name=%r, field_name=%r)" % (
self.__class__.__name__,
self.file_name,
self.field_name
)
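# Usage sketch: data accumulates in memory until MAX_MEMORY_FILE_SIZE is
# exceeded, at which point it is flushed to a (temporary) file on disk:
#
#     f = File(b'upload.bin', config={'MAX_MEMORY_FILE_SIZE': 1024})
#     f.write(b'x' * 2048)   # crosses the limit, so flush_to_disk() runs
#     f.finalize()
#     assert not f.in_memory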
class BaseParser(object):
"""This class is the base class for all parsers. It contains the logic for
calling and adding callbacks.
A callback can be one of two different forms. "Notification callbacks" are
callbacks that are called when something happens - for example, when a new
part of a multipart message is encountered by the parser. "Data callbacks"
are called when we get some sort of data - for example, part of the body of
a multipart chunk. Notification callbacks are called with no parameters,
whereas data callbacks are called with three, as follows::
data_callback(data, start, end)
The "data" parameter is a bytestring (i.e. "foo" on Python 2, or b"foo" on
Python 3). "start" and "end" are integer indexes into the "data" string
that represent the data of interest. Thus, in a data callback, the slice
`data[start:end]` represents the data that the callback is "interested in".
The callback is not passed a copy of the data, since copying severely hurts
performance.
"""
def __init__(self):
self.logger = logging.getLogger(__name__)
def callback(self, name, data=None, start=None, end=None):
"""This function calls a provided callback with some data. If the
callback is not set, will do nothing.
:param name: The name of the callback to call (as a string).
:param data: Data to pass to the callback. If None, then it is
assumed that the callback is a notification callback,
and no parameters are given.
:param start: An integer that is passed to the data callback.
:param end: An integer that is passed to the data callback.
"""
name = "on_" + name
func = self.callbacks.get(name)
if func is None:
return
# Depending on whether we're given a buffer...
if data is not None:
# Don't do anything if we have start == end.
if start is not None and start == end:
return
self.logger.debug("Calling %s with data[%d:%d]", name, start, end)
func(data, start, end)
else:
self.logger.debug("Calling %s with no data", name)
func()
def set_callback(self, name, new_func):
"""Update the function for a callback. Removes from the callbacks dict
if new_func is None.
:param name: The name of the callback to call (as a string).
:param new_func: The new function for the callback. If None, then the
callback will be removed (with no error if it does not
exist).
"""
if new_func is None:
self.callbacks.pop('on_' + name, None)
else:
self.callbacks['on_' + name] = new_func
def close(self):
pass # pragma: no cover
def finalize(self):
pass # pragma: no cover
def __repr__(self):
return "%s()" % self.__class__.__name__
class OctetStreamParser(BaseParser):
"""This parser parses an octet-stream request body and calls callbacks when
incoming data is received. Callbacks are as follows:
.. list-table::
:widths: 15 10 30
:header-rows: 1
* - Callback Name
- Parameters
- Description
* - on_start
- None
- Called when the first data is parsed.
* - on_data
- data, start, end
- Called for each data chunk that is parsed.
* - on_end
- None
- Called when the parser is finished parsing all data.
:param callbacks: A dictionary of callbacks. See the documentation for
:class:`BaseParser`.
:param max_size: The maximum size of body to parse. Defaults to infinity -
i.e. unbounded.
"""
def __init__(self, callbacks={}, max_size=float('inf')):
super(OctetStreamParser, self).__init__()
self.callbacks = callbacks
self._started = False
if not isinstance(max_size, Number) or max_size < 1:
raise ValueError("max_size must be a positive number, not %r" %
max_size)
self.max_size = max_size
self._current_size = 0
def write(self, data):
"""Write some data to the parser, which will perform size verification,
and then pass the data to the underlying callback.
:param data: a bytestring
"""
if not self._started:
self.callback('start')
self._started = True
# Truncate data length.
data_len = len(data)
if (self._current_size + data_len) > self.max_size:
# We truncate the length of data that we are to process.
new_size = int(self.max_size - self._current_size)
self.logger.warning("Current size is %d (max %d), so truncating "
"data length from %d to %d",
self._current_size, self.max_size, data_len,
new_size)
data_len = new_size
# Increment size, then callback, in case there's an exception.
self._current_size += data_len
self.callback('data', data, 0, data_len)
return data_len
def finalize(self):
"""Finalize this parser, which signals to that we are finished parsing,
and sends the on_end callback.
"""
self.callback('end')
def __repr__(self):
return "%s()" % self.__class__.__name__
class QuerystringParser(BaseParser):
"""This is a streaming querystring parser. It will consume data, and call
the callbacks given when it has data.
.. list-table::
:widths: 15 10 30
:header-rows: 1
* - Callback Name
- Parameters
- Description
* - on_field_start
- None
- Called when a new field is encountered.
* - on_field_name
- data, start, end
- Called when a portion of a field's name is encountered.
* - on_field_data
- data, start, end
- Called when a portion of a field's data is encountered.
* - on_field_end
- None
- Called when the end of a field is encountered.
* - on_end
- None
- Called when the parser is finished parsing all data.
:param callbacks: A dictionary of callbacks. See the documentation for
:class:`BaseParser`.
:param strict_parsing: Whether or not to parse the body strictly. Defaults
to False. If this is set to True, then the behavior
of the parser changes as follows: if a field
has a value with an equals sign (e.g. "foo=bar", or
"foo="), it is always included. If a field has no
equals sign (e.g. "...&name&..."), it will be
treated as an error if 'strict_parsing' is True,
otherwise included. If an error is encountered,
then a
:class:`multipart.exceptions.QuerystringParseError`
will be raised.
:param max_size: The maximum size of body to parse. Defaults to infinity -
i.e. unbounded.
"""
def __init__(self, callbacks={}, strict_parsing=False,
max_size=float('inf')):
super(QuerystringParser, self).__init__()
self.state = STATE_BEFORE_FIELD
self._found_sep = False
self.callbacks = callbacks
# Max-size stuff
if not isinstance(max_size, Number) or max_size < 1:
raise ValueError("max_size must be a positive number, not %r" %
max_size)
self.max_size = max_size
self._current_size = 0
# Should parsing be strict?
self.strict_parsing = strict_parsing
def write(self, data):
"""Write some data to the parser, which will perform size verification,
parse into either a field name or value, and then pass the
corresponding data to the underlying callback. If an error is
encountered while parsing, a QuerystringParseError will be raised. The
"offset" attribute of the raised exception will be set to the offset in
the input data chunk (NOT the overall stream) that caused the error.
:param data: a bytestring
"""
# Handle sizing.
data_len = len(data)
if (self._current_size + data_len) > self.max_size:
# We truncate the length of data that we are to process.
new_size = int(self.max_size - self._current_size)
self.logger.warning("Current size is %d (max %d), so truncating "
"data length from %d to %d",
self._current_size, self.max_size, data_len,
new_size)
data_len = new_size
l = 0
try:
l = self._internal_write(data, data_len)
finally:
self._current_size += l
return l
def _internal_write(self, data, length):
state = self.state
strict_parsing = self.strict_parsing
found_sep = self._found_sep
i = 0
while i < length:
ch = data[i]
# Depending on our state...
if state == STATE_BEFORE_FIELD:
# If the 'found_sep' flag is set, we've already encountered
# and skipped a single separator. If so, we check our strict
# parsing flag and decide what to do. Otherwise, we haven't
# yet reached a separator, and thus, if we do, we need to skip
# it, as it will be the expected boundary between fields.
if ch == AMPERSAND or ch == SEMICOLON:
if found_sep:
# If we're parsing strictly, we disallow blank chunks.
if strict_parsing:
e = QuerystringParseError(
"Skipping duplicate ampersand/semicolon at "
"%d" % i
)
e.offset = i
raise e
else:
self.logger.debug("Skipping duplicate ampersand/"
"semicolon at %d", i)
else:
# This case is when we're skipping the (first)
# separator between fields, so we just set our flag
# and continue on.
found_sep = True
else:
# Emit a field-start event, and go to that state. Also,
# reset the "found_sep" flag, for the next time we get to
# this state.
self.callback('field_start')
i -= 1
state = STATE_FIELD_NAME
found_sep = False
elif state == STATE_FIELD_NAME:
# Try to find a separator - we ensure that, if we do, we only
# look for the equals sign before it.
sep_pos = data.find(b'&', i)
if sep_pos == -1:
sep_pos = data.find(b';', i)
# See if we can find an equals sign in the remaining data. If
# so, we can immediately emit the field name and jump to the
# data state.
if sep_pos != -1:
equals_pos = data.find(b'=', i, sep_pos)
else:
equals_pos = data.find(b'=', i)
if equals_pos != -1:
# Emit this name.
self.callback('field_name', data, i, equals_pos)
# Jump i to this position. Note that it will then have 1
# added to it below, which means the next iteration of this
# loop will inspect the character after the equals sign.
i = equals_pos
state = STATE_FIELD_DATA
else:
# No equals sign found.
if not strict_parsing:
# See also comments in the STATE_FIELD_DATA case below.
# If we found the separator, we emit the name and just
# end - there's no data callback at all (not even with
# a blank value).
if sep_pos != -1:
self.callback('field_name', data, i, sep_pos)
self.callback('field_end')
i = sep_pos - 1
state = STATE_BEFORE_FIELD
else:
# Otherwise, no separator in this block, so the
# rest of this chunk must be a name.
self.callback('field_name', data, i, length)
i = length
else:
# We're parsing strictly. If we find a separator,
# this is an error - we require an equals sign.
if sep_pos != -1:
e = QuerystringParseError(
"When strict_parsing is True, we require an "
"equals sign in all field chunks. Did not "
"find one in the chunk that starts at %d" %
(i,)
)
e.offset = i
raise e
# No separator in the rest of this chunk, so it's just
# a field name.
self.callback('field_name', data, i, length)
i = length
elif state == STATE_FIELD_DATA:
# Try finding either an ampersand or a semicolon after this
# position.
sep_pos = data.find(b'&', i)
if sep_pos == -1:
sep_pos = data.find(b';', i)
# If we found it, callback this bit as data and then go back
# to expecting to find a field.
if sep_pos != -1:
self.callback('field_data', data, i, sep_pos)
self.callback('field_end')
# Note that we go to the separator, which brings us to the
# "before field" state. This allows us to properly emit
# "field_start" events only when we actually have data for
# a field of some sort.
i = sep_pos - 1
state = STATE_BEFORE_FIELD
# Otherwise, emit the rest as data and finish.
else:
self.callback('field_data', data, i, length)
i = length
else: # pragma: no cover (error case)
msg = "Reached an unknown state %d at %d" % (state, i)
self.logger.warning(msg)
e = QuerystringParseError(msg)
e.offset = i
raise e
i += 1
self.state = state
self._found_sep = found_sep
return len(data)
def finalize(self):
"""Finalize this parser, which signals to that we are finished parsing,
if we're still in the middle of a field, an on_field_end callback, and
then the on_end callback.
"""
# If we're currently in the middle of a field, we finish it.
if self.state == STATE_FIELD_DATA:
self.callback('field_end')
self.callback('end')
def __repr__(self):
return "%s(keep_blank_values=%r, strict_parsing=%r, max_size=%r)" % (
self.__class__.__name__,
self.keep_blank_values, self.strict_parsing, self.max_size
)
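# Usage sketch:
#
#     names, values = [], []
#     parser = QuerystringParser(callbacks={
#         'on_field_name': lambda d, s, e: names.append(d[s:e]),
#         'on_field_data': lambda d, s, e: values.append(d[s:e]),
#     })
#     parser.write(b'a=1&b=2')
#     parser.finalize()
#     # names == [b'a', b'b'], values == [b'1', b'2']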
class MultipartParser(BaseParser):
"""This class is a streaming multipart/form-data parser.
.. list-table::
:widths: 15 10 30
:header-rows: 1
* - Callback Name
- Parameters
- Description
* - on_part_begin
- None
- Called when a new part of the multipart message is encountered.
* - on_part_data
- data, start, end
- Called when a portion of a part's data is encountered.
* - on_part_end
- None
- Called when the end of a part is reached.
* - on_header_begin
- None
- Called when we've found a new header in a part of a multipart
message
* - on_header_field
- data, start, end
- Called each time an additional portion of a header is read (i.e. the
part of the header that is before the colon; the "Foo" in
"Foo: Bar").
* - on_header_value
- data, start, end
- Called when we get data for a header.
* - on_header_end
- None
- Called when the current header is finished - i.e. we've reached the
newline at the end of the header.
* - on_headers_finished
- None
- Called when all headers are finished, and before the part data
starts.
* - on_end
- None
- Called when the parser is finished parsing all data.
:param boundary: The multipart boundary. This is required, and must match
what is given in the HTTP request - usually in the
Content-Type header.
:param callbacks: A dictionary of callbacks. See the documentation for
:class:`BaseParser`.
:param max_size: The maximum size of body to parse. Defaults to infinity -
i.e. unbounded.
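
    A minimal usage sketch (the boundary, callback wiring and ``body_bytes``
    are illustrative)::

        chunks = []

        def on_part_data(data, start, end):
            chunks.append(data[start:end])

        parser = MultipartParser(b'boundary', {'on_part_data': on_part_data})
        parser.write(body_bytes)  # the raw multipart body, as bytes
        parser.finalize()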
"""
def __init__(self, boundary, callbacks={}, max_size=float('inf')):
# Initialize parser state.
super(MultipartParser, self).__init__()
self.state = STATE_START
self.index = self.flags = 0
self.callbacks = callbacks
if not isinstance(max_size, Number) or max_size < 1:
raise ValueError("max_size must be a positive number, not %r" %
max_size)
self.max_size = max_size
self._current_size = 0
        # Setup marks. These are used to track the state of data received.
self.marks = {}
# TODO: Actually use this rather than the dumb version we currently use
# # Precompute the skip table for the Boyer-Moore-Horspool algorithm.
# skip = [len(boundary) for x in range(256)]
# for i in range(len(boundary) - 1):
# skip[ord_char(boundary[i])] = len(boundary) - i - 1
#
# # We use a tuple since it's a constant, and marginally faster.
# self.skip = tuple(skip)
# Save our boundary.
if isinstance(boundary, text_type): # pragma: no cover
boundary = boundary.encode('latin-1')
self.boundary = b'\r\n--' + boundary
# Get a set of characters that belong to our boundary.
self.boundary_chars = frozenset(self.boundary)
# We also create a lookbehind list.
# Note: the +8 is since we can have, at maximum, "\r\n--" + boundary +
# "--\r\n" at the final boundary, and the length of '\r\n--' and
# '--\r\n' is 8 bytes.
self.lookbehind = [NULL for x in range(len(boundary) + 8)]
def write(self, data):
"""Write some data to the parser, which will perform size verification,
and then parse the data into the appropriate location (e.g. header,
data, etc.), and pass this on to the underlying callback. If an error
is encountered, a MultipartParseError will be raised. The "offset"
attribute on the raised exception will be set to the offset of the byte
in the input chunk that caused the error.
:param data: a bytestring
"""
# Handle sizing.
data_len = len(data)
if (self._current_size + data_len) > self.max_size:
# We truncate the length of data that we are to process.
new_size = int(self.max_size - self._current_size)
self.logger.warning("Current size is %d (max %d), so truncating "
"data length from %d to %d",
self._current_size, self.max_size, data_len,
new_size)
data_len = new_size
        bytes_processed = 0
        try:
            bytes_processed = self._internal_write(data, data_len)
        finally:
            self._current_size += bytes_processed

        return bytes_processed
def _internal_write(self, data, length):
# Get values from locals.
boundary = self.boundary
# Get our state, flags and index. These are persisted between calls to
# this function.
state = self.state
index = self.index
flags = self.flags
# Our index defaults to 0.
i = 0
# Set a mark.
def set_mark(name):
self.marks[name] = i
# Remove a mark.
def delete_mark(name, reset=False):
self.marks.pop(name, None)
# Helper function that makes calling a callback with data easier. The
# 'remaining' parameter will callback from the marked value until the
# end of the buffer, and reset the mark, instead of deleting it. This
# is used at the end of the function to call our callbacks with any
# remaining data in this chunk.
def data_callback(name, remaining=False):
marked_index = self.marks.get(name)
if marked_index is None:
return
# If we're getting remaining data, we ignore the current i value
# and just call with the remaining data.
if remaining:
self.callback(name, data, marked_index, length)
self.marks[name] = 0
# Otherwise, we call it from the mark to the current byte we're
# processing.
else:
self.callback(name, data, marked_index, i)
self.marks.pop(name, None)
# For each byte...
while i < length:
c = data[i]
if state == STATE_START:
# Skip leading newlines
if c == CR or c == LF:
i += 1
self.logger.debug("Skipping leading CR/LF at %d", i)
continue
# index is used as in index into our boundary. Set to 0.
index = 0
# Move to the next state, but decrement i so that we re-process
# this character.
state = STATE_START_BOUNDARY
i -= 1
elif state == STATE_START_BOUNDARY:
# Check to ensure that the last 2 characters in our boundary
# are CRLF.
if index == len(boundary) - 2:
if c != CR:
# Error!
msg = "Did not find CR at end of boundary (%d)" % (i,)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
index += 1
elif index == len(boundary) - 2 + 1:
if c != LF:
msg = "Did not find LF at end of boundary (%d)" % (i,)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
# The index is now used for indexing into our boundary.
index = 0
# Callback for the start of a part.
self.callback('part_begin')
# Move to the next character and state.
state = STATE_HEADER_FIELD_START
else:
# Check to ensure our boundary matches
if c != boundary[index + 2]:
msg = "Did not find boundary character %r at index " \
"%d" % (c, index + 2)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
# Increment index into boundary and continue.
index += 1
elif state == STATE_HEADER_FIELD_START:
# Mark the start of a header field here, reset the index, and
# continue parsing our header field.
index = 0
# Set a mark of our header field.
set_mark('header_field')
# Move to parsing header fields.
state = STATE_HEADER_FIELD
i -= 1
elif state == STATE_HEADER_FIELD:
# If we've reached a CR at the beginning of a header, it means
# that we've reached the second of 2 newlines, and so there are
# no more headers to parse.
if c == CR:
delete_mark('header_field')
state = STATE_HEADERS_ALMOST_DONE
i += 1
continue
# Increment our index in the header.
index += 1
# Do nothing if we encounter a hyphen.
if c == HYPHEN:
pass
# If we've reached a colon, we're done with this header.
elif c == COLON:
# A 0-length header is an error.
if index == 1:
msg = "Found 0-length header at %d" % (i,)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
# Call our callback with the header field.
data_callback('header_field')
# Move to parsing the header value.
state = STATE_HEADER_VALUE_START
else:
# Lower-case this character, and ensure that it is in fact
# a valid letter. If not, it's an error.
cl = lower_char(c)
if cl < LOWER_A or cl > LOWER_Z:
msg = "Found non-alphanumeric character %r in " \
"header at %d" % (c, i)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
elif state == STATE_HEADER_VALUE_START:
# Skip leading spaces.
if c == SPACE:
i += 1
continue
# Mark the start of the header value.
set_mark('header_value')
# Move to the header-value state, reprocessing this character.
state = STATE_HEADER_VALUE
i -= 1
elif state == STATE_HEADER_VALUE:
                # If we've got a CR, we're nearly done with our headers.
# we do nothing and just move past this character.
if c == CR:
data_callback('header_value')
self.callback('header_end')
state = STATE_HEADER_VALUE_ALMOST_DONE
elif state == STATE_HEADER_VALUE_ALMOST_DONE:
# The last character should be a LF. If not, it's an error.
if c != LF:
msg = "Did not find LF character at end of header " \
"(found %r)" % (c,)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
# Move back to the start of another header. Note that if that
# state detects ANOTHER newline, it'll trigger the end of our
# headers.
state = STATE_HEADER_FIELD_START
elif state == STATE_HEADERS_ALMOST_DONE:
                # We're almost done with our headers. This is reached when we parse
# a CR at the beginning of a header, so our next character
# should be a LF, or it's an error.
if c != LF:
msg = "Did not find LF at end of headers (found %r)" % (c,)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
self.callback('headers_finished')
state = STATE_PART_DATA_START
elif state == STATE_PART_DATA_START:
# Mark the start of our part data.
set_mark('part_data')
# Start processing part data, including this character.
state = STATE_PART_DATA
i -= 1
elif state == STATE_PART_DATA:
# We're processing our part data right now. During this, we
# need to efficiently search for our boundary, since any data
# on any number of lines can be a part of the current data.
# We use the Boyer-Moore-Horspool algorithm to efficiently
# search through the remainder of the buffer looking for our
# boundary.
# Save the current value of our index. We use this in case we
# find part of a boundary, but it doesn't match fully.
prev_index = index
# Set up variables.
boundary_length = len(boundary)
boundary_end = boundary_length - 1
data_length = length
boundary_chars = self.boundary_chars
# If our index is 0, we're starting a new part, so start our
# search.
if index == 0:
# Search forward until we either hit the end of our buffer,
# or reach a character that's in our boundary.
i += boundary_end
while i < data_length - 1 and data[i] not in boundary_chars:
i += boundary_length
# Reset i back the length of our boundary, which is the
# earliest possible location that could be our match (i.e.
# if we've just broken out of our loop since we saw the
# last character in our boundary)
i -= boundary_end
c = data[i]
# Now, we have a couple of cases here. If our index is before
# the end of the boundary...
if index < boundary_length:
# If the character matches...
if boundary[index] == c:
# If we found a match for our boundary, we send the
# existing data.
if index == 0:
data_callback('part_data')
# The current character matches, so continue!
index += 1
else:
index = 0
# Our index is equal to the length of our boundary!
elif index == boundary_length:
# First we increment it.
index += 1
# Now, if we've reached a newline, we need to set this as
# the potential end of our boundary.
if c == CR:
flags |= FLAG_PART_BOUNDARY
# Otherwise, if this is a hyphen, we might be at the last
# of all boundaries.
elif c == HYPHEN:
flags |= FLAG_LAST_BOUNDARY
# Otherwise, we reset our index, since this isn't either a
# newline or a hyphen.
else:
index = 0
# Our index is right after the part boundary, which should be
# a LF.
elif index == boundary_length + 1:
# If we're at a part boundary (i.e. we've seen a CR
# character already)...
if flags & FLAG_PART_BOUNDARY:
# We need a LF character next.
if c == LF:
# Unset the part boundary flag.
flags &= (~FLAG_PART_BOUNDARY)
# Callback indicating that we've reached the end of
# a part, and are starting a new one.
self.callback('part_end')
self.callback('part_begin')
# Move to parsing new headers.
index = 0
state = STATE_HEADER_FIELD_START
i += 1
continue
# We didn't find an LF character, so no match. Reset
# our index and clear our flag.
index = 0
flags &= (~FLAG_PART_BOUNDARY)
# Otherwise, if we're at the last boundary (i.e. we've
# seen a hyphen already)...
elif flags & FLAG_LAST_BOUNDARY:
# We need a second hyphen here.
if c == HYPHEN:
# Callback to end the current part, and then the
# message.
self.callback('part_end')
self.callback('end')
state = STATE_END
else:
# No match, so reset index.
index = 0
# If we have an index, we need to keep this byte for later, in
# case we can't match the full boundary.
if index > 0:
self.lookbehind[index - 1] = c
# Otherwise, our index is 0. If the previous index is not, it
# means we reset something, and we need to take the data we
# thought was part of our boundary and send it along as actual
# data.
elif prev_index > 0:
# Callback to write the saved data.
lb_data = join_bytes(self.lookbehind)
self.callback('part_data', lb_data, 0, prev_index)
# Overwrite our previous index.
prev_index = 0
# Re-set our mark for part data.
set_mark('part_data')
# Re-consider the current character, since this could be
# the start of the boundary itself.
i -= 1
elif state == STATE_END:
# Do nothing and just consume a byte in the end state.
if c not in (CR, LF):
self.logger.warning("Consuming a byte '0x%x' in the end state", c)
else: # pragma: no cover (error case)
# We got into a strange state somehow! Just stop processing.
msg = "Reached an unknown state %d at %d" % (state, i)
self.logger.warning(msg)
e = MultipartParseError(msg)
e.offset = i
raise e
# Move to the next byte.
i += 1
# We call our callbacks with any remaining data. Note that we pass
# the 'remaining' flag, which sets the mark back to 0 instead of
# deleting it, if it's found. This is because, if the mark is found
# at this point, we assume that there's data for one of these things
# that has been parsed, but not yet emitted. And, as such, it implies
# that we haven't yet reached the end of this 'thing'. So, by setting
# the mark to 0, we cause any data callbacks that take place in future
# calls to this function to start from the beginning of that buffer.
data_callback('header_field', True)
data_callback('header_value', True)
data_callback('part_data', True)
# Save values to locals.
self.state = state
self.index = index
self.flags = flags
# Return our data length to indicate no errors, and that we processed
# all of it.
return length
def finalize(self):
"""Finalize this parser, which signals to that we are finished parsing.
Note: It does not currently, but in the future, it will verify that we
are in the final state of the parser (i.e. the end of the multipart
message is well-formed), and, if not, throw an error.
"""
# TODO: verify that we're in the state STATE_END, otherwise throw an
# error or otherwise state that we're not finished parsing.
pass
def __repr__(self):
return "%s(boundary=%r)" % (self.__class__.__name__, self.boundary)
class FormParser(object):
"""This class is the all-in-one form parser. Given all the information
necessary to parse a form, it will instantiate the correct parser, create
the proper :class:`Field` and :class:`File` classes to store the data that
is parsed, and call the two given callbacks with each field and file as
they become available.
:param content_type: The Content-Type of the incoming request. This is
used to select the appropriate parser.
:param on_field: The callback to call when a field has been parsed and is
ready for usage. See above for parameters.
:param on_file: The callback to call when a file has been parsed and is
ready for usage. See above for parameters.
    :param on_end: An optional callback to call when all fields and files in a
                   request have been parsed. Can be None.
:param boundary: If the request is a multipart/form-data request, this
should be the boundary of the request, as given in the
Content-Type header, as a bytestring.
:param file_name: If the request is of type application/octet-stream, then
the body of the request will not contain any information
about the uploaded file. In such cases, you can provide
the file name of the uploaded file manually.
:param FileClass: The class to use for uploaded files. Defaults to
:class:`File`, but you can provide your own class if you
wish to customize behaviour. The class will be
instantiated as FileClass(file_name, field_name), and it
                      must provide the following functions::
file_instance.write(data)
file_instance.finalize()
file_instance.close()
:param FieldClass: The class to use for uploaded fields. Defaults to
:class:`Field`, but you can provide your own class if
you wish to customize behaviour. The class will be
instantiated as FieldClass(field_name), and it must
                       provide the following functions::
field_instance.write(data)
field_instance.finalize()
field_instance.close()
:param config: Configuration to use for this FormParser. The default
values are taken from the DEFAULT_CONFIG value, and then
any keys present in this dictionary will overwrite the
default values.
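
    A minimal usage sketch (the boundary and ``body_bytes`` are illustrative)::

        def on_field(field):
            print(field.field_name, field.value)

        def on_file(file):
            print(file.field_name, file.file_name)

        parser = FormParser('multipart/form-data', on_field, on_file,
                            boundary=b'boundary')
        parser.write(body_bytes)  # the raw request body, as bytes
        parser.finalize()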
"""
#: This is the default configuration for our form parser.
#: Note: all file sizes should be in bytes.
DEFAULT_CONFIG = {
'MAX_BODY_SIZE': float('inf'),
'MAX_MEMORY_FILE_SIZE': 1 * 1024 * 1024,
'UPLOAD_DIR': None,
'UPLOAD_KEEP_FILENAME': False,
'UPLOAD_KEEP_EXTENSIONS': False,
# Error on invalid Content-Transfer-Encoding?
'UPLOAD_ERROR_ON_BAD_CTE': False,
}
def __init__(self, content_type, on_field, on_file, on_end=None,
boundary=None, file_name=None, FileClass=File,
FieldClass=Field, config={}):
self.logger = logging.getLogger(__name__)
# Save variables.
self.content_type = content_type
self.boundary = boundary
self.bytes_received = 0
self.parser = None
# Save callbacks.
self.on_field = on_field
self.on_file = on_file
self.on_end = on_end
# Save classes.
        self.FileClass = FileClass
        self.FieldClass = FieldClass
# Set configuration options.
self.config = self.DEFAULT_CONFIG.copy()
self.config.update(config)
# Depending on the Content-Type, we instantiate the correct parser.
if content_type == 'application/octet-stream':
# Work around the lack of 'nonlocal' in Py2
class vars(object):
f = None
def on_start():
vars.f = FileClass(file_name, None, config=self.config)
def on_data(data, start, end):
vars.f.write(data[start:end])
def on_end():
# Finalize the file itself.
vars.f.finalize()
# Call our callback.
on_file(vars.f)
# Call the on-end callback.
if self.on_end is not None:
self.on_end()
callbacks = {
'on_start': on_start,
'on_data': on_data,
'on_end': on_end,
}
# Instantiate an octet-stream parser
parser = OctetStreamParser(callbacks,
max_size=self.config['MAX_BODY_SIZE'])
elif (content_type == 'application/x-www-form-urlencoded' or
content_type == 'application/x-url-encoded'):
name_buffer = []
class vars(object):
f = None
def on_field_start():
pass
def on_field_name(data, start, end):
name_buffer.append(data[start:end])
def on_field_data(data, start, end):
if vars.f is None:
vars.f = FieldClass(b''.join(name_buffer))
del name_buffer[:]
vars.f.write(data[start:end])
def on_field_end():
# Finalize and call callback.
if vars.f is None:
# If we get here, it's because there was no field data.
# We create a field, set it to None, and then continue.
vars.f = FieldClass(b''.join(name_buffer))
del name_buffer[:]
vars.f.set_none()
vars.f.finalize()
on_field(vars.f)
vars.f = None
def on_end():
if self.on_end is not None:
self.on_end()
# Setup callbacks.
callbacks = {
'on_field_start': on_field_start,
'on_field_name': on_field_name,
'on_field_data': on_field_data,
'on_field_end': on_field_end,
'on_end': on_end,
}
# Instantiate parser.
parser = QuerystringParser(
callbacks=callbacks,
max_size=self.config['MAX_BODY_SIZE']
)
elif content_type == 'multipart/form-data':
if boundary is None:
self.logger.error("No boundary given")
raise FormParserError("No boundary given")
header_name = []
header_value = []
headers = {}
# No 'nonlocal' on Python 2 :-(
class vars(object):
f = None
writer = None
is_file = False
def on_part_begin():
pass
def on_part_data(data, start, end):
bytes_processed = vars.writer.write(data[start:end])
# TODO: check for error here.
return bytes_processed
def on_part_end():
vars.f.finalize()
if vars.is_file:
on_file(vars.f)
else:
on_field(vars.f)
def on_header_field(data, start, end):
header_name.append(data[start:end])
def on_header_value(data, start, end):
header_value.append(data[start:end])
def on_header_end():
headers[b''.join(header_name)] = b''.join(header_value)
del header_name[:]
del header_value[:]
def on_headers_finished():
# Reset the 'is file' flag.
vars.is_file = False
# Parse the content-disposition header.
# TODO: handle mixed case
content_disp = headers.get(b'Content-Disposition')
disp, options = parse_options_header(content_disp)
# Get the field and filename.
field_name = options.get(b'name')
file_name = options.get(b'filename')
# TODO: check for errors
# Create the proper class.
if file_name is None:
vars.f = FieldClass(field_name)
else:
vars.f = FileClass(file_name, field_name, config=self.config)
vars.is_file = True
# Parse the given Content-Transfer-Encoding to determine what
# we need to do with the incoming data.
# TODO: check that we properly handle 8bit / 7bit encoding.
transfer_encoding = headers.get(b'Content-Transfer-Encoding',
b'7bit')
if (transfer_encoding == b'binary' or
transfer_encoding == b'8bit' or
transfer_encoding == b'7bit'):
vars.writer = vars.f
elif transfer_encoding == b'base64':
vars.writer = Base64Decoder(vars.f)
elif transfer_encoding == b'quoted-printable':
vars.writer = QuotedPrintableDecoder(vars.f)
else:
self.logger.warning("Unknown Content-Transfer-Encoding: "
"%r", transfer_encoding)
if self.config['UPLOAD_ERROR_ON_BAD_CTE']:
raise FormParserError(
'Unknown Content-Transfer-Encoding "{0}"'.format(
transfer_encoding
)
)
else:
# If we aren't erroring, then we just treat this as an
# unencoded Content-Transfer-Encoding.
vars.writer = vars.f
def on_end():
vars.writer.finalize()
if self.on_end is not None:
self.on_end()
# These are our callbacks for the parser.
callbacks = {
'on_part_begin': on_part_begin,
'on_part_data': on_part_data,
'on_part_end': on_part_end,
'on_header_field': on_header_field,
'on_header_value': on_header_value,
'on_header_end': on_header_end,
'on_headers_finished': on_headers_finished,
'on_end': on_end,
}
# Instantiate a multipart parser.
parser = MultipartParser(boundary, callbacks,
max_size=self.config['MAX_BODY_SIZE'])
else:
self.logger.warning("Unknown Content-Type: %r", content_type)
raise FormParserError("Unknown Content-Type: {0}".format(
content_type
))
self.parser = parser
def write(self, data):
"""Write some data. The parser will forward this to the appropriate
underlying parser.
:param data: a bytestring
"""
self.bytes_received += len(data)
# TODO: check the parser's return value for errors?
return self.parser.write(data)
def finalize(self):
"""Finalize the parser."""
if self.parser is not None and hasattr(self.parser, 'finalize'):
self.parser.finalize()
def close(self):
"""Close the parser."""
if self.parser is not None and hasattr(self.parser, 'close'):
self.parser.close()
def __repr__(self):
return "%s(content_type=%r, parser=%r)" % (
self.__class__.__name__,
self.content_type,
self.parser,
)
def create_form_parser(headers, on_field, on_file, trust_x_headers=False,
config={}):
"""This function is a helper function to aid in creating a FormParser
instances. Given a dictionary-like headers object, it will determine
the correct information needed, instantiate a FormParser with the
appropriate values and given callbacks, and then return the corresponding
parser.
:param headers: A dictionary-like object of HTTP headers. The only
required header is Content-Type.
:param on_field: Callback to call with each parsed field.
:param on_file: Callback to call with each parsed file.
:param trust_x_headers: Whether or not to trust information received from
certain X-Headers - for example, the file name from
X-File-Name.
:param config: Configuration variables to pass to the FormParser.
"""
content_type = headers.get('Content-Type')
if content_type is None:
logging.getLogger(__name__).warning("No Content-Type header given")
raise ValueError("No Content-Type header given!")
# Boundaries are optional (the FormParser will raise if one is needed
# but not given).
content_type, params = parse_options_header(content_type)
boundary = params.get(b'boundary')
# We need content_type to be a string, not a bytes object.
content_type = content_type.decode('latin-1')
# File names are optional.
file_name = headers.get('X-File-Name')
# Instantiate a form parser.
form_parser = FormParser(content_type,
on_field,
on_file,
boundary=boundary,
file_name=file_name,
config=config)
# Return our parser.
return form_parser
def parse_form(headers, input_stream, on_field, on_file, chunk_size=1048576,
**kwargs):
"""This function is useful if you just want to parse a request body,
without too much work. Pass it a dictionary-like object of the request's
headers, and a file-like object for the input stream, along with two
callbacks that will get called whenever a field or file is parsed.
:param headers: A dictionary-like object of HTTP headers. The only
required header is Content-Type.
:param input_stream: A file-like object that represents the request body.
The read() method must return bytestrings.
:param on_field: Callback to call with each parsed field.
:param on_file: Callback to call with each parsed file.
:param chunk_size: The maximum size to read from the input stream and write
to the parser at one time. Defaults to 1 MiB.
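
    A minimal, self-contained sketch (the headers and body are made up)::

        import io

        def on_field(field):
            print(field.field_name, field.value)

        def on_file(file):
            print(file.field_name)

        headers = {'Content-Type': 'application/x-www-form-urlencoded',
                   'Content-Length': '7'}
        parse_form(headers, io.BytesIO(b'foo=bar'), on_field, on_file)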
"""
# Create our form parser.
    parser = create_form_parser(headers, on_field, on_file, **kwargs)
    # Read chunks of up to chunk_size bytes and write them to the parser, but
    # never read more than the given Content-Length, if any.
content_length = headers.get('Content-Length')
if content_length is not None:
content_length = int(content_length)
else:
content_length = float('inf')
bytes_read = 0
while True:
# Read only up to the Content-Length given.
        max_readable = min(content_length - bytes_read, chunk_size)
buff = input_stream.read(max_readable)
# Write to the parser and update our length.
parser.write(buff)
bytes_read += len(buff)
# If we get a buffer that's smaller than the size requested, or if we
# have read up to our content length, we're done.
if len(buff) != max_readable or bytes_read == content_length:
break
# Tell our parser that we're done writing data.
    parser.finalize()


# --- end of zdppy_api/multipart/multipart.py ---
# --- begin zdppy_api/multipart/decoders.py ---

import base64
import binascii
from .exceptions import Base64Error, DecodeError
class Base64Decoder(object):
"""This object provides an interface to decode a stream of Base64 data. It
is instantiated with an "underlying object", and whenever a write()
operation is performed, it will decode the incoming data as Base64, and
call write() on the underlying object. This is primarily used for decoding
form data encoded as Base64, but can be used for other purposes::
from multipart.decoders import Base64Decoder
fd = open("notb64.txt", "wb")
decoder = Base64Decoder(fd)
try:
decoder.write("Zm9vYmFy") # "foobar" in Base64
decoder.finalize()
finally:
decoder.close()
# The contents of "notb64.txt" should be "foobar".
This object will also pass all finalize() and close() calls to the
underlying object, if the underlying object supports them.
Note that this class maintains a cache of base64 chunks, so that a write of
arbitrary size can be performed. You must call :meth:`finalize` on this
object after all writes are completed to ensure that all data is flushed
to the underlying object.
:param underlying: the underlying object to pass writes to
"""
def __init__(self, underlying):
self.cache = bytearray()
self.underlying = underlying
def write(self, data):
"""Takes any input data provided, decodes it as base64, and passes it
on to the underlying object. If the data provided is invalid base64
data, then this method will raise
a :class:`multipart.exceptions.DecodeError`
:param data: base64 data to decode
"""
# Prepend any cache info to our data.
if len(self.cache) > 0:
data = self.cache + data
# Slice off a string that's a multiple of 4.
decode_len = (len(data) // 4) * 4
val = data[:decode_len]
# Decode and write, if we have any.
if len(val) > 0:
try:
decoded = base64.b64decode(val)
except Base64Error:
raise DecodeError('There was an error raised while decoding '
'base64-encoded data.')
self.underlying.write(decoded)
# Get the remaining bytes and save in our cache.
remaining_len = len(data) % 4
if remaining_len > 0:
self.cache = data[-remaining_len:]
else:
self.cache = b''
# Return the length of the data to indicate no error.
return len(data)
def close(self):
"""Close this decoder. If the underlying object has a `close()`
method, this function will call it.
"""
if hasattr(self.underlying, 'close'):
self.underlying.close()
def finalize(self):
"""Finalize this object. This should be called when no more data
should be written to the stream. This function can raise a
:class:`multipart.exceptions.DecodeError` if there is some remaining
data in the cache.
If the underlying object has a `finalize()` method, this function will
call it.
"""
if len(self.cache) > 0:
raise DecodeError('There are %d bytes remaining in the '
'Base64Decoder cache when finalize() is called'
% len(self.cache))
if hasattr(self.underlying, 'finalize'):
self.underlying.finalize()
def __repr__(self):
return "%s(underlying=%r)" % (self.__class__.__name__, self.underlying)
class QuotedPrintableDecoder(object):
"""This object provides an interface to decode a stream of quoted-printable
data. It is instantiated with an "underlying object", in the same manner
as the :class:`multipart.decoders.Base64Decoder` class. This class behaves
in exactly the same way, including maintaining a cache of quoted-printable
chunks.
:param underlying: the underlying object to pass writes to
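
    For example (mirroring the Base64Decoder example above; the file name is
    illustrative)::

        from multipart.decoders import QuotedPrintableDecoder

        fd = open("notqp.txt", "wb")
        decoder = QuotedPrintableDecoder(fd)
        try:
            decoder.write(b"foo=3Dbar")  # decodes to b"foo=bar"
            decoder.finalize()
        finally:
            decoder.close()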
"""
def __init__(self, underlying):
self.cache = b''
self.underlying = underlying
def write(self, data):
"""Takes any input data provided, decodes it as quoted-printable, and
passes it on to the underlying object.
:param data: quoted-printable data to decode
"""
# Prepend any cache info to our data.
if len(self.cache) > 0:
data = self.cache + data
        # If the last 2 characters contain an '=' sign, then we won't be
        # able to decode the encoded value, and we'll need to save it for
        # the next decoding step.
if data[-2:].find(b'=') != -1:
enc, rest = data[:-2], data[-2:]
else:
enc = data
rest = b''
        # Decode and write, if we have data.
if len(enc) > 0:
self.underlying.write(binascii.a2b_qp(enc))
# Save remaining in cache.
self.cache = rest
return len(data)
def close(self):
"""Close this decoder. If the underlying object has a `close()`
method, this function will call it.
"""
if hasattr(self.underlying, 'close'):
self.underlying.close()
def finalize(self):
"""Finalize this object. This should be called when no more data
should be written to the stream. This function will not raise any
exceptions, but it may write more data to the underlying object if
there is data remaining in the cache.
If the underlying object has a `finalize()` method, this function will
call it.
"""
# If we have a cache, write and then remove it.
if len(self.cache) > 0:
self.underlying.write(binascii.a2b_qp(self.cache))
self.cache = b''
# Finalize our underlying stream.
if hasattr(self.underlying, 'finalize'):
self.underlying.finalize()
def __repr__(self):
return "%s(underlying=%r)" % (self.__class__.__name__, self.underlying) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/multipart/decoders.py | decoders.py |
from contextvars import ContextVar
from typing import Optional
import sys
current_async_library_cvar = ContextVar(
"current_async_library_cvar", default=None
) # type: ContextVar[Optional[str]]
class AsyncLibraryNotFoundError(RuntimeError):
pass
def current_async_library() -> str:
"""Detect which async library is currently running.
The following libraries are currently supported:
================ =========== ============================
Library Requires Magic string
================ =========== ============================
**Trio** Trio v0.6+ ``"trio"``
**Curio** - ``"curio"``
**asyncio** ``"asyncio"``
**Trio-asyncio** v0.8.2+ ``"trio"`` or ``"asyncio"``,
depending on current mode
================ =========== ============================
Returns:
A string like ``"trio"``.
Raises:
AsyncLibraryNotFoundError: if called from synchronous context,
or if the current async library was not recognized.
Examples:
.. code-block:: python3
from sniffio import current_async_library
async def generic_sleep(seconds):
library = current_async_library()
if library == "trio":
import trio
await trio.sleep(seconds)
elif library == "asyncio":
import asyncio
await asyncio.sleep(seconds)
# ... and so on ...
else:
raise RuntimeError(f"Unsupported library {library!r}")
"""
value = current_async_library_cvar.get()
if value is not None:
return value
# Sniff for curio (for now)
if 'curio' in sys.modules:
from curio.meta import curio_running
if curio_running():
return 'curio'
# Need to sniff for asyncio
if "asyncio" in sys.modules:
import asyncio
try:
current_task = asyncio.current_task # type: ignore[attr-defined]
except AttributeError:
current_task = asyncio.Task.current_task # type: ignore[attr-defined]
try:
if current_task() is not None:
if (3, 7) <= sys.version_info:
# asyncio has contextvars support, and we're in a task, so
# we can safely cache the sniffed value
current_async_library_cvar.set("asyncio")
return "asyncio"
except RuntimeError:
pass
raise AsyncLibraryNotFoundError(
"unknown async library, or not in async context"
    )


# --- end of zdppy_api/sniffio/_impl.py ---
# --- begin zdppy_api/anyio/lowlevel.py ---

import enum
import sys
from dataclasses import dataclass
from typing import Any, Dict, Generic, Set, TypeVar, Union, overload
from weakref import WeakKeyDictionary
from ._core._eventloop import get_asynclib
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
T = TypeVar("T")
D = TypeVar("D")
async def checkpoint() -> None:
"""
Check for cancellation and allow the scheduler to switch to another task.
Equivalent to (but more efficient than)::
await checkpoint_if_cancelled()
await cancel_shielded_checkpoint()
.. versionadded:: 3.0
"""
await get_asynclib().checkpoint()
async def checkpoint_if_cancelled() -> None:
"""
Enter a checkpoint if the enclosing cancel scope has been cancelled.
This does not allow the scheduler to switch to a different task.
.. versionadded:: 3.0
"""
await get_asynclib().checkpoint_if_cancelled()
async def cancel_shielded_checkpoint() -> None:
"""
Allow the scheduler to switch to another task but without checking for cancellation.
Equivalent to (but potentially more efficient than)::
with CancelScope(shield=True):
await checkpoint()
.. versionadded:: 3.0
"""
await get_asynclib().cancel_shielded_checkpoint()
def current_token() -> object:
"""Return a backend specific token object that can be used to get back to the event loop."""
return get_asynclib().current_token()
_run_vars = WeakKeyDictionary() # type: WeakKeyDictionary[Any, Dict[str, Any]]
_token_wrappers: Dict[Any, "_TokenWrapper"] = {}
@dataclass(frozen=True)
class _TokenWrapper:
__slots__ = "_token", "__weakref__"
_token: object
class _NoValueSet(enum.Enum):
NO_VALUE_SET = enum.auto()
class RunvarToken(Generic[T]):
__slots__ = "_var", "_value", "_redeemed"
def __init__(
self, var: "RunVar[T]", value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]]
):
self._var = var
self._value: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = value
self._redeemed = False
class RunVar(Generic[T]):
"""Like a :class:`~contextvars.ContextVar`, expect scoped to the running event loop."""
__slots__ = "_name", "_default"
NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET
_token_wrappers: Set[_TokenWrapper] = set()
def __init__(
self,
name: str,
default: Union[T, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET,
):
self._name = name
self._default = default
@property
def _current_vars(self) -> Dict[str, T]:
token = current_token()
while True:
try:
return _run_vars[token]
except TypeError:
# Happens when token isn't weak referable (TrioToken).
# This workaround does mean that some memory will leak on Trio until the problem
# is fixed on their end.
token = _TokenWrapper(token)
self._token_wrappers.add(token)
except KeyError:
run_vars = _run_vars[token] = {}
return run_vars
@overload
def get(self, default: D) -> Union[T, D]:
...
@overload
def get(self) -> T:
...
def get(
self, default: Union[D, Literal[_NoValueSet.NO_VALUE_SET]] = NO_VALUE_SET
) -> Union[T, D]:
try:
return self._current_vars[self._name]
except KeyError:
if default is not RunVar.NO_VALUE_SET:
return default
elif self._default is not RunVar.NO_VALUE_SET:
return self._default
raise LookupError(
f'Run variable "{self._name}" has no value and no default set'
)
def set(self, value: T) -> RunvarToken[T]:
current_vars = self._current_vars
token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET))
current_vars[self._name] = value
return token
def reset(self, token: RunvarToken[T]) -> None:
if token._var is not self:
raise ValueError("This token does not belong to this RunVar")
if token._redeemed:
raise ValueError("This token has already been used")
if token._value is _NoValueSet.NO_VALUE_SET:
try:
del self._current_vars[self._name]
except KeyError:
pass
else:
self._current_vars[self._name] = token._value
token._redeemed = True
def __repr__(self) -> str:
return f"<RunVar name={self._name!r}>" | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/lowlevel.py | lowlevel.py |
from typing import Callable, Optional, TypeVar
from warnings import warn
from ._core._eventloop import get_asynclib
from .abc import CapacityLimiter
T_Retval = TypeVar("T_Retval")
async def run_sync(
func: Callable[..., T_Retval],
*args: object,
cancellable: bool = False,
limiter: Optional[CapacityLimiter] = None
) -> T_Retval:
"""
Call the given function with the given arguments in a worker thread.
If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled,
the thread will still run its course but its return value (or any raised exception) will be
ignored.
:param func: a callable
:param args: positional arguments for the callable
:param cancellable: ``True`` to allow cancellation of the operation
:param limiter: capacity limiter to use to limit the total amount of threads running
(if omitted, the default limiter is used)
:return: an awaitable that yields the return value of the function.
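
    A minimal usage sketch (the file name is illustrative)::

        from pathlib import Path

        from anyio import to_thread

        async def read_config() -> str:
            return await to_thread.run_sync(Path("config.txt").read_text)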
"""
return await get_asynclib().run_sync_in_worker_thread(
func, *args, cancellable=cancellable, limiter=limiter
)
async def run_sync_in_worker_thread(
func: Callable[..., T_Retval],
*args: object,
cancellable: bool = False,
limiter: Optional[CapacityLimiter] = None
) -> T_Retval:
warn(
"run_sync_in_worker_thread() has been deprecated, use anyio.to_thread.run_sync() instead",
DeprecationWarning,
)
return await run_sync(func, *args, cancellable=cancellable, limiter=limiter)
def current_default_thread_limiter() -> CapacityLimiter:
"""
Return the capacity limiter that is used by default to limit the number of concurrent threads.
:return: a capacity limiter object
"""
return get_asynclib().current_default_thread_limiter()
def current_default_worker_thread_limiter() -> CapacityLimiter:
warn(
"current_default_worker_thread_limiter() has been deprecated, "
"use anyio.to_thread.current_default_thread_limiter() instead",
DeprecationWarning,
)
    return current_default_thread_limiter()


# --- end of zdppy_api/anyio/to_thread.py ---
# --- begin zdppy_api/anyio/to_process.py ---

import os
import pickle
import subprocess
import sys
from collections import deque
from importlib.util import module_from_spec, spec_from_file_location
from typing import Callable, Deque, List, Optional, Set, Tuple, TypeVar, cast
from ._core._eventloop import current_time, get_asynclib, get_cancelled_exc_class
from ._core._exceptions import BrokenWorkerProcess
from ._core._subprocesses import open_process
from ._core._synchronization import CapacityLimiter
from ._core._tasks import CancelScope, fail_after
from .abc import ByteReceiveStream, ByteSendStream, Process
from .lowlevel import RunVar, checkpoint_if_cancelled
from .streams.buffered import BufferedByteReceiveStream
WORKER_MAX_IDLE_TIME = 300 # 5 minutes
T_Retval = TypeVar("T_Retval")
_process_pool_workers: RunVar[Set[Process]] = RunVar("_process_pool_workers")
_process_pool_idle_workers: RunVar[Deque[Tuple[Process, float]]] = RunVar(
"_process_pool_idle_workers"
)
_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter")
async def run_sync(
func: Callable[..., T_Retval],
*args: object,
cancellable: bool = False,
limiter: Optional[CapacityLimiter] = None,
) -> T_Retval:
"""
Call the given function with the given arguments in a worker process.
If the ``cancellable`` option is enabled and the task waiting for its completion is cancelled,
the worker process running it will be abruptly terminated using SIGKILL (or
    ``TerminateProcess()`` on Windows).
:param func: a callable
:param args: positional arguments for the callable
:param cancellable: ``True`` to allow cancellation of the operation while it's running
:param limiter: capacity limiter to use to limit the total amount of processes running
(if omitted, the default limiter is used)
:return: an awaitable that yields the return value of the function.
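
    A minimal usage sketch (the workload is illustrative; the function and its
    arguments must be picklable)::

        import math

        from anyio import run, to_process

        async def main() -> None:
            digits = await to_process.run_sync(math.factorial, 10_000)
            print(len(str(digits)))

        run(main)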
"""
async def send_raw_command(pickled_cmd: bytes) -> object:
try:
await stdin.send(pickled_cmd)
response = await buffered.receive_until(b"\n", 50)
status, length = response.split(b" ")
if status not in (b"RETURN", b"EXCEPTION"):
raise RuntimeError(
f"Worker process returned unexpected response: {response!r}"
)
pickled_response = await buffered.receive_exactly(int(length))
except BaseException as exc:
workers.discard(process)
try:
process.kill()
with CancelScope(shield=True):
await process.aclose()
except ProcessLookupError:
pass
if isinstance(exc, get_cancelled_exc_class()):
raise
else:
raise BrokenWorkerProcess from exc
retval = pickle.loads(pickled_response)
if status == b"EXCEPTION":
assert isinstance(retval, BaseException)
raise retval
else:
return retval
# First pickle the request before trying to reserve a worker process
await checkpoint_if_cancelled()
request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL)
# If this is the first run in this event loop thread, set up the necessary variables
try:
workers = _process_pool_workers.get()
idle_workers = _process_pool_idle_workers.get()
except LookupError:
workers = set()
idle_workers = deque()
_process_pool_workers.set(workers)
_process_pool_idle_workers.set(idle_workers)
get_asynclib().setup_process_pool_exit_at_shutdown(workers)
async with (limiter or current_default_process_limiter()):
# Pop processes from the pool (starting from the most recently used) until we find one that
# hasn't exited yet
process: Process
while idle_workers:
process, idle_since = idle_workers.pop()
if process.returncode is None:
stdin = cast(ByteSendStream, process.stdin)
buffered = BufferedByteReceiveStream(
cast(ByteReceiveStream, process.stdout)
)
# Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME seconds or
# longer
now = current_time()
killed_processes: List[Process] = []
while idle_workers:
if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME:
break
process, idle_since = idle_workers.popleft()
process.kill()
workers.remove(process)
killed_processes.append(process)
with CancelScope(shield=True):
for process in killed_processes:
await process.aclose()
break
workers.remove(process)
else:
command = [sys.executable, "-u", "-m", __name__]
process = await open_process(
command, stdin=subprocess.PIPE, stdout=subprocess.PIPE
)
try:
stdin = cast(ByteSendStream, process.stdin)
buffered = BufferedByteReceiveStream(
cast(ByteReceiveStream, process.stdout)
)
with fail_after(20):
message = await buffered.receive(6)
if message != b"READY\n":
raise BrokenWorkerProcess(
f"Worker process returned unexpected response: {message!r}"
)
main_module_path = getattr(sys.modules["__main__"], "__file__", None)
pickled = pickle.dumps(
("init", sys.path, main_module_path),
protocol=pickle.HIGHEST_PROTOCOL,
)
await send_raw_command(pickled)
except (BrokenWorkerProcess, get_cancelled_exc_class()):
raise
except BaseException as exc:
process.kill()
raise BrokenWorkerProcess(
"Error during worker process initialization"
) from exc
workers.add(process)
with CancelScope(shield=not cancellable):
try:
return cast(T_Retval, await send_raw_command(request))
finally:
if process in workers:
idle_workers.append((process, current_time()))
def current_default_process_limiter() -> CapacityLimiter:
"""
Return the capacity limiter that is used by default to limit the number of worker processes.
:return: a capacity limiter object
"""
try:
return _default_process_limiter.get()
except LookupError:
limiter = CapacityLimiter(os.cpu_count() or 2)
_default_process_limiter.set(limiter)
return limiter
def process_worker() -> None:
# Redirect standard streams to os.devnull so that user code won't interfere with the
# parent-worker communication
stdin = sys.stdin
stdout = sys.stdout
sys.stdin = open(os.devnull)
sys.stdout = open(os.devnull, "w")
stdout.buffer.write(b"READY\n")
while True:
retval = exception = None
try:
command, *args = pickle.load(stdin.buffer)
except EOFError:
return
except BaseException as exc:
exception = exc
else:
if command == "run":
func, args = args
try:
retval = func(*args)
except BaseException as exc:
exception = exc
elif command == "init":
main_module_path: Optional[str]
sys.path, main_module_path = args
del sys.modules["__main__"]
if main_module_path:
# Load the parent's main module but as __mp_main__ instead of __main__
# (like multiprocessing does) to avoid infinite recursion
try:
spec = spec_from_file_location("__mp_main__", main_module_path)
if spec and spec.loader:
main = module_from_spec(spec)
spec.loader.exec_module(main)
sys.modules["__main__"] = main
except BaseException as exc:
exception = exc
try:
if exception is not None:
status = b"EXCEPTION"
pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL)
else:
status = b"RETURN"
pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL)
except BaseException as exc:
exception = exc
status = b"EXCEPTION"
pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL)
stdout.buffer.write(b"%s %d\n" % (status, len(pickled)))
stdout.buffer.write(pickled)
# Respect SIGTERM
if isinstance(exception, SystemExit):
raise exception
if __name__ == "__main__":
    process_worker()


# --- end of zdppy_api/anyio/to_process.py ---
# --- begin zdppy_api/anyio/from_thread.py ---

import threading
from asyncio import iscoroutine
from concurrent.futures import FIRST_COMPLETED, Future, ThreadPoolExecutor, wait
from contextlib import AbstractContextManager, contextmanager
from types import TracebackType
from typing import (
Any,
AsyncContextManager,
Callable,
ContextManager,
Coroutine,
Dict,
Generator,
Iterable,
Optional,
Tuple,
Type,
TypeVar,
Union,
cast,
overload,
)
from warnings import warn
from ._core import _eventloop
from ._core._eventloop import get_asynclib, get_cancelled_exc_class, threadlocals
from ._core._synchronization import Event
from ._core._tasks import CancelScope, create_task_group
from .abc._tasks import TaskStatus
T_Retval = TypeVar("T_Retval")
T_co = TypeVar("T_co")
def run(func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object) -> T_Retval:
"""
Call a coroutine function from a worker thread.
:param func: a coroutine function
:param args: positional arguments for the callable
:return: the return value of the coroutine function
"""
try:
asynclib = threadlocals.current_async_module
except AttributeError:
raise RuntimeError("This function can only be run from an AnyIO worker thread")
return asynclib.run_async_from_thread(func, *args)
def run_async_from_thread(
func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object
) -> T_Retval:
warn(
"run_async_from_thread() has been deprecated, use anyio.from_thread.run() instead",
DeprecationWarning,
)
return run(func, *args)
def run_sync(func: Callable[..., T_Retval], *args: object) -> T_Retval:
"""
Call a function in the event loop thread from a worker thread.
:param func: a callable
:param args: positional arguments for the callable
:return: the return value of the callable
"""
try:
asynclib = threadlocals.current_async_module
except AttributeError:
raise RuntimeError("This function can only be run from an AnyIO worker thread")
return asynclib.run_sync_from_thread(func, *args)
def run_sync_from_thread(func: Callable[..., T_Retval], *args: object) -> T_Retval:
warn(
"run_sync_from_thread() has been deprecated, use anyio.from_thread.run_sync() instead",
DeprecationWarning,
)
return run_sync(func, *args)
class _BlockingAsyncContextManager(AbstractContextManager):
_enter_future: Future
_exit_future: Future
_exit_event: Event
_exit_exc_info: Tuple[
Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]
] = (None, None, None)
def __init__(self, async_cm: AsyncContextManager[T_co], portal: "BlockingPortal"):
self._async_cm = async_cm
self._portal = portal
async def run_async_cm(self) -> Optional[bool]:
try:
self._exit_event = Event()
value = await self._async_cm.__aenter__()
except BaseException as exc:
self._enter_future.set_exception(exc)
raise
else:
self._enter_future.set_result(value)
try:
# Wait for the sync context manager to exit.
# This next statement can raise `get_cancelled_exc_class()` if
# something went wrong in a task group in this async context
# manager.
await self._exit_event.wait()
finally:
# In case of cancellation, it could be that we end up here before
# `_BlockingAsyncContextManager.__exit__` is called, and an
# `_exit_exc_info` has been set.
result = await self._async_cm.__aexit__(*self._exit_exc_info)
return result
def __enter__(self) -> T_co:
self._enter_future = Future()
self._exit_future = self._portal.start_task_soon(self.run_async_cm)
cm = self._enter_future.result()
return cast(T_co, cm)
def __exit__(
self,
__exc_type: Optional[Type[BaseException]],
__exc_value: Optional[BaseException],
__traceback: Optional[TracebackType],
) -> Optional[bool]:
self._exit_exc_info = __exc_type, __exc_value, __traceback
self._portal.call(self._exit_event.set)
return self._exit_future.result()
class _BlockingPortalTaskStatus(TaskStatus):
def __init__(self, future: Future):
self._future = future
def started(self, value: object = None) -> None:
self._future.set_result(value)
class BlockingPortal:
"""An object that lets external threads run code in an asynchronous event loop."""
def __new__(cls) -> "BlockingPortal":
return get_asynclib().BlockingPortal()
def __init__(self) -> None:
self._event_loop_thread_id: Optional[int] = threading.get_ident()
self._stop_event = Event()
self._task_group = create_task_group()
self._cancelled_exc_class = get_cancelled_exc_class()
async def __aenter__(self) -> "BlockingPortal":
await self._task_group.__aenter__()
return self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
await self.stop()
return await self._task_group.__aexit__(exc_type, exc_val, exc_tb)
def _check_running(self) -> None:
if self._event_loop_thread_id is None:
raise RuntimeError("This portal is not running")
if self._event_loop_thread_id == threading.get_ident():
raise RuntimeError(
"This method cannot be called from the event loop thread"
)
async def sleep_until_stopped(self) -> None:
"""Sleep until :meth:`stop` is called."""
await self._stop_event.wait()
async def stop(self, cancel_remaining: bool = False) -> None:
"""
Signal the portal to shut down.
This marks the portal as no longer accepting new calls and exits from
:meth:`sleep_until_stopped`.
:param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` to let them
finish before returning
"""
self._event_loop_thread_id = None
self._stop_event.set()
if cancel_remaining:
self._task_group.cancel_scope.cancel()
async def _call_func(
self, func: Callable, args: tuple, kwargs: Dict[str, Any], future: Future
) -> None:
def callback(f: Future) -> None:
if f.cancelled() and self._event_loop_thread_id not in (
None,
threading.get_ident(),
):
self.call(scope.cancel)
try:
retval = func(*args, **kwargs)
if iscoroutine(retval):
with CancelScope() as scope:
if future.cancelled():
scope.cancel()
else:
future.add_done_callback(callback)
retval = await retval
except self._cancelled_exc_class:
future.cancel()
except BaseException as exc:
if not future.cancelled():
future.set_exception(exc)
# Let base exceptions fall through
if not isinstance(exc, Exception):
raise
else:
if not future.cancelled():
future.set_result(retval)
finally:
scope = None # type: ignore[assignment]
def _spawn_task_from_thread(
self,
func: Callable,
args: tuple,
kwargs: Dict[str, Any],
name: object,
future: Future,
) -> None:
"""
Spawn a new task using the given callable.
Implementors must ensure that the future is resolved when the task finishes.
:param func: a callable
:param args: positional arguments to be passed to the callable
:param kwargs: keyword arguments to be passed to the callable
:param name: name of the task (will be coerced to a string if not ``None``)
:param future: a future that will resolve to the return value of the callable, or the
exception raised during its execution
"""
raise NotImplementedError
@overload
def call(
self, func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object
) -> T_Retval:
...
@overload
def call(self, func: Callable[..., T_Retval], *args: object) -> T_Retval:
...
def call(
self,
func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]],
*args: object
) -> T_Retval:
"""
Call the given function in the event loop thread.
If the callable returns a coroutine object, it is awaited on.
:param func: any callable
:raises RuntimeError: if the portal is not running or if this method is called from within
the event loop thread
"""
return cast(T_Retval, self.start_task_soon(func, *args).result())
@overload
def spawn_task(
self,
func: Callable[..., Coroutine[Any, Any, T_Retval]],
*args: object,
name: object = None
) -> "Future[T_Retval]":
...
@overload
def spawn_task(
self, func: Callable[..., T_Retval], *args: object, name: object = None
) -> "Future[T_Retval]":
...
def spawn_task(
self,
func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]],
*args: object,
name: object = None
) -> "Future[T_Retval]":
"""
Start a task in the portal's task group.
:param func: the target coroutine function
:param args: positional arguments passed to ``func``
:param name: name of the task (will be coerced to a string if not ``None``)
:return: a future that resolves with the return value of the callable if the task completes
successfully, or with the exception raised in the task
:raises RuntimeError: if the portal is not running or if this method is called from within
the event loop thread
.. versionadded:: 2.1
.. deprecated:: 3.0
Use :meth:`start_task_soon` instead. If your code needs AnyIO 2 compatibility, you
can keep using this until AnyIO 4.
"""
warn(
"spawn_task() is deprecated -- use start_task_soon() instead",
DeprecationWarning,
)
return self.start_task_soon(func, *args, name=name) # type: ignore[arg-type]
@overload
def start_task_soon(
self,
func: Callable[..., Coroutine[Any, Any, T_Retval]],
*args: object,
name: object = None
) -> "Future[T_Retval]":
...
@overload
def start_task_soon(
self, func: Callable[..., T_Retval], *args: object, name: object = None
) -> "Future[T_Retval]":
...
def start_task_soon(
self,
func: Callable[..., Union[Coroutine[Any, Any, T_Retval], T_Retval]],
*args: object,
name: object = None
) -> "Future[T_Retval]":
"""
Start a task in the portal's task group.
The task will be run inside a cancel scope which can be cancelled by cancelling the
returned future.
:param func: the target coroutine function
:param args: positional arguments passed to ``func``
:param name: name of the task (will be coerced to a string if not ``None``)
:return: a future that resolves with the return value of the callable if the task completes
successfully, or with the exception raised in the task
:raises RuntimeError: if the portal is not running or if this method is called from within
the event loop thread
.. versionadded:: 3.0
"""
self._check_running()
f: Future = Future()
self._spawn_task_from_thread(func, args, {}, name, f)
return f
def start_task(
self,
func: Callable[..., Coroutine[Any, Any, Any]],
*args: object,
name: object = None
) -> Tuple["Future[Any]", Any]:
"""
Start a task in the portal's task group and wait until it signals for readiness.
This method works the same way as :meth:`TaskGroup.start`.
:param func: the target coroutine function
:param args: positional arguments passed to ``func``
:param name: name of the task (will be coerced to a string if not ``None``)
:return: a tuple of (future, task_status_value) where the ``task_status_value`` is the
value passed to ``task_status.started()`` from within the target function
.. versionadded:: 3.0
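
        A minimal usage sketch (the service coroutine is illustrative)::

            from anyio import TASK_STATUS_IGNORED, sleep

            async def service(*, task_status=TASK_STATUS_IGNORED):
                task_status.started("ready")
                await sleep(10)

            future, value = portal.start_task(service)  # value == "ready"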
"""
def task_done(future: Future) -> None:
if not task_status_future.done():
if future.cancelled():
task_status_future.cancel()
elif future.exception():
task_status_future.set_exception(future.exception())
else:
exc = RuntimeError(
"Task exited without calling task_status.started()"
)
task_status_future.set_exception(exc)
self._check_running()
task_status_future: Future = Future()
task_status = _BlockingPortalTaskStatus(task_status_future)
f: Future = Future()
f.add_done_callback(task_done)
self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f)
return f, task_status_future.result()
def wrap_async_context_manager(
self, cm: AsyncContextManager[T_co]
) -> ContextManager[T_co]:
"""
Wrap an async context manager as a synchronous context manager via this portal.
Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping in the
middle until the synchronous context manager exits.
:param cm: an asynchronous context manager
:return: a synchronous context manager
.. versionadded:: 2.1
"""
return _BlockingAsyncContextManager(cm, self)
def create_blocking_portal() -> BlockingPortal:
"""
Create a portal for running functions in the event loop thread from external threads.
Use this function in asynchronous code when you need to allow external threads access to the
event loop where your asynchronous code is currently running.
.. deprecated:: 3.0
Use :class:`.BlockingPortal` directly.
"""
warn(
"create_blocking_portal() has been deprecated -- use anyio.from_thread.BlockingPortal() "
"directly",
DeprecationWarning,
)
return BlockingPortal()
@contextmanager
def start_blocking_portal(
backend: str = "asyncio", backend_options: Optional[Dict[str, Any]] = None
) -> Generator[BlockingPortal, Any, None]:
"""
Start a new event loop in a new thread and run a blocking portal in its main task.
The parameters are the same as for :func:`~anyio.run`.
:param backend: name of the backend
:param backend_options: backend options
:return: a context manager that yields a blocking portal
.. versionchanged:: 3.0
Usage as a context manager is now required.
"""
async def run_portal() -> None:
async with BlockingPortal() as portal_:
if future.set_running_or_notify_cancel():
future.set_result(portal_)
await portal_.sleep_until_stopped()
future: Future[BlockingPortal] = Future()
with ThreadPoolExecutor(1) as executor:
run_future = executor.submit(
_eventloop.run,
run_portal, # type: ignore[arg-type]
backend=backend,
backend_options=backend_options,
)
try:
wait(
cast(Iterable[Future], [run_future, future]),
return_when=FIRST_COMPLETED,
)
except BaseException:
future.cancel()
run_future.cancel()
raise
if future.done():
portal = future.result()
try:
yield portal
except BaseException:
portal.call(portal.stop, True)
raise
portal.call(portal.stop, False)
run_future.result() | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/from_thread.py | from_thread.py |
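# Minimal usage sketch (``do_work`` is a hypothetical coroutine function):
#
#     with start_blocking_portal() as portal:
#         result = portal.call(do_work)             # run and wait for the result
#         future = portal.start_task_soon(do_work)  # schedule without waiting
#         future.result()                           # block until it completes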
__all__ = (
"maybe_async",
"maybe_async_cm",
"run",
"sleep",
"sleep_forever",
"sleep_until",
"current_time",
"get_all_backends",
"get_cancelled_exc_class",
"BrokenResourceError",
"BrokenWorkerProcess",
"BusyResourceError",
"ClosedResourceError",
"DelimiterNotFound",
"EndOfStream",
"ExceptionGroup",
"IncompleteRead",
"TypedAttributeLookupError",
"WouldBlock",
"AsyncFile",
"Path",
"open_file",
"wrap_file",
"aclose_forcefully",
"open_signal_receiver",
"connect_tcp",
"connect_unix",
"create_tcp_listener",
"create_unix_listener",
"create_udp_socket",
"create_connected_udp_socket",
"getaddrinfo",
"getnameinfo",
"wait_socket_readable",
"wait_socket_writable",
"create_memory_object_stream",
"run_process",
"open_process",
"create_lock",
"CapacityLimiter",
"CapacityLimiterStatistics",
"Condition",
"ConditionStatistics",
"Event",
"EventStatistics",
"Lock",
"LockStatistics",
"Semaphore",
"SemaphoreStatistics",
"create_condition",
"create_event",
"create_semaphore",
"create_capacity_limiter",
"open_cancel_scope",
"fail_after",
"move_on_after",
"current_effective_deadline",
"TASK_STATUS_IGNORED",
"CancelScope",
"create_task_group",
"TaskInfo",
"get_current_task",
"get_running_tasks",
"wait_all_tasks_blocked",
"run_sync_in_worker_thread",
"run_async_from_thread",
"run_sync_from_thread",
"current_default_worker_thread_limiter",
"create_blocking_portal",
"start_blocking_portal",
"typed_attribute",
"TypedAttributeSet",
"TypedAttributeProvider",
)
from typing import Any
from ._core._compat import maybe_async, maybe_async_cm
from ._core._eventloop import (
current_time,
get_all_backends,
get_cancelled_exc_class,
run,
sleep,
sleep_forever,
sleep_until,
)
from ._core._exceptions import (
BrokenResourceError,
BrokenWorkerProcess,
BusyResourceError,
ClosedResourceError,
DelimiterNotFound,
EndOfStream,
ExceptionGroup,
IncompleteRead,
TypedAttributeLookupError,
WouldBlock,
)
from ._core._fileio import AsyncFile, Path, open_file, wrap_file
from ._core._resources import aclose_forcefully
from ._core._signals import open_signal_receiver
from ._core._sockets import (
connect_tcp,
connect_unix,
create_connected_udp_socket,
create_tcp_listener,
create_udp_socket,
create_unix_listener,
getaddrinfo,
getnameinfo,
wait_socket_readable,
wait_socket_writable,
)
from ._core._streams import create_memory_object_stream
from ._core._subprocesses import open_process, run_process
from ._core._synchronization import (
CapacityLimiter,
CapacityLimiterStatistics,
Condition,
ConditionStatistics,
Event,
EventStatistics,
Lock,
LockStatistics,
Semaphore,
SemaphoreStatistics,
create_capacity_limiter,
create_condition,
create_event,
create_lock,
create_semaphore,
)
from ._core._tasks import (
TASK_STATUS_IGNORED,
CancelScope,
create_task_group,
current_effective_deadline,
fail_after,
move_on_after,
open_cancel_scope,
)
from ._core._testing import (
TaskInfo,
get_current_task,
get_running_tasks,
wait_all_tasks_blocked,
)
from ._core._typedattr import TypedAttributeProvider, TypedAttributeSet, typed_attribute
# Re-exported here, for backwards compatibility
# isort: off
from .to_thread import current_default_worker_thread_limiter, run_sync_in_worker_thread
from .from_thread import (
create_blocking_portal,
run_async_from_thread,
run_sync_from_thread,
start_blocking_portal,
)
# Re-export imports so they look like they live directly in this package
key: str
value: Any
for key, value in list(locals().items()):
    # When vendored under another package (as here), submodule names start with
    # this package's name rather than "anyio.", so compare against __name__.
    if getattr(value, "__module__", "").startswith(__name__ + "."):
        value.__module__ = __name__
import math
from types import TracebackType
from typing import Optional, Type
from warnings import warn
from ..abc._tasks import TaskGroup, TaskStatus
from ._compat import (
DeprecatedAsyncContextManager,
DeprecatedAwaitable,
DeprecatedAwaitableFloat,
)
from ._eventloop import get_asynclib
class _IgnoredTaskStatus(TaskStatus):
def started(self, value: object = None) -> None:
pass
TASK_STATUS_IGNORED = _IgnoredTaskStatus()
class CancelScope(DeprecatedAsyncContextManager["CancelScope"]):
"""
Wraps a unit of work that can be made separately cancellable.
:param deadline: The time (clock value) when this scope is cancelled automatically
:param shield: ``True`` to shield the cancel scope from external cancellation
"""
def __new__(
cls, *, deadline: float = math.inf, shield: bool = False
) -> "CancelScope":
return get_asynclib().CancelScope(shield=shield, deadline=deadline)
def cancel(self) -> DeprecatedAwaitable:
"""Cancel this scope immediately."""
raise NotImplementedError
@property
def deadline(self) -> float:
"""
The time (clock value) when this scope is cancelled automatically.
Will be ``float('inf')`` if no timeout has been set.
"""
raise NotImplementedError
@deadline.setter
def deadline(self, value: float) -> None:
raise NotImplementedError
@property
def cancel_called(self) -> bool:
"""``True`` if :meth:`cancel` has been called."""
raise NotImplementedError
@property
def shield(self) -> bool:
"""
``True`` if this scope is shielded from external cancellation.
While a scope is shielded, it will not receive cancellations from outside.
"""
raise NotImplementedError
@shield.setter
def shield(self, value: bool) -> None:
raise NotImplementedError
def __enter__(self) -> "CancelScope":
raise NotImplementedError
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
raise NotImplementedError
def open_cancel_scope(*, shield: bool = False) -> CancelScope:
"""
Open a cancel scope.
:param shield: ``True`` to shield the cancel scope from external cancellation
:return: a cancel scope
.. deprecated:: 3.0
Use :class:`~CancelScope` directly.
"""
warn(
"open_cancel_scope() is deprecated -- use CancelScope() directly",
DeprecationWarning,
)
return get_asynclib().CancelScope(shield=shield)
class FailAfterContextManager(DeprecatedAsyncContextManager[CancelScope]):
def __init__(self, cancel_scope: CancelScope):
self._cancel_scope = cancel_scope
def __enter__(self) -> CancelScope:
return self._cancel_scope.__enter__()
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
retval = self._cancel_scope.__exit__(exc_type, exc_val, exc_tb)
if self._cancel_scope.cancel_called:
raise TimeoutError
return retval
def fail_after(delay: Optional[float], shield: bool = False) -> FailAfterContextManager:
"""
    Create a context manager which raises a :class:`TimeoutError` if its enclosed block does
    not finish in time.
:param delay: maximum allowed time (in seconds) before raising the exception, or ``None`` to
disable the timeout
:param shield: ``True`` to shield the cancel scope from external cancellation
:return: a context manager that yields a cancel scope
:rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.abc.CancelScope`\\]
"""
deadline = (
(get_asynclib().current_time() + delay) if delay is not None else math.inf
)
cancel_scope = get_asynclib().CancelScope(deadline=deadline, shield=shield)
return FailAfterContextManager(cancel_scope)
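# Minimal usage sketch (``anyio.sleep`` stands in for any awaitable work):
#
#     async def main() -> None:
#         with fail_after(5):
#             await anyio.sleep(10)  # cancelled; TimeoutError raised on exit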
def move_on_after(delay: Optional[float], shield: bool = False) -> CancelScope:
"""
Create a cancel scope with a deadline that expires after the given delay.
:param delay: maximum allowed time (in seconds) before exiting the context block, or ``None``
to disable the timeout
:param shield: ``True`` to shield the cancel scope from external cancellation
:return: a cancel scope
"""
deadline = (
(get_asynclib().current_time() + delay) if delay is not None else math.inf
)
return get_asynclib().CancelScope(deadline=deadline, shield=shield)
def current_effective_deadline() -> DeprecatedAwaitableFloat:
"""
Return the nearest deadline among all the cancel scopes effective for the current task.
:return: a clock value from the event loop's internal clock (``float('inf')`` if there is no
deadline in effect)
:rtype: float
"""
return DeprecatedAwaitableFloat(
get_asynclib().current_effective_deadline(), current_effective_deadline
)
def create_task_group() -> "TaskGroup":
"""
Create a task group.
:return: a task group
"""
return get_asynclib().TaskGroup() | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/_core/_tasks.py | _tasks.py |
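# Minimal usage sketch (``worker`` is a hypothetical coroutine function):
#
#     async def main() -> None:
#         async with create_task_group() as tg:
#             tg.start_soon(worker, 1)
#             tg.start_soon(worker, 2)
#         # exiting the ``async with`` block waits for both tasks to finish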
from traceback import format_exception
from typing import List
class BrokenResourceError(Exception):
"""
Raised when trying to use a resource that has been rendered unusable due to external causes
(e.g. a send stream whose peer has disconnected).
"""
class BrokenWorkerProcess(Exception):
"""
Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or otherwise
misbehaves.
"""
class BusyResourceError(Exception):
"""Raised when two tasks are trying to read from or write to the same resource concurrently."""
def __init__(self, action: str):
super().__init__(f"Another task is already {action} this resource")
class ClosedResourceError(Exception):
"""Raised when trying to use a resource that has been closed."""
class DelimiterNotFound(Exception):
"""
Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
maximum number of bytes has been read without the delimiter being found.
"""
def __init__(self, max_bytes: int) -> None:
super().__init__(
f"The delimiter was not found among the first {max_bytes} bytes"
)
class EndOfStream(Exception):
"""Raised when trying to read from a stream that has been closed from the other end."""
class ExceptionGroup(BaseException):
"""
Raised when multiple exceptions have been raised in a task group.
:var ~typing.Sequence[BaseException] exceptions: the sequence of exceptions raised together
"""
SEPARATOR = "----------------------------\n"
exceptions: List[BaseException]
def __str__(self) -> str:
tracebacks = [
"".join(format_exception(type(exc), exc, exc.__traceback__))
for exc in self.exceptions
]
return (
f"{len(self.exceptions)} exceptions were raised in the task group:\n"
f"{self.SEPARATOR}{self.SEPARATOR.join(tracebacks)}"
)
def __repr__(self) -> str:
exception_reprs = ", ".join(repr(exc) for exc in self.exceptions)
return f"<{self.__class__.__name__}: {exception_reprs}>"
class IncompleteRead(Exception):
"""
Raised during :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or
:meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
connection is closed before the requested amount of bytes has been read.
"""
def __init__(self) -> None:
super().__init__(
"The stream was closed before the read operation could be completed"
)
class TypedAttributeLookupError(LookupError):
"""
Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute is not
found and no default value has been given.
"""
class WouldBlock(Exception):
"""Raised by ``X_nowait`` functions if ``X()`` would block.""" | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/_core/_exceptions.py | _exceptions.py |
import math
import sys
import threading
from contextlib import contextmanager
from importlib import import_module
from typing import (
Any,
Callable,
Coroutine,
Dict,
Generator,
Optional,
Tuple,
Type,
TypeVar,
)
from zdppy_api import sniffio
# This must be updated when new backends are introduced
from ._compat import DeprecatedAwaitableFloat
BACKENDS = "asyncio", "trio"
T_Retval = TypeVar("T_Retval")
threadlocals = threading.local()
def run(
func: Callable[..., Coroutine[Any, Any, T_Retval]],
*args: object,
backend: str = "asyncio",
backend_options: Optional[Dict[str, Any]] = None,
) -> T_Retval:
"""
Run the given coroutine function in an asynchronous event loop.
The current thread must not be already running an event loop.
:param func: a coroutine function
:param args: positional arguments to ``func``
:param backend: name of the asynchronous event loop implementation – currently either
``asyncio`` or ``trio``
:param backend_options: keyword arguments to call the backend ``run()`` implementation with
(documented :ref:`here <backend options>`)
:return: the return value of the coroutine function
:raises RuntimeError: if an asynchronous event loop is already running in this thread
:raises LookupError: if the named backend is not found
"""
try:
asynclib_name = sniffio.current_async_library()
except sniffio.AsyncLibraryNotFoundError:
pass
else:
raise RuntimeError(f"Already running {asynclib_name} in this thread")
try:
asynclib = import_module(f"..._backends._{backend}", package=__name__)
except ImportError as exc:
raise LookupError(f"No such backend: {backend}") from exc
token = None
if sniffio.current_async_library_cvar.get(None) is None:
# Since we're in control of the event loop, we can cache the name of the async library
token = sniffio.current_async_library_cvar.set(backend)
try:
backend_options = backend_options or {}
return asynclib.run(func, *args, **backend_options)
finally:
if token:
sniffio.current_async_library_cvar.reset(token)
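# Minimal usage sketch:
#
#     async def main() -> int:
#         await sleep(1)
#         return 42
#
#     run(main)                  # uses the asyncio backend by default
#     run(main, backend="trio")  # requires the trio package to be installed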
async def sleep(delay: float) -> None:
"""
Pause the current task for the specified duration.
:param delay: the duration, in seconds
"""
return await get_asynclib().sleep(delay)
async def sleep_forever() -> None:
"""
Pause the current task until it's cancelled.
This is a shortcut for ``sleep(math.inf)``.
.. versionadded:: 3.1
"""
await sleep(math.inf)
async def sleep_until(deadline: float) -> None:
"""
Pause the current task until the given time.
:param deadline: the absolute time to wake up at (according to the internal monotonic clock of
the event loop)
.. versionadded:: 3.1
"""
now = current_time()
await sleep(max(deadline - now, 0))
def current_time() -> DeprecatedAwaitableFloat:
"""
Return the current value of the event loop's internal clock.
:return: the clock value (seconds)
"""
return DeprecatedAwaitableFloat(get_asynclib().current_time(), current_time)
def get_all_backends() -> Tuple[str, ...]:
"""Return a tuple of the names of all built-in backends."""
return BACKENDS
def get_cancelled_exc_class() -> Type[BaseException]:
"""Return the current async library's cancellation exception class."""
return get_asynclib().CancelledError
#
# Private API
#
@contextmanager
def claim_worker_thread(backend: str) -> Generator[Any, None, None]:
    # Resolve the backend module name relative to this package so the lookup also
    # works when anyio is vendored under another top-level package, as it is here;
    # run() above imports the backends the same (package-relative) way.
    module = sys.modules[__name__.rsplit(".", 2)[0] + "._backends._" + backend]
    threadlocals.current_async_module = module
    try:
        yield
    finally:
        del threadlocals.current_async_module
def get_asynclib(asynclib_name: Optional[str] = None) -> Any:
    if asynclib_name is None:
        asynclib_name = sniffio.current_async_library()
    modulename = __name__.rsplit(".", 2)[0] + "._backends._" + asynclib_name
    try:
        return sys.modules[modulename]
    except KeyError:
        return import_module(modulename)
import socket
import ssl
import sys
from ipaddress import IPv6Address, ip_address
from os import PathLike, chmod
from pathlib import Path
from socket import AddressFamily, SocketKind
from typing import Awaitable, List, Optional, Tuple, Union, cast, overload
from .. import to_thread
from ..abc import (
ConnectedUDPSocket,
IPAddressType,
IPSockAddrType,
SocketListener,
SocketStream,
UDPSocket,
UNIXSocketStream,
)
from ..streams.stapled import MultiListener
from ..streams.tls import TLSStream
from ._eventloop import get_asynclib
from ._resources import aclose_forcefully
from ._synchronization import Event
from ._tasks import create_task_group, move_on_after
if sys.version_info >= (3, 8):
from typing import Literal
else:
from typing_extensions import Literal
IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515
GetAddrInfoReturnType = List[
Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int]]
]
AnyIPAddressFamily = Literal[
AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6
]
IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6]
# tls_hostname given
@overload
async def connect_tcp(
remote_host: IPAddressType,
remote_port: int,
*,
local_host: Optional[IPAddressType] = ...,
ssl_context: Optional[ssl.SSLContext] = ...,
tls_standard_compatible: bool = ...,
tls_hostname: str,
happy_eyeballs_delay: float = ...,
) -> TLSStream:
...
# ssl_context given
@overload
async def connect_tcp(
remote_host: IPAddressType,
remote_port: int,
*,
local_host: Optional[IPAddressType] = ...,
ssl_context: ssl.SSLContext,
tls_standard_compatible: bool = ...,
tls_hostname: Optional[str] = ...,
happy_eyeballs_delay: float = ...,
) -> TLSStream:
...
# tls=True
@overload
async def connect_tcp(
remote_host: IPAddressType,
remote_port: int,
*,
local_host: Optional[IPAddressType] = ...,
tls: Literal[True],
ssl_context: Optional[ssl.SSLContext] = ...,
tls_standard_compatible: bool = ...,
tls_hostname: Optional[str] = ...,
happy_eyeballs_delay: float = ...,
) -> TLSStream:
...
# tls=False
@overload
async def connect_tcp(
remote_host: IPAddressType,
remote_port: int,
*,
local_host: Optional[IPAddressType] = ...,
tls: Literal[False],
ssl_context: Optional[ssl.SSLContext] = ...,
tls_standard_compatible: bool = ...,
tls_hostname: Optional[str] = ...,
happy_eyeballs_delay: float = ...,
) -> SocketStream:
...
# No TLS arguments
@overload
async def connect_tcp(
remote_host: IPAddressType,
remote_port: int,
*,
local_host: Optional[IPAddressType] = ...,
happy_eyeballs_delay: float = ...,
) -> SocketStream:
...
async def connect_tcp(
remote_host: IPAddressType,
remote_port: int,
*,
local_host: Optional[IPAddressType] = None,
tls: bool = False,
ssl_context: Optional[ssl.SSLContext] = None,
tls_standard_compatible: bool = True,
tls_hostname: Optional[str] = None,
happy_eyeballs_delay: float = 0.25,
) -> Union[SocketStream, TLSStream]:
"""
Connect to a host using the TCP protocol.
This function implements the stateless version of the Happy Eyeballs algorithm (RFC 6555).
    If ``remote_host`` is a host name that resolves to multiple IP addresses, each one is tried
    until one connection attempt succeeds. If the first attempt does not connect within 250
    milliseconds (the default ``happy_eyeballs_delay``), a second attempt is started using the
    next address in the list, and so on.
On IPv6 enabled systems, an IPv6 address (if available) is tried first.
When the connection has been established, a TLS handshake will be done if either
``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.
:param remote_host: the IP address or host name to connect to
:param remote_port: port on the target host to connect to
:param local_host: the interface address or name to bind the socket to before connecting
:param tls: ``True`` to do a TLS handshake with the connected stream and return a
:class:`~anyio.streams.tls.TLSStream` instead
:param ssl_context: the SSL context object to use (if omitted, a default context is created)
:param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake before closing
the stream and requires that the server does this as well. Otherwise,
:exc:`~ssl.SSLEOFError` may be raised during reads from the stream.
Some protocols, such as HTTP, require this option to be ``False``.
See :meth:`~ssl.SSLContext.wrap_socket` for details.
:param tls_hostname: host name to check the server certificate against (defaults to the value
of ``remote_host``)
:param happy_eyeballs_delay: delay (in seconds) before starting the next connection attempt
:return: a socket stream object if no TLS handshake was done, otherwise a TLS stream
:raises OSError: if the connection attempt fails
"""
# Placed here due to https://github.com/python/mypy/issues/7057
connected_stream: Optional[SocketStream] = None
async def try_connect(remote_host: str, event: Event) -> None:
nonlocal connected_stream
try:
stream = await asynclib.connect_tcp(remote_host, remote_port, local_address)
except OSError as exc:
oserrors.append(exc)
return
else:
if connected_stream is None:
connected_stream = stream
tg.cancel_scope.cancel()
else:
await stream.aclose()
finally:
event.set()
asynclib = get_asynclib()
local_address: Optional[IPSockAddrType] = None
family = socket.AF_UNSPEC
if local_host:
gai_res = await getaddrinfo(str(local_host), None)
family, *_, local_address = gai_res[0]
target_host = str(remote_host)
try:
addr_obj = ip_address(remote_host)
except ValueError:
# getaddrinfo() will raise an exception if name resolution fails
gai_res = await getaddrinfo(
target_host, remote_port, family=family, type=socket.SOCK_STREAM
)
# Organize the list so that the first address is an IPv6 address (if available) and the
        # second one is an IPv4 address. The rest can be in whatever order.
v6_found = v4_found = False
target_addrs: List[Tuple[socket.AddressFamily, str]] = []
for af, *rest, sa in gai_res:
if af == socket.AF_INET6 and not v6_found:
v6_found = True
target_addrs.insert(0, (af, sa[0]))
elif af == socket.AF_INET and not v4_found and v6_found:
v4_found = True
target_addrs.insert(1, (af, sa[0]))
else:
target_addrs.append((af, sa[0]))
else:
if isinstance(addr_obj, IPv6Address):
target_addrs = [(socket.AF_INET6, addr_obj.compressed)]
else:
target_addrs = [(socket.AF_INET, addr_obj.compressed)]
oserrors: List[OSError] = []
async with create_task_group() as tg:
for i, (af, addr) in enumerate(target_addrs):
event = Event()
tg.start_soon(try_connect, addr, event)
with move_on_after(happy_eyeballs_delay):
await event.wait()
if connected_stream is None:
cause = oserrors[0] if len(oserrors) == 1 else asynclib.ExceptionGroup(oserrors)
raise OSError("All connection attempts failed") from cause
if tls or tls_hostname or ssl_context:
try:
return await TLSStream.wrap(
connected_stream,
server_side=False,
hostname=tls_hostname or str(remote_host),
ssl_context=ssl_context,
standard_compatible=tls_standard_compatible,
)
except BaseException:
await aclose_forcefully(connected_stream)
raise
return connected_stream
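# Minimal usage sketch (``example.com`` is a placeholder host):
#
#     async def main() -> None:
#         stream = await connect_tcp("example.com", 80)
#         async with stream:
#             await stream.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
#             response = await stream.receive()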
async def connect_unix(path: Union[str, "PathLike[str]"]) -> UNIXSocketStream:
"""
Connect to the given UNIX socket.
Not available on Windows.
:param path: path to the socket
:return: a socket stream object
"""
path = str(Path(path))
return await get_asynclib().connect_unix(path)
async def create_tcp_listener(
*,
local_host: Optional[IPAddressType] = None,
local_port: int = 0,
family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC,
backlog: int = 65536,
reuse_port: bool = False,
) -> MultiListener[SocketStream]:
"""
Create a TCP socket listener.
:param local_port: port number to listen on
:param local_host: IP address of the interface to listen on. If omitted, listen on all IPv4
and IPv6 interfaces. To listen on all interfaces on a specific address family, use
``0.0.0.0`` for IPv4 or ``::`` for IPv6.
    :param family: address family (used if ``local_host`` was omitted)
:param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or
65536)
:param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
(not supported on Windows)
:return: a list of listener objects
"""
asynclib = get_asynclib()
backlog = min(backlog, 65536)
local_host = str(local_host) if local_host is not None else None
gai_res = await getaddrinfo(
local_host, # type: ignore[arg-type]
local_port,
family=family,
type=socket.SOCK_STREAM,
flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
)
listeners: List[SocketListener] = []
try:
# The set() is here to work around a glibc bug:
# https://sourceware.org/bugzilla/show_bug.cgi?id=14969
for fam, *_, sockaddr in sorted(set(gai_res)):
raw_socket = socket.socket(fam)
raw_socket.setblocking(False)
# For Windows, enable exclusive address use. For others, enable address reuse.
if sys.platform == "win32":
raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
else:
raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if reuse_port:
raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
# If only IPv6 was requested, disable dual stack operation
if fam == socket.AF_INET6:
raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)
raw_socket.bind(sockaddr)
raw_socket.listen(backlog)
listener = asynclib.TCPSocketListener(raw_socket)
listeners.append(listener)
except BaseException:
for listener in listeners:
await listener.aclose()
raise
return MultiListener(listeners)
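# Minimal usage sketch (``handle`` is a hypothetical coroutine that accepts a
# SocketStream):
#
#     async def main() -> None:
#         listener = await create_tcp_listener(local_port=8000)
#         await listener.serve(handle)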
async def create_unix_listener(
path: Union[str, "PathLike[str]"],
*,
mode: Optional[int] = None,
backlog: int = 65536,
) -> SocketListener:
"""
Create a UNIX socket listener.
Not available on Windows.
:param path: path of the socket
:param mode: permissions to set on the socket
:param backlog: maximum number of queued incoming connections (up to a maximum of 2**16, or
65536)
:return: a listener object
.. versionchanged:: 3.0
If a socket already exists on the file system in the given path, it will be removed first.
"""
path_str = str(path)
path = Path(path)
if path.is_socket():
path.unlink()
backlog = min(backlog, 65536)
raw_socket = socket.socket(socket.AF_UNIX)
raw_socket.setblocking(False)
try:
await to_thread.run_sync(raw_socket.bind, path_str, cancellable=True)
if mode is not None:
await to_thread.run_sync(chmod, path_str, mode, cancellable=True)
raw_socket.listen(backlog)
return get_asynclib().UNIXSocketListener(raw_socket)
except BaseException:
raw_socket.close()
raise
async def create_udp_socket(
family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
*,
local_host: Optional[IPAddressType] = None,
local_port: int = 0,
reuse_port: bool = False,
) -> UDPSocket:
"""
Create a UDP socket.
    If ``local_port`` has been given, the socket will be bound to this port on the local
    machine, making this socket suitable for providing UDP based services.
:param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from
``local_host`` if omitted
:param local_host: IP address or host name of the local interface to bind to
:param local_port: local port to bind to
:param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
(not supported on Windows)
:return: a UDP socket
"""
if family is AddressFamily.AF_UNSPEC and not local_host:
raise ValueError('Either "family" or "local_host" must be given')
if local_host:
gai_res = await getaddrinfo(
str(local_host),
local_port,
family=family,
type=socket.SOCK_DGRAM,
flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
)
family = cast(AnyIPAddressFamily, gai_res[0][0])
local_address = gai_res[0][-1]
elif family is AddressFamily.AF_INET6:
local_address = ("::", 0)
else:
local_address = ("0.0.0.0", 0)
return await get_asynclib().create_udp_socket(
family, local_address, None, reuse_port
)
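# Minimal usage sketch of a one-shot UDP echo:
#
#     async def main() -> None:
#         udp = await create_udp_socket(local_host="127.0.0.1", local_port=9000)
#         async with udp:
#             packet, (host, port) = await udp.receive()
#             await udp.sendto(packet, host, port)  # echo it back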
async def create_connected_udp_socket(
remote_host: IPAddressType,
remote_port: int,
*,
family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
local_host: Optional[IPAddressType] = None,
local_port: int = 0,
reuse_port: bool = False,
) -> ConnectedUDPSocket:
"""
Create a connected UDP socket.
Connected UDP sockets can only communicate with the specified remote host/port, and any packets
sent from other sources are dropped.
:param remote_host: remote host to set as the default target
:param remote_port: port on the remote host to set as the default target
:param family: address family (``AF_INET`` or ``AF_INET6``) – automatically determined from
``local_host`` or ``remote_host`` if omitted
:param local_host: IP address or host name of the local interface to bind to
:param local_port: local port to bind to
:param reuse_port: ``True`` to allow multiple sockets to bind to the same address/port
(not supported on Windows)
:return: a connected UDP socket
"""
local_address = None
if local_host:
gai_res = await getaddrinfo(
str(local_host),
local_port,
family=family,
type=socket.SOCK_DGRAM,
flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
)
family = cast(AnyIPAddressFamily, gai_res[0][0])
local_address = gai_res[0][-1]
gai_res = await getaddrinfo(
str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM
)
family = cast(AnyIPAddressFamily, gai_res[0][0])
remote_address = gai_res[0][-1]
return await get_asynclib().create_udp_socket(
family, local_address, remote_address, reuse_port
)
async def getaddrinfo(
host: Union[bytearray, bytes, str],
port: Union[str, int, None],
*,
family: Union[int, AddressFamily] = 0,
type: Union[int, SocketKind] = 0,
proto: int = 0,
flags: int = 0,
) -> GetAddrInfoReturnType:
"""
Look up a numeric IP address given a host name.
Internationalized domain names are translated according to the (non-transitional) IDNA 2008
standard.
.. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
(host, port), unlike what :func:`socket.getaddrinfo` does.
:param host: host name
:param port: port number
    :param family: socket family (``AF_INET``, ...)
:param type: socket type (``SOCK_STREAM``, ...)
:param proto: protocol number
:param flags: flags to pass to upstream ``getaddrinfo()``
:return: list of tuples containing (family, type, proto, canonname, sockaddr)
.. seealso:: :func:`socket.getaddrinfo`
"""
# Handle unicode hostnames
if isinstance(host, str):
try:
encoded_host = host.encode("ascii")
except UnicodeEncodeError:
import idna
encoded_host = idna.encode(host, uts46=True)
else:
encoded_host = host
gai_res = await get_asynclib().getaddrinfo(
encoded_host, port, family=family, type=type, proto=proto, flags=flags
)
return [
(family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
for family, type, proto, canonname, sockaddr in gai_res
]
def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[Tuple[str, str]]:
"""
Look up the host name of an IP address.
:param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
:param flags: flags to pass to upstream ``getnameinfo()``
:return: a tuple of (host name, service name)
.. seealso:: :func:`socket.getnameinfo`
"""
return get_asynclib().getnameinfo(sockaddr, flags)
def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
"""
Wait until the given socket has data to be read.
This does **NOT** work on Windows when using the asyncio backend with a proactor event loop
(default on py3.8+).
.. warning:: Only use this on raw sockets that have not been wrapped by any higher level
constructs like socket streams!
:param sock: a socket object
:raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
socket to become readable
:raises ~anyio.BusyResourceError: if another task is already waiting for the socket
to become readable
"""
return get_asynclib().wait_socket_readable(sock)
def wait_socket_writable(sock: socket.socket) -> Awaitable[None]:
"""
Wait until the given socket can be written to.
This does **NOT** work on Windows when using the asyncio backend with a proactor event loop
(default on py3.8+).
.. warning:: Only use this on raw sockets that have not been wrapped by any higher level
constructs like socket streams!
:param sock: a socket object
:raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
socket to become writable
:raises ~anyio.BusyResourceError: if another task is already waiting for the socket
to become writable
"""
return get_asynclib().wait_socket_writable(sock)
#
# Private API
#
def convert_ipv6_sockaddr(
sockaddr: Union[Tuple[str, int, int, int], Tuple[str, int]]
) -> Tuple[str, int]:
"""
Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format.
If the scope ID is nonzero, it is added to the address, separated with ``%``.
Otherwise the flow id and scope id are simply cut off from the tuple.
Any other kinds of socket addresses are returned as-is.
:param sockaddr: the result of :meth:`~socket.socket.getsockname`
:return: the converted socket address
"""
# This is more complicated than it should be because of MyPy
if isinstance(sockaddr, tuple) and len(sockaddr) == 4:
host, port, flowinfo, scope_id = cast(Tuple[str, int, int, int], sockaddr)
if scope_id:
# Add scope_id to the address
return f"{host}%{scope_id}", port
else:
return host, port
else:
return cast(Tuple[str, int], sockaddr) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/_core/_sockets.py | _sockets.py |
from collections import deque
from dataclasses import dataclass
from types import TracebackType
from typing import Deque, Optional, Tuple, Type
from warnings import warn
from ..lowlevel import cancel_shielded_checkpoint, checkpoint, checkpoint_if_cancelled
from ._compat import DeprecatedAwaitable
from ._eventloop import get_asynclib
from ._exceptions import BusyResourceError, WouldBlock
from ._tasks import CancelScope
from ._testing import TaskInfo, get_current_task
@dataclass(frozen=True)
class EventStatistics:
"""
:ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait`
"""
tasks_waiting: int
@dataclass(frozen=True)
class CapacityLimiterStatistics:
"""
:ivar int borrowed_tokens: number of tokens currently borrowed by tasks
:ivar float total_tokens: total number of available tokens
:ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from this
limiter
:ivar int tasks_waiting: number of tasks waiting on :meth:`~.CapacityLimiter.acquire` or
:meth:`~.CapacityLimiter.acquire_on_behalf_of`
"""
borrowed_tokens: int
total_tokens: float
borrowers: Tuple[object, ...]
tasks_waiting: int
@dataclass(frozen=True)
class LockStatistics:
"""
:ivar bool locked: flag indicating if this lock is locked or not
:ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the lock is not
held by any task)
:ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire`
"""
locked: bool
owner: Optional[TaskInfo]
tasks_waiting: int
@dataclass(frozen=True)
class ConditionStatistics:
"""
:ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait`
:ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying :class:`~.Lock`
"""
tasks_waiting: int
lock_statistics: LockStatistics
@dataclass(frozen=True)
class SemaphoreStatistics:
"""
:ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire`
"""
tasks_waiting: int
class Event:
def __new__(cls) -> "Event":
return get_asynclib().Event()
def set(self) -> DeprecatedAwaitable:
"""Set the flag, notifying all listeners."""
raise NotImplementedError
def is_set(self) -> bool:
"""Return ``True`` if the flag is set, ``False`` if not."""
raise NotImplementedError
async def wait(self) -> None:
"""
Wait until the flag has been set.
If the flag has already been set when this method is called, it returns immediately.
"""
raise NotImplementedError
def statistics(self) -> EventStatistics:
"""Return statistics about the current state of this event."""
raise NotImplementedError
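# Minimal usage sketch:
#
#     event = Event()
#
#     async def waiter() -> None:
#         await event.wait()  # returns once some task has called event.set()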
class Lock:
_owner_task: Optional[TaskInfo] = None
def __init__(self) -> None:
self._waiters: Deque[Tuple[TaskInfo, Event]] = deque()
async def __aenter__(self) -> None:
await self.acquire()
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
self.release()
async def acquire(self) -> None:
"""Acquire the lock."""
await checkpoint_if_cancelled()
try:
self.acquire_nowait()
except WouldBlock:
task = get_current_task()
event = Event()
token = task, event
self._waiters.append(token)
try:
await event.wait()
except BaseException:
if not event.is_set():
self._waiters.remove(token)
elif self._owner_task == task:
self.release()
raise
assert self._owner_task == task
else:
try:
await cancel_shielded_checkpoint()
except BaseException:
self.release()
raise
def acquire_nowait(self) -> None:
"""
Acquire the lock, without blocking.
        :raises ~anyio.WouldBlock: if the operation would block
"""
task = get_current_task()
if self._owner_task == task:
raise RuntimeError("Attempted to acquire an already held Lock")
if self._owner_task is not None:
raise WouldBlock
self._owner_task = task
def release(self) -> DeprecatedAwaitable:
"""Release the lock."""
if self._owner_task != get_current_task():
raise RuntimeError("The current task is not holding this lock")
if self._waiters:
self._owner_task, event = self._waiters.popleft()
event.set()
else:
del self._owner_task
return DeprecatedAwaitable(self.release)
def locked(self) -> bool:
"""Return True if the lock is currently held."""
return self._owner_task is not None
def statistics(self) -> LockStatistics:
"""
Return statistics about the current state of this lock.
.. versionadded:: 3.0
"""
return LockStatistics(self.locked(), self._owner_task, len(self._waiters))
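# Minimal usage sketch:
#
#     lock = Lock()
#
#     async def critical() -> None:
#         async with lock:
#             ...  # at most one task executes this block at a time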
class Condition:
_owner_task: Optional[TaskInfo] = None
def __init__(self, lock: Optional[Lock] = None):
self._lock = lock or Lock()
self._waiters: Deque[Event] = deque()
async def __aenter__(self) -> None:
await self.acquire()
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
self.release()
def _check_acquired(self) -> None:
if self._owner_task != get_current_task():
raise RuntimeError("The current task is not holding the underlying lock")
async def acquire(self) -> None:
"""Acquire the underlying lock."""
await self._lock.acquire()
self._owner_task = get_current_task()
def acquire_nowait(self) -> None:
"""
Acquire the underlying lock, without blocking.
        :raises ~anyio.WouldBlock: if the operation would block
"""
self._lock.acquire_nowait()
self._owner_task = get_current_task()
def release(self) -> DeprecatedAwaitable:
"""Release the underlying lock."""
self._lock.release()
return DeprecatedAwaitable(self.release)
def locked(self) -> bool:
"""Return True if the lock is set."""
return self._lock.locked()
def notify(self, n: int = 1) -> None:
"""Notify exactly n listeners."""
self._check_acquired()
for _ in range(n):
try:
event = self._waiters.popleft()
except IndexError:
break
event.set()
def notify_all(self) -> None:
"""Notify all the listeners."""
self._check_acquired()
for event in self._waiters:
event.set()
self._waiters.clear()
async def wait(self) -> None:
"""Wait for a notification."""
await checkpoint()
event = Event()
self._waiters.append(event)
self.release()
try:
await event.wait()
except BaseException:
if not event.is_set():
self._waiters.remove(event)
raise
finally:
with CancelScope(shield=True):
await self.acquire()
def statistics(self) -> ConditionStatistics:
"""
Return statistics about the current state of this condition.
.. versionadded:: 3.0
"""
return ConditionStatistics(len(self._waiters), self._lock.statistics())
class Semaphore:
def __init__(self, initial_value: int, *, max_value: Optional[int] = None):
if not isinstance(initial_value, int):
raise TypeError("initial_value must be an integer")
if initial_value < 0:
raise ValueError("initial_value must be >= 0")
if max_value is not None:
if not isinstance(max_value, int):
raise TypeError("max_value must be an integer or None")
if max_value < initial_value:
raise ValueError(
"max_value must be equal to or higher than initial_value"
)
self._value = initial_value
self._max_value = max_value
self._waiters: Deque[Event] = deque()
async def __aenter__(self) -> "Semaphore":
await self.acquire()
return self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
self.release()
async def acquire(self) -> None:
"""Decrement the semaphore value, blocking if necessary."""
await checkpoint_if_cancelled()
try:
self.acquire_nowait()
except WouldBlock:
event = Event()
self._waiters.append(event)
try:
await event.wait()
except BaseException:
if not event.is_set():
self._waiters.remove(event)
else:
self.release()
raise
else:
try:
await cancel_shielded_checkpoint()
except BaseException:
self.release()
raise
def acquire_nowait(self) -> None:
"""
        Acquire the semaphore without blocking.
        :raises ~anyio.WouldBlock: if the operation would block
"""
if self._value == 0:
raise WouldBlock
self._value -= 1
def release(self) -> DeprecatedAwaitable:
"""Increment the semaphore value."""
if self._max_value is not None and self._value == self._max_value:
raise ValueError("semaphore released too many times")
if self._waiters:
self._waiters.popleft().set()
else:
self._value += 1
return DeprecatedAwaitable(self.release)
@property
def value(self) -> int:
"""The current value of the semaphore."""
return self._value
@property
def max_value(self) -> Optional[int]:
"""The maximum value of the semaphore."""
return self._max_value
def statistics(self) -> SemaphoreStatistics:
"""
Return statistics about the current state of this semaphore.
.. versionadded:: 3.0
"""
return SemaphoreStatistics(len(self._waiters))
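# Minimal usage sketch:
#
#     semaphore = Semaphore(2)
#
#     async def limited() -> None:
#         async with semaphore:
#             ...  # at most two tasks may be in this block concurrently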
class CapacityLimiter:
def __new__(cls, total_tokens: float) -> "CapacityLimiter":
return get_asynclib().CapacityLimiter(total_tokens)
async def __aenter__(self) -> None:
raise NotImplementedError
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
raise NotImplementedError
@property
def total_tokens(self) -> float:
"""
The total number of tokens available for borrowing.
This is a read-write property. If the total number of tokens is increased, the
proportionate number of tasks waiting on this limiter will be granted their tokens.
.. versionchanged:: 3.0
The property is now writable.
"""
raise NotImplementedError
@total_tokens.setter
def total_tokens(self, value: float) -> None:
raise NotImplementedError
async def set_total_tokens(self, value: float) -> None:
warn(
"CapacityLimiter.set_total_tokens has been deprecated. Set the value of the"
'"total_tokens" attribute directly.',
DeprecationWarning,
)
self.total_tokens = value
@property
def borrowed_tokens(self) -> int:
"""The number of tokens that have currently been borrowed."""
raise NotImplementedError
@property
def available_tokens(self) -> float:
"""The number of tokens currently available to be borrowed"""
raise NotImplementedError
def acquire_nowait(self) -> DeprecatedAwaitable:
"""
Acquire a token for the current task without waiting for one to become available.
:raises ~anyio.WouldBlock: if there are no tokens available for borrowing
"""
raise NotImplementedError
def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable:
"""
Acquire a token without waiting for one to become available.
:param borrower: the entity borrowing a token
:raises ~anyio.WouldBlock: if there are no tokens available for borrowing
"""
raise NotImplementedError
async def acquire(self) -> None:
"""
Acquire a token for the current task, waiting if necessary for one to become available.
"""
raise NotImplementedError
async def acquire_on_behalf_of(self, borrower: object) -> None:
"""
Acquire a token, waiting if necessary for one to become available.
:param borrower: the entity borrowing a token
"""
raise NotImplementedError
def release(self) -> None:
"""
Release the token held by the current task.
:raises RuntimeError: if the current task has not borrowed a token from this limiter.
"""
raise NotImplementedError
def release_on_behalf_of(self, borrower: object) -> None:
"""
Release the token held by the given borrower.
:raises RuntimeError: if the borrower has not borrowed a token from this limiter.
"""
raise NotImplementedError
def statistics(self) -> CapacityLimiterStatistics:
"""
Return statistics about the current state of this limiter.
.. versionadded:: 3.0
"""
raise NotImplementedError
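# Minimal usage sketch:
#
#     limiter = CapacityLimiter(4)
#
#     async def worker() -> None:
#         async with limiter:
#             ...  # at most four tokens borrowed at any one time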
def create_lock() -> Lock:
"""
Create an asynchronous lock.
:return: a lock object
.. deprecated:: 3.0
Use :class:`~Lock` directly.
"""
warn("create_lock() is deprecated -- use Lock() directly", DeprecationWarning)
return Lock()
def create_condition(lock: Optional[Lock] = None) -> Condition:
"""
Create an asynchronous condition.
:param lock: the lock to base the condition object on
:return: a condition object
.. deprecated:: 3.0
Use :class:`~Condition` directly.
"""
warn(
"create_condition() is deprecated -- use Condition() directly",
DeprecationWarning,
)
return Condition(lock=lock)
def create_event() -> Event:
"""
Create an asynchronous event object.
:return: an event object
.. deprecated:: 3.0
Use :class:`~Event` directly.
"""
warn("create_event() is deprecated -- use Event() directly", DeprecationWarning)
return get_asynclib().Event()
def create_semaphore(value: int, *, max_value: Optional[int] = None) -> Semaphore:
"""
Create an asynchronous semaphore.
:param value: the semaphore's initial value
:param max_value: if set, makes this a "bounded" semaphore that raises :exc:`ValueError` if the
semaphore's value would exceed this number
:return: a semaphore object
.. deprecated:: 3.0
Use :class:`~Semaphore` directly.
"""
warn(
"create_semaphore() is deprecated -- use Semaphore() directly",
DeprecationWarning,
)
return Semaphore(value, max_value=max_value)
def create_capacity_limiter(total_tokens: float) -> CapacityLimiter:
"""
Create a capacity limiter.
:param total_tokens: the total number of tokens available for borrowing (can be an integer or
:data:`math.inf`)
:return: a capacity limiter object
.. deprecated:: 3.0
Use :class:`~CapacityLimiter` directly.
"""
warn(
"create_capacity_limiter() is deprecated -- use CapacityLimiter() directly",
DeprecationWarning,
)
return get_asynclib().CapacityLimiter(total_tokens)
class ResourceGuard:
__slots__ = "action", "_guarded"
def __init__(self, action: str):
self.action = action
self._guarded = False
def __enter__(self) -> None:
if self._guarded:
raise BusyResourceError(self.action)
self._guarded = True
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
self._guarded = False
return None | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/_core/_synchronization.py | _synchronization.py |
from io import BytesIO
from os import PathLike
from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess
from typing import (
IO,
Any,
AsyncIterable,
List,
Mapping,
Optional,
Sequence,
Union,
cast,
)
from ..abc import Process
from ._eventloop import get_asynclib
from ._tasks import create_task_group
async def run_process(
command: Union[str, bytes, Sequence[Union[str, bytes]]],
*,
input: Optional[bytes] = None,
stdout: Union[int, IO[Any], None] = PIPE,
stderr: Union[int, IO[Any], None] = PIPE,
check: bool = True,
cwd: Union[str, bytes, "PathLike[str]", None] = None,
env: Optional[Mapping[str, str]] = None,
start_new_session: bool = False,
) -> "CompletedProcess[bytes]":
"""
Run an external command in a subprocess and wait until it completes.
.. seealso:: :func:`subprocess.run`
:param command: either a string to pass to the shell, or an iterable of strings containing the
executable name or path and its arguments
:param input: bytes passed to the standard input of the subprocess
    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
        file-like object, or ``None``
    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        :data:`subprocess.STDOUT`, a file-like object, or ``None``
:param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the process
terminates with a return code other than 0
:param cwd: If not ``None``, change the working directory to this before running the command
:param env: if not ``None``, this mapping replaces the inherited environment variables from the
parent process
    :param start_new_session: if ``True`` the setsid() system call will be made in the child
process prior to the execution of the subprocess. (POSIX only)
:return: an object representing the completed process
:raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process exits with a
nonzero return code
"""
async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None:
buffer = BytesIO()
async for chunk in stream:
buffer.write(chunk)
stream_contents[index] = buffer.getvalue()
async with await open_process(
command,
stdin=PIPE if input else DEVNULL,
stdout=stdout,
stderr=stderr,
cwd=cwd,
env=env,
start_new_session=start_new_session,
) as process:
stream_contents: List[Optional[bytes]] = [None, None]
try:
async with create_task_group() as tg:
if process.stdout:
tg.start_soon(drain_stream, process.stdout, 0)
if process.stderr:
tg.start_soon(drain_stream, process.stderr, 1)
if process.stdin and input:
await process.stdin.send(input)
await process.stdin.aclose()
await process.wait()
except BaseException:
process.kill()
raise
output, errors = stream_contents
if check and process.returncode != 0:
raise CalledProcessError(cast(int, process.returncode), command, output, errors)
return CompletedProcess(command, cast(int, process.returncode), output, errors)
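# Minimal usage sketch:
#
#     async def main() -> None:
#         result = await run_process(["echo", "hello"])
#         print(result.stdout)  # b"hello\n" on most platforms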
async def open_process(
command: Union[str, bytes, Sequence[Union[str, bytes]]],
*,
stdin: Union[int, IO[Any], None] = PIPE,
stdout: Union[int, IO[Any], None] = PIPE,
stderr: Union[int, IO[Any], None] = PIPE,
cwd: Union[str, bytes, "PathLike[str]", None] = None,
env: Optional[Mapping[str, str]] = None,
start_new_session: bool = False,
) -> Process:
"""
Start an external command in a subprocess.
.. seealso:: :class:`subprocess.Popen`
:param command: either a string to pass to the shell, or an iterable of strings containing the
executable name or path and its arguments
:param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
file-like object, or ``None``
:param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
a file-like object, or ``None``
:param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
:data:`subprocess.STDOUT`, a file-like object, or ``None``
:param cwd: If not ``None``, the working directory is changed before executing
:param env: If env is not ``None``, it must be a mapping that defines the environment
variables for the new process
    :param start_new_session: if ``True`` the setsid() system call will be made in the child
process prior to the execution of the subprocess. (POSIX only)
:return: an asynchronous process object
"""
shell = isinstance(command, str)
return await get_asynclib().open_process(
command,
shell=shell,
stdin=stdin,
stdout=stdout,
stderr=stderr,
cwd=cwd,
env=env,
start_new_session=start_new_session,
) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/_core/_subprocesses.py | _subprocesses.py |
import os
import pathlib
import sys
from dataclasses import dataclass
from functools import partial
from os import PathLike
from typing import (
IO,
TYPE_CHECKING,
Any,
AnyStr,
AsyncIterator,
Callable,
Generic,
Iterable,
Iterator,
List,
Optional,
Sequence,
Tuple,
Union,
cast,
overload,
)
from .. import to_thread
from ..abc import AsyncResource
if sys.version_info >= (3, 8):
from typing import Final
else:
from typing_extensions import Final
if TYPE_CHECKING:
from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer
else:
ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object
class AsyncFile(AsyncResource, Generic[AnyStr]):
"""
An asynchronous file object.
This class wraps a standard file object and provides async friendly versions of the following
blocking methods (where available on the original file object):
* read
* read1
* readline
* readlines
* readinto
* readinto1
* write
* writelines
* truncate
* seek
* tell
* flush
All other methods are directly passed through.
This class supports the asynchronous context manager protocol which closes the underlying file
at the end of the context block.
This class also supports asynchronous iteration::
async with await open_file(...) as f:
async for line in f:
print(line)
"""
def __init__(self, fp: IO[AnyStr]) -> None:
self._fp: Any = fp
def __getattr__(self, name: str) -> object:
return getattr(self._fp, name)
@property
def wrapped(self) -> IO[AnyStr]:
"""The wrapped file object."""
return self._fp
async def __aiter__(self) -> AsyncIterator[AnyStr]:
while True:
line = await self.readline()
if line:
yield line
else:
break
async def aclose(self) -> None:
return await to_thread.run_sync(self._fp.close)
async def read(self, size: int = -1) -> AnyStr:
return await to_thread.run_sync(self._fp.read, size)
async def read1(self: "AsyncFile[bytes]", size: int = -1) -> bytes:
return await to_thread.run_sync(self._fp.read1, size)
async def readline(self) -> AnyStr:
return await to_thread.run_sync(self._fp.readline)
async def readlines(self) -> List[AnyStr]:
return await to_thread.run_sync(self._fp.readlines)
async def readinto(self: "AsyncFile[bytes]", b: WriteableBuffer) -> bytes:
return await to_thread.run_sync(self._fp.readinto, b)
async def readinto1(self: "AsyncFile[bytes]", b: WriteableBuffer) -> bytes:
return await to_thread.run_sync(self._fp.readinto1, b)
@overload
async def write(self: "AsyncFile[bytes]", b: ReadableBuffer) -> int:
...
@overload
async def write(self: "AsyncFile[str]", b: str) -> int:
...
async def write(self, b: Union[ReadableBuffer, str]) -> int:
return await to_thread.run_sync(self._fp.write, b)
@overload
async def writelines(
self: "AsyncFile[bytes]", lines: Iterable[ReadableBuffer]
) -> None:
...
@overload
async def writelines(self: "AsyncFile[str]", lines: Iterable[str]) -> None:
...
async def writelines(
self, lines: Union[Iterable[ReadableBuffer], Iterable[str]]
) -> None:
return await to_thread.run_sync(self._fp.writelines, lines)
async def truncate(self, size: Optional[int] = None) -> int:
return await to_thread.run_sync(self._fp.truncate, size)
    async def seek(self, offset: int, whence: int = os.SEEK_SET) -> int:
return await to_thread.run_sync(self._fp.seek, offset, whence)
async def tell(self) -> int:
return await to_thread.run_sync(self._fp.tell)
async def flush(self) -> None:
return await to_thread.run_sync(self._fp.flush)
@overload
async def open_file(
file: Union[str, "PathLike[str]", int],
mode: OpenBinaryMode,
buffering: int = ...,
encoding: Optional[str] = ...,
errors: Optional[str] = ...,
newline: Optional[str] = ...,
closefd: bool = ...,
opener: Optional[Callable[[str, int], int]] = ...,
) -> AsyncFile[bytes]:
...
@overload
async def open_file(
file: Union[str, "PathLike[str]", int],
mode: OpenTextMode = ...,
buffering: int = ...,
encoding: Optional[str] = ...,
errors: Optional[str] = ...,
newline: Optional[str] = ...,
closefd: bool = ...,
opener: Optional[Callable[[str, int], int]] = ...,
) -> AsyncFile[str]:
...
async def open_file(
file: Union[str, "PathLike[str]", int],
mode: str = "r",
buffering: int = -1,
encoding: Optional[str] = None,
errors: Optional[str] = None,
newline: Optional[str] = None,
closefd: bool = True,
opener: Optional[Callable[[str, int], int]] = None,
) -> AsyncFile[Any]:
"""
Open a file asynchronously.
The arguments are exactly the same as for the builtin :func:`open`.
:return: an asynchronous file object
"""
fp = await to_thread.run_sync(
open, file, mode, buffering, encoding, errors, newline, closefd, opener
)
return AsyncFile(fp)
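# Minimal usage sketch (``notes.txt`` is a placeholder path):
#
#     async def main() -> None:
#         async with await open_file("notes.txt", "w") as f:
#             await f.write("hello\n")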
def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]:
"""
Wrap an existing file as an asynchronous file.
:param file: an existing file-like object
:return: an asynchronous file object
"""
return AsyncFile(file)
@dataclass(eq=False)
class _PathIterator(AsyncIterator["Path"]):
iterator: Iterator["PathLike[str]"]
async def __anext__(self) -> "Path":
nextval = await to_thread.run_sync(next, self.iterator, None, cancellable=True)
if nextval is None:
raise StopAsyncIteration from None
return Path(cast("PathLike[str]", nextval))
class Path:
"""
An asynchronous version of :class:`pathlib.Path`.
This class cannot be substituted for :class:`pathlib.Path` or :class:`pathlib.PurePath`, but
it is compatible with the :class:`os.PathLike` interface.
It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for the
deprecated :meth:`~pathlib.Path.link_to` method.
Any methods that do disk I/O need to be awaited on. These methods are:
* :meth:`~pathlib.Path.absolute`
* :meth:`~pathlib.Path.chmod`
* :meth:`~pathlib.Path.cwd`
* :meth:`~pathlib.Path.exists`
* :meth:`~pathlib.Path.expanduser`
* :meth:`~pathlib.Path.group`
* :meth:`~pathlib.Path.hardlink_to`
* :meth:`~pathlib.Path.home`
* :meth:`~pathlib.Path.is_block_device`
* :meth:`~pathlib.Path.is_char_device`
* :meth:`~pathlib.Path.is_dir`
* :meth:`~pathlib.Path.is_fifo`
* :meth:`~pathlib.Path.is_file`
* :meth:`~pathlib.Path.is_mount`
* :meth:`~pathlib.Path.lchmod`
* :meth:`~pathlib.Path.lstat`
* :meth:`~pathlib.Path.mkdir`
* :meth:`~pathlib.Path.open`
* :meth:`~pathlib.Path.owner`
* :meth:`~pathlib.Path.read_bytes`
* :meth:`~pathlib.Path.read_text`
* :meth:`~pathlib.Path.readlink`
* :meth:`~pathlib.Path.rename`
* :meth:`~pathlib.Path.replace`
* :meth:`~pathlib.Path.rmdir`
* :meth:`~pathlib.Path.samefile`
* :meth:`~pathlib.Path.stat`
* :meth:`~pathlib.Path.touch`
* :meth:`~pathlib.Path.unlink`
* :meth:`~pathlib.Path.write_bytes`
* :meth:`~pathlib.Path.write_text`
Additionally, the following methods return an async iterator yielding :class:`~.Path` objects:
* :meth:`~pathlib.Path.glob`
* :meth:`~pathlib.Path.iterdir`
* :meth:`~pathlib.Path.rglob`
"""
__slots__ = "_path", "__weakref__"
__weakref__: Any
def __init__(self, *args: Union[str, "PathLike[str]"]) -> None:
self._path: Final[pathlib.Path] = pathlib.Path(*args)
def __fspath__(self) -> str:
return self._path.__fspath__()
def __str__(self) -> str:
return self._path.__str__()
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self.as_posix()!r})"
def __bytes__(self) -> bytes:
return self._path.__bytes__()
def __hash__(self) -> int:
return self._path.__hash__()
def __eq__(self, other: object) -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__eq__(target)
def __lt__(self, other: "Path") -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__lt__(target)
def __le__(self, other: "Path") -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__le__(target)
def __gt__(self, other: "Path") -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__gt__(target)
def __ge__(self, other: "Path") -> bool:
target = other._path if isinstance(other, Path) else other
return self._path.__ge__(target)
def __truediv__(self, other: Any) -> "Path":
return Path(self._path / other)
def __rtruediv__(self, other: Any) -> "Path":
return Path(other) / self
@property
def parts(self) -> Tuple[str, ...]:
return self._path.parts
@property
def drive(self) -> str:
return self._path.drive
@property
def root(self) -> str:
return self._path.root
@property
def anchor(self) -> str:
return self._path.anchor
@property
def parents(self) -> Sequence["Path"]:
return tuple(Path(p) for p in self._path.parents)
@property
def parent(self) -> "Path":
return Path(self._path.parent)
@property
def name(self) -> str:
return self._path.name
@property
def suffix(self) -> str:
return self._path.suffix
@property
def suffixes(self) -> List[str]:
return self._path.suffixes
@property
def stem(self) -> str:
return self._path.stem
async def absolute(self) -> "Path":
path = await to_thread.run_sync(self._path.absolute)
return Path(path)
def as_posix(self) -> str:
return self._path.as_posix()
def as_uri(self) -> str:
return self._path.as_uri()
def match(self, path_pattern: str) -> bool:
return self._path.match(path_pattern)
def is_relative_to(self, *other: Union[str, "PathLike[str]"]) -> bool:
try:
self.relative_to(*other)
return True
except ValueError:
return False
async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None:
func = partial(os.chmod, follow_symlinks=follow_symlinks)
return await to_thread.run_sync(func, self._path, mode)
@classmethod
async def cwd(cls) -> "Path":
path = await to_thread.run_sync(pathlib.Path.cwd)
return cls(path)
async def exists(self) -> bool:
return await to_thread.run_sync(self._path.exists, cancellable=True)
async def expanduser(self) -> "Path":
return Path(await to_thread.run_sync(self._path.expanduser, cancellable=True))
def glob(self, pattern: str) -> AsyncIterator["Path"]:
gen = self._path.glob(pattern)
return _PathIterator(gen)
async def group(self) -> str:
return await to_thread.run_sync(self._path.group, cancellable=True)
async def hardlink_to(self, target: Union[str, pathlib.Path, "Path"]) -> None:
if isinstance(target, Path):
target = target._path
await to_thread.run_sync(os.link, target, self)
@classmethod
async def home(cls) -> "Path":
home_path = await to_thread.run_sync(pathlib.Path.home)
return cls(home_path)
def is_absolute(self) -> bool:
return self._path.is_absolute()
async def is_block_device(self) -> bool:
return await to_thread.run_sync(self._path.is_block_device, cancellable=True)
async def is_char_device(self) -> bool:
return await to_thread.run_sync(self._path.is_char_device, cancellable=True)
async def is_dir(self) -> bool:
return await to_thread.run_sync(self._path.is_dir, cancellable=True)
async def is_fifo(self) -> bool:
return await to_thread.run_sync(self._path.is_fifo, cancellable=True)
async def is_file(self) -> bool:
return await to_thread.run_sync(self._path.is_file, cancellable=True)
async def is_mount(self) -> bool:
return await to_thread.run_sync(os.path.ismount, self._path, cancellable=True)
def is_reserved(self) -> bool:
return self._path.is_reserved()
async def is_socket(self) -> bool:
return await to_thread.run_sync(self._path.is_socket, cancellable=True)
async def is_symlink(self) -> bool:
return await to_thread.run_sync(self._path.is_symlink, cancellable=True)
def iterdir(self) -> AsyncIterator["Path"]:
gen = self._path.iterdir()
return _PathIterator(gen)
def joinpath(self, *args: Union[str, "PathLike[str]"]) -> "Path":
return Path(self._path.joinpath(*args))
async def lchmod(self, mode: int) -> None:
await to_thread.run_sync(self._path.lchmod, mode)
async def lstat(self) -> os.stat_result:
return await to_thread.run_sync(self._path.lstat, cancellable=True)
async def mkdir(
self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False
) -> None:
await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok)
@overload
async def open(
self,
mode: OpenBinaryMode,
buffering: int = ...,
encoding: Optional[str] = ...,
errors: Optional[str] = ...,
newline: Optional[str] = ...,
) -> AsyncFile[bytes]:
...
@overload
async def open(
self,
mode: OpenTextMode = ...,
buffering: int = ...,
encoding: Optional[str] = ...,
errors: Optional[str] = ...,
newline: Optional[str] = ...,
) -> AsyncFile[str]:
...
async def open(
self,
mode: str = "r",
buffering: int = -1,
encoding: Optional[str] = None,
errors: Optional[str] = None,
newline: Optional[str] = None,
) -> AsyncFile[Any]:
fp = await to_thread.run_sync(
self._path.open, mode, buffering, encoding, errors, newline
)
return AsyncFile(fp)
async def owner(self) -> str:
return await to_thread.run_sync(self._path.owner, cancellable=True)
async def read_bytes(self) -> bytes:
return await to_thread.run_sync(self._path.read_bytes)
async def read_text(
self, encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
return await to_thread.run_sync(self._path.read_text, encoding, errors)
def relative_to(self, *other: Union[str, "PathLike[str]"]) -> "Path":
return Path(self._path.relative_to(*other))
async def readlink(self) -> "Path":
target = await to_thread.run_sync(os.readlink, self._path)
return Path(cast(str, target))
async def rename(self, target: Union[str, pathlib.PurePath, "Path"]) -> "Path":
if isinstance(target, Path):
target = target._path
await to_thread.run_sync(self._path.rename, target)
return Path(target)
async def replace(self, target: Union[str, pathlib.PurePath, "Path"]) -> "Path":
if isinstance(target, Path):
target = target._path
await to_thread.run_sync(self._path.replace, target)
return Path(target)
async def resolve(self, strict: bool = False) -> "Path":
func = partial(self._path.resolve, strict=strict)
return Path(await to_thread.run_sync(func, cancellable=True))
def rglob(self, pattern: str) -> AsyncIterator["Path"]:
gen = self._path.rglob(pattern)
return _PathIterator(gen)
async def rmdir(self) -> None:
await to_thread.run_sync(self._path.rmdir)
async def samefile(
self, other_path: Union[str, bytes, int, pathlib.Path, "Path"]
) -> bool:
if isinstance(other_path, Path):
other_path = other_path._path
return await to_thread.run_sync(
self._path.samefile, other_path, cancellable=True
)
async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result:
func = partial(os.stat, follow_symlinks=follow_symlinks)
return await to_thread.run_sync(func, self._path, cancellable=True)
async def symlink_to(
self,
target: Union[str, pathlib.Path, "Path"],
target_is_directory: bool = False,
) -> None:
if isinstance(target, Path):
target = target._path
await to_thread.run_sync(self._path.symlink_to, target, target_is_directory)
async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None:
await to_thread.run_sync(self._path.touch, mode, exist_ok)
async def unlink(self, missing_ok: bool = False) -> None:
try:
await to_thread.run_sync(self._path.unlink)
except FileNotFoundError:
if not missing_ok:
raise
def with_name(self, name: str) -> "Path":
return Path(self._path.with_name(name))
def with_stem(self, stem: str) -> "Path":
return Path(self._path.with_name(stem + self._path.suffix))
def with_suffix(self, suffix: str) -> "Path":
return Path(self._path.with_suffix(suffix))
async def write_bytes(self, data: bytes) -> int:
return await to_thread.run_sync(self._path.write_bytes, data)
async def write_text(
self,
data: str,
encoding: Optional[str] = None,
errors: Optional[str] = None,
newline: Optional[str] = None,
) -> int:
# Path.write_text() does not support the "newline" parameter before Python 3.10
def sync_write_text() -> int:
with self._path.open(
"w", encoding=encoding, errors=errors, newline=newline
) as fp:
return fp.write(data)
return await to_thread.run_sync(sync_write_text)
PathLike.register(Path) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/_core/_fileio.py | _fileio.py |
import sys
from typing import Any, Callable, Dict, Mapping, TypeVar, Union, overload
from ._exceptions import TypedAttributeLookupError
if sys.version_info >= (3, 8):
from typing import final
else:
from typing_extensions import final
T_Attr = TypeVar("T_Attr")
T_Default = TypeVar("T_Default")
undefined = object()
def typed_attribute() -> Any:
"""Return a unique object, used to mark typed attributes."""
return object()
class TypedAttributeSet:
"""
Superclass for typed attribute collections.
Checks that every public attribute of every subclass has a type annotation.
"""
def __init_subclass__(cls) -> None:
annotations: Dict[str, Any] = getattr(cls, "__annotations__", {})
for attrname in dir(cls):
if not attrname.startswith("_") and attrname not in annotations:
raise TypeError(
f"Attribute {attrname!r} is missing its type annotation"
)
super().__init_subclass__()
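# A minimal sketch of a concrete attribute set (hypothetical names; omitting
# the type annotation on a public attribute makes __init_subclass__ raise
# TypeError):
#
#     class ConnectionAttributes(TypedAttributeSet):
#         remote_host: str = typed_attribute()
#         remote_port: int = typed_attribute()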
class TypedAttributeProvider:
"""Base class for classes that wish to provide typed extra attributes."""
@property
def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]:
"""
A mapping of the extra attributes to callables that return the corresponding values.
If the provider wraps another provider, the attributes from that wrapper should also be
included in the returned mapping (but the wrapper may override the callables from the
wrapped instance).
"""
return {}
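    # A minimal sketch of an overriding provider (hypothetical class, reusing
    # the ConnectionAttributes sketch above):
    #
    #     class MyStream(TypedAttributeProvider):
    #         @property
    #         def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
    #             return {ConnectionAttributes.remote_port: lambda: 80}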
@overload
def extra(self, attribute: T_Attr) -> T_Attr:
...
@overload
def extra(self, attribute: T_Attr, default: T_Default) -> Union[T_Attr, T_Default]:
...
@final
def extra(self, attribute: Any, default: object = undefined) -> object:
"""
extra(attribute, default=undefined)
Return the value of the given typed extra attribute.
:param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to look for
:param default: the value that should be returned if no value is found for the attribute
:raises ~anyio.TypedAttributeLookupError: if the search failed and no default value was
given
"""
try:
return self.extra_attributes[attribute]()
except KeyError:
if default is undefined:
raise TypedAttributeLookupError("Attribute not found") from None
else:
return default | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/_core/_typedattr.py | _typedattr.py |
from abc import ABCMeta, abstractmethod
from contextlib import AbstractContextManager
from types import TracebackType
from typing import (
TYPE_CHECKING,
Any,
AsyncContextManager,
Callable,
ContextManager,
Generator,
Generic,
Iterable,
List,
Optional,
Tuple,
Type,
TypeVar,
Union,
overload,
)
from warnings import warn
if TYPE_CHECKING:
from ._testing import TaskInfo
else:
TaskInfo = object
T = TypeVar("T")
AnyDeprecatedAwaitable = Union[
"DeprecatedAwaitable",
"DeprecatedAwaitableFloat",
"DeprecatedAwaitableList[T]",
TaskInfo,
]
@overload
async def maybe_async(__obj: TaskInfo) -> TaskInfo:
...
@overload
async def maybe_async(__obj: "DeprecatedAwaitableFloat") -> float:
...
@overload
async def maybe_async(__obj: "DeprecatedAwaitableList[T]") -> List[T]:
...
@overload
async def maybe_async(__obj: "DeprecatedAwaitable") -> None:
...
async def maybe_async(
__obj: "AnyDeprecatedAwaitable[T]",
) -> Union[TaskInfo, float, List[T], None]:
"""
Await on the given object if necessary.
This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
methods were converted from coroutine functions into regular functions.
Do **not** try to use this for any other purpose!
:return: the result of awaiting on the object if coroutine, or the object itself otherwise
.. versionadded:: 2.2
"""
return __obj._unwrap()
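# A minimal usage sketch: on AnyIO 3.x, CancelScope.cancel() returns a
# DeprecatedAwaitable, while on 2.x it was a coroutine method, so wrapping the
# call keeps one code path working on both:
#
#     async def cancel_compat(scope) -> None:
#         await maybe_async(scope.cancel())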
class _ContextManagerWrapper:
def __init__(self, cm: ContextManager[T]):
self._cm = cm
async def __aenter__(self) -> T:
return self._cm.__enter__()
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
return self._cm.__exit__(exc_type, exc_val, exc_tb)
def maybe_async_cm(
cm: Union[ContextManager[T], AsyncContextManager[T]]
) -> AsyncContextManager[T]:
"""
Wrap a regular context manager as an async one if necessary.
This function is intended to bridge the gap between AnyIO 2.x and 3.x where some functions and
methods were changed to return regular context managers instead of async ones.
:param cm: a regular or async context manager
:return: an async context manager
.. versionadded:: 2.2
"""
if not isinstance(cm, AbstractContextManager):
        raise TypeError("Given object is not a context manager")
return _ContextManagerWrapper(cm)
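# A minimal usage sketch: anyio.fail_after() returns a regular context manager
# on AnyIO 3.x but an async one on 2.x; do_work() is hypothetical:
#
#     async def timed_section() -> None:
#         async with maybe_async_cm(fail_after(5)):
#             await do_work()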
def _warn_deprecation(
awaitable: "AnyDeprecatedAwaitable[Any]", stacklevel: int = 1
) -> None:
warn(
f'Awaiting on {awaitable._name}() is deprecated. Use "await '
f"anyio.maybe_async({awaitable._name}(...)) if you have to support both AnyIO 2.x "
f'and 3.x, or just remove the "await" if you are completely migrating to AnyIO 3+.',
DeprecationWarning,
stacklevel=stacklevel + 1,
)
class DeprecatedAwaitable:
def __init__(self, func: Callable[..., "DeprecatedAwaitable"]):
self._name = f"{func.__module__}.{func.__qualname__}"
def __await__(self) -> Generator[None, None, None]:
_warn_deprecation(self)
if False:
yield
def __reduce__(self) -> Tuple[Type[None], Tuple[()]]:
return type(None), ()
def _unwrap(self) -> None:
return None
class DeprecatedAwaitableFloat(float):
def __new__(
cls, x: float, func: Callable[..., "DeprecatedAwaitableFloat"]
) -> "DeprecatedAwaitableFloat":
return super().__new__(cls, x)
def __init__(self, x: float, func: Callable[..., "DeprecatedAwaitableFloat"]):
self._name = f"{func.__module__}.{func.__qualname__}"
def __await__(self) -> Generator[None, None, float]:
_warn_deprecation(self)
if False:
yield
return float(self)
def __reduce__(self) -> Tuple[Type[float], Tuple[float]]:
return float, (float(self),)
def _unwrap(self) -> float:
return float(self)
class DeprecatedAwaitableList(List[T]):
def __init__(
self,
iterable: Iterable[T] = (),
*,
func: Callable[..., "DeprecatedAwaitableList[T]"],
):
super().__init__(iterable)
self._name = f"{func.__module__}.{func.__qualname__}"
def __await__(self) -> Generator[None, None, List[T]]:
_warn_deprecation(self)
if False:
yield
return list(self)
def __reduce__(self) -> Tuple[Type[List[T]], Tuple[List[T]]]:
return list, (list(self),)
def _unwrap(self) -> List[T]:
return list(self)
class DeprecatedAsyncContextManager(Generic[T], metaclass=ABCMeta):
@abstractmethod
def __enter__(self) -> T:
pass
@abstractmethod
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
pass
async def __aenter__(self) -> T:
warn(
f"Using {self.__class__.__name__} as an async context manager has been deprecated. "
f'Use "async with anyio.maybe_async_cm(yourcontextmanager) as foo:" if you have to '
f'support both AnyIO 2.x and 3.x, or just remove the "async" from "async with" if '
f"you are completely migrating to AnyIO 3+.",
DeprecationWarning,
)
return self.__enter__()
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
return self.__exit__(exc_type, exc_val, exc_tb) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/_core/_compat.py | _compat.py |
import array
import asyncio
import concurrent.futures
import math
import socket
import sys
from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined]
from collections import OrderedDict, deque
from concurrent.futures import Future
from contextvars import Context, copy_context
from dataclasses import dataclass
from functools import partial, wraps
from inspect import (
CORO_RUNNING,
CORO_SUSPENDED,
GEN_RUNNING,
GEN_SUSPENDED,
getcoroutinestate,
getgeneratorstate,
)
from io import IOBase
from os import PathLike
from queue import Queue
from socket import AddressFamily, SocketKind
from threading import Thread
from types import TracebackType
from typing import (
IO,
Any,
AsyncGenerator,
Awaitable,
Callable,
Collection,
Coroutine,
Deque,
Dict,
Generator,
Iterable,
List,
Mapping,
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from weakref import WeakKeyDictionary
from zdppy_api import sniffio
from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc
from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable
from .._core._eventloop import claim_worker_thread, threadlocals
from .._core._exceptions import (
BrokenResourceError,
BusyResourceError,
ClosedResourceError,
EndOfStream,
)
from .._core._exceptions import ExceptionGroup as BaseExceptionGroup
from .._core._exceptions import WouldBlock
from .._core._sockets import GetAddrInfoReturnType, convert_ipv6_sockaddr
from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter
from .._core._synchronization import Event as BaseEvent
from .._core._synchronization import ResourceGuard
from .._core._tasks import CancelScope as BaseCancelScope
from ..abc import IPSockAddrType, UDPPacketType
from ..lowlevel import RunVar
if sys.version_info >= (3, 8):
def get_coro(task: asyncio.Task) -> Union[Generator, Awaitable[Any]]:
return task.get_coro()
else:
def get_coro(task: asyncio.Task) -> Union[Generator, Awaitable[Any]]:
return task._coro
if sys.version_info >= (3, 7):
from asyncio import all_tasks, create_task, current_task, get_running_loop
from asyncio import run as native_run
def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]:
return [cb for cb, context in task._callbacks] # type: ignore[attr-defined]
else:
_T = TypeVar("_T")
def _get_task_callbacks(task: asyncio.Task) -> Iterable[Callable]:
return task._callbacks
def native_run(main, *, debug=False):
# Snatched from Python 3.7
from asyncio import coroutines, events, tasks
def _cancel_all_tasks(loop):
to_cancel = all_tasks(loop)
if not to_cancel:
return
for task in to_cancel:
task.cancel()
loop.run_until_complete(
tasks.gather(*to_cancel, loop=loop, return_exceptions=True)
)
for task in to_cancel:
if task.cancelled():
continue
if task.exception() is not None:
loop.call_exception_handler(
{
"message": "unhandled exception during asyncio.run() shutdown",
"exception": task.exception(),
"task": task,
}
)
if events._get_running_loop() is not None:
raise RuntimeError(
"asyncio.run() cannot be called from a running event loop"
)
if not coroutines.iscoroutine(main):
raise ValueError(f"a coroutine was expected, got {main!r}")
loop = events.new_event_loop()
try:
events.set_event_loop(loop)
loop.set_debug(debug)
return loop.run_until_complete(main)
finally:
try:
_cancel_all_tasks(loop)
loop.run_until_complete(loop.shutdown_asyncgens())
finally:
events.set_event_loop(None)
loop.close()
def create_task(
coro: Union[Generator[Any, None, _T], Awaitable[_T]], *, name: object = None
) -> asyncio.Task:
return get_running_loop().create_task(coro)
def get_running_loop() -> asyncio.AbstractEventLoop:
loop = asyncio._get_running_loop()
if loop is not None:
return loop
else:
raise RuntimeError("no running event loop")
def all_tasks(
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> Set[asyncio.Task]:
"""Return a set of all tasks for the loop."""
from asyncio import Task
if loop is None:
loop = get_running_loop()
return {t for t in Task.all_tasks(loop) if not t.done()}
def current_task(
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> Optional[asyncio.Task]:
if loop is None:
loop = get_running_loop()
return asyncio.Task.current_task(loop)
T_Retval = TypeVar("T_Retval")
# Check whether there is native support for task names in asyncio (3.8+)
_native_task_names = hasattr(asyncio.Task, "get_name")
_root_task: RunVar[Optional[asyncio.Task]] = RunVar("_root_task")
def find_root_task() -> asyncio.Task:
root_task = _root_task.get(None)
if root_task is not None and not root_task.done():
return root_task
# Look for a task that has been started via run_until_complete()
for task in all_tasks():
if task._callbacks and not task.done():
for cb in _get_task_callbacks(task):
if (
cb is _run_until_complete_cb
or getattr(cb, "__module__", None) == "uvloop.loop"
):
_root_task.set(task)
return task
# Look up the topmost task in the AnyIO task tree, if possible
task = cast(asyncio.Task, current_task())
state = _task_states.get(task)
if state:
cancel_scope = state.cancel_scope
while cancel_scope and cancel_scope._parent_scope is not None:
cancel_scope = cancel_scope._parent_scope
if cancel_scope is not None:
return cast(asyncio.Task, cancel_scope._host_task)
return task
def get_callable_name(func: Callable) -> str:
module = getattr(func, "__module__", None)
qualname = getattr(func, "__qualname__", None)
return ".".join([x for x in (module, qualname) if x])
#
# Event loop
#
_run_vars = (
WeakKeyDictionary()
) # type: WeakKeyDictionary[asyncio.AbstractEventLoop, Any]
current_token = get_running_loop
def _task_started(task: asyncio.Task) -> bool:
"""Return ``True`` if the task has been started and has not finished."""
coro = cast(Coroutine[Any, Any, Any], get_coro(task))
try:
return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED)
except AttributeError:
try:
return getgeneratorstate(cast(Generator, coro)) in (
GEN_RUNNING,
GEN_SUSPENDED,
)
except AttributeError:
            # task coro is an async_generator_asend object: https://bugs.python.org/issue37771
raise Exception(f"Cannot determine if task {task} has started or not")
def _maybe_set_event_loop_policy(
policy: Optional[asyncio.AbstractEventLoopPolicy], use_uvloop: bool
) -> None:
# On CPython, use uvloop when possible if no other policy has been given and if not
# explicitly disabled
if policy is None and use_uvloop and sys.implementation.name == "cpython":
try:
import uvloop
except ImportError:
pass
else:
# Test for missing shutdown_default_executor() (uvloop 0.14.0 and earlier)
if not hasattr(
asyncio.AbstractEventLoop, "shutdown_default_executor"
) or hasattr(uvloop.loop.Loop, "shutdown_default_executor"):
policy = uvloop.EventLoopPolicy()
if policy is not None:
asyncio.set_event_loop_policy(policy)
def run(
func: Callable[..., Awaitable[T_Retval]],
*args: object,
debug: bool = False,
use_uvloop: bool = False,
policy: Optional[asyncio.AbstractEventLoopPolicy] = None,
) -> T_Retval:
@wraps(func)
async def wrapper() -> T_Retval:
task = cast(asyncio.Task, current_task())
task_state = TaskState(None, get_callable_name(func), None)
_task_states[task] = task_state
if _native_task_names:
task.set_name(task_state.name)
try:
return await func(*args)
finally:
del _task_states[task]
_maybe_set_event_loop_policy(policy, use_uvloop)
return native_run(wrapper(), debug=debug)
#
# Miscellaneous
#
sleep = asyncio.sleep
#
# Timeouts and cancellation
#
CancelledError = asyncio.CancelledError
class CancelScope(BaseCancelScope):
def __new__(
cls, *, deadline: float = math.inf, shield: bool = False
) -> "CancelScope":
return object.__new__(cls)
def __init__(self, deadline: float = math.inf, shield: bool = False):
self._deadline = deadline
self._shield = shield
self._parent_scope: Optional[CancelScope] = None
self._cancel_called = False
self._active = False
self._timeout_handle: Optional[asyncio.TimerHandle] = None
self._cancel_handle: Optional[asyncio.Handle] = None
self._tasks: Set[asyncio.Task] = set()
self._host_task: Optional[asyncio.Task] = None
self._timeout_expired = False
def __enter__(self) -> "CancelScope":
if self._active:
raise RuntimeError(
"Each CancelScope may only be used for a single 'with' block"
)
self._host_task = host_task = cast(asyncio.Task, current_task())
self._tasks.add(host_task)
try:
task_state = _task_states[host_task]
except KeyError:
task_name = host_task.get_name() if _native_task_names else None
task_state = TaskState(None, task_name, self)
_task_states[host_task] = task_state
else:
self._parent_scope = task_state.cancel_scope
task_state.cancel_scope = self
self._timeout()
self._active = True
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
if not self._active:
raise RuntimeError("This cancel scope is not active")
if current_task() is not self._host_task:
raise RuntimeError(
"Attempted to exit cancel scope in a different task than it was "
"entered in"
)
assert self._host_task is not None
host_task_state = _task_states.get(self._host_task)
if host_task_state is None or host_task_state.cancel_scope is not self:
raise RuntimeError(
"Attempted to exit a cancel scope that isn't the current tasks's "
"current cancel scope"
)
self._active = False
if self._timeout_handle:
self._timeout_handle.cancel()
self._timeout_handle = None
self._tasks.remove(self._host_task)
host_task_state.cancel_scope = self._parent_scope
# Restart the cancellation effort in the farthest directly cancelled parent scope if this
# one was shielded
if self._shield:
self._deliver_cancellation_to_parent()
if exc_val is not None:
exceptions = (
exc_val.exceptions if isinstance(exc_val, ExceptionGroup) else [exc_val]
)
if all(isinstance(exc, CancelledError) for exc in exceptions):
if self._timeout_expired:
return True
elif not self._cancel_called:
# Task was cancelled natively
return None
elif not self._parent_cancelled():
# This scope was directly cancelled
return True
return None
def _timeout(self) -> None:
if self._deadline != math.inf:
loop = get_running_loop()
if loop.time() >= self._deadline:
self._timeout_expired = True
self.cancel()
else:
self._timeout_handle = loop.call_at(self._deadline, self._timeout)
def _deliver_cancellation(self) -> None:
"""
Deliver cancellation to directly contained tasks and nested cancel scopes.
Schedule another run at the end if we still have tasks eligible for cancellation.
"""
should_retry = False
current = current_task()
for task in self._tasks:
if task._must_cancel: # type: ignore[attr-defined]
continue
# The task is eligible for cancellation if it has started and is not in a cancel
# scope shielded from this one
cancel_scope = _task_states[task].cancel_scope
while cancel_scope is not self:
if cancel_scope is None or cancel_scope._shield:
break
else:
cancel_scope = cancel_scope._parent_scope
else:
should_retry = True
if task is not current and (
task is self._host_task or _task_started(task)
):
task.cancel()
# Schedule another callback if there are still tasks left
if should_retry:
self._cancel_handle = get_running_loop().call_soon(
self._deliver_cancellation
)
else:
self._cancel_handle = None
def _deliver_cancellation_to_parent(self) -> None:
"""Start cancellation effort in the farthest directly cancelled parent scope"""
scope = self._parent_scope
scope_to_cancel: Optional[CancelScope] = None
while scope is not None:
if scope._cancel_called and scope._cancel_handle is None:
scope_to_cancel = scope
# No point in looking beyond any shielded scope
if scope._shield:
break
scope = scope._parent_scope
if scope_to_cancel is not None:
scope_to_cancel._deliver_cancellation()
def _parent_cancelled(self) -> bool:
# Check whether any parent has been cancelled
cancel_scope = self._parent_scope
while cancel_scope is not None and not cancel_scope._shield:
if cancel_scope._cancel_called:
return True
else:
cancel_scope = cancel_scope._parent_scope
return False
def cancel(self) -> DeprecatedAwaitable:
if not self._cancel_called:
if self._timeout_handle:
self._timeout_handle.cancel()
self._timeout_handle = None
self._cancel_called = True
self._deliver_cancellation()
return DeprecatedAwaitable(self.cancel)
@property
def deadline(self) -> float:
return self._deadline
@deadline.setter
def deadline(self, value: float) -> None:
self._deadline = float(value)
if self._timeout_handle is not None:
self._timeout_handle.cancel()
self._timeout_handle = None
if self._active and not self._cancel_called:
self._timeout()
@property
def cancel_called(self) -> bool:
return self._cancel_called
@property
def shield(self) -> bool:
return self._shield
@shield.setter
def shield(self, value: bool) -> None:
if self._shield != value:
self._shield = value
if not value:
self._deliver_cancellation_to_parent()
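# A minimal usage sketch (not part of the original module): cancel the block
# roughly two seconds after entry and detect that it happened.
#
#     async def fetch_with_timeout() -> None:
#         with CancelScope(deadline=current_time() + 2) as scope:
#             await sleep(10)
#         if scope.cancel_called:
#             print("timed out")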
async def checkpoint() -> None:
await sleep(0)
async def checkpoint_if_cancelled() -> None:
task = current_task()
if task is None:
return
try:
cancel_scope = _task_states[task].cancel_scope
except KeyError:
return
while cancel_scope:
if cancel_scope.cancel_called:
await sleep(0)
elif cancel_scope.shield:
break
else:
cancel_scope = cancel_scope._parent_scope
async def cancel_shielded_checkpoint() -> None:
with CancelScope(shield=True):
await sleep(0)
def current_effective_deadline() -> float:
try:
cancel_scope = _task_states[current_task()].cancel_scope # type: ignore[index]
except KeyError:
return math.inf
deadline = math.inf
while cancel_scope:
deadline = min(deadline, cancel_scope.deadline)
if cancel_scope.shield:
break
else:
cancel_scope = cancel_scope._parent_scope
return deadline
def current_time() -> float:
return get_running_loop().time()
#
# Task states
#
class TaskState:
"""
Encapsulates auxiliary task information that cannot be added to the Task instance itself
because there are no guarantees about its implementation.
"""
__slots__ = "parent_id", "name", "cancel_scope"
def __init__(
self,
parent_id: Optional[int],
name: Optional[str],
cancel_scope: Optional[CancelScope],
):
self.parent_id = parent_id
self.name = name
self.cancel_scope = cancel_scope
_task_states = WeakKeyDictionary() # type: WeakKeyDictionary[asyncio.Task, TaskState]
#
# Task groups
#
class ExceptionGroup(BaseExceptionGroup):
def __init__(self, exceptions: List[BaseException]):
super().__init__()
self.exceptions = exceptions
class _AsyncioTaskStatus(abc.TaskStatus):
def __init__(self, future: asyncio.Future, parent_id: int):
self._future = future
self._parent_id = parent_id
def started(self, value: object = None) -> None:
try:
self._future.set_result(value)
except asyncio.InvalidStateError:
raise RuntimeError(
"called 'started' twice on the same task status"
) from None
task = cast(asyncio.Task, current_task())
_task_states[task].parent_id = self._parent_id
class TaskGroup(abc.TaskGroup):
def __init__(self) -> None:
self.cancel_scope: CancelScope = CancelScope()
self._active = False
self._exceptions: List[BaseException] = []
async def __aenter__(self) -> "TaskGroup":
self.cancel_scope.__enter__()
self._active = True
return self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
ignore_exception = self.cancel_scope.__exit__(exc_type, exc_val, exc_tb)
if exc_val is not None:
self.cancel_scope.cancel()
self._exceptions.append(exc_val)
while self.cancel_scope._tasks:
try:
await asyncio.wait(self.cancel_scope._tasks)
except asyncio.CancelledError:
self.cancel_scope.cancel()
self._active = False
if not self.cancel_scope._parent_cancelled():
exceptions = self._filter_cancellation_errors(self._exceptions)
else:
exceptions = self._exceptions
try:
if len(exceptions) > 1:
if all(
isinstance(e, CancelledError) and not e.args for e in exceptions
):
# Tasks were cancelled natively, without a cancellation message
raise CancelledError
else:
raise ExceptionGroup(exceptions)
elif exceptions and exceptions[0] is not exc_val:
raise exceptions[0]
except BaseException as exc:
# Clear the context here, as it can only be done in-flight.
# If the context is not cleared, it can result in recursive tracebacks (see #145).
exc.__context__ = None
raise
return ignore_exception
@staticmethod
def _filter_cancellation_errors(
exceptions: Sequence[BaseException],
) -> List[BaseException]:
filtered_exceptions: List[BaseException] = []
for exc in exceptions:
if isinstance(exc, ExceptionGroup):
new_exceptions = TaskGroup._filter_cancellation_errors(exc.exceptions)
if len(new_exceptions) > 1:
filtered_exceptions.append(exc)
elif len(new_exceptions) == 1:
filtered_exceptions.append(new_exceptions[0])
elif new_exceptions:
new_exc = ExceptionGroup(new_exceptions)
new_exc.__cause__ = exc.__cause__
new_exc.__context__ = exc.__context__
new_exc.__traceback__ = exc.__traceback__
filtered_exceptions.append(new_exc)
elif not isinstance(exc, CancelledError) or exc.args:
filtered_exceptions.append(exc)
return filtered_exceptions
async def _run_wrapped_task(
self, coro: Coroutine, task_status_future: Optional[asyncio.Future]
) -> None:
# This is the code path for Python 3.6 and 3.7 on which asyncio freaks out if a task raises
# a BaseException.
__traceback_hide__ = __tracebackhide__ = True # noqa: F841
task = cast(asyncio.Task, current_task())
try:
await coro
except BaseException as exc:
if task_status_future is None or task_status_future.done():
self._exceptions.append(exc)
self.cancel_scope.cancel()
else:
task_status_future.set_exception(exc)
else:
if task_status_future is not None and not task_status_future.done():
task_status_future.set_exception(
RuntimeError("Child exited without calling task_status.started()")
)
finally:
if task in self.cancel_scope._tasks:
self.cancel_scope._tasks.remove(task)
del _task_states[task]
def _spawn(
self,
func: Callable[..., Coroutine],
args: tuple,
name: object,
task_status_future: Optional[asyncio.Future] = None,
) -> asyncio.Task:
def task_done(_task: asyncio.Task) -> None:
# This is the code path for Python 3.8+
assert _task in self.cancel_scope._tasks
self.cancel_scope._tasks.remove(_task)
del _task_states[_task]
try:
exc = _task.exception()
except CancelledError as e:
while isinstance(e.__context__, CancelledError):
e = e.__context__
exc = e
if exc is not None:
if task_status_future is None or task_status_future.done():
self._exceptions.append(exc)
self.cancel_scope.cancel()
else:
task_status_future.set_exception(exc)
elif task_status_future is not None and not task_status_future.done():
task_status_future.set_exception(
RuntimeError("Child exited without calling task_status.started()")
)
if not self._active:
raise RuntimeError(
"This task group is not active; no new tasks can be started."
)
options = {}
name = get_callable_name(func) if name is None else str(name)
if _native_task_names:
options["name"] = name
kwargs = {}
if task_status_future:
parent_id = id(current_task())
kwargs["task_status"] = _AsyncioTaskStatus(
task_status_future, id(self.cancel_scope._host_task)
)
else:
parent_id = id(self.cancel_scope._host_task)
coro = func(*args, **kwargs)
if not asyncio.iscoroutine(coro):
raise TypeError(
f"Expected an async function, but {func} appears to be synchronous"
)
foreign_coro = not hasattr(coro, "cr_frame") and not hasattr(coro, "gi_frame")
if foreign_coro or sys.version_info < (3, 8):
coro = self._run_wrapped_task(coro, task_status_future)
task = create_task(coro, **options)
if not foreign_coro and sys.version_info >= (3, 8):
task.add_done_callback(task_done)
# Make the spawned task inherit the task group's cancel scope
_task_states[task] = TaskState(
parent_id=parent_id, name=name, cancel_scope=self.cancel_scope
)
self.cancel_scope._tasks.add(task)
return task
def start_soon(
self, func: Callable[..., Coroutine], *args: object, name: object = None
) -> None:
self._spawn(func, args, name)
    async def start(
        self, func: Callable[..., Coroutine], *args: object, name: object = None
    ) -> Any:
future: asyncio.Future = asyncio.Future()
task = self._spawn(func, args, name, future)
# If the task raises an exception after sending a start value without a switch point
# between, the task group is cancelled and this method never proceeds to process the
# completed future. That's why we have to have a shielded cancel scope here.
with CancelScope(shield=True):
try:
return await future
except CancelledError:
task.cancel()
raise
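# A minimal usage sketch (structured concurrency): the "async with" block only
# exits once both children have finished, and a failure in either child
# cancels the other.
#
#     async def main() -> None:
#         async with TaskGroup() as tg:
#             tg.start_soon(sleep, 1)
#             tg.start_soon(sleep, 2)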
#
# Threads
#
_Retval_Queue_Type = Tuple[Optional[T_Retval], Optional[BaseException]]
class WorkerThread(Thread):
MAX_IDLE_TIME = 10 # seconds
def __init__(
self,
root_task: asyncio.Task,
workers: Set["WorkerThread"],
idle_workers: Deque["WorkerThread"],
):
super().__init__(name="AnyIO worker thread")
self.root_task = root_task
self.workers = workers
self.idle_workers = idle_workers
self.loop = root_task._loop
self.queue: Queue[
Union[Tuple[Context, Callable, tuple, asyncio.Future], None]
] = Queue(2)
self.idle_since = current_time()
self.stopping = False
def _report_result(
self, future: asyncio.Future, result: Any, exc: Optional[BaseException]
) -> None:
self.idle_since = current_time()
if not self.stopping:
self.idle_workers.append(self)
if not future.cancelled():
if exc is not None:
future.set_exception(exc)
else:
future.set_result(result)
def run(self) -> None:
with claim_worker_thread("asyncio"):
threadlocals.loop = self.loop
while True:
item = self.queue.get()
if item is None:
# Shutdown command received
return
context, func, args, future = item
if not future.cancelled():
result = None
exception: Optional[BaseException] = None
try:
result = context.run(func, *args)
except BaseException as exc:
exception = exc
if not self.loop.is_closed():
self.loop.call_soon_threadsafe(
self._report_result, future, result, exception
)
self.queue.task_done()
def stop(self, f: Optional[asyncio.Task] = None) -> None:
self.stopping = True
self.queue.put_nowait(None)
self.workers.discard(self)
try:
self.idle_workers.remove(self)
except ValueError:
pass
_threadpool_idle_workers: RunVar[Deque[WorkerThread]] = RunVar(
"_threadpool_idle_workers"
)
_threadpool_workers: RunVar[Set[WorkerThread]] = RunVar("_threadpool_workers")
async def run_sync_in_worker_thread(
func: Callable[..., T_Retval],
*args: object,
cancellable: bool = False,
limiter: Optional["CapacityLimiter"] = None,
) -> T_Retval:
await checkpoint()
# If this is the first run in this event loop thread, set up the necessary variables
try:
idle_workers = _threadpool_idle_workers.get()
workers = _threadpool_workers.get()
except LookupError:
idle_workers = deque()
workers = set()
_threadpool_idle_workers.set(idle_workers)
_threadpool_workers.set(workers)
async with (limiter or current_default_thread_limiter()):
with CancelScope(shield=not cancellable):
future: asyncio.Future = asyncio.Future()
root_task = find_root_task()
if not idle_workers:
worker = WorkerThread(root_task, workers, idle_workers)
worker.start()
workers.add(worker)
root_task.add_done_callback(worker.stop)
else:
worker = idle_workers.pop()
# Prune any other workers that have been idle for MAX_IDLE_TIME seconds or longer
now = current_time()
while idle_workers:
if now - idle_workers[0].idle_since < WorkerThread.MAX_IDLE_TIME:
break
expired_worker = idle_workers.popleft()
expired_worker.root_task.remove_done_callback(expired_worker.stop)
expired_worker.stop()
context = copy_context()
context.run(sniffio.current_async_library_cvar.set, None)
worker.queue.put_nowait((context, func, args, future))
return await future
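# A minimal usage sketch: run blocking time.sleep() in a worker thread so the
# event loop stays responsive.
#
#     import time
#
#     async def main() -> None:
#         await run_sync_in_worker_thread(time.sleep, 1)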
def run_sync_from_thread(
func: Callable[..., T_Retval],
*args: object,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> T_Retval:
@wraps(func)
def wrapper() -> None:
try:
f.set_result(func(*args))
except BaseException as exc:
f.set_exception(exc)
if not isinstance(exc, Exception):
raise
f: concurrent.futures.Future[T_Retval] = Future()
loop = loop or threadlocals.loop
if sys.version_info < (3, 7):
loop.call_soon_threadsafe(copy_context().run, wrapper)
else:
loop.call_soon_threadsafe(wrapper)
return f.result()
def run_async_from_thread(
func: Callable[..., Coroutine[Any, Any, T_Retval]], *args: object
) -> T_Retval:
f: concurrent.futures.Future[T_Retval] = asyncio.run_coroutine_threadsafe(
func(*args), threadlocals.loop
)
return f.result()
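# A minimal usage sketch (some_coroutine_function is hypothetical): these
# helpers are meant to be called from a worker thread spawned via
# run_sync_in_worker_thread, where threadlocals.loop points back at the loop.
#
#     def blocking_worker() -> None:
#         run_sync_from_thread(print, "runs on the event loop thread")
#         run_async_from_thread(some_coroutine_function)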
class BlockingPortal(abc.BlockingPortal):
def __new__(cls) -> "BlockingPortal":
return object.__new__(cls)
def __init__(self) -> None:
super().__init__()
self._loop = get_running_loop()
def _spawn_task_from_thread(
self,
func: Callable,
args: tuple,
kwargs: Dict[str, Any],
name: object,
future: Future,
) -> None:
run_sync_from_thread(
partial(self._task_group.start_soon, name=name),
self._call_func,
func,
args,
kwargs,
future,
loop=self._loop,
)
#
# Subprocesses
#
@dataclass(eq=False)
class StreamReaderWrapper(abc.ByteReceiveStream):
_stream: asyncio.StreamReader
async def receive(self, max_bytes: int = 65536) -> bytes:
data = await self._stream.read(max_bytes)
if data:
return data
else:
raise EndOfStream
async def aclose(self) -> None:
self._stream.feed_eof()
@dataclass(eq=False)
class StreamWriterWrapper(abc.ByteSendStream):
_stream: asyncio.StreamWriter
async def send(self, item: bytes) -> None:
self._stream.write(item)
await self._stream.drain()
async def aclose(self) -> None:
self._stream.close()
@dataclass(eq=False)
class Process(abc.Process):
_process: asyncio.subprocess.Process
_stdin: Optional[StreamWriterWrapper]
_stdout: Optional[StreamReaderWrapper]
_stderr: Optional[StreamReaderWrapper]
async def aclose(self) -> None:
if self._stdin:
await self._stdin.aclose()
if self._stdout:
await self._stdout.aclose()
if self._stderr:
await self._stderr.aclose()
await self.wait()
async def wait(self) -> int:
return await self._process.wait()
def terminate(self) -> None:
self._process.terminate()
def kill(self) -> None:
self._process.kill()
def send_signal(self, signal: int) -> None:
self._process.send_signal(signal)
@property
def pid(self) -> int:
return self._process.pid
@property
def returncode(self) -> Optional[int]:
return self._process.returncode
@property
def stdin(self) -> Optional[abc.ByteSendStream]:
return self._stdin
@property
def stdout(self) -> Optional[abc.ByteReceiveStream]:
return self._stdout
@property
def stderr(self) -> Optional[abc.ByteReceiveStream]:
return self._stderr
async def open_process(
command: Union[str, bytes, Sequence[Union[str, bytes]]],
*,
shell: bool,
stdin: Union[int, IO[Any], None],
stdout: Union[int, IO[Any], None],
stderr: Union[int, IO[Any], None],
cwd: Union[str, bytes, PathLike, None] = None,
env: Optional[Mapping[str, str]] = None,
start_new_session: bool = False,
) -> Process:
await checkpoint()
if shell:
process = await asyncio.create_subprocess_shell(
cast(Union[str, bytes], command),
stdin=stdin,
stdout=stdout,
stderr=stderr,
cwd=cwd,
env=env,
start_new_session=start_new_session,
)
else:
process = await asyncio.create_subprocess_exec(
*command,
stdin=stdin,
stdout=stdout,
stderr=stderr,
cwd=cwd,
env=env,
start_new_session=start_new_session,
)
stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None
stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None
stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None
return Process(process, stdin_stream, stdout_stream, stderr_stream)
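# A minimal usage sketch (POSIX shell assumed): spawn a command and read one
# chunk of its output.
#
#     import subprocess
#
#     async def main() -> None:
#         process = await open_process(
#             "echo hello",
#             shell=True,
#             stdin=subprocess.DEVNULL,
#             stdout=subprocess.PIPE,
#             stderr=subprocess.PIPE,
#         )
#         async with process:
#             print(await process.stdout.receive())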
def _forcibly_shutdown_process_pool_on_exit(
workers: Set[Process], _task: object
) -> None:
"""
Forcibly shuts down worker processes belonging to this event loop."""
child_watcher: Optional[asyncio.AbstractChildWatcher]
try:
child_watcher = asyncio.get_event_loop_policy().get_child_watcher()
except NotImplementedError:
child_watcher = None
# Close as much as possible (w/o async/await) to avoid warnings
for process in workers:
if process.returncode is None:
continue
process._stdin._stream._transport.close() # type: ignore[union-attr]
process._stdout._stream._transport.close() # type: ignore[union-attr]
process._stderr._stream._transport.close() # type: ignore[union-attr]
process.kill()
if child_watcher:
child_watcher.remove_child_handler(process.pid)
async def _shutdown_process_pool_on_exit(workers: Set[Process]) -> None:
"""
Shuts down worker processes belonging to this event loop.
NOTE: this only works when the event loop was started using asyncio.run() or anyio.run().
"""
process: Process
try:
await sleep(math.inf)
except asyncio.CancelledError:
for process in workers:
if process.returncode is None:
process.kill()
for process in workers:
await process.aclose()
def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None:
kwargs = {"name": "AnyIO process pool shutdown task"} if _native_task_names else {}
create_task(_shutdown_process_pool_on_exit(workers), **kwargs)
find_root_task().add_done_callback(
partial(_forcibly_shutdown_process_pool_on_exit, workers)
)
#
# Sockets and networking
#
class StreamProtocol(asyncio.Protocol):
read_queue: Deque[bytes]
read_event: asyncio.Event
write_event: asyncio.Event
exception: Optional[Exception] = None
def connection_made(self, transport: asyncio.BaseTransport) -> None:
self.read_queue = deque()
self.read_event = asyncio.Event()
self.write_event = asyncio.Event()
self.write_event.set()
cast(asyncio.Transport, transport).set_write_buffer_limits(0)
def connection_lost(self, exc: Optional[Exception]) -> None:
if exc:
self.exception = BrokenResourceError()
self.exception.__cause__ = exc
self.read_event.set()
self.write_event.set()
def data_received(self, data: bytes) -> None:
self.read_queue.append(data)
self.read_event.set()
def eof_received(self) -> Optional[bool]:
self.read_event.set()
return True
def pause_writing(self) -> None:
self.write_event = asyncio.Event()
def resume_writing(self) -> None:
self.write_event.set()
class DatagramProtocol(asyncio.DatagramProtocol):
read_queue: Deque[Tuple[bytes, IPSockAddrType]]
read_event: asyncio.Event
write_event: asyncio.Event
exception: Optional[Exception] = None
def connection_made(self, transport: asyncio.BaseTransport) -> None:
self.read_queue = deque(maxlen=100) # arbitrary value
self.read_event = asyncio.Event()
self.write_event = asyncio.Event()
self.write_event.set()
def connection_lost(self, exc: Optional[Exception]) -> None:
self.read_event.set()
self.write_event.set()
def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None:
addr = convert_ipv6_sockaddr(addr)
self.read_queue.append((data, addr))
self.read_event.set()
def error_received(self, exc: Exception) -> None:
self.exception = exc
def pause_writing(self) -> None:
self.write_event.clear()
def resume_writing(self) -> None:
self.write_event.set()
class SocketStream(abc.SocketStream):
def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol):
self._transport = transport
self._protocol = protocol
self._receive_guard = ResourceGuard("reading from")
self._send_guard = ResourceGuard("writing to")
self._closed = False
@property
def _raw_socket(self) -> socket.socket:
return self._transport.get_extra_info("socket")
async def receive(self, max_bytes: int = 65536) -> bytes:
with self._receive_guard:
await checkpoint()
if (
not self._protocol.read_event.is_set()
and not self._transport.is_closing()
):
self._transport.resume_reading()
await self._protocol.read_event.wait()
self._transport.pause_reading()
try:
chunk = self._protocol.read_queue.popleft()
except IndexError:
if self._closed:
raise ClosedResourceError from None
elif self._protocol.exception:
raise self._protocol.exception
else:
raise EndOfStream from None
if len(chunk) > max_bytes:
# Split the oversized chunk
chunk, leftover = chunk[:max_bytes], chunk[max_bytes:]
self._protocol.read_queue.appendleft(leftover)
# If the read queue is empty, clear the flag so that the next call will block until
# data is available
if not self._protocol.read_queue:
self._protocol.read_event.clear()
return chunk
async def send(self, item: bytes) -> None:
with self._send_guard:
await checkpoint()
if self._closed:
raise ClosedResourceError
elif self._protocol.exception is not None:
raise self._protocol.exception
try:
self._transport.write(item)
except RuntimeError as exc:
if self._transport.is_closing():
raise BrokenResourceError from exc
else:
raise
await self._protocol.write_event.wait()
async def send_eof(self) -> None:
try:
self._transport.write_eof()
except OSError:
pass
async def aclose(self) -> None:
if not self._transport.is_closing():
self._closed = True
try:
self._transport.write_eof()
except OSError:
pass
self._transport.close()
await sleep(0)
self._transport.abort()
class UNIXSocketStream(abc.SocketStream):
_receive_future: Optional[asyncio.Future] = None
_send_future: Optional[asyncio.Future] = None
_closing = False
def __init__(self, raw_socket: socket.socket):
self.__raw_socket = raw_socket
self._loop = get_running_loop()
self._receive_guard = ResourceGuard("reading from")
self._send_guard = ResourceGuard("writing to")
@property
def _raw_socket(self) -> socket.socket:
return self.__raw_socket
def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future:
def callback(f: object) -> None:
del self._receive_future
loop.remove_reader(self.__raw_socket)
f = self._receive_future = asyncio.Future()
self._loop.add_reader(self.__raw_socket, f.set_result, None)
f.add_done_callback(callback)
return f
def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future:
def callback(f: object) -> None:
del self._send_future
loop.remove_writer(self.__raw_socket)
f = self._send_future = asyncio.Future()
self._loop.add_writer(self.__raw_socket, f.set_result, None)
f.add_done_callback(callback)
return f
async def send_eof(self) -> None:
with self._send_guard:
self._raw_socket.shutdown(socket.SHUT_WR)
async def receive(self, max_bytes: int = 65536) -> bytes:
loop = get_running_loop()
await checkpoint()
with self._receive_guard:
while True:
try:
data = self.__raw_socket.recv(max_bytes)
except BlockingIOError:
await self._wait_until_readable(loop)
except OSError as exc:
if self._closing:
raise ClosedResourceError from None
else:
raise BrokenResourceError from exc
else:
if not data:
raise EndOfStream
return data
async def send(self, item: bytes) -> None:
loop = get_running_loop()
await checkpoint()
with self._send_guard:
view = memoryview(item)
while view:
try:
                    # send the unsent remainder, not the whole item, so a
                    # partial send does not resend already-written bytes
                    bytes_sent = self.__raw_socket.send(view)
except BlockingIOError:
await self._wait_until_writable(loop)
except OSError as exc:
if self._closing:
raise ClosedResourceError from None
else:
raise BrokenResourceError from exc
else:
view = view[bytes_sent:]
async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]:
if not isinstance(msglen, int) or msglen < 0:
raise ValueError("msglen must be a non-negative integer")
if not isinstance(maxfds, int) or maxfds < 1:
raise ValueError("maxfds must be a positive integer")
loop = get_running_loop()
fds = array.array("i")
await checkpoint()
with self._receive_guard:
while True:
try:
message, ancdata, flags, addr = self.__raw_socket.recvmsg(
msglen, socket.CMSG_LEN(maxfds * fds.itemsize)
)
except BlockingIOError:
await self._wait_until_readable(loop)
except OSError as exc:
if self._closing:
raise ClosedResourceError from None
else:
raise BrokenResourceError from exc
else:
if not message and not ancdata:
raise EndOfStream
break
for cmsg_level, cmsg_type, cmsg_data in ancdata:
if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS:
raise RuntimeError(
f"Received unexpected ancillary data; message = {message!r}, "
f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}"
)
fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
return message, list(fds)
async def send_fds(
self, message: bytes, fds: Collection[Union[int, IOBase]]
) -> None:
if not message:
raise ValueError("message must not be empty")
if not fds:
raise ValueError("fds must not be empty")
loop = get_running_loop()
filenos: List[int] = []
for fd in fds:
if isinstance(fd, int):
filenos.append(fd)
elif isinstance(fd, IOBase):
filenos.append(fd.fileno())
fdarray = array.array("i", filenos)
await checkpoint()
with self._send_guard:
while True:
try:
# The ignore can be removed after mypy picks up
# https://github.com/python/typeshed/pull/5545
self.__raw_socket.sendmsg(
[message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)]
)
break
except BlockingIOError:
await self._wait_until_writable(loop)
except OSError as exc:
if self._closing:
raise ClosedResourceError from None
else:
raise BrokenResourceError from exc
async def aclose(self) -> None:
if not self._closing:
self._closing = True
if self.__raw_socket.fileno() != -1:
self.__raw_socket.close()
if self._receive_future:
self._receive_future.set_result(None)
if self._send_future:
self._send_future.set_result(None)
class TCPSocketListener(abc.SocketListener):
_accept_scope: Optional[CancelScope] = None
_closed = False
def __init__(self, raw_socket: socket.socket):
self.__raw_socket = raw_socket
self._loop = cast(asyncio.BaseEventLoop, get_running_loop())
self._accept_guard = ResourceGuard("accepting connections from")
@property
def _raw_socket(self) -> socket.socket:
return self.__raw_socket
async def accept(self) -> abc.SocketStream:
if self._closed:
raise ClosedResourceError
with self._accept_guard:
await checkpoint()
with CancelScope() as self._accept_scope:
try:
client_sock, _addr = await self._loop.sock_accept(self._raw_socket)
except asyncio.CancelledError:
# Workaround for https://bugs.python.org/issue41317
try:
self._loop.remove_reader(self._raw_socket)
except (ValueError, NotImplementedError):
pass
if self._closed:
raise ClosedResourceError from None
raise
finally:
self._accept_scope = None
client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
transport, protocol = await self._loop.connect_accepted_socket(
StreamProtocol, client_sock
)
return SocketStream(cast(asyncio.Transport, transport), protocol)
async def aclose(self) -> None:
if self._closed:
return
self._closed = True
if self._accept_scope:
# Workaround for https://bugs.python.org/issue41317
try:
self._loop.remove_reader(self._raw_socket)
except (ValueError, NotImplementedError):
pass
self._accept_scope.cancel()
await sleep(0)
self._raw_socket.close()
class UNIXSocketListener(abc.SocketListener):
def __init__(self, raw_socket: socket.socket):
self.__raw_socket = raw_socket
self._loop = get_running_loop()
self._accept_guard = ResourceGuard("accepting connections from")
self._closed = False
async def accept(self) -> abc.SocketStream:
await checkpoint()
with self._accept_guard:
while True:
try:
client_sock, _ = self.__raw_socket.accept()
client_sock.setblocking(False)
return UNIXSocketStream(client_sock)
except BlockingIOError:
f: asyncio.Future = asyncio.Future()
self._loop.add_reader(self.__raw_socket, f.set_result, None)
f.add_done_callback(
lambda _: self._loop.remove_reader(self.__raw_socket)
)
await f
except OSError as exc:
if self._closed:
raise ClosedResourceError from None
else:
raise BrokenResourceError from exc
async def aclose(self) -> None:
self._closed = True
self.__raw_socket.close()
@property
def _raw_socket(self) -> socket.socket:
return self.__raw_socket
class UDPSocket(abc.UDPSocket):
def __init__(
self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol
):
self._transport = transport
self._protocol = protocol
self._receive_guard = ResourceGuard("reading from")
self._send_guard = ResourceGuard("writing to")
self._closed = False
@property
def _raw_socket(self) -> socket.socket:
return self._transport.get_extra_info("socket")
async def aclose(self) -> None:
if not self._transport.is_closing():
self._closed = True
self._transport.close()
async def receive(self) -> Tuple[bytes, IPSockAddrType]:
with self._receive_guard:
await checkpoint()
# If the buffer is empty, ask for more data
if not self._protocol.read_queue and not self._transport.is_closing():
self._protocol.read_event.clear()
await self._protocol.read_event.wait()
try:
return self._protocol.read_queue.popleft()
except IndexError:
if self._closed:
raise ClosedResourceError from None
else:
raise BrokenResourceError from None
async def send(self, item: UDPPacketType) -> None:
with self._send_guard:
await checkpoint()
await self._protocol.write_event.wait()
if self._closed:
raise ClosedResourceError
elif self._transport.is_closing():
raise BrokenResourceError
else:
self._transport.sendto(*item)
class ConnectedUDPSocket(abc.ConnectedUDPSocket):
def __init__(
self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol
):
self._transport = transport
self._protocol = protocol
self._receive_guard = ResourceGuard("reading from")
self._send_guard = ResourceGuard("writing to")
self._closed = False
@property
def _raw_socket(self) -> socket.socket:
return self._transport.get_extra_info("socket")
async def aclose(self) -> None:
if not self._transport.is_closing():
self._closed = True
self._transport.close()
async def receive(self) -> bytes:
with self._receive_guard:
await checkpoint()
# If the buffer is empty, ask for more data
if not self._protocol.read_queue and not self._transport.is_closing():
self._protocol.read_event.clear()
await self._protocol.read_event.wait()
try:
packet = self._protocol.read_queue.popleft()
except IndexError:
if self._closed:
raise ClosedResourceError from None
else:
raise BrokenResourceError from None
return packet[0]
async def send(self, item: bytes) -> None:
with self._send_guard:
await checkpoint()
await self._protocol.write_event.wait()
if self._closed:
raise ClosedResourceError
elif self._transport.is_closing():
raise BrokenResourceError
else:
self._transport.sendto(item)
async def connect_tcp(
host: str, port: int, local_addr: Optional[Tuple[str, int]] = None
) -> SocketStream:
transport, protocol = cast(
Tuple[asyncio.Transport, StreamProtocol],
await get_running_loop().create_connection(
StreamProtocol, host, port, local_addr=local_addr
),
)
transport.pause_reading()
return SocketStream(transport, protocol)
async def connect_unix(path: str) -> UNIXSocketStream:
await checkpoint()
loop = get_running_loop()
raw_socket = socket.socket(socket.AF_UNIX)
raw_socket.setblocking(False)
while True:
try:
raw_socket.connect(path)
except BlockingIOError:
f: asyncio.Future = asyncio.Future()
loop.add_writer(raw_socket, f.set_result, None)
f.add_done_callback(lambda _: loop.remove_writer(raw_socket))
await f
except BaseException:
raw_socket.close()
raise
else:
return UNIXSocketStream(raw_socket)
async def create_udp_socket(
family: socket.AddressFamily,
local_address: Optional[IPSockAddrType],
remote_address: Optional[IPSockAddrType],
reuse_port: bool,
) -> Union[UDPSocket, ConnectedUDPSocket]:
result = await get_running_loop().create_datagram_endpoint(
DatagramProtocol,
local_addr=local_address,
remote_addr=remote_address,
family=family,
reuse_port=reuse_port,
)
transport = cast(asyncio.DatagramTransport, result[0])
protocol = result[1]
if protocol.exception:
transport.close()
raise protocol.exception
if not remote_address:
return UDPSocket(transport, protocol)
else:
return ConnectedUDPSocket(transport, protocol)
async def getaddrinfo(
host: Union[bytes, str],
port: Union[str, int, None],
*,
family: Union[int, AddressFamily] = 0,
type: Union[int, SocketKind] = 0,
proto: int = 0,
flags: int = 0,
) -> GetAddrInfoReturnType:
# https://github.com/python/typeshed/pull/4304
result = await get_running_loop().getaddrinfo(
host, port, family=family, type=type, proto=proto, flags=flags
)
return cast(GetAddrInfoReturnType, result)
async def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Tuple[str, str]:
return await get_running_loop().getnameinfo(sockaddr, flags)
_read_events: RunVar[Dict[Any, asyncio.Event]] = RunVar("read_events")
_write_events: RunVar[Dict[Any, asyncio.Event]] = RunVar("write_events")
async def wait_socket_readable(sock: socket.socket) -> None:
await checkpoint()
try:
read_events = _read_events.get()
except LookupError:
read_events = {}
_read_events.set(read_events)
if read_events.get(sock):
raise BusyResourceError("reading from") from None
loop = get_running_loop()
event = read_events[sock] = asyncio.Event()
loop.add_reader(sock, event.set)
try:
await event.wait()
finally:
if read_events.pop(sock, None) is not None:
loop.remove_reader(sock)
readable = True
else:
readable = False
if not readable:
raise ClosedResourceError
async def wait_socket_writable(sock: socket.socket) -> None:
await checkpoint()
try:
write_events = _write_events.get()
except LookupError:
write_events = {}
_write_events.set(write_events)
if write_events.get(sock):
raise BusyResourceError("writing to") from None
loop = get_running_loop()
event = write_events[sock] = asyncio.Event()
loop.add_writer(sock.fileno(), event.set)
try:
await event.wait()
finally:
if write_events.pop(sock, None) is not None:
loop.remove_writer(sock)
writable = True
else:
writable = False
if not writable:
raise ClosedResourceError
#
# Synchronization
#
class Event(BaseEvent):
def __new__(cls) -> "Event":
return object.__new__(cls)
def __init__(self) -> None:
self._event = asyncio.Event()
def set(self) -> DeprecatedAwaitable:
self._event.set()
return DeprecatedAwaitable(self.set)
def is_set(self) -> bool:
return self._event.is_set()
async def wait(self) -> None:
if await self._event.wait():
await checkpoint()
def statistics(self) -> EventStatistics:
return EventStatistics(len(self._event._waiters)) # type: ignore[attr-defined]
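# Illustrative sketch (not part of the vendored source): how this Event is
# normally used through anyio's public API. Assumes the vendored package
# re-exports upstream anyio's public names as `zdppy_api.anyio`; the helper
# name `_example_event_usage` is hypothetical.
async def _example_event_usage() -> None:
    from zdppy_api import anyio

    event = anyio.Event()

    async def setter() -> None:
        await anyio.sleep(0.1)
        event.set()  # wakes every task blocked in event.wait()

    async with anyio.create_task_group() as tg:
        tg.start_soon(setter)
        await event.wait()  # returns once setter() has called set()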
class CapacityLimiter(BaseCapacityLimiter):
_total_tokens: float = 0
def __new__(cls, total_tokens: float) -> "CapacityLimiter":
return object.__new__(cls)
def __init__(self, total_tokens: float):
self._borrowers: Set[Any] = set()
self._wait_queue: Dict[Any, asyncio.Event] = OrderedDict()
self.total_tokens = total_tokens
async def __aenter__(self) -> None:
await self.acquire()
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
self.release()
@property
def total_tokens(self) -> float:
return self._total_tokens
@total_tokens.setter
def total_tokens(self, value: float) -> None:
if not isinstance(value, int) and not math.isinf(value):
raise TypeError("total_tokens must be an int or math.inf")
if value < 1:
raise ValueError("total_tokens must be >= 1")
old_value = self._total_tokens
self._total_tokens = value
events = []
for event in self._wait_queue.values():
if value <= old_value:
break
if not event.is_set():
events.append(event)
old_value += 1
for event in events:
event.set()
@property
def borrowed_tokens(self) -> int:
return len(self._borrowers)
@property
def available_tokens(self) -> float:
return self._total_tokens - len(self._borrowers)
def acquire_nowait(self) -> DeprecatedAwaitable:
self.acquire_on_behalf_of_nowait(current_task())
return DeprecatedAwaitable(self.acquire_nowait)
def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable:
if borrower in self._borrowers:
raise RuntimeError(
"this borrower is already holding one of this CapacityLimiter's "
"tokens"
)
if self._wait_queue or len(self._borrowers) >= self._total_tokens:
raise WouldBlock
self._borrowers.add(borrower)
return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait)
async def acquire(self) -> None:
return await self.acquire_on_behalf_of(current_task())
async def acquire_on_behalf_of(self, borrower: object) -> None:
await checkpoint_if_cancelled()
try:
self.acquire_on_behalf_of_nowait(borrower)
except WouldBlock:
event = asyncio.Event()
self._wait_queue[borrower] = event
try:
await event.wait()
except BaseException:
self._wait_queue.pop(borrower, None)
raise
self._borrowers.add(borrower)
else:
try:
await cancel_shielded_checkpoint()
except BaseException:
self.release()
raise
def release(self) -> None:
self.release_on_behalf_of(current_task())
def release_on_behalf_of(self, borrower: object) -> None:
try:
self._borrowers.remove(borrower)
except KeyError:
            raise RuntimeError(
                "this borrower isn't holding any of this CapacityLimiter's tokens"
            ) from None
# Notify the next task in line if this limiter has free capacity now
if self._wait_queue and len(self._borrowers) < self._total_tokens:
event = self._wait_queue.popitem()[1]
event.set()
def statistics(self) -> CapacityLimiterStatistics:
return CapacityLimiterStatistics(
self.borrowed_tokens,
self.total_tokens,
tuple(self._borrowers),
len(self._wait_queue),
)
_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter")
def current_default_thread_limiter() -> CapacityLimiter:
try:
return _default_thread_limiter.get()
except LookupError:
limiter = CapacityLimiter(40)
_default_thread_limiter.set(limiter)
return limiter
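# Illustrative sketch (not part of the vendored source): the helper above
# lazily creates a 40-token limiter that caps anyio.to_thread.run_sync();
# passing a custom CapacityLimiter narrows that further. Assumes
# `zdppy_api.anyio` re-exports upstream anyio's public API.
async def _example_thread_limiter() -> None:
    import time

    from zdppy_api import anyio
    from zdppy_api.anyio import to_thread

    limiter = anyio.CapacityLimiter(2)  # at most two blocking calls at once

    async def blocking_call() -> None:
        await to_thread.run_sync(time.sleep, 0.1, limiter=limiter)

    async with anyio.create_task_group() as tg:
        for _ in range(5):
            tg.start_soon(blocking_call)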
#
# Operating system signals
#
class _SignalReceiver(DeprecatedAsyncContextManager["_SignalReceiver"]):
def __init__(self, signals: Tuple[int, ...]):
self._signals = signals
self._loop = get_running_loop()
self._signal_queue: Deque[int] = deque()
self._future: asyncio.Future = asyncio.Future()
self._handled_signals: Set[int] = set()
def _deliver(self, signum: int) -> None:
self._signal_queue.append(signum)
if not self._future.done():
self._future.set_result(None)
def __enter__(self) -> "_SignalReceiver":
for sig in set(self._signals):
self._loop.add_signal_handler(sig, self._deliver, sig)
self._handled_signals.add(sig)
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
for sig in self._handled_signals:
self._loop.remove_signal_handler(sig)
return None
def __aiter__(self) -> "_SignalReceiver":
return self
async def __anext__(self) -> int:
await checkpoint()
if not self._signal_queue:
self._future = asyncio.Future()
await self._future
return self._signal_queue.popleft()
def open_signal_receiver(*signals: int) -> _SignalReceiver:
return _SignalReceiver(signals)
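# Illustrative sketch (not part of the vendored source): consuming operating
# system signals with the receiver above via anyio's public API. POSIX-only
# under the asyncio backend; assumes `zdppy_api.anyio` mirrors upstream anyio.
async def _example_signal_handling() -> None:
    import signal

    from zdppy_api import anyio

    with anyio.open_signal_receiver(signal.SIGTERM, signal.SIGINT) as signals:
        async for signum in signals:
            print(f"received signal {signum}, shutting down")
            return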
#
# Testing and debugging
#
def _create_task_info(task: asyncio.Task) -> TaskInfo:
task_state = _task_states.get(task)
if task_state is None:
name = task.get_name() if _native_task_names else None
parent_id = None
else:
name = task_state.name
parent_id = task_state.parent_id
return TaskInfo(id(task), parent_id, name, get_coro(task))
def get_current_task() -> TaskInfo:
return _create_task_info(current_task()) # type: ignore[arg-type]
def get_running_tasks() -> List[TaskInfo]:
return [_create_task_info(task) for task in all_tasks() if not task.done()]
async def wait_all_tasks_blocked() -> None:
await checkpoint()
this_task = current_task()
while True:
for task in all_tasks():
if task is this_task:
continue
if task._fut_waiter is None or task._fut_waiter.done(): # type: ignore[attr-defined]
await sleep(0.1)
break
else:
return
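# Illustrative sketch (not part of the vendored source): exercising the
# introspection helpers above through anyio's public API. Assumes
# `zdppy_api.anyio` re-exports upstream anyio's top-level names; the helper
# name is hypothetical.
async def _example_task_introspection() -> None:
    from zdppy_api import anyio

    async with anyio.create_task_group() as tg:
        tg.start_soon(anyio.sleep, 0.5)
        await anyio.wait_all_tasks_blocked()  # let the child task start sleeping
        for info in anyio.get_running_tasks():
            print(info.id, info.name)
        tg.cancel_scope.cancel()  # don't wait the full 0.5 seconds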
class TestRunner(abc.TestRunner):
def __init__(
self,
debug: bool = False,
use_uvloop: bool = False,
policy: Optional[asyncio.AbstractEventLoopPolicy] = None,
):
self._exceptions: List[BaseException] = []
_maybe_set_event_loop_policy(policy, use_uvloop)
self._loop = asyncio.new_event_loop()
self._loop.set_debug(debug)
self._loop.set_exception_handler(self._exception_handler)
asyncio.set_event_loop(self._loop)
def _cancel_all_tasks(self) -> None:
to_cancel = all_tasks(self._loop)
if not to_cancel:
return
for task in to_cancel:
task.cancel()
self._loop.run_until_complete(
asyncio.gather(*to_cancel, return_exceptions=True)
)
for task in to_cancel:
if task.cancelled():
continue
if task.exception() is not None:
raise cast(BaseException, task.exception())
def _exception_handler(
self, loop: asyncio.AbstractEventLoop, context: Dict[str, Any]
) -> None:
if isinstance(context.get("exception"), Exception):
self._exceptions.append(context["exception"])
else:
loop.default_exception_handler(context)
def _raise_async_exceptions(self) -> None:
# Re-raise any exceptions raised in asynchronous callbacks
if self._exceptions:
exceptions, self._exceptions = self._exceptions, []
if len(exceptions) == 1:
raise exceptions[0]
elif exceptions:
raise ExceptionGroup(exceptions)
def close(self) -> None:
try:
self._cancel_all_tasks()
self._loop.run_until_complete(self._loop.shutdown_asyncgens())
finally:
asyncio.set_event_loop(None)
self._loop.close()
def run_asyncgen_fixture(
self,
fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]],
kwargs: Dict[str, Any],
) -> Iterable[T_Retval]:
async def fixture_runner() -> None:
agen = fixture_func(**kwargs)
try:
retval = await agen.asend(None)
self._raise_async_exceptions()
except BaseException as exc:
f.set_exception(exc)
return
else:
f.set_result(retval)
await event.wait()
try:
await agen.asend(None)
except StopAsyncIteration:
pass
else:
await agen.aclose()
raise RuntimeError("Async generator fixture did not stop")
f = self._loop.create_future()
event = asyncio.Event()
fixture_task = self._loop.create_task(fixture_runner())
self._loop.run_until_complete(f)
yield f.result()
event.set()
self._loop.run_until_complete(fixture_task)
self._raise_async_exceptions()
def run_fixture(
self,
fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]],
kwargs: Dict[str, Any],
) -> T_Retval:
retval = self._loop.run_until_complete(fixture_func(**kwargs))
self._raise_async_exceptions()
return retval
def run_test(
self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: Dict[str, Any]
) -> None:
try:
self._loop.run_until_complete(test_func(**kwargs))
except Exception as exc:
self._exceptions.append(exc)
        self._raise_async_exceptions()

# ---- end of file: zdppy_api/anyio/_backends/_asyncio.py ----
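# Illustrative sketch (not part of the vendored source): driving the asyncio
# TestRunner defined in _asyncio.py above by hand. Normally the anyio pytest
# plugin owns this object; the test function here is hypothetical.
def _example_test_runner() -> None:
    async def sample_test() -> None:
        assert 1 + 1 == 2

    runner = TestRunner(debug=True)
    try:
        runner.run_test(sample_test, {})
    finally:
        runner.close()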
import array
import math
import socket
from concurrent.futures import Future
from contextvars import copy_context
from dataclasses import dataclass
from functools import partial
from io import IOBase
from os import PathLike
from signal import Signals
from types import TracebackType
from typing import (
IO,
TYPE_CHECKING,
Any,
AsyncGenerator,
Awaitable,
Callable,
Collection,
ContextManager,
Coroutine,
Deque,
Dict,
Generic,
Iterable,
List,
Mapping,
NoReturn,
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from zdppy_api import sniffio
import trio.from_thread
from outcome import Error, Outcome, Value
from trio.socket import SocketType as TrioSocketType
from trio.to_thread import run_sync
from .. import CapacityLimiterStatistics, EventStatistics, TaskInfo, abc
from .._core._compat import DeprecatedAsyncContextManager, DeprecatedAwaitable, T
from .._core._eventloop import claim_worker_thread
from .._core._exceptions import (
BrokenResourceError,
BusyResourceError,
ClosedResourceError,
EndOfStream,
)
from .._core._exceptions import ExceptionGroup as BaseExceptionGroup
from .._core._sockets import convert_ipv6_sockaddr
from .._core._synchronization import CapacityLimiter as BaseCapacityLimiter
from .._core._synchronization import Event as BaseEvent
from .._core._synchronization import ResourceGuard
from .._core._tasks import CancelScope as BaseCancelScope
from ..abc import IPSockAddrType, UDPPacketType
if TYPE_CHECKING:
from trio_typing import TaskStatus
try:
from trio import lowlevel as trio_lowlevel
except ImportError:
from trio import hazmat as trio_lowlevel # type: ignore[no-redef]
from trio.hazmat import wait_readable, wait_writable
else:
from trio.lowlevel import wait_readable, wait_writable
try:
trio_open_process = trio_lowlevel.open_process # type: ignore[attr-defined]
except AttributeError:
from trio import open_process as trio_open_process
T_Retval = TypeVar("T_Retval")
T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType)
#
# Event loop
#
run = trio.run
current_token = trio.lowlevel.current_trio_token
RunVar = trio.lowlevel.RunVar
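# Illustrative sketch (not part of the vendored source): this backend module
# is loaded when anyio code is started with backend="trio". Assumes
# `zdppy_api.anyio` exposes anyio.run() as upstream does, and that trio is
# installed.
def _example_run_on_trio() -> None:
    from zdppy_api import anyio

    async def main() -> None:
        await anyio.sleep(0.01)
        print("ran on the trio backend")

    anyio.run(main, backend="trio")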
#
# Miscellaneous
#
sleep = trio.sleep
#
# Timeouts and cancellation
#
class CancelScope(BaseCancelScope):
def __new__(
cls, original: Optional[trio.CancelScope] = None, **kwargs: object
) -> "CancelScope":
return object.__new__(cls)
def __init__(
self, original: Optional[trio.CancelScope] = None, **kwargs: Any
) -> None:
self.__original = original or trio.CancelScope(**kwargs)
def __enter__(self) -> "CancelScope":
self.__original.__enter__()
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
return self.__original.__exit__(exc_type, exc_val, exc_tb)
def cancel(self) -> DeprecatedAwaitable:
self.__original.cancel()
return DeprecatedAwaitable(self.cancel)
@property
def deadline(self) -> float:
return self.__original.deadline
@deadline.setter
def deadline(self, value: float) -> None:
self.__original.deadline = value
@property
def cancel_called(self) -> bool:
return self.__original.cancel_called
@property
def shield(self) -> bool:
return self.__original.shield
@shield.setter
def shield(self, value: bool) -> None:
self.__original.shield = value
CancelledError = trio.Cancelled
checkpoint = trio.lowlevel.checkpoint
checkpoint_if_cancelled = trio.lowlevel.checkpoint_if_cancelled
cancel_shielded_checkpoint = trio.lowlevel.cancel_shielded_checkpoint
current_effective_deadline = trio.current_effective_deadline
current_time = trio.current_time
#
# Task groups
#
class ExceptionGroup(BaseExceptionGroup, trio.MultiError):
pass
class TaskGroup(abc.TaskGroup):
def __init__(self) -> None:
self._active = False
self._nursery_manager = trio.open_nursery()
self.cancel_scope = None # type: ignore[assignment]
async def __aenter__(self) -> "TaskGroup":
self._active = True
self._nursery = await self._nursery_manager.__aenter__()
self.cancel_scope = CancelScope(self._nursery.cancel_scope)
return self
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
try:
return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb)
except trio.MultiError as exc:
raise ExceptionGroup(exc.exceptions) from None
finally:
self._active = False
def start_soon(self, func: Callable, *args: object, name: object = None) -> None:
if not self._active:
raise RuntimeError(
"This task group is not active; no new tasks can be started."
)
self._nursery.start_soon(func, *args, name=name)
async def start(
self, func: Callable[..., Coroutine], *args: object, name: object = None
) -> object:
if not self._active:
raise RuntimeError(
"This task group is not active; no new tasks can be started."
)
return await self._nursery.start(func, *args, name=name)
#
# Threads
#
async def run_sync_in_worker_thread(
func: Callable[..., T_Retval],
*args: object,
cancellable: bool = False,
limiter: Optional[trio.CapacityLimiter] = None,
) -> T_Retval:
def wrapper() -> T_Retval:
with claim_worker_thread("trio"):
return func(*args)
# TODO: remove explicit context copying when trio 0.20 is the minimum requirement
context = copy_context()
context.run(sniffio.current_async_library_cvar.set, None)
return await run_sync(
context.run, wrapper, cancellable=cancellable, limiter=limiter
)
# TODO: remove this workaround when trio 0.20 is the minimum requirement
def run_async_from_thread(
fn: Callable[..., Awaitable[T_Retval]], *args: Any
) -> T_Retval:
async def wrapper() -> T_Retval:
retval: T_Retval
async def inner() -> None:
nonlocal retval
__tracebackhide__ = True
retval = await fn(*args)
async with trio.open_nursery() as n:
context.run(n.start_soon, inner)
__tracebackhide__ = True
return retval
context = copy_context()
context.run(sniffio.current_async_library_cvar.set, "trio")
return trio.from_thread.run(wrapper)
def run_sync_from_thread(fn: Callable[..., T_Retval], *args: Any) -> T_Retval:
# TODO: remove explicit context copying when trio 0.20 is the minimum requirement
retval = trio.from_thread.run_sync(copy_context().run, fn, *args)
return cast(T_Retval, retval)
class BlockingPortal(abc.BlockingPortal):
def __new__(cls) -> "BlockingPortal":
return object.__new__(cls)
def __init__(self) -> None:
super().__init__()
self._token = trio.lowlevel.current_trio_token()
def _spawn_task_from_thread(
self,
func: Callable,
args: tuple,
kwargs: Dict[str, Any],
name: object,
future: Future,
) -> None:
context = copy_context()
context.run(sniffio.current_async_library_cvar.set, "trio")
trio.from_thread.run_sync(
context.run,
partial(self._task_group.start_soon, name=name),
self._call_func,
func,
args,
kwargs,
future,
trio_token=self._token,
)
#
# Subprocesses
#
@dataclass(eq=False)
class ReceiveStreamWrapper(abc.ByteReceiveStream):
_stream: trio.abc.ReceiveStream
async def receive(self, max_bytes: Optional[int] = None) -> bytes:
try:
data = await self._stream.receive_some(max_bytes)
except trio.ClosedResourceError as exc:
raise ClosedResourceError from exc.__cause__
except trio.BrokenResourceError as exc:
raise BrokenResourceError from exc.__cause__
if data:
return data
else:
raise EndOfStream
async def aclose(self) -> None:
await self._stream.aclose()
@dataclass(eq=False)
class SendStreamWrapper(abc.ByteSendStream):
_stream: trio.abc.SendStream
async def send(self, item: bytes) -> None:
try:
await self._stream.send_all(item)
except trio.ClosedResourceError as exc:
raise ClosedResourceError from exc.__cause__
except trio.BrokenResourceError as exc:
raise BrokenResourceError from exc.__cause__
async def aclose(self) -> None:
await self._stream.aclose()
@dataclass(eq=False)
class Process(abc.Process):
_process: trio.Process
_stdin: Optional[abc.ByteSendStream]
_stdout: Optional[abc.ByteReceiveStream]
_stderr: Optional[abc.ByteReceiveStream]
async def aclose(self) -> None:
if self._stdin:
await self._stdin.aclose()
if self._stdout:
await self._stdout.aclose()
if self._stderr:
await self._stderr.aclose()
await self.wait()
async def wait(self) -> int:
return await self._process.wait()
def terminate(self) -> None:
self._process.terminate()
def kill(self) -> None:
self._process.kill()
def send_signal(self, signal: Signals) -> None:
self._process.send_signal(signal)
@property
def pid(self) -> int:
return self._process.pid
@property
def returncode(self) -> Optional[int]:
return self._process.returncode
@property
def stdin(self) -> Optional[abc.ByteSendStream]:
return self._stdin
@property
def stdout(self) -> Optional[abc.ByteReceiveStream]:
return self._stdout
@property
def stderr(self) -> Optional[abc.ByteReceiveStream]:
return self._stderr
async def open_process(
command: Union[str, bytes, Sequence[Union[str, bytes]]],
*,
shell: bool,
stdin: Union[int, IO[Any], None],
stdout: Union[int, IO[Any], None],
stderr: Union[int, IO[Any], None],
cwd: Union[str, bytes, PathLike, None] = None,
env: Optional[Mapping[str, str]] = None,
start_new_session: bool = False,
) -> Process:
process = await trio_open_process(
command,
stdin=stdin,
stdout=stdout,
stderr=stderr,
shell=shell,
cwd=cwd,
env=env,
start_new_session=start_new_session,
)
stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None
stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None
stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None
return Process(process, stdin_stream, stdout_stream, stderr_stream)
class _ProcessPoolShutdownInstrument(trio.abc.Instrument):
def after_run(self) -> None:
super().after_run()
current_default_worker_process_limiter: RunVar = RunVar(
"current_default_worker_process_limiter"
)
async def _shutdown_process_pool(workers: Set[Process]) -> None:
process: Process
try:
await sleep(math.inf)
except trio.Cancelled:
for process in workers:
if process.returncode is None:
process.kill()
with CancelScope(shield=True):
for process in workers:
await process.aclose()
def setup_process_pool_exit_at_shutdown(workers: Set[Process]) -> None:
trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers)
#
# Sockets and networking
#
class _TrioSocketMixin(Generic[T_SockAddr]):
def __init__(self, trio_socket: TrioSocketType) -> None:
self._trio_socket = trio_socket
self._closed = False
def _check_closed(self) -> None:
if self._closed:
raise ClosedResourceError
if self._trio_socket.fileno() < 0:
raise BrokenResourceError
@property
def _raw_socket(self) -> socket.socket:
return self._trio_socket._sock # type: ignore[attr-defined]
async def aclose(self) -> None:
if self._trio_socket.fileno() >= 0:
self._closed = True
self._trio_socket.close()
def _convert_socket_error(self, exc: BaseException) -> "NoReturn":
if isinstance(exc, trio.ClosedResourceError):
raise ClosedResourceError from exc
elif self._trio_socket.fileno() < 0 and self._closed:
raise ClosedResourceError from None
elif isinstance(exc, OSError):
raise BrokenResourceError from exc
else:
raise exc
class SocketStream(_TrioSocketMixin, abc.SocketStream):
def __init__(self, trio_socket: TrioSocketType) -> None:
super().__init__(trio_socket)
self._receive_guard = ResourceGuard("reading from")
self._send_guard = ResourceGuard("writing to")
async def receive(self, max_bytes: int = 65536) -> bytes:
with self._receive_guard:
try:
data = await self._trio_socket.recv(max_bytes)
except BaseException as exc:
self._convert_socket_error(exc)
if data:
return data
else:
raise EndOfStream
async def send(self, item: bytes) -> None:
with self._send_guard:
view = memoryview(item)
while view:
try:
bytes_sent = await self._trio_socket.send(view)
except BaseException as exc:
self._convert_socket_error(exc)
view = view[bytes_sent:]
async def send_eof(self) -> None:
self._trio_socket.shutdown(socket.SHUT_WR)
class UNIXSocketStream(SocketStream, abc.UNIXSocketStream):
async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]:
if not isinstance(msglen, int) or msglen < 0:
raise ValueError("msglen must be a non-negative integer")
if not isinstance(maxfds, int) or maxfds < 1:
raise ValueError("maxfds must be a positive integer")
fds = array.array("i")
await checkpoint()
with self._receive_guard:
while True:
try:
message, ancdata, flags, addr = await self._trio_socket.recvmsg(
msglen, socket.CMSG_LEN(maxfds * fds.itemsize)
)
except BaseException as exc:
self._convert_socket_error(exc)
else:
if not message and not ancdata:
raise EndOfStream
break
for cmsg_level, cmsg_type, cmsg_data in ancdata:
if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS:
raise RuntimeError(
f"Received unexpected ancillary data; message = {message!r}, "
f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}"
)
fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
return message, list(fds)
async def send_fds(
self, message: bytes, fds: Collection[Union[int, IOBase]]
) -> None:
if not message:
raise ValueError("message must not be empty")
if not fds:
raise ValueError("fds must not be empty")
filenos: List[int] = []
for fd in fds:
if isinstance(fd, int):
filenos.append(fd)
elif isinstance(fd, IOBase):
filenos.append(fd.fileno())
fdarray = array.array("i", filenos)
await checkpoint()
with self._send_guard:
while True:
try:
await self._trio_socket.sendmsg(
[message],
[
(
socket.SOL_SOCKET,
socket.SCM_RIGHTS, # type: ignore[list-item]
fdarray,
)
],
)
break
except BaseException as exc:
self._convert_socket_error(exc)
class TCPSocketListener(_TrioSocketMixin, abc.SocketListener):
def __init__(self, raw_socket: socket.socket):
super().__init__(trio.socket.from_stdlib_socket(raw_socket))
self._accept_guard = ResourceGuard("accepting connections from")
async def accept(self) -> SocketStream:
with self._accept_guard:
try:
trio_socket, _addr = await self._trio_socket.accept()
except BaseException as exc:
self._convert_socket_error(exc)
trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
return SocketStream(trio_socket)
class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener):
def __init__(self, raw_socket: socket.socket):
super().__init__(trio.socket.from_stdlib_socket(raw_socket))
self._accept_guard = ResourceGuard("accepting connections from")
async def accept(self) -> UNIXSocketStream:
with self._accept_guard:
try:
trio_socket, _addr = await self._trio_socket.accept()
except BaseException as exc:
self._convert_socket_error(exc)
return UNIXSocketStream(trio_socket)
class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket):
def __init__(self, trio_socket: TrioSocketType) -> None:
super().__init__(trio_socket)
self._receive_guard = ResourceGuard("reading from")
self._send_guard = ResourceGuard("writing to")
async def receive(self) -> Tuple[bytes, IPSockAddrType]:
with self._receive_guard:
try:
data, addr = await self._trio_socket.recvfrom(65536)
return data, convert_ipv6_sockaddr(addr)
except BaseException as exc:
self._convert_socket_error(exc)
async def send(self, item: UDPPacketType) -> None:
with self._send_guard:
try:
await self._trio_socket.sendto(*item)
except BaseException as exc:
self._convert_socket_error(exc)
class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket):
def __init__(self, trio_socket: TrioSocketType) -> None:
super().__init__(trio_socket)
self._receive_guard = ResourceGuard("reading from")
self._send_guard = ResourceGuard("writing to")
async def receive(self) -> bytes:
with self._receive_guard:
try:
return await self._trio_socket.recv(65536)
except BaseException as exc:
self._convert_socket_error(exc)
async def send(self, item: bytes) -> None:
with self._send_guard:
try:
await self._trio_socket.send(item)
except BaseException as exc:
self._convert_socket_error(exc)
async def connect_tcp(
host: str, port: int, local_address: Optional[IPSockAddrType] = None
) -> SocketStream:
family = socket.AF_INET6 if ":" in host else socket.AF_INET
trio_socket = trio.socket.socket(family)
trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if local_address:
await trio_socket.bind(local_address)
try:
await trio_socket.connect((host, port))
except BaseException:
trio_socket.close()
raise
return SocketStream(trio_socket)
async def connect_unix(path: str) -> UNIXSocketStream:
trio_socket = trio.socket.socket(socket.AF_UNIX)
try:
await trio_socket.connect(path)
except BaseException:
trio_socket.close()
raise
return UNIXSocketStream(trio_socket)
async def create_udp_socket(
family: socket.AddressFamily,
local_address: Optional[IPSockAddrType],
remote_address: Optional[IPSockAddrType],
reuse_port: bool,
) -> Union[UDPSocket, ConnectedUDPSocket]:
trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM)
if reuse_port:
trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
if local_address:
await trio_socket.bind(local_address)
if remote_address:
await trio_socket.connect(remote_address)
return ConnectedUDPSocket(trio_socket)
else:
return UDPSocket(trio_socket)
getaddrinfo = trio.socket.getaddrinfo
getnameinfo = trio.socket.getnameinfo
async def wait_socket_readable(sock: socket.socket) -> None:
try:
await wait_readable(sock)
except trio.ClosedResourceError as exc:
raise ClosedResourceError().with_traceback(exc.__traceback__) from None
except trio.BusyResourceError:
raise BusyResourceError("reading from") from None
async def wait_socket_writable(sock: socket.socket) -> None:
try:
await wait_writable(sock)
except trio.ClosedResourceError as exc:
raise ClosedResourceError().with_traceback(exc.__traceback__) from None
except trio.BusyResourceError:
raise BusyResourceError("writing to") from None
#
# Synchronization
#
class Event(BaseEvent):
def __new__(cls) -> "Event":
return object.__new__(cls)
def __init__(self) -> None:
self.__original = trio.Event()
def is_set(self) -> bool:
return self.__original.is_set()
async def wait(self) -> None:
return await self.__original.wait()
def statistics(self) -> EventStatistics:
orig_statistics = self.__original.statistics()
return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting)
def set(self) -> DeprecatedAwaitable:
self.__original.set()
return DeprecatedAwaitable(self.set)
class CapacityLimiter(BaseCapacityLimiter):
def __new__(cls, *args: object, **kwargs: object) -> "CapacityLimiter":
return object.__new__(cls)
def __init__(
self, *args: Any, original: Optional[trio.CapacityLimiter] = None
) -> None:
self.__original = original or trio.CapacityLimiter(*args)
async def __aenter__(self) -> None:
return await self.__original.__aenter__()
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
return await self.__original.__aexit__(exc_type, exc_val, exc_tb)
@property
def total_tokens(self) -> float:
return self.__original.total_tokens
@total_tokens.setter
def total_tokens(self, value: float) -> None:
self.__original.total_tokens = value
@property
def borrowed_tokens(self) -> int:
return self.__original.borrowed_tokens
@property
def available_tokens(self) -> float:
return self.__original.available_tokens
def acquire_nowait(self) -> DeprecatedAwaitable:
self.__original.acquire_nowait()
return DeprecatedAwaitable(self.acquire_nowait)
def acquire_on_behalf_of_nowait(self, borrower: object) -> DeprecatedAwaitable:
self.__original.acquire_on_behalf_of_nowait(borrower)
return DeprecatedAwaitable(self.acquire_on_behalf_of_nowait)
async def acquire(self) -> None:
await self.__original.acquire()
async def acquire_on_behalf_of(self, borrower: object) -> None:
await self.__original.acquire_on_behalf_of(borrower)
def release(self) -> None:
return self.__original.release()
def release_on_behalf_of(self, borrower: object) -> None:
return self.__original.release_on_behalf_of(borrower)
def statistics(self) -> CapacityLimiterStatistics:
orig = self.__original.statistics()
return CapacityLimiterStatistics(
borrowed_tokens=orig.borrowed_tokens,
total_tokens=orig.total_tokens,
borrowers=orig.borrowers,
tasks_waiting=orig.tasks_waiting,
)
_capacity_limiter_wrapper: RunVar = RunVar("_capacity_limiter_wrapper")
def current_default_thread_limiter() -> CapacityLimiter:
try:
return _capacity_limiter_wrapper.get()
except LookupError:
limiter = CapacityLimiter(
original=trio.to_thread.current_default_thread_limiter()
)
_capacity_limiter_wrapper.set(limiter)
return limiter
#
# Signal handling
#
class _SignalReceiver(DeprecatedAsyncContextManager[T]):
def __init__(self, cm: ContextManager[T]):
self._cm = cm
def __enter__(self) -> T:
return self._cm.__enter__()
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
return self._cm.__exit__(exc_type, exc_val, exc_tb)
def open_signal_receiver(*signals: Signals) -> _SignalReceiver:
cm = trio.open_signal_receiver(*signals)
return _SignalReceiver(cm)
#
# Testing and debugging
#
def get_current_task() -> TaskInfo:
task = trio_lowlevel.current_task()
parent_id = None
if task.parent_nursery and task.parent_nursery.parent_task:
parent_id = id(task.parent_nursery.parent_task)
return TaskInfo(id(task), parent_id, task.name, task.coro)
def get_running_tasks() -> List[TaskInfo]:
root_task = trio_lowlevel.current_root_task()
task_infos = [TaskInfo(id(root_task), None, root_task.name, root_task.coro)]
nurseries = root_task.child_nurseries
while nurseries:
new_nurseries: List[trio.Nursery] = []
for nursery in nurseries:
for task in nursery.child_tasks:
task_infos.append(
TaskInfo(id(task), id(nursery.parent_task), task.name, task.coro)
)
new_nurseries.extend(task.child_nurseries)
nurseries = new_nurseries
return task_infos
def wait_all_tasks_blocked() -> Awaitable[None]:
import trio.testing
return trio.testing.wait_all_tasks_blocked()
class TestRunner(abc.TestRunner):
def __init__(self, **options: Any) -> None:
from collections import deque
from queue import Queue
self._call_queue: "Queue[Callable[..., object]]" = Queue()
self._result_queue: Deque[Outcome] = deque()
self._stop_event: Optional[trio.Event] = None
self._nursery: Optional[trio.Nursery] = None
self._options = options
async def _trio_main(self) -> None:
self._stop_event = trio.Event()
async with trio.open_nursery() as self._nursery:
await self._stop_event.wait()
async def _call_func(
self, func: Callable[..., Awaitable[object]], args: tuple, kwargs: dict
) -> None:
try:
retval = await func(*args, **kwargs)
except BaseException as exc:
self._result_queue.append(Error(exc))
else:
self._result_queue.append(Value(retval))
def _main_task_finished(self, outcome: object) -> None:
self._nursery = None
def _get_nursery(self) -> trio.Nursery:
if self._nursery is None:
trio.lowlevel.start_guest_run(
self._trio_main,
run_sync_soon_threadsafe=self._call_queue.put,
done_callback=self._main_task_finished,
**self._options,
)
while self._nursery is None:
self._call_queue.get()()
return self._nursery
def _call(
self, func: Callable[..., Awaitable[T_Retval]], *args: object, **kwargs: object
) -> T_Retval:
self._get_nursery().start_soon(self._call_func, func, args, kwargs)
while not self._result_queue:
self._call_queue.get()()
outcome = self._result_queue.pop()
return outcome.unwrap()
def close(self) -> None:
if self._stop_event:
self._stop_event.set()
while self._nursery is not None:
self._call_queue.get()()
def run_asyncgen_fixture(
self,
fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]],
kwargs: Dict[str, Any],
) -> Iterable[T_Retval]:
async def fixture_runner(*, task_status: "TaskStatus") -> None:
agen = fixture_func(**kwargs)
retval = await agen.asend(None)
task_status.started(retval)
await teardown_event.wait()
try:
await agen.asend(None)
except StopAsyncIteration:
pass
else:
await agen.aclose()
raise RuntimeError("Async generator fixture did not stop")
teardown_event = trio.Event()
fixture_value = self._call(lambda: self._get_nursery().start(fixture_runner))
yield fixture_value
teardown_event.set()
def run_fixture(
self,
fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]],
kwargs: Dict[str, Any],
) -> T_Retval:
return self._call(fixture_func, **kwargs)
def run_test(
self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: Dict[str, Any]
) -> None:
        self._call(test_func, **kwargs)

# ---- end of file: zdppy_api/anyio/_backends/_trio.py ----
import typing
from abc import ABCMeta, abstractmethod
from types import TracebackType
from typing import Any, Callable, Coroutine, Optional, Type, TypeVar
from warnings import warn
if typing.TYPE_CHECKING:
from .._core._tasks import CancelScope
T_Retval = TypeVar("T_Retval")
class TaskStatus(metaclass=ABCMeta):
@abstractmethod
def started(self, value: object = None) -> None:
"""
Signal that the task has started.
:param value: object passed back to the starter of the task
"""
class TaskGroup(metaclass=ABCMeta):
"""
Groups several asynchronous tasks together.
:ivar cancel_scope: the cancel scope inherited by all child tasks
:vartype cancel_scope: CancelScope
"""
cancel_scope: "CancelScope"
async def spawn(
self,
func: Callable[..., Coroutine[Any, Any, Any]],
*args: object,
name: object = None
) -> None:
"""
Start a new task in this task group.
:param func: a coroutine function
:param args: positional arguments to call the function with
:param name: name of the task, for the purposes of introspection and debugging
.. deprecated:: 3.0
Use :meth:`start_soon` instead. If your code needs AnyIO 2 compatibility, you
can keep using this until AnyIO 4.
"""
warn(
'spawn() is deprecated -- use start_soon() (without the "await") instead',
DeprecationWarning,
)
self.start_soon(func, *args, name=name)
@abstractmethod
def start_soon(
self,
func: Callable[..., Coroutine[Any, Any, Any]],
*args: object,
name: object = None
) -> None:
"""
Start a new task in this task group.
:param func: a coroutine function
:param args: positional arguments to call the function with
:param name: name of the task, for the purposes of introspection and debugging
.. versionadded:: 3.0
"""
@abstractmethod
async def start(
self,
func: Callable[..., Coroutine[Any, Any, Any]],
*args: object,
name: object = None
) -> object:
"""
Start a new task and wait until it signals for readiness.
:param func: a coroutine function
:param args: positional arguments to call the function with
:param name: name of the task, for the purposes of introspection and debugging
:return: the value passed to ``task_status.started()``
:raises RuntimeError: if the task finishes without calling ``task_status.started()``
.. versionadded:: 3.0
"""
@abstractmethod
async def __aenter__(self) -> "TaskGroup":
"""Enter the task group context and allow starting new tasks."""
@abstractmethod
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
"""Exit the task group context waiting for all tasks to finish.""" | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/abc/_tasks.py | _tasks.py |
from abc import abstractmethod
from typing import Any, Callable, Generic, Optional, TypeVar, Union
from .._core._exceptions import EndOfStream
from .._core._typedattr import TypedAttributeProvider
from ._resources import AsyncResource
from ._tasks import TaskGroup
T_Item = TypeVar("T_Item")
T_Stream = TypeVar("T_Stream")
class UnreliableObjectReceiveStream(
Generic[T_Item], AsyncResource, TypedAttributeProvider
):
"""
An interface for receiving objects.
This interface makes no guarantees that the received messages arrive in the order in which they
were sent, or that no messages are missed.
Asynchronously iterating over objects of this type will yield objects matching the given type
parameter.
"""
def __aiter__(self) -> "UnreliableObjectReceiveStream[T_Item]":
return self
async def __anext__(self) -> T_Item:
try:
return await self.receive()
except EndOfStream:
raise StopAsyncIteration
@abstractmethod
async def receive(self) -> T_Item:
"""
Receive the next item.
:raises ~anyio.ClosedResourceError: if the receive stream has been explicitly
closed
:raises ~anyio.EndOfStream: if this stream has been closed from the other end
:raises ~anyio.BrokenResourceError: if this stream has been rendered unusable
due to external causes
"""
class UnreliableObjectSendStream(
Generic[T_Item], AsyncResource, TypedAttributeProvider
):
"""
An interface for sending objects.
This interface makes no guarantees that the messages sent will reach the recipient(s) in the
same order in which they were sent, or at all.
"""
@abstractmethod
async def send(self, item: T_Item) -> None:
"""
Send an item to the peer(s).
:param item: the item to send
:raises ~anyio.ClosedResourceError: if the send stream has been explicitly
closed
:raises ~anyio.BrokenResourceError: if this stream has been rendered unusable
due to external causes
"""
class UnreliableObjectStream(
UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item]
):
"""
A bidirectional message stream which does not guarantee the order or reliability of message
delivery.
"""
class ObjectReceiveStream(UnreliableObjectReceiveStream[T_Item]):
"""
A receive message stream which guarantees that messages are received in the same order in
which they were sent, and that no messages are missed.
"""
class ObjectSendStream(UnreliableObjectSendStream[T_Item]):
"""
A send message stream which guarantees that messages are delivered in the same order in which
they were sent, without missing any messages in the middle.
"""
class ObjectStream(
ObjectReceiveStream[T_Item],
ObjectSendStream[T_Item],
UnreliableObjectStream[T_Item],
):
"""
A bidirectional message stream which guarantees the order and reliability of message delivery.
"""
@abstractmethod
async def send_eof(self) -> None:
"""
Send an end-of-file indication to the peer.
You should not try to send any further data to this stream after calling this method.
This method is idempotent (does nothing on successive calls).
"""
class ByteReceiveStream(AsyncResource, TypedAttributeProvider):
"""
An interface for receiving bytes from a single peer.
Iterating this byte stream will yield a byte string of arbitrary length, but no more than
65536 bytes.
"""
def __aiter__(self) -> "ByteReceiveStream":
return self
async def __anext__(self) -> bytes:
try:
return await self.receive()
except EndOfStream:
raise StopAsyncIteration
@abstractmethod
async def receive(self, max_bytes: int = 65536) -> bytes:
"""
Receive at most ``max_bytes`` bytes from the peer.
        .. note:: Implementors of this interface should not return an empty
           :class:`bytes` object, and users should ignore any empty byte
           strings they receive.
:param max_bytes: maximum number of bytes to receive
:return: the received bytes
:raises ~anyio.EndOfStream: if this stream has been closed from the other end
"""
class ByteSendStream(AsyncResource, TypedAttributeProvider):
"""An interface for sending bytes to a single peer."""
@abstractmethod
async def send(self, item: bytes) -> None:
"""
Send the given bytes to the peer.
:param item: the bytes to send
"""
class ByteStream(ByteReceiveStream, ByteSendStream):
"""A bidirectional byte stream."""
@abstractmethod
async def send_eof(self) -> None:
"""
Send an end-of-file indication to the peer.
You should not try to send any further data to this stream after calling this method.
This method is idempotent (does nothing on successive calls).
"""
#: Type alias for all unreliable bytes-oriented receive streams.
AnyUnreliableByteReceiveStream = Union[
UnreliableObjectReceiveStream[bytes], ByteReceiveStream
]
#: Type alias for all unreliable bytes-oriented send streams.
AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream]
#: Type alias for all unreliable bytes-oriented streams.
AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream]
#: Type alias for all bytes-oriented receive streams.
AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream]
#: Type alias for all bytes-oriented send streams.
AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream]
#: Type alias for all bytes-oriented streams.
AnyByteStream = Union[ObjectStream[bytes], ByteStream]
class Listener(Generic[T_Stream], AsyncResource, TypedAttributeProvider):
"""An interface for objects that let you accept incoming connections."""
@abstractmethod
async def serve(
self, handler: Callable[[T_Stream], Any], task_group: Optional[TaskGroup] = None
) -> None:
"""
Accept incoming connections as they come in and start tasks to handle them.
:param handler: a callable that will be used to handle each accepted connection
:param task_group: the task group that will be used to start tasks for handling each
accepted connection (if omitted, an ad-hoc task group will be created)
""" | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/abc/_streams.py | _streams.py |
import socket
from abc import abstractmethod
from io import IOBase
from ipaddress import IPv4Address, IPv6Address
from socket import AddressFamily
from types import TracebackType
from typing import (
Any,
AsyncContextManager,
Callable,
Collection,
Dict,
List,
Mapping,
Optional,
Tuple,
Type,
TypeVar,
Union,
)
from .._core._typedattr import (
TypedAttributeProvider,
TypedAttributeSet,
typed_attribute,
)
from ._streams import ByteStream, Listener, T_Stream, UnreliableObjectStream
from ._tasks import TaskGroup
IPAddressType = Union[str, IPv4Address, IPv6Address]
IPSockAddrType = Tuple[str, int]
SockAddrType = Union[IPSockAddrType, str]
UDPPacketType = Tuple[bytes, IPSockAddrType]
T_Retval = TypeVar("T_Retval")
class _NullAsyncContextManager:
async def __aenter__(self) -> None:
pass
async def __aexit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
return None
class SocketAttribute(TypedAttributeSet):
#: the address family of the underlying socket
family: AddressFamily = typed_attribute()
#: the local socket address of the underlying socket
local_address: SockAddrType = typed_attribute()
#: for IP addresses, the local port the underlying socket is bound to
local_port: int = typed_attribute()
#: the underlying stdlib socket object
raw_socket: socket.socket = typed_attribute()
#: the remote address the underlying socket is connected to
remote_address: SockAddrType = typed_attribute()
#: for IP addresses, the remote port the underlying socket is connected to
remote_port: int = typed_attribute()
class _SocketProvider(TypedAttributeProvider):
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
from .._core._sockets import convert_ipv6_sockaddr as convert
attributes: Dict[Any, Callable[[], Any]] = {
SocketAttribute.family: lambda: self._raw_socket.family,
SocketAttribute.local_address: lambda: convert(
self._raw_socket.getsockname()
),
SocketAttribute.raw_socket: lambda: self._raw_socket,
}
try:
peername: Optional[Tuple[str, int]] = convert(
self._raw_socket.getpeername()
)
except OSError:
peername = None
# Provide the remote address for connected sockets
if peername is not None:
attributes[SocketAttribute.remote_address] = lambda: peername
# Provide local and remote ports for IP based sockets
if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6):
attributes[
SocketAttribute.local_port
] = lambda: self._raw_socket.getsockname()[1]
if peername is not None:
remote_port = peername[1]
attributes[SocketAttribute.remote_port] = lambda: remote_port
return attributes
@property
@abstractmethod
def _raw_socket(self) -> socket.socket:
pass
class SocketStream(ByteStream, _SocketProvider):
"""
Transports bytes over a socket.
Supports all relevant extra attributes from :class:`~SocketAttribute`.
"""
class UNIXSocketStream(SocketStream):
@abstractmethod
async def send_fds(
self, message: bytes, fds: Collection[Union[int, IOBase]]
) -> None:
"""
Send file descriptors along with a message to the peer.
:param message: a non-empty bytestring
:param fds: a collection of files (either numeric file descriptors or open file or socket
objects)
"""
@abstractmethod
async def receive_fds(self, msglen: int, maxfds: int) -> Tuple[bytes, List[int]]:
"""
Receive file descriptors along with a message from the peer.
:param msglen: length of the message to expect from the peer
:param maxfds: maximum number of file descriptors to expect from the peer
:return: a tuple of (message, file descriptors)
"""
class SocketListener(Listener[SocketStream], _SocketProvider):
"""
Listens to incoming socket connections.
Supports all relevant extra attributes from :class:`~SocketAttribute`.
"""
@abstractmethod
async def accept(self) -> SocketStream:
"""Accept an incoming connection."""
async def serve(
self, handler: Callable[[T_Stream], Any], task_group: Optional[TaskGroup] = None
) -> None:
from .. import create_task_group
context_manager: AsyncContextManager
if task_group is None:
task_group = context_manager = create_task_group()
else:
# Can be replaced with AsyncExitStack once on py3.7+
context_manager = _NullAsyncContextManager()
async with context_manager:
while True:
stream = await self.accept()
task_group.start_soon(handler, stream)
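# Illustrative sketch (not part of the vendored source): serving connections
# with the default serve() implementation above. anyio.create_tcp_listener()
# returns a listener with the same serve() semantics; the port is a
# placeholder.
async def _example_tcp_echo_server() -> None:
    from zdppy_api import anyio

    async def handle(stream: SocketStream) -> None:
        async with stream:
            data = await stream.receive()
            await stream.send(data)  # echo a single chunk back

    listener = await anyio.create_tcp_listener(local_port=8000)
    await listener.serve(handle)  # runs until cancelled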
class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider):
"""
Represents an unconnected UDP socket.
Supports all relevant extra attributes from :class:`~SocketAttribute`.
"""
async def sendto(self, data: bytes, host: str, port: int) -> None:
"""Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port)))."""
return await self.send((data, (host, port)))
class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider):
"""
    Represents a connected UDP socket.
Supports all relevant extra attributes from :class:`~SocketAttribute`.
""" | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/abc/_sockets.py | _sockets.py |
from abc import abstractmethod
from signal import Signals
from typing import Optional
from ._resources import AsyncResource
from ._streams import ByteReceiveStream, ByteSendStream
class Process(AsyncResource):
"""An asynchronous version of :class:`subprocess.Popen`."""
@abstractmethod
async def wait(self) -> int:
"""
Wait until the process exits.
:return: the exit code of the process
"""
@abstractmethod
def terminate(self) -> None:
"""
Terminates the process, gracefully if possible.
On Windows, this calls ``TerminateProcess()``.
On POSIX systems, this sends ``SIGTERM`` to the process.
.. seealso:: :meth:`subprocess.Popen.terminate`
"""
@abstractmethod
def kill(self) -> None:
"""
Kills the process.
On Windows, this calls ``TerminateProcess()``.
On POSIX systems, this sends ``SIGKILL`` to the process.
.. seealso:: :meth:`subprocess.Popen.kill`
"""
@abstractmethod
def send_signal(self, signal: Signals) -> None:
"""
Send a signal to the subprocess.
.. seealso:: :meth:`subprocess.Popen.send_signal`
:param signal: the signal number (e.g. :data:`signal.SIGHUP`)
"""
@property
@abstractmethod
def pid(self) -> int:
"""The process ID of the process."""
@property
@abstractmethod
def returncode(self) -> Optional[int]:
"""
The return code of the process. If the process has not yet terminated, this will be
``None``.
"""
@property
@abstractmethod
def stdin(self) -> Optional[ByteSendStream]:
"""The stream for the standard input of the process."""
@property
@abstractmethod
def stdout(self) -> Optional[ByteReceiveStream]:
"""The stream for the standard output of the process."""
@property
@abstractmethod
def stderr(self) -> Optional[ByteReceiveStream]:
"""The stream for the standard error output of the process.""" | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/abc/_subprocesses.py | _subprocesses.py |
__all__ = (
"AsyncResource",
"IPAddressType",
"IPSockAddrType",
"SocketAttribute",
"SocketStream",
"SocketListener",
"UDPSocket",
"UNIXSocketStream",
"UDPPacketType",
"ConnectedUDPSocket",
"UnreliableObjectReceiveStream",
"UnreliableObjectSendStream",
"UnreliableObjectStream",
"ObjectReceiveStream",
"ObjectSendStream",
"ObjectStream",
"ByteReceiveStream",
"ByteSendStream",
"ByteStream",
"AnyUnreliableByteReceiveStream",
"AnyUnreliableByteSendStream",
"AnyUnreliableByteStream",
"AnyByteReceiveStream",
"AnyByteSendStream",
"AnyByteStream",
"Listener",
"Process",
"Event",
"Condition",
"Lock",
"Semaphore",
"CapacityLimiter",
"CancelScope",
"TaskGroup",
"TaskStatus",
"TestRunner",
"BlockingPortal",
)
from typing import Any
from ._resources import AsyncResource
from ._sockets import (
ConnectedUDPSocket,
IPAddressType,
IPSockAddrType,
SocketAttribute,
SocketListener,
SocketStream,
UDPPacketType,
UDPSocket,
UNIXSocketStream,
)
from ._streams import (
AnyByteReceiveStream,
AnyByteSendStream,
AnyByteStream,
AnyUnreliableByteReceiveStream,
AnyUnreliableByteSendStream,
AnyUnreliableByteStream,
ByteReceiveStream,
ByteSendStream,
ByteStream,
Listener,
ObjectReceiveStream,
ObjectSendStream,
ObjectStream,
UnreliableObjectReceiveStream,
UnreliableObjectSendStream,
UnreliableObjectStream,
)
from ._subprocesses import Process
from ._tasks import TaskGroup, TaskStatus
from ._testing import TestRunner
# Re-exported here, for backwards compatibility
# isort: off
from .._core._synchronization import CapacityLimiter, Condition, Event, Lock, Semaphore
from .._core._tasks import CancelScope
from ..from_thread import BlockingPortal
# Re-export imports so they look like they live directly in this package
key: str
value: Any
for key, value in list(locals().items()):
if getattr(value, "__module__", "").startswith("anyio.abc."):
        value.__module__ = __name__

# ---- end of file: zdppy_api/anyio/abc/__init__.py ----
from collections import OrderedDict, deque
from dataclasses import dataclass, field
from types import TracebackType
from typing import Deque, Generic, List, NamedTuple, Optional, Type, TypeVar
from .. import (
BrokenResourceError,
ClosedResourceError,
EndOfStream,
WouldBlock,
get_cancelled_exc_class,
)
from .._core._compat import DeprecatedAwaitable
from ..abc import Event, ObjectReceiveStream, ObjectSendStream
from ..lowlevel import checkpoint
T_Item = TypeVar("T_Item")
class MemoryObjectStreamStatistics(NamedTuple):
current_buffer_used: int #: number of items stored in the buffer
#: maximum number of items that can be stored on this stream (or :data:`math.inf`)
max_buffer_size: float
open_send_streams: int #: number of unclosed clones of the send stream
open_receive_streams: int #: number of unclosed clones of the receive stream
tasks_waiting_send: int #: number of tasks blocked on :meth:`MemoryObjectSendStream.send`
#: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive`
tasks_waiting_receive: int
@dataclass(eq=False)
class MemoryObjectStreamState(Generic[T_Item]):
max_buffer_size: float = field()
buffer: Deque[T_Item] = field(init=False, default_factory=deque)
open_send_channels: int = field(init=False, default=0)
open_receive_channels: int = field(init=False, default=0)
waiting_receivers: "OrderedDict[Event, List[T_Item]]" = field(
init=False, default_factory=OrderedDict
)
waiting_senders: "OrderedDict[Event, T_Item]" = field(
init=False, default_factory=OrderedDict
)
def statistics(self) -> MemoryObjectStreamStatistics:
return MemoryObjectStreamStatistics(
len(self.buffer),
self.max_buffer_size,
self.open_send_channels,
self.open_receive_channels,
len(self.waiting_senders),
len(self.waiting_receivers),
)
@dataclass(eq=False)
class MemoryObjectReceiveStream(Generic[T_Item], ObjectReceiveStream[T_Item]):
_state: MemoryObjectStreamState[T_Item]
_closed: bool = field(init=False, default=False)
def __post_init__(self) -> None:
self._state.open_receive_channels += 1
def receive_nowait(self) -> T_Item:
"""
Receive the next item if it can be done without waiting.
:return: the received item
        :raises ~anyio.ClosedResourceError: if this receive stream has been closed
:raises ~anyio.EndOfStream: if the buffer is empty and this stream has been
closed from the sending end
:raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks
waiting to send
"""
if self._closed:
raise ClosedResourceError
if self._state.waiting_senders:
# Get the item from the next sender
send_event, item = self._state.waiting_senders.popitem(last=False)
self._state.buffer.append(item)
send_event.set()
if self._state.buffer:
return self._state.buffer.popleft()
elif not self._state.open_send_channels:
raise EndOfStream
raise WouldBlock
async def receive(self) -> T_Item:
await checkpoint()
try:
return self.receive_nowait()
except WouldBlock:
# Add ourselves in the queue
receive_event = Event()
container: List[T_Item] = []
self._state.waiting_receivers[receive_event] = container
try:
await receive_event.wait()
except get_cancelled_exc_class():
# Ignore the immediate cancellation if we already received an item, so as not to
# lose it
if not container:
raise
finally:
self._state.waiting_receivers.pop(receive_event, None)
if container:
return container[0]
else:
raise EndOfStream
def clone(self) -> "MemoryObjectReceiveStream[T_Item]":
"""
Create a clone of this receive stream.
Each clone can be closed separately. Only when all clones have been closed will the
receiving end of the memory stream be considered closed by the sending ends.
:return: the cloned stream
"""
if self._closed:
raise ClosedResourceError
return MemoryObjectReceiveStream(_state=self._state)
def close(self) -> None:
"""
Close the stream.
This works the exact same way as :meth:`aclose`, but is provided as a special case for the
benefit of synchronous callbacks.
"""
if not self._closed:
self._closed = True
self._state.open_receive_channels -= 1
if self._state.open_receive_channels == 0:
send_events = list(self._state.waiting_senders.keys())
for event in send_events:
event.set()
async def aclose(self) -> None:
self.close()
def statistics(self) -> MemoryObjectStreamStatistics:
"""
Return statistics about the current state of this stream.
.. versionadded:: 3.0
"""
return self._state.statistics()
def __enter__(self) -> "MemoryObjectReceiveStream[T_Item]":
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
self.close()
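# Example (illustrative sketch): wiring a send stream and a receive stream to
# the same shared state and passing one item through. In application code the
# pair is normally obtained from the top-level anyio factory
# ``create_memory_object_stream()`` instead of being built by hand.
#
#     state: MemoryObjectStreamState[str] = MemoryObjectStreamState(max_buffer_size=1)
#     send = MemoryObjectSendStream(_state=state)
#     receive = MemoryObjectReceiveStream(_state=state)
#     async with send, receive:  # inside an async function
#         await send.send("hello")
#         assert await receive.receive() == "hello"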
@dataclass(eq=False)
class MemoryObjectSendStream(Generic[T_Item], ObjectSendStream[T_Item]):
_state: MemoryObjectStreamState[T_Item]
_closed: bool = field(init=False, default=False)
def __post_init__(self) -> None:
self._state.open_send_channels += 1
def send_nowait(self, item: T_Item) -> DeprecatedAwaitable:
"""
Send an item immediately if it can be done without waiting.
:param item: the item to send
:raises ~anyio.ClosedResourceError: if this send stream has been closed
:raises ~anyio.BrokenResourceError: if the stream has been closed from the
receiving end
:raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting
to receive
"""
if self._closed:
raise ClosedResourceError
if not self._state.open_receive_channels:
raise BrokenResourceError
if self._state.waiting_receivers:
receive_event, container = self._state.waiting_receivers.popitem(last=False)
container.append(item)
receive_event.set()
elif len(self._state.buffer) < self._state.max_buffer_size:
self._state.buffer.append(item)
else:
raise WouldBlock
return DeprecatedAwaitable(self.send_nowait)
async def send(self, item: T_Item) -> None:
await checkpoint()
try:
self.send_nowait(item)
except WouldBlock:
# Wait until there's someone on the receiving end
send_event = Event()
self._state.waiting_senders[send_event] = item
try:
await send_event.wait()
except BaseException:
self._state.waiting_senders.pop(send_event, None) # type: ignore[arg-type]
raise
if self._state.waiting_senders.pop(send_event, None): # type: ignore[arg-type]
raise BrokenResourceError
def clone(self) -> "MemoryObjectSendStream[T_Item]":
"""
Create a clone of this send stream.
Each clone can be closed separately. Only when all clones have been closed will the
sending end of the memory stream be considered closed by the receiving ends.
:return: the cloned stream
"""
if self._closed:
raise ClosedResourceError
return MemoryObjectSendStream(_state=self._state)
def close(self) -> None:
"""
Close the stream.
This works the exact same way as :meth:`aclose`, but is provided as a special case for the
benefit of synchronous callbacks.
"""
if not self._closed:
self._closed = True
self._state.open_send_channels -= 1
if self._state.open_send_channels == 0:
receive_events = list(self._state.waiting_receivers.keys())
self._state.waiting_receivers.clear()
for event in receive_events:
event.set()
async def aclose(self) -> None:
self.close()
def statistics(self) -> MemoryObjectStreamStatistics:
"""
Return statistics about the current state of this stream.
.. versionadded:: 3.0
"""
return self._state.statistics()
def __enter__(self) -> "MemoryObjectSendStream[T_Item]":
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
self.close() | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/streams/memory.py | memory.py |
from io import SEEK_SET, UnsupportedOperation
from os import PathLike
from pathlib import Path
from typing import Any, BinaryIO, Callable, Dict, Mapping, Union, cast
from .. import (
BrokenResourceError,
ClosedResourceError,
EndOfStream,
TypedAttributeSet,
to_thread,
typed_attribute,
)
from ..abc import ByteReceiveStream, ByteSendStream
class FileStreamAttribute(TypedAttributeSet):
#: the open file descriptor
file: BinaryIO = typed_attribute()
#: the path of the file on the file system, if available (file must be a real file)
path: Path = typed_attribute()
#: the file number, if available (file must be a real file or a TTY)
fileno: int = typed_attribute()
class _BaseFileStream:
def __init__(self, file: BinaryIO):
self._file = file
async def aclose(self) -> None:
await to_thread.run_sync(self._file.close)
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
attributes: Dict[Any, Callable[[], Any]] = {
FileStreamAttribute.file: lambda: self._file,
}
if hasattr(self._file, "name"):
attributes[FileStreamAttribute.path] = lambda: Path(self._file.name)
try:
self._file.fileno()
except UnsupportedOperation:
pass
else:
attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno()
return attributes
class FileReadStream(_BaseFileStream, ByteReceiveStream):
"""
A byte stream that reads from a file in the file system.
:param file: a file that has been opened for reading in binary mode
.. versionadded:: 3.0
"""
@classmethod
async def from_path(cls, path: Union[str, "PathLike[str]"]) -> "FileReadStream":
"""
Create a file read stream by opening the given file.
:param path: path of the file to read from
"""
file = await to_thread.run_sync(Path(path).open, "rb")
return cls(cast(BinaryIO, file))
async def receive(self, max_bytes: int = 65536) -> bytes:
try:
data = await to_thread.run_sync(self._file.read, max_bytes)
except ValueError:
raise ClosedResourceError from None
except OSError as exc:
raise BrokenResourceError from exc
if data:
return data
else:
raise EndOfStream
async def seek(self, position: int, whence: int = SEEK_SET) -> int:
"""
Seek the file to the given position.
.. seealso:: :meth:`io.IOBase.seek`
.. note:: Not all file descriptors are seekable.
:param position: position to seek the file to
:param whence: controls how ``position`` is interpreted
:return: the new absolute position
:raises OSError: if the file is not seekable
"""
return await to_thread.run_sync(self._file.seek, position, whence)
async def tell(self) -> int:
"""
Return the current stream position.
.. note:: Not all file descriptors are seekable.
:return: the current absolute position
:raises OSError: if the file is not seekable
"""
return await to_thread.run_sync(self._file.tell)
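# Example (illustrative sketch): streaming a file's contents chunk by chunk.
# The path below is hypothetical; byte receive streams support ``async for``.
#
#     async with await FileReadStream.from_path("/tmp/example.bin") as stream:
#         async for chunk in stream:
#             print(f"read {len(chunk)} bytes")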
class FileWriteStream(_BaseFileStream, ByteSendStream):
"""
A byte stream that writes to a file in the file system.
:param file: a file that has been opened for writing in binary mode
.. versionadded:: 3.0
"""
@classmethod
async def from_path(
cls, path: Union[str, "PathLike[str]"], append: bool = False
) -> "FileWriteStream":
"""
Create a file write stream by opening the given file for writing.
:param path: path of the file to write to
:param append: if ``True``, open the file for appending; if ``False``, any existing file
at the given path will be truncated
"""
mode = "ab" if append else "wb"
file = await to_thread.run_sync(Path(path).open, mode)
return cls(cast(BinaryIO, file))
async def send(self, item: bytes) -> None:
try:
await to_thread.run_sync(self._file.write, item)
except ValueError:
raise ClosedResourceError from None
except OSError as exc:
raise BrokenResourceError from exc | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/streams/file.py | file.py |
from dataclasses import dataclass, field
from typing import Any, Callable, Mapping
from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead
from ..abc import AnyByteReceiveStream, ByteReceiveStream
@dataclass(eq=False)
class BufferedByteReceiveStream(ByteReceiveStream):
"""
Wraps any bytes-based receive stream and uses a buffer to provide sophisticated receiving
capabilities in the form of a byte stream.
"""
receive_stream: AnyByteReceiveStream
_buffer: bytearray = field(init=False, default_factory=bytearray)
_closed: bool = field(init=False, default=False)
async def aclose(self) -> None:
await self.receive_stream.aclose()
self._closed = True
@property
def buffer(self) -> bytes:
"""The bytes currently in the buffer."""
return bytes(self._buffer)
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return self.receive_stream.extra_attributes
async def receive(self, max_bytes: int = 65536) -> bytes:
if self._closed:
raise ClosedResourceError
if self._buffer:
chunk = bytes(self._buffer[:max_bytes])
del self._buffer[:max_bytes]
return chunk
elif isinstance(self.receive_stream, ByteReceiveStream):
return await self.receive_stream.receive(max_bytes)
else:
# With a bytes-oriented object stream, we need to handle any surplus bytes we get from
# the receive() call
chunk = await self.receive_stream.receive()
if len(chunk) > max_bytes:
# Save the surplus bytes in the buffer
self._buffer.extend(chunk[max_bytes:])
return chunk[:max_bytes]
else:
return chunk
async def receive_exactly(self, nbytes: int) -> bytes:
"""
Read exactly the given amount of bytes from the stream.
:param nbytes: the number of bytes to read
:return: the bytes read
:raises ~anyio.IncompleteRead: if the stream was closed before the requested
amount of bytes could be read from the stream
"""
while True:
remaining = nbytes - len(self._buffer)
if remaining <= 0:
retval = self._buffer[:nbytes]
del self._buffer[:nbytes]
return bytes(retval)
try:
if isinstance(self.receive_stream, ByteReceiveStream):
chunk = await self.receive_stream.receive(remaining)
else:
chunk = await self.receive_stream.receive()
except EndOfStream as exc:
raise IncompleteRead from exc
self._buffer.extend(chunk)
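    # Example (illustrative sketch): reading a length-prefixed record. The
    # 2-byte big-endian length header and ``some_byte_stream`` are assumed
    # purely for illustration.
    #
    #     buffered = BufferedByteReceiveStream(some_byte_stream)
    #     header = await buffered.receive_exactly(2)
    #     body = await buffered.receive_exactly(int.from_bytes(header, "big"))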
async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes:
"""
Read from the stream until the delimiter is found or max_bytes have been read.
:param delimiter: the marker to look for in the stream
:param max_bytes: maximum number of bytes that will be read before raising
:exc:`~anyio.DelimiterNotFound`
:return: the bytes read (not including the delimiter)
:raises ~anyio.IncompleteRead: if the stream was closed before the delimiter
was found
:raises ~anyio.DelimiterNotFound: if the delimiter is not found within the
bytes read up to the maximum allowed
"""
delimiter_size = len(delimiter)
offset = 0
while True:
# Check if the delimiter can be found in the current buffer
index = self._buffer.find(delimiter, offset)
if index >= 0:
found = self._buffer[:index]
                del self._buffer[: index + len(delimiter)]
return bytes(found)
# Check if the buffer is already at or over the limit
if len(self._buffer) >= max_bytes:
raise DelimiterNotFound(max_bytes)
# Read more data into the buffer from the socket
try:
data = await self.receive_stream.receive()
except EndOfStream as exc:
raise IncompleteRead from exc
# Move the offset forward and add the new data to the buffer
offset = max(len(self._buffer) - delimiter_size + 1, 0)
self._buffer.extend(data) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/streams/buffered.py | buffered.py |
import logging
import re
import ssl
from dataclasses import dataclass
from functools import wraps
from typing import Any, Callable, Dict, List, Mapping, Optional, Tuple, TypeVar, Union
from .. import (
BrokenResourceError,
EndOfStream,
aclose_forcefully,
get_cancelled_exc_class,
)
from .._core._typedattr import TypedAttributeSet, typed_attribute
from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup
T_Retval = TypeVar("T_Retval")
_PCTRTT = Tuple[Tuple[str, str], ...]
_PCTRTTT = Tuple[_PCTRTT, ...]
class TLSAttribute(TypedAttributeSet):
"""Contains Transport Layer Security related attributes."""
#: the selected ALPN protocol
alpn_protocol: Optional[str] = typed_attribute()
#: the channel binding for type ``tls-unique``
channel_binding_tls_unique: bytes = typed_attribute()
#: the selected cipher
cipher: Tuple[str, str, int] = typed_attribute()
#: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` for more
#: information)
peer_certificate: Optional[
Dict[str, Union[str, _PCTRTTT, _PCTRTT]]
] = typed_attribute()
#: the peer certificate in binary form
peer_certificate_binary: Optional[bytes] = typed_attribute()
#: ``True`` if this is the server side of the connection
server_side: bool = typed_attribute()
#: ciphers shared between both ends of the TLS connection
shared_ciphers: List[Tuple[str, str, int]] = typed_attribute()
#: the :class:`~ssl.SSLObject` used for encryption
ssl_object: ssl.SSLObject = typed_attribute()
#: ``True`` if this stream does (and expects) a closing TLS handshake when the stream is being
#: closed
standard_compatible: bool = typed_attribute()
#: the TLS protocol version (e.g. ``TLSv1.2``)
tls_version: str = typed_attribute()
@dataclass(eq=False)
class TLSStream(ByteStream):
"""
A stream wrapper that encrypts all sent data and decrypts received data.
This class has no public initializer; use :meth:`wrap` instead.
All extra attributes from :class:`~TLSAttribute` are supported.
:var AnyByteStream transport_stream: the wrapped stream
"""
transport_stream: AnyByteStream
standard_compatible: bool
_ssl_object: ssl.SSLObject
_read_bio: ssl.MemoryBIO
_write_bio: ssl.MemoryBIO
@classmethod
async def wrap(
cls,
transport_stream: AnyByteStream,
*,
server_side: Optional[bool] = None,
hostname: Optional[str] = None,
ssl_context: Optional[ssl.SSLContext] = None,
standard_compatible: bool = True,
) -> "TLSStream":
"""
Wrap an existing stream with Transport Layer Security.
This performs a TLS handshake with the peer.
:param transport_stream: a bytes-transporting stream to wrap
:param server_side: ``True`` if this is the server side of the connection, ``False`` if
this is the client side (if omitted, will be set to ``False`` if ``hostname`` has been
provided, ``False`` otherwise). Used only to create a default context when an explicit
context has not been provided.
:param hostname: host name of the peer (if host name checking is desired)
:param ssl_context: the SSLContext object to use (if not provided, a secure default will be
created)
:param standard_compatible: if ``False``, skip the closing handshake when closing the
connection, and don't raise an exception if the peer does the same
:raises ~ssl.SSLError: if the TLS handshake fails
"""
if server_side is None:
server_side = not hostname
if not ssl_context:
purpose = (
ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH
)
ssl_context = ssl.create_default_context(purpose)
            # Re-enable detection of unexpected EOFs if it was disabled by Python
            if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"):
                # Clear the flag with AND-NOT; XOR would set it again if it was already clear
                ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF  # type: ignore[attr-defined]
bio_in = ssl.MemoryBIO()
bio_out = ssl.MemoryBIO()
ssl_object = ssl_context.wrap_bio(
bio_in, bio_out, server_side=server_side, server_hostname=hostname
)
wrapper = cls(
transport_stream=transport_stream,
standard_compatible=standard_compatible,
_ssl_object=ssl_object,
_read_bio=bio_in,
_write_bio=bio_out,
)
await wrapper._call_sslobject_method(ssl_object.do_handshake)
return wrapper
async def _call_sslobject_method(
self, func: Callable[..., T_Retval], *args: object
) -> T_Retval:
while True:
try:
result = func(*args)
except ssl.SSLWantReadError:
try:
# Flush any pending writes first
if self._write_bio.pending:
await self.transport_stream.send(self._write_bio.read())
data = await self.transport_stream.receive()
except EndOfStream:
self._read_bio.write_eof()
except OSError as exc:
self._read_bio.write_eof()
self._write_bio.write_eof()
raise BrokenResourceError from exc
else:
self._read_bio.write(data)
except ssl.SSLWantWriteError:
await self.transport_stream.send(self._write_bio.read())
except ssl.SSLSyscallError as exc:
self._read_bio.write_eof()
self._write_bio.write_eof()
raise BrokenResourceError from exc
except ssl.SSLError as exc:
self._read_bio.write_eof()
self._write_bio.write_eof()
                if (
                    isinstance(exc, ssl.SSLEOFError)
                    or "UNEXPECTED_EOF_WHILE_READING" in (exc.strerror or "")
                ):
if self.standard_compatible:
raise BrokenResourceError from exc
else:
raise EndOfStream from None
raise
else:
# Flush any pending writes first
if self._write_bio.pending:
await self.transport_stream.send(self._write_bio.read())
return result
async def unwrap(self) -> Tuple[AnyByteStream, bytes]:
"""
Does the TLS closing handshake.
:return: a tuple of (wrapped byte stream, bytes left in the read buffer)
"""
await self._call_sslobject_method(self._ssl_object.unwrap)
self._read_bio.write_eof()
self._write_bio.write_eof()
return self.transport_stream, self._read_bio.read()
async def aclose(self) -> None:
if self.standard_compatible:
try:
await self.unwrap()
except BaseException:
await aclose_forcefully(self.transport_stream)
raise
await self.transport_stream.aclose()
async def receive(self, max_bytes: int = 65536) -> bytes:
data = await self._call_sslobject_method(self._ssl_object.read, max_bytes)
if not data:
raise EndOfStream
return data
async def send(self, item: bytes) -> None:
await self._call_sslobject_method(self._ssl_object.write, item)
async def send_eof(self) -> None:
tls_version = self.extra(TLSAttribute.tls_version)
match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version)
if match:
major, minor = int(match.group(1)), int(match.group(2) or 0)
if (major, minor) < (1, 3):
raise NotImplementedError(
f"send_eof() requires at least TLSv1.3; current "
f"session uses {tls_version}"
)
raise NotImplementedError(
"send_eof() has not yet been implemented for TLS streams"
)
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return {
**self.transport_stream.extra_attributes,
TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol,
TLSAttribute.channel_binding_tls_unique: self._ssl_object.get_channel_binding,
TLSAttribute.cipher: self._ssl_object.cipher,
TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False),
TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert(
True
),
TLSAttribute.server_side: lambda: self._ssl_object.server_side,
TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers(),
TLSAttribute.standard_compatible: lambda: self.standard_compatible,
TLSAttribute.ssl_object: lambda: self._ssl_object,
TLSAttribute.tls_version: self._ssl_object.version,
}
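# Example (illustrative sketch): upgrading an established TCP stream to TLS on
# the client side. ``anyio.connect_tcp`` is the stream factory from the
# top-level package; the host name is hypothetical.
#
#     import anyio
#
#     async def main() -> None:
#         tcp_stream = await anyio.connect_tcp("example.com", 443)
#         tls_stream = await TLSStream.wrap(tcp_stream, hostname="example.com")
#         await tls_stream.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
#         print(await tls_stream.receive())
#         await tls_stream.aclose()
#
#     anyio.run(main)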
@dataclass(eq=False)
class TLSListener(Listener[TLSStream]):
"""
A convenience listener that wraps another listener and auto-negotiates a TLS session on every
accepted connection.
If the TLS handshake times out or raises an exception, :meth:`handle_handshake_error` is
called to do whatever post-mortem processing is deemed necessary.
Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute.
:param Listener listener: the listener to wrap
:param ssl_context: the SSL context object
:param standard_compatible: a flag passed through to :meth:`TLSStream.wrap`
:param handshake_timeout: time limit for the TLS handshake
(passed to :func:`~anyio.fail_after`)
"""
listener: Listener[Any]
ssl_context: ssl.SSLContext
standard_compatible: bool = True
handshake_timeout: float = 30
@staticmethod
async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None:
f"""
Handle an exception raised during the TLS handshake.
This method does 3 things:
#. Forcefully closes the original stream
#. Logs the exception (unless it was a cancellation exception) using the ``{__name__}``
logger
#. Reraises the exception if it was a base exception or a cancellation exception
:param exc: the exception
:param stream: the original stream
"""
await aclose_forcefully(stream)
# Log all except cancellation exceptions
if not isinstance(exc, get_cancelled_exc_class()):
logging.getLogger(__name__).exception("Error during TLS handshake")
# Only reraise base exceptions and cancellation exceptions
if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()):
raise
async def serve(
self,
handler: Callable[[TLSStream], Any],
task_group: Optional[TaskGroup] = None,
) -> None:
@wraps(handler)
async def handler_wrapper(stream: AnyByteStream) -> None:
from .. import fail_after
try:
with fail_after(self.handshake_timeout):
wrapped_stream = await TLSStream.wrap(
stream,
ssl_context=self.ssl_context,
standard_compatible=self.standard_compatible,
)
except BaseException as exc:
await self.handle_handshake_error(exc, stream)
else:
await handler(wrapped_stream)
await self.listener.serve(handler_wrapper, task_group)
async def aclose(self) -> None:
await self.listener.aclose()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return {
TLSAttribute.standard_compatible: lambda: self.standard_compatible,
} | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/streams/tls.py | tls.py |
import codecs
from dataclasses import InitVar, dataclass, field
from typing import Any, Callable, Mapping, Tuple
from ..abc import (
AnyByteReceiveStream,
AnyByteSendStream,
AnyByteStream,
ObjectReceiveStream,
ObjectSendStream,
ObjectStream,
)
@dataclass(eq=False)
class TextReceiveStream(ObjectReceiveStream[str]):
"""
Stream wrapper that decodes bytes to strings using the given encoding.
Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any completely
received unicode characters as soon as they come in.
:param transport_stream: any bytes-based receive stream
:param encoding: character encoding to use for decoding bytes to strings (defaults to
``utf-8``)
:param errors: handling scheme for decoding errors (defaults to ``strict``; see the
`codecs module documentation`_ for a comprehensive list of options)
.. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
"""
transport_stream: AnyByteReceiveStream
encoding: InitVar[str] = "utf-8"
errors: InitVar[str] = "strict"
_decoder: codecs.IncrementalDecoder = field(init=False)
def __post_init__(self, encoding: str, errors: str) -> None:
decoder_class = codecs.getincrementaldecoder(encoding)
self._decoder = decoder_class(errors=errors)
async def receive(self) -> str:
while True:
chunk = await self.transport_stream.receive()
decoded = self._decoder.decode(chunk)
if decoded:
return decoded
async def aclose(self) -> None:
await self.transport_stream.aclose()
self._decoder.reset()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return self.transport_stream.extra_attributes
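# Example (illustrative sketch): decoding incoming bytes as UTF-8 text.
# ``some_byte_stream`` stands in for any bytes-based receive stream; object
# receive streams support ``async for`` iteration.
#
#     text_stream = TextReceiveStream(some_byte_stream)
#     async for chunk in text_stream:  # inside an async function
#         print(chunk)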
@dataclass(eq=False)
class TextSendStream(ObjectSendStream[str]):
"""
Sends strings to the wrapped stream as bytes using the given encoding.
:param AnyByteSendStream transport_stream: any bytes-based send stream
:param str encoding: character encoding to use for encoding strings to bytes (defaults to
``utf-8``)
:param str errors: handling scheme for encoding errors (defaults to ``strict``; see the
`codecs module documentation`_ for a comprehensive list of options)
.. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
"""
transport_stream: AnyByteSendStream
encoding: InitVar[str] = "utf-8"
errors: str = "strict"
_encoder: Callable[..., Tuple[bytes, int]] = field(init=False)
def __post_init__(self, encoding: str) -> None:
self._encoder = codecs.getencoder(encoding)
async def send(self, item: str) -> None:
encoded = self._encoder(item, self.errors)[0]
await self.transport_stream.send(encoded)
async def aclose(self) -> None:
await self.transport_stream.aclose()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return self.transport_stream.extra_attributes
@dataclass(eq=False)
class TextStream(ObjectStream[str]):
"""
A bidirectional stream that decodes bytes to strings on receive and encodes strings to bytes on
send.
Extra attributes will be provided from both streams, with the receive stream providing the
values in case of a conflict.
:param AnyByteStream transport_stream: any bytes-based stream
:param str encoding: character encoding to use for encoding/decoding strings to/from bytes
(defaults to ``utf-8``)
:param str errors: handling scheme for encoding errors (defaults to ``strict``; see the
`codecs module documentation`_ for a comprehensive list of options)
.. _codecs module documentation: https://docs.python.org/3/library/codecs.html#codec-objects
"""
transport_stream: AnyByteStream
encoding: InitVar[str] = "utf-8"
errors: InitVar[str] = "strict"
_receive_stream: TextReceiveStream = field(init=False)
_send_stream: TextSendStream = field(init=False)
def __post_init__(self, encoding: str, errors: str) -> None:
self._receive_stream = TextReceiveStream(
self.transport_stream, encoding=encoding, errors=errors
)
self._send_stream = TextSendStream(
self.transport_stream, encoding=encoding, errors=errors
)
async def receive(self) -> str:
return await self._receive_stream.receive()
async def send(self, item: str) -> None:
await self._send_stream.send(item)
async def send_eof(self) -> None:
await self.transport_stream.send_eof()
async def aclose(self) -> None:
await self._send_stream.aclose()
await self._receive_stream.aclose()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return {
**self._send_stream.extra_attributes,
**self._receive_stream.extra_attributes,
} | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/streams/text.py | text.py |
from dataclasses import dataclass
from typing import Any, Callable, Generic, List, Mapping, Optional, Sequence, TypeVar
from ..abc import (
ByteReceiveStream,
ByteSendStream,
ByteStream,
Listener,
ObjectReceiveStream,
ObjectSendStream,
ObjectStream,
TaskGroup,
)
T_Item = TypeVar("T_Item")
T_Stream = TypeVar("T_Stream")
@dataclass(eq=False)
class StapledByteStream(ByteStream):
"""
Combines two byte streams into a single, bidirectional byte stream.
Extra attributes will be provided from both streams, with the receive stream providing the
values in case of a conflict.
:param ByteSendStream send_stream: the sending byte stream
:param ByteReceiveStream receive_stream: the receiving byte stream
"""
send_stream: ByteSendStream
receive_stream: ByteReceiveStream
async def receive(self, max_bytes: int = 65536) -> bytes:
return await self.receive_stream.receive(max_bytes)
async def send(self, item: bytes) -> None:
await self.send_stream.send(item)
async def send_eof(self) -> None:
await self.send_stream.aclose()
async def aclose(self) -> None:
await self.send_stream.aclose()
await self.receive_stream.aclose()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return {
**self.send_stream.extra_attributes,
**self.receive_stream.extra_attributes,
}
@dataclass(eq=False)
class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]):
"""
Combines two object streams into a single, bidirectional object stream.
Extra attributes will be provided from both streams, with the receive stream providing the
values in case of a conflict.
:param ObjectSendStream send_stream: the sending object stream
:param ObjectReceiveStream receive_stream: the receiving object stream
"""
send_stream: ObjectSendStream[T_Item]
receive_stream: ObjectReceiveStream[T_Item]
async def receive(self) -> T_Item:
return await self.receive_stream.receive()
async def send(self, item: T_Item) -> None:
await self.send_stream.send(item)
async def send_eof(self) -> None:
await self.send_stream.aclose()
async def aclose(self) -> None:
await self.send_stream.aclose()
await self.receive_stream.aclose()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
return {
**self.send_stream.extra_attributes,
**self.receive_stream.extra_attributes,
}
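# Example (illustrative sketch): stapling the two halves of a memory object
# stream into one bidirectional stream (a simple loopback).
# ``create_memory_object_stream`` is the factory from the top-level anyio package.
#
#     import anyio
#
#     send, receive = anyio.create_memory_object_stream(max_buffer_size=10)
#     stapled = StapledObjectStream(send_stream=send, receive_stream=receive)
#     await stapled.send("ping")  # inside an async function
#     assert await stapled.receive() == "ping"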
@dataclass(eq=False)
class MultiListener(Generic[T_Stream], Listener[T_Stream]):
"""
Combines multiple listeners into one, serving connections from all of them at once.
Any MultiListeners in the given collection of listeners will have their listeners moved into
this one.
Extra attributes are provided from each listener, with each successive listener overriding any
conflicting attributes from the previous one.
:param listeners: listeners to serve
:type listeners: Sequence[Listener[T_Stream]]
"""
listeners: Sequence[Listener[T_Stream]]
def __post_init__(self) -> None:
listeners: List[Listener[T_Stream]] = []
for listener in self.listeners:
if isinstance(listener, MultiListener):
listeners.extend(listener.listeners)
del listener.listeners[:] # type: ignore[attr-defined]
else:
listeners.append(listener)
self.listeners = listeners
async def serve(
self, handler: Callable[[T_Stream], Any], task_group: Optional[TaskGroup] = None
) -> None:
from .. import create_task_group
async with create_task_group() as tg:
for listener in self.listeners:
tg.start_soon(listener.serve, handler, task_group)
async def aclose(self) -> None:
for listener in self.listeners:
await listener.aclose()
@property
def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]:
attributes: dict = {}
for listener in self.listeners:
attributes.update(listener.extra_attributes)
return attributes | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/anyio/streams/stapled.py | stapled.py |
import dataclasses
import inspect
from contextlib import contextmanager
from copy import deepcopy
from typing import (
Any,
Callable,
Coroutine,
Dict,
List,
Mapping,
Optional,
Sequence,
Tuple,
Type,
Union,
cast,
)
from zdppy_api import anyio
from .. import params
from ..concurrency import (
AsyncExitStack,
asynccontextmanager,
contextmanager_in_threadpool,
)
from ..dependencies.models import Dependant, SecurityRequirement
from ..logger import logger
from ..security.base import SecurityBase
from ..security.oauth2 import OAuth2, SecurityScopes
from ..security.open_id_connect_url import OpenIdConnect
from ..utils import create_response_field, get_path_param_names
from pydantic import BaseModel, create_model
from pydantic.error_wrappers import ErrorWrapper
from pydantic.errors import MissingError
from pydantic.fields import (
SHAPE_LIST,
SHAPE_SEQUENCE,
SHAPE_SET,
SHAPE_SINGLETON,
SHAPE_TUPLE,
SHAPE_TUPLE_ELLIPSIS,
FieldInfo,
ModelField,
Required,
Undefined,
)
from pydantic.schema import get_annotation_from_field_info
from pydantic.typing import ForwardRef, evaluate_forwardref
from pydantic.utils import lenient_issubclass
from zdppy_api.starlette.background import BackgroundTasks
from zdppy_api.starlette.concurrency import run_in_threadpool
from zdppy_api.starlette.datastructures import FormData, Headers, QueryParams, UploadFile
from zdppy_api.starlette.requests import HTTPConnection, Request
from zdppy_api.starlette.responses import Response
from zdppy_api.starlette.websockets import WebSocket
sequence_shapes = {
SHAPE_LIST,
SHAPE_SET,
SHAPE_TUPLE,
SHAPE_SEQUENCE,
SHAPE_TUPLE_ELLIPSIS,
}
sequence_types = (list, set, tuple)
sequence_shape_to_type = {
SHAPE_LIST: list,
SHAPE_SET: set,
SHAPE_TUPLE: tuple,
SHAPE_SEQUENCE: list,
SHAPE_TUPLE_ELLIPSIS: list,
}
multipart_not_installed_error = (
'Form data requires "python-multipart" to be installed. \n'
'You can install "python-multipart" with: \n\n'
"pip install python-multipart\n"
)
multipart_incorrect_install_error = (
'Form data requires "python-multipart" to be installed. '
'It seems you installed "multipart" instead. \n'
'You can remove "multipart" with: \n\n'
"pip uninstall multipart\n\n"
'And then install "python-multipart" with: \n\n'
"pip install python-multipart\n"
)
def check_file_field(field: ModelField) -> None:
field_info = field.field_info
if isinstance(field_info, params.Form):
try:
# __version__ is available in both multiparts, and can be mocked
from multipart import __version__ # type: ignore
assert __version__
try:
# parse_options_header is only available in the right multipart
from multipart.multipart import parse_options_header # type: ignore
assert parse_options_header
except ImportError:
logger.error(multipart_incorrect_install_error)
raise RuntimeError(multipart_incorrect_install_error)
except ImportError:
logger.error(multipart_not_installed_error)
raise RuntimeError(multipart_not_installed_error)
def get_param_sub_dependant(
*, param: inspect.Parameter, path: str, security_scopes: Optional[List[str]] = None
) -> Dependant:
depends: params.Depends = param.default
if depends.dependency:
dependency = depends.dependency
else:
dependency = param.annotation
return get_sub_dependant(
depends=depends,
dependency=dependency,
path=path,
name=param.name,
security_scopes=security_scopes,
)
def get_parameterless_sub_dependant(*, depends: params.Depends, path: str) -> Dependant:
assert callable(
depends.dependency
), "A parameter-less dependency must have a callable dependency"
return get_sub_dependant(depends=depends, dependency=depends.dependency, path=path)
def get_sub_dependant(
*,
depends: params.Depends,
dependency: Callable[..., Any],
path: str,
name: Optional[str] = None,
security_scopes: Optional[List[str]] = None,
) -> Dependant:
security_requirement = None
security_scopes = security_scopes or []
if isinstance(depends, params.Security):
dependency_scopes = depends.scopes
security_scopes.extend(dependency_scopes)
if isinstance(dependency, SecurityBase):
use_scopes: List[str] = []
if isinstance(dependency, (OAuth2, OpenIdConnect)):
use_scopes = security_scopes
security_requirement = SecurityRequirement(
security_scheme=dependency, scopes=use_scopes
)
sub_dependant = get_dependant(
path=path,
call=dependency,
name=name,
security_scopes=security_scopes,
use_cache=depends.use_cache,
)
if security_requirement:
sub_dependant.security_requirements.append(security_requirement)
sub_dependant.security_scopes = security_scopes
return sub_dependant
CacheKey = Tuple[Optional[Callable[..., Any]], Tuple[str, ...]]
def get_flat_dependant(
dependant: Dependant,
*,
skip_repeats: bool = False,
visited: Optional[List[CacheKey]] = None,
) -> Dependant:
if visited is None:
visited = []
visited.append(dependant.cache_key)
flat_dependant = Dependant(
path_params=dependant.path_params.copy(),
query_params=dependant.query_params.copy(),
header_params=dependant.header_params.copy(),
cookie_params=dependant.cookie_params.copy(),
body_params=dependant.body_params.copy(),
security_schemes=dependant.security_requirements.copy(),
use_cache=dependant.use_cache,
path=dependant.path,
)
for sub_dependant in dependant.dependencies:
if skip_repeats and sub_dependant.cache_key in visited:
continue
flat_sub = get_flat_dependant(
sub_dependant, skip_repeats=skip_repeats, visited=visited
)
flat_dependant.path_params.extend(flat_sub.path_params)
flat_dependant.query_params.extend(flat_sub.query_params)
flat_dependant.header_params.extend(flat_sub.header_params)
flat_dependant.cookie_params.extend(flat_sub.cookie_params)
flat_dependant.body_params.extend(flat_sub.body_params)
flat_dependant.security_requirements.extend(flat_sub.security_requirements)
return flat_dependant
def get_flat_params(dependant: Dependant) -> List[ModelField]:
flat_dependant = get_flat_dependant(dependant, skip_repeats=True)
return (
flat_dependant.path_params
+ flat_dependant.query_params
+ flat_dependant.header_params
+ flat_dependant.cookie_params
)
def is_scalar_field(field: ModelField) -> bool:
field_info = field.field_info
if not (
field.shape == SHAPE_SINGLETON
and not lenient_issubclass(field.type_, BaseModel)
and not lenient_issubclass(field.type_, sequence_types + (dict,))
and not dataclasses.is_dataclass(field.type_)
and not isinstance(field_info, params.Body)
):
return False
if field.sub_fields:
if not all(is_scalar_field(f) for f in field.sub_fields):
return False
return True
def is_scalar_sequence_field(field: ModelField) -> bool:
if (field.shape in sequence_shapes) and not lenient_issubclass(
field.type_, BaseModel
):
if field.sub_fields is not None:
for sub_field in field.sub_fields:
if not is_scalar_field(sub_field):
return False
return True
if lenient_issubclass(field.type_, sequence_types):
return True
return False
def get_typed_signature(call: Callable[..., Any]) -> inspect.Signature:
signature = inspect.signature(call)
globalns = getattr(call, "__globals__", {})
typed_params = [
inspect.Parameter(
name=param.name,
kind=param.kind,
default=param.default,
annotation=get_typed_annotation(param, globalns),
)
for param in signature.parameters.values()
]
typed_signature = inspect.Signature(typed_params)
return typed_signature
def get_typed_annotation(param: inspect.Parameter, globalns: Dict[str, Any]) -> Any:
annotation = param.annotation
if isinstance(annotation, str):
annotation = ForwardRef(annotation)
annotation = evaluate_forwardref(annotation, globalns, globalns)
return annotation
def get_dependant(
*,
path: str,
call: Callable[..., Any],
name: Optional[str] = None,
security_scopes: Optional[List[str]] = None,
use_cache: bool = True,
) -> Dependant:
path_param_names = get_path_param_names(path)
endpoint_signature = get_typed_signature(call)
signature_params = endpoint_signature.parameters
dependant = Dependant(call=call, name=name, path=path, use_cache=use_cache)
for param_name, param in signature_params.items():
if isinstance(param.default, params.Depends):
sub_dependant = get_param_sub_dependant(
param=param, path=path, security_scopes=security_scopes
)
dependant.dependencies.append(sub_dependant)
continue
if add_non_field_param_to_dependency(param=param, dependant=dependant):
continue
param_field = get_param_field(
param=param, default_field_info=params.Query, param_name=param_name
)
if param_name in path_param_names:
assert is_scalar_field(
field=param_field
), "Path params must be of one of the supported types"
if isinstance(param.default, params.Path):
ignore_default = False
else:
ignore_default = True
param_field = get_param_field(
param=param,
param_name=param_name,
default_field_info=params.Path,
force_type=params.ParamTypes.path,
ignore_default=ignore_default,
)
add_param_to_fields(field=param_field, dependant=dependant)
elif is_scalar_field(field=param_field):
add_param_to_fields(field=param_field, dependant=dependant)
elif isinstance(
param.default, (params.Query, params.Header)
) and is_scalar_sequence_field(param_field):
add_param_to_fields(field=param_field, dependant=dependant)
else:
field_info = param_field.field_info
assert isinstance(
field_info, params.Body
), f"Param: {param_field.name} can only be a request body, using Body()"
dependant.body_params.append(param_field)
return dependant
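# Example (illustrative sketch): analyzing a simple endpoint function. The
# endpoint below is hypothetical; ``item_id`` matches a path placeholder and is
# classified as a path parameter, while ``q`` becomes a query parameter.
#
#     async def read_item(item_id: int, q: Optional[str] = None):
#         return {"item_id": item_id, "q": q}
#
#     dependant = get_dependant(path="/items/{item_id}", call=read_item)
#     assert [f.name for f in dependant.path_params] == ["item_id"]
#     assert [f.name for f in dependant.query_params] == ["q"]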
def add_non_field_param_to_dependency(
*, param: inspect.Parameter, dependant: Dependant
) -> Optional[bool]:
if lenient_issubclass(param.annotation, Request):
dependant.request_param_name = param.name
return True
elif lenient_issubclass(param.annotation, WebSocket):
dependant.websocket_param_name = param.name
return True
elif lenient_issubclass(param.annotation, HTTPConnection):
dependant.http_connection_param_name = param.name
return True
elif lenient_issubclass(param.annotation, Response):
dependant.response_param_name = param.name
return True
elif lenient_issubclass(param.annotation, BackgroundTasks):
dependant.background_tasks_param_name = param.name
return True
elif lenient_issubclass(param.annotation, SecurityScopes):
dependant.security_scopes_param_name = param.name
return True
return None
def get_param_field(
*,
param: inspect.Parameter,
param_name: str,
default_field_info: Type[params.Param] = params.Param,
force_type: Optional[params.ParamTypes] = None,
ignore_default: bool = False,
) -> ModelField:
default_value: Any = Undefined
had_schema = False
if not param.default == param.empty and ignore_default is False:
default_value = param.default
if isinstance(default_value, FieldInfo):
had_schema = True
field_info = default_value
default_value = field_info.default
if (
isinstance(field_info, params.Param)
and getattr(field_info, "in_", None) is None
):
field_info.in_ = default_field_info.in_
if force_type:
field_info.in_ = force_type # type: ignore
else:
field_info = default_field_info(default=default_value)
required = True
if default_value is Required or ignore_default:
required = True
default_value = None
elif default_value is not Undefined:
required = False
annotation: Any = Any
if not param.annotation == param.empty:
annotation = param.annotation
annotation = get_annotation_from_field_info(annotation, field_info, param_name)
if not field_info.alias and getattr(field_info, "convert_underscores", None):
alias = param.name.replace("_", "-")
else:
alias = field_info.alias or param.name
field = create_response_field(
name=param.name,
type_=annotation,
default=default_value,
alias=alias,
required=required,
field_info=field_info,
)
if not had_schema and not is_scalar_field(field=field):
field.field_info = params.Body(field_info.default)
if not had_schema and lenient_issubclass(field.type_, UploadFile):
field.field_info = params.File(field_info.default)
return field
def add_param_to_fields(*, field: ModelField, dependant: Dependant) -> None:
field_info = cast(params.Param, field.field_info)
if field_info.in_ == params.ParamTypes.path:
dependant.path_params.append(field)
elif field_info.in_ == params.ParamTypes.query:
dependant.query_params.append(field)
elif field_info.in_ == params.ParamTypes.header:
dependant.header_params.append(field)
else:
assert (
field_info.in_ == params.ParamTypes.cookie
), f"non-body parameters must be in path, query, header or cookie: {field.name}"
dependant.cookie_params.append(field)
def is_coroutine_callable(call: Callable[..., Any]) -> bool:
if inspect.isroutine(call):
return inspect.iscoroutinefunction(call)
if inspect.isclass(call):
return False
call = getattr(call, "__call__", None)
return inspect.iscoroutinefunction(call)
def is_async_gen_callable(call: Callable[..., Any]) -> bool:
if inspect.isasyncgenfunction(call):
return True
call = getattr(call, "__call__", None)
return inspect.isasyncgenfunction(call)
def is_gen_callable(call: Callable[..., Any]) -> bool:
if inspect.isgeneratorfunction(call):
return True
call = getattr(call, "__call__", None)
return inspect.isgeneratorfunction(call)
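# Example (illustrative sketch): how the three probes above classify the
# callable styles that solve_dependencies() supports.
#
#     async def coro_dep(): ...
#     def sync_dep(): ...
#     async def async_gen_dep(): yield 1
#     def gen_dep(): yield 1
#
#     assert is_coroutine_callable(coro_dep) and not is_coroutine_callable(sync_dep)
#     assert is_async_gen_callable(async_gen_dep)
#     assert is_gen_callable(gen_dep)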
async def solve_generator(
*, call: Callable[..., Any], stack: AsyncExitStack, sub_values: Dict[str, Any]
) -> Any:
if is_gen_callable(call):
cm = contextmanager_in_threadpool(contextmanager(call)(**sub_values))
elif is_async_gen_callable(call):
cm = asynccontextmanager(call)(**sub_values)
return await stack.enter_async_context(cm)
async def solve_dependencies(
*,
request: Union[Request, WebSocket],
dependant: Dependant,
body: Optional[Union[Dict[str, Any], FormData]] = None,
background_tasks: Optional[BackgroundTasks] = None,
response: Optional[Response] = None,
dependency_overrides_provider: Optional[Any] = None,
dependency_cache: Optional[Dict[Tuple[Callable[..., Any], Tuple[str]], Any]] = None,
) -> Tuple[
Dict[str, Any],
List[ErrorWrapper],
Optional[BackgroundTasks],
Response,
Dict[Tuple[Callable[..., Any], Tuple[str]], Any],
]:
values: Dict[str, Any] = {}
errors: List[ErrorWrapper] = []
if response is None:
response = Response()
del response.headers["content-length"]
response.status_code = None # type: ignore
dependency_cache = dependency_cache or {}
sub_dependant: Dependant
for sub_dependant in dependant.dependencies:
sub_dependant.call = cast(Callable[..., Any], sub_dependant.call)
sub_dependant.cache_key = cast(
Tuple[Callable[..., Any], Tuple[str]], sub_dependant.cache_key
)
call = sub_dependant.call
use_sub_dependant = sub_dependant
if (
dependency_overrides_provider
and dependency_overrides_provider.dependency_overrides
):
original_call = sub_dependant.call
call = getattr(
dependency_overrides_provider, "dependency_overrides", {}
).get(original_call, original_call)
use_path: str = sub_dependant.path # type: ignore
use_sub_dependant = get_dependant(
path=use_path,
call=call,
name=sub_dependant.name,
security_scopes=sub_dependant.security_scopes,
)
use_sub_dependant.security_scopes = sub_dependant.security_scopes
solved_result = await solve_dependencies(
request=request,
dependant=use_sub_dependant,
body=body,
background_tasks=background_tasks,
response=response,
dependency_overrides_provider=dependency_overrides_provider,
dependency_cache=dependency_cache,
)
(
sub_values,
sub_errors,
background_tasks,
_, # the subdependency returns the same response we have
sub_dependency_cache,
) = solved_result
dependency_cache.update(sub_dependency_cache)
if sub_errors:
errors.extend(sub_errors)
continue
if sub_dependant.use_cache and sub_dependant.cache_key in dependency_cache:
solved = dependency_cache[sub_dependant.cache_key]
elif is_gen_callable(call) or is_async_gen_callable(call):
stack = request.scope.get("fastapi_astack")
assert isinstance(stack, AsyncExitStack)
solved = await solve_generator(
call=call, stack=stack, sub_values=sub_values
)
elif is_coroutine_callable(call):
solved = await call(**sub_values)
else:
solved = await run_in_threadpool(call, **sub_values)
if sub_dependant.name is not None:
values[sub_dependant.name] = solved
if sub_dependant.cache_key not in dependency_cache:
dependency_cache[sub_dependant.cache_key] = solved
path_values, path_errors = request_params_to_args(
dependant.path_params, request.path_params
)
query_values, query_errors = request_params_to_args(
dependant.query_params, request.query_params
)
header_values, header_errors = request_params_to_args(
dependant.header_params, request.headers
)
cookie_values, cookie_errors = request_params_to_args(
dependant.cookie_params, request.cookies
)
values.update(path_values)
values.update(query_values)
values.update(header_values)
values.update(cookie_values)
errors += path_errors + query_errors + header_errors + cookie_errors
if dependant.body_params:
(
body_values,
body_errors,
) = await request_body_to_args( # body_params checked above
required_params=dependant.body_params, received_body=body
)
values.update(body_values)
errors.extend(body_errors)
if dependant.http_connection_param_name:
values[dependant.http_connection_param_name] = request
if dependant.request_param_name and isinstance(request, Request):
values[dependant.request_param_name] = request
elif dependant.websocket_param_name and isinstance(request, WebSocket):
values[dependant.websocket_param_name] = request
if dependant.background_tasks_param_name:
if background_tasks is None:
background_tasks = BackgroundTasks()
values[dependant.background_tasks_param_name] = background_tasks
if dependant.response_param_name:
values[dependant.response_param_name] = response
if dependant.security_scopes_param_name:
values[dependant.security_scopes_param_name] = SecurityScopes(
scopes=dependant.security_scopes
)
return values, errors, background_tasks, response, dependency_cache
def request_params_to_args(
required_params: Sequence[ModelField],
received_params: Union[Mapping[str, Any], QueryParams, Headers],
) -> Tuple[Dict[str, Any], List[ErrorWrapper]]:
values = {}
errors = []
for field in required_params:
if is_scalar_sequence_field(field) and isinstance(
received_params, (QueryParams, Headers)
):
value = received_params.getlist(field.alias) or field.default
else:
value = received_params.get(field.alias)
field_info = field.field_info
assert isinstance(
field_info, params.Param
), "Params must be subclasses of Param"
if value is None:
if field.required:
errors.append(
ErrorWrapper(
MissingError(), loc=(field_info.in_.value, field.alias)
)
)
else:
values[field.name] = deepcopy(field.default)
continue
v_, errors_ = field.validate(
value, values, loc=(field_info.in_.value, field.alias)
)
if isinstance(errors_, ErrorWrapper):
errors.append(errors_)
elif isinstance(errors_, list):
errors.extend(errors_)
else:
values[field.name] = v_
return values, errors
async def request_body_to_args(
required_params: List[ModelField],
received_body: Optional[Union[Dict[str, Any], FormData]],
) -> Tuple[Dict[str, Any], List[ErrorWrapper]]:
values = {}
errors = []
if required_params:
field = required_params[0]
field_info = field.field_info
embed = getattr(field_info, "embed", None)
field_alias_omitted = len(required_params) == 1 and not embed
if field_alias_omitted:
received_body = {field.alias: received_body}
for field in required_params:
loc: Tuple[str, ...]
if field_alias_omitted:
loc = ("body",)
else:
loc = ("body", field.alias)
value: Optional[Any] = None
if received_body is not None:
if (
field.shape in sequence_shapes or field.type_ in sequence_types
) and isinstance(received_body, FormData):
value = received_body.getlist(field.alias)
else:
try:
value = received_body.get(field.alias)
except AttributeError:
errors.append(get_missing_field_error(loc))
continue
if (
value is None
or (isinstance(field_info, params.Form) and value == "")
or (
isinstance(field_info, params.Form)
and field.shape in sequence_shapes
and len(value) == 0
)
):
if field.required:
errors.append(get_missing_field_error(loc))
else:
values[field.name] = deepcopy(field.default)
continue
if (
isinstance(field_info, params.File)
and lenient_issubclass(field.type_, bytes)
and isinstance(value, UploadFile)
):
value = await value.read()
elif (
field.shape in sequence_shapes
and isinstance(field_info, params.File)
and lenient_issubclass(field.type_, bytes)
and isinstance(value, sequence_types)
):
results: List[Union[bytes, str]] = []
async def process_fn(
fn: Callable[[], Coroutine[Any, Any, Any]]
) -> None:
result = await fn()
results.append(result)
async with anyio.create_task_group() as tg:
for sub_value in value:
tg.start_soon(process_fn, sub_value.read)
value = sequence_shape_to_type[field.shape](results)
v_, errors_ = field.validate(value, values, loc=loc)
if isinstance(errors_, ErrorWrapper):
errors.append(errors_)
elif isinstance(errors_, list):
errors.extend(errors_)
else:
values[field.name] = v_
return values, errors
def get_missing_field_error(loc: Tuple[str, ...]) -> ErrorWrapper:
missing_field_error = ErrorWrapper(MissingError(), loc=loc)
return missing_field_error
def get_body_field(*, dependant: Dependant, name: str) -> Optional[ModelField]:
flat_dependant = get_flat_dependant(dependant)
if not flat_dependant.body_params:
return None
first_param = flat_dependant.body_params[0]
field_info = first_param.field_info
embed = getattr(field_info, "embed", None)
body_param_names_set = {param.name for param in flat_dependant.body_params}
if len(body_param_names_set) == 1 and not embed:
check_file_field(first_param)
return first_param
    # If one field requires embedding, all of them have to be embedded, in case
    # a sub-dependency is evaluated with a single unique body field that is
    # combined (embedded) with other body fields
for param in flat_dependant.body_params:
setattr(param.field_info, "embed", True)
model_name = "Body_" + name
BodyModel: Type[BaseModel] = create_model(model_name)
for f in flat_dependant.body_params:
BodyModel.__fields__[f.name] = f
required = any(True for f in flat_dependant.body_params if f.required)
BodyFieldInfo_kwargs: Dict[str, Any] = dict(default=None)
if any(isinstance(f.field_info, params.File) for f in flat_dependant.body_params):
BodyFieldInfo: Type[params.Body] = params.File
elif any(isinstance(f.field_info, params.Form) for f in flat_dependant.body_params):
BodyFieldInfo = params.Form
else:
BodyFieldInfo = params.Body
body_param_media_types = [
getattr(f.field_info, "media_type")
for f in flat_dependant.body_params
if isinstance(f.field_info, params.Body)
]
if len(set(body_param_media_types)) == 1:
BodyFieldInfo_kwargs["media_type"] = body_param_media_types[0]
final_field = create_response_field(
name="body",
type_=BodyModel,
required=required,
alias="body",
field_info=BodyFieldInfo(**BodyFieldInfo_kwargs),
)
check_file_field(final_field)
return final_field | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/dependencies/utils.py | utils.py |
from typing import Any, Callable, List, Optional, Sequence
from ..security.base import SecurityBase
from pydantic.fields import ModelField
class SecurityRequirement:
def __init__(
self, security_scheme: SecurityBase, scopes: Optional[Sequence[str]] = None
):
self.security_scheme = security_scheme
self.scopes = scopes
class Dependant:
def __init__(
self,
*,
path_params: Optional[List[ModelField]] = None,
query_params: Optional[List[ModelField]] = None,
header_params: Optional[List[ModelField]] = None,
cookie_params: Optional[List[ModelField]] = None,
body_params: Optional[List[ModelField]] = None,
dependencies: Optional[List["Dependant"]] = None,
security_schemes: Optional[List[SecurityRequirement]] = None,
name: Optional[str] = None,
call: Optional[Callable[..., Any]] = None,
request_param_name: Optional[str] = None,
websocket_param_name: Optional[str] = None,
http_connection_param_name: Optional[str] = None,
response_param_name: Optional[str] = None,
background_tasks_param_name: Optional[str] = None,
security_scopes_param_name: Optional[str] = None,
security_scopes: Optional[List[str]] = None,
use_cache: bool = True,
path: Optional[str] = None,
) -> None:
self.path_params = path_params or []
self.query_params = query_params or []
self.header_params = header_params or []
self.cookie_params = cookie_params or []
self.body_params = body_params or []
self.dependencies = dependencies or []
self.security_requirements = security_schemes or []
self.request_param_name = request_param_name
self.websocket_param_name = websocket_param_name
self.http_connection_param_name = http_connection_param_name
self.response_param_name = response_param_name
self.background_tasks_param_name = background_tasks_param_name
self.security_scopes = security_scopes
self.security_scopes_param_name = security_scopes_param_name
self.name = name
self.call = call
self.use_cache = use_cache
# Store the path to be able to re-generate a dependable from it in overrides
self.path = path
# Save the cache key at creation to optimize performance
self.cache_key = (self.call, tuple(sorted(set(self.security_scopes or [])))) | zdppy-api | /zdppy_api-0.1.1.tar.gz/zdppy_api-0.1.1/zdppy_api/dependencies/models.py | models.py |
from .code import (
CODE_SUCCESS, CODE_PARAM_ERROR, CODE_SERVER_ERROR,
CODE_NOT_FOUND, CODE_GRPC_CAN_NOT_USE, CODE_EXISTS_ERROR,
CODE_NO_AUTH, CODE_TOKEN_EXPIRED, CODE_TIMEOUT,
CODE_CORS_ERROR, CODE_REQUEST_LIMIT_ERROR,
)
from .msg import (
MESSAGE_SUCCESS, MESSAGE_PARAM_ERROR, MESSAGE_SERVER_ERROR,
MESSAGE_NOT_FOUND, MESSAGE_GRPC_CAN_NOT_USE, MESSAGE_EXISTS_ERROR,
MESSAGE_NO_AUTH, MESSAGE_TOKEN_EXPIRED, MESSAGE_TIMEOUT,
MESSAGE_CORS_ERROR, MESSAGE_REQUEST_LIMIT_ERROR,
)
# Successful response, covering successful creation, update, deletion, etc.
ResponseSuccess = {
    "status": True,  # status flag
    "msg": MESSAGE_SUCCESS,  # message
    "code": CODE_SUCCESS  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
}
# Successful response that also carries data; the data is a dict
ResponseSuccessData = {
    "status": True,  # status flag
    "msg": MESSAGE_SUCCESS,  # message
    "code": CODE_SUCCESS,  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
    "data": {}  # data payload
}
# Successful response that also carries data; the data is a list of dicts
ResponseSuccessListData = {
    "status": True,  # status flag
    "msg": MESSAGE_SUCCESS,  # message
    "code": CODE_SUCCESS,  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
    "data": []  # data payload
}
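# Example (illustrative sketch): these templates are shared module-level dicts,
# so copy them before attaching per-request data.
#
#     def get_user(user_id: int):
#         response = dict(ResponseSuccessData)
#         response["data"] = {"id": user_id, "name": "demo"}  # hypothetical payload
#         return response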
ResponseParamError = {
    "status": False,  # status flag
    "msg": MESSAGE_PARAM_ERROR,  # message
    "code": CODE_PARAM_ERROR,  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
}
ResponseServerError = {
    "status": False,  # status flag
    "msg": MESSAGE_SERVER_ERROR,  # message
    "code": CODE_SERVER_ERROR,  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
}
ResponseNotFound = {
    "status": False,  # status flag
    "msg": MESSAGE_NOT_FOUND,  # message
    "code": CODE_NOT_FOUND,  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
}
ResponseGrpcCanNotUse = {
    "status": False,  # status flag
    "msg": MESSAGE_GRPC_CAN_NOT_USE,  # message
    "code": CODE_GRPC_CAN_NOT_USE,  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
}
ResponseExistsError = {
    "status": False,  # status flag
    "msg": MESSAGE_EXISTS_ERROR,  # message
    "code": CODE_EXISTS_ERROR,  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
}
ResponseUnAuth = {
    "status": False,  # status flag
    "msg": MESSAGE_NO_AUTH,  # message
    "code": CODE_NO_AUTH,  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
}
ResponseTokenExpired = {
    "status": False,  # status flag
    "msg": MESSAGE_TOKEN_EXPIRED,  # message
    "code": CODE_TOKEN_EXPIRED,  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
}
ResponseTimeout = {
    "status": False,  # status flag
    "msg": MESSAGE_TIMEOUT,  # message
    "code": CODE_TIMEOUT,  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
}
ResponseCorsError = {
    "status": False,  # status flag
    "msg": MESSAGE_CORS_ERROR,  # message
    "code": CODE_CORS_ERROR,  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
}
ResponseRequestLimitError = {
    "status": False,  # status flag
    "msg": MESSAGE_REQUEST_LIMIT_ERROR,  # message
    "code": CODE_REQUEST_LIMIT_ERROR,  # status code: common HTTP status codes are avoided on purpose, so the values are harder to guess
} | zdppy-code | /zdppy_code-0.1.0.tar.gz/zdppy_code-0.1.0/zdppy_code/response.py | response.py |
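# Usage sketch (added for illustration; not part of the original module).
# The templates above are shared module-level dicts, so a handler should
# copy one before attaching request-specific data; mutating the template
# directly would leak one request's payload into the next. The handler
# and payload below are hypothetical.
if __name__ == "__main__":
    def get_user_handler():
        response = dict(ResponseSuccessData)
        response["data"] = {"id": 1, "name": "alice"}  # hypothetical payload
        return response

    print(get_user_handler())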
from typing import Dict, List
import zdppy_requests
class Consul:
"""
consul核心类
"""
def __init__(self, host: str = "127.0.0.1", port: int = 8500, timeout: int = 30, interval: int = 3):
        self.host = host  # Consul host address
        self.port = port  # Consul port
        self.timeout = timeout  # health check timeout in seconds
        self.interval = interval  # health check interval in seconds
def register_http_server(self,
server_name: str,
server_id: str,
headers: Dict,
tags: List[str],
server_host: str,
server_port: int,
health_check_path: str) -> bool:
"""
注册HTTP服务
:param server_name: 服务名称
:param server_id: 服务唯一标识
:param headers: 请求头
:param tags: 服务标签
:param server_host: 服务主机地址
:param server_port: 服务端口号
:param health_check_path: 健康检查URL或者路径
"""
        # Registration endpoint
target_url = f"http://{self.host}:{self.port}/v1/agent/service/register"
        # Request headers; note the standard header name is "Content-Type"
        if not headers:
            headers = {
                "Content-Type": "application/json"
            }
        # Health check URL
if not health_check_path.startswith("http"):
if not health_check_path.startswith("/"):
health_check_path = f"/{health_check_path}"
health_check_path = f"http://{server_host}:{server_port}{health_check_path}"
        # Send the registration request
rsp = zdppy_requests.put(target_url, headers=headers, json={
"Name": server_name,
"ID": server_id,
"Tags": tags,
"Address": server_host,
"Port": server_port,
"Check": {
"HTTP": health_check_path,
"Timeout": f"{self.timeout}s",
"Interval": f"{self.interval}s",
"DeregisterCriticalServiceAfter": "15s"
}
})
        # Return whether registration succeeded
return rsp is not None and rsp.status_code == 200
def register_grpc_server(self,
server_name: str,
server_id: str,
headers: Dict,
tags: List[str],
server_host: str,
server_port: int) -> bool:
"""
注册GRPC服务
:param server_name: 服务名称
:param server_id: 服务唯一标识
:param headers: 请求头
:param tags: 服务标签
:param server_host: 服务主机地址
:param server_port: 服务端口号
"""
        # Registration endpoint
target_url = f"http://{self.host}:{self.port}/v1/agent/service/register"
        # Request headers; note the standard header name is "Content-Type"
        if not headers:
            headers = {
                "Content-Type": "application/json"
            }
        # Send the registration request
rsp = zdppy_requests.put(target_url, headers=headers, json={
"Name": server_name,
"ID": server_id,
"Tags": tags,
"Address": server_host,
"Port": server_port,
"Check": {
"GRPC": f"{server_host}:{server_port}",
"GRPCUseTLS": False,
"Timeout": f"{self.timeout}s",
"Interval": f"{self.interval}s",
"DeregisterCriticalServiceAfter": "15s"
}
})
        # Return whether registration succeeded
return rsp is not None and rsp.status_code == 200
def deregister(self, server_id: str) -> bool:
"""
注销服务
:param server_id: 服务ID
"""
        # Request headers; note the standard header name is "Content-Type"
        headers = {
            "Content-Type": "application/json"
        }
        # Deregistration endpoint
url = f"http://{self.host}:{self.port}/v1/agent/service/deregister/{server_id}"
        # Send the deregistration request
rsp = zdppy_requests.put(url, headers=headers)
        # Return whether deregistration succeeded, guarding against a missing response
        # as the register methods do
        return rsp is not None and rsp.status_code == 200
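# Usage sketch (added for illustration; not part of the original module).
# Assumes a local Consul agent on 127.0.0.1:8500 and an HTTP service on
# 192.168.1.10:8000 exposing /health; every address here is hypothetical.
if __name__ == "__main__":
    consul = Consul(host="127.0.0.1", port=8500)
    registered = consul.register_http_server(
        server_name="user-service",
        server_id="user-service-1",
        headers={},
        tags=["user", "v1"],
        server_host="192.168.1.10",
        server_port=8000,
        health_check_path="/health",
    )
    print("registered:", registered)
    # Remove the instance from the registry when shutting down.
    print("deregistered:", consul.deregister("user-service-1"))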
import uuid
import json
import zdppy_requests as zr
from zdppy_requests.auth import HTTPBasicAuth
from typing import Union
class ElasticSearch:
def __init__(
self,
host: str = "localhost",
port: int = 9200,
username: str = "elastic",
password: str = "elastic",
):
self.host = host
self.port = port
self.url = f"http://{host}:{port}"
self.username = username
self.password = password
self.auth = HTTPBasicAuth(username, password)
def add_mapping(self, index:str, mapping:dict) -> bool:
"""添加索引映射"""
try:
response = zr.put(f"{self.url}/{index}", json=mapping, auth=self.auth)
if response.status_code != 200:
print(response.text)
return False
except Exception as e:
print(e)
return False
return True
def get_mapping(self, index:str) -> dict:
"""查询索引映射"""
try:
response = zr.get(f"{self.url}/{index}/_mapping?pretty", auth=self.auth)
return response.json()
except Exception as e:
print(e)
return {"msg":"连接ElasticSearch服务失败"}
def delete_index(self, index:str) -> bool:
"""删除索引"""
        try:
            response = zr.delete(f"{self.url}/{index}", auth=self.auth)
            return response.status_code == 200
except Exception as e:
print(e)
return False
def add(self, index:str, did:Union[str, int], document:dict) -> bool:
"""新增数据"""
target = f"{self.url}/{index}/_doc/{did}"
try:
response = zr.put(target, json=document, auth=self.auth)
if response.status_code not in (200, 201):
print(response.text)
return False
except Exception as e:
print(e)
return False
return True
def get(self, index:str, did:Union[str, int], is_source:bool = False) -> dict:
"""根据ID获取数据"""
target = f"{self.url}/{index}/_doc/{did}"
try:
response = zr.get(target, auth=self.auth)
data = response.json()
if data["found"]:
if is_source:
return data["_source"]
else:
return data
except Exception as e:
print(e)
return {}
def delete(self, index:str, did:Union[str, int]) -> bool:
"""根据ID删除数据"""
target = f"{self.url}/{index}/_doc/{did}"
try:
response = zr.delete(target, auth=self.auth)
return response.status_code == 200
except Exception as e:
print(e)
return False
def add_many(self, data:list, index:str=None) -> bool:
"""批量添加数据"""
# 校验数据
if not data or len(data) == 0:
return False
        # When an index is given, interleave bulk action lines carrying the index and generated IDs
if index is not None:
if not index:
return False
new_data = []
for doc in data:
_id = str(uuid.uuid4())
index_doc = {"index": {"_index": index, "_type" : "_doc", "_id" : _id}}
new_data.append(index_doc)
new_data.append(doc)
data = new_data
        # Prepare the bulk request as newline-delimited JSON
target = f"{self.url}/_bulk"
payload = '\n'.join([json.dumps(line) for line in data]) + '\n'
headers = {
'Content-Type': 'application/x-ndjson'
}
        # Send the bulk request
try:
response = zr.post(target, data=payload, auth=self.auth, headers=headers)
if response.status_code != 200:
print(response.text)
return False
except Exception as e:
print(e)
return False
return True
    def search(self, index: str, query: dict = None, is_source=True) -> Union[list, dict]:
        """Search documents in an index"""
        # Fall back to a match_all query when none is given
        if query is None:
            query = {
                "query": {
                    "match_all": {}
                }
            }
        # Run the search
        target = f"{self.url}/{index}/_search"
        try:
            response = zr.get(target, json=query, auth=self.auth)
data = response.json()
if is_source:
return [v["_source"] for v in data["hits"]["hits"]]
else:
return data
except Exception as e:
print(e)
return {} | zdppy-elasticsearch | /zdppy_elasticsearch-1.0.1-py3-none-any.whl/zdppy_elasticsearch/elasticsearch.py | elasticsearch.py |
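# Usage sketch (added for illustration; not part of the original module).
# Assumes an Elasticsearch node on localhost:9200 with basic auth
# elastic/elastic; the index name and documents are hypothetical.
if __name__ == "__main__":
    es = ElasticSearch(host="localhost", port=9200,
                       username="elastic", password="elastic")
    es.add(index="users", did=1, document={"name": "alice", "age": 30})
    print(es.get(index="users", did=1, is_source=True))
    # With an index argument, add_many generates a UUID for each document.
    es.add_many([{"name": "bob"}, {"name": "carol"}], index="users")
    print(es.search(index="users"))
    es.delete(index="users", did=1)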
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='grpc_health/v1/health.proto',
package='grpc.health.v1',
syntax='proto3',
serialized_options=b'\n\021io.grpc.health.v1B\013HealthProtoP\001Z,google.golang.org/grpc/health/grpc_health_v1\252\002\016Grpc.Health.V1',
serialized_pb=b'\n\x1bgrpc_health/v1/health.proto\x12\x0egrpc.health.v1\"%\n\x12HealthCheckRequest\x12\x0f\n\x07service\x18\x01 \x01(\t\"\xa9\x01\n\x13HealthCheckResponse\x12\x41\n\x06status\x18\x01 \x01(\x0e\x32\x31.grpc.health.v1.HealthCheckResponse.ServingStatus\"O\n\rServingStatus\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07SERVING\x10\x01\x12\x0f\n\x0bNOT_SERVING\x10\x02\x12\x13\n\x0fSERVICE_UNKNOWN\x10\x03\x32\xae\x01\n\x06Health\x12P\n\x05\x43heck\x12\".grpc.health.v1.HealthCheckRequest\x1a#.grpc.health.v1.HealthCheckResponse\x12R\n\x05Watch\x12\".grpc.health.v1.HealthCheckRequest\x1a#.grpc.health.v1.HealthCheckResponse0\x01\x42\x61\n\x11io.grpc.health.v1B\x0bHealthProtoP\x01Z,google.golang.org/grpc/health/grpc_health_v1\xaa\x02\x0eGrpc.Health.V1b\x06proto3'
)
_HEALTHCHECKRESPONSE_SERVINGSTATUS = _descriptor.EnumDescriptor(
name='ServingStatus',
full_name='grpc.health.v1.HealthCheckResponse.ServingStatus',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SERVING', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NOT_SERVING', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SERVICE_UNKNOWN', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=177,
serialized_end=256,
)
_sym_db.RegisterEnumDescriptor(_HEALTHCHECKRESPONSE_SERVINGSTATUS)
_HEALTHCHECKREQUEST = _descriptor.Descriptor(
name='HealthCheckRequest',
full_name='grpc.health.v1.HealthCheckRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='service', full_name='grpc.health.v1.HealthCheckRequest.service', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=47,
serialized_end=84,
)
_HEALTHCHECKRESPONSE = _descriptor.Descriptor(
name='HealthCheckResponse',
full_name='grpc.health.v1.HealthCheckResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='grpc.health.v1.HealthCheckResponse.status', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_HEALTHCHECKRESPONSE_SERVINGSTATUS,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=87,
serialized_end=256,
)
_HEALTHCHECKRESPONSE.fields_by_name['status'].enum_type = _HEALTHCHECKRESPONSE_SERVINGSTATUS
_HEALTHCHECKRESPONSE_SERVINGSTATUS.containing_type = _HEALTHCHECKRESPONSE
DESCRIPTOR.message_types_by_name['HealthCheckRequest'] = _HEALTHCHECKREQUEST
DESCRIPTOR.message_types_by_name['HealthCheckResponse'] = _HEALTHCHECKRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
HealthCheckRequest = _reflection.GeneratedProtocolMessageType('HealthCheckRequest', (_message.Message,), {
'DESCRIPTOR' : _HEALTHCHECKREQUEST,
'__module__' : 'grpc_health.v1.health_pb2'
# @@protoc_insertion_point(class_scope:grpc.health.v1.HealthCheckRequest)
})
_sym_db.RegisterMessage(HealthCheckRequest)
HealthCheckResponse = _reflection.GeneratedProtocolMessageType('HealthCheckResponse', (_message.Message,), {
'DESCRIPTOR' : _HEALTHCHECKRESPONSE,
'__module__' : 'grpc_health.v1.health_pb2'
# @@protoc_insertion_point(class_scope:grpc.health.v1.HealthCheckResponse)
})
_sym_db.RegisterMessage(HealthCheckResponse)
DESCRIPTOR._options = None
_HEALTH = _descriptor.ServiceDescriptor(
name='Health',
full_name='grpc.health.v1.Health',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=259,
serialized_end=433,
methods=[
_descriptor.MethodDescriptor(
name='Check',
full_name='grpc.health.v1.Health.Check',
index=0,
containing_service=None,
input_type=_HEALTHCHECKREQUEST,
output_type=_HEALTHCHECKRESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='Watch',
full_name='grpc.health.v1.Health.Watch',
index=1,
containing_service=None,
input_type=_HEALTHCHECKREQUEST,
output_type=_HEALTHCHECKRESPONSE,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_HEALTH)
DESCRIPTOR.services_by_name['Health'] = _HEALTH
# @@protoc_insertion_point(module_scope) | zdppy-grpc | /zdppy_grpc-0.1.0-py3-none-any.whl/zdppy_grpc/health/v1/health_pb2.py | health_pb2.py |
import grpc
from . import health_pb2 as grpc__health_dot_v1_dot_health__pb2
class HealthStub(object):
"""Missing associated documentation comment in .proto file"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Check = channel.unary_unary(
'/grpc.health.v1.Health/Check',
request_serializer=grpc__health_dot_v1_dot_health__pb2.HealthCheckRequest.SerializeToString,
response_deserializer=grpc__health_dot_v1_dot_health__pb2.HealthCheckResponse.FromString,
)
self.Watch = channel.unary_stream(
'/grpc.health.v1.Health/Watch',
request_serializer=grpc__health_dot_v1_dot_health__pb2.HealthCheckRequest.SerializeToString,
response_deserializer=grpc__health_dot_v1_dot_health__pb2.HealthCheckResponse.FromString,
)
class HealthServicer(object):
"""Missing associated documentation comment in .proto file"""
def Check(self, request, context):
"""If the requested service is unknown, the call will fail with status
NOT_FOUND.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Watch(self, request, context):
"""Performs a watch for the serving status of the requested service.
The server will immediately send back a message indicating the current
serving status. It will then subsequently send a new message whenever
the service's serving status changes.
If the requested service is unknown when the call is received, the
server will send a message setting the serving status to
SERVICE_UNKNOWN but will *not* terminate the call. If at some
future point, the serving status of the service becomes known, the
server will send a new message with the service's serving status.
If the call terminates with status UNIMPLEMENTED, then clients
should assume this method is not supported and should not retry the
call. If the call terminates with any other status (including OK),
clients should retry the call with appropriate exponential backoff.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_HealthServicer_to_server(servicer, server):
rpc_method_handlers = {
'Check': grpc.unary_unary_rpc_method_handler(
servicer.Check,
request_deserializer=grpc__health_dot_v1_dot_health__pb2.HealthCheckRequest.FromString,
response_serializer=grpc__health_dot_v1_dot_health__pb2.HealthCheckResponse.SerializeToString,
),
'Watch': grpc.unary_stream_rpc_method_handler(
servicer.Watch,
request_deserializer=grpc__health_dot_v1_dot_health__pb2.HealthCheckRequest.FromString,
response_serializer=grpc__health_dot_v1_dot_health__pb2.HealthCheckResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'grpc.health.v1.Health', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Health(object):
"""Missing associated documentation comment in .proto file"""
@staticmethod
def Check(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/grpc.health.v1.Health/Check',
grpc__health_dot_v1_dot_health__pb2.HealthCheckRequest.SerializeToString,
grpc__health_dot_v1_dot_health__pb2.HealthCheckResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Watch(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_stream(request, target, '/grpc.health.v1.Health/Watch',
grpc__health_dot_v1_dot_health__pb2.HealthCheckRequest.SerializeToString,
grpc__health_dot_v1_dot_health__pb2.HealthCheckResponse.FromString,
options, channel_credentials,
call_credentials, compression, wait_for_ready, timeout, metadata) | zdppy-grpc | /zdppy_grpc-0.1.0-py3-none-any.whl/zdppy_grpc/health/v1/health_pb2_grpc.py | health_pb2_grpc.py |
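# Client usage sketch (added for illustration; this file is generated by
# protoc, so in practice the snippet below would live in application code).
# Assumes a health-checking server on localhost:50051; the address is
# hypothetical.
#
#     channel = grpc.insecure_channel("localhost:50051")
#     stub = HealthStub(channel)
#     response = stub.Check(
#         grpc__health_dot_v1_dot_health__pb2.HealthCheckRequest(service=""))
#     print(response.status)  # HealthCheckResponse.SERVING == 1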
import asyncio
import collections
from typing import MutableMapping
import grpc
from . import health_pb2 as _health_pb2
from . import health_pb2_grpc as _health_pb2_grpc
class HealthServicer(_health_pb2_grpc.HealthServicer):
"""An AsyncIO implementation of health checking servicer."""
_server_status: MutableMapping[
str, '_health_pb2.HealthCheckResponse.ServingStatus']
_server_watchers: MutableMapping[str, asyncio.Condition]
_gracefully_shutting_down: bool
def __init__(self) -> None:
self._server_status = {"": _health_pb2.HealthCheckResponse.SERVING}
self._server_watchers = collections.defaultdict(asyncio.Condition)
self._gracefully_shutting_down = False
    async def Check(self, request: _health_pb2.HealthCheckRequest,
                    context) -> _health_pb2.HealthCheckResponse:
status = self._server_status.get(request.service)
if status is None:
await context.abort(grpc.StatusCode.NOT_FOUND)
else:
return _health_pb2.HealthCheckResponse(status=status)
async def Watch(self, request: _health_pb2.HealthCheckRequest,
context) -> None:
condition = self._server_watchers[request.service]
last_status = None
try:
async with condition:
while True:
status = self._server_status.get(
request.service,
_health_pb2.HealthCheckResponse.SERVICE_UNKNOWN)
# NOTE(lidiz) If the observed status is the same, it means
# there are missing intermediate statuses. It's considered
# acceptable since peer only interested in eventual status.
if status != last_status:
# Responds with current health state
await context.write(
_health_pb2.HealthCheckResponse(status=status))
# Records the last sent status
last_status = status
# Polling on health state changes
await condition.wait()
finally:
if request.service in self._server_watchers:
del self._server_watchers[request.service]
async def _set(self, service: str,
status: _health_pb2.HealthCheckResponse.ServingStatus
) -> None:
if service in self._server_watchers:
condition = self._server_watchers.get(service)
async with condition:
self._server_status[service] = status
condition.notify_all()
else:
self._server_status[service] = status
async def set(self, service: str,
status: _health_pb2.HealthCheckResponse.ServingStatus
) -> None:
"""Sets the status of a service.
Args:
service: string, the name of the service.
status: HealthCheckResponse.status enum value indicating the status of
the service
"""
if self._gracefully_shutting_down:
return
else:
await self._set(service, status)
async def enter_graceful_shutdown(self) -> None:
"""Permanently sets the status of all services to NOT_SERVING.
This should be invoked when the server is entering a graceful shutdown
period. After this method is invoked, future attempts to set the status
of a service will be ignored.
"""
if self._gracefully_shutting_down:
return
else:
self._gracefully_shutting_down = True
for service in self._server_status:
await self._set(service,
_health_pb2.HealthCheckResponse.NOT_SERVING) | zdppy-grpc | /zdppy_grpc-0.1.0-py3-none-any.whl/zdppy_grpc/health/v1/_async.py | _async.py |
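# Usage sketch (added for illustration; not part of the original module).
# Wires the servicer into a grpc.aio server and marks a hypothetical service
# name as SERVING; the port is arbitrary.
if __name__ == "__main__":
    async def _serve() -> None:
        server = grpc.aio.server()
        servicer = HealthServicer()
        _health_pb2_grpc.add_HealthServicer_to_server(servicer, server)
        server.add_insecure_port("[::]:50051")
        await server.start()
        await servicer.set("my.package.MyService",
                           _health_pb2.HealthCheckResponse.SERVING)
        await server.wait_for_termination()

    asyncio.run(_serve())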