from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.common.util.api import is_func_overridden
# ################################################################################################################################
class HookTool(object):
def __init__(self, server, hook_ctx_class, hook_type_to_method, invoke_func):
self.server = server
self.hook_ctx_class = hook_ctx_class
self.hook_type_to_method = hook_type_to_method
self.invoke_func = invoke_func
# ################################################################################################################################
def is_hook_overridden(self, service_name, hook_type):
impl_name = self.server.service_store.name_to_impl_name[service_name]
service_class = self.server.service_store.service_data(impl_name)['service_class']
func_name = self.hook_type_to_method[hook_type]
func = getattr(service_class, func_name)
return is_func_overridden(func)
# ################################################################################################################################
def get_hook_service_invoker(self, service_name, hook_type):
""" Returns a function that will invoke ooks or None if a given service does not implement input hook_type.
"""
# Do not continue if we already know that user did not override the hook method
if not self.is_hook_overridden(service_name, hook_type):
return
def _invoke_hook_service(*args, **kwargs):
""" A function to invoke hook services.
"""
ctx = self.hook_ctx_class(hook_type, *args, **kwargs)
return self.invoke_func(service_name, {'ctx':ctx}, serialize=False).getvalue(serialize=False)['response']
return _invoke_hook_service
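# ################################################################################################################################

# A hypothetical usage sketch, not part of the original module - the service name,
# hook type and context class below are illustrative assumptions only.

def _example_hook_tool_usage(server):
    """ Shows how a HookTool could be wired up, assuming a server object
    that exposes the attributes used above (service_store and an invoke method).
    """
    class ExampleHookCtx(object):
        def __init__(self, hook_type, msg=None):
            self.hook_type = hook_type
            self.msg = msg

    hook_tool = HookTool(server, ExampleHookCtx, {'before_publish': 'before_publish_hook'}, server.invoke)

    # Returns None if the service does not override the hook method
    invoke_hook = hook_tool.get_hook_service_invoker('my.service', 'before_publish')
    if invoke_hook:
        return invoke_hook(msg='sample-message')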
# ################################################################################################################################
# end of file: src/zato/common/util/hook.py
# stdlib
from datetime import datetime, timedelta
from time import time
import logging
# Arrow
import arrow
# tzlocal
from tzlocal import get_localzone
# Python 2/3 compatibility
from past.builtins import unicode
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
_epoch = datetime.utcfromtimestamp(0) # Start of UNIX epoch
local_tz = get_localzone()
# ################################################################################################################################
def datetime_to_ms(dt):
""" Converts a datetime object to a number of milliseconds since UNIX epoch.
"""
return (dt - _epoch).total_seconds() * 1000
# ################################################################################################################################
def utcnow_as_ms(_time=time):
""" Returns current UTC time in milliseconds since epoch. As of now, uses time.time but may eventually choose
to use alternative implementations on different systems.
"""
return _time()
# ################################################################################################################################
def datetime_from_ms(ms, isoformat=True):
""" Converts a number of milliseconds since UNIX epoch to a datetime object.
"""
value = _epoch + timedelta(milliseconds=ms)
return value.isoformat() if isoformat else value
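# ################################################################################################################################

# An illustrative round-trip, not part of the original module - datetime_to_ms
# and datetime_from_ms are inverses of each other, modulo the ISO-8601 formatting:
#
#   datetime_to_ms(datetime(2021, 1, 1))     -> 1609459200000.0
#   datetime_from_ms(1609459200000.0)        -> '2021-01-01T00:00:00'
#   datetime_from_ms(1609459200000.0, False) -> datetime(2021, 1, 1, 0, 0)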
# ################################################################################################################################
class TimeUtil(object):
""" A thin layer around Arrow's date/time handling library customized for our needs.
Default format is always taken from ISO 8601 (so it's sorted lexicographically)
and default timezone is always UTC.
"""
# ################################################################################################################################
def now(self, format='YYYY-MM-DD HH:mm:ss', tz=local_tz.zone, needs_format=True, delta=None):
""" Returns now in a specified timezone.
"""
now = arrow.now(tz=tz)
if delta:
now = now + delta
if needs_format:
return now.format(format)
return now
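# Illustrative results, not part of the original module - actual values depend
# on the current time and the local timezone:
#
#   TimeUtil().now()                          -> e.g. '2021-01-01 12:30:00'
#   TimeUtil().now(needs_format=False)        -> an Arrow instance in the local timezone
#   TimeUtil().now(delta=timedelta(hours=2))  -> now shifted two hours ahead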
# ################################################################################################################################
def yesterday(self, format='YYYY-MM-DD HH:mm:ss', tz=local_tz.zone, needs_format=True):
return self.now(format, tz, needs_format, delta=timedelta(days=-1))
# ################################################################################################################################
def tomorrow(self, format='YYYY-MM-DD HH:mm:ss', tz=local_tz.zone, needs_format=True):
return self.now(format, tz, needs_format, delta=timedelta(days=1))
# ################################################################################################################################
def utcnow(self, format='YYYY-MM-DD HH:mm:ss', needs_format=True):
""" Returns now in UTC formatted as given in 'format'.
"""
return self.now(format, 'UTC', needs_format)
# ################################################################################################################################
def today(self, format='YYYY-MM-DD', tz=local_tz.zone, needs_format=True):
""" Returns current day in a given timezone.
"""
now = arrow.now(tz=tz)
today = arrow.Arrow(year=now.year, month=now.month, day=now.day)
if tz != 'UTC':
today = today.to(tz)
if needs_format:
return today.format(format)
else:
return today
# ################################################################################################################################
def isonow(self, tz=local_tz.zone, needs_format=True, _format='YYYY-MM-DDTHH:mm:ss.SSSSSS'):
return self.now(_format, tz, needs_format)
# ################################################################################################################################
def isoutcnow(self, needs_format=True, _format='YYYY-MM-DDTHH:mm:ss.SSSSSS'):
return self.utcnow(_format, needs_format)
# ################################################################################################################################
def reformat(self, value, from_, to):
""" Reformats value from one datetime format to another, for instance
from 23-03-2013 to 03/23/13 (DD-MM-YYYY to MM/DD/YY).
"""
try:
# Arrow compares to str, not basestring
value = str(value) if isinstance(value, unicode) else value
from_ = str(from_) if isinstance(from_, unicode) else from_
return arrow.get(value, from_).format(to)
except Exception:
logger.error('Could not reformat value:`%s` from:`%s` to:`%s`',
value, from_, to)
raise
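# ################################################################################################################################

# Illustrative calls, not part of the original module, using Arrow's format tokens:
#
#   TimeUtil().reformat('23-03-2013', 'DD-MM-YYYY', 'MM/DD/YY')    -> '03/23/13'
#   TimeUtil().reformat('2013-03-23', 'YYYY-MM-DD', 'DD MMM YYYY') -> '23 Mar 2013'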
# ################################################################################################################################
# end of file: src/zato/common/util/time_.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
# Bunch
from bunch import Bunch
# Python 2/3 compatibility
from builtins import bytes
from past.builtins import basestring
# Zato
from zato.common.const import SECRETS
# ################################################################################################################################
def resolve_value(key, value, decrypt_func=None, _default=object(), _secrets=SECRETS):
""" Resolves final value of a given variable by looking it up in environment if applicable.
"""
# Skip non-resolvable items
if not isinstance(value, basestring):
return value
if not value:
return value
value = value.decode('utf8') if isinstance(value, bytes) else value
# It may be an environment variable ..
if value.startswith('$'):
# .. but not if it's $$ which is a signal to skip this value ..
if value.startswith('$$'):
return value
# .. a genuine pointer to an environment variable.
else:
env_key = value[1:].strip().upper()
value = os.environ.get(env_key, _default)
# Use a placeholder if the actual environment key is missing
if value is _default:
value = 'ENV_KEY_MISSING_{}'.format(env_key)
# It may be an encrypted value
elif key in _secrets.PARAMS and value.startswith(_secrets.PREFIX):
value = decrypt_func(value)
# Pre-processed, we can assign this pair to output
return value
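# ################################################################################################################################

# Illustrative behaviour, not part of the original module - assuming PORT=8080
# is set in the environment:
#
#   resolve_value('port', '$port')    -> '8080'                    (environment lookup)
#   resolve_value('port', '$$port')   -> '$$port'                  ($$ = skip resolution)
#   resolve_value('port', '$missing') -> 'ENV_KEY_MISSING_MISSING' (placeholder value)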
# ################################################################################################################################
def resolve_env_variables(data):
""" Given a Bunch instance on input, iterates over all items and resolves all keys/values to ones extracted
from environment variables.
"""
out = Bunch()
for key, value in data.items():
out[key] = resolve_value(None, value)
return out
# ################################################################################################################################
# end of file: src/zato/common/util/config.py
from __future__ import absolute_import, division, print_function, unicode_literals
# Requests
from requests import Session as RequestsSession
# Zato
from zato.common.api import CACHE, NotGiven
from zato.common.crypto.api import ServerCryptoManager
from zato.common.json_internal import dumps
from zato.common.util.api import as_bool, get_config, get_odb_session_from_server_dir, get_repo_dir_from_component_dir
from zato.common.odb.model import Cluster, HTTPBasicAuth, Server
# ################################################################################################################################
if 0:
from requests import Response as RequestsResponse
RequestsResponse = RequestsResponse
# ################################################################################################################################
# Maps cache operations to HTTP verbs
op_verb_map = {
'get': 'GET',
'set': 'POST',
'delete': 'DELETE'
}
# ################################################################################################################################
# ################################################################################################################################
class CommandConfig(object):
__slots__ = 'command', 'modifier', 'key', 'value', 'is_string_key', 'is_int_key', 'is_string_value', 'is_int_value', \
'is_bool_value', 'format'
def __init__(self):
self.command = None # type: str
self.modifier = None # type: str
self.key = None # type: str
self.value = None # type: str
self.is_string_key = None # type: bool
self.is_int_key = None # type: bool
self.is_string_value = None # type: bool
self.is_int_value = None # type: bool
self.is_bool_value = None # type: bool
self.format = None # type: str
def to_dict(self):
out = {}
for name in self.__slots__:
out[name] = getattr(self, name)
return out
# ################################################################################################################################
# ################################################################################################################################
class CommandResponse(object):
__slots__ = 'key', 'text', 'has_value'
def __init__(self):
self.key = None # type: object
self.text = None # type: str
self.has_value = None # type: bool
# ################################################################################################################################
# ################################################################################################################################
class Client(object):
""" An HTTP-based Zato cache client.
"""
__slots__ = 'address', 'username', 'password', 'cache_name', 'session'
def __init__(self):
self.address = None # type: str
self.username = None # type: str
self.password = None # type: str
self.cache_name = None # type: str
self.session = None # type: RequestsSession
# ################################################################################################################################
@staticmethod
def from_server_conf(server_dir, cache_name, is_https):
# type: (str, str, bool) -> Client
repo_dir = get_repo_dir_from_component_dir(server_dir)
cm = ServerCryptoManager.from_repo_dir(None, repo_dir, None)
secrets_conf = get_config(repo_dir, 'secrets.conf', needs_user_config=False)
config = get_config(repo_dir, 'server.conf', crypto_manager=cm, secrets_conf=secrets_conf)
session = None
password = None
try:
session = get_odb_session_from_server_dir(server_dir)
cluster = session.query(Server).\
filter(Server.token == config.main.token).\
one().cluster # type: Cluster
security = session.query(HTTPBasicAuth).\
filter(Cluster.id == HTTPBasicAuth.cluster_id).\
filter(HTTPBasicAuth.username == CACHE.API_USERNAME).\
filter(HTTPBasicAuth.cluster_id == cluster.id).\
first() # type: HTTPBasicAuth
if security:
password = security.password
finally:
if session:
session.close()
return Client.from_dict({
'username': CACHE.API_USERNAME,
'password': password,
'address': config.main.gunicorn_bind,
'cache_name': cache_name,
'is_https': is_https,
})
# ################################################################################################################################
@staticmethod
def from_dict(config):
# type: (dict) -> Client
client = Client()
client.username = config['username']
client.password = config['password']
client.cache_name = config['cache_name']
if config['address'].startswith('http'):
address = config['address']
else:
address = 'http{}://{}'.format('s' if config['is_https'] else '', config['address'])
client.address = address
session = RequestsSession()
if client.password:
session.auth = (client.username, client.password)
client.session = session
return client
# ################################################################################################################################
def _request(self, op, key, value=NotGiven, pattern='/zato/cache/{}', op_verb_map=op_verb_map):
# type: (str, str, object, str, dict) -> str
# Build a full address
path = pattern.format(key)
address = '{}{}'.format(self.address, path)
# Get the HTTP verb to use in the request
verb = op_verb_map[op] # type: str
data = {
'cache': self.cache_name,
'return_prev': True
}
if value is not NotGiven:
data['value'] = value
data = dumps(data)
response = self.session.request(verb, address, data=data) # type: RequestsResponse
return response.text
# ################################################################################################################################
def run_command(self, config):
# type: (CommandConfig) -> CommandResponse
if config.value is not NotGiven:
if config.is_int_value:
value = int(config.value)
elif config.is_bool_value:
value = as_bool(config.value)
else:
value = config.value
else:
value = config.value
raw_response = self._request(config.command, config.key, value)
_response = CommandResponse()
_response.key = config.key
_response.text = raw_response
return _response
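# ################################################################################################################################

# A hypothetical usage sketch, not part of the original module - the address,
# credentials and cache name below are illustrative assumptions only.

def _example_cache_client():
    client = Client.from_dict({
        'username': 'user1',
        'password': 'secret',
        'address': 'localhost:17010',
        'cache_name': 'default',
        'is_https': False,
    })
    config = CommandConfig()
    config.command = 'get'
    config.key = 'my-key'
    config.value = NotGiven
    response = client.run_command(config)
    return response.text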
# ################################################################################################################################
# ################################################################################################################################
# end of file: src/zato/common/util/cache.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import Formatter
# ################################################################################################################################
# ################################################################################################################################
# Based on http://stackoverflow.com/questions/384076/how-can-i-make-the-python-logging-output-to-be-colored
class ColorFormatter(Formatter):
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
RESET_SEQ = '\033[0m'
COLOR_SEQ = '\033[1;%dm'
BOLD_SEQ = '\033[1m'
COLORS = {
'WARNING': YELLOW,
'INFO': WHITE,
'DEBUG': BLUE,
'CRITICAL': YELLOW,
'ERROR': RED,
'TRACE1': YELLOW
}
def __init__(self, fmt):
self.use_color = True
super(ColorFormatter, self).__init__(fmt)
# ################################################################################################################################
def formatter_msg(self, msg, use_color=True):
if use_color:
msg = msg.replace('$RESET', self.RESET_SEQ).replace('$BOLD', self.BOLD_SEQ)
else:
msg = msg.replace('$RESET', '').replace('$BOLD', '')
return msg
# ################################################################################################################################
def format(self, record):
levelname = record.levelname
if self.use_color and levelname in self.COLORS:
fore_color = 30 + self.COLORS[levelname]
levelname_color = self.COLOR_SEQ % fore_color + levelname + self.RESET_SEQ
record.levelname = levelname_color
return Formatter.format(self, record)
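# ################################################################################################################################

# A minimal wiring sketch, not part of the original module - the format string
# is an illustrative assumption:

def _example_color_logging():
    import logging
    handler = logging.StreamHandler()
    handler.setFormatter(ColorFormatter('%(asctime)s - %(levelname)s - %(message)s'))
    demo_logger = logging.getLogger('color-demo')
    demo_logger.addHandler(handler)
    demo_logger.setLevel(logging.DEBUG)
    demo_logger.info('Hello')  # 'INFO' is rendered in white
    demo_logger.error('Oops')  # 'ERROR' is rendered in red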
# ################################################################################################################################
# ################################################################################################################################
# end of file: src/zato/common/util/logging_.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
from base64 import b64decode, b64encode
# Python 2/3 compatibility
from past.builtins import unicode
from six import PY2
# Zato
from zato.common.api import AUTH_RESULT
logger = getLogger('zato')
def parse_basic_auth(auth, prefix='Basic '):
""" Parses username/password out of incoming HTTP Basic Auth data.
"""
if not auth:
raise ValueError('No auth received in `{}` ({})'.format(auth, AUTH_RESULT.BASIC_AUTH.NO_AUTH))
if not auth.startswith(prefix):
raise ValueError('Invalid prefix in `{}` ({})'.format(auth, AUTH_RESULT.BASIC_AUTH.NO_AUTH))
_, auth = auth.split(prefix)
auth = b64decode(auth.strip())
auth = auth if PY2 else auth.decode('utf8')
return auth.split(':', 1)
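# ################################################################################################################################

# Illustrative call, not part of the original module - 'dXNlcjE6c2VjcmV0' is
# the Base64 encoding of 'user1:secret':
#
#   parse_basic_auth('Basic dXNlcjE6c2VjcmV0') -> ['user1', 'secret']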
# ################################################################################################################################
# ################################################################################################################################
# Code below comes from another project - it will be moved elsewhere at one point, hence the location of imports and definitions
# ################################################################################################################################
# ################################################################################################################################
# stdlib
from hashlib import sha1
from datetime import datetime
# Python 2/3 compatibility
from future.moves.urllib.parse import quote_plus
# lxml
from lxml import etree
# PyYAML
from yaml import dump
try:
from yaml import CDumper as Dumper
except ImportError: # pragma: no cover
from yaml import Dumper # pragma: no cover
# ################################################################################################################################
# ################################################################################################################################
class AuthResult(object):
""" Represents the result of validating a URL against the config. 'status' is the main boolean flag indicating
whether the validation was successful or not. 'code' equal to '0' means success and any other value is a failure,
note that 'code' may be a multi-character string including punctuation. 'description' is an optional attribute holding
any additional textual information a callee might wish to pass to the calling layer. 'auth_info' is either
an empty string or information regarding the authorization data presented by the calling application.
Instances of this class are considered True or False in boolean comparisons
according to the boolean value of self.status.
"""
def __init__(self, status=False, code='-1', description=''):
self.status = status
self.code = code
self.description = description
self._auth_info = b''
@property
def auth_info(self):
return self._auth_info
@auth_info.setter
def auth_info(self, value):
self._auth_info = dump(value, Dumper=Dumper)
def __repr__(self):
return '<{0} at {1} status={2} code={3} description={4} auth_info={5}>'.format(
self.__class__.__name__, hex(id(self)), self.status, self.code,
self.description, self.auth_info)
def __bool__(self):
""" Returns the boolean value of self.status. Useful when an instance
must be compared in a boolean context.
"""
return bool(self.status)
__nonzero__ = __bool__
# ################################################################################################################################
# ################################################################################################################################
class SecurityException(Exception):
""" Indicates problems with validating incoming requests. The 'description'
attribute holds textual information suitable for showing to human users.
"""
def __init__(self, description):
self.description = description
# ################################################################################################################################
# ################################################################################################################################
AUTH_WSSE_NO_DATA = '0003.0001'
AUTH_WSSE_VALIDATION_ERROR = '0003.0002'
AUTH_BASIC_NO_AUTH = '0004.0001'
AUTH_BASIC_INVALID_PREFIX = '0004.0002'
AUTH_BASIC_USERNAME_OR_PASSWORD_MISMATCH = '0004.0003'
# ################################################################################################################################
# ################################################################################################################################
def on_wsse_pwd(wsse, url_config, data, needs_auth_info=True):
""" Visit _RequestApp._on_wsse_pwd method's docstring.
"""
if not data:
return AuthResult(False, AUTH_WSSE_NO_DATA)
request = etree.fromstring(data)
try:
ok, wsse_username = wsse.validate(request, url_config)
except SecurityException as e:
return AuthResult(False, AUTH_WSSE_VALIDATION_ERROR, e.description)
else:
auth_result = AuthResult(True, '0')
if needs_auth_info:
auth_result.auth_info = {b'wsse-pwd-username': str(wsse_username)}
return auth_result
# ################################################################################################################################
# ################################################################################################################################
def _on_basic_auth(auth, expected_username, expected_password):
""" A low-level call for checking the HTTP Basic Auth credentials.
"""
if not auth:
return AUTH_BASIC_NO_AUTH
prefix = 'Basic '
if not auth.startswith(prefix):
return AUTH_BASIC_INVALID_PREFIX
_, auth = auth.split(prefix)
auth = auth.strip()
auth = b64decode(auth)
auth = auth if isinstance(auth, unicode) else auth.decode('utf8')
username, password = auth.split(':', 1)
if username == expected_username and password == expected_password:
return True
else:
return AUTH_BASIC_USERNAME_OR_PASSWORD_MISMATCH
# ################################################################################################################################
# ################################################################################################################################
def on_basic_auth(env, url_config, needs_auth_info=True):
""" Visit _RequestApp._on_basic_auth method's docstring.
"""
username = url_config['basic-auth-username']
result = _on_basic_auth(env.get('HTTP_AUTHORIZATION', ''), username, url_config['basic-auth-password'])
is_success = result is True # Yes, need to check for True
auth_result = AuthResult(is_success)
if is_success:
if needs_auth_info:
auth_result.auth_info = {b'basic-auth-username': quote_plus(username).encode('utf-8')}
else:
auth_result.code = result
return auth_result
# ################################################################################################################################
# ################################################################################################################################
soap_date_time_format = '%Y-%m-%dT%H:%M:%S.%fZ'
soapenv_namespace = 'http://schemas.xmlsoap.org/soap/envelope/'
soap_body_path = '/soapenv:Envelope/soapenv:Body'
soap_body_xpath = etree.XPath(soap_body_path, namespaces={'soapenv':soapenv_namespace})
wsse_namespace = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd'
wsu_namespace = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd'
wss_namespaces = {'soapenv':soapenv_namespace, 'wsse':wsse_namespace, 'wsu':wsu_namespace}
wsse_password_type_text = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText'
wsse_password_type_digest = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordDigest'
supported_wsse_password_types = (wsse_password_type_text, wsse_password_type_digest)
wsse_username_token_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken'
wsse_username_token_xpath = etree.XPath(wsse_username_token_path, namespaces=wss_namespaces)
wsse_username_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Username'
wsse_username_xpath = etree.XPath(wsse_username_path, namespaces=wss_namespaces)
wsse_password_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Password'
wsse_password_xpath = etree.XPath(wsse_password_path, namespaces=wss_namespaces)
wsse_password_type_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Password/@Type'
wsse_password_type_xpath = etree.XPath(wsse_password_type_path, namespaces=wss_namespaces)
wsse_nonce_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Nonce'
wsse_nonce_xpath = etree.XPath(wsse_nonce_path, namespaces=wss_namespaces)
wsu_username_created_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsu:Created'
wsu_username_created_xpath = etree.XPath(wsu_username_created_path, namespaces=wss_namespaces)
class WSSE(object):
""" Implements authentication using WS-Security.
"""
def _replace_username_token_elem(self, soap, old_elem, attr_name):
""" A utility function for replacing passwords and nonces with '***'
for the purpose of logging messages without worrying about disclosing
any data known to be secret.
"""
old_elem = old_elem[0]
attr = old_elem.get(attr_name)
username_token = wsse_username_token_xpath(soap)
if not username_token:
self.error(expected_element=wsse_username_token_path)
username_token = username_token[0]
elem_idx = username_token.index(old_elem)
username_token.remove(old_elem)
new_elem = etree.Element(old_elem.tag)
new_elem.set(attr_name, attr)
new_elem.text = '***'
username_token.insert(elem_idx, new_elem)
return old_elem.text, attr
def _get_digest(self, password, nonce, created):
""" Returns the password's expected digest.
"""
nonce = b64decode(nonce)
concat = nonce + created + password
h = sha1()
h.update(concat)
return b64encode(h.digest()).rstrip('\n')
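# Note (added for clarity, not part of the original module): per the WS-Security
# UsernameToken profile, the value computed above is Base64(SHA-1(nonce + created + password)),
# with the nonce Base64-decoded first - validate() below compares it with the
# digest sent by the client.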
def error(self, description='', expected_element='', soap=None):
""" A utility function for exceptions in erronous situations. May be
subclassed if error reporting needs to be customized. The 'soap'
parameter is guaranteed to have WSSE password and token replaced
with '***' characters. Note that the default implementation doesn't use
the 'soap' parameter, however subclasses are free to do so.
"""
msg = description
if expected_element:
if description:
msg += '. '
msg += 'Element [{0}] doesn\'t exist'.format(expected_element)
raise SecurityException(msg)
def check_nonce(self, wsse_nonce, now, nonce_freshness_time):
""" Checks whether the nonce has been already seen. Default implementation
lets all nonces in. More sophisticated subclasses may wish to override
this method and check the nonce against a cache of some sort.
"""
return False
def on_invalid_username(self, config, given, message):
""" Invoked when the expected and given usernames don't match.
"""
self.error('Invalid username or password')
def on_invalid_password(self, config, given_username, given_password, message):
""" Invoked when the expected and given passwords don't match.
"""
self.error('Invalid username or password')
def on_username_token_expired(self, config, elapsed, message):
""" Invoked when the username token has been found to be expired.
"""
self.error('UsernameToken has expired')
def on_nonce_non_unique(self, config, nonce, now, message):
""" Invoked when the nonce has been found not to be unique.
"""
self.error('Nonce [{0}] is not unique'.format(nonce))
def validate(self, soap, config):
# Shadow the password and a nonce before any processing, getting
# their values along the way.
wsse_password = wsse_password_xpath(soap)
if wsse_password:
wsse_password, wsse_password_type = self._replace_username_token_elem(soap, wsse_password, 'Type')
wsse_nonce = wsse_nonce_xpath(soap)
if wsse_nonce:
wsse_nonce, wsse_encoding_type = self._replace_username_token_elem(soap, wsse_nonce, 'EncodingType')
wsse_username = wsse_username_xpath(soap)
if not wsse_username:
self.error('No username sent', wsse_username_path, soap)
wsse_username = wsse_username[0].text
if config['wsse-pwd-username'] != wsse_username:
self.on_invalid_username(config, wsse_username, soap)
if not wsse_password_type:
self.error('No password type sent', wsse_password_type_path, soap)
if not wsse_password_type in supported_wsse_password_types:
msg = 'Unsupported password type=[{0}], not in [{1}]'.format(wsse_password_type, supported_wsse_password_types)
self.error(msg, soap=soap)
now = datetime.utcnow()
if config['wsse-pwd-reject-empty-nonce-creation']:
wsu_username_created = wsu_username_created_xpath(soap)
if not all((wsse_nonce, wsu_username_created)):
self.error('Both nonce and creation timestamp must be given', soap=soap)
else:
if wsu_username_created:
wsu_username_created = wsu_username_created[0].text
# Check nonce freshness and report error if the UsernameToken is stale.
token_created = datetime.strptime(wsu_username_created, soap_date_time_format)
elapsed = (now - token_created)
if config['wsse-pwd-reject-stale-tokens'] and elapsed.seconds > config['wsse-pwd-reject-expiry-limit']:
self.on_username_token_expired(config, elapsed, soap)
if config.get('wsse-pwd-password-digest'):
expected_password = self._get_digest(config['wsse-pwd-password'], wsse_nonce, wsu_username_created)
else:
expected_password = config.get('wsse-pwd-password')
if wsse_password != expected_password:
self.on_invalid_password(config, wsse_username, wsse_password, soap)
# Have we already seen such a nonce?
if self.check_nonce(wsse_nonce, now, config.get('wsse-pwd-nonce-freshness-time')):
self.on_nonce_non_unique(config, wsse_nonce, now, soap)
# All good, we let the client in.
return True, wsse_username
# ################################################################################################################################
# ################################################################################################################################
# end of file: src/zato/common/util/auth.py
# stdlib
from datetime import datetime, timedelta
from logging import getLogger
from mmap import mmap
from time import sleep
from traceback import format_exc
# posix-ipc
import posix_ipc as ipc
# Zato
from zato.common.json_internal import dumps, loads
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
_shmem_pattern = '/zato-shmem-{}'
# ################################################################################################################################
# ################################################################################################################################
class SharedMemoryIPC(object):
""" An IPC object which Zato processes use to communicate with each other using mmap files
backed by shared memory. All data in shared memory is kept as a dictionary and serialized as JSON
each time any read or write is needed.
"""
key_name = '<invalid>'
def __init__(self):
self.shmem_name = ''
self.size = -1
self._mmap = None
self.running = False
# ################################################################################################################################
def create(self, shmem_suffix, size, needs_create):
""" Creates all IPC structures.
"""
self.shmem_name = _shmem_pattern.format(shmem_suffix)
self.size = size
# Create or read shared memory
logger.debug('%s shmem `%s` (%s %s)', 'Creating' if needs_create else 'Opening', self.shmem_name,
self.size, self.key_name)
try:
self._mem = ipc.SharedMemory(self.shmem_name, ipc.O_CREAT if needs_create else 0, size=self.size)
except ipc.ExistentialError:
raise ValueError('Could not create shmem `{}` ({}), e:`{}`'.format(self.shmem_name, self.key_name, format_exc()))
# Map memory to mmap
self._mmap = mmap(self._mem.fd, self.size)
# Write initial data so that JSON .loads always succeeds
self.store_initial()
self.running = True
# ################################################################################################################################
def store(self, data):
""" Serializes input data as JSON and stores it in RAM, overwriting any previous data.
"""
self._mmap.seek(0)
self._mmap.write(dumps(data).encode('utf8'))
self._mmap.flush()
# ################################################################################################################################
def store_initial(self):
""" Stores initial data in shmem unless there is already data in there.
"""
if self.load(False):
return
else:
self.store({})
# ################################################################################################################################
def load(self, needs_loads=True):
""" Reads in all data from RAM and, optionally, loads it as JSON.
"""
self._mmap.seek(0)
data = self._mmap.read(self.size).strip(b'\x00')
return loads(data.decode('utf8')) if needs_loads else data
# ################################################################################################################################
def close(self):
""" Closes all underlying in-RAM structures.
"""
if not self.running:
logger.debug('Skipped close, IPC not running (%s)', self.key_name)
return
else:
logger.info('Closing IPC (%s)', self.key_name)
self._mmap.close()
try:
self._mem.unlink()
except ipc.ExistentialError:
pass
# ################################################################################################################################
def get_parent(self, parent_path, needs_data=True):
""" Returns element pointed to by parent_path, creating all elements along the way, if neccessary.
"""
data = self.load()
parent_path = [elem for elem in parent_path.split('/') if elem]
# Find or create element that is parent of input key
current = data
while parent_path:
next = parent_path.pop(0)
current = current.setdefault(next, {})
return (data, current) if needs_data else current
# ################################################################################################################################
def set_key(self, parent, key, value):
""" Set key to value under element called 'parent'.
"""
# Get parent to add our key to - will create it if needed
data, parent = self.get_parent(parent)
# Set key to value
parent[key] = value
# Save it all back
self.store(data)
# ################################################################################################################################
def _get_key(self, parent, key):
""" Low-level implementation of get_key which does not handle timeouts.
"""
parent = self.get_parent(parent, False)
return parent[key]
# ################################################################################################################################
def get_key(self, parent, key, timeout=None, _sleep=sleep, _utcnow=datetime.utcnow):
""" Returns a specific key from parent dictionary.
"""
try:
return self._get_key(parent, key)
except KeyError:
if timeout:
now = _utcnow()
start = now
until = now + timedelta(seconds=timeout)
idx = 0
while now <= until:
try:
value = self._get_key(parent, key)
if value:
msg = 'Returning value `%s` for parent/key `%s` `%s` after %s'
logger.info(msg, value, parent, key, now - start)
return value
except KeyError:
_sleep(0.1)
idx += 1
if idx % 10 == 0:
logger.info('Waiting for parent/key `%s` `%s` (timeout: %ss)', parent, key, timeout)
now = _utcnow()
# We get here if we did not return the key within timeout seconds,
# in which case we need to log an error and raise an exception.
# Same message for logger and exception
msg = 'Could not get parent/key `{}` `{}` after {}s'.format(parent, key, timeout)
logger.warn(msg)
raise KeyError(msg)
# No exception = re-raise exception immediately
else:
raise
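# ################################################################################################################################

# A hypothetical usage sketch, not part of the original module - the shmem
# suffix and size below are illustrative assumptions only.

def _example_shared_memory_ipc():
    ipc_obj = SharedMemoryIPC()
    ipc_obj.create('example-suffix', 100_000, needs_create=True)
    try:
        ipc_obj.set_key('/example/parent', 'my-key', 'my-value')
        return ipc_obj.get_key('/example/parent', 'my-key', timeout=1)
    finally:
        ipc_obj.close()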
# ################################################################################################################################
# ################################################################################################################################
class ServerStartupIPC(SharedMemoryIPC):
""" A shared memory-backed IPC object for server startup initialization.
"""
key_name = '/pubsub/pid'
def create(self, deployment_key, size, needs_create=True):
super(ServerStartupIPC, self).create('server-{}'.format(deployment_key), size, needs_create)
def set_pubsub_pid(self, pid):
self.set_key(self.key_name, 'current', pid)
def get_pubsub_pid(self, timeout=60):
return self.get_key(self.key_name, 'current', timeout)
# ################################################################################################################################
# ################################################################################################################################
class ConnectorConfigIPC(SharedMemoryIPC):
""" A shared memory-backed IPC object for configuration of subprocess-based containers.
"""
needs_create = False
key_name = '/connector/config'
def create(self, deployment_key, size, needs_create=True):
super(ConnectorConfigIPC, self).create('connector-config-{}'.format(deployment_key), size, needs_create)
def set_config(self, connector_key, config):
self.set_key(self.key_name, connector_key, config)
def get_config(self, connector_key, timeout=60, as_dict=False):
response = self.get_key(self.key_name, connector_key, timeout)
if response:
return loads(response) if as_dict else response
# ################################################################################################################################
# ################################################################################################################################
class CommandStoreIPC(SharedMemoryIPC):
""" A shared memory-backed IPC object for CLI commands used by Zato.
"""
needs_create = False
key_name = '/cli/command/store'
def create(self, size=100_000, needs_create=True):
super(CommandStoreIPC, self).create('cli-command-store', size, needs_create)
def add_parser(self, parser_data):
self.set_key(self.key_name, 'parser', parser_data)
def get_config(self, timeout=3):
return self.get_key(self.key_name, 'parser', timeout)
# ################################################################################################################################
# ################################################################################################################################
# end of file: src/zato/common/util/posix_ipc_.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import sys
import traceback
from logging import getLogger
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
def get_current_stack():
sep = '*' * 80
out = ['\n', sep]
for line in traceback.format_stack():
out.append(line.strip())
out.append(sep)
return '\n'.join(out)
# ################################################################################################################################
# ################################################################################################################################
def log_current_stack():
logger.info(get_current_stack())
# ################################################################################################################################
# ################################################################################################################################
# Taken from https://stackoverflow.com/a/16589622
def get_full_stack():
exc = sys.exc_info()[0]
stack = traceback.extract_stack()[:-1] # last one would be full_stack()
if exc is not None: # i.e. if an exception is present
del stack[-1] # remove call of full_stack, the printed exception will contain the caught exception caller instead
trace = 'Traceback (most recent call last):\n'
stack_string = trace + ''.join(traceback.format_list(stack))
if exc is not None:
    stack_string += ' '
    # Strip the duplicate 'Traceback ...' header from the exception's own output;
    # note that str.lstrip(trace) would strip a character set, not the prefix.
    exc_string = traceback.format_exc()
    if exc_string.startswith(trace):
        exc_string = exc_string[len(trace):]
    stack_string += exc_string
return stack_string
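# ################################################################################################################################

# Illustrative call, not part of the original module:
#
#   try:
#       1/0
#   except ZeroDivisionError:
#       logger.warning(get_full_stack())  # the call stack plus the ZeroDivisionError traceback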
# ################################################################################################################################
# ################################################################################################################################
# end of file: src/zato/common/util/python_.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from itertools import chain
from logging import DEBUG, getLogger
# Bunch
from bunch import bunchify
# gevent
from gevent import sleep
# SQLAlchemy
from sqlalchemy.exc import InternalError as SAInternalError, OperationalError as SAOperationalError
# Zato
from zato.common.api import GENERIC, SEARCH
from zato.common.json_internal import dumps, loads
from zato.common.odb.model import Base, SecurityBase
from zato.common.util.search import SearchResults
# ################################################################################################################################
logger_zato = getLogger('zato')
logger_pubsub = getLogger('zato_pubsub')
has_debug = logger_zato.isEnabledFor(DEBUG) or logger_pubsub.isEnabledFor(DEBUG)
# ################################################################################################################################
_default_page_size = SEARCH.ZATO.DEFAULTS.PAGE_SIZE
_max_page_size = _default_page_size * 5
# All exceptions that can be raised when deadlocks occur
_DeadlockException = (SAInternalError, SAOperationalError)
# In MySQL, 1213 = 'Deadlock found when trying to get lock; try restarting transaction'
# but the underlying PyMySQL library returns only a string rather than an integer code.
_deadlock_code = 'Deadlock found when trying to get lock'
_zato_opaque_skip_attrs=set(['needs_details', 'paginate', 'cur_page', 'query'])
# ################################################################################################################################
def search(search_func, config, filter_by, session=None, cluster_id=None, *args, **kwargs):
""" Adds search criteria to an SQLAlchemy query based on current search configuration.
"""
try:
cur_page = int(config.get('cur_page', 1))
except(ValueError, TypeError):
cur_page = 1
try:
page_size = min(int(config.get('page_size', _default_page_size)), _max_page_size)
except(ValueError, TypeError):
page_size = _default_page_size
# We need to subtract 1 because externally our API exposes human-readable numbers,
# i.e. starting from 1, not 0, but internally the database needs 0-based slices.
if cur_page > 0:
cur_page -= 1
kwargs = {
'cur_page': cur_page,
'page_size': page_size,
'filter_by': filter_by,
'where': kwargs.get('where'),
'filter_op': kwargs.get('filter_op'),
'data_filter': kwargs.get('data_filter'),
}
query = config.get('query')
if query:
query = query.strip().split()
if query:
kwargs['query'] = query
result = search_func(session, cluster_id, *args, **kwargs)
# Fills out all the search-related information
result.set_data(cur_page, page_size)
return result
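# ################################################################################################################################

# A hypothetical invocation sketch, not part of the original module - my_search_func,
# session and MyModel stand in for an actual SQL search function and its arguments:
#
#   config = {'cur_page': 2, 'page_size': 25, 'query': 'billing customer'}
#   result = search(my_search_func, config, filter_by=MyModel.name, session=session, cluster_id=1)
#   # my_search_func receives cur_page=1 (0-based), page_size=25 and query=['billing', 'customer']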
# ################################################################################################################################
def sql_op_with_deadlock_retry(cid, name, func, *args, **kwargs):
cid = cid or None
attempts = 0
while True:
attempts += 1
if has_debug:
logger_zato.info('In sql_op_with_deadlock_retry, %s %s %s %s %r %r', attempts, cid, name, func, args, kwargs)
try:
# Call the SQL function that will possibly result in a deadlock
func(*args, **kwargs)
if has_debug:
logger_zato.info('In sql_op_with_deadlock_retry, returning True')
# This will return only if there is no exception in calling the SQL function
return True
# Catch deadlocks - it may happen because both this function and delivery tasks update the same tables
except _DeadlockException as e:
if has_debug:
logger_zato.warn('Caught _DeadlockException `%s` `%s`', cid, e)
if _deadlock_code not in e.args[0]:
raise
else:
if attempts % 50 == 0:
msg = 'Still in deadlock for `{}` after %d attempts cid:%s args:%s'.format(name)
logger_zato.warn(msg, attempts, cid, args)
logger_pubsub.warn(msg, attempts, cid, args)
# Sleep for a while until the next attempt
sleep(0.005)
# Push the counter
attempts += 1
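# ################################################################################################################################

# A hypothetical usage sketch, not part of the original module - update_delivery
# and its arguments are illustrative assumptions:
#
#   def update_delivery(session, delivery_id):
#       ...  # an SQL operation that may deadlock under concurrent delivery tasks
#
#   sql_op_with_deadlock_retry('cid-123', 'update_delivery', update_delivery, session, 42)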
# ################################################################################################################################
# ################################################################################################################################
class ElemsWithOpaqueMaker(object):
def __init__(self, elems):
self.elems = elems
# ################################################################################################################################
@staticmethod
def get_opaque_data(elem):
return elem.get(GENERIC.ATTR_NAME)
has_opaque_data = get_opaque_data
# ################################################################################################################################
@staticmethod
def _set_opaque(elem, drop_opaque=False):
opaque = ElemsWithOpaqueMaker.get_opaque_data(elem)
opaque = loads(opaque) if opaque else {}
elem.update(opaque)
if drop_opaque:
del elem[GENERIC.ATTR_NAME]
# ################################################################################################################################
@staticmethod
def process_config_dict(config, drop_opaque=False):
ElemsWithOpaqueMaker._set_opaque(config, drop_opaque)
# ################################################################################################################################
def _process_elems(self, out, elems, _skip_class=(Base, list)):
for elem in elems:
if hasattr(elem, '_sa_class_manager'):
data = {}
for (name, _) in elem._sa_class_manager._all_sqla_attributes():
value = getattr(elem, name)
if name.startswith('__'):
continue
if isinstance(value, _skip_class):
continue
data[name] = value
else:
data = elem._asdict()
elem = bunchify(data)
ElemsWithOpaqueMaker._set_opaque(elem)
out.append(elem)
return out
# ################################################################################################################################
def _elems_with_opaque_search(self):
""" Resolves all opaque elements in search results.
"""
search_result = self.elems[0]
new_result = self._process_elems([], search_result.result)
search_result.result = new_result
return self.elems
# ################################################################################################################################
def get(self):
if isinstance(self.elems, tuple) and isinstance(self.elems[0], SearchResults):
return self._elems_with_opaque_search()
else:
return self._process_elems([], self.elems)
# ################################################################################################################################
# ################################################################################################################################
def elems_with_opaque(elems):
""" Turns a list of SQLAlchemy elements into a list of Bunch instances,
each possibly with its opaque elements already extracted to the level of each Bunch.
"""
return ElemsWithOpaqueMaker(elems).get()
# ################################################################################################################################
def parse_instance_opaque_attr(instance):
opaque = getattr(instance, GENERIC.ATTR_NAME)
opaque = loads(opaque) if opaque else None
if not opaque:
return {}
ElemsWithOpaqueMaker.process_config_dict(opaque)
return bunchify(opaque)
# ################################################################################################################################
def get_dict_with_opaque(instance, to_bunch=False):
opaque = parse_instance_opaque_attr(instance)
out = instance._asdict() if hasattr(instance, '_asdict') else instance.asdict()
for k, v in opaque.items():
out[k] = v
return bunchify(out) if to_bunch else out
# ################################################################################################################################
def set_instance_opaque_attrs(instance, input, skip=None, only=None, _zato_skip=_zato_opaque_skip_attrs):
""" Given an SQLAlchemy object instance and incoming SimpleIO-based input,
populates all opaque values of that instance.
"""
only = only or []
instance_opaque_attrs = None
instance_attrs = set(instance.asdict())
input_attrs = set(input)
if only:
input_attrs = set([elem for elem in input_attrs if elem in only])
instance_attrs = set([elem for elem in instance_attrs if elem not in only])
# Any extra input attributes will be treated as opaque ones
input_opaque_attrs = input_attrs - instance_attrs
# Skip attributes related to pagination
for name in chain(skip or [], _zato_skip):
input_opaque_attrs.discard(name)
# Prepare generic attributes for instance
if GENERIC.ATTR_NAME in instance_attrs:
instance_opaque_attrs = getattr(instance, GENERIC.ATTR_NAME)
if instance_opaque_attrs:
instance_opaque_attrs = loads(instance_opaque_attrs)
else:
instance_opaque_attrs = {}
for name in input_opaque_attrs:
instance_opaque_attrs[name] = input[name]
# Set generic attributes for instance
if instance_opaque_attrs is not None:
setattr(instance, GENERIC.ATTR_NAME, dumps(instance_opaque_attrs))
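# ################################################################################################################################

# Illustrative behaviour, not part of the original module - given an instance
# whose actual columns include 'name' but not 'timeout':
#
#   set_instance_opaque_attrs(instance, {'name': 'abc', 'timeout': 20})
#   # 'name' matches a real column so it is skipped here, while 'timeout' is
#   # serialized into the instance's GENERIC.ATTR_NAME column as JSON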
# ################################################################################################################################
def get_security_by_id(session, security_id):
return session.query(SecurityBase).\
filter(SecurityBase.id==security_id).\
one()
# ################################################################################################################################
def get_instance_by_id(session, model_class, id):
return session.query(model_class).\
filter(model_class.id==id).\
one()
# ################################################################################################################################
def get_instance_by_name(session, model_class, type_, name):
return session.query(model_class).\
filter(model_class.type_==type_).\
filter(model_class.name==name).\
one()
# ################################################################################################################################
# end of file: src/zato/common/util/sql.py
# stdlib
import copy
import errno
import gc
import imp
import inspect
import linecache
import logging
import os
import random
import re
import signal
import threading
import socket
import sys
import unicodedata
from ast import literal_eval
from base64 import b64decode
from binascii import hexlify as binascii_hexlify
from contextlib import closing
from datetime import datetime, timedelta
from getpass import getuser as getpass_getuser
from glob import glob
from hashlib import sha256
from inspect import isfunction, ismethod
from itertools import tee
from io import StringIO
from operator import itemgetter
from os.path import abspath, isabs, join
from pathlib import Path
from pprint import pprint as _pprint, PrettyPrinter
from string import Template
from subprocess import Popen, PIPE
from tempfile import NamedTemporaryFile
from threading import current_thread
from time import sleep
from traceback import format_exc
# Bunch
from bunch import Bunch, bunchify
from dateutil.parser import parse as dt_parse
# gevent
from gevent import sleep as gevent_sleep, spawn, Timeout
from gevent.greenlet import Greenlet
from gevent.hub import Hub
# lxml
from lxml import etree, objectify
# OpenSSL
from OpenSSL import crypto
# portalocker
import portalocker
# psutil
import psutil
# pytz
import pytz
# requests
import requests
# SQLAlchemy
import sqlalchemy as sa
from sqlalchemy import orm
# Texttable
from texttable import Texttable
# Python 2/3 compatibility
from builtins import bytes
from future.moves.itertools import zip_longest
from future.utils import iteritems, raise_
from past.builtins import basestring, cmp, reduce, unicode
from six import PY3
from six.moves.urllib.parse import urlparse
from zato.common.py23_ import ifilter, izip
from zato.common.py23_.spring_ import CAValidatingHTTPSConnection, SSLClientTransport
if PY3:
from functools import cmp_to_key
# Zato
from zato.common.api import CHANNEL, CLI_ARG_SEP, DATA_FORMAT, engine_def, engine_def_sqlite, HL7, KVDB, MISC, \
SECRET_SHADOW, SIMPLE_IO, TLS, TRACE1, zato_no_op_marker, ZATO_NOT_GIVEN, ZMQ
from zato.common.broker_message import SERVICE
from zato.common.const import SECRETS
from zato.common.crypto.api import CryptoManager
from zato.common.exception import ZatoException
from zato.common.ext.configobj_ import ConfigObj
from zato.common.ext.validate_ import is_boolean, is_integer, VdtTypeError
from zato.common.json_internal import dumps, loads
from zato.common.odb.model import Cluster, HTTPBasicAuth, HTTPSOAP, IntervalBasedJob, Job, Server, Service
from zato.common.util.tcp import get_free_port, is_port_taken, wait_for_zato_ping, wait_until_port_free, wait_until_port_taken
from zato.common.util.eval_ import as_bool, as_list
from zato.common.util.file_system import fs_safe_name
from zato.common.util.logging_ import ColorFormatter
from zato.common.xml_ import soap_body_path, soap_body_xpath
from zato.hl7.parser import get_payload_from_request as hl7_get_payload_from_request
# ################################################################################################################################
if 0:
from typing import Iterable as iterable
from simdjson import Parser as SIMDJSONParser
iterable = iterable
SIMDJSONParser = SIMDJSONParser
# ################################################################################################################################
random.seed()
# ################################################################################################################################
logger = logging.getLogger(__name__)
logging.addLevelName(TRACE1, "TRACE1")
_repr_template = Template('<$class_name at $mem_loc$attrs>')
_uncamelify_re = re.compile(r'((?<=[a-z])[A-Z]|(?<!\A)[A-Z](?=[a-z]))')
_epoch = datetime.utcfromtimestamp(0) # Start of UNIX epoch
cid_symbols = '0123456789abcdefghjkmnpqrstvwxyz'
encode_cid_symbols = {idx: elem for (idx, elem) in enumerate(cid_symbols)}
cid_base = len(cid_symbols)
# ################################################################################################################################
# For pyflakes
ColorFormatter = ColorFormatter
# ################################################################################################################################
asbool = as_bool
aslist = as_list
# ################################################################################################################################
_data_format_json = DATA_FORMAT.JSON
_data_format_json_like = DATA_FORMAT.JSON, DATA_FORMAT.DICT
_data_format_xml = DATA_FORMAT.XML
_data_format_hl7_v2 = HL7.Const.Version.v2.id
# ################################################################################################################################
# Kept here for backward compatibility
get_free_port = get_free_port
is_port_taken = is_port_taken
wait_until_port_free = wait_until_port_free
wait_until_port_taken = wait_until_port_taken
# ################################################################################################################################
# We can initialize it once per process here
_hostname = socket.gethostname()
_fqdn = socket.getfqdn()
_current_host = '{}/{}'.format(_hostname, _fqdn)
_current_user = getpass_getuser()
# ################################################################################################################################
TLS_KEY_TYPE = {
crypto.TYPE_DSA: 'DSA',
crypto.TYPE_RSA: 'RSA'
}
# ################################################################################################################################
def is_method(class_, func=isfunction if PY3 else ismethod):
return func(class_)
# ################################################################################################################################
def absjoin(base, path):
""" Turns a path into an absolute path if it's relative to the base location. If the path is already an absolute path,
it is returned as-is.
"""
if isabs(path):
return path
return abspath(join(base, path))
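
# Illustrative sketch of the function above - the paths are hypothetical:
#
#   absjoin('/opt/zato/server1', 'config/repo') -> '/opt/zato/server1/config/repo'
#   absjoin('/opt/zato/server1', '/tmp/abc')    -> '/tmp/abc'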
# ################################################################################################################################
def absolutize(path, base=''):
""" Turns a relative path into an absolute one or returns it as is if it's already absolute.
"""
if not isabs(path):
path = os.path.expanduser(path)
if not isabs(path):
path = os.path.normpath(os.path.join(base, path))
return path
# ################################################################################################################################
def current_host():
return _current_host
# ################################################################################################################################
def current_user(_getpass_getuser=getpass_getuser):
return _getpass_getuser()
# ################################################################################################################################
def pprint(obj):
""" Pretty-print an object into a string buffer.
"""
# Get dicts' items.
if hasattr(obj, "items"):
obj = sorted(obj.items())
buf = StringIO()
_pprint(obj, buf)
value = buf.getvalue()
buf.close()
return value
# ################################################################################################################################
def get_zato_command():
""" Returns the full path to the 'zato' command' in a buildout environment.
"""
return os.path.join(os.path.dirname(sys.executable), 'zato')
# ################################################################################################################################
def object_attrs(_object, ignore_double_underscore, to_avoid_list, sort):
attrs = dir(_object)
if ignore_double_underscore:
attrs = ifilter(lambda elem: not elem.startswith("__"), attrs)
_to_avoid_list = getattr(_object, to_avoid_list, None) # Don't swallow exceptions
if _to_avoid_list is not None:
        attrs = ifilter(lambda elem: elem not in _to_avoid_list, attrs)
if sort:
attrs = sorted(attrs)
return attrs
# ################################################################################################################################
def make_repr(_object, ignore_double_underscore=True, to_avoid_list='repr_to_avoid', sort=True):
""" Makes a nice string representation of an object, suitable for logging purposes.
"""
attrs = object_attrs(_object, ignore_double_underscore, to_avoid_list, sort)
buff = StringIO()
for attr in attrs:
attr_obj = getattr(_object, attr)
if not callable(attr_obj):
buff.write('; %s:%r' % (attr, attr_obj))
out = _repr_template.safe_substitute(
class_name=_object.__class__.__name__, mem_loc=hex(id(_object)), attrs=buff.getvalue())
buff.close()
return out
# ################################################################################################################################
def to_form(_object):
""" Reads public attributes of an object and creates a dictionary out of it;
handy for providing initial data to a Django form which isn't backed by
a true Django model.
"""
out = {}
attrs = object_attrs(_object, True, "repr_to_avoid", False)
for attr in attrs:
out[attr] = getattr(_object, attr)
return out
# ################################################################################################################################
def get_lb_client(is_tls_enabled, lb_host, lb_agent_port, ssl_ca_certs, ssl_key_file, ssl_cert_file, timeout):
""" Returns an SSL XML-RPC client to the load-balancer.
"""
from zato.agent.load_balancer.client import LoadBalancerAgentClient, TLSLoadBalancerAgentClient
http_proto = 'https' if is_tls_enabled else 'http'
agent_uri = '{}://{}:{}/RPC2'.format(http_proto, lb_host, lb_agent_port)
if is_tls_enabled:
if sys.version_info >= (2, 7):
class Python27CompatTransport(SSLClientTransport):
def make_connection(self, host):
return CAValidatingHTTPSConnection(
host, strict=self.strict, ca_certs=self.ca_certs,
keyfile=self.keyfile, certfile=self.certfile, cert_reqs=self.cert_reqs,
ssl_version=self.ssl_version, timeout=self.timeout)
transport = Python27CompatTransport
else:
transport = None
return TLSLoadBalancerAgentClient(
agent_uri, ssl_ca_certs, ssl_key_file, ssl_cert_file, transport=transport, timeout=timeout)
else:
return LoadBalancerAgentClient(agent_uri)
# ################################################################################################################################
def tech_account_password(password_clear, salt):
    return sha256((password_clear + ':' + salt).encode('utf8')).hexdigest()
# ################################################################################################################################
def new_cid(bytes=12, _random=random.getrandbits):
""" Returns a new 96-bit correlation identifier. It is *not* safe to use the ID
for any cryptographical purposes; it is only meant to be used as a conveniently
formatted ticket attached to each of the requests processed by Zato servers.
"""
# Note that we need to convert bytes to bits here.
return hex(_random(bytes * 8))[2:]
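
# An illustrative, hypothetical output of the function above - a lowercase hex string of up to 24 characters (96 bits):
#
#   new_cid() -> 'd0d5a24c48a7a93a9dc8f6a5'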
# ################################################################################################################################
def get_user_config_name(file_name):
return file_name.split('.')[0]
# ################################################################################################################################
def _get_config(conf, bunchified, needs_user_config, repo_location=None):
    # type: (object, bool, bool, str) -> Bunch
conf = bunchify(conf) if bunchified else conf
if needs_user_config:
conf.user_config_items = {}
user_config = conf.get('user_config')
if user_config:
for name, path in user_config.items():
path = absolutize(path, repo_location)
if not os.path.exists(path):
logger.warn('User config not found `%s`, name:`%s`', path, name)
else:
user_conf = ConfigObj(path)
user_conf = bunchify(user_conf) if bunchified else user_conf
conf.user_config_items[name] = user_conf
return conf
# ################################################################################################################################
def get_config(repo_location, config_name, bunchified=True, needs_user_config=True, crypto_manager=None, secrets_conf=None,
raise_on_error=False, log_exception=True):
""" Returns the configuration object. Will load additional user-defined config files, if any are available.
"""
    # type: (str, str, bool, bool, object, object, bool, bool) -> Bunch
# Default output to produce
result = Bunch()
try:
conf_location = os.path.join(repo_location, config_name)
conf = ConfigObj(conf_location, zato_crypto_manager=crypto_manager, zato_secrets_conf=secrets_conf)
result = _get_config(conf, bunchified, needs_user_config, repo_location)
except Exception:
if log_exception:
logger.warn('Error while reading %s from %s; e:`%s`', config_name, repo_location, format_exc())
if raise_on_error:
raise
else:
return result
else:
return result
# ################################################################################################################################
def get_config_from_string(data):
""" A simplified version of get_config which creates a config object from string, skipping any user-defined config files.
"""
# type: (str) -> Bunch
buff = StringIO()
buff.write(data)
buff.seek(0)
conf = ConfigObj(buff)
out = _get_config(conf, True, False)
buff.close()
return out
# ################################################################################################################################
def _get_ioc_config(location, config_class):
""" Instantiates an Inversion of Control container from the given location if the location exists at all.
"""
stat = os.stat(location)
if stat.st_size:
config = config_class(location)
else:
config = None
return config
# ################################################################################################################################
def get_current_user():
return _current_user
# ################################################################################################################################
def service_name_from_impl(impl_name):
""" Turns a Zato internal service's implementation name into a shorter
    service name.
"""
return impl_name.replace('server.service.internal.', '')
# ################################################################################################################################
def deployment_info(method, object_, timestamp, fs_location, remote_host='', remote_user=''):
""" Returns a JSON document containing information who deployed a service
onto a server, where from and when it was.
"""
return {
'method': method,
'object': object_,
'timestamp': timestamp,
'fs_location':fs_location,
'remote_host': remote_host or os.environ.get('SSH_CONNECTION', ''),
'remote_user': remote_user,
'current_host': current_host(),
'current_user': get_current_user(),
}
# ################################################################################################################################
def get_body_payload(body):
body_children_count = body[0].countchildren()
if body_children_count == 0:
body_payload = None
elif body_children_count == 1:
body_payload = body[0].getchildren()[0]
else:
body_payload = body[0].getchildren()
return body_payload
# ################################################################################################################################
def payload_from_request(json_parser, cid, request, data_format, transport, channel_item=None):
""" Converts a raw request to a payload suitable for usage with SimpleIO.
"""
    # type: (SIMDJSONParser, str, object, str, str, object) -> object
if request is not None:
#
# JSON and dicts
#
if data_format in _data_format_json_like:
# It is possible that we have an XML request converted
# to an ObjectifiedElement instance on input and sent
# using the data format of dict. This happens in IBM MQ channels.
if isinstance(request, objectify.ObjectifiedElement):
return request
if not request:
return ''
if isinstance(request, basestring) and data_format == _data_format_json:
try:
request_bytes = request if isinstance(request, bytes) else request.encode('utf8')
try:
payload = json_parser.parse(request_bytes)
except ValueError:
payload = request_bytes
if hasattr(payload, 'as_dict'):
payload = payload.as_dict()
except ValueError:
logger.warn('Could not parse request as JSON:`%s`, (%s), e:`%s`', request, type(request), format_exc())
raise
else:
payload = request
#
# XML
#
elif data_format == _data_format_xml:
if transport == 'soap':
if isinstance(request, objectify.ObjectifiedElement):
soap = request
else:
soap = objectify.fromstring(request)
body = soap_body_xpath(soap)
if not body:
raise ZatoException(cid, 'Client did not send `{}` element'.format(soap_body_path))
payload = get_body_payload(body)
else:
if isinstance(request, objectify.ObjectifiedElement):
payload = request
elif len(request) == 0:
payload = objectify.fromstring('<empty/>')
else:
payload = objectify.fromstring(request)
#
# HL7 v2
#
elif data_format == _data_format_hl7_v2:
payload = hl7_get_payload_from_request(
request,
channel_item['data_encoding'],
channel_item['hl7_version'],
channel_item['json_path'],
channel_item['should_parse_on_input'],
channel_item['should_validate']
)
#
# Other data formats
#
else:
payload = request
else:
payload = request
return payload
# ################################################################################################################################
BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
ZIP_EXTENSIONS = ('.zip', '.whl')
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
ARCHIVE_EXTENSIONS = (ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
def splitext(path):
"""Like os.path.splitext, but take off .tar too"""
base, ext = os.path.splitext(path)
if base.lower().endswith('.tar'):
ext = base[-4:] + ext
base = base[:-4]
return base, ext
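
# Illustrative examples of the function above (the file names are hypothetical):
#
#   splitext('my-services.tar.gz') -> ('my-services', '.tar.gz')
#   splitext('my-services.zip')    -> ('my-services', '.zip')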
def is_archive_file(name):
"""Return True if `name` is a considered as an archive file."""
ext = splitext(name)[1].lower()
if ext in ARCHIVE_EXTENSIONS:
return True
return False
# ################################################################################################################################
def is_python_file(name):
""" Is it a Python file we can import Zato services from?
"""
    for suffix in ('.py', '.pyw'):
if name.endswith(suffix):
return True
# ################################################################################################################################
class _DummyLink(object):
""" A dummy class for staying consistent with pip's API in certain places
below.
"""
def __init__(self, url):
self.url = url
# ################################################################################################################################
class ModuleInfo(object):
def __init__(self, file_name, module):
self.file_name = file_name
self.module = module
# ################################################################################################################################
def import_module_from_path(file_name, base_dir=None):
if not os.path.isabs(file_name):
file_name = os.path.normpath(os.path.join(base_dir, file_name))
if not os.path.exists(file_name):
raise ValueError("Module could not be imported, path:`{}` doesn't exist".format(file_name))
_, mod_file = os.path.split(file_name)
mod_name, _ = os.path.splitext(mod_file)
# Delete compiled bytecode if it exists so that imp.load_source actually picks up the source module
    for suffix in ('c', 'o'):
path = file_name + suffix
if os.path.exists(path):
os.remove(path)
return ModuleInfo(file_name, imp.load_source(mod_name, file_name))
# ################################################################################################################################
def visit_py_source(dir_name):
    for pattern in ('*.py', '*.pyw'):
glob_path = os.path.join(dir_name, pattern)
for py_path in sorted(glob(glob_path)):
yield py_path
# ################################################################################################################################
def _os_remove(path):
""" A helper function so it's easier to mock it in unittests.
"""
return os.remove(path)
# ################################################################################################################################
def hot_deploy(parallel_server, file_name, path, delete_path=True, notify=True):
""" Hot-deploys a package if it looks like a Python module or archive.
"""
logger.debug('About to hot-deploy `%s`', path)
now = datetime.utcnow()
di = dumps(deployment_info('hot-deploy', file_name, now.isoformat(), path))
# Insert the package into the DB ..
package_id = parallel_server.odb.hot_deploy(
now, di, file_name, open(path, 'rb').read(), parallel_server.id)
# .. and optionally notify all the servers they're to pick up a delivery
if notify:
parallel_server.notify_new_package(package_id)
if delete_path:
_os_remove(path)
return package_id
# ################################################################################################################################
# As taken from http://wiki.python.org/moin/SortingListsOfDictionaries
def multikeysort(items, columns):
comparers = [((itemgetter(col[1:].strip()), -1) if col.startswith('-') else (itemgetter(col.strip()), 1)) for col in columns]
    def comparer(left, right):
        for fn, mult in comparers:
            result = cmp(fn(left), fn(right))
            if result:
                return mult * result
        return 0
if PY3:
return sorted(items, key=cmp_to_key(comparer))
else:
return sorted(items, cmp=comparer)
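
# Illustrative example of the function above - sorts ascending by 'a', then descending by 'b' (values are hypothetical):
#
#   multikeysort([{'a':2, 'b':1}, {'a':1, 'b':1}, {'a':1, 'b':2}], ['a', '-b'])
#   -> [{'a':1, 'b':2}, {'a':1, 'b':1}, {'a':2, 'b':1}]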
# ################################################################################################################################
# From http://docs.python.org/release/2.7/library/itertools.html#recipes
def grouper(n, iterable, fillvalue=None):
"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
args = [iter(iterable)] * n
return zip_longest(fillvalue=fillvalue, *args)
# ################################################################################################################################
def translation_name(system1, key1, value1, system2, key2):
return KVDB.SEPARATOR.join((KVDB.TRANSLATION, system1, key1, value1, system2, key2))
# ################################################################################################################################
def dict_item_name(system, key, value):
return KVDB.SEPARATOR.join((system, key, value))
# ################################################################################################################################
# From http://docs.python.org/release/2.7/library/itertools.html#recipes
def pairwise(iterable):
"s -> (s0,s1), (s1,s2), (s2, s3), ..."
a, b = tee(iterable)
next(b, None)
return izip(a, b)
# ################################################################################################################################
def from_local_to_utc(dt, tz_name, dayfirst=True):
""" What is the UTC time given the local time and the timezone's name?
"""
if not isinstance(dt, datetime):
dt = dt_parse(dt, dayfirst=dayfirst)
dt = pytz.timezone(tz_name).localize(dt)
utc_dt = pytz.utc.normalize(dt.astimezone(pytz.utc))
return utc_dt
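
# Illustrative example of the function above, assuming the Europe/Warsaw timezone (UTC+1 in winter):
#
#   from_local_to_utc('15-01-2021 13:30:00', 'Europe/Warsaw') -> 2021-01-15 12:30:00+00:00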
# ################################################################################################################################
def from_utc_to_local(dt, tz_name):
""" What is the local time in the user-provided time zone name?
"""
if not isinstance(dt, datetime):
dt = dt_parse(dt)
local_tz = pytz.timezone(tz_name)
dt = local_tz.normalize(dt.astimezone(local_tz))
return dt
# ################################################################################################################################
def _utcnow():
""" See zato.common.util.utcnow for docstring.
"""
return datetime.utcnow()
# ################################################################################################################################
def utcnow():
""" A thin wrapper around datetime.utcnow added so that tests can mock it
out and return their own timestamps at will.
"""
return _utcnow()
# ################################################################################################################################
def _now(tz):
""" See zato.common.util.utcnow for docstring.
"""
return datetime.now(tz)
# ################################################################################################################################
def now(tz=None):
""" A thin wrapper around datetime.now added so that tests can mock it
out and return their own timestamps at will.
"""
return _now(tz)
# ################################################################################################################################
def datetime_to_seconds(dt):
""" Converts a datetime object to a number of seconds since UNIX epoch.
"""
return (dt - _epoch).total_seconds()
# ################################################################################################################################
# Inspired by http://stackoverflow.com/a/9283563
def uncamelify(s, separator='-', elem_func=unicode.lower):
""" Converts a CamelCaseName into a more readable one, e.g.
will turn ILikeToReadWSDLDocsNotReallyNOPENotMeQ into
i-like-to-read-wsdl-docs-not-really-nope-not-me-q or a similar one,
depending on the value of separator and elem_func.
"""
return separator.join(elem_func(elem) for elem in re.sub(_uncamelify_re, r' \1', s).split())
# ################################################################################################################################
def get_component_name(prefix='parallel'):
""" Returns a name of the component issuing a given request so it's possible
to trace which Zato component issued it.
"""
return '{}/{}/{}/{}'.format(prefix, current_host(), os.getpid(), current_thread().name)
# ################################################################################################################################
def dotted_getattr(o, path):
return reduce(getattr, path.split('.'), o)
# ################################################################################################################################
def wait_for_odb_service(session, cluster_id, service_name):
# type: (object, int, str) -> Service
# Assume we do not have it
service = None
while not service:
# Try to look it up ..
service = session.query(Service).\
filter(Service.name==service_name).\
filter(Cluster.id==cluster_id).\
first()
# .. if not found, sleep for a moment.
if not service:
sleep(1)
logger.info('Waiting for ODB service `%s`', service_name)
# If we are here, it means that the service was found so we can return it
return service
# ################################################################################################################################
def add_startup_jobs(cluster_id, odb, jobs, stats_enabled):
""" Adds internal jobs to the ODB. Note that it isn't being added
directly to the scheduler because we want users to be able to fine-tune the job's
settings.
"""
with closing(odb.session()) as session:
now = datetime.utcnow()
for item in jobs:
if item['name'].startswith('zato.stats'):
continue
try:
extra = item.get('extra', '')
if isinstance(extra, basestring):
extra = extra.encode('utf-8')
else:
if item.get('is_extra_list'):
extra = '\n'.join(extra)
else:
extra = dumps(extra)
if extra:
if not isinstance(extra, bytes):
extra = extra.encode('utf8')
#
# This will block as long as this service is not available in the ODB.
# It is required to do it because the scheduler may start before servers
# in which case services will not be in the ODB yet and we need to wait for them.
#
service = wait_for_odb_service(session, cluster_id, item['service'])
cluster = session.query(Cluster).\
filter(Cluster.id==cluster_id).\
one()
existing_one = session.query(Job).\
filter(Job.name==item['name']).\
filter(Job.cluster_id==cluster_id).\
first()
if existing_one:
continue
job = Job(None, item['name'], True, 'interval_based', now, cluster=cluster, service=service, extra=extra)
kwargs = {}
for name in('seconds', 'minutes'):
if name in item:
kwargs[name] = item[name]
ib_job = IntervalBasedJob(None, job, **kwargs)
session.add(job)
session.add(ib_job)
session.commit()
except Exception:
logger.warn(format_exc())
else:
logger.info('Initial job added `%s`', job.name)
# ################################################################################################################################
def hexlify(item, _hexlify=binascii_hexlify):
""" Returns a nice hex version of a string given on input.
"""
item = item if isinstance(item, unicode) else item.decode('utf8')
return ' '.join(hex(ord(elem)) for elem in item)
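
# Illustrative example of the function above:
#
#   hexlify('ab') -> '0x61 0x62'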
# ################################################################################################################################
def validate_input_dict(cid, *validation_info):
""" Checks that input belongs is one of allowed values.
"""
for key_name, key, source in validation_info:
if not source.has(key):
msg = 'Invalid {}:[{}]'.format(key_name, key)
log_msg = '{} (attrs: {})'.format(msg, source.attrs)
logger.warn(log_msg)
raise ZatoException(cid, msg)
# ################################################################################################################################
# Code below taken from tripod https://github.com/shayne/tripod/blob/master/tripod/sampler.py and slightly modified
# under the terms of LGPL (see LICENSE.txt file for details).
class SafePrettyPrinter(PrettyPrinter, object):
def format(self, obj, context, maxlevels, level):
try:
return super(SafePrettyPrinter, self).format(
obj, context, maxlevels, level)
except Exception:
return object.__repr__(obj)[:-1] + ' (bad repr)>', True, False
def spformat(obj, depth=None):
return SafePrettyPrinter(indent=1, width=76, depth=depth).pformat(obj)
def formatvalue(v):
s = spformat(v, depth=1).replace('\n', '')
if len(s) > 12500:
s = object.__repr__(v)[:-1] + ' (really long repr)>'
return '=' + s
def get_stack(f, with_locals=False):
limit = getattr(sys, 'tracebacklimit', None)
frames = []
n = 0
while f is not None and (limit is None or n < limit):
lineno, co = f.f_lineno, f.f_code
name, filename = co.co_name, co.co_filename
args = inspect.getargvalues(f)
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line:
line = line.strip()
else:
line = None
frames.append((filename, lineno, name, line, f.f_locals, args))
f = f.f_back
n += 1
frames.reverse()
out = []
for filename, lineno, name, line, localvars, args in frames:
out.append(' File "%s", line %d, in %s' % (filename, lineno, name))
if line:
out.append(' %s' % line.strip())
if with_locals:
args = inspect.formatargvalues(formatvalue=formatvalue, *args)
out.append('\n Arguments: %s%s' % (name, args))
if with_locals and localvars:
out.append(' Local variables:\n')
try:
reprs = spformat(localvars)
except Exception:
reprs = "failed to format local variables"
out += [' ' + line for line in reprs.splitlines()]
out.append('')
return '\n'.join(out)
# ################################################################################################################################
def get_threads_traceback(pid):
result = {}
id_name = dict([(th.ident, th.name) for th in threading.enumerate()])
for thread_id, frame in iteritems(sys._current_frames()):
key = '{}:{}'.format(pid, id_name.get(thread_id, '(No name)'))
result[key] = get_stack(frame, True)
return result
# ################################################################################################################################
def get_greenlets_traceback(pid):
result = {}
for item in gc.get_objects():
if not isinstance(item, (Greenlet, Hub)):
continue
if not item:
continue
key = '{}:{}'.format(pid, repr(item))
result[key] = ''.join(get_stack(item.gr_frame, True))
return result
# ################################################################################################################################
def dump_stacks(*ignored):
pid = os.getpid()
table = Texttable()
table.set_cols_width((30, 90))
table.set_cols_dtype(['t', 't'])
rows = [['Proc:Thread/Greenlet', 'Traceback']]
rows.extend(sorted(iteritems(get_threads_traceback(pid))))
rows.extend(sorted(iteritems(get_greenlets_traceback(pid))))
table.add_rows(rows)
logger.info('\n' + table.draw())
# ################################################################################################################################
def register_diag_handlers():
""" Registers diagnostic handlers dumping stacks, threads and greenlets on receiving a signal.
"""
signal.signal(signal.SIGURG, dump_stacks)
# ################################################################################################################################
def parse_simple_type(value, convert_bool=True):
if convert_bool:
try:
value = is_boolean(value)
except VdtTypeError:
# It's cool, not a boolean
pass
try:
value = is_integer(value)
except VdtTypeError:
# OK, not an integer
pass
# Could be a dict or another simple type then
try:
value = literal_eval(value)
except Exception:
pass
# Either parsed out or as it was received
return value
# ################################################################################################################################
def parse_extra_into_dict(lines, convert_bool=True):
""" Creates a dictionary out of key=value lines.
"""
_extra = {}
if lines:
extra = ';'.join(lines.splitlines())
for line in extra.split(';'):
original_line = line
if line:
line = line.strip()
if line.startswith('#'):
continue
line = line.split('=', 1)
if not len(line) == 2:
raise ValueError('Each line must be a single key=value entry, not `{}`'.format(original_line))
key, value = line
value = value.strip()
value = parse_simple_type(value, convert_bool)
# OK, let's just treat it as string
_extra[key.strip()] = value
return _extra
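
# Illustrative example of the function above - values go through parse_simple_type, so integers and booleans
# are converted from strings (the keys and values below are hypothetical):
#
#   parse_extra_into_dict('port=8183\nis_active=True\nname=my-connection')
#   -> {'port': 8183, 'is_active': True, 'name': 'my-connection'}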
# ################################################################################################################################
# Taken from http://plumberjack.blogspot.cz/2009/09/how-to-treat-logger-like-output-stream.html
class LoggerWriter:
def __init__(self, logger, level):
self.logger = logger
self.level = level
def write(self, message):
if message != '\n':
self.logger.log(self.level, message)
# ################################################################################################################################
def validate_xpath(expr):
""" Evaluates an XPath expression thus confirming it is correct.
"""
etree.XPath(expr)
return True
# ################################################################################################################################
def get_haproxy_agent_pidfile(component_dir):
json_config = loads(open(os.path.join(component_dir, 'config', 'repo', 'lb-agent.conf')).read())
return os.path.abspath(os.path.join(component_dir, json_config['pid_file']))
# ################################################################################################################################
def store_pidfile(component_dir, pidfile=MISC.PIDFILE):
open(os.path.join(component_dir, pidfile), 'w').write('{}'.format(os.getpid()))
# ################################################################################################################################
def get_kvdb_config_for_log(config):
config = copy.deepcopy(config)
if config.shadow_password_in_logs:
config.password = SECRET_SHADOW
return config
# ################################################################################################################################
def validate_tls_from_payload(payload, is_key=False):
with NamedTemporaryFile(prefix='zato-tls-') as tf:
payload = payload.encode('utf8') if isinstance(payload, unicode) else payload
tf.write(payload)
tf.flush()
pem = open(tf.name).read()
cert_info = crypto.load_certificate(crypto.FILETYPE_PEM, pem)
cert_info = sorted(cert_info.get_subject().get_components())
cert_info = '; '.join('{}={}'.format(k.decode('utf8'), v.decode('utf8')) for k, v in cert_info)
if is_key:
key_info = crypto.load_privatekey(crypto.FILETYPE_PEM, pem)
key_info = '{}; {} bits'.format(TLS_KEY_TYPE[key_info.type()], key_info.bits())
return '{}; {}'.format(key_info, cert_info)
else:
return cert_info
get_tls_from_payload = validate_tls_from_payload
# ################################################################################################################################
def get_tls_full_path(root_dir, component, info):
return os.path.join(root_dir, component, fs_safe_name(info) + '.pem')
# ################################################################################################################################
def get_tls_ca_cert_full_path(root_dir, info):
return get_tls_full_path(root_dir, TLS.DIR_CA_CERTS, info)
# ################################################################################################################################
def get_tls_key_cert_full_path(root_dir, info):
return get_tls_full_path(root_dir, TLS.DIR_KEYS_CERTS, info)
# ################################################################################################################################
def store_tls(root_dir, payload, is_key=False):
# Raises exception if it's not really a certificate.
info = get_tls_from_payload(payload, is_key)
pem_file_path = get_tls_full_path(root_dir, TLS.DIR_KEYS_CERTS if is_key else TLS.DIR_CA_CERTS, info)
pem_file = open(pem_file_path, 'w')
try:
portalocker.lock(pem_file, portalocker.LOCK_EX)
pem_file.write(payload)
pem_file.close()
os.chmod(pem_file_path, 0o640)
return pem_file_path
except portalocker.LockException:
pass # It's OK, something else is doing the same thing right now
# ################################################################################################################################
def replace_private_key(orig_payload):
if isinstance(orig_payload, basestring):
for item in TLS.BEGIN_END:
begin = '-----BEGIN {}PRIVATE KEY-----'.format(item)
if begin in orig_payload:
end = '-----END {}PRIVATE KEY-----'.format(item)
                begin_last_idx = orig_payload.find(begin) + len(begin) + 1
                end_preceding_idx = orig_payload.find(end) - 1
                return orig_payload[0:begin_last_idx] + SECRET_SHADOW + orig_payload[end_preceding_idx:]
# No private key at all in payload
return orig_payload
# ################################################################################################################################
def delete_tls_material_from_fs(server, info, full_path_func):
try:
os.remove(full_path_func(server.tls_dir, info))
except OSError as e:
if e.errno == errno.ENOENT:
# It's ok - some other worker must have deleted it already
pass
else:
raise
# ################################################################################################################################
def ping_solr(config):
result = urlparse(config.address)
requests.get('{}://{}{}'.format(result.scheme, result.netloc, config.ping_path))
# ################################################################################################################################
def ping_odoo(conn):
user_model = conn.get_model('res.users')
ids = user_model.search([('login', '=', conn.login)])
user_model.read(ids, ['login'])[0]['login']
# ################################################################################################################################
def ping_sap(conn):
conn.ping()
# ################################################################################################################################
class StaticConfig(Bunch):
def __init__(self, base_dir):
# type: (str) -> None
super(StaticConfig, self).__init__()
self.base_dir = base_dir
def read_file(self, full_path, file_name):
# type: (str, str) -> None
f = open(full_path)
file_contents = f.read()
f.close()
        # Convert to a Path object to prepare for manipulations ..
full_path = Path(full_path)
# .. this is the path to the directory containing the file
# relative to the base directory, e.g. the "config/repo/static" part
# in "/home/zato/server1/config/repo/static" ..
relative_dir = Path(full_path.parent).relative_to(self.base_dir)
# .. now, convert all the components from relative_dir into a nested Bunch of Bunch instances ..
relative_dir_elems = list(relative_dir.parts)
# .. start with ourselves ..
_bunch = self
# .. if there are no directories leading to the file, simply assign
# its name to self and return ..
if not relative_dir_elems:
_bunch[file_name] = file_contents
return
# .. otherwise, if there are directories leading to the file,
# iterate until they exist and convert their names to Bunch keys ..
while relative_dir_elems:
# .. name of a directory = a Bunch key ..
elem = relative_dir_elems.pop(0)
# .. attach to the parent Bunch as a new Bunch instance ..
_bunch = _bunch.setdefault(elem, Bunch())
# .. this was the last directory to visit so we can now attach the file name and its contents
# to the Bunch instance representing this directory.
if not relative_dir_elems:
_bunch[file_name] = file_contents
def read_directory(self, root_dir):
for elem in Path(root_dir).rglob('*'): # type: Path
full_path = str(elem)
try:
if elem.is_file():
self.read_file(full_path, elem.name)
except Exception as e:
logger.warn('Could not read file `%s`, e:`%s`', full_path, e.args)
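
# Illustrative sketch of how StaticConfig.read_file builds nested Bunch objects - the paths are hypothetical:
# with base_dir = '/home/zato/server1/config/repo/static', reading the file
# '/home/zato/server1/config/repo/static/demo/hello.txt' makes its contents available as static_config.demo['hello.txt'].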
# ################################################################################################################################
def add_scheduler_jobs(api, odb, cluster_id, spawn=True):
    for (id, name, is_active, job_type, start_date, extra, service_name, _,
_, weeks, days, hours, minutes, seconds, repeats, cron_definition)\
in odb.get_job_list(cluster_id):
job_data = Bunch({'id':id, 'name':name, 'is_active':is_active,
'job_type':job_type, 'start_date':start_date,
'extra':extra, 'service':service_name, 'weeks':weeks,
'days':days, 'hours':hours, 'minutes':minutes,
'seconds':seconds, 'repeats':repeats,
'cron_definition':cron_definition})
if is_active:
api.create_edit('create', job_data, spawn=spawn)
else:
logger.info('Not adding an inactive job `%s`', job_data)
# ################################################################################################################################
def get_basic_auth_credentials(auth):
if not auth:
return None, None
prefix = 'Basic '
if not auth.startswith(prefix):
return None, None
_, auth = auth.split(prefix)
    auth = b64decode(auth.strip())
    auth = auth if isinstance(auth, unicode) else auth.decode('utf8')
    return auth.split(':', 1)
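
# Illustrative example of the function above ('dXNlcjpwYXNz' is Base64 for 'user:pass'):
#
#   get_basic_auth_credentials('Basic dXNlcjpwYXNz') -> ['user', 'pass']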
# ################################################################################################################################
def parse_tls_channel_security_definition(value):
# type: (bytes) -> iterable(str, str)
if not value:
raise ValueError('No definition given `{}`'.format(repr(value)))
else:
if isinstance(value, bytes):
value = value.decode('utf8')
for line in value.splitlines():
line = line.strip()
if not line:
continue
            if '=' not in line:
raise ValueError("Line `{}` has no '=' key/value separator".format(line))
# It's possible we will have multiple '=' symbols.
sep_index = line.find('=')
key, value = line[:sep_index], line[sep_index+1:]
if not key:
raise ValueError('Key missing in line `{}`'.format(line))
if not value:
raise ValueError('Value missing in line `{}`'.format(line))
yield 'HTTP_X_ZATO_TLS_{}'.format(key.upper()), value
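
# Illustrative example of the generator above (the definition below is hypothetical):
#
#   list(parse_tls_channel_security_definition('CN=my-client\nO=My Organization'))
#   -> [('HTTP_X_ZATO_TLS_CN', 'my-client'), ('HTTP_X_ZATO_TLS_O', 'My Organization')]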
# ################################################################################################################################
def get_http_json_channel(name, service, cluster, security):
return HTTPSOAP(None, '{}.json'.format(name), True, True, 'channel', 'plain_http', None, '/zato/json/{}'.format(name),
None, '', None, SIMPLE_IO.FORMAT.JSON, service=service, cluster=cluster, security=security)
# ################################################################################################################################
def get_http_soap_channel(name, service, cluster, security):
return HTTPSOAP(None, name, True, True, 'channel', 'soap', None, '/zato/soap', None, name, '1.1',
SIMPLE_IO.FORMAT.XML, service=service, cluster=cluster, security=security)
# ################################################################################################################################
def get_engine(args):
return sa.create_engine(get_engine_url(args))
# ################################################################################################################################
def get_session(engine):
session = orm.sessionmaker() # noqa
session.configure(bind=engine)
return session()
# ################################################################################################################################
def get_crypto_manager_from_server_config(config, repo_dir):
priv_key_location = os.path.abspath(os.path.join(repo_dir, config.crypto.priv_key_location))
cm = CryptoManager(priv_key_location=priv_key_location)
cm.load_keys()
return cm
# ################################################################################################################################
def get_odb_session_from_server_config(config, cm, odb_password_encrypted):
engine_args = Bunch()
engine_args.odb_type = config.odb.engine
engine_args.odb_user = config.odb.username
engine_args.odb_host = config.odb.host
engine_args.odb_port = config.odb.port
engine_args.odb_db_name = config.odb.db_name
if odb_password_encrypted:
engine_args.odb_password = cm.decrypt(config.odb.password) if config.odb.password else ''
else:
engine_args.odb_password = config.odb.password
return get_session(get_engine(engine_args))
# ################################################################################################################################
def get_odb_session_from_component_dir(component_dir, config_file, CryptoManagerClass):
repo_dir = get_repo_dir_from_component_dir(component_dir)
cm = CryptoManagerClass.from_repo_dir(None, repo_dir, None)
secrets_conf = get_config(repo_dir, 'secrets.conf', needs_user_config=False)
config = get_config(repo_dir, config_file, crypto_manager=cm, secrets_conf=secrets_conf)
return get_odb_session_from_server_config(config, None, False)
# ################################################################################################################################
def get_odb_session_from_server_dir(server_dir):
# Zato
from zato.common.crypto.api import ServerCryptoManager
return get_odb_session_from_component_dir(server_dir, 'server.conf', ServerCryptoManager)
# ################################################################################################################################
def get_server_client_auth(config, repo_dir, cm, odb_password_encrypted):
""" Returns credentials to authenticate with against Zato's own /zato/admin/invoke channel.
"""
session = get_odb_session_from_server_config(config, cm, odb_password_encrypted)
with closing(session) as session:
cluster = session.query(Server).\
filter(Server.token == config.main.token).\
one().cluster
channel = session.query(HTTPSOAP).\
filter(HTTPSOAP.cluster_id == cluster.id).\
filter(HTTPSOAP.url_path == '/zato/admin/invoke').\
            filter(HTTPSOAP.connection == 'channel').\
one()
if channel.security_id:
security = session.query(HTTPBasicAuth).\
filter(HTTPBasicAuth.id == channel.security_id).\
first()
if security:
password = security.password.replace(SECRETS.PREFIX, '')
if password.startswith(SECRETS.EncryptedMarker):
password = cm.decrypt(password)
return (security.username, password)
# ################################################################################################################################
def get_client_from_server_conf(server_dir, require_server=True, stdin_data=None):
# Imports go here to avoid circular dependencies
from zato.client import get_client_from_server_conf as client_get_client_from_server_conf
# Get the client object ..
client = client_get_client_from_server_conf(server_dir, get_server_client_auth, get_config, stdin_data=stdin_data)
# .. make sure the server is available ..
if require_server:
wait_for_zato_ping(client.address)
# .. return the client to our caller now.
return client
# ################################################################################################################################
def get_repo_dir_from_component_dir(component_dir):
# type: (str) -> str
    return os.path.join(os.path.abspath(component_dir), 'config', 'repo')
# ################################################################################################################################
django_sa_mappings = {
'NAME': 'db_name',
'HOST': 'host',
'PORT': 'port',
'USER': 'username',
'PASSWORD': 'password',
'odb_type': 'engine',
'db_type': 'engine',
}
cli_sa_mappings = {
'odb_db_name': 'db_name',
'odb_host': 'host',
'odb_port': 'port',
'odb_user': 'username',
'odb_password': 'password',
'odb_type': 'engine',
}
# ################################################################################################################################
def get_engine_url(args):
attrs = {}
is_sqlite = False
is_django = 'NAME' in args
has_get = getattr(args, 'get', False)
odb_type = getattr(args, 'odb_type', None)
if odb_type:
is_sqlite = odb_type == 'sqlite'
else:
is_sqlite = args.get('engine') == 'sqlite' or args.get('db_type') == 'sqlite'
names = ('engine', 'username', 'password', 'host', 'port', 'name', 'db_name', 'db_type', 'sqlite_path', 'odb_type',
'odb_user', 'odb_password', 'odb_host', 'odb_port', 'odb_db_name', 'odb_type', 'ENGINE', 'NAME', 'HOST', 'USER',
'PASSWORD', 'PORT')
for name in names:
if has_get:
attrs[name] = args.get(name, '')
else:
attrs[name] = getattr(args, name, '')
# Re-map Django params into SQLAlchemy params
if is_django:
for name in django_sa_mappings:
value = attrs.get(name, ZATO_NOT_GIVEN)
if value != ZATO_NOT_GIVEN:
                if not value and name in ('db_type', 'odb_type'):
continue
attrs[django_sa_mappings[name]] = value
# Zato CLI to SQLAlchemy
if not attrs.get('engine'):
for name in cli_sa_mappings:
value = attrs.get(name, ZATO_NOT_GIVEN)
if value != ZATO_NOT_GIVEN:
attrs[cli_sa_mappings[name]] = value
# Re-map server ODB params into SQLAlchemy params
if attrs['engine'] == 'sqlite':
db_name = attrs.get('db_name')
sqlite_path = attrs.get('sqlite_path')
if db_name:
attrs['sqlite_path'] = db_name
if sqlite_path:
attrs['db_name'] = sqlite_path
return (engine_def_sqlite if is_sqlite else engine_def).format(**attrs)
# ################################################################################################################################
def startup_service_payload_from_path(name, value, repo_location):
""" Reads payload from a local file. Abstracted out to ease in testing.
"""
orig_path = value.replace('file://', '')
if not os.path.isabs(orig_path):
path = os.path.normpath(os.path.join(repo_location, orig_path))
else:
path = orig_path
try:
payload = open(path).read()
except Exception:
logger.warn(
'Could not open payload path:`%s` `%s`, skipping startup service:`%s`, e:`%s`', orig_path, path, name, format_exc())
else:
return payload
# ################################################################################################################################
def invoke_startup_services(source, key, fs_server_config, repo_location, broker_client=None, service_name=None,
skip_include=True, worker_store=None, is_sso_enabled=False):
""" Invoked when we are the first worker and we know we have a broker client and all the other config is ready
so we can publish the request to execute startup services. In the worst case the requests will get back to us but it's
also possible that other workers are already running. In short, there is no guarantee that any server or worker in particular
will receive the requests, only that there will be exactly one.
"""
for name, payload in iteritems(fs_server_config.get(key, {})):
# Don't invoke SSO services if the feature is not enabled
if not is_sso_enabled:
if 'zato.sso' in name:
continue
if service_name:
# We are to skip this service:
if skip_include:
if name == service_name:
continue
# We are to include this service only, any other is rejected
else:
if name != service_name:
continue
if isinstance(payload, basestring) and payload.startswith('file://'):
payload = startup_service_payload_from_path(name, payload, repo_location)
if not payload:
continue
cid = new_cid()
msg = {}
msg['action'] = SERVICE.PUBLISH.value
msg['service'] = name
msg['payload'] = payload
msg['cid'] = cid
msg['channel'] = CHANNEL.STARTUP_SERVICE
if broker_client:
broker_client.invoke_async(msg)
else:
worker_store.on_message_invoke_service(msg, msg['channel'], msg['action'])
# ################################################################################################################################
def timeouting_popen(command, timeout, timeout_msg, rc_non_zero_msg, common_msg=''):
""" Runs a command in background and returns its return_code, stdout and stderr.
stdout and stderr will be None if return code = 0
"""
stdout, stderr = None, None
# Run the command
p = Popen(command, stdout=PIPE, stderr=PIPE)
# Sleep as long as requested and poll for results
sleep(timeout)
p.poll()
if p.returncode is None:
msg = timeout_msg + common_msg + 'command:[{}]'.format(command)
raise Exception(msg.format(timeout))
else:
if p.returncode != 0:
stdout, stderr = p.communicate()
msg = rc_non_zero_msg + common_msg + 'command:[{}], return code:[{}], stdout:[{}], stderr:[{}] '.format(
command, p.returncode, stdout, stderr)
raise Exception(msg)
return p.returncode
# ################################################################################################################################
def spawn_greenlet(callable, *args, **kwargs):
""" Spawns a new greenlet and wait up to timeout seconds for its response. It is expected that the response never arrives
because if it does, it means that there were some errors.
"""
try:
timeout = kwargs.pop('timeout', 0.2)
g = spawn(callable, *args, **kwargs)
gevent_sleep(0)
g.join(timeout)
if g.exception:
type_, value, traceback = g.exc_info
raise_(type_(value, str(g.exception)), None, traceback)
except Timeout:
pass # Timeout = good = no errors
else:
return g
# ################################################################################################################################
def get_logger_for_class(class_):
return logging.getLogger('{}.{}'.format(inspect.getmodule(class_).__name__, class_.__name__))
# ################################################################################################################################
def get_worker_pids():
""" Returns all sibling worker PIDs of the server process we are being invoked on, including our own worker too.
"""
return sorted(elem.pid for elem in psutil.Process(psutil.Process().ppid()).children())
# ################################################################################################################################
def update_bind_port(data, idx):
address_info = urlparse(data.address)
base, port = address_info.netloc.split(':')
port = int(port) + idx
data.address = '{}://{}:{}{}'.format(address_info.scheme, base, port, address_info.path)
data.bind_port = port
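
# A minimal sketch of the function above, guarded by `if 0` so it never runs on import - the address is hypothetical:
if 0:
    _example_data = Bunch(address='tcp://0.0.0.0:5555')
    update_bind_port(_example_data, 2) # _example_data.address == 'tcp://0.0.0.0:5557', _example_data.bind_port == 5557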
# ################################################################################################################################
def start_connectors(worker_store, service_name, data):
for idx, pid in enumerate(get_worker_pids()):
if 'socket_method' in data and data.socket_method == ZMQ.METHOD_NAME.BIND:
update_bind_port(data, idx)
worker_store.server.invoke(service_name, data, pid=pid, is_async=True, data_format=DATA_FORMAT.DICT)
# ################################################################################################################################
def require_tcp_port(address):
    if ':' not in address:
raise Exception('No TCP port in {}'.format(address))
port = address.split(':')[-1]
if not port.strip():
raise Exception('No TCP port in {}'.format(address))
try:
int(port)
except ValueError:
raise Exception('Invalid TCP port in {}'.format(address))
# ################################################################################################################################
def update_apikey_username_to_channel(config):
config.username = 'HTTP_{}'.format(config.get('username', '').upper().replace('-', '_'))
# ################################################################################################################################
def get_response_value(response):
""" Extracts the actual response string from a response object produced by services.
"""
return (response.payload.getvalue() if hasattr(response.payload, 'getvalue') else response.payload) or ''
# ################################################################################################################################
def get_lb_agent_json_config(repo_dir):
return loads(open(os.path.join(repo_dir, 'lb-agent.conf')).read())
# ################################################################################################################################
def parse_cmd_line_options(argv):
options = argv.split(CLI_ARG_SEP)
options = '\n'.join(options)
return parse_extra_into_dict(options)
# ################################################################################################################################
def get_sa_model_columns(model):
""" Returns all columns (as string) of an input SQLAlchemy model.
"""
return [elem.key for elem in model.__table__.columns]
# ################################################################################################################################
def is_class_pubsub_hook(class_):
""" Returns True if input class subclasses PubSubHook.
"""
# Imported here to avoid circular dependencies
from zato.server.service import PubSubHook
return issubclass(class_, PubSubHook) and (class_ is not PubSubHook)
# ################################################################################################################################
def ensure_pubsub_hook_is_valid(self, input, instance, attrs):
""" An instance hook that validates if an optional pub/sub hook given on input actually subclasses PubSubHook.
"""
if input.get('hook_service_id'):
impl_name = self.server.service_store.id_to_impl_name[input.hook_service_id]
details = self.server.service_store.services[impl_name]
if not is_class_pubsub_hook(details['service_class']):
raise ValueError('Service `{}` is not a PubSubHook subclass'.format(details['name']))
# ################################################################################################################################
def is_func_overridden(func):
""" Returns True if input func was overridden by user in a subclass - used to decide
whether users implemented a given hook. If there is a special internal marker in input arguments,
it means that it is an internal function from parent class, not a user-defined one.
"""
if func and is_method(func):
func_defaults = func.__defaults__ if PY3 else func.im_func.func_defaults
# Only internally defined methods will fulfill conditions that they have default arguments
# and one of them is our no-op marker, hence if we negate it and the result is True,
# it means it must have been a user-defined method.
if not (func_defaults and isinstance(func_defaults, tuple) and zato_no_op_marker in func_defaults):
return True
# ################################################################################################################################
def get_sql_engine_display_name(engine, fs_sql_config):
display_name = None
for key, value in fs_sql_config.items():
if key == engine:
display_name = value.get('display_name')
break
if not display_name:
raise ValueError('Could not find display name for engine `{}` in config `{}`'.format(
engine, fs_sql_config))
else:
return display_name
# ################################################################################################################################
def pretty_format_float(value):
return ('%f' % value).rstrip('0').rstrip('.') if value else value
# ################################################################################################################################
# The slugify function below is taken from Django:
"""
Copyright (c) Django Software Foundation and individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Django nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
def slugify(value, allow_unicode=False):
""" Convert to ASCII if 'allow_unicode' is False. Convert spaces to underscores.
Remove characters that aren't alphanumerics, underscores, or hyphens.
Convert to lowercase. Also strip leading and trailing whitespace.
"""
if allow_unicode:
value = unicodedata.normalize('NFKC', value)
        value = re.sub(r'[^\w\s-]', '', value, flags=re.U).strip().lower()
        return re.sub(r'[-\s]+', '_', value, flags=re.U)
    value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
    value = re.sub(r'[^\w\s-]', '', value).strip().lower()
    return re.sub(r'[-\s]+', '_', value)
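# A usage sketch - note that, unlike Django's original, this variant joins words
# with underscores rather than hyphens:
#
#   slugify('Hello, World! ')                  # -> 'hello_world'
#   slugify('Zażółć gęś', allow_unicode=True)  # -> 'zażółć_gęś'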
# ################################################################################################################################
def wait_for_predicate(predicate_func, timeout, interval, *args, **kwargs):
# type: (object, int, float, *object, **object) -> bool
is_fulfilled = predicate_func(*args, **kwargs)
if not is_fulfilled:
start = datetime.utcnow()
wait_until = start + timedelta(seconds=timeout)
while not is_fulfilled:
gevent_sleep(interval)
is_fulfilled = predicate_func(*args, **kwargs)
if datetime.utcnow() > wait_until:
break
return is_fulfilled
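# A minimal usage sketch - poll an arbitrary condition for up to 5 seconds, checking
# every 0.1 second; the predicate and path below are illustrative only:
#
#   def is_file_ready(path):
#       return os.path.exists(path)
#
#   wait_for_predicate(is_file_ready, 5, 0.1, '/tmp/my-file.txt')  # -> True or False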
# ################################################################################################################################
def wait_for_dict_key(_dict, key, timeout=30, interval=0.01):
# type: (dict, object, int, float) -> bool
def _predicate_dict_key(*_ignored_args, **_ignored_kwargs):
return key in _dict
return wait_for_predicate(_predicate_dict_key, timeout, interval)
# ################################################################################################################################
def hex_sequence_to_bytes(elems):
    # type: (str) -> bytes

    # Each whitespace-separated element is a hex number representing a single byte,
    # so convert the values to bytes directly - going through chr() would UTF-8-encode
    # code points above 0x7f into more than one byte each.
    elems = [int(elem.strip(), 16) for elem in elems.split()]
    return bytes(bytearray(elems))
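# For example, a whitespace-separated sequence of hex numbers becomes raw bytes,
# with or without the 0x prefix:
#
#   hex_sequence_to_bytes('48 69 21')   # -> b'Hi!'
#   hex_sequence_to_bytes('0x0d 0x0a')  # -> b'\r\n'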
# ################################################################################################################################ | zato-common-holocene | /zato-common-holocene-3.2.1.tar.gz/zato-common-holocene-3.2.1/src/zato/common/util/api.py | api.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
import sys
from logging import getLogger
from tempfile import mkstemp
from time import time, sleep
# Sarge
from sarge import run as sarge_run, shell_format
# Python 2/3 compatibility
from six import PY2
# Zato
from zato.common.api import CLI_ARG_SEP
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
stderr_sleep_fg = 0.9
stderr_sleep_bg = 1.2
# ################################################################################################################################
# This is for convenience of switching to a newer version of sarge in the future. Newer versions use async_ instead of async.
async_keyword = 'async' if PY2 else 'async_'
# ################################################################################################################################
import platform
system = platform.system()
is_windows = 'windows' in system.lower()
# ################################################################################################################################
def get_executable():
""" Returns the wrapper which buildout uses for executing Zato commands,
the one with all the dependencies added to PYTHONPATH.
"""
if is_windows:
return os.path.join(os.path.dirname(sys.executable), 'python.exe')
return os.path.join(os.path.dirname(sys.executable), 'py')
# ################################################################################################################################
class _StdErr(object):
# Some log messages (like the ones produced by PyKafka) go to stderr but they are not really errors,
# in which case we need to ignore them.
ignored = [
'Could not load pykafka.rdkafka extension.'
]
def __init__(self, path, timeout):
self.path = path
self.timeout = timeout
# ################################################################################################################################
def wait_for_error(self):
now = time()
while time() - now < self.timeout:
sleep(0.1)
            _stderr = open(self.path)
            _err = _stderr.read()
            _stderr.close()
            if _err and (not self.should_ignore(_err)):
                return _err
# ################################################################################################################################
def should_ignore(self, err):
for item in self.ignored:
if err.endswith(item):
return True
# ################################################################################################################################
def start_process(component_name, executable, run_in_fg, cli_options, extra_cli_options='', on_keyboard_interrupt=None,
failed_to_start_err=-100, extra_options=None, stderr_path=None, stdin_data=None, async_keyword=async_keyword):
""" Starts a new process from a given Python path, either in background or foreground (run_in_fg).
"""
stderr_path = stderr_path or mkstemp('-zato-start-{}.txt'.format(component_name.replace(' ','')))[1]
stdout_redirect = ''
stderr_redirect = ''
if not is_windows:
if not run_in_fg:
stdout_redirect = '1> /dev/null'
stderr_redirect = '2> {}'.format(stderr_path)
program = '{} {} {} {}'.format(executable, extra_cli_options, stdout_redirect, stderr_redirect)
try:
_stderr = _StdErr(stderr_path, stderr_sleep_fg if run_in_fg else stderr_sleep_bg)
run_kwargs = {
async_keyword: False if run_in_fg else True,
}
# Do not send input if it does not really exist because it prevents pdb from attaching to a service's stdin
if stdin_data:
run_kwargs['input'] = stdin_data
sarge_run(program, **run_kwargs)
# Wait a moment for any potential errors
_err = _stderr.wait_for_error()
if _err:
if 'Could not load pykafka.rdkafka extension.' not in _err:
logger.warn('Stderr received from program `%s` e:`%s`, kw:`%s`', program, _err, run_kwargs)
sys.exit(failed_to_start_err)
except KeyboardInterrupt:
if on_keyboard_interrupt:
on_keyboard_interrupt()
sys.exit(0)
# ################################################################################################################################
def start_python_process(component_name, run_in_fg, py_path, program_dir, on_keyboard_interrupt=None, failed_to_start_err=-100,
extra_options=None, stderr_path=None, stdin_data=None):
""" Starts a new process from a given Python path, either in background or foreground (run_in_fg).
"""
options = {
'fg': run_in_fg,
}
if extra_options:
options.update(extra_options)
options = CLI_ARG_SEP.join('{}={}'.format(k, v) for k, v in options.items())
py_path_option = shell_format('-m {0}', py_path)
program_dir_option = shell_format('{0}', program_dir) if program_dir else ''
extra_cli_options = '{} '.format(py_path_option)
if program_dir_option:
extra_cli_options += '{} '.format(program_dir_option)
extra_cli_options += '{}'.format(options)
return start_process(component_name, get_executable(), run_in_fg, None, extra_cli_options, on_keyboard_interrupt,
failed_to_start_err, extra_options, stderr_path, stdin_data)
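# A hypothetical invocation - all the values below are illustrative rather than
# taken from any actual Zato component:
#
#   start_python_process(
#       'my component',          # Name used when naming the stderr temporary file
#       False,                   # run_in_fg - False means run in the background
#       'mypackage.main',        # Dotted Python path passed via -m
#       '/opt/env/component1',   # Directory the program receives as its base dir
#       extra_options={'sync_internal': True})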
# ################################################################################################################################ | zato-common-holocene | /zato-common-holocene-3.2.1.tar.gz/zato-common-holocene-3.2.1/src/zato/common/util/proc.py | proc.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# Zato
from zato.common.api import WEB_SOCKET
# ################################################################################################################################
logger_zato = getLogger('zato')
logger_wsx = getLogger('zato_web_socket')
# ################################################################################################################################
msg_cleanup_error = 'WSX cleanup error, wcr:`%d`, si:`%s`, pci:`%s`, sk_list:`%s`, h:`%r`, hs:`%r`, hr:`%r`, ofl:`%s`, e:`%s`'
# ################################################################################################################################
_on_disconnected = WEB_SOCKET.HOOK_TYPE.ON_DISCONNECTED
# ################################################################################################################################
def find_wsx_environ(service, raise_if_not_found=True):
wsx_environ = service.wsgi_environ.get('zato.request_ctx.async_msg', {}).get('environ')
if not wsx_environ:
if raise_if_not_found:
raise Exception('Could not find `[\'zato.request_ctx.async_msg\'][\'environ\']` in WSGI environ `{}`'.format(
service.wsgi_environ))
else:
return wsx_environ
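# Typically called from within a service invoked over WebSockets, e.g. (assuming
# 'self' is a Service instance handling such a request):
#
#   wsx_environ = find_wsx_environ(self)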
# ################################################################################################################################
def cleanup_wsx_client(wsx_cleanup_required, service_invoker, pub_client_id, sub_keys, hook, hook_service, hook_request,
opaque_func_list=None):
""" Cleans up information about a WSX client that has disconnected.
"""
try:
        # Sometimes it will not be needed at all, e.g. when we clean up a half-opened connection that never
        # successfully authenticated.
if wsx_cleanup_required:
# Deletes state from SQL
service_invoker('zato.channel.web-socket.client.delete-by-pub-id', {
'pub_client_id': pub_client_id,
})
if sub_keys:
# Deletes across all workers the in-RAM pub/sub state about the client that is disconnecting
service_invoker('zato.channel.web-socket.client.unregister-ws-sub-key', {
'sub_key_list': sub_keys,
})
# An opaque list of functions to invoke - each caller may decide what else should be carried out here
for func in opaque_func_list or []:
func()
# Run the relevant on_disconnected hook, if any is available (even if the session was never opened)
if hook:
hook(_on_disconnected, hook_service, **hook_request)
except Exception as e:
for logger in logger_zato, logger_wsx:
logger.info(msg_cleanup_error, wsx_cleanup_required, service_invoker, pub_client_id, sub_keys, hook,
hook_service, hook_request, opaque_func_list, e)
# ################################################################################################################################ | zato-common-holocene | /zato-common-holocene-3.2.1.tar.gz/zato-common-holocene-3.2.1/src/zato/common/util/wsx.py | wsx.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import errno
from datetime import datetime, timedelta
from logging import getLogger
from socket import timeout as SocketTimeoutException
from time import sleep
from traceback import format_exc
# gevent
from gevent import socket
from gevent.server import StreamServer
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
class SocketReaderCtx:
""" Configuration and context used to read that from sockets via read_from_socket.
"""
__slots__ = 'conn_id', 'socket', 'max_wait_time', 'max_msg_size', 'read_buffer_size', 'recv_timeout', \
'should_log_messages', 'buffer', 'is_ok', 'reason'
def __init__(self, conn_id, socket, max_wait_time, max_msg_size, read_buffer_size, recv_timeout, should_log_messages):
# type: (str, socket, int, int, int, int, object)
self.conn_id = conn_id
self.socket = socket
self.max_wait_time = max_wait_time
self.max_msg_size = max_msg_size
self.read_buffer_size = read_buffer_size
self.recv_timeout = recv_timeout
self.should_log_messages = should_log_messages
self.buffer = []
self.is_ok = False
self.reason = ''
# ################################################################################################################################
# ################################################################################################################################
def get_free_port(start=30000):
port = start
while is_port_taken(port):
port += 1
return port
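# A usage sketch - find the first port that nothing listens on, scanning upwards:
#
#   port = get_free_port()       # Starts scanning at 30000
#   port = get_free_port(40000)  # Starts scanning at 40000 instead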
# ################################################################################################################################
# Taken from http://grodola.blogspot.com/2014/04/reimplementing-netstat-in-cpython.html
def is_port_taken(port):
# psutil
import psutil
# Zato
from .platform_ import is_linux
# Shortcut for Linux so as not to bind to a socket which in turn means waiting until it's closed by OS
if is_linux:
for conn in psutil.net_connections(kind='tcp'):
if conn.laddr[1] == port and conn.status == psutil.CONN_LISTEN:
return True
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.bind(('', port))
sock.close()
except socket.error as e:
if e.args[0] == errno.EADDRINUSE:
return True
raise
# ################################################################################################################################
def _is_port_ready(port, needs_taken):
taken = is_port_taken(port)
return taken if needs_taken else not taken
# ################################################################################################################################
def _wait_for_port(port, timeout, interval, needs_taken):
port_ready = _is_port_ready(port, needs_taken)
if not port_ready:
start = datetime.utcnow()
wait_until = start + timedelta(seconds=timeout)
while not port_ready:
sleep(interval)
port_ready = _is_port_ready(port, needs_taken)
if datetime.utcnow() > wait_until:
break
return port_ready
# ################################################################################################################################
def wait_for_zato(address, url_path, timeout=60, interval=0.1):
""" Waits until a Zato server responds.
"""
# Requests
from requests import get as requests_get
# Imported here to avoid circular imports
from zato.common.util.api import wait_for_predicate
# Full URL to check a Zato server under
url = address + url_path
def _predicate_zato_ping(*ignored_args, **ignored_kwargs):
try:
requests_get(url, timeout=interval)
except Exception as e:
logger.warn('Waiting for `%s` (%s)', url, e)
else:
return True
return wait_for_predicate(_predicate_zato_ping, timeout, interval, address)
# ################################################################################################################################
def wait_for_zato_ping(address, timeout=60, interval=0.1):
""" Waits for timeout seconds until address replies to a request sent to /zato/ping.
"""
wait_for_zato(address, '/zato/ping', timeout, interval)
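# For instance, to block for up to a minute until a local server answers on /zato/ping
# (the address below is illustrative):
#
#   wait_for_zato_ping('http://localhost:17010')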
# ################################################################################################################################
def wait_until_port_taken(port, timeout=2, interval=0.1):
""" Waits until a given TCP port becomes taken, i.e. a process binds to a TCP socket.
"""
return _wait_for_port(port, timeout, interval, True)
# ################################################################################################################################
def wait_until_port_free(port, timeout=2, interval=0.1):
""" Waits until a given TCP port becomes free, i.e. a process releases a TCP socket.
"""
return _wait_for_port(port, timeout, interval, False)
# ################################################################################################################################
def get_fqdn_by_ip(ip_address, default, log_msg_prefix):
    # type: (str, str, str) -> str
try:
host = socket.gethostbyaddr(ip_address)[0]
return socket.getfqdn(host)
except Exception:
logger.warn('%s exception in FQDN lookup `%s`', log_msg_prefix, format_exc())
return '(unknown-{}-fqdn)'.format(default)
# ################################################################################################################################
def read_from_socket(ctx, _utcnow=datetime.utcnow, _timedelta=timedelta):
""" Reads data from an already connected TCP socket.
"""
# type: (SocketReaderCtx) -> bytes
# Local aliases
_should_log_messages = ctx.should_log_messages
_log_info = logger.warn
_log_debug = logger.warn
_conn_id = ctx.conn_id
_max_msg_size = ctx.max_msg_size
_read_buffer_size = ctx.read_buffer_size
_recv_timeout = ctx.recv_timeout
_socket_recv = ctx.socket.recv
_socket_settimeout = ctx.socket.settimeout
# Wait for that many seconds
    wait_until = _utcnow() + _timedelta(seconds=ctx.max_wait_time)
# How many bytes have we read so far
msg_size = 0
# Buffer to accumulate data in
buffer = []
# No data received yet
data = '<initial-no-data>'
# Run the main loop
while _utcnow() < wait_until:
# Check whether reading the data would not exceed our message size limit
new_size = msg_size + _read_buffer_size
if new_size > _max_msg_size:
reason = 'Message would exceed max. size allowed `{}` > `{}`'.format(new_size, _max_msg_size)
raise ValueError(reason)
try:
_socket_settimeout(_recv_timeout)
data = _socket_recv(_read_buffer_size)
if _should_log_messages:
_log_debug('Data received by `%s` (%d) -> `%s`', _conn_id, len(data), data)
except SocketTimeoutException:
# This is fine, we just iterate until wait_until time.
pass
        else:
            # Some data was received ..
            if data:
                buffer.append(data)
            # .. otherwise, the remote end disconnected so we can end.
            else:
                break
# If we are here, it means that we have all the data needed so we can just return it now
result = b''.join(buffer)
if _should_log_messages:
_log_info('Returning result from `%s` (%d) -> `%s`', _conn_id, len(result), result)
return result
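# A minimal sketch of reading from an already connected socket - the numeric limits
# below are arbitrary examples, not defaults taken from elsewhere in Zato:
#
#   ctx = SocketReaderCtx(
#       conn_id='conn-1',       # Identifier used in log messages only
#       socket=sock,            # An already connected socket object
#       max_wait_time=5,        # Give up reading after that many seconds
#       max_msg_size=1000000,   # Reject messages that would exceed this many bytes
#       read_buffer_size=2048,  # How much to request from the socket per recv call
#       recv_timeout=0.25,      # Per-recv timeout so the loop can re-check its deadline
#       should_log_messages=False)
#   data = read_from_socket(ctx)  # -> bytes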
# ################################################################################################################################
def parse_address(address):
# type: (str) -> (str, int)
# First, let's reverse it in case input contains an IPv6 address ..
address = address[::-1] # type: str
# .. now, split on the first colon to give the information we seek ..
port, host = address.split(':', 1)
# .. reverse the values back
host = host[::-1]
port = port[::-1]
# .. port needs to be an integer ..
port = int(port)
# .. and now we can return the result.
return host, port
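# For example, splitting on the last colon keeps IPv6 addresses intact:
#
#   parse_address('10.0.0.1:8080')  # -> ('10.0.0.1', 8080)
#   parse_address('::1:17010')      # -> ('::1', 17010)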
# ################################################################################################################################
# ################################################################################################################################
class ZatoStreamServer(StreamServer):
# ################################################################################################################################
def shutdown(self):
self.close()
# ################################################################################################################################
# These two methods are reimplemented from gevent.server to make it possible to use SO_REUSEPORT.
    @classmethod
    def get_listener(cls, address, backlog=None, family=None):
        if backlog is None:
            backlog = cls.backlog
        return cls._make_socket(address, backlog=backlog, reuse_addr=cls.reuse_addr, family=family)
@staticmethod
def _make_socket(address, backlog=50, reuse_addr=None, family=socket.AF_INET):
sock = socket.socket(family=family)
if reuse_addr is not None:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, reuse_addr)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
try:
sock.bind(address)
except socket.error as e:
strerror = getattr(e, 'strerror', None)
if strerror is not None:
e.strerror = strerror + ': ' + repr(address)
raise
sock.listen(backlog)
sock.setblocking(0)
return sock
# ################################################################################################################################
# ################################################################################################################################ | zato-common-holocene | /zato-common-holocene-3.2.1.tar.gz/zato-common-holocene-3.2.1/src/zato/common/util/tcp.py | tcp.py |
__version__ = '1.0.1'
__all__ = (
'__version__',
'dottedQuadToNum',
'numToDottedQuad',
'ValidateError',
'VdtUnknownCheckError',
'VdtParamError',
'VdtTypeError',
'VdtValueError',
'VdtValueTooSmallError',
'VdtValueTooBigError',
'VdtValueTooShortError',
'VdtValueTooLongError',
'VdtMissingValue',
'Validator',
'is_integer',
'is_float',
'is_boolean',
'is_list',
'is_tuple',
'is_ip_addr',
'is_string',
'is_int_list',
'is_bool_list',
'is_float_list',
'is_string_list',
'is_ip_addr_list',
'is_mixed_list',
'is_option',
'__docformat__',
)
import re
import sys
from pprint import pprint
#TODO - #21 - six is part of the repo now, but we didn't switch over to it here
# this could be replaced if six is used for compatibility, or there are no
# more assertions about items being a string
if sys.version_info < (3,):
string_type = basestring
else:
string_type = str
# so tests that care about unicode on 2.x can specify unicode, and the same
# tests when run on 3.x won't complain about a undefined name "unicode"
# since all strings are unicode on 3.x we just want to pass it through
# unchanged
unicode = lambda x: x
# in python 3, all ints are equivalent to python 2 longs, and they'll
# never show "L" in the repr
long = int
_list_arg = re.compile(r'''
(?:
([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*list\(
(
(?:
\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)
\s*,\s*
)*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)? # last one
)
\)
)
''', re.VERBOSE | re.DOTALL) # two groups
_list_members = re.compile(r'''
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?) # unquoted
)
(?:
(?:\s*,\s*)|(?:\s*$) # comma
)
''', re.VERBOSE | re.DOTALL) # one group
_paramstring = r'''
(?:
(
(?:
[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*list\(
(?:
\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)
\s*,\s*
)*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)? # last one
\)
)|
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?)| # unquoted
(?: # keyword argument
[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?) # unquoted
)
)
)
)
(?:
(?:\s*,\s*)|(?:\s*$) # comma
)
)
'''
_matchstring = '^%s*' % _paramstring
# Python pre 2.2.1 doesn't have bool
try:
bool
except NameError:
def bool(val):
"""Simple boolean equivalent function. """
if val:
return 1
else:
return 0
def dottedQuadToNum(ip):
"""
Convert decimal dotted quad string to long integer
>>> int(dottedQuadToNum('1 '))
1
>>> int(dottedQuadToNum(' 1.2'))
16777218
>>> int(dottedQuadToNum(' 1.2.3 '))
16908291
>>> int(dottedQuadToNum('1.2.3.4'))
16909060
>>> dottedQuadToNum('255.255.255.255')
4294967295
>>> dottedQuadToNum('255.255.255.256')
Traceback (most recent call last):
ValueError: Not a good dotted-quad IP: 255.255.255.256
"""
# import here to avoid it when ip_addr values are not used
import socket, struct
try:
return struct.unpack('!L',
socket.inet_aton(ip.strip()))[0]
except socket.error:
raise ValueError('Not a good dotted-quad IP: %s' % ip)
def numToDottedQuad(num):
"""
Convert int or long int to dotted quad string
>>> numToDottedQuad(long(-1))
Traceback (most recent call last):
ValueError: Not a good numeric IP: -1
>>> numToDottedQuad(long(1))
'0.0.0.1'
>>> numToDottedQuad(long(16777218))
'1.0.0.2'
>>> numToDottedQuad(long(16908291))
'1.2.0.3'
>>> numToDottedQuad(long(16909060))
'1.2.3.4'
>>> numToDottedQuad(long(4294967295))
'255.255.255.255'
>>> numToDottedQuad(long(4294967296))
Traceback (most recent call last):
ValueError: Not a good numeric IP: 4294967296
>>> numToDottedQuad(-1)
Traceback (most recent call last):
ValueError: Not a good numeric IP: -1
>>> numToDottedQuad(1)
'0.0.0.1'
>>> numToDottedQuad(16777218)
'1.0.0.2'
>>> numToDottedQuad(16908291)
'1.2.0.3'
>>> numToDottedQuad(16909060)
'1.2.3.4'
>>> numToDottedQuad(4294967295)
'255.255.255.255'
>>> numToDottedQuad(4294967296)
Traceback (most recent call last):
ValueError: Not a good numeric IP: 4294967296
"""
# import here to avoid it when ip_addr values are not used
import socket, struct
# no need to intercept here, 4294967295L is fine
if num > long(4294967295) or num < 0:
raise ValueError('Not a good numeric IP: %s' % num)
try:
return socket.inet_ntoa(
struct.pack('!L', long(num)))
except (socket.error, struct.error, OverflowError):
raise ValueError('Not a good numeric IP: %s' % num)
class ValidateError(Exception):
"""
This error indicates that the check failed.
It can be the base class for more specific errors.
Any check function that fails ought to raise this error.
(or a subclass)
>>> raise ValidateError
Traceback (most recent call last):
ValidateError
"""
class VdtMissingValue(ValidateError):
"""No value was supplied to a check that needed one."""
class VdtUnknownCheckError(ValidateError):
"""An unknown check function was requested"""
def __init__(self, value):
"""
>>> raise VdtUnknownCheckError('yoda')
Traceback (most recent call last):
VdtUnknownCheckError: the check "yoda" is unknown.
"""
ValidateError.__init__(self, 'the check "%s" is unknown.' % (value,))
class VdtParamError(SyntaxError):
"""An incorrect parameter was passed"""
def __init__(self, name, value):
"""
>>> raise VdtParamError('yoda', 'jedi')
Traceback (most recent call last):
VdtParamError: passed an incorrect value "jedi" for parameter "yoda".
"""
SyntaxError.__init__(self, 'passed an incorrect value "%s" for parameter "%s".' % (value, name))
class VdtTypeError(ValidateError):
"""The value supplied was of the wrong type"""
def __init__(self, value):
"""
>>> raise VdtTypeError('jedi')
Traceback (most recent call last):
VdtTypeError: the value "jedi" is of the wrong type.
"""
ValidateError.__init__(self, 'the value "%s" is of the wrong type.' % (value,))
class VdtValueError(ValidateError):
"""The value supplied was of the correct type, but was not an allowed value."""
def __init__(self, value):
"""
>>> raise VdtValueError('jedi')
Traceback (most recent call last):
VdtValueError: the value "jedi" is unacceptable.
"""
ValidateError.__init__(self, 'the value "%s" is unacceptable.' % (value,))
class VdtValueTooSmallError(VdtValueError):
"""The value supplied was of the correct type, but was too small."""
def __init__(self, value):
"""
>>> raise VdtValueTooSmallError('0')
Traceback (most recent call last):
VdtValueTooSmallError: the value "0" is too small.
"""
ValidateError.__init__(self, 'the value "%s" is too small.' % (value,))
class VdtValueTooBigError(VdtValueError):
"""The value supplied was of the correct type, but was too big."""
def __init__(self, value):
"""
>>> raise VdtValueTooBigError('1')
Traceback (most recent call last):
VdtValueTooBigError: the value "1" is too big.
"""
ValidateError.__init__(self, 'the value "%s" is too big.' % (value,))
class VdtValueTooShortError(VdtValueError):
"""The value supplied was of the correct type, but was too short."""
def __init__(self, value):
"""
>>> raise VdtValueTooShortError('jed')
Traceback (most recent call last):
VdtValueTooShortError: the value "jed" is too short.
"""
ValidateError.__init__(
self,
'the value "%s" is too short.' % (value,))
class VdtValueTooLongError(VdtValueError):
"""The value supplied was of the correct type, but was too long."""
def __init__(self, value):
"""
>>> raise VdtValueTooLongError('jedie')
Traceback (most recent call last):
VdtValueTooLongError: the value "jedie" is too long.
"""
ValidateError.__init__(self, 'the value "%s" is too long.' % (value,))
class Validator(object):
"""
Validator is an object that allows you to register a set of 'checks'.
These checks take input and test that it conforms to the check.
This can also involve converting the value from a string into
the correct datatype.
The ``check`` method takes an input string which configures which
check is to be used and applies that check to a supplied value.
An example input string would be:
'int_range(param1, param2)'
You would then provide something like:
>>> def int_range_check(value, min, max):
... # turn min and max from strings to integers
... min = int(min)
... max = int(max)
... # check that value is of the correct type.
... # possible valid inputs are integers or strings
... # that represent integers
... if not isinstance(value, (int, long, string_type)):
... raise VdtTypeError(value)
... elif isinstance(value, string_type):
... # if we are given a string
... # attempt to convert to an integer
... try:
... value = int(value)
... except ValueError:
... raise VdtValueError(value)
... # check the value is between our constraints
... if not min <= value:
... raise VdtValueTooSmallError(value)
... if not value <= max:
... raise VdtValueTooBigError(value)
... return value
>>> fdict = {'int_range': int_range_check}
>>> vtr1 = Validator(fdict)
>>> vtr1.check('int_range(20, 40)', '30')
30
>>> vtr1.check('int_range(20, 40)', '60')
Traceback (most recent call last):
VdtValueTooBigError: the value "60" is too big.
New functions can be added with : ::
>>> vtr2 = Validator()
>>> vtr2.functions['int_range'] = int_range_check
Or by passing in a dictionary of functions when Validator
is instantiated.
Your functions *can* use keyword arguments,
but the first argument should always be 'value'.
If the function doesn't take additional arguments,
the parentheses are optional in the check.
It can be written with either of : ::
keyword = function_name
keyword = function_name()
The first program to utilise Validator() was Michael Foord's
ConfigObj, an alternative to ConfigParser which supports lists and
can validate a config file using a config schema.
For more details on using Validator with ConfigObj see:
https://configobj.readthedocs.org/en/latest/configobj.html
"""
# this regex does the initial parsing of the checks
_func_re = re.compile(r'(.+?)\((.*)\)', re.DOTALL)
# this regex takes apart keyword arguments
_key_arg = re.compile(r'^([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.*)$', re.DOTALL)
# this regex finds keyword=list(....) type values
_list_arg = _list_arg
# this regex takes individual values out of lists - in one pass
_list_members = _list_members
# These regexes check a set of arguments for validity
# and then pull the members out
_paramfinder = re.compile(_paramstring, re.VERBOSE | re.DOTALL)
_matchfinder = re.compile(_matchstring, re.VERBOSE | re.DOTALL)
def __init__(self, functions=None):
"""
>>> vtri = Validator()
"""
self.functions = {
'': self._pass,
'integer': is_integer,
'float': is_float,
'boolean': is_boolean,
'ip_addr': is_ip_addr,
'string': is_string,
'list': is_list,
'tuple': is_tuple,
'int_list': is_int_list,
'float_list': is_float_list,
'bool_list': is_bool_list,
'ip_addr_list': is_ip_addr_list,
'string_list': is_string_list,
'mixed_list': is_mixed_list,
'pass': self._pass,
'option': is_option,
'force_list': force_list,
}
if functions is not None:
self.functions.update(functions)
# tekNico: for use by ConfigObj
self.baseErrorClass = ValidateError
self._cache = {}
def check(self, check, value, missing=False):
"""
Usage: check(check, value)
Arguments:
check: string representing check to apply (including arguments)
value: object to be checked
Returns value, converted to correct type if necessary
If the check fails, raises a ``ValidateError`` subclass.
>>> vtor.check('yoda', '')
Traceback (most recent call last):
VdtUnknownCheckError: the check "yoda" is unknown.
>>> vtor.check('yoda()', '')
Traceback (most recent call last):
VdtUnknownCheckError: the check "yoda" is unknown.
>>> vtor.check('string(default="")', '', missing=True)
''
"""
fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
if missing:
if default is None:
# no information needed here - to be handled by caller
raise VdtMissingValue()
value = self._handle_none(default)
if value is None:
return None
return self._check_value(value, fun_name, fun_args, fun_kwargs)
def _handle_none(self, value):
if value == 'None':
return None
elif value in ("'None'", '"None"'):
# Special case a quoted None
value = self._unquote(value)
return value
def _parse_with_caching(self, check):
if check in self._cache:
fun_name, fun_args, fun_kwargs, default = self._cache[check]
# We call list and dict below to work with *copies* of the data
# rather than the original (which are mutable of course)
fun_args = list(fun_args)
fun_kwargs = dict(fun_kwargs)
else:
fun_name, fun_args, fun_kwargs, default = self._parse_check(check)
fun_kwargs = dict([(str(key), value) for (key, value) in list(fun_kwargs.items())])
self._cache[check] = fun_name, list(fun_args), dict(fun_kwargs), default
return fun_name, fun_args, fun_kwargs, default
def _check_value(self, value, fun_name, fun_args, fun_kwargs):
try:
fun = self.functions[fun_name]
except KeyError:
raise VdtUnknownCheckError(fun_name)
else:
return fun(value, *fun_args, **fun_kwargs)
def _parse_check(self, check):
fun_match = self._func_re.match(check)
if fun_match:
fun_name = fun_match.group(1)
arg_string = fun_match.group(2)
arg_match = self._matchfinder.match(arg_string)
if arg_match is None:
# Bad syntax
raise VdtParamError('Bad syntax in check "%s".' % check)
fun_args = []
fun_kwargs = {}
# pull out args of group 2
for arg in self._paramfinder.findall(arg_string):
# args may need whitespace removing (before removing quotes)
arg = arg.strip()
listmatch = self._list_arg.match(arg)
if listmatch:
key, val = self._list_handle(listmatch)
fun_kwargs[key] = val
continue
keymatch = self._key_arg.match(arg)
if keymatch:
val = keymatch.group(2)
                if val not in ("'None'", '"None"'):
# Special case a quoted None
val = self._unquote(val)
fun_kwargs[keymatch.group(1)] = val
continue
fun_args.append(self._unquote(arg))
else:
# allows for function names without (args)
return check, (), {}, None
# Default must be deleted if the value is specified too,
# otherwise the check function will get a spurious "default" keyword arg
default = fun_kwargs.pop('default', None)
return fun_name, fun_args, fun_kwargs, default
def _unquote(self, val):
"""Unquote a value if necessary."""
if (len(val) >= 2) and (val[0] in ("'", '"')) and (val[0] == val[-1]):
val = val[1:-1]
return val
def _list_handle(self, listmatch):
"""Take apart a ``keyword=list('val, 'val')`` type string."""
out = []
name = listmatch.group(1)
args = listmatch.group(2)
for arg in self._list_members.findall(args):
out.append(self._unquote(arg))
return name, out
def _pass(self, value):
"""
Dummy check that always passes
>>> vtor.check('', 0)
0
>>> vtor.check('', '0')
'0'
"""
return value
def get_default_value(self, check):
"""
Given a check, return the default value for the check
(converted to the right type).
If the check doesn't specify a default value then a
``KeyError`` will be raised.
"""
fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
if default is None:
raise KeyError('Check "%s" has no default value.' % check)
value = self._handle_none(default)
if value is None:
return value
return self._check_value(value, fun_name, fun_args, fun_kwargs)
def _is_num_param(names, values, to_float=False):
"""
Return numbers from inputs or raise VdtParamError.
Lets ``None`` pass through.
Pass in keyword argument ``to_float=True`` to
use float for the conversion rather than int.
>>> _is_num_param(('', ''), (0, 1.0))
[0, 1]
>>> _is_num_param(('', ''), (0, 1.0), to_float=True)
[0.0, 1.0]
>>> _is_num_param(('a'), ('a'))
Traceback (most recent call last):
VdtParamError: passed an incorrect value "a" for parameter "a".
"""
    fun = float if to_float else int
out_params = []
for (name, val) in zip(names, values):
if val is None:
out_params.append(val)
elif isinstance(val, (int, long, float, string_type)):
try:
out_params.append(fun(val))
            except ValueError:
raise VdtParamError(name, val)
else:
raise VdtParamError(name, val)
return out_params
# built in checks
# you can override these by setting the appropriate name
# in Validator.functions
# note: if the params are specified wrongly in your input string,
# you will also raise errors.
def is_integer(value, min=None, max=None):
"""
A check that tests that a given value is an integer (int, or long)
and optionally, between bounds. A negative value is accepted, while
a float will fail.
If the value is a string, then the conversion is done - if possible.
Otherwise a VdtError is raised.
>>> vtor.check('integer', '-1')
-1
>>> vtor.check('integer', '0')
0
>>> vtor.check('integer', 9)
9
>>> vtor.check('integer', 'a')
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
>>> vtor.check('integer', '2.2')
Traceback (most recent call last):
VdtTypeError: the value "2.2" is of the wrong type.
>>> vtor.check('integer(10)', '20')
20
>>> vtor.check('integer(max=20)', '15')
15
>>> vtor.check('integer(10)', '9')
Traceback (most recent call last):
VdtValueTooSmallError: the value "9" is too small.
>>> vtor.check('integer(10)', 9)
Traceback (most recent call last):
VdtValueTooSmallError: the value "9" is too small.
>>> vtor.check('integer(max=20)', '35')
Traceback (most recent call last):
VdtValueTooBigError: the value "35" is too big.
>>> vtor.check('integer(max=20)', 35)
Traceback (most recent call last):
VdtValueTooBigError: the value "35" is too big.
>>> vtor.check('integer(0, 9)', False)
0
"""
(min_val, max_val) = _is_num_param(('min', 'max'), (min, max))
if not isinstance(value, (int, long, string_type)):
raise VdtTypeError(value)
if isinstance(value, string_type):
# if it's a string - does it represent an integer ?
try:
value = int(value)
except ValueError:
raise VdtTypeError(value)
if (min_val is not None) and (value < min_val):
raise VdtValueTooSmallError(value)
if (max_val is not None) and (value > max_val):
raise VdtValueTooBigError(value)
return value
def is_float(value, min=None, max=None):
"""
A check that tests that a given value is a float
(an integer will be accepted), and optionally - that it is between bounds.
If the value is a string, then the conversion is done - if possible.
Otherwise a VdtError is raised.
This can accept negative values.
>>> vtor.check('float', '2')
2.0
From now on we multiply the value to avoid comparing decimals
>>> vtor.check('float', '-6.8') * 10
-68.0
>>> vtor.check('float', '12.2') * 10
122.0
>>> vtor.check('float', 8.4) * 10
84.0
>>> vtor.check('float', 'a')
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
>>> vtor.check('float(10.1)', '10.2') * 10
102.0
>>> vtor.check('float(max=20.2)', '15.1') * 10
151.0
>>> vtor.check('float(10.0)', '9.0')
Traceback (most recent call last):
VdtValueTooSmallError: the value "9.0" is too small.
>>> vtor.check('float(max=20.0)', '35.0')
Traceback (most recent call last):
VdtValueTooBigError: the value "35.0" is too big.
"""
(min_val, max_val) = _is_num_param(
('min', 'max'), (min, max), to_float=True)
if not isinstance(value, (int, long, float, string_type)):
raise VdtTypeError(value)
if not isinstance(value, float):
# if it's a string - does it represent a float ?
try:
value = float(value)
except ValueError:
raise VdtTypeError(value)
if (min_val is not None) and (value < min_val):
raise VdtValueTooSmallError(value)
if (max_val is not None) and (value > max_val):
raise VdtValueTooBigError(value)
return value
bool_dict = {
True: True, 'on': True, '1': True, 'true': True, 'yes': True,
False: False, 'off': False, '0': False, 'false': False, 'no': False,
}
def is_boolean(value):
"""
Check if the value represents a boolean.
>>> vtor.check('boolean', 0)
0
>>> vtor.check('boolean', False)
0
>>> vtor.check('boolean', '0')
0
>>> vtor.check('boolean', 'off')
0
>>> vtor.check('boolean', 'false')
0
>>> vtor.check('boolean', 'no')
0
>>> vtor.check('boolean', 'nO')
0
>>> vtor.check('boolean', 'NO')
0
>>> vtor.check('boolean', 1)
1
>>> vtor.check('boolean', True)
1
>>> vtor.check('boolean', '1')
1
>>> vtor.check('boolean', 'on')
1
>>> vtor.check('boolean', 'true')
1
>>> vtor.check('boolean', 'yes')
1
>>> vtor.check('boolean', 'Yes')
1
>>> vtor.check('boolean', 'YES')
1
>>> vtor.check('boolean', '')
Traceback (most recent call last):
VdtTypeError: the value "" is of the wrong type.
>>> vtor.check('boolean', 'up')
Traceback (most recent call last):
VdtTypeError: the value "up" is of the wrong type.
"""
if isinstance(value, string_type):
try:
return bool_dict[value.lower()]
except KeyError:
raise VdtTypeError(value)
# we do an equality test rather than an identity test
    # this ensures Python 2.2 compatibility
# and allows 0 and 1 to represent True and False
if value == False:
return False
elif value == True:
return True
else:
raise VdtTypeError(value)
def is_ip_addr(value):
"""
Check that the supplied value is an Internet Protocol address, v.4,
represented by a dotted-quad string, i.e. '1.2.3.4'.
>>> vtor.check('ip_addr', '1 ')
'1'
>>> vtor.check('ip_addr', ' 1.2')
'1.2'
>>> vtor.check('ip_addr', ' 1.2.3 ')
'1.2.3'
>>> vtor.check('ip_addr', '1.2.3.4')
'1.2.3.4'
>>> vtor.check('ip_addr', '0.0.0.0')
'0.0.0.0'
>>> vtor.check('ip_addr', '255.255.255.255')
'255.255.255.255'
>>> vtor.check('ip_addr', '255.255.255.256')
Traceback (most recent call last):
VdtValueError: the value "255.255.255.256" is unacceptable.
>>> vtor.check('ip_addr', '1.2.3.4.5')
Traceback (most recent call last):
VdtValueError: the value "1.2.3.4.5" is unacceptable.
>>> vtor.check('ip_addr', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
"""
if not isinstance(value, string_type):
raise VdtTypeError(value)
value = value.strip()
try:
dottedQuadToNum(value)
except ValueError:
raise VdtValueError(value)
return value
def is_list(value, min=None, max=None):
"""
Check that the value is a list of values.
You can optionally specify the minimum and maximum number of members.
It does no check on list members.
>>> vtor.check('list', ())
[]
>>> vtor.check('list', [])
[]
>>> vtor.check('list', (1, 2))
[1, 2]
>>> vtor.check('list', [1, 2])
[1, 2]
>>> vtor.check('list(3)', (1, 2))
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2)" is too short.
>>> vtor.check('list(max=5)', (1, 2, 3, 4, 5, 6))
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
>>> vtor.check('list(min=3, max=5)', (1, 2, 3, 4))
[1, 2, 3, 4]
>>> vtor.check('list', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('list', '12')
Traceback (most recent call last):
VdtTypeError: the value "12" is of the wrong type.
"""
(min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
if isinstance(value, string_type):
raise VdtTypeError(value)
try:
num_members = len(value)
except TypeError:
raise VdtTypeError(value)
if min_len is not None and num_members < min_len:
raise VdtValueTooShortError(value)
if max_len is not None and num_members > max_len:
raise VdtValueTooLongError(value)
return list(value)
def is_tuple(value, min=None, max=None):
"""
Check that the value is a tuple of values.
You can optionally specify the minimum and maximum number of members.
It does no check on members.
>>> vtor.check('tuple', ())
()
>>> vtor.check('tuple', [])
()
>>> vtor.check('tuple', (1, 2))
(1, 2)
>>> vtor.check('tuple', [1, 2])
(1, 2)
>>> vtor.check('tuple(3)', (1, 2))
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2)" is too short.
>>> vtor.check('tuple(max=5)', (1, 2, 3, 4, 5, 6))
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
>>> vtor.check('tuple(min=3, max=5)', (1, 2, 3, 4))
(1, 2, 3, 4)
>>> vtor.check('tuple', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('tuple', '12')
Traceback (most recent call last):
VdtTypeError: the value "12" is of the wrong type.
"""
return tuple(is_list(value, min, max))
def is_string(value, min=None, max=None):
"""
Check that the supplied value is a string.
You can optionally specify the minimum and maximum number of members.
>>> vtor.check('string', '0')
'0'
>>> vtor.check('string', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('string(2)', '12')
'12'
>>> vtor.check('string(2)', '1')
Traceback (most recent call last):
VdtValueTooShortError: the value "1" is too short.
>>> vtor.check('string(min=2, max=3)', '123')
'123'
>>> vtor.check('string(min=2, max=3)', '1234')
Traceback (most recent call last):
VdtValueTooLongError: the value "1234" is too long.
"""
if not isinstance(value, string_type):
raise VdtTypeError(value)
(min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
try:
num_members = len(value)
except TypeError:
raise VdtTypeError(value)
if min_len is not None and num_members < min_len:
raise VdtValueTooShortError(value)
if max_len is not None and num_members > max_len:
raise VdtValueTooLongError(value)
return value
def is_int_list(value, min=None, max=None):
"""
Check that the value is a list of integers.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is an integer.
>>> vtor.check('int_list', ())
[]
>>> vtor.check('int_list', [])
[]
>>> vtor.check('int_list', (1, 2))
[1, 2]
>>> vtor.check('int_list', [1, 2])
[1, 2]
>>> vtor.check('int_list', [1, 'a'])
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
"""
return [is_integer(mem) for mem in is_list(value, min, max)]
def is_bool_list(value, min=None, max=None):
"""
Check that the value is a list of booleans.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a boolean.
>>> vtor.check('bool_list', ())
[]
>>> vtor.check('bool_list', [])
[]
>>> check_res = vtor.check('bool_list', (True, False))
>>> check_res == [True, False]
1
>>> check_res = vtor.check('bool_list', [True, False])
>>> check_res == [True, False]
1
>>> vtor.check('bool_list', [True, 'a'])
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
"""
return [is_boolean(mem) for mem in is_list(value, min, max)]
def is_float_list(value, min=None, max=None):
"""
Check that the value is a list of floats.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a float.
>>> vtor.check('float_list', ())
[]
>>> vtor.check('float_list', [])
[]
>>> vtor.check('float_list', (1, 2.0))
[1.0, 2.0]
>>> vtor.check('float_list', [1, 2.0])
[1.0, 2.0]
>>> vtor.check('float_list', [1, 'a'])
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
"""
return [is_float(mem) for mem in is_list(value, min, max)]
def is_string_list(value, min=None, max=None):
"""
Check that the value is a list of strings.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a string.
>>> vtor.check('string_list', ())
[]
>>> vtor.check('string_list', [])
[]
>>> vtor.check('string_list', ('a', 'b'))
['a', 'b']
>>> vtor.check('string_list', ['a', 1])
Traceback (most recent call last):
VdtTypeError: the value "1" is of the wrong type.
>>> vtor.check('string_list', 'hello')
Traceback (most recent call last):
VdtTypeError: the value "hello" is of the wrong type.
"""
if isinstance(value, string_type):
raise VdtTypeError(value)
return [is_string(mem) for mem in is_list(value, min, max)]
def is_ip_addr_list(value, min=None, max=None):
"""
Check that the value is a list of IP addresses.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is an IP address.
>>> vtor.check('ip_addr_list', ())
[]
>>> vtor.check('ip_addr_list', [])
[]
>>> vtor.check('ip_addr_list', ('1.2.3.4', '5.6.7.8'))
['1.2.3.4', '5.6.7.8']
>>> vtor.check('ip_addr_list', ['a'])
Traceback (most recent call last):
VdtValueError: the value "a" is unacceptable.
"""
return [is_ip_addr(mem) for mem in is_list(value, min, max)]
def force_list(value, min=None, max=None):
"""
Check that a value is a list, coercing strings into
a list with one member. Useful where users forget the
trailing comma that turns a single value into a list.
You can optionally specify the minimum and maximum number of members.
    A minimum of greater than one will fail if the user only supplies a
string.
>>> vtor.check('force_list', ())
[]
>>> vtor.check('force_list', [])
[]
>>> vtor.check('force_list', 'hello')
['hello']
"""
if not isinstance(value, (list, tuple)):
value = [value]
return is_list(value, min, max)
fun_dict = {
'integer': is_integer,
'float': is_float,
'ip_addr': is_ip_addr,
'string': is_string,
'boolean': is_boolean,
}
def is_mixed_list(value, *args):
"""
Check that the value is a list.
Allow specifying the type of each member.
Work on lists of specific lengths.
You specify each member as a positional argument specifying type
Each type should be one of the following strings :
'integer', 'float', 'ip_addr', 'string', 'boolean'
So you can specify a list of two strings, followed by
two integers as :
mixed_list('string', 'string', 'integer', 'integer')
The length of the list must match the number of positional
arguments you supply.
>>> mix_str = "mixed_list('integer', 'float', 'ip_addr', 'string', 'boolean')"
>>> check_res = vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', True))
>>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
1
>>> check_res = vtor.check(mix_str, ('1', '2.0', '1.2.3.4', 'a', 'True'))
>>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
1
>>> vtor.check(mix_str, ('b', 2.0, '1.2.3.4', 'a', True))
Traceback (most recent call last):
VdtTypeError: the value "b" is of the wrong type.
>>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a'))
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2.0, '1.2.3.4', 'a')" is too short.
>>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', 1, 'b'))
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2.0, '1.2.3.4', 'a', 1, 'b')" is too long.
>>> vtor.check(mix_str, 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('mixed_list("yoda")', ('a'))
Traceback (most recent call last):
VdtParamError: passed an incorrect value "KeyError('yoda',)" for parameter "'mixed_list'"
"""
try:
length = len(value)
except TypeError:
raise VdtTypeError(value)
if length < len(args):
raise VdtValueTooShortError(value)
elif length > len(args):
raise VdtValueTooLongError(value)
try:
return [fun_dict[arg](val) for arg, val in zip(args, value)]
except KeyError as e:
raise VdtParamError('mixed_list', e)
def is_option(value, *options):
"""
This check matches the value to any of a set of options.
>>> vtor.check('option("yoda", "jedi")', 'yoda')
'yoda'
>>> vtor.check('option("yoda", "jedi")', 'jed')
Traceback (most recent call last):
VdtValueError: the value "jed" is unacceptable.
>>> vtor.check('option("yoda", "jedi")', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
"""
if not isinstance(value, string_type):
raise VdtTypeError(value)
    if value not in options:
raise VdtValueError(value)
return value
def _test(value, *args, **keywargs):
"""
A function that exists for test purposes.
>>> checks = [
... '3, 6, min=1, max=3, test=list(a, b, c)',
... '3',
... '3, 6',
... '3,',
... 'min=1, test="a b c"',
... 'min=5, test="a, b, c"',
... 'min=1, max=3, test="a, b, c"',
... 'min=-100, test=-99',
... 'min=1, max=3',
... '3, 6, test="36"',
... '3, 6, test="a, b, c"',
... '3, max=3, test=list("a", "b", "c")',
... '''3, max=3, test=list("'a'", 'b', "x=(c)")''',
... "test='x=fish(3)'",
... ]
>>> v = Validator({'test': _test})
>>> for entry in checks:
... pprint(v.check(('test(%s)' % entry), 3))
(3, ('3', '6'), {'max': '3', 'min': '1', 'test': ['a', 'b', 'c']})
(3, ('3',), {})
(3, ('3', '6'), {})
(3, ('3',), {})
(3, (), {'min': '1', 'test': 'a b c'})
(3, (), {'min': '5', 'test': 'a, b, c'})
(3, (), {'max': '3', 'min': '1', 'test': 'a, b, c'})
(3, (), {'min': '-100', 'test': '-99'})
(3, (), {'max': '3', 'min': '1'})
(3, ('3', '6'), {'test': '36'})
(3, ('3', '6'), {'test': 'a, b, c'})
(3, ('3',), {'max': '3', 'test': ['a', 'b', 'c']})
(3, ('3',), {'max': '3', 'test': ["'a'", 'b', 'x=(c)']})
(3, (), {'test': 'x=fish(3)'})
>>> v = Validator()
>>> v.check('integer(default=6)', '3')
3
>>> v.check('integer(default=6)', None, True)
6
>>> v.get_default_value('integer(default=6)')
6
>>> v.get_default_value('float(default=6)')
6.0
>>> v.get_default_value('pass(default=None)')
>>> v.get_default_value("string(default='None')")
'None'
>>> v.get_default_value('pass')
Traceback (most recent call last):
KeyError: 'Check "pass" has no default value.'
>>> v.get_default_value('pass(default=list(1, 2, 3, 4))')
['1', '2', '3', '4']
>>> v = Validator()
>>> v.check("pass(default=None)", None, True)
>>> v.check("pass(default='None')", None, True)
'None'
>>> v.check('pass(default="None")', None, True)
'None'
>>> v.check('pass(default=list(1, 2, 3, 4))', None, True)
['1', '2', '3', '4']
Bug test for unicode arguments
>>> v = Validator()
>>> v.check(unicode('string(min=4)'), unicode('test')) == unicode('test')
True
>>> v = Validator()
>>> v.get_default_value(unicode('string(min=4, default="1234")')) == unicode('1234')
True
>>> v.check(unicode('string(min=4, default="1234")'), unicode('test')) == unicode('test')
True
>>> v = Validator()
>>> default = v.get_default_value('string(default=None)')
>>> default == None
1
"""
return (value, args, keywargs)
def _test2():
"""
>>>
>>> v = Validator()
>>> v.get_default_value('string(default="#ff00dd")')
'#ff00dd'
>>> v.get_default_value('integer(default=3) # comment')
3
"""
def _test3():
r"""
>>> vtor.check('string(default="")', '', missing=True)
''
>>> vtor.check('string(default="\n")', '', missing=True)
'\n'
>>> print(vtor.check('string(default="\n")', '', missing=True))
<BLANKLINE>
<BLANKLINE>
>>> vtor.check('string()', '\n')
'\n'
>>> vtor.check('string(default="\n\n\n")', '', missing=True)
'\n\n\n'
>>> vtor.check('string()', 'random \n text goes here\n\n')
'random \n text goes here\n\n'
>>> vtor.check('string(default=" \nrandom text\ngoes \n here\n\n ")',
... '', missing=True)
' \nrandom text\ngoes \n here\n\n '
>>> vtor.check("string(default='\n\n\n')", '', missing=True)
'\n\n\n'
>>> vtor.check("option('\n','a','b',default='\n')", '', missing=True)
'\n'
>>> vtor.check("string_list()", ['foo', '\n', 'bar'])
['foo', '\n', 'bar']
>>> vtor.check("string_list(default=list('\n'))", '', missing=True)
['\n']
"""
if __name__ == '__main__':
# run the code tests in doctest format
import sys
import doctest
m = sys.modules.get('__main__')
globs = m.__dict__.copy()
globs.update({
'vtor': Validator(),
})
failures, tests = doctest.testmod(
m, globs=globs,
optionflags=doctest.IGNORE_EXCEPTION_DETAIL | doctest.ELLIPSIS)
assert not failures, '{} failures out of {} tests'.format(failures, tests) | zato-common-holocene | /zato-common-holocene-3.2.1.tar.gz/zato-common-holocene-3.2.1/src/zato/common/ext/validate_.py | validate_.py |
import os
import re
import sys
from ast import literal_eval
from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE
from collections import OrderedDict
import six
__version__ = '5.0.6'
# imported lazily to avoid startup performance hit if it isn't used
compiler = None
# A dictionary mapping BOM to
# the encoding to decode with, and what to set the
# encoding attribute to.
BOMS = {
BOM_UTF8: ('utf_8', None),
BOM_UTF16_BE: ('utf16_be', 'utf_16'),
BOM_UTF16_LE: ('utf16_le', 'utf_16'),
BOM_UTF16: ('utf_16', 'utf_16'),
}
# All legal variants of the BOM codecs.
# TODO: the list of aliases is not meant to be exhaustive, is there a
# better way ?
BOM_LIST = {
'utf_16': 'utf_16',
'u16': 'utf_16',
'utf16': 'utf_16',
'utf-16': 'utf_16',
'utf16_be': 'utf16_be',
'utf_16_be': 'utf16_be',
'utf-16be': 'utf16_be',
'utf16_le': 'utf16_le',
'utf_16_le': 'utf16_le',
'utf-16le': 'utf16_le',
'utf_8': 'utf_8',
'u8': 'utf_8',
'utf': 'utf_8',
'utf8': 'utf_8',
'utf-8': 'utf_8',
}
# Map of encodings to the BOM to write.
BOM_SET = {
'utf_8': BOM_UTF8,
'utf_16': BOM_UTF16,
'utf16_be': BOM_UTF16_BE,
'utf16_le': BOM_UTF16_LE,
None: BOM_UTF8
}
def match_utf8(encoding):
return BOM_LIST.get(encoding.lower()) == 'utf_8'
# Quote strings used for writing values
squot = "'%s'"
dquot = '"%s"'
noquot = "%s"
wspace_plus = ' \r\n\v\t\'"'
tsquot = '"""%s"""'
tdquot = "'''%s'''"
# Sentinel for use in getattr calls to replace hasattr
MISSING = object()
__all__ = (
'DEFAULT_INDENT_TYPE',
'DEFAULT_INTERPOLATION',
'ConfigObjError',
'NestingError',
'ParseError',
'DuplicateError',
'ConfigspecError',
'ConfigObj',
'SimpleVal',
'InterpolationError',
'InterpolationLoopError',
'MissingInterpolationOption',
'RepeatSectionError',
'ReloadError',
'UnreprError',
'UnknownType',
'flatten_errors',
'get_extra_values'
)
DEFAULT_INTERPOLATION = 'configparser'
DEFAULT_INDENT_TYPE = ' '
MAX_INTERPOL_DEPTH = 10
OPTION_DEFAULTS = {
'interpolation': True,
'raise_errors': False,
'list_values': True,
'create_empty': False,
'file_error': False,
'configspec': None,
'stringify': True,
# option may be set to one of ('', ' ', '\t')
'indent_type': None,
'encoding': None,
'default_encoding': None,
'unrepr': False,
'write_empty_values': False,
}
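# A minimal construction sketch showing these defaults being overridden per
# instance ('app.ini' and the chosen option values are hypothetical):
#
#   >>> cfg = ConfigObj('app.ini', encoding='utf-8', interpolation='template',
#   ...                 list_values=False)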
# this could be replaced if six is used for compatibility, or there are no
# more assertions about items being a string
def getObj(s):
global compiler
if compiler is None:
import compiler
s = "a=" + s
p = compiler.parse(s)
return p.getChildren()[1].getChildren()[0].getChildren()[1]
class UnknownType(Exception):
pass
class Builder(object):
    def build(self, o):
        m = getattr(self, 'build_' + o.__class__.__name__, None)
        if m is None:
raise UnknownType(o.__class__.__name__)
return m(o)
def build_List(self, o):
return list(map(self.build, o.getChildren()))
def build_Const(self, o):
return o.value
def build_Dict(self, o):
d = {}
i = iter(map(self.build, o.getChildren()))
for el in i:
d[el] = next(i)
return d
def build_Tuple(self, o):
return tuple(self.build_List(o))
def build_Name(self, o):
if o.name == 'None':
return None
if o.name == 'True':
return True
if o.name == 'False':
return False
# An undefined Name
raise UnknownType('Undefined Name')
def build_Add(self, o):
real, imag = list(map(self.build_Const, o.getChildren()))
try:
real = float(real)
except TypeError:
raise UnknownType('Add')
if not isinstance(imag, complex) or imag.real != 0.0:
raise UnknownType('Add')
return real+imag
def build_Getattr(self, o):
parent = self.build(o.expr)
return getattr(parent, o.attrname)
def build_UnarySub(self, o):
return -self.build_Const(o.getChildren()[0])
def build_UnaryAdd(self, o):
return self.build_Const(o.getChildren()[0])
_builder = Builder()
def unrepr(s):
if not s:
return s
    # literal_eval accepts Python literals only, so this is safe
    return literal_eval(s)
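# A minimal sketch of what ``unrepr`` handles - any Python literal - using an
# assumed input string rather than one taken from a real config file:
#
#   >>> unrepr("{'a': 1, 'b': [True, None]}")
#   {'a': 1, 'b': [True, None]}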
class ConfigObjError(SyntaxError):
"""
This is the base class for all errors that ConfigObj raises.
It is a subclass of SyntaxError.
"""
def __init__(self, message='', line_number=None, line=''):
self.line = line
self.line_number = line_number
SyntaxError.__init__(self, message)
class NestingError(ConfigObjError):
"""
This error indicates a level of nesting that doesn't match.
"""
class ParseError(ConfigObjError):
"""
This error indicates that a line is badly written.
It is neither a valid ``key = value`` line,
nor a valid section marker line.
"""
class ReloadError(IOError):
"""
A 'reload' operation failed.
This exception is a subclass of ``IOError``.
"""
def __init__(self):
IOError.__init__(self, 'reload failed, filename is not set.')
class DuplicateError(ConfigObjError):
"""
The keyword or section specified already exists.
"""
class ConfigspecError(ConfigObjError):
"""
    An error occurred whilst parsing a configspec.
"""
class InterpolationError(ConfigObjError):
"""Base class for the two interpolation errors."""
class InterpolationLoopError(InterpolationError):
"""Maximum interpolation depth exceeded in string interpolation."""
def __init__(self, option):
InterpolationError.__init__(
self,
'interpolation loop detected in value "%s".' % option)
class RepeatSectionError(ConfigObjError):
"""
This error indicates additional sections in a section with a
``__many__`` (repeated) section.
"""
class MissingInterpolationOption(InterpolationError):
"""A value specified for interpolation was missing."""
def __init__(self, option):
msg = 'missing option "%s" in interpolation.' % option
InterpolationError.__init__(self, msg)
class UnreprError(ConfigObjError):
"""An error parsing in unrepr mode."""
class InterpolationEngine(object):
"""
A helper class to help perform string interpolation.
This class is an abstract base class; its descendants perform
the actual work.
"""
# compiled regexp to use in self.interpolate()
_KEYCRE = re.compile(r"%\(([^)]*)\)s")
_cookie = '%'
def __init__(self, section):
# the Section instance that "owns" this engine
self.section = section
def interpolate(self, key, value):
# short-cut
        if self._cookie not in value:
return value
def recursive_interpolate(key, value, section, backtrail):
"""The function that does the actual work.
``value``: the string we're trying to interpolate.
``section``: the section in which that string was found
``backtrail``: a dict to keep track of where we've been,
to detect and prevent infinite recursion loops
This is similar to a depth-first-search algorithm.
"""
# Have we been here already?
if (key, section.name) in backtrail:
# Yes - infinite loop detected
raise InterpolationLoopError(key)
# Place a marker on our backtrail so we won't come back here again
backtrail[(key, section.name)] = 1
# Now start the actual work
match = self._KEYCRE.search(value)
while match:
# The actual parsing of the match is implementation-dependent,
# so delegate to our helper function
k, v, s = self._parse_match(match)
if k is None:
# That's the signal that no further interpolation is needed
replacement = v
else:
# Further interpolation may be needed to obtain final value
replacement = recursive_interpolate(k, v, s, backtrail)
# Replace the matched string with its final value
start, end = match.span()
value = ''.join((value[:start], replacement, value[end:]))
new_search_start = start + len(replacement)
# Pick up the next interpolation key, if any, for next time
# through the while loop
match = self._KEYCRE.search(value, new_search_start)
# Now safe to come back here again; remove marker from backtrail
del backtrail[(key, section.name)]
return value
# Back in interpolate(), all we have to do is kick off the recursive
# function with appropriate starting values
value = recursive_interpolate(key, value, self.section, {})
return value
def _fetch(self, key):
"""Helper function to fetch values from owning section.
Returns a 2-tuple: the value, and the section where it was found.
"""
# switch off interpolation before we try and fetch anything !
save_interp = self.section.main.interpolation
self.section.main.interpolation = False
# Start at section that "owns" this InterpolationEngine
current_section = self.section
while True:
# try the current section first
val = current_section.get(key)
if val is not None and not isinstance(val, Section):
break
# try "DEFAULT" next
val = current_section.get('DEFAULT', {}).get(key)
if val is not None and not isinstance(val, Section):
break
# move up to parent and try again
# top-level's parent is itself
if current_section.parent is current_section:
# reached top level, time to give up
break
current_section = current_section.parent
# restore interpolation to previous value before returning
self.section.main.interpolation = save_interp
if val is None:
raise MissingInterpolationOption(key)
return val, current_section
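    # A lookup-order sketch for ``_fetch`` (section and key names are
    # hypothetical): resolving %(home)s inside [server] tries [server] itself,
    # then its 'DEFAULT' subsection, then the parent section and its 'DEFAULT':
    #
    #   [DEFAULT]
    #   home = /opt/app
    #   [server]
    #   logs = %(home)s/logs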
def _parse_match(self, match):
"""Implementation-dependent helper function.
Will be passed a match object corresponding to the interpolation
key we just found (e.g., "%(foo)s" or "$foo"). Should look up that
key in the appropriate config file section (using the ``_fetch()``
helper function) and return a 3-tuple: (key, value, section)
``key`` is the name of the key we're looking for
``value`` is the value found for that key
``section`` is a reference to the section where it was found
``key`` and ``section`` should be None if no further
interpolation should be performed on the resulting value
(e.g., if we interpolated "$$" and returned "$").
"""
raise NotImplementedError()
class ConfigParserInterpolation(InterpolationEngine):
"""Behaves like ConfigParser."""
_cookie = '%'
_KEYCRE = re.compile(r"%\(([^)]*)\)s")
def _parse_match(self, match):
key = match.group(1)
value, section = self._fetch(key)
return key, value, section
class TemplateInterpolation(InterpolationEngine):
"""Behaves like string.Template."""
_cookie = '$'
_delimiter = '$'
_KEYCRE = re.compile(r"""
\$(?:
(?P<escaped>\$) | # Two $ signs
(?P<named>[_a-z][_a-z0-9]*) | # $name format
{(?P<braced>[^}]*)} # ${name} format
)
""", re.IGNORECASE | re.VERBOSE)
def _parse_match(self, match):
# Valid name (in or out of braces): fetch value from section
key = match.group('named') or match.group('braced')
if key is not None:
value, section = self._fetch(key)
return key, value, section
# Escaped delimiter (e.g., $$): return single delimiter
if match.group('escaped') is not None:
# Return None for key and section to indicate it's time to stop
return None, self._delimiter, None
# Anything else: ignore completely, just return it unchanged
return None, match.group(), None
interpolation_engines = {
'configparser': ConfigParserInterpolation,
'template': TemplateInterpolation,
}
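# A side-by-side sketch of the two syntaxes (keys are hypothetical; the
# 'configparser' style is what ``interpolation=True`` selects by default):
#
#   home = /opt/app
#   logs = %(home)s/logs    # ConfigParserInterpolation
#   logs = $home/logs       # TemplateInterpolation, i.e. interpolation='template'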
def __newobj__(cls, *args):
# Hack for pickle
return cls.__new__(cls, *args)
class Section(OrderedDict):
"""
A dictionary-like object that represents a section in a config file.
It does string interpolation if the 'interpolation' attribute
of the 'main' object is set to True.
Interpolation is tried first from this object, then from the 'DEFAULT'
section of this object, next from the parent and its 'DEFAULT' section,
and so on until the main object is reached.
A Section will behave like an ordered dictionary - following the
order of the ``scalars`` and ``sections`` attributes.
You can use this to change the order of members.
Iteration follows the order: scalars, then sections.
"""
def __setstate__(self, state):
OrderedDict.update(self, state[0])
self.__dict__.update(state[1])
def __reduce__(self):
state = (OrderedDict(self), self.__dict__)
return (__newobj__, (self.__class__,), state)
def __init__(self, parent, depth, main, indict=None, name=None):
"""
* parent is the section above
* depth is the depth level of this section
* main is the main ConfigObj
* indict is a dictionary to initialise the section with
"""
if indict is None:
indict = {}
OrderedDict.__init__(self)
# used for nesting level *and* interpolation
self.parent = parent
# used for the interpolation attribute
self.main = main
# level of nesting depth of this Section
self.depth = depth
# purely for information
self.name = name
#
self._initialise()
# we do this explicitly so that __setitem__ is used properly
# (rather than just passing to ``OrderedDict.__init__``)
for entry, value in indict.items():
self[entry] = value
def _initialise(self):
# the sequence of scalar values in this Section
self.scalars = []
# the sequence of sections in this Section
self.sections = []
# for comments :-)
self.comments = {}
self.inline_comments = {}
# the configspec
self.configspec = None
# for defaults
self.defaults = []
self.default_values = {}
self.extra_values = []
self._created = False
def _interpolate(self, key, value):
try:
# do we already have an interpolation engine?
engine = self._interpolation_engine
except AttributeError:
# not yet: first time running _interpolate(), so pick the engine
name = self.main.interpolation
if name == True: # note that "if name:" would be incorrect here
# backwards-compatibility: interpolation=True means use default
name = DEFAULT_INTERPOLATION
name = name.lower() # so that "Template", "template", etc. all work
class_ = interpolation_engines.get(name, None)
if class_ is None:
# invalid value for self.main.interpolation
self.main.interpolation = False
return value
else:
# save reference to engine so we don't have to do this again
engine = self._interpolation_engine = class_(self)
# let the engine do the actual work
return engine.interpolate(key, value)
def __getitem__(self, key):
"""Fetch the item and do string interpolation."""
val = OrderedDict.__getitem__(self, key)
if self.main.interpolation:
if isinstance(val, six.string_types):
return self._interpolate(key, val)
if isinstance(val, list):
def _check(entry):
if isinstance(entry, six.string_types):
return self._interpolate(key, entry)
return entry
new = [_check(entry) for entry in val]
if new != val:
return new
return val
def __setitem__(self, key, value, unrepr=False):
"""
Correctly set a value.
Making dictionary values Section instances.
(We have to special case 'Section' instances - which are also dicts)
Keys must be strings.
Values need only be strings (or lists of strings) if
``main.stringify`` is set.
``unrepr`` must be set when setting a value to a dictionary, without
creating a new sub-section.
"""
if not isinstance(key, six.string_types):
raise ValueError('The key "%s" is not a string.' % key)
# add the comment
if key not in self.comments:
self.comments[key] = []
self.inline_comments[key] = ''
# remove the entry from defaults
if key in self.defaults:
self.defaults.remove(key)
#
if isinstance(value, Section):
if key not in self:
self.sections.append(key)
OrderedDict.__setitem__(self, key, value)
elif isinstance(value, dict) and not unrepr:
# First create the new depth level,
# then create the section
if key not in self:
self.sections.append(key)
new_depth = self.depth + 1
OrderedDict.__setitem__(
self,
key,
Section(
self,
new_depth,
self.main,
indict=value,
name=key))
else:
if key not in self:
self.scalars.append(key)
if not self.main.stringify:
if isinstance(value, six.string_types):
pass
elif isinstance(value, (list, tuple)):
for entry in value:
if not isinstance(entry, six.string_types):
raise TypeError('Value is not a string "%s".' % entry)
else:
raise TypeError('Value is not a string "%s".' % value)
OrderedDict.__setitem__(self, key, value)
def __delitem__(self, key):
"""Remove items from the sequence when deleting."""
        OrderedDict.__delitem__(self, key)
if key in self.scalars:
self.scalars.remove(key)
else:
self.sections.remove(key)
del self.comments[key]
del self.inline_comments[key]
def get(self, key, default=None):
"""A version of ``get`` that doesn't bypass string interpolation."""
try:
return self[key]
except KeyError:
return default
def update(self, indict):
"""
A version of update that uses our ``__setitem__``.
"""
for entry in indict:
self[entry] = indict[entry]
def pop(self, key, default=MISSING):
"""
'D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised'
"""
try:
val = self[key]
except KeyError:
if default is MISSING:
raise
val = default
else:
del self[key]
return val
def popitem(self):
"""Pops the first (key,val)"""
sequence = (self.scalars + self.sections)
if not sequence:
raise KeyError(": 'popitem(): dictionary is empty'")
key = sequence[0]
val = self[key]
del self[key]
return key, val
def clear(self):
"""
A version of clear that also affects scalars/sections
Also clears comments and configspec.
Leaves other attributes alone :
depth/main/parent are not affected
"""
OrderedDict.clear(self)
self.scalars = []
self.sections = []
self.comments = {}
self.inline_comments = {}
self.configspec = None
self.defaults = []
self.extra_values = []
def setdefault(self, key, default=None):
"""A version of setdefault that sets sequence if appropriate."""
try:
return self[key]
except KeyError:
self[key] = default
return self[key]
def items(self):
"""D.items() -> list of D's (key, value) pairs, as 2-tuples"""
return list(zip((self.scalars + self.sections), list(self.values())))
def keys(self):
"""D.keys() -> list of D's keys"""
return (self.scalars + self.sections)
def values(self):
"""D.values() -> list of D's values"""
return [self[key] for key in (self.scalars + self.sections)]
def iteritems(self):
"""D.iteritems() -> an iterator over the (key, value) items of D"""
return iter(list(self.items()))
def iterkeys(self):
"""D.iterkeys() -> an iterator over the keys of D"""
return iter((self.scalars + self.sections))
__iter__ = iterkeys
def itervalues(self):
"""D.itervalues() -> an iterator over the values of D"""
return iter(list(self.values()))
def __repr__(self):
"""x.__repr__() <==> repr(x)"""
def _getval(key):
try:
return self[key]
except MissingInterpolationOption:
return OrderedDict.__getitem__(self, key)
return '{%s}' % ', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
for key in (self.scalars + self.sections)])
__str__ = __repr__
__str__.__doc__ = "x.__str__() <==> str(x)"
# Extra methods - not in a normal dictionary
def dict(self):
"""
Return a deepcopy of self as a dictionary.
All members that are ``Section`` instances are recursively turned to
ordinary dictionaries - by calling their ``dict`` method.
>>> n = a.dict()
        >>> n == a
        True
        >>> n is a
        False
"""
newdict = {}
for entry in self:
this_entry = self[entry]
if isinstance(this_entry, Section):
this_entry = this_entry.dict()
elif isinstance(this_entry, list):
# create a copy rather than a reference
this_entry = list(this_entry)
elif isinstance(this_entry, tuple):
# create a copy rather than a reference
this_entry = tuple(this_entry)
newdict[entry] = this_entry
return newdict
def merge(self, indict):
"""
A recursive update - useful for merging config files.
>>> a = '''[section1]
... option1 = True
... [[subsection]]
... more_options = False
... # end of file'''.splitlines()
>>> b = '''# File is user.ini
... [section1]
... option1 = False
... # end of file'''.splitlines()
>>> c1 = ConfigObj(b)
>>> c2 = ConfigObj(a)
>>> c2.merge(c1)
>>> c2
ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}})
"""
for key, val in list(indict.items()):
if (key in self and isinstance(self[key], dict) and
isinstance(val, dict)):
self[key].merge(val)
else:
self[key] = val
def rename(self, oldkey, newkey):
"""
Change a keyname to another, without changing position in sequence.
Implemented so that transformations can be made on keys,
as well as on values. (used by encode and decode)
Also renames comments.
"""
if oldkey in self.scalars:
the_list = self.scalars
elif oldkey in self.sections:
the_list = self.sections
else:
raise KeyError('Key "%s" not found.' % oldkey)
pos = the_list.index(oldkey)
#
val = self[oldkey]
OrderedDict.__delitem__(self, oldkey)
OrderedDict.__setitem__(self, newkey, val)
the_list.remove(oldkey)
the_list.insert(pos, newkey)
comm = self.comments[oldkey]
inline_comment = self.inline_comments[oldkey]
del self.comments[oldkey]
del self.inline_comments[oldkey]
self.comments[newkey] = comm
self.inline_comments[newkey] = inline_comment
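    # A short sketch (keys and values are assumptions, not from any real file):
    #
    #   >>> s = ConfigObj({'old_key': 'x', 'other': 'y'})
    #   >>> s.rename('old_key', 'new_key')
    #   >>> list(s.keys())
    #   ['new_key', 'other']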
def walk(self, function, raise_errors=True,
call_on_sections=False, **keywargs):
"""
Walk every member and call a function on the keyword and value.
        Return a dictionary of the return values.
        If the function raises an exception, raise the error
        unless ``raise_errors=False``, in which case set the return value to
        ``False``.
        Any unrecognised keyword arguments you pass to walk will be passed on
        to the function you pass in.
        Note: if ``call_on_sections`` is ``True`` then - on encountering a
        subsection, *first* the function is called for the *whole* subsection,
        and then it recurses into its members. This means your function must be
        able to handle strings, dictionaries and lists. This allows you
        to change the key of subsections as well as ordinary members. The
return value when called on the whole subsection has to be discarded.
See the encode and decode methods for examples, including functions.
.. admonition:: caution
You can use ``walk`` to transform the names of members of a section
but you mustn't add or delete members.
>>> config = '''[XXXXsection]
... XXXXkey = XXXXvalue'''.splitlines()
>>> cfg = ConfigObj(config)
>>> cfg
ConfigObj({'XXXXsection': {'XXXXkey': 'XXXXvalue'}})
>>> def transform(section, key):
... val = section[key]
... newkey = key.replace('XXXX', 'CLIENT1')
... section.rename(key, newkey)
... if isinstance(val, (tuple, list, dict)):
... pass
... else:
... val = val.replace('XXXX', 'CLIENT1')
... section[newkey] = val
>>> cfg.walk(transform, call_on_sections=True)
{'CLIENT1section': {'CLIENT1key': None}}
>>> cfg
ConfigObj({'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}})
"""
out = {}
# scalars first
for i in range(len(self.scalars)):
entry = self.scalars[i]
try:
val = function(self, entry, **keywargs)
# bound again in case name has changed
entry = self.scalars[i]
out[entry] = val
except Exception:
if raise_errors:
raise
else:
entry = self.scalars[i]
out[entry] = False
# then sections
for i in range(len(self.sections)):
entry = self.sections[i]
if call_on_sections:
try:
function(self, entry, **keywargs)
except Exception:
if raise_errors:
raise
else:
entry = self.sections[i]
out[entry] = False
# bound again in case name has changed
entry = self.sections[i]
# previous result is discarded
out[entry] = self[entry].walk(
function,
raise_errors=raise_errors,
call_on_sections=call_on_sections,
**keywargs)
return out
def as_bool(self, key):
"""
Accepts a key as input. The corresponding value must be a string or
the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to
retain compatibility with Python 2.2.
If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns
``True``.
If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns
``False``.
``as_bool`` is not case sensitive.
Any other input will raise a ``ValueError``.
>>> a = ConfigObj()
>>> a['a'] = 'fish'
>>> a.as_bool('a')
Traceback (most recent call last):
ValueError: Value "fish" is neither True nor False
>>> a['b'] = 'True'
>>> a.as_bool('b')
        True
        >>> a['b'] = 'off'
        >>> a.as_bool('b')
        False
"""
val = self[key]
if val == True:
return True
elif val == False:
return False
else:
try:
if not isinstance(val, six.string_types):
# TODO: Why do we raise a KeyError here?
raise KeyError()
else:
return self.main._bools[val.lower()]
except KeyError:
raise ValueError('Value "%s" is neither True nor False' % val)
def as_int(self, key):
"""
A convenience method which coerces the specified value to an integer.
If the value is an invalid literal for ``int``, a ``ValueError`` will
be raised.
>>> a = ConfigObj()
>>> a['a'] = 'fish'
>>> a.as_int('a')
Traceback (most recent call last):
ValueError: invalid literal for int() with base 10: 'fish'
>>> a['b'] = '1'
>>> a.as_int('b')
1
>>> a['b'] = '3.2'
>>> a.as_int('b')
Traceback (most recent call last):
ValueError: invalid literal for int() with base 10: '3.2'
"""
return int(self[key])
def as_float(self, key):
"""
A convenience method which coerces the specified value to a float.
If the value is an invalid literal for ``float``, a ``ValueError`` will
be raised.
>>> a = ConfigObj()
>>> a['a'] = 'fish'
>>> a.as_float('a') #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
ValueError: invalid literal for float(): fish
>>> a['b'] = '1'
>>> a.as_float('b')
1.0
>>> a['b'] = '3.2'
>>> a.as_float('b') #doctest: +ELLIPSIS
3.2...
"""
return float(self[key])
def as_list(self, key):
"""
A convenience method which fetches the specified value, guaranteeing
that it is a list.
>>> a = ConfigObj()
>>> a['a'] = 1
>>> a.as_list('a')
[1]
>>> a['a'] = (1,)
>>> a.as_list('a')
[1]
>>> a['a'] = [1]
>>> a.as_list('a')
[1]
"""
result = self[key]
if isinstance(result, (tuple, list)):
return list(result)
return [result]
def restore_default(self, key):
"""
Restore (and return) default value for the specified key.
This method will only work for a ConfigObj that was created
with a configspec and has been validated.
If there is no default value for this key, ``KeyError`` is raised.
"""
default = self.default_values[key]
OrderedDict.__setitem__(self, key, default)
if key not in self.defaults:
self.defaults.append(key)
return default
def restore_defaults(self):
"""
Recursively restore default values to all members
that have them.
This method will only work for a ConfigObj that was created
with a configspec and has been validated.
It doesn't delete or modify entries without default values.
"""
for key in self.default_values:
self.restore_default(key)
for section in self.sections:
self[section].restore_defaults()
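    # Restoration sketch (requires a ConfigObj created with a configspec and
    # already validated; the key name is hypothetical):
    #
    #   >>> cfg['port'] = '9999'         # override the validated value
    #   >>> cfg.restore_default('port')  # the configspec default comes back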
class ConfigObj(Section):
"""An object to read, create, and write config files."""
_keyword = re.compile(r'''^ # line start
(\s*) # indentation
( # keyword
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'"=].*?) # no quotes
)
\s*=\s* # divider
(.*) # value (including list values and comments)
$ # line end
''',
re.VERBOSE)
_sectionmarker = re.compile(r'''^
(\s*) # 1: indentation
((?:\[\s*)+) # 2: section marker open
( # 3: section name open
(?:"\s*\S.*?\s*")| # at least one non-space with double quotes
(?:'\s*\S.*?\s*')| # at least one non-space with single quotes
(?:[^'"\s].*?) # at least one non-space unquoted
) # section name close
((?:\s*\])+) # 4: section marker close
\s*(\#.*)? # 5: optional comment
$''',
re.VERBOSE)
# this regexp pulls list values out as a single string
# or single values and comments
# FIXME: this regex adds a '' to the end of comma terminated lists
# workaround in ``_handle_value``
_valueexp = re.compile(r'''^
(?:
(?:
(
(?:
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\#][^,\#]*?) # unquoted
)
\s*,\s* # comma
)* # match all list items ending in a comma (if any)
)
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\#\s][^,]*?)| # unquoted
(?:(?<!,)) # Empty value
)? # last item in a list - or string value
)|
(,) # alternatively a single comma - empty list
)
\s*(\#.*)? # optional comment
$''',
re.VERBOSE)
# use findall to get the members of a list value
_listvalueexp = re.compile(r'''
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\#]?.*?) # unquoted
)
\s*,\s* # comma
''',
re.VERBOSE)
# this regexp is used for the value
# when lists are switched off
_nolistvalue = re.compile(r'''^
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'"\#].*?)| # unquoted
(?:) # Empty value
)
\s*(\#.*)? # optional comment
$''',
re.VERBOSE)
# regexes for finding triple quoted values on one line
_single_line_single = re.compile(r"^'''(.*?)'''\s*(#.*)?$")
_single_line_double = re.compile(r'^"""(.*?)"""\s*(#.*)?$')
_multi_line_single = re.compile(r"^(.*?)'''\s*(#.*)?$")
_multi_line_double = re.compile(r'^(.*?)"""\s*(#.*)?$')
_triple_quote = {
"'''": (_single_line_single, _multi_line_single),
'"""': (_single_line_double, _multi_line_double),
}
# Used by the ``istrue`` Section method
_bools = {
'yes': True, 'no': False,
'on': True, 'off': False,
'1': True, '0': False,
'true': True, 'false': False,
}
def __init__(self, infile=None, options=None, configspec=None, encoding=None,
interpolation=True, raise_errors=False, list_values=True,
create_empty=False, file_error=False, stringify=True,
indent_type=None, default_encoding=None, unrepr=False,
write_empty_values=False, _inspec=False, use_zato=True, zato_crypto_manager=None,
zato_secrets_conf=None, zato_secrets_url_prefix='zato+secret://'):
"""
Parse a config file or create a config file object.
``ConfigObj(infile=None, configspec=None, encoding=None,
interpolation=True, raise_errors=False, list_values=True,
create_empty=False, file_error=False, stringify=True,
indent_type=None, default_encoding=None, unrepr=False,
write_empty_values=False, _inspec=False)``
"""
self._inspec = _inspec
self.use_zato = use_zato
self.zato_crypto_manager = zato_crypto_manager
self.zato_secrets_conf = zato_secrets_conf
self.zato_secrets_url_prefix = zato_secrets_url_prefix
# init the superclass
Section.__init__(self, self, 0, self)
infile = infile or []
_options = {'configspec': configspec,
'encoding': encoding, 'interpolation': interpolation,
'raise_errors': raise_errors, 'list_values': list_values,
'create_empty': create_empty, 'file_error': file_error,
'stringify': stringify, 'indent_type': indent_type,
'default_encoding': default_encoding, 'unrepr': unrepr,
'write_empty_values': write_empty_values}
if options is None:
options = _options
else:
import warnings
warnings.warn('Passing in an options dictionary to ConfigObj() is '
'deprecated. Use **options instead.',
DeprecationWarning, stacklevel=2)
# TODO: check the values too.
for entry in options:
if entry not in OPTION_DEFAULTS:
raise TypeError('Unrecognised option "%s".' % entry)
for entry, value in list(OPTION_DEFAULTS.items()):
if entry not in options:
options[entry] = value
keyword_value = _options[entry]
if value != keyword_value:
options[entry] = keyword_value
# XXXX this ignores an explicit list_values = True in combination
# with _inspec. The user should *never* do that anyway, but still...
if _inspec:
options['list_values'] = False
self._initialise(options)
configspec = options['configspec']
self._original_configspec = configspec
self._load(infile, configspec)
def _load(self, infile, configspec):
if isinstance(infile, six.string_types):
self.filename = infile
if os.path.isfile(infile):
with open(infile, 'rb') as h:
content = h.readlines() or []
elif self.file_error:
# raise an error if the file doesn't exist
raise IOError('Config file not found: "%s".' % self.filename)
else:
# file doesn't already exist
if self.create_empty:
# this is a good test that the filename specified
# isn't impossible - like on a non-existent device
with open(infile, 'w') as h:
h.write('')
content = []
elif isinstance(infile, (list, tuple)):
content = list(infile)
elif isinstance(infile, dict):
# initialise self
# the Section class handles creating subsections
if isinstance(infile, ConfigObj):
# get a copy of our ConfigObj
def set_section(in_section, this_section):
for entry in in_section.scalars:
this_section[entry] = in_section[entry]
for section in in_section.sections:
this_section[section] = {}
set_section(in_section[section], this_section[section])
set_section(infile, self)
else:
for entry in infile:
self[entry] = infile[entry]
del self._errors
if configspec is not None:
self._handle_configspec(configspec)
else:
self.configspec = None
return
elif getattr(infile, 'read', MISSING) is not MISSING:
# This supports file like objects
content = infile.read() or []
# needs splitting into lines - but needs doing *after* decoding
# in case it's not an 8 bit encoding
else:
raise TypeError('infile must be a filename, file like object, or list of lines.')
if content:
# don't do it for the empty ConfigObj
content = self._handle_bom(content)
# infile is now *always* a list
#
# Set the newlines attribute (first line ending it finds)
# and strip trailing '\n' or '\r' from lines
for line in content:
if (not line) or (line[-1] not in ('\r', '\n')):
continue
for end in ('\r\n', '\n', '\r'):
if line.endswith(end):
self.newlines = end
break
break
assert all(isinstance(line, six.string_types) for line in content), repr(content)
content = [line.rstrip('\r\n') for line in content]
self._parse(content)
# if we had any errors, now is the time to raise them
if self._errors:
info = "at line %s." % self._errors[0].line_number
if len(self._errors) > 1:
msg = "Parsing failed with several errors.\nFirst error %s" % info
error = ConfigObjError(msg)
else:
error = self._errors[0]
# set the errors attribute; it's a list of tuples:
# (error_type, message, line_number)
error.errors = self._errors
# set the config attribute
error.config = self
raise error
# delete private attributes
del self._errors
if configspec is None:
self.configspec = None
else:
self._handle_configspec(configspec)
def _initialise(self, options=None):
if options is None:
options = OPTION_DEFAULTS
# initialise a few variables
self.filename = None
self._errors = []
self.raise_errors = options['raise_errors']
self.interpolation = options['interpolation']
self.list_values = options['list_values']
self.create_empty = options['create_empty']
self.file_error = options['file_error']
self.stringify = options['stringify']
self.indent_type = options['indent_type']
self.encoding = options['encoding']
self.default_encoding = options['default_encoding']
self.BOM = False
self.newlines = None
self.write_empty_values = options['write_empty_values']
self.unrepr = options['unrepr']
self.initial_comment = []
self.final_comment = []
self.configspec = None
if self._inspec:
self.list_values = False
# Clear section attributes as well
Section._initialise(self)
def __repr__(self):
def _getval(key):
try:
return self[key]
except MissingInterpolationOption:
return OrderedDict.__getitem__(self, key)
return ('ConfigObj({%s})' %
', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
for key in (self.scalars + self.sections)]))
def _handle_bom(self, infile):
"""
Handle any BOM, and decode if necessary.
If an encoding is specified, that *must* be used - but the BOM should
still be removed (and the BOM attribute set).
(If the encoding is wrongly specified, then a BOM for an alternative
encoding won't be discovered or removed.)
If an encoding is not specified, UTF8 or UTF16 BOM will be detected and
removed. The BOM attribute will be set. UTF16 will be decoded to
unicode.
NOTE: This method must not be called with an empty ``infile``.
Specifying the *wrong* encoding is likely to cause a
``UnicodeDecodeError``.
``infile`` must always be returned as a list of lines, but may be
passed in as a single string.
"""
if ((self.encoding is not None) and
(self.encoding.lower() not in BOM_LIST)):
# No need to check for a BOM
# the encoding specified doesn't have one
# just decode
return self._decode(infile, self.encoding)
if isinstance(infile, (list, tuple)):
line = infile[0]
else:
line = infile
if isinstance(line, six.text_type):
# it's already decoded and there's no need to do anything
# else, just use the _decode utility method to handle
# listifying appropriately
return self._decode(infile, self.encoding)
if self.encoding is not None:
# encoding explicitly supplied
# And it could have an associated BOM
# TODO: if encoding is just UTF16 - we ought to check for both
# TODO: big endian and little endian versions.
enc = BOM_LIST[self.encoding.lower()]
if enc == 'utf_16':
# For UTF16 we try big endian and little endian
for BOM, (encoding, final_encoding) in list(BOMS.items()):
if not final_encoding:
# skip UTF8
continue
if infile.startswith(BOM):
### BOM discovered
##self.BOM = True
# Don't need to remove BOM
return self._decode(infile, encoding)
# If we get this far, will *probably* raise a DecodeError
# As it doesn't appear to start with a BOM
return self._decode(infile, self.encoding)
# Must be UTF8
BOM = BOM_SET[enc]
if not line.startswith(BOM):
return self._decode(infile, self.encoding)
newline = line[len(BOM):]
# BOM removed
if isinstance(infile, (list, tuple)):
infile[0] = newline
else:
infile = newline
self.BOM = True
return self._decode(infile, self.encoding)
# No encoding specified - so we need to check for UTF8/UTF16
for BOM, (encoding, final_encoding) in list(BOMS.items()):
if not isinstance(line, six.binary_type) or not line.startswith(BOM):
# didn't specify a BOM, or it's not a bytestring
continue
else:
# BOM discovered
self.encoding = final_encoding
if not final_encoding:
self.BOM = True
# UTF8
# remove BOM
newline = line[len(BOM):]
if isinstance(infile, (list, tuple)):
infile[0] = newline
else:
infile = newline
# UTF-8
if isinstance(infile, six.text_type):
return infile.splitlines(True)
elif isinstance(infile, six.binary_type):
return infile.decode('utf-8').splitlines(True)
else:
return self._decode(infile, 'utf-8')
# UTF16 - have to decode
return self._decode(infile, encoding)
if six.PY2 and isinstance(line, str):
# don't actually do any decoding, since we're on python 2 and
# returning a bytestring is fine
return self._decode(infile, None)
# No BOM discovered and no encoding specified, default to UTF-8
if isinstance(infile, six.binary_type):
return infile.decode('utf-8').splitlines(True)
else:
return self._decode(infile, 'utf-8')
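    # BOM sketch: feeding in a UTF-8 BOM-prefixed byte line (an assumed input)
    # strips the marker, decodes the content and records the fact:
    #
    #   >>> c = ConfigObj([b'\xef\xbb\xbfkey = value'])
    #   >>> c.BOM
    #   True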
def _a_to_u(self, aString):
"""Decode ASCII strings to unicode if a self.encoding is specified."""
if isinstance(aString, six.binary_type) and self.encoding:
return aString.decode(self.encoding)
else:
return aString
def _decode(self, infile, encoding):
"""
        Decode infile to unicode, using the specified encoding.
        If infile is a string, it also needs converting to a list.
"""
if isinstance(infile, six.string_types):
return infile.splitlines(True)
if isinstance(infile, six.binary_type):
# NOTE: Could raise a ``UnicodeDecodeError``
if encoding:
return infile.decode(encoding).splitlines(True)
else:
return infile.splitlines(True)
if encoding:
for i, line in enumerate(infile):
if isinstance(line, six.binary_type):
# NOTE: The isinstance test here handles mixed lists of unicode/string
# NOTE: But the decode will break on any non-string values
# NOTE: Or could raise a ``UnicodeDecodeError``
infile[i] = line.decode(encoding)
return infile
def _decode_element(self, line):
"""Decode element to unicode if necessary."""
if isinstance(line, six.binary_type) and self.default_encoding:
return line.decode(self.default_encoding)
else:
return line
# TODO: this may need to be modified
def _str(self, value):
"""
Used by ``stringify`` within validate, to turn non-string values
into strings.
"""
if not isinstance(value, six.string_types):
            # intentionally 'str' because it's just whatever the "normal"
# string type is for the python version we're dealing with
return str(value)
else:
return value
def _parse(self, infile):
"""Actually parse the config file."""
temp_list_values = self.list_values
if self.unrepr:
self.list_values = False
comment_list = []
done_start = False
this_section = self
maxline = len(infile) - 1
cur_index = -1
reset_comment = False
while cur_index < maxline:
if reset_comment:
comment_list = []
cur_index += 1
line = infile[cur_index]
sline = line.strip()
# do we have anything on the line ?
if not sline or sline.startswith('#'):
reset_comment = False
comment_list.append(line)
continue
if not done_start:
# preserve initial comment
self.initial_comment = comment_list
comment_list = []
done_start = True
reset_comment = True
# first we check if it's a section marker
mat = self._sectionmarker.match(line)
if mat is not None:
# is a section line
(indent, sect_open, sect_name, sect_close, comment) = mat.groups()
if indent and (self.indent_type is None):
self.indent_type = indent
cur_depth = sect_open.count('[')
if cur_depth != sect_close.count(']'):
self._handle_error("Cannot compute the section depth",
NestingError, infile, cur_index)
continue
if cur_depth < this_section.depth:
# the new section is dropping back to a previous level
try:
parent = self._match_depth(this_section,
cur_depth).parent
except SyntaxError:
self._handle_error("Cannot compute nesting level",
NestingError, infile, cur_index)
continue
elif cur_depth == this_section.depth:
# the new section is a sibling of the current section
parent = this_section.parent
elif cur_depth == this_section.depth + 1:
                    # the new section is a child of the current section
parent = this_section
else:
self._handle_error("Section too nested",
NestingError, infile, cur_index)
continue
sect_name = self._unquote(sect_name)
if sect_name in parent:
self._handle_error('Duplicate section name',
DuplicateError, infile, cur_index)
continue
# create the new section
this_section = Section(
parent,
cur_depth,
self,
name=sect_name)
parent[sect_name] = this_section
parent.inline_comments[sect_name] = comment
parent.comments[sect_name] = comment_list
continue
#
# it's not a section marker,
# so it should be a valid ``key = value`` line
mat = self._keyword.match(line)
if mat is None:
self._handle_error(
'Invalid line ({0!r}) (matched as neither section nor keyword)'.format(line),
ParseError, infile, cur_index)
else:
# is a keyword value
# value will include any inline comment
(indent, key, value) = mat.groups()
# Handle Zato-specific needs
if self.use_zato:
# This may be an environment variable ..
if value.startswith('$'):
# .. but not if it's just a $ sign or an actual variable starting with it.
if not (len(value) == 1 or value.startswith('$$')):
env_key_name = value[1:].upper()
try:
value = os.environ[env_key_name]
except KeyError:
raise KeyError('Environment variable `{}` not found, config key `{}`'.format(env_key_name, key))
# .. this may be a value to decrypt with a secret key (note that it is an if, not elif,
# to make it possible for environment variables to point to secrets.conf).
if value.startswith(self.zato_secrets_url_prefix):
entry = value.replace(self.zato_secrets_url_prefix, '', 1)
if not entry:
raise ValueError('Missing entry in address `{}`, config key `{}`'.format(value, key))
entry = entry.split('.')
group_name = entry[0]
group_key = '.'.join(entry[1:])
if self.zato_secrets_conf:
group = self.zato_secrets_conf.get(group_name)
if not group:
raise ValueError('Group not found `{}`, config key `{}`, value `{}`'.format(
group_name, key, value))
if not group_key in group:
raise ValueError('Group key not found `{}`, config key `{}`, value `{}`'.format(
group_key, key, value))
else:
encrypted = group[group_key]
if encrypted:
try:
value = self.zato_crypto_manager.decrypt(encrypted)
except Exception as e:
raise ValueError('Could not decrypt value `{}`, group:`{}`, group_key:`{}`, e:`{}`'.format(
encrypted, group, group_key, e))
else:
value = encrypted # This will happen if 'encrypted' is actually an empty string
if indent and (self.indent_type is None):
self.indent_type = indent
# check for a multiline value
if value[:3] in ['"""', "'''"]:
try:
value, comment, cur_index = self._multiline(
value, infile, cur_index, maxline)
except SyntaxError:
self._handle_error(
'Parse error in multiline value',
ParseError, infile, cur_index)
continue
else:
if self.unrepr:
comment = ''
try:
value = unrepr(value)
except Exception as e:
if type(e) == UnknownType:
msg = 'Unknown name or type in value'
else:
msg = 'Parse error from unrepr-ing multiline value'
self._handle_error(msg, UnreprError, infile,
cur_index)
continue
else:
if self.unrepr:
comment = ''
try:
value = unrepr(value)
except Exception as e:
if isinstance(e, UnknownType):
msg = 'Unknown name or type in value'
else:
msg = 'Parse error from unrepr-ing value'
self._handle_error(msg, UnreprError, infile,
cur_index)
continue
else:
# extract comment and lists
try:
(value, comment) = self._handle_value(value)
except SyntaxError:
self._handle_error(
'Parse error in value',
ParseError, infile, cur_index)
continue
#
key = self._unquote(key)
if key in this_section:
self._handle_error(
'Duplicate keyword name',
DuplicateError, infile, cur_index)
continue
# add the key.
# we set unrepr because if we have got this far we will never
# be creating a new section
# As a last resort, we can attempt to convert strings to their actual
# data types, e.g. integers.
if self.use_zato:
try:
value = literal_eval(value)
except Exception:
# That's OK, we just had to try
pass
this_section.__setitem__(key, value, unrepr=True)
this_section.inline_comments[key] = comment
this_section.comments[key] = comment_list
continue
#
if self.indent_type is None:
# no indentation used, set the type accordingly
self.indent_type = ''
# preserve the final comment
if not self and not self.initial_comment:
self.initial_comment = comment_list
elif not reset_comment:
self.final_comment = comment_list
self.list_values = temp_list_values
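    # Zato-specific value forms recognised by ``_parse`` above (key names are
    # hypothetical; '$NAME' substitution needs ``use_zato=True`` and the secret
    # form additionally needs ``zato_secrets_conf``/``zato_crypto_manager``):
    #
    #   password = $DB_PASSWORD                # replaced with os.environ['DB_PASSWORD']
    #   amount   = $$literal                   # a leading '$$' disables the lookup
    #   api_key  = zato+secret://prod.api_key  # decrypted from group 'prod', key 'api_key'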
def _match_depth(self, sect, depth):
"""
Given a section and a depth level, walk back through the sections
parents to see if the depth level matches a previous section.
Return a reference to the right section,
or raise a SyntaxError.
"""
while depth < sect.depth:
if sect is sect.parent:
# we've reached the top level already
raise SyntaxError()
sect = sect.parent
if sect.depth == depth:
return sect
# shouldn't get here
raise SyntaxError()
def _handle_error(self, text, ErrorClass, infile, cur_index):
"""
Handle an error according to the error settings.
Either raise the error or store it.
        The error will have occurred at ``cur_index``.
"""
line = infile[cur_index]
cur_index += 1
message = '{0} at line {1}.'.format(text, cur_index)
error = ErrorClass(message, cur_index, line)
if self.raise_errors:
# raise the error - parsing stops here
raise error
# store the error
# reraise when parsing has finished
self._errors.append(error)
def _unquote(self, value):
"""Return an unquoted version of a value"""
if not value:
# should only happen during parsing of lists
raise SyntaxError
if (value[0] == value[-1]) and (value[0] in ('"', "'")):
value = value[1:-1]
return value
def _quote(self, value, multiline=True):
"""
Return a safely quoted version of a value.
Raise a ConfigObjError if the value cannot be safely quoted.
If multiline is ``True`` (default) then use triple quotes
if necessary.
* Don't quote values that don't need it.
* Recursively quote members of a list and return a comma joined list.
* Multiline is ``False`` for lists.
* Obey list syntax for empty and single member lists.
If ``list_values=False`` then the value is only quoted if it contains
a ``\\n`` (is multiline) or '#'.
If ``write_empty_values`` is set, and the value is an empty string, it
won't be quoted.
"""
if multiline and self.write_empty_values and value == '':
# Only if multiline is set, so that it is used for values not
# keys, and not values that are part of a list
return ''
if multiline and isinstance(value, (list, tuple)):
if not value:
return ','
elif len(value) == 1:
return self._quote(value[0], multiline=False) + ','
return ', '.join([self._quote(val, multiline=False)
for val in value])
if not isinstance(value, six.string_types):
if self.stringify:
                # intentionally 'str' because it's just whatever the "normal"
# string type is for the python version we're dealing with
value = str(value)
else:
raise TypeError('Value "%s" is not a string.' % value)
if not value:
return '""'
no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value
need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value ))
hash_triple_quote = multiline and not need_triple and ("'" in value) and ('"' in value) and ('#' in value)
check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote
if check_for_single:
if not self.list_values:
# we don't quote if ``list_values=False``
quot = noquot
# for normal values either single or double quotes will do
elif '\n' in value:
# will only happen if multiline is off - e.g. '\n' in key
raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
elif ((value[0] not in wspace_plus) and
(value[-1] not in wspace_plus) and
(',' not in value)):
quot = noquot
else:
quot = self._get_single_quote(value)
else:
# if value has '\n' or "'" *and* '"', it will need triple quotes
quot = self._get_triple_quote(value)
if quot == noquot and '#' in value and self.list_values:
quot = self._get_single_quote(value)
return quot % value
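    # Quoting sketch (inputs are assumptions; ``multiline=True`` throughout):
    #
    #   _quote('plain')      ->  plain          (no quoting needed)
    #   _quote('a, b')       ->  "a, b"         (the comma forces quoting)
    #   _quote(['a', 'b'])   ->  a, b           (each member quoted as needed)
    #   _quote([])           ->  ,              (empty-list syntax)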
def _get_single_quote(self, value):
if ("'" in value) and ('"' in value):
raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
elif '"' in value:
quot = squot
else:
quot = dquot
return quot
def _get_triple_quote(self, value):
if (value.find('"""') != -1) and (value.find("'''") != -1):
raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
if value.find('"""') == -1:
quot = tdquot
else:
quot = tsquot
return quot
def _handle_value(self, value):
"""
Given a value string, unquote, remove comment,
handle lists. (including empty and single member lists)
"""
if self._inspec:
# Parsing a configspec so don't handle comments
return (value, '')
# do we look for lists in values ?
if not self.list_values:
mat = self._nolistvalue.match(value)
if mat is None:
raise SyntaxError()
# NOTE: we don't unquote here
return mat.groups()
#
mat = self._valueexp.match(value)
if mat is None:
# the value is badly constructed, probably badly quoted,
# or an invalid list
raise SyntaxError()
(list_values, single, empty_list, comment) = mat.groups()
if (list_values == '') and (single is None):
# change this if you want to accept empty values
raise SyntaxError()
# NOTE: note there is no error handling from here if the regex
# is wrong: then incorrect values will slip through
if empty_list is not None:
# the single comma - meaning an empty list
return ([], comment)
if single is not None:
# handle empty values
if list_values and not single:
# FIXME: the '' is a workaround because our regex now matches
# '' at the end of a list if it has a trailing comma
single = None
else:
single = single or '""'
single = self._unquote(single)
if list_values == '':
# not a list value
return (single, comment)
the_list = self._listvalueexp.findall(list_values)
the_list = [self._unquote(val) for val in the_list]
if single is not None:
the_list += [single]
return (the_list, comment)
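    # List-parsing sketch for the regexes used above (values are assumptions):
    #
    #   keys = a, b, c   ->  (['a', 'b', 'c'], comment)
    #   keys = a,        ->  (['a'], comment)   # trailing comma, see FIXME above
    #   keys = ,         ->  ([], comment)      # a single comma is an empty list
    #   keys = a         ->  ('a', comment)     # plain scalar, not a list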
def _multiline(self, value, infile, cur_index, maxline):
"""Extract the value, where we are in a multiline situation."""
quot = value[:3]
newvalue = value[3:]
single_line = self._triple_quote[quot][0]
multi_line = self._triple_quote[quot][1]
mat = single_line.match(value)
if mat is not None:
retval = list(mat.groups())
retval.append(cur_index)
return retval
elif newvalue.find(quot) != -1:
# somehow the triple quote is missing
raise SyntaxError()
#
while cur_index < maxline:
cur_index += 1
newvalue += '\n'
line = infile[cur_index]
if line.find(quot) == -1:
newvalue += line
else:
# end of multiline, process it
break
else:
# we've got to the end of the config, oops...
raise SyntaxError()
mat = multi_line.match(line)
if mat is None:
# a badly formed line
raise SyntaxError()
(value, comment) = mat.groups()
return (newvalue + value, comment, cur_index)
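    # Multiline sketch: a value opened with a triple quote continues across
    # lines until the matching closing quote (content below is hypothetical):
    #
    #   key = '''first line
    #   second line'''   # parsed as 'first line\nsecond line'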
def _handle_configspec(self, configspec):
"""Parse the configspec."""
# FIXME: Should we check that the configspec was created with the
# correct settings ? (i.e. ``list_values=False``)
if not isinstance(configspec, ConfigObj):
try:
configspec = ConfigObj(configspec,
raise_errors=True,
file_error=True,
_inspec=True)
except ConfigObjError as e:
# FIXME: Should these errors have a reference
# to the already parsed ConfigObj ?
raise ConfigspecError('Parsing configspec failed: %s' % e)
except IOError as e:
raise IOError('Reading configspec failed: %s' % e)
self.configspec = configspec
def _set_configspec(self, section, copy):
"""
Called by validate. Handles setting the configspec on subsections
including sections to be validated by __many__
"""
configspec = section.configspec
many = configspec.get('__many__')
if isinstance(many, dict):
for entry in section.sections:
if entry not in configspec:
section[entry].configspec = many
for entry in configspec.sections:
if entry == '__many__':
continue
if entry not in section:
section[entry] = {}
section[entry]._created = True
if copy:
# copy comments
section.comments[entry] = configspec.comments.get(entry, [])
section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
# Could be a scalar when we expect a section
if isinstance(section[entry], Section):
section[entry].configspec = configspec[entry]
def _write_line(self, indent_string, entry, this_entry, comment):
"""Write an individual line, for the write method"""
        # NOTE: the calls to self._quote here handle non-StringType values.
if not self.unrepr:
val = self._decode_element(self._quote(this_entry))
else:
val = repr(this_entry)
out = '%s%s%s%s %s' % (indent_string,
self._decode_element(self._quote(entry, multiline=False)),
self._a_to_u(' = '),
val,
self._decode_element(comment))
out = out.strip()
return out
def _write_marker(self, indent_string, depth, entry, comment):
"""Write a section marker line"""
return '%s%s%s%s%s' % (indent_string,
self._a_to_u('[' * depth),
self._quote(self._decode_element(entry), multiline=False),
self._a_to_u(']' * depth),
self._decode_element(comment))
def _handle_comment(self, comment):
"""Deal with a comment."""
if not comment:
return ''
start = self.indent_type
if not comment.startswith('#'):
start += self._a_to_u(' # ')
return (start + comment)
# Public methods
def write(self, outfile=None, section=None):
"""
Write the current ConfigObj as a file
tekNico: FIXME: use StringIO instead of real files
>>> filename = a.filename
>>> a.filename = 'test.ini'
>>> a.write()
>>> a.filename = filename
        >>> a == ConfigObj('test.ini', raise_errors=True)
        True
>>> import os
>>> os.remove('test.ini')
"""
if self.indent_type is None:
# this can be true if initialised from a dictionary
self.indent_type = DEFAULT_INDENT_TYPE
out = []
cs = self._a_to_u('#')
csp = self._a_to_u('# ')
if section is None:
int_val = self.interpolation
self.interpolation = False
section = self
for line in self.initial_comment:
line = self._decode_element(line)
stripped_line = line.strip()
if stripped_line and not stripped_line.startswith(cs):
line = csp + line
out.append(line)
indent_string = self.indent_type * section.depth
for entry in (section.scalars + section.sections):
if entry in section.defaults:
# don't write out default values
continue
for comment_line in section.comments[entry]:
comment_line = self._decode_element(comment_line.lstrip())
if comment_line and not comment_line.startswith(cs):
comment_line = csp + comment_line
out.append(indent_string + comment_line)
this_entry = section[entry]
comment = self._handle_comment(section.inline_comments[entry])
if isinstance(this_entry, Section):
# a section
out.append(self._write_marker(
indent_string,
this_entry.depth,
entry,
comment))
out.extend(self.write(section=this_entry))
else:
out.append(self._write_line(
indent_string,
entry,
this_entry,
comment))
if section is self:
for line in self.final_comment:
line = self._decode_element(line)
stripped_line = line.strip()
if stripped_line and not stripped_line.startswith(cs):
line = csp + line
out.append(line)
self.interpolation = int_val
if section is not self:
return out
if (self.filename is None) and (outfile is None):
# output a list of lines
# might need to encode
# NOTE: This will *screw* UTF16, each line will start with the BOM
if self.encoding:
out = [l.encode(self.encoding) for l in out]
if (self.BOM and ((self.encoding is None) or
(BOM_LIST.get(self.encoding.lower()) == 'utf_8'))):
# Add the UTF8 BOM
if not out:
out.append('')
out[0] = BOM_UTF8 + out[0]
return out
# Turn the list to a string, joined with correct newlines
newline = self.newlines or os.linesep
if (getattr(outfile, 'mode', None) is not None and outfile.mode == 'w'
and sys.platform == 'win32' and newline == '\r\n'):
# Windows specific hack to avoid writing '\r\r\n'
newline = '\n'
output = self._a_to_u(newline).join(out)
if not output.endswith(newline):
output += newline
if isinstance(output, six.binary_type):
output_bytes = output
else:
output_bytes = output.encode(self.encoding or
self.default_encoding or
'ascii')
if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
# Add the UTF8 BOM
output_bytes = BOM_UTF8 + output_bytes
if outfile is not None:
outfile.write(output_bytes)
else:
with open(self.filename, 'wb') as h:
h.write(output_bytes)
def validate(self, validator, preserve_errors=False, copy=False,
section=None):
"""
Test the ConfigObj against a configspec.
It uses the ``validator`` object from *validate.py*.
To run ``validate`` on the current ConfigObj, call: ::
test = config.validate(validator)
(Normally having previously passed in the configspec when the ConfigObj
was created - you can dynamically assign a dictionary of checks to the
``configspec`` attribute of a section though).
It returns ``True`` if everything passes, or a dictionary of
pass/fails (True/False). If every member of a subsection passes, it
will just have the value ``True``. (It also returns ``False`` if all
members fail).
In addition, it converts the values from strings to their native
types if their checks pass (and ``stringify`` is set).
If ``preserve_errors`` is ``True`` (``False`` is default) then instead
of a marking a fail with a ``False``, it will preserve the actual
exception object. This can contain info about the reason for failure.
For example the ``VdtValueTooSmallError`` indicates that the value
supplied was too small. If a value (or section) is missing it will
still be marked as ``False``.
You must have the validate module to use ``preserve_errors=True``.
You can then use the ``flatten_errors`` function to turn your nested
results dictionary into a flattened list of failures - useful for
displaying meaningful error messages.
"""
if section is None:
if self.configspec is None:
raise ValueError('No configspec supplied.')
if preserve_errors:
# We do this once to remove a top level dependency on the validate module
# Which makes importing configobj faster
                from zato.common.ext.validate_ import VdtMissingValue
                self._vdtMissingValue = VdtMissingValue
section = self
if copy:
section.initial_comment = section.configspec.initial_comment
section.final_comment = section.configspec.final_comment
section.encoding = section.configspec.encoding
section.BOM = section.configspec.BOM
section.newlines = section.configspec.newlines
section.indent_type = section.configspec.indent_type
#
# section.default_values.clear() #??
configspec = section.configspec
self._set_configspec(section, copy)
def validate_entry(entry, spec, val, missing, ret_true, ret_false):
section.default_values.pop(entry, None)
try:
section.default_values[entry] = validator.get_default_value(configspec[entry])
except (KeyError, AttributeError, validator.baseErrorClass):
# No default, bad default or validator has no 'get_default_value'
# (e.g. SimpleVal)
pass
try:
check = validator.check(spec,
val,
missing=missing
)
except validator.baseErrorClass as e:
if not preserve_errors or isinstance(e, self._vdtMissingValue):
out[entry] = False
else:
# preserve the error
out[entry] = e
ret_false = False
ret_true = False
else:
ret_false = False
out[entry] = True
if self.stringify or missing:
# if we are doing type conversion
# or the value is a supplied default
if not self.stringify:
if isinstance(check, (list, tuple)):
# preserve lists
check = [self._str(item) for item in check]
elif missing and check is None:
# convert the None from a default to a ''
check = ''
else:
check = self._str(check)
if (check != val) or missing:
section[entry] = check
if not copy and missing and entry not in section.defaults:
section.defaults.append(entry)
return ret_true, ret_false
#
out = {}
ret_true = True
ret_false = True
unvalidated = [k for k in section.scalars if k not in configspec]
incorrect_sections = [k for k in configspec.sections if k in section.scalars]
incorrect_scalars = [k for k in configspec.scalars if k in section.sections]
for entry in configspec.scalars:
if entry in ('__many__', '___many___'):
# reserved names
continue
            if (entry not in section.scalars) or (entry in section.defaults):
# missing entries
# or entries from defaults
missing = True
val = None
if copy and entry not in section.scalars:
# copy comments
section.comments[entry] = (
configspec.comments.get(entry, []))
section.inline_comments[entry] = (
configspec.inline_comments.get(entry, ''))
#
else:
missing = False
val = section[entry]
ret_true, ret_false = validate_entry(entry, configspec[entry], val,
missing, ret_true, ret_false)
many = None
if '__many__' in configspec.scalars:
many = configspec['__many__']
elif '___many___' in configspec.scalars:
many = configspec['___many___']
if many is not None:
for entry in unvalidated:
val = section[entry]
ret_true, ret_false = validate_entry(entry, many, val, False,
ret_true, ret_false)
unvalidated = []
for entry in incorrect_scalars:
ret_true = False
if not preserve_errors:
out[entry] = False
else:
ret_false = False
msg = 'Value %r was provided as a section' % entry
out[entry] = validator.baseErrorClass(msg)
for entry in incorrect_sections:
ret_true = False
if not preserve_errors:
out[entry] = False
else:
ret_false = False
msg = 'Section %r was provided as a single value' % entry
out[entry] = validator.baseErrorClass(msg)
# Missing sections will have been created as empty ones when the
# configspec was read.
for entry in section.sections:
# FIXME: this means DEFAULT is not copied in copy mode
if section is self and entry == 'DEFAULT':
continue
if section[entry].configspec is None:
unvalidated.append(entry)
continue
if copy:
section.comments[entry] = configspec.comments.get(entry, [])
section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
check = self.validate(validator, preserve_errors=preserve_errors, copy=copy, section=section[entry])
out[entry] = check
if check == False:
ret_true = False
elif check == True:
ret_false = False
else:
ret_true = False
section.extra_values = unvalidated
if preserve_errors and not section._created:
# If the section wasn't created (i.e. it wasn't missing)
# then we can't return False, we need to preserve errors
ret_false = False
#
if ret_false and preserve_errors and out:
# If we are preserving errors, but all
# the failures are from missing sections / values
# then we can return False. Otherwise there is a
# real failure that we need to preserve.
ret_false = not any(out.values())
if ret_true:
return True
elif ret_false:
return False
return out
def reset(self):
"""Clear ConfigObj instance and restore to 'freshly created' state."""
self.clear()
self._initialise()
# FIXME: Should be done by '_initialise', but ConfigObj constructor (and reload)
# requires an empty dictionary
self.configspec = None
# Just to be sure ;-)
self._original_configspec = None
def reload(self):
"""
Reload a ConfigObj from file.
This method raises a ``ReloadError`` if the ConfigObj doesn't have
a filename attribute pointing to a file.
"""
if not isinstance(self.filename, six.string_types):
raise ReloadError()
filename = self.filename
current_options = {}
for entry in OPTION_DEFAULTS:
if entry == 'configspec':
continue
current_options[entry] = getattr(self, entry)
configspec = self._original_configspec
current_options['configspec'] = configspec
self.clear()
self._initialise(current_options)
self._load(filename, configspec)
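# A minimal validation sketch (an addition, not part of the original module;
# never called at import time). The inline config lines below are hypothetical,
# and Validator is assumed to be provided by the companion validate_ module,
# as in upstream validate.py. It shows the usual flow described in the
# ``validate`` docstring above: run the checks, then fall back to
# flatten_errors() (defined below) when something fails.
def _example_validate_config():
    from zato.common.ext.validate_ import Validator
    config = ConfigObj(['port = 9090'],
                       configspec=['port = integer(min=1, max=65535)'])
    result = config.validate(Validator(), preserve_errors=True)
    if result is True:
        return config['port'] # already converted to int by the check
    # On failure, flatten_errors() turns the nested result dictionary
    # into a flat list of (sections, key, error) tuples.
    return flatten_errors(config, result)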
class SimpleVal(object):
"""
A simple validator.
Can be used to check that all members expected are present.
To use it, provide a configspec with all your members in (the value given
will be ignored). Pass an instance of ``SimpleVal`` to the ``validate``
method of your ``ConfigObj``. ``validate`` will return ``True`` if all
members are present, or a dictionary with True/False meaning
present/missing. (Whole missing sections will be replaced with ``False``)
"""
def __init__(self):
self.baseErrorClass = ConfigObjError
def check(self, check, member, missing=False):
"""A dummy check method, always returns the value unchanged."""
if missing:
raise self.baseErrorClass()
return member
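# A small sketch of SimpleVal (an addition; hypothetical data, never called
# at import time): every key listed in the configspec must be present and
# the configspec values themselves are ignored.
def _example_simpleval():
    config = ConfigObj(['name = zato'],
                       configspec=['name = ignored', 'port = ignored'])
    result = config.validate(SimpleVal())
    # 'name' is present but 'port' is missing, so instead of True the
    # result is a dictionary such as {'name': True, 'port': False}.
    return result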
def flatten_errors(cfg, res, levels=None, results=None):
"""
An example function that will turn a nested dictionary of results
(as returned by ``ConfigObj.validate``) into a flat list.
``cfg`` is the ConfigObj instance being checked, ``res`` is the results
dictionary returned by ``validate``.
(This is a recursive function, so you shouldn't use the ``levels`` or
``results`` arguments - they are used by the function.)
Returns a list of keys that failed. Each member of the list is a tuple::
([list of sections...], key, result)
If ``validate`` was called with ``preserve_errors=False`` (the default)
then ``result`` will always be ``False``.
*list of sections* is a flattened list of sections that the key was found
in.
If the section was missing (or a section was expected and a scalar provided
- or vice-versa) then key will be ``None``.
If the value (or section) was missing then ``result`` will be ``False``.
If ``validate`` was called with ``preserve_errors=True`` and a value
was present, but failed the check, then ``result`` will be the exception
object returned. You can use this as a string that describes the failure.
For example *The value "3" is of the wrong type*.
"""
if levels is None:
# first time called
levels = []
results = []
if res == True:
return sorted(results)
if res == False or isinstance(res, Exception):
results.append((levels[:], None, res))
if levels:
levels.pop()
return sorted(results)
for (key, val) in list(res.items()):
if val == True:
continue
if isinstance(cfg.get(key), dict):
# Go down one level
levels.append(key)
flatten_errors(cfg[key], val, levels, results)
continue
results.append((levels[:], key, val))
#
# Go up one level
if levels:
levels.pop()
#
return sorted(results)
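# A reporting sketch built on flatten_errors (an addition; never called at
# import time): turns the flattened failures into human-readable lines.
def _example_report_failures(config, results):
    lines = []
    for sections, key, error in flatten_errors(config, results):
        path = ', '.join(sections) or '<top level>'
        if key is None:
            lines.append('missing section in [%s]' % path)
        elif error is False:
            lines.append('missing value %r in [%s]' % (key, path))
        else:
            lines.append('invalid value %r in [%s]: %s' % (key, path, error))
    return lines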
def get_extra_values(conf, _prepend=()):
"""
Find all the values and sections not in the configspec from a validated
ConfigObj.
``get_extra_values`` returns a list of tuples where each tuple represents
either an extra section, or an extra value.
    Each tuple contains two values: a tuple representing the section the value
    is in and the name of the extra value. For extra values in the top level
section the first member will be an empty tuple. For values in the 'foo'
section the first member will be ``('foo',)``. For members in the 'bar'
subsection of the 'foo' section the first member will be ``('foo', 'bar')``.
NOTE: If you call ``get_extra_values`` on a ConfigObj instance that hasn't
been validated it will return an empty list.
"""
out = []
out.extend([(_prepend, name) for name in conf.extra_values])
for name in conf.sections:
if name not in conf.extra_values:
out.extend(get_extra_values(conf[name], _prepend + (name,)))
return out
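# A sketch using get_extra_values on an already-validated ConfigObj
# (an addition; never called at import time): lists the keys that the
# configspec does not know about, one entry per extra value.
def _example_report_extra_values(config):
    return ['%s in section %r' % (name, '/'.join(sections) or '<top level>')
            for sections, name in get_extra_values(config)]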
"""*A programming language is a medium of expression.* - Paul Graham""" | zato-common-holocene | /zato-common-holocene-3.2.1.tar.gz/zato-common-holocene-3.2.1/src/zato/common/ext/configobj_.py | configobj_.py |
import abc
import collections
import contextlib
import sys
import typing
import collections.abc as collections_abc
import operator
# These are used by Protocol implementation
# We use internal typing helpers here, but this significantly reduces
# code duplication. (Also this is only until Protocol is in typing.)
from typing import Generic, Callable, TypeVar, Tuple
# After PEP 560, the internal typing API was substantially reworked.
# This is especially important for the Protocol class, which uses internal
# APIs quite extensively.
PEP_560 = sys.version_info[:3] >= (3, 7, 0)
if PEP_560:
GenericMeta = TypingMeta = type
else:
from typing import GenericMeta, TypingMeta
OLD_GENERICS = False
try:
from typing import _type_vars, _next_in_mro, _type_check
except ImportError:
OLD_GENERICS = True
try:
from typing import _subs_tree # noqa
SUBS_TREE = True
except ImportError:
SUBS_TREE = False
try:
from typing import _tp_cache
except ImportError:
def _tp_cache(x):
return x
try:
from typing import _TypingEllipsis, _TypingEmpty
except ImportError:
class _TypingEllipsis:
pass
class _TypingEmpty:
pass
# The two functions below are copies of typing internal helpers.
# They are needed by _ProtocolMeta
def _no_slots_copy(dct):
dict_copy = dict(dct)
if '__slots__' in dict_copy:
for slot in dict_copy['__slots__']:
dict_copy.pop(slot, None)
return dict_copy
def _check_generic(cls, parameters):
if not cls.__parameters__:
raise TypeError("%s is not a generic class" % repr(cls))
alen = len(parameters)
elen = len(cls.__parameters__)
if alen != elen:
raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
("many" if alen > elen else "few", repr(cls), alen, elen))
if hasattr(typing, '_generic_new'):
_generic_new = typing._generic_new
else:
# Note: The '_generic_new(...)' function is used as a part of the
# process of creating a generic type and was added to the typing module
# as of Python 3.5.3.
#
# We've defined '_generic_new(...)' below to exactly match the behavior
# implemented in older versions of 'typing' bundled with Python 3.5.0 to
# 3.5.2. This helps eliminate redundancy when defining collection types
# like 'Deque' later.
#
# See https://github.com/python/typing/pull/308 for more details -- in
# particular, compare and contrast the definition of types like
# 'typing.List' before and after the merge.
def _generic_new(base_cls, cls, *args, **kwargs):
return base_cls.__new__(cls, *args, **kwargs)
# See https://github.com/python/typing/pull/439
if hasattr(typing, '_geqv'):
from typing import _geqv
_geqv_defined = True
else:
_geqv = None
_geqv_defined = False
if sys.version_info[:2] >= (3, 6):
import _collections_abc
_check_methods_in_mro = _collections_abc._check_methods
else:
def _check_methods_in_mro(C, *methods):
mro = C.__mro__
for method in methods:
for B in mro:
if method in B.__dict__:
if B.__dict__[method] is None:
return NotImplemented
break
else:
return NotImplemented
return True
# Please keep __all__ alphabetized within each category.
__all__ = [
# Super-special typing primitives.
'ClassVar',
'Final',
'Type',
# ABCs (from collections.abc).
# The following are added depending on presence
# of their non-generic counterparts in stdlib:
# 'Awaitable',
# 'AsyncIterator',
# 'AsyncIterable',
# 'Coroutine',
# 'AsyncGenerator',
# 'AsyncContextManager',
# 'ChainMap',
# Concrete collection types.
'ContextManager',
'Counter',
'Deque',
'DefaultDict',
'TypedDict',
# Structural checks, a.k.a. protocols.
'SupportsIndex',
# One-off things.
'final',
'IntVar',
'Literal',
'NewType',
'overload',
'Text',
'TYPE_CHECKING',
]
# Annotated relies on the substitution trees of PEP 560. It will not work for
# versions of typing older than 3.5.3
HAVE_ANNOTATED = PEP_560 or SUBS_TREE
if PEP_560:
__all__.extend(["get_args", "get_origin", "get_type_hints"])
if HAVE_ANNOTATED:
__all__.append("Annotated")
# Protocols are hard to backport to the original version of typing 3.5.0
HAVE_PROTOCOLS = sys.version_info[:3] != (3, 5, 0)
if HAVE_PROTOCOLS:
__all__.extend(['Protocol', 'runtime', 'runtime_checkable'])
# TODO
if hasattr(typing, 'NoReturn'):
NoReturn = typing.NoReturn
elif hasattr(typing, '_FinalTypingBase'):
class _NoReturn(typing._FinalTypingBase, _root=True):
"""Special type indicating functions that never return.
Example::
from typing import NoReturn
def stop() -> NoReturn:
raise Exception('no way')
This type is invalid in other positions, e.g., ``List[NoReturn]``
will fail in static type checkers.
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError("NoReturn cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("NoReturn cannot be used with issubclass().")
NoReturn = _NoReturn(_root=True)
else:
class _NoReturnMeta(typing.TypingMeta):
"""Metaclass for NoReturn"""
def __new__(cls, name, bases, namespace, _root=False):
return super().__new__(cls, name, bases, namespace, _root=_root)
def __instancecheck__(self, obj):
raise TypeError("NoReturn cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("NoReturn cannot be used with issubclass().")
class NoReturn(typing.Final, metaclass=_NoReturnMeta, _root=True):
"""Special type indicating functions that never return.
Example::
from typing import NoReturn
def stop() -> NoReturn:
raise Exception('no way')
This type is invalid in other positions, e.g., ``List[NoReturn]``
will fail in static type checkers.
"""
__slots__ = ()
# Some unconstrained type variables. These are used by the container types.
# (These are not for export.)
T = typing.TypeVar('T') # Any type.
KT = typing.TypeVar('KT') # Key type.
VT = typing.TypeVar('VT') # Value type.
T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers.
V_co = typing.TypeVar('V_co', covariant=True) # Any type covariant containers.
VT_co = typing.TypeVar('VT_co', covariant=True) # Value type covariant containers.
T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant.
if hasattr(typing, 'ClassVar'):
ClassVar = typing.ClassVar
elif hasattr(typing, '_FinalTypingBase'):
class _ClassVar(typing._FinalTypingBase, _root=True):
"""Special type construct to mark class variables.
An annotation wrapped in ClassVar indicates that a given
attribute is intended to be used as a class variable and
should not be set on instances of that class. Usage::
class Starship:
stats: ClassVar[Dict[str, int]] = {} # class variable
damage: int = 10 # instance variable
        ClassVar accepts only types and cannot be further subscripted.
Note that ClassVar is not a class itself, and should not
be used with isinstance() or issubclass().
"""
__slots__ = ('__type__',)
def __init__(self, tp=None, **kwds):
self.__type__ = tp
def __getitem__(self, item):
cls = type(self)
if self.__type__ is None:
return cls(typing._type_check(item,
'{} accepts only single type.'.format(cls.__name__[1:])),
_root=True)
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(new_tp, _root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += '[{}]'.format(typing._type_repr(self.__type__))
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, _ClassVar):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
ClassVar = _ClassVar(_root=True)
else:
class _ClassVarMeta(typing.TypingMeta):
"""Metaclass for ClassVar"""
def __new__(cls, name, bases, namespace, tp=None, _root=False):
self = super().__new__(cls, name, bases, namespace, _root=_root)
if tp is not None:
self.__type__ = tp
return self
def __instancecheck__(self, obj):
raise TypeError("ClassVar cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("ClassVar cannot be used with issubclass().")
def __getitem__(self, item):
cls = type(self)
if self.__type__ is not None:
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
param = typing._type_check(
item,
'{} accepts only single type.'.format(cls.__name__[1:]))
return cls(self.__name__, self.__bases__,
dict(self.__dict__), tp=param, _root=True)
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(self.__name__, self.__bases__,
dict(self.__dict__), tp=self.__type__,
_root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += '[{}]'.format(typing._type_repr(self.__type__))
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, ClassVar):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
class ClassVar(typing.Final, metaclass=_ClassVarMeta, _root=True):
"""Special type construct to mark class variables.
An annotation wrapped in ClassVar indicates that a given
attribute is intended to be used as a class variable and
should not be set on instances of that class. Usage::
class Starship:
stats: ClassVar[Dict[str, int]] = {} # class variable
damage: int = 10 # instance variable
        ClassVar accepts only types and cannot be further subscripted.
Note that ClassVar is not a class itself, and should not
be used with isinstance() or issubclass().
"""
__type__ = None
# On older versions of typing there is an internal class named "Final".
if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7):
Final = typing.Final
elif sys.version_info[:2] >= (3, 7):
class _FinalForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
def __getitem__(self, parameters):
item = typing._type_check(parameters,
'{} accepts only single type'.format(self._name))
return _GenericAlias(self, (item,))
Final = _FinalForm('Final',
doc="""A special typing construct to indicate that a name
cannot be re-assigned or overridden in a subclass.
For example:
MAX_SIZE: Final = 9000
MAX_SIZE += 1 # Error reported by type checker
class Connection:
TIMEOUT: Final[int] = 10
class FastConnector(Connection):
TIMEOUT = 1 # Error reported by type checker
There is no runtime checking of these properties.""")
elif hasattr(typing, '_FinalTypingBase'):
class _Final(typing._FinalTypingBase, _root=True):
"""A special typing construct to indicate that a name
cannot be re-assigned or overridden in a subclass.
For example:
MAX_SIZE: Final = 9000
MAX_SIZE += 1 # Error reported by type checker
class Connection:
TIMEOUT: Final[int] = 10
class FastConnector(Connection):
TIMEOUT = 1 # Error reported by type checker
There is no runtime checking of these properties.
"""
__slots__ = ('__type__',)
def __init__(self, tp=None, **kwds):
self.__type__ = tp
def __getitem__(self, item):
cls = type(self)
if self.__type__ is None:
return cls(typing._type_check(item,
'{} accepts only single type.'.format(cls.__name__[1:])),
_root=True)
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(new_tp, _root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += '[{}]'.format(typing._type_repr(self.__type__))
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, _Final):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
Final = _Final(_root=True)
else:
class _FinalMeta(typing.TypingMeta):
"""Metaclass for Final"""
def __new__(cls, name, bases, namespace, tp=None, _root=False):
self = super().__new__(cls, name, bases, namespace, _root=_root)
if tp is not None:
self.__type__ = tp
return self
def __instancecheck__(self, obj):
raise TypeError("Final cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Final cannot be used with issubclass().")
def __getitem__(self, item):
cls = type(self)
if self.__type__ is not None:
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
param = typing._type_check(
item,
'{} accepts only single type.'.format(cls.__name__[1:]))
return cls(self.__name__, self.__bases__,
dict(self.__dict__), tp=param, _root=True)
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(self.__name__, self.__bases__,
dict(self.__dict__), tp=self.__type__,
_root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += '[{}]'.format(typing._type_repr(self.__type__))
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, Final):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
class Final(typing.Final, metaclass=_FinalMeta, _root=True):
"""A special typing construct to indicate that a name
cannot be re-assigned or overridden in a subclass.
For example:
MAX_SIZE: Final = 9000
MAX_SIZE += 1 # Error reported by type checker
class Connection:
TIMEOUT: Final[int] = 10
class FastConnector(Connection):
TIMEOUT = 1 # Error reported by type checker
There is no runtime checking of these properties.
"""
__type__ = None
if hasattr(typing, 'final'):
final = typing.final
else:
def final(f):
"""This decorator can be used to indicate to type checkers that
the decorated method cannot be overridden, and decorated class
cannot be subclassed. For example:
class Base:
@final
def done(self) -> None:
...
class Sub(Base):
def done(self) -> None: # Error reported by type checker
...
@final
class Leaf:
...
class Other(Leaf): # Error reported by type checker
...
There is no runtime checking of these properties.
"""
return f
def IntVar(name):
return TypeVar(name)
if hasattr(typing, 'Literal'):
Literal = typing.Literal
elif sys.version_info[:2] >= (3, 7):
class _LiteralForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
def __getitem__(self, parameters):
return _GenericAlias(self, parameters)
Literal = _LiteralForm('Literal',
doc="""A type that can be used to indicate to type checkers
that the corresponding value has a value literally equivalent
to the provided parameter. For example:
var: Literal[4] = 4
The type checker understands that 'var' is literally equal to
the value 4 and no other value.
Literal[...] cannot be subclassed. There is no runtime
checking verifying that the parameter is actually a value
instead of a type.""")
elif hasattr(typing, '_FinalTypingBase'):
class _Literal(typing._FinalTypingBase, _root=True):
"""A type that can be used to indicate to type checkers that the
corresponding value has a value literally equivalent to the
provided parameter. For example:
var: Literal[4] = 4
The type checker understands that 'var' is literally equal to the
value 4 and no other value.
Literal[...] cannot be subclassed. There is no runtime checking
verifying that the parameter is actually a value instead of a type.
"""
__slots__ = ('__values__',)
def __init__(self, values=None, **kwds):
self.__values__ = values
def __getitem__(self, values):
cls = type(self)
if self.__values__ is None:
if not isinstance(values, tuple):
values = (values,)
return cls(values, _root=True)
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
def _eval_type(self, globalns, localns):
return self
def __repr__(self):
r = super().__repr__()
if self.__values__ is not None:
r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__)))
return r
def __hash__(self):
return hash((type(self).__name__, self.__values__))
def __eq__(self, other):
if not isinstance(other, _Literal):
return NotImplemented
if self.__values__ is not None:
return self.__values__ == other.__values__
return self is other
Literal = _Literal(_root=True)
else:
class _LiteralMeta(typing.TypingMeta):
"""Metaclass for Literal"""
def __new__(cls, name, bases, namespace, values=None, _root=False):
self = super().__new__(cls, name, bases, namespace, _root=_root)
if values is not None:
self.__values__ = values
return self
def __instancecheck__(self, obj):
raise TypeError("Literal cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Literal cannot be used with issubclass().")
def __getitem__(self, item):
cls = type(self)
if self.__values__ is not None:
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
if not isinstance(item, tuple):
item = (item,)
return cls(self.__name__, self.__bases__,
dict(self.__dict__), values=item, _root=True)
def _eval_type(self, globalns, localns):
return self
def __repr__(self):
r = super().__repr__()
if self.__values__ is not None:
r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__)))
return r
def __hash__(self):
return hash((type(self).__name__, self.__values__))
def __eq__(self, other):
if not isinstance(other, Literal):
return NotImplemented
if self.__values__ is not None:
return self.__values__ == other.__values__
return self is other
class Literal(typing.Final, metaclass=_LiteralMeta, _root=True):
"""A type that can be used to indicate to type checkers that the
corresponding value has a value literally equivalent to the
provided parameter. For example:
var: Literal[4] = 4
The type checker understands that 'var' is literally equal to the
value 4 and no other value.
Literal[...] cannot be subclassed. There is no runtime checking
verifying that the parameter is actually a value instead of a type.
"""
__values__ = None
def _overload_dummy(*args, **kwds):
"""Helper for @overload to raise when called."""
raise NotImplementedError(
"You should not call an overloaded function. "
"A series of @overload-decorated functions "
"outside a stub module should always be followed "
"by an implementation that is not @overload-ed.")
def overload(func):
"""Decorator for overloaded functions/methods.
In a stub file, place two or more stub definitions for the same
function in a row, each decorated with @overload. For example:
@overload
def utf8(value: None) -> None: ...
@overload
def utf8(value: bytes) -> bytes: ...
@overload
def utf8(value: str) -> bytes: ...
In a non-stub file (i.e. a regular .py file), do the same but
follow it with an implementation. The implementation should *not*
be decorated with @overload. For example:
@overload
def utf8(value: None) -> None: ...
@overload
def utf8(value: bytes) -> bytes: ...
@overload
def utf8(value: str) -> bytes: ...
def utf8(value):
# implementation goes here
"""
return _overload_dummy
# This is not a real generic class. Don't use outside annotations.
if hasattr(typing, 'Type'):
Type = typing.Type
else:
# Internal type variable used for Type[].
CT_co = typing.TypeVar('CT_co', covariant=True, bound=type)
class Type(typing.Generic[CT_co], extra=type):
"""A special construct usable to annotate class objects.
For example, suppose we have the following classes::
class User: ... # Abstract base for User classes
class BasicUser(User): ...
class ProUser(User): ...
class TeamUser(User): ...
And a function that takes a class argument that's a subclass of
User and returns an instance of the corresponding class::
U = TypeVar('U', bound=User)
def new_user(user_class: Type[U]) -> U:
user = user_class()
# (Here we could write the user object to a database)
return user
joe = new_user(BasicUser)
At this point the type checker knows that joe has type BasicUser.
"""
__slots__ = ()
# Various ABCs mimicking those in collections.abc.
# A few are simply re-exported for completeness.
def _define_guard(type_name):
"""
Returns True if the given type isn't defined in typing but
is defined in collections_abc.
    Adds the type to __all__ if the collection is found in either
    typing or collections_abc.
"""
if hasattr(typing, type_name):
__all__.append(type_name)
globals()[type_name] = getattr(typing, type_name)
return False
elif hasattr(collections_abc, type_name):
__all__.append(type_name)
return True
else:
return False
class _ExtensionsGenericMeta(GenericMeta):
def __subclasscheck__(self, subclass):
"""This mimics a more modern GenericMeta.__subclasscheck__() logic
(that does not have problems with recursion) to work around interactions
between collections, typing, and typing_extensions on older
versions of Python, see https://github.com/python/typing/issues/501.
"""
if sys.version_info[:3] >= (3, 5, 3) or sys.version_info[:3] < (3, 5, 0):
if self.__origin__ is not None:
if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
raise TypeError("Parameterized generics cannot be used with class "
"or instance checks")
return False
if not self.__extra__:
return super().__subclasscheck__(subclass)
res = self.__extra__.__subclasshook__(subclass)
if res is not NotImplemented:
return res
if self.__extra__ in subclass.__mro__:
return True
for scls in self.__extra__.__subclasses__():
if isinstance(scls, GenericMeta):
continue
if issubclass(subclass, scls):
return True
return False
if _define_guard('Awaitable'):
class Awaitable(typing.Generic[T_co], metaclass=_ExtensionsGenericMeta,
extra=collections_abc.Awaitable):
__slots__ = ()
if _define_guard('Coroutine'):
class Coroutine(Awaitable[V_co], typing.Generic[T_co, T_contra, V_co],
metaclass=_ExtensionsGenericMeta,
extra=collections_abc.Coroutine):
__slots__ = ()
if _define_guard('AsyncIterable'):
class AsyncIterable(typing.Generic[T_co],
metaclass=_ExtensionsGenericMeta,
extra=collections_abc.AsyncIterable):
__slots__ = ()
if _define_guard('AsyncIterator'):
class AsyncIterator(AsyncIterable[T_co],
metaclass=_ExtensionsGenericMeta,
extra=collections_abc.AsyncIterator):
__slots__ = ()
if hasattr(typing, 'Deque'):
Deque = typing.Deque
elif _geqv_defined:
class Deque(collections.deque, typing.MutableSequence[T],
metaclass=_ExtensionsGenericMeta,
extra=collections.deque):
__slots__ = ()
def __new__(cls, *args, **kwds):
if _geqv(cls, Deque):
return collections.deque(*args, **kwds)
return _generic_new(collections.deque, cls, *args, **kwds)
else:
class Deque(collections.deque, typing.MutableSequence[T],
metaclass=_ExtensionsGenericMeta,
extra=collections.deque):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is Deque:
return collections.deque(*args, **kwds)
return _generic_new(collections.deque, cls, *args, **kwds)
if hasattr(typing, 'ContextManager'):
ContextManager = typing.ContextManager
elif hasattr(contextlib, 'AbstractContextManager'):
class ContextManager(typing.Generic[T_co],
metaclass=_ExtensionsGenericMeta,
extra=contextlib.AbstractContextManager):
__slots__ = ()
else:
class ContextManager(typing.Generic[T_co]):
__slots__ = ()
def __enter__(self):
return self
@abc.abstractmethod
def __exit__(self, exc_type, exc_value, traceback):
return None
@classmethod
def __subclasshook__(cls, C):
if cls is ContextManager:
# In Python 3.6+, it is possible to set a method to None to
# explicitly indicate that the class does not implement an ABC
# (https://bugs.python.org/issue25958), but we do not support
# that pattern here because this fallback class is only used
# in Python 3.5 and earlier.
if (any("__enter__" in B.__dict__ for B in C.__mro__) and
any("__exit__" in B.__dict__ for B in C.__mro__)):
return True
return NotImplemented
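# A structural-typing sketch (an addition; hypothetical class, never called
# at import time): any class defining __enter__ and __exit__ is accepted by
# issubclass() against ContextManager, whichever branch above provided it.
def _example_context_manager_check():
    class Managed:
        def __enter__(self):
            return self
        def __exit__(self, exc_type, exc_value, traceback):
            return None
    return issubclass(Managed, ContextManager) # True via __subclasshook__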
if hasattr(typing, 'AsyncContextManager'):
AsyncContextManager = typing.AsyncContextManager
__all__.append('AsyncContextManager')
elif hasattr(contextlib, 'AbstractAsyncContextManager'):
class AsyncContextManager(typing.Generic[T_co],
metaclass=_ExtensionsGenericMeta,
extra=contextlib.AbstractAsyncContextManager):
__slots__ = ()
__all__.append('AsyncContextManager')
elif sys.version_info[:2] >= (3, 5):
exec("""
class AsyncContextManager(typing.Generic[T_co]):
__slots__ = ()
async def __aenter__(self):
return self
@abc.abstractmethod
async def __aexit__(self, exc_type, exc_value, traceback):
return None
@classmethod
def __subclasshook__(cls, C):
if cls is AsyncContextManager:
return _check_methods_in_mro(C, "__aenter__", "__aexit__")
return NotImplemented
__all__.append('AsyncContextManager')
""")
if hasattr(typing, 'DefaultDict'):
DefaultDict = typing.DefaultDict
elif _geqv_defined:
class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT],
metaclass=_ExtensionsGenericMeta,
extra=collections.defaultdict):
__slots__ = ()
def __new__(cls, *args, **kwds):
if _geqv(cls, DefaultDict):
return collections.defaultdict(*args, **kwds)
return _generic_new(collections.defaultdict, cls, *args, **kwds)
else:
class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT],
metaclass=_ExtensionsGenericMeta,
extra=collections.defaultdict):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is DefaultDict:
return collections.defaultdict(*args, **kwds)
return _generic_new(collections.defaultdict, cls, *args, **kwds)
if hasattr(typing, 'Counter'):
Counter = typing.Counter
elif (3, 5, 0) <= sys.version_info[:3] <= (3, 5, 1):
assert _geqv_defined
_TInt = typing.TypeVar('_TInt')
class _CounterMeta(typing.GenericMeta):
"""Metaclass for Counter"""
def __getitem__(self, item):
return super().__getitem__((item, int))
class Counter(collections.Counter,
typing.Dict[T, int],
metaclass=_CounterMeta,
extra=collections.Counter):
__slots__ = ()
def __new__(cls, *args, **kwds):
if _geqv(cls, Counter):
return collections.Counter(*args, **kwds)
return _generic_new(collections.Counter, cls, *args, **kwds)
elif _geqv_defined:
class Counter(collections.Counter,
typing.Dict[T, int],
metaclass=_ExtensionsGenericMeta, extra=collections.Counter):
__slots__ = ()
def __new__(cls, *args, **kwds):
if _geqv(cls, Counter):
return collections.Counter(*args, **kwds)
return _generic_new(collections.Counter, cls, *args, **kwds)
else:
class Counter(collections.Counter,
typing.Dict[T, int],
metaclass=_ExtensionsGenericMeta, extra=collections.Counter):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is Counter:
return collections.Counter(*args, **kwds)
return _generic_new(collections.Counter, cls, *args, **kwds)
if hasattr(typing, 'ChainMap'):
ChainMap = typing.ChainMap
__all__.append('ChainMap')
elif hasattr(collections, 'ChainMap'):
# ChainMap only exists in 3.3+
if _geqv_defined:
class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
metaclass=_ExtensionsGenericMeta,
extra=collections.ChainMap):
__slots__ = ()
def __new__(cls, *args, **kwds):
if _geqv(cls, ChainMap):
return collections.ChainMap(*args, **kwds)
return _generic_new(collections.ChainMap, cls, *args, **kwds)
else:
class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
metaclass=_ExtensionsGenericMeta,
extra=collections.ChainMap):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is ChainMap:
return collections.ChainMap(*args, **kwds)
return _generic_new(collections.ChainMap, cls, *args, **kwds)
__all__.append('ChainMap')
if _define_guard('AsyncGenerator'):
class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra],
metaclass=_ExtensionsGenericMeta,
extra=collections_abc.AsyncGenerator):
__slots__ = ()
if hasattr(typing, 'NewType'):
NewType = typing.NewType
else:
def NewType(name, tp):
"""NewType creates simple unique types with almost zero
runtime overhead. NewType(name, tp) is considered a subtype of tp
by static type checkers. At runtime, NewType(name, tp) returns
a dummy function that simply returns its argument. Usage::
UserId = NewType('UserId', int)
def name_by_id(user_id: UserId) -> str:
...
UserId('user') # Fails type check
name_by_id(42) # Fails type check
name_by_id(UserId(42)) # OK
num = UserId(5) + 1 # type: int
"""
def new_type(x):
return x
new_type.__name__ = name
new_type.__supertype__ = tp
return new_type
if hasattr(typing, 'Text'):
Text = typing.Text
else:
Text = str
if hasattr(typing, 'TYPE_CHECKING'):
TYPE_CHECKING = typing.TYPE_CHECKING
else:
# Constant that's True when type checking, but False here.
TYPE_CHECKING = False
def _gorg(cls):
"""This function exists for compatibility with old typing versions."""
assert isinstance(cls, GenericMeta)
if hasattr(cls, '_gorg'):
return cls._gorg
while cls.__origin__ is not None:
cls = cls.__origin__
return cls
if OLD_GENERICS:
def _next_in_mro(cls): # noqa
"""This function exists for compatibility with old typing versions."""
next_in_mro = object
for i, c in enumerate(cls.__mro__[:-1]):
if isinstance(c, GenericMeta) and _gorg(c) is Generic:
next_in_mro = cls.__mro__[i + 1]
return next_in_mro
_PROTO_WHITELIST = ['Callable', 'Awaitable',
'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator',
'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
'ContextManager', 'AsyncContextManager']
def _get_protocol_attrs(cls):
attrs = set()
for base in cls.__mro__[:-1]: # without object
if base.__name__ in ('Protocol', 'Generic'):
continue
annotations = getattr(base, '__annotations__', {})
for attr in list(base.__dict__.keys()) + list(annotations.keys()):
if (not attr.startswith('_abc_') and attr not in (
'__abstractmethods__', '__annotations__', '__weakref__',
'_is_protocol', '_is_runtime_protocol', '__dict__',
'__args__', '__slots__',
'__next_in_mro__', '__parameters__', '__origin__',
'__orig_bases__', '__extra__', '__tree_hash__',
'__doc__', '__subclasshook__', '__init__', '__new__',
'__module__', '_MutableMapping__marker', '_gorg')):
attrs.add(attr)
return attrs
def _is_callable_members_only(cls):
return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
if hasattr(typing, 'Protocol'):
Protocol = typing.Protocol
elif HAVE_PROTOCOLS and not PEP_560:
class _ProtocolMeta(GenericMeta):
"""Internal metaclass for Protocol.
This exists so Protocol classes can be generic without deriving
from Generic.
"""
if not OLD_GENERICS:
def __new__(cls, name, bases, namespace,
tvars=None, args=None, origin=None, extra=None, orig_bases=None):
# This is just a version copied from GenericMeta.__new__ that
# includes "Protocol" special treatment. (Comments removed for brevity.)
assert extra is None # Protocols should not have extra
if tvars is not None:
assert origin is not None
assert all(isinstance(t, TypeVar) for t in tvars), tvars
else:
tvars = _type_vars(bases)
gvars = None
for base in bases:
if base is Generic:
raise TypeError("Cannot inherit from plain Generic")
if (isinstance(base, GenericMeta) and
base.__origin__ in (Generic, Protocol)):
if gvars is not None:
raise TypeError(
"Cannot inherit from Generic[...] or"
" Protocol[...] multiple times.")
gvars = base.__parameters__
if gvars is None:
gvars = tvars
else:
tvarset = set(tvars)
gvarset = set(gvars)
if not tvarset <= gvarset:
raise TypeError(
"Some type variables (%s) "
"are not listed in %s[%s]" %
(", ".join(str(t) for t in tvars if t not in gvarset),
"Generic" if any(b.__origin__ is Generic
for b in bases) else "Protocol",
", ".join(str(g) for g in gvars)))
tvars = gvars
initial_bases = bases
if (extra is not None and type(extra) is abc.ABCMeta and
extra not in bases):
bases = (extra,) + bases
bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b
for b in bases)
if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
bases = tuple(b for b in bases if b is not Generic)
namespace.update({'__origin__': origin, '__extra__': extra})
self = super(GenericMeta, cls).__new__(cls, name, bases, namespace,
_root=True)
super(GenericMeta, self).__setattr__('_gorg',
self if not origin else
_gorg(origin))
self.__parameters__ = tvars
self.__args__ = tuple(... if a is _TypingEllipsis else
() if a is _TypingEmpty else
a for a in args) if args else None
self.__next_in_mro__ = _next_in_mro(self)
if orig_bases is None:
self.__orig_bases__ = initial_bases
elif origin is not None:
self._abc_registry = origin._abc_registry
self._abc_cache = origin._abc_cache
if hasattr(self, '_subs_tree'):
self.__tree_hash__ = (hash(self._subs_tree()) if origin else
super(GenericMeta, self).__hash__())
return self
def __init__(cls, *args, **kwargs):
super().__init__(*args, **kwargs)
if not cls.__dict__.get('_is_protocol', None):
cls._is_protocol = any(b is Protocol or
isinstance(b, _ProtocolMeta) and
b.__origin__ is Protocol
for b in cls.__bases__)
if cls._is_protocol:
for base in cls.__mro__[1:]:
if not (base in (object, Generic) or
base.__module__ == 'collections.abc' and
base.__name__ in _PROTO_WHITELIST or
isinstance(base, TypingMeta) and base._is_protocol or
isinstance(base, GenericMeta) and
base.__origin__ is Generic):
raise TypeError('Protocols can only inherit from other'
' protocols, got %r' % base)
def _no_init(self, *args, **kwargs):
if type(self)._is_protocol:
raise TypeError('Protocols cannot be instantiated')
cls.__init__ = _no_init
def _proto_hook(other):
if not cls.__dict__.get('_is_protocol', None):
return NotImplemented
if not isinstance(other, type):
# Same error as for issubclass(1, int)
raise TypeError('issubclass() arg 1 must be a class')
for attr in _get_protocol_attrs(cls):
for base in other.__mro__:
if attr in base.__dict__:
if base.__dict__[attr] is None:
return NotImplemented
break
annotations = getattr(base, '__annotations__', {})
if (isinstance(annotations, typing.Mapping) and
attr in annotations and
isinstance(other, _ProtocolMeta) and
other._is_protocol):
break
else:
return NotImplemented
return True
if '__subclasshook__' not in cls.__dict__:
cls.__subclasshook__ = _proto_hook
def __instancecheck__(self, instance):
# We need this method for situations where attributes are
# assigned in __init__.
if ((not getattr(self, '_is_protocol', False) or
_is_callable_members_only(self)) and
issubclass(instance.__class__, self)):
return True
if self._is_protocol:
if all(hasattr(instance, attr) and
(not callable(getattr(self, attr, None)) or
getattr(instance, attr) is not None)
for attr in _get_protocol_attrs(self)):
return True
return super(GenericMeta, self).__instancecheck__(instance)
def __subclasscheck__(self, cls):
if self.__origin__ is not None:
if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
raise TypeError("Parameterized generics cannot be used with class "
"or instance checks")
return False
if (self.__dict__.get('_is_protocol', None) and
not self.__dict__.get('_is_runtime_protocol', None)):
if sys._getframe(1).f_globals['__name__'] in ['abc',
'functools',
'typing']:
return False
raise TypeError("Instance and class checks can only be used with"
" @runtime protocols")
if (self.__dict__.get('_is_runtime_protocol', None) and
not _is_callable_members_only(self)):
if sys._getframe(1).f_globals['__name__'] in ['abc',
'functools',
'typing']:
return super(GenericMeta, self).__subclasscheck__(cls)
raise TypeError("Protocols with non-method members"
" don't support issubclass()")
return super(GenericMeta, self).__subclasscheck__(cls)
if not OLD_GENERICS:
@_tp_cache
def __getitem__(self, params):
# We also need to copy this from GenericMeta.__getitem__ to get
# special treatment of "Protocol". (Comments removed for brevity.)
if not isinstance(params, tuple):
params = (params,)
if not params and _gorg(self) is not Tuple:
raise TypeError(
"Parameter list to %s[...] cannot be empty" % self.__qualname__)
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
if self in (Generic, Protocol):
if not all(isinstance(p, TypeVar) for p in params):
raise TypeError(
"Parameters to %r[...] must all be type variables" % self)
if len(set(params)) != len(params):
raise TypeError(
"Parameters to %r[...] must all be unique" % self)
tvars = params
args = params
elif self in (Tuple, Callable):
tvars = _type_vars(params)
args = params
elif self.__origin__ in (Generic, Protocol):
raise TypeError("Cannot subscript already-subscripted %s" %
repr(self))
else:
_check_generic(self, params)
tvars = _type_vars(params)
args = params
prepend = (self,) if self.__origin__ is None else ()
return self.__class__(self.__name__,
prepend + self.__bases__,
_no_slots_copy(self.__dict__),
tvars=tvars,
args=args,
origin=self,
extra=self.__extra__,
orig_bases=self.__orig_bases__)
class Protocol(metaclass=_ProtocolMeta):
"""Base class for protocol classes. Protocol classes are defined as::
class Proto(Protocol):
def meth(self) -> int:
...
Such classes are primarily used with static type checkers that recognize
structural subtyping (static duck-typing), for example::
class C:
def meth(self) -> int:
return 0
def func(x: Proto) -> int:
return x.meth()
func(C()) # Passes static type check
See PEP 544 for details. Protocol classes decorated with
    @typing_extensions.runtime act as simple-minded runtime protocols that check
    only the presence of given attributes, ignoring their type signatures.
Protocol classes can be generic, they are defined as::
class GenProto({bases}):
def meth(self) -> T:
...
"""
__slots__ = ()
_is_protocol = True
def __new__(cls, *args, **kwds):
if _gorg(cls) is Protocol:
raise TypeError("Type Protocol cannot be instantiated; "
"it can be used only as a base class")
if OLD_GENERICS:
return _generic_new(_next_in_mro(cls), cls, *args, **kwds)
return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
if Protocol.__doc__ is not None:
Protocol.__doc__ = Protocol.__doc__.format(bases="Protocol, Generic[T]" if
OLD_GENERICS else "Protocol[T]")
elif PEP_560:
from typing import _type_check, _GenericAlias, _collect_type_vars # noqa
class _ProtocolMeta(abc.ABCMeta):
# This metaclass is a bit unfortunate and exists only because of the lack
# of __instancehook__.
def __instancecheck__(cls, instance):
# We need this method for situations where attributes are
# assigned in __init__.
if ((not getattr(cls, '_is_protocol', False) or
_is_callable_members_only(cls)) and
issubclass(instance.__class__, cls)):
return True
if cls._is_protocol:
if all(hasattr(instance, attr) and
(not callable(getattr(cls, attr, None)) or
getattr(instance, attr) is not None)
for attr in _get_protocol_attrs(cls)):
return True
return super().__instancecheck__(instance)
class Protocol(metaclass=_ProtocolMeta):
# There is quite a lot of overlapping code with typing.Generic.
# Unfortunately it is hard to avoid this while these live in two different
# modules. The duplicated code will be removed when Protocol is moved to typing.
"""Base class for protocol classes. Protocol classes are defined as::
class Proto(Protocol):
def meth(self) -> int:
...
Such classes are primarily used with static type checkers that recognize
structural subtyping (static duck-typing), for example::
class C:
def meth(self) -> int:
return 0
def func(x: Proto) -> int:
return x.meth()
func(C()) # Passes static type check
See PEP 544 for details. Protocol classes decorated with
        @typing_extensions.runtime act as simple-minded runtime protocols that check
        only the presence of given attributes, ignoring their type signatures.
Protocol classes can be generic, they are defined as::
class GenProto(Protocol[T]):
def meth(self) -> T:
...
"""
__slots__ = ()
_is_protocol = True
def __new__(cls, *args, **kwds):
if cls is Protocol:
raise TypeError("Type Protocol cannot be instantiated; "
"it can only be used as a base class")
return super().__new__(cls)
@_tp_cache
def __class_getitem__(cls, params):
if not isinstance(params, tuple):
params = (params,)
if not params and cls is not Tuple:
raise TypeError(
"Parameter list to {}[...] cannot be empty".format(cls.__qualname__))
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
if cls is Protocol:
# Generic can only be subscripted with unique type variables.
if not all(isinstance(p, TypeVar) for p in params):
i = 0
while isinstance(params[i], TypeVar):
i += 1
raise TypeError(
"Parameters to Protocol[...] must all be type variables."
" Parameter {} is {}".format(i + 1, params[i]))
if len(set(params)) != len(params):
raise TypeError(
"Parameters to Protocol[...] must all be unique")
else:
# Subscripting a regular Generic subclass.
_check_generic(cls, params)
return _GenericAlias(cls, params)
def __init_subclass__(cls, *args, **kwargs):
tvars = []
if '__orig_bases__' in cls.__dict__:
error = Generic in cls.__orig_bases__
else:
error = Generic in cls.__bases__
if error:
raise TypeError("Cannot inherit from plain Generic")
if '__orig_bases__' in cls.__dict__:
tvars = _collect_type_vars(cls.__orig_bases__)
# Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
# If found, tvars must be a subset of it.
# If not found, tvars is it.
# Also check for and reject plain Generic,
# and reject multiple Generic[...] and/or Protocol[...].
gvars = None
for base in cls.__orig_bases__:
if (isinstance(base, _GenericAlias) and
base.__origin__ in (Generic, Protocol)):
# for error messages
the_base = 'Generic' if base.__origin__ is Generic else 'Protocol'
if gvars is not None:
raise TypeError(
"Cannot inherit from Generic[...]"
" and/or Protocol[...] multiple types.")
gvars = base.__parameters__
if gvars is None:
gvars = tvars
else:
tvarset = set(tvars)
gvarset = set(gvars)
if not tvarset <= gvarset:
s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
s_args = ', '.join(str(g) for g in gvars)
raise TypeError("Some type variables ({}) are"
" not listed in {}[{}]".format(s_vars,
the_base, s_args))
tvars = gvars
cls.__parameters__ = tuple(tvars)
# Determine if this is a protocol or a concrete subclass.
if not cls.__dict__.get('_is_protocol', None):
cls._is_protocol = any(b is Protocol for b in cls.__bases__)
# Set (or override) the protocol subclass hook.
def _proto_hook(other):
if not cls.__dict__.get('_is_protocol', None):
return NotImplemented
if not getattr(cls, '_is_runtime_protocol', False):
if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
return NotImplemented
raise TypeError("Instance and class checks can only be used with"
" @runtime protocols")
if not _is_callable_members_only(cls):
if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
return NotImplemented
raise TypeError("Protocols with non-method members"
" don't support issubclass()")
if not isinstance(other, type):
# Same error as for issubclass(1, int)
raise TypeError('issubclass() arg 1 must be a class')
for attr in _get_protocol_attrs(cls):
for base in other.__mro__:
if attr in base.__dict__:
if base.__dict__[attr] is None:
return NotImplemented
break
annotations = getattr(base, '__annotations__', {})
if (isinstance(annotations, typing.Mapping) and
attr in annotations and
isinstance(other, _ProtocolMeta) and
other._is_protocol):
break
else:
return NotImplemented
return True
if '__subclasshook__' not in cls.__dict__:
cls.__subclasshook__ = _proto_hook
# We have nothing more to do for non-protocols.
if not cls._is_protocol:
return
# Check consistency of bases.
for base in cls.__bases__:
if not (base in (object, Generic) or
base.__module__ == 'collections.abc' and
base.__name__ in _PROTO_WHITELIST or
isinstance(base, _ProtocolMeta) and base._is_protocol):
raise TypeError('Protocols can only inherit from other'
' protocols, got %r' % base)
def _no_init(self, *args, **kwargs):
if type(self)._is_protocol:
raise TypeError('Protocols cannot be instantiated')
cls.__init__ = _no_init
if hasattr(typing, 'runtime_checkable'):
runtime_checkable = typing.runtime_checkable
elif HAVE_PROTOCOLS:
def runtime_checkable(cls):
"""Mark a protocol class as a runtime protocol, so that it
can be used with isinstance() and issubclass(). Raise TypeError
if applied to a non-protocol class.
This allows a simple-minded structural check very similar to the
one-offs in collections.abc such as Hashable.
"""
if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol:
raise TypeError('@runtime_checkable can be only applied to protocol classes,'
' got %r' % cls)
cls._is_runtime_protocol = True
return cls
if HAVE_PROTOCOLS:
# Exists for backwards compatibility.
runtime = runtime_checkable
if hasattr(typing, 'SupportsIndex'):
SupportsIndex = typing.SupportsIndex
elif HAVE_PROTOCOLS:
@runtime_checkable
class SupportsIndex(Protocol):
__slots__ = ()
@abc.abstractmethod
def __index__(self) -> int:
pass
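# A sketch of a runtime-checkable protocol (an addition; hypothetical classes,
# never called at import time): @runtime_checkable makes isinstance() perform
# the simple attribute-presence check described in the Protocol docstring.
def _example_runtime_protocol():
    @runtime_checkable
    class Closable(Protocol):
        def close(self):
            ...
    class Resource:
        def close(self):
            pass
    return isinstance(Resource(), Closable) # True: 'close' is present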
if sys.version_info[:2] >= (3, 9):
# The standard library TypedDict in Python 3.8 does not store runtime information
# about which (if any) keys are optional. See https://bugs.python.org/issue38834
TypedDict = typing.TypedDict
else:
def _check_fails(cls, other):
try:
if sys._getframe(1).f_globals['__name__'] not in ['abc',
'functools',
'typing']:
# Typed dicts are only for static structural subtyping.
raise TypeError('TypedDict does not support instance and class checks')
except (AttributeError, ValueError):
pass
return False
def _dict_new(*args, **kwargs):
if not args:
raise TypeError('TypedDict.__new__(): not enough arguments')
        _, args = args[0], args[1:]  # allow the "cls" keyword to be passed
return dict(*args, **kwargs)
_dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)'
def _typeddict_new(*args, total=True, **kwargs):
if not args:
raise TypeError('TypedDict.__new__(): not enough arguments')
        _, args = args[0], args[1:]  # allow the "cls" keyword to be passed
        if args:
            typename, args = args[0], args[1:]  # allow the "_typename" keyword to be passed
elif '_typename' in kwargs:
typename = kwargs.pop('_typename')
import warnings
warnings.warn("Passing '_typename' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError("TypedDict.__new__() missing 1 required positional "
"argument: '_typename'")
if args:
try:
                fields, = args  # allow the "_fields" keyword to be passed
except ValueError:
raise TypeError('TypedDict.__new__() takes from 2 to 3 '
'positional arguments but {} '
'were given'.format(len(args) + 2))
elif '_fields' in kwargs and len(kwargs) == 1:
fields = kwargs.pop('_fields')
import warnings
warnings.warn("Passing '_fields' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
fields = None
if fields is None:
fields = kwargs
elif kwargs:
raise TypeError("TypedDict takes either a dict or keyword arguments,"
" but not both")
ns = {'__annotations__': dict(fields), '__total__': total}
try:
# Setting correct module is necessary to make typed dict classes pickleable.
ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
pass
return _TypedDictMeta(typename, (), ns)
_typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,'
' /, *, total=True, **kwargs)')
class _TypedDictMeta(type):
def __new__(cls, name, bases, ns, total=True):
# Create new typed dict class object.
# This method is called directly when TypedDict is subclassed,
# or via _typeddict_new when TypedDict is instantiated. This way
# TypedDict supports all three syntaxes described in its docstring.
# Subclasses and instances of TypedDict return actual dictionaries
# via _dict_new.
ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
annotations = {}
own_annotations = ns.get('__annotations__', {})
own_annotation_keys = set(own_annotations.keys())
msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
own_annotations = {
n: typing._type_check(tp, msg) for n, tp in own_annotations.items()
}
required_keys = set()
optional_keys = set()
for base in bases:
annotations.update(base.__dict__.get('__annotations__', {}))
required_keys.update(base.__dict__.get('__required_keys__', ()))
optional_keys.update(base.__dict__.get('__optional_keys__', ()))
annotations.update(own_annotations)
if total:
required_keys.update(own_annotation_keys)
else:
optional_keys.update(own_annotation_keys)
tp_dict.__annotations__ = annotations
tp_dict.__required_keys__ = frozenset(required_keys)
tp_dict.__optional_keys__ = frozenset(optional_keys)
if not hasattr(tp_dict, '__total__'):
tp_dict.__total__ = total
return tp_dict
__instancecheck__ = __subclasscheck__ = _check_fails
TypedDict = _TypedDictMeta('TypedDict', (dict,), {})
TypedDict.__module__ = __name__
TypedDict.__doc__ = \
"""A simple typed name space. At runtime it is equivalent to a plain dict.
TypedDict creates a dictionary type that expects all of its
instances to have a certain set of keys, with each key
associated with a value of a consistent type. This expectation
is not checked at runtime but is only enforced by type checkers.
Usage::
class Point2D(TypedDict):
x: int
y: int
label: str
a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
The type info can be accessed via the Point2D.__annotations__ dict, and
the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
TypedDict supports two additional equivalent forms::
Point2D = TypedDict('Point2D', x=int, y=int, label=str)
Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
The class syntax is only supported in Python 3.6+, while the other
two syntax forms work for Python 2.7 and 3.2+.
"""
# Python 3.9+ has PEP 593 (Annotated and modified get_type_hints)
if hasattr(typing, 'Annotated'):
Annotated = typing.Annotated
get_type_hints = typing.get_type_hints
# Not exported and not a public API, but needed for get_origin() and get_args()
# to work.
_AnnotatedAlias = typing._AnnotatedAlias
elif PEP_560:
class _AnnotatedAlias(typing._GenericAlias, _root=True):
"""Runtime representation of an annotated type.
At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
with extra annotations. The alias behaves like a normal typing alias:
instantiating it is the same as instantiating the underlying type, and
binding it to types is also the same.
"""
def __init__(self, origin, metadata):
if isinstance(origin, _AnnotatedAlias):
metadata = origin.__metadata__ + metadata
origin = origin.__origin__
super().__init__(origin, origin)
self.__metadata__ = metadata
def copy_with(self, params):
assert len(params) == 1
new_type = params[0]
return _AnnotatedAlias(new_type, self.__metadata__)
def __repr__(self):
return "typing_extensions.Annotated[{}, {}]".format(
typing._type_repr(self.__origin__),
", ".join(repr(a) for a in self.__metadata__)
)
def __reduce__(self):
return operator.getitem, (
Annotated, (self.__origin__,) + self.__metadata__
)
def __eq__(self, other):
if not isinstance(other, _AnnotatedAlias):
return NotImplemented
if self.__origin__ != other.__origin__:
return False
return self.__metadata__ == other.__metadata__
def __hash__(self):
return hash((self.__origin__, self.__metadata__))
class Annotated:
"""Add context specific metadata to a type.
Example: Annotated[int, runtime_check.Unsigned] indicates to the
hypothetical runtime_check module that this type is an unsigned int.
Every other consumer of this type can ignore this metadata and treat
this type as int.
The first argument to Annotated must be a valid type (and will be in
the __origin__ field), the remaining arguments are kept as a tuple in
the __metadata__ field.
Details:
- It's an error to call `Annotated` with less than two arguments.
- Nested Annotated types are flattened::
Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
- Instantiating an annotated type is equivalent to instantiating the
underlying type::
Annotated[C, Ann1](5) == C(5)
- Annotated can be used as a generic type alias::
Optimized = Annotated[T, runtime.Optimize()]
Optimized[int] == Annotated[int, runtime.Optimize()]
OptimizedList = Annotated[List[T], runtime.Optimize()]
OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
"""
__slots__ = ()
def __new__(cls, *args, **kwargs):
raise TypeError("Type Annotated cannot be instantiated.")
@_tp_cache
def __class_getitem__(cls, params):
if not isinstance(params, tuple) or len(params) < 2:
raise TypeError("Annotated[...] should be used "
"with at least two arguments (a type and an "
"annotation).")
msg = "Annotated[t, ...]: t must be a type."
origin = typing._type_check(params[0], msg)
metadata = tuple(params[1:])
return _AnnotatedAlias(origin, metadata)
def __init_subclass__(cls, *args, **kwargs):
raise TypeError(
"Cannot subclass {}.Annotated".format(cls.__module__)
)
def _strip_annotations(t):
"""Strips the annotations from a given type.
"""
if isinstance(t, _AnnotatedAlias):
return _strip_annotations(t.__origin__)
if isinstance(t, typing._GenericAlias):
stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
if stripped_args == t.__args__:
return t
res = t.copy_with(stripped_args)
res._special = t._special
return res
return t
def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
"""Return type hints for an object.
This is often the same as obj.__annotations__, but it handles
forward references encoded as string literals, adds Optional[t] if a
default value equal to None is set and recursively replaces all
'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
The argument may be a module, class, method, or function. The annotations
are returned as a dictionary. For classes, annotations include also
inherited members.
TypeError is raised if the argument is not of a type that can contain
annotations, and an empty dictionary is returned if no annotations are
present.
BEWARE -- the behavior of globalns and localns is counterintuitive
(unless you are familiar with how eval() and exec() work). The
search order is locals first, then globals.
- If no dict arguments are passed, an attempt is made to use the
globals from obj (or the respective module's globals for classes),
and these are also used as the locals. If the object does not appear
to have globals, an empty dictionary is used.
- If one dict argument is passed, it is used for both globals and
locals.
- If two dict arguments are passed, they specify globals and
locals, respectively.
"""
hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
if include_extras:
return hint
return {k: _strip_annotations(t) for k, t in hint.items()}
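# A short illustration of include_extras (a sketch, assuming the local
# Annotated defined above):
#
#     def f(x: Annotated[int, 'metadata']) -> None: ...
#
#     get_type_hints(f)                       # {'x': int, 'return': NoneType}
#     get_type_hints(f, include_extras=True)  # keeps Annotated[int, 'metadata']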
elif HAVE_ANNOTATED:
def _is_dunder(name):
"""Returns True if name is a __dunder_variable_name__."""
return len(name) > 4 and name.startswith('__') and name.endswith('__')
# Prior to Python 3.7 types did not have `copy_with`. A lot of the equality
# checks, argument expansion etc. are done on the _subs_tree. As a result we
# can't provide a get_type_hints function that strips out annotations.
class AnnotatedMeta(typing.GenericMeta):
"""Metaclass for Annotated"""
def __new__(cls, name, bases, namespace, **kwargs):
if any(b is not object for b in bases):
raise TypeError("Cannot subclass " + str(Annotated))
return super().__new__(cls, name, bases, namespace, **kwargs)
@property
def __metadata__(self):
return self._subs_tree()[2]
def _tree_repr(self, tree):
cls, origin, metadata = tree
if not isinstance(origin, tuple):
tp_repr = typing._type_repr(origin)
else:
tp_repr = origin[0]._tree_repr(origin)
metadata_reprs = ", ".join(repr(arg) for arg in metadata)
return '%s[%s, %s]' % (cls, tp_repr, metadata_reprs)
def _subs_tree(self, tvars=None, args=None): # noqa
if self is Annotated:
return Annotated
res = super()._subs_tree(tvars=tvars, args=args)
# Flatten nested Annotated
if isinstance(res[1], tuple) and res[1][0] is Annotated:
sub_tp = res[1][1]
sub_annot = res[1][2]
return (Annotated, sub_tp, sub_annot + res[2])
return res
def _get_cons(self):
"""Return the class used to create instance of this type."""
if self.__origin__ is None:
raise TypeError("Cannot get the underlying type of a "
"non-specialized Annotated type.")
tree = self._subs_tree()
while isinstance(tree, tuple) and tree[0] is Annotated:
tree = tree[1]
if isinstance(tree, tuple):
return tree[0]
else:
return tree
@_tp_cache
def __getitem__(self, params):
if not isinstance(params, tuple):
params = (params,)
if self.__origin__ is not None: # specializing an instantiated type
return super().__getitem__(params)
elif not isinstance(params, tuple) or len(params) < 2:
raise TypeError("Annotated[...] should be instantiated "
"with at least two arguments (a type and an "
"annotation).")
else:
msg = "Annotated[t, ...]: t must be a type."
tp = typing._type_check(params[0], msg)
metadata = tuple(params[1:])
return self.__class__(
self.__name__,
self.__bases__,
_no_slots_copy(self.__dict__),
tvars=_type_vars((tp,)),
# Metadata is a tuple so it won't be touched by _replace_args et al.
args=(tp, metadata),
origin=self,
)
def __call__(self, *args, **kwargs):
cons = self._get_cons()
result = cons(*args, **kwargs)
try:
result.__orig_class__ = self
except AttributeError:
pass
return result
def __getattr__(self, attr):
# For simplicity we just don't relay all dunder names
if self.__origin__ is not None and not _is_dunder(attr):
return getattr(self._get_cons(), attr)
raise AttributeError(attr)
def __setattr__(self, attr, value):
if _is_dunder(attr) or attr.startswith('_abc_'):
super().__setattr__(attr, value)
elif self.__origin__ is None:
raise AttributeError(attr)
else:
setattr(self._get_cons(), attr, value)
def __instancecheck__(self, obj):
raise TypeError("Annotated cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Annotated cannot be used with issubclass().")
class Annotated(metaclass=AnnotatedMeta):
"""Add context specific metadata to a type.
Example: Annotated[int, runtime_check.Unsigned] indicates to the
hypothetical runtime_check module that this type is an unsigned int.
Every other consumer of this type can ignore this metadata and treat
this type as int.
The first argument to Annotated must be a valid type, the remaining
arguments are kept as a tuple in the __metadata__ field.
Details:
- It's an error to call `Annotated` with less than two arguments.
- Nested Annotated types are flattened::
Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
- Instantiating an annotated type is equivalent to instantiating the
underlying type::
Annotated[C, Ann1](5) == C(5)
- Annotated can be used as a generic type alias::
Optimized = Annotated[T, runtime.Optimize()]
Optimized[int] == Annotated[int, runtime.Optimize()]
OptimizedList = Annotated[List[T], runtime.Optimize()]
OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
"""
# Python 3.8 has get_origin() and get_args() but those implementations aren't
# Annotated-aware, so we can't use those, only Python 3.9 versions will do.
if sys.version_info[:2] >= (3, 9):
get_origin = typing.get_origin
get_args = typing.get_args
elif PEP_560:
from typing import _GenericAlias # noqa
def get_origin(tp):
"""Get the unsubscripted version of a type.
This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
and Annotated. Return None for unsupported types. Examples::
get_origin(Literal[42]) is Literal
get_origin(int) is None
get_origin(ClassVar[int]) is ClassVar
get_origin(Generic) is Generic
get_origin(Generic[T]) is Generic
get_origin(Union[T, int]) is Union
get_origin(List[Tuple[T, T]][int]) == list
"""
if isinstance(tp, _AnnotatedAlias):
return Annotated
if isinstance(tp, _GenericAlias):
return tp.__origin__
if tp is Generic:
return Generic
return None
def get_args(tp):
"""Get type arguments with all substitutions performed.
For unions, basic simplifications used by Union constructor are performed.
Examples::
get_args(Dict[str, int]) == (str, int)
get_args(int) == ()
get_args(Union[int, Union[T, int], str][int]) == (int, str)
get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
get_args(Callable[[], T][int]) == ([], int)
"""
if isinstance(tp, _AnnotatedAlias):
return (tp.__origin__,) + tp.__metadata__
if isinstance(tp, _GenericAlias) and not tp._special:
res = tp.__args__
if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
res = (list(res[:-1]), res[-1])
return res
return ()
if hasattr(typing, 'TypeAlias'):
TypeAlias = typing.TypeAlias
elif sys.version_info[:2] >= (3, 9):
class _TypeAliasForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
@_TypeAliasForm
def TypeAlias(self, parameters):
"""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example above.
"""
raise TypeError("{} is not subscriptable".format(self))
elif sys.version_info[:2] >= (3, 7):
class _TypeAliasForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
TypeAlias = _TypeAliasForm('TypeAlias',
doc="""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example
above.""")
elif hasattr(typing, '_FinalTypingBase'):
class _TypeAliasMeta(typing.TypingMeta):
"""Metaclass for TypeAlias"""
def __repr__(self):
return 'typing_extensions.TypeAlias'
class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True):
"""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example above.
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError("TypeAlias cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("TypeAlias cannot be used with issubclass().")
def __repr__(self):
return 'typing_extensions.TypeAlias'
TypeAlias = _TypeAliasBase(_root=True)
else:
class _TypeAliasMeta(typing.TypingMeta):
"""Metaclass for TypeAlias"""
def __instancecheck__(self, obj):
raise TypeError("TypeAlias cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("TypeAlias cannot be used with issubclass().")
def __call__(self, *args, **kwargs):
raise TypeError("Cannot instantiate TypeAlias")
class TypeAlias(metaclass=_TypeAliasMeta, _root=True):
"""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example above.
"""
__slots__ = () | zato-common-holocene | /zato-common-holocene-3.2.1.tar.gz/zato-common-holocene-3.2.1/src/zato/common/ext/typing_extensions.py | typing_extensions.py |
import re
import sys
import copy
import types
import inspect
import keyword
__all__ = ['dataclass',
'field',
'Field',
'FrozenInstanceError',
'InitVar',
'MISSING',
# Helper functions.
'fields',
'asdict',
'astuple',
'make_dataclass',
'replace',
'is_dataclass',
]
# Conditions for adding methods. The boxes indicate what action the
# dataclass decorator takes. For all of these tables, when I talk
# about init=, repr=, eq=, order=, unsafe_hash=, or frozen=, I'm
# referring to the arguments to the @dataclass decorator. When
# checking if a dunder method already exists, I mean check for an
# entry in the class's __dict__. I never check to see if an attribute
# is defined in a base class.
# Key:
# +=========+=========================================+
# + Value | Meaning |
# +=========+=========================================+
# | <blank> | No action: no method is added. |
# +---------+-----------------------------------------+
# | add | Generated method is added. |
# +---------+-----------------------------------------+
# | raise | TypeError is raised. |
# +---------+-----------------------------------------+
# | None | Attribute is set to None. |
# +=========+=========================================+
# __init__
#
# +--- init= parameter
# |
# v | | |
# | no | yes | <--- class has __init__ in __dict__?
# +=======+=======+=======+
# | False | | |
# +-------+-------+-------+
# | True | add | | <- the default
# +=======+=======+=======+
# __repr__
#
# +--- repr= parameter
# |
# v | | |
# | no | yes | <--- class has __repr__ in __dict__?
# +=======+=======+=======+
# | False | | |
# +-------+-------+-------+
# | True | add | | <- the default
# +=======+=======+=======+
# __setattr__
# __delattr__
#
# +--- frozen= parameter
# |
# v | | |
# | no | yes | <--- class has __setattr__ or __delattr__ in __dict__?
# +=======+=======+=======+
# | False | | | <- the default
# +-------+-------+-------+
# | True | add | raise |
# +=======+=======+=======+
# Raise because not adding these methods would break the "frozen-ness"
# of the class.
# __eq__
#
# +--- eq= parameter
# |
# v | | |
# | no | yes | <--- class has __eq__ in __dict__?
# +=======+=======+=======+
# | False | | |
# +-------+-------+-------+
# | True | add | | <- the default
# +=======+=======+=======+
# __lt__
# __le__
# __gt__
# __ge__
#
# +--- order= parameter
# |
# v | | |
# | no | yes | <--- class has any comparison method in __dict__?
# +=======+=======+=======+
# | False | | | <- the default
# +-------+-------+-------+
# | True | add | raise |
# +=======+=======+=======+
# Raise because to allow this case would interfere with using
# functools.total_ordering.
# __hash__
# +------------------- unsafe_hash= parameter
# | +----------- eq= parameter
# | | +--- frozen= parameter
# | | |
# v v v | | |
# | no | yes | <--- class has explicitly defined __hash__
# +=======+=======+=======+========+========+
# | False | False | False | | | No __eq__, use the base class __hash__
# +-------+-------+-------+--------+--------+
# | False | False | True | | | No __eq__, use the base class __hash__
# +-------+-------+-------+--------+--------+
# | False | True | False | None | | <-- the default, not hashable
# +-------+-------+-------+--------+--------+
# | False | True | True | add | | Frozen, so hashable, allows override
# +-------+-------+-------+--------+--------+
# | True | False | False | add | raise | Has no __eq__, but hashable
# +-------+-------+-------+--------+--------+
# | True | False | True | add | raise | Has no __eq__, but hashable
# +-------+-------+-------+--------+--------+
# | True | True | False | add | raise | Not frozen, but hashable
# +-------+-------+-------+--------+--------+
# | True | True | True | add | raise | Frozen, so hashable
# +=======+=======+=======+========+========+
# For boxes that are blank, __hash__ is untouched and therefore
# inherited from the base class. If the base is object, then
# id-based hashing is used.
#
# Note that a class may already have __hash__=None if it specified an
# __eq__ method in the class body (not one that was created by
# @dataclass).
#
# See _hash_action (below) for a coded version of this table.
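# Two rows of the table made concrete (a sketch, not part of the
# original module):
#
#     @dataclass                   # unsafe_hash=False, eq=True, frozen=False
#     class A:
#         x: int                   # __hash__ is set to None: unhashable
#
#     @dataclass(frozen=True)      # unsafe_hash=False, eq=True, frozen=True
#     class B:
#         x: int                   # __hash__ is added: hash(B(1)) works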
# Raised when an attempt is made to modify a frozen class.
class FrozenInstanceError(AttributeError): pass
# A sentinel object for default values to signal that a default
# factory will be used. This is given a nice repr() which will appear
# in the function signature of dataclasses' constructors.
class _HAS_DEFAULT_FACTORY_CLASS:
def __repr__(self):
return '<factory>'
_HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS()
# A sentinel object to detect if a parameter is supplied or not. Use
# a class to give it a better repr.
class _MISSING_TYPE:
pass
MISSING = _MISSING_TYPE()
# Since most per-field metadata will be unused, create an empty
# read-only proxy that can be shared among all fields.
_EMPTY_METADATA = types.MappingProxyType({})
# Markers for the various kinds of fields and pseudo-fields.
class _FIELD_BASE:
def __init__(self, name):
self.name = name
def __repr__(self):
return self.name
_FIELD = _FIELD_BASE('_FIELD')
_FIELD_CLASSVAR = _FIELD_BASE('_FIELD_CLASSVAR')
_FIELD_INITVAR = _FIELD_BASE('_FIELD_INITVAR')
# The name of an attribute on the class where we store the Field
# objects. Also used to check if a class is a Data Class.
_FIELDS = '__dataclass_fields__'
# The name of an attribute on the class that stores the parameters to
# @dataclass.
_PARAMS = '__dataclass_params__'
# The name of the function that, if it exists, is called at the end of
# __init__.
_POST_INIT_NAME = '__post_init__'
# String regex that string annotations for ClassVar or InitVar must match.
# Allows "identifier.identifier[" or "identifier[".
# https://bugs.python.org/issue33453 for details.
_MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)')
class _InitVarMeta(type):
def __getitem__(self, params):
return self
class InitVar(metaclass=_InitVarMeta):
pass
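# Illustrative InitVar use (a sketch): the value is accepted by __init__
# and forwarded to __post_init__, but is not stored as a field.
#
#     @dataclass
#     class C:
#         x: int
#         scale: InitVar[int] = 1
#
#         def __post_init__(self, scale):
#             self.x *= scale
#
#     assert C(2, scale=3).x == 6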
# Instances of Field are only ever created from within this module,
# and only from the field() function, although Field instances are
# exposed externally as (conceptually) read-only objects.
#
# name and type are filled in after the fact, not in __init__.
# They're not known at the time this class is instantiated, but it's
# convenient if they're available later.
#
# When cls._FIELDS is filled in with a list of Field objects, the name
# and type fields will have been populated.
class Field:
__slots__ = ('name',
'type',
'default',
'default_factory',
'repr',
'hash',
'init',
'compare',
'metadata',
'_field_type', # Private: not to be used by user code.
)
def __init__(self, default, default_factory, init, repr, hash, compare,
metadata):
self.name = None
self.type = None
self.default = default
self.default_factory = default_factory
self.init = init
self.repr = repr
self.hash = hash
self.compare = compare
self.metadata = (_EMPTY_METADATA
if metadata is None or len(metadata) == 0 else
types.MappingProxyType(metadata))
self._field_type = None
def __repr__(self):
return ('Field('
f'name={self.name!r},'
f'type={self.type!r},'
f'default={self.default!r},'
f'default_factory={self.default_factory!r},'
f'init={self.init!r},'
f'repr={self.repr!r},'
f'hash={self.hash!r},'
f'compare={self.compare!r},'
f'metadata={self.metadata!r},'
f'_field_type={self._field_type}'
')')
# This is used to support the PEP 487 __set_name__ protocol in the
# case where we're using a field that contains a descriptor as a
# default value. For details on __set_name__, see
# https://www.python.org/dev/peps/pep-0487/#implementation-details.
#
# Note that in _process_class, this Field object is overwritten
# with the default value, so the end result is a descriptor that
# had __set_name__ called on it at the right time.
def __set_name__(self, owner, name):
func = getattr(type(self.default), '__set_name__', None)
if func:
# There is a __set_name__ method on the descriptor, call
# it.
func(self.default, owner, name)
class _DataclassParams:
__slots__ = ('init',
'repr',
'eq',
'order',
'unsafe_hash',
'frozen',
)
def __init__(self, init, repr, eq, order, unsafe_hash, frozen):
self.init = init
self.repr = repr
self.eq = eq
self.order = order
self.unsafe_hash = unsafe_hash
self.frozen = frozen
def __repr__(self):
return ('_DataclassParams('
f'init={self.init!r},'
f'repr={self.repr!r},'
f'eq={self.eq!r},'
f'order={self.order!r},'
f'unsafe_hash={self.unsafe_hash!r},'
f'frozen={self.frozen!r}'
')')
# This function is used instead of exposing Field creation directly,
# so that a type checker can be told (via overloads) that this is a
# function whose type depends on its parameters.
def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True,
hash=None, compare=True, metadata=None):
"""Return an object to identify dataclass fields.
default is the default value of the field. default_factory is a
0-argument function called to initialize a field's value. If init
is True, the field will be a parameter to the class's __init__()
function. If repr is True, the field will be included in the
object's repr(). If hash is True, the field will be included in
the object's hash(). If compare is True, the field will be used
in comparison functions. metadata, if specified, must be a
mapping which is stored but not otherwise examined by dataclass.
It is an error to specify both default and default_factory.
"""
if default is not MISSING and default_factory is not MISSING:
raise ValueError('cannot specify both default and default_factory')
return Field(default, default_factory, init, repr, hash, compare,
metadata)
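# Typical field() usage (a sketch): mutable defaults must be supplied
# via default_factory, per the check in _get_field() below.
#
#     @dataclass
#     class D:
#         tags: list = field(default_factory=list)
#         uid: int = field(default=0, repr=False)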
def _tuple_str(obj_name, fields):
# Return a string representing each field of obj_name as a tuple
# member. So, if fields is ['x', 'y'] and obj_name is "self",
# return "(self.x,self.y)".
# Special case for the 0-tuple.
if not fields:
return '()'
# Note the trailing comma, needed if this turns out to be a 1-tuple.
return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)'
def _create_fn(name, args, body, *, globals=None, locals=None,
return_type=MISSING):
# Note that we mutate locals when exec() is called. Caller
# beware! The only callers are internal to this module, so no
# worries about external callers.
if locals is None:
locals = {}
return_annotation = ''
if return_type is not MISSING:
locals['_return_type'] = return_type
return_annotation = '->_return_type'
args = ','.join(args)
body = '\n'.join(f' {b}' for b in body)
# Compute the text of the entire function.
txt = f'def {name}({args}){return_annotation}:\n{body}'
exec(txt, globals, locals)
return locals[name]
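# For orientation (an approximation, not part of the original module):
# for "class C: x: int = 0", the text exec'd by _create_fn for __init__
# looks roughly like
#
#     def __init__(self, x: _type_x = _dflt_x) -> _return_type:
#         self.x = x
#
# where _type_x, _dflt_x and _return_type are names bound via locals/globals.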
def _field_assign(frozen, name, value, self_name):
# If we're a frozen class, then assign to our fields in __init__
# via object.__setattr__. Otherwise, just use a simple
# assignment.
#
# self_name is what "self" is called in this function: don't
# hard-code "self", since that might be a field name.
if frozen:
return f'object.__setattr__({self_name},{name!r},{value})'
return f'{self_name}.{name}={value}'
def _field_init(f, frozen, globals, self_name):
# Return the text of the line in the body of __init__ that will
# initialize this field.
default_name = f'_dflt_{f.name}'
if f.default_factory is not MISSING:
if f.init:
# This field has a default factory. If a parameter is
# given, use it. If not, call the factory.
globals[default_name] = f.default_factory
value = (f'{default_name}() '
f'if {f.name} is _HAS_DEFAULT_FACTORY '
f'else {f.name}')
else:
# This is a field that's not in the __init__ params, but
# has a default factory function. It needs to be
# initialized here by calling the factory function,
# because there's no other way to initialize it.
# For a field initialized with a default=defaultvalue, the
# class dict just has the default value
# (cls.fieldname=defaultvalue). But that won't work for a
# default factory, the factory must be called in __init__
# and we must assign that to self.fieldname. We can't
# fall back to the class dict's value, both because it's
# not set, and because it might be different per-class
# (which, after all, is why we have a factory function!).
globals[default_name] = f.default_factory
value = f'{default_name}()'
else:
# No default factory.
if f.init:
if f.default is MISSING:
# There's no default, just do an assignment.
value = f.name
elif f.default is not MISSING:
globals[default_name] = f.default
value = f.name
else:
# This field does not need initialization. Signify that
# to the caller by returning None.
return None
# Only test this now, so that we can create variables for the
# default. However, return None to signify that we're not going
# to actually do the assignment statement for InitVars.
if f._field_type is _FIELD_INITVAR:
return None
# Now, actually generate the field assignment.
return _field_assign(frozen, f.name, value, self_name)
def _init_param(f):
# Return the __init__ parameter string for this field. For
# example, the equivalent of 'x:int=3' (except instead of 'int',
# reference a variable set to int, and instead of '3', reference a
# variable set to 3).
if f.default is MISSING and f.default_factory is MISSING:
# There's no default, and no default_factory, just output the
# variable name and type.
default = ''
elif f.default is not MISSING:
# There's a default, this will be the name that's used to look
# it up.
default = f'=_dflt_{f.name}'
elif f.default_factory is not MISSING:
# There's a factory function. Set a marker.
default = '=_HAS_DEFAULT_FACTORY'
return f'{f.name}:_type_{f.name}{default}'
def _init_fn(fields, frozen, has_post_init, self_name):
# fields contains both real fields and InitVar pseudo-fields.
# Make sure we don't have fields without defaults following fields
# with defaults. This actually would be caught when exec-ing the
# function source code, but catching it here gives a better error
# message, and future-proofs us in case we build up the function
# using ast.
seen_default = False
for f in fields:
# Only consider fields in the __init__ call.
if f.init:
if not (f.default is MISSING and f.default_factory is MISSING):
seen_default = True
elif seen_default:
raise TypeError(f'non-default argument {f.name!r} '
'follows default argument')
globals = {'MISSING': MISSING,
'_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY}
body_lines = []
for f in fields:
line = _field_init(f, frozen, globals, self_name)
# line is None means that this field doesn't require
# initialization (it's a pseudo-field). Just skip it.
if line:
body_lines.append(line)
# Does this class have a post-init function?
if has_post_init:
params_str = ','.join(f.name for f in fields
if f._field_type is _FIELD_INITVAR)
body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str})')
# If no body lines, use 'pass'.
if not body_lines:
body_lines = ['pass']
locals = {f'_type_{f.name}': f.type for f in fields}
return _create_fn('__init__',
[self_name] + [_init_param(f) for f in fields if f.init],
body_lines,
locals=locals,
globals=globals,
return_type=None)
def _repr_fn(fields):
return _create_fn('__repr__',
('self',),
['return self.__class__.__qualname__ + f"(' +
', '.join([f"{f.name}={{self.{f.name}!r}}"
for f in fields]) +
')"'])
def _frozen_get_del_attr(cls, fields):
# XXX: globals is modified on the first call to _create_fn, then
# the modified version is used in the second call. Is this okay?
globals = {'cls': cls,
'FrozenInstanceError': FrozenInstanceError}
if fields:
fields_str = '(' + ','.join(repr(f.name) for f in fields) + ',)'
else:
# Special case for the zero-length tuple.
fields_str = '()'
return (_create_fn('__setattr__',
('self', 'name', 'value'),
(f'if type(self) is cls or name in {fields_str}:',
' raise FrozenInstanceError(f"cannot assign to field {name!r}")',
'super(cls, self).__setattr__(name, value)'),
globals=globals),
_create_fn('__delattr__',
('self', 'name'),
(f'if type(self) is cls or name in {fields_str}:',
' raise FrozenInstanceError(f"cannot delete field {name!r}")',
'super(cls, self).__delattr__(name)'),
globals=globals),
)
def _cmp_fn(name, op, self_tuple, other_tuple):
# Create a comparison function. If the fields in the object are
# named 'x' and 'y', then self_tuple is the string
# '(self.x,self.y)' and other_tuple is the string
# '(other.x,other.y)'.
return _create_fn(name,
('self', 'other'),
[ 'if other.__class__ is self.__class__:',
f' return {self_tuple}{op}{other_tuple}',
'return NotImplemented'])
def _hash_fn(fields):
self_tuple = _tuple_str('self', fields)
return _create_fn('__hash__',
('self',),
[f'return hash({self_tuple})'])
def _is_classvar(a_type, typing):
# This test uses a typing internal class, but it's the best way to
# test if this is a ClassVar.
return type(a_type) is typing._ClassVar
def _is_initvar(a_type, dataclasses):
# The module we're checking against is the module we're
# currently in (dataclasses.py).
return a_type is dataclasses.InitVar
def _is_type(annotation, cls, a_module, a_type, is_type_predicate):
# Given a type annotation string, does it refer to a_type in
# a_module? For example, when checking that annotation denotes a
# ClassVar, then a_module is typing, and a_type is
# typing.ClassVar.
# It's possible to look up a_module given a_type, but it involves
# looking in sys.modules (again!), and seems like a waste since
# the caller already knows a_module.
# - annotation is a string type annotation
# - cls is the class that this annotation was found in
# - a_module is the module we want to match
# - a_type is the type in that module we want to match
# - is_type_predicate is a function called with (obj, a_module)
# that determines if obj is of the desired type.
# Since this test does not do a local namespace lookup (and
# instead only a module (global) lookup), there are some things it
# gets wrong.
# With string annotations, cv0 will be detected as a ClassVar:
# CV = ClassVar
# @dataclass
# class C0:
# cv0: CV
# But in this example cv1 will not be detected as a ClassVar:
# @dataclass
# class C1:
# CV = ClassVar
# cv1: CV
# In C1, the code in this function (_is_type) will look up "CV" in
# the module and not find it, so it will not consider cv1 as a
# ClassVar. This is a fairly obscure corner case, and the best
# way to fix it would be to eval() the string "CV" with the
# correct global and local namespaces. However that would involve
# an eval() penalty for every single field of every dataclass
# that's defined. It was judged not worth it.
match = _MODULE_IDENTIFIER_RE.match(annotation)
if match:
ns = None
module_name = match.group(1)
if not module_name:
# No module name, assume the class's module did
# "from dataclasses import InitVar".
ns = sys.modules.get(cls.__module__).__dict__
else:
# Look up module_name in the class's module.
module = sys.modules.get(cls.__module__)
if module and module.__dict__.get(module_name) is a_module:
ns = sys.modules.get(a_type.__module__).__dict__
if ns and is_type_predicate(ns.get(match.group(2)), a_module):
return True
return False
def _get_field(cls, a_name, a_type):
# Return a Field object for this field name and type. ClassVars
# and InitVars are also returned, but marked as such (see
# f._field_type).
# If the default value isn't derived from Field, then it's only a
# normal default value. Convert it to a Field().
default = getattr(cls, a_name, MISSING)
if isinstance(default, Field):
f = default
else:
if isinstance(default, types.MemberDescriptorType):
# This is a field in __slots__, so it has no default value.
default = MISSING
f = field(default=default)
# Only at this point do we know the name and the type. Set them.
f.name = a_name
f.type = a_type
# Assume it's a normal field until proven otherwise. We're next
# going to decide if it's a ClassVar or InitVar, everything else
# is just a normal field.
f._field_type = _FIELD
# In addition to checking for actual types here, also check for
# string annotations. get_type_hints() won't always work for us
# (see https://github.com/python/typing/issues/508 for example),
# plus it's expensive and would require an eval for every string
# annotation. So, make a best effort to see if this is a ClassVar
# or InitVar using regex's and checking that the thing referenced
# is actually of the correct type.
# For the complete discussion, see https://bugs.python.org/issue33453
# If typing has not been imported, then it's impossible for any
# annotation to be a ClassVar. So, only look for ClassVar if
# typing has been imported by any module (not necessarily cls's
# module).
typing = sys.modules.get('typing')
if typing:
if (_is_classvar(a_type, typing)
or (isinstance(f.type, str)
and _is_type(f.type, cls, typing, typing.ClassVar,
_is_classvar))):
f._field_type = _FIELD_CLASSVAR
# If the type is InitVar, or if it's a matching string annotation,
# then it's an InitVar.
if f._field_type is _FIELD:
# The module we're checking against is the module we're
# currently in (dataclasses.py).
dataclasses = sys.modules[__name__]
if (_is_initvar(a_type, dataclasses)
or (isinstance(f.type, str)
and _is_type(f.type, cls, dataclasses, dataclasses.InitVar,
_is_initvar))):
f._field_type = _FIELD_INITVAR
# Validations for individual fields. This is delayed until now,
# instead of in the Field() constructor, since only here do we
# know the field name, which allows for better error reporting.
# Special restrictions for ClassVar and InitVar.
if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR):
if f.default_factory is not MISSING:
raise TypeError(f'field {f.name} cannot have a '
'default factory')
# Should I check for other field settings? default_factory
# seems the most serious to check for. Maybe add others. For
# example, how about init=False (or really,
# init=<not-the-default-init-value>)? It makes no sense for
# ClassVar and InitVar to specify init=<anything>.
# For real fields, disallow mutable defaults for known types.
if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)):
raise ValueError(f'mutable default {type(f.default)} for field '
f'{f.name} is not allowed: use default_factory')
return f
def _set_new_attribute(cls, name, value):
# Never overwrites an existing attribute. Returns True if the
# attribute already exists.
if name in cls.__dict__:
return True
setattr(cls, name, value)
return False
# Decide if/how we're going to create a hash function. Key is
# (unsafe_hash, eq, frozen, does-hash-exist). Value is the action to
# take. The common case is to do nothing, so instead of providing a
# function that is a no-op, use None to signify that.
def _hash_set_none(cls, fields):
return None
def _hash_add(cls, fields):
flds = [f for f in fields if (f.compare if f.hash is None else f.hash)]
return _hash_fn(flds)
def _hash_exception(cls, fields):
# Raise an exception.
raise TypeError(f'Cannot overwrite attribute __hash__ '
f'in class {cls.__name__}')
#
# +-------------------------------------- unsafe_hash?
# | +------------------------------- eq?
# | | +------------------------ frozen?
# | | | +---------------- has-explicit-hash?
# | | | |
# | | | | +------- action
# | | | | |
# v v v v v
_hash_action = {(False, False, False, False): None,
(False, False, False, True ): None,
(False, False, True, False): None,
(False, False, True, True ): None,
(False, True, False, False): _hash_set_none,
(False, True, False, True ): None,
(False, True, True, False): _hash_add,
(False, True, True, True ): None,
(True, False, False, False): _hash_add,
(True, False, False, True ): _hash_exception,
(True, False, True, False): _hash_add,
(True, False, True, True ): _hash_exception,
(True, True, False, False): _hash_add,
(True, True, False, True ): _hash_exception,
(True, True, True, False): _hash_add,
(True, True, True, True ): _hash_exception,
}
# See https://bugs.python.org/issue32929#msg312829 for an if-statement
# version of this table.
def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
# Now that dicts retain insertion order, there's no reason to use
# an ordered dict. I am leveraging that ordering here, because
# derived class fields overwrite base class fields, but the order
# is defined by the base class, which is found first.
fields = {}
setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order,
unsafe_hash, frozen))
# Find our base classes in reverse MRO order, and exclude
# ourselves. In reversed order so that more derived classes
# override earlier field definitions in base classes. As long as
# we're iterating over them, see if any are frozen.
any_frozen_base = False
has_dataclass_bases = False
for b in cls.__mro__[-1:0:-1]:
# Only process classes that have been processed by our
# decorator. That is, they have a _FIELDS attribute.
base_fields = getattr(b, _FIELDS, None)
if base_fields:
has_dataclass_bases = True
for f in base_fields.values():
fields[f.name] = f
if getattr(b, _PARAMS).frozen:
any_frozen_base = True
# Annotations that are defined in this class (not in base
# classes). If __annotations__ isn't present, then this class
# adds no new annotations. We use this to compute fields that are
# added by this class.
#
# Fields are found from cls_annotations, which is guaranteed to be
# ordered. Default values are from class attributes, if a field
# has a default. If the default value is a Field(), then it
# contains additional info beyond (and possibly including) the
# actual default value. Pseudo-fields ClassVars and InitVars are
# included, despite the fact that they're not real fields. That's
# dealt with later.
cls_annotations = cls.__dict__.get('__annotations__', {})
# Now find fields in our class. While doing so, validate some
# things, and set the default values (as class attributes) where
# we can.
cls_fields = [_get_field(cls, name, type)
for name, type in cls_annotations.items()]
for f in cls_fields:
fields[f.name] = f
# If the class attribute (which is the default value for this
# field) exists and is of type 'Field', replace it with the
# real default. This is so that normal class introspection
# sees a real default value, not a Field.
if isinstance(getattr(cls, f.name, None), Field):
if f.default is MISSING:
# If there's no default, delete the class attribute.
# This happens if we specify field(repr=False), for
# example (that is, we specified a field object, but
# no default value). Also if we're using a default
# factory. The class attribute should not be set at
# all in the post-processed class.
delattr(cls, f.name)
else:
setattr(cls, f.name, f.default)
# Do we have any Field members that don't also have annotations?
for name, value in cls.__dict__.items():
if isinstance(value, Field) and name not in cls_annotations:
raise TypeError(f'{name!r} is a field but has no type annotation')
# Check rules that apply if we are derived from any dataclasses.
if has_dataclass_bases:
# Raise an exception if any of our bases are frozen, but we're not.
if any_frozen_base and not frozen:
raise TypeError('cannot inherit non-frozen dataclass from a '
'frozen one')
# Raise an exception if we're frozen, but none of our bases are.
if not any_frozen_base and frozen:
raise TypeError('cannot inherit frozen dataclass from a '
'non-frozen one')
# Remember all of the fields on our class (including bases). This
# also marks this class as being a dataclass.
setattr(cls, _FIELDS, fields)
# Was this class defined with an explicit __hash__? Note that if
# __eq__ is defined in this class, then python will automatically
# set __hash__ to None. This is a heuristic, as it's possible
# that such a __hash__ == None was not auto-generated, but it's
# close enough.
class_hash = cls.__dict__.get('__hash__', MISSING)
has_explicit_hash = not (class_hash is MISSING or
(class_hash is None and '__eq__' in cls.__dict__))
# If we're generating ordering methods, we must be generating the
# eq methods.
if order and not eq:
raise ValueError('eq must be true if order is true')
if init:
# Does this class have a post-init function?
has_post_init = hasattr(cls, _POST_INIT_NAME)
# Include InitVars and regular fields (so, not ClassVars).
flds = [f for f in fields.values()
if f._field_type in (_FIELD, _FIELD_INITVAR)]
_set_new_attribute(cls, '__init__',
_init_fn(flds,
frozen,
has_post_init,
# The name to use for the "self"
# param in __init__. Use "self"
# if possible.
'__dataclass_self__' if 'self' in fields
else 'self',
))
# Get the fields as a list, and include only real fields. This is
# used in all of the following methods.
field_list = [f for f in fields.values() if f._field_type is _FIELD]
if repr:
flds = [f for f in field_list if f.repr]
_set_new_attribute(cls, '__repr__', _repr_fn(flds))
if eq:
# Create __eq__ method. There's no need for a __ne__ method,
# since python will call __eq__ and negate it.
flds = [f for f in field_list if f.compare]
self_tuple = _tuple_str('self', flds)
other_tuple = _tuple_str('other', flds)
_set_new_attribute(cls, '__eq__',
_cmp_fn('__eq__', '==',
self_tuple, other_tuple))
if order:
# Create and set the ordering methods.
flds = [f for f in field_list if f.compare]
self_tuple = _tuple_str('self', flds)
other_tuple = _tuple_str('other', flds)
for name, op in [('__lt__', '<'),
('__le__', '<='),
('__gt__', '>'),
('__ge__', '>='),
]:
if _set_new_attribute(cls, name,
_cmp_fn(name, op, self_tuple, other_tuple)):
raise TypeError(f'Cannot overwrite attribute {name} '
f'in class {cls.__name__}. Consider using '
'functools.total_ordering')
if frozen:
for fn in _frozen_get_del_attr(cls, field_list):
if _set_new_attribute(cls, fn.__name__, fn):
raise TypeError(f'Cannot overwrite attribute {fn.__name__} '
f'in class {cls.__name__}')
# Decide if/how we're going to create a hash function.
hash_action = _hash_action[bool(unsafe_hash),
bool(eq),
bool(frozen),
has_explicit_hash]
if hash_action:
# No need to call _set_new_attribute here, since by the time
# we're here the overwriting is unconditional.
cls.__hash__ = hash_action(cls, field_list)
if not getattr(cls, '__doc__'):
# Create a class doc-string.
cls.__doc__ = (cls.__name__ +
str(inspect.signature(cls)).replace(' -> None', ''))
return cls
# _cls should never be specified by keyword, so start it with an
# underscore. The presence of _cls is used to detect if this
# decorator is being called with parameters or not.
def dataclass(_cls=None, *, init=True, repr=True, eq=True, order=False,
unsafe_hash=False, frozen=False):
"""Returns the same class as was passed in, with dunder methods
added based on the fields defined in the class.
Examines PEP 526 __annotations__ to determine fields.
If init is true, an __init__() method is added to the class. If
repr is true, a __repr__() method is added. If order is true, rich
comparison dunder methods are added. If unsafe_hash is true, a
__hash__() method function is added. If frozen is true, fields may
not be assigned to after instance creation.
"""
def wrap(cls):
return _process_class(cls, init, repr, eq, order, unsafe_hash, frozen)
# See if we're being called as @dataclass or @dataclass().
if _cls is None:
# We're called with parens.
return wrap
# We're called as @dataclass without parens.
return wrap(_cls)
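# Both spellings of the decorator funnel through wrap() above (a sketch):
#
#     @dataclass
#     class P:
#         x: int
#
#     @dataclass(frozen=True, order=True)
#     class Q:
#         x: int
#
#     assert P(1) == P(1)
#     assert Q(1) < Q(2)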
def fields(class_or_instance):
"""Return a tuple describing the fields of this dataclass.
Accepts a dataclass or an instance of one. Tuple elements are of
type Field.
"""
# Might it be worth caching this, per class?
try:
fields = getattr(class_or_instance, _FIELDS)
except AttributeError:
raise TypeError('must be called with a dataclass type or instance')
# Exclude pseudo-fields. Note that fields is sorted by insertion
# order, so the order of the tuple is as the fields were defined.
return tuple(f for f in fields.values() if f._field_type is _FIELD)
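# Quick illustration (a sketch):
#
#     @dataclass
#     class P:
#         x: int
#
#     [f.name for f in fields(P)]   # ['x']; ClassVar/InitVar are excluded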
def _is_dataclass_instance(obj):
"""Returns True if obj is an instance of a dataclass."""
return not isinstance(obj, type) and hasattr(obj, _FIELDS)
def is_dataclass(obj):
"""Returns True if obj is a dataclass or an instance of a
dataclass."""
return hasattr(obj, _FIELDS)
def asdict(obj, *, dict_factory=dict):
"""Return the fields of a dataclass instance as a new dictionary mapping
field names to field values.
Example usage:
@dataclass
class C:
x: int
y: int
c = C(1, 2)
assert asdict(c) == {'x': 1, 'y': 2}
If given, 'dict_factory' will be used instead of built-in dict.
The function applies recursively to field values that are
dataclass instances. This will also look into built-in containers:
tuples, lists, and dicts.
"""
if not _is_dataclass_instance(obj):
raise TypeError("asdict() should be called on dataclass instances")
return _asdict_inner(obj, dict_factory)
def _asdict_inner(obj, dict_factory):
if _is_dataclass_instance(obj):
result = []
for f in fields(obj):
value = _asdict_inner(getattr(obj, f.name), dict_factory)
result.append((f.name, value))
return dict_factory(result)
elif isinstance(obj, (list, tuple)):
return type(obj)(_asdict_inner(v, dict_factory) for v in obj)
elif isinstance(obj, dict):
return type(obj)((_asdict_inner(k, dict_factory), _asdict_inner(v, dict_factory))
for k, v in obj.items())
else:
return copy.deepcopy(obj)
def astuple(obj, *, tuple_factory=tuple):
"""Return the fields of a dataclass instance as a new tuple of field values.
Example usage::
@dataclass
class C:
x: int
y: int
c = C(1, 2)
assert astuple(c) == (1, 2)
If given, 'tuple_factory' will be used instead of built-in tuple.
The function applies recursively to field values that are
dataclass instances. This will also look into built-in containers:
tuples, lists, and dicts.
"""
if not _is_dataclass_instance(obj):
raise TypeError("astuple() should be called on dataclass instances")
return _astuple_inner(obj, tuple_factory)
def _astuple_inner(obj, tuple_factory):
if _is_dataclass_instance(obj):
result = []
for f in fields(obj):
value = _astuple_inner(getattr(obj, f.name), tuple_factory)
result.append(value)
return tuple_factory(result)
elif isinstance(obj, (list, tuple)):
return type(obj)(_astuple_inner(v, tuple_factory) for v in obj)
elif isinstance(obj, dict):
return type(obj)((_astuple_inner(k, tuple_factory), _astuple_inner(v, tuple_factory))
for k, v in obj.items())
else:
return copy.deepcopy(obj)
def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True,
repr=True, eq=True, order=False, unsafe_hash=False,
frozen=False):
"""Return a new dynamically created dataclass.
The dataclass name will be 'cls_name'. 'fields' is an iterable
of either (name), (name, type) or (name, type, Field) objects. If type is
omitted, use the string 'typing.Any'. Field objects are created by
the equivalent of calling 'field(name, type [, Field-info])'.
C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,))
is equivalent to:
@dataclass
class C(Base):
x: 'typing.Any'
y: int
z: int = field(init=False)
For the bases and namespace parameters, see the builtin type() function.
The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to
dataclass().
"""
if namespace is None:
namespace = {}
else:
# Copy namespace since we're going to mutate it.
namespace = namespace.copy()
# While we're looking through the field names, validate that they
# are identifiers, are not keywords, and not duplicates.
seen = set()
anns = {}
for item in fields:
if isinstance(item, str):
name = item
tp = 'typing.Any'
elif len(item) == 2:
name, tp = item
elif len(item) == 3:
name, tp, spec = item
namespace[name] = spec
else:
raise TypeError(f'Invalid field: {item!r}')
if not isinstance(name, str) or not name.isidentifier():
raise TypeError(f'Field names must be valid identifiers: {name!r}')
if keyword.iskeyword(name):
raise TypeError(f'Field names must not be keywords: {name!r}')
if name in seen:
raise TypeError(f'Field name duplicated: {name!r}')
seen.add(name)
anns[name] = tp
namespace['__annotations__'] = anns
# We use `types.new_class()` instead of simply `type()` to allow dynamic creation
# of generic dataclasses.
cls = types.new_class(cls_name, bases, {}, lambda ns: ns.update(namespace))
return dataclass(cls, init=init, repr=repr, eq=eq, order=order,
unsafe_hash=unsafe_hash, frozen=frozen)
def replace(obj, **changes):
"""Return a new object replacing specified fields with new values.
This is especially useful for frozen classes. Example usage:
@dataclass(frozen=True)
class C:
x: int
y: int
c = C(1, 2)
c1 = replace(c, x=3)
assert c1.x == 3 and c1.y == 2
"""
# We're going to mutate 'changes', but that's okay because it's a
# new dict, even if called with 'replace(obj, **my_changes)'.
if not _is_dataclass_instance(obj):
raise TypeError("replace() should be called on dataclass instances")
# It's an error to have init=False fields in 'changes'.
# If a field is not in 'changes', read its value from the provided obj.
for f in getattr(obj, _FIELDS).values():
# Only consider normal fields or InitVars.
if f._field_type is _FIELD_CLASSVAR:
continue
if not f.init:
# Error if this field is specified in changes.
if f.name in changes:
raise ValueError(f'field {f.name} is declared with '
'init=False, it cannot be specified with '
'replace()')
continue
if f.name not in changes:
if f._field_type is _FIELD_INITVAR:
raise ValueError(f"InitVar {f.name!r} "
'must be specified with replace()')
changes[f.name] = getattr(obj, f.name)
# Create the new object, which calls __init__() and
# __post_init__() (if defined), using all of the init fields we've
# added and/or left in 'changes'. If there are values supplied in
# changes that aren't fields, this will correctly raise a
# TypeError.
return obj.__class__(**changes) | zato-common-holocene | /zato-common-holocene-3.2.1.tar.gz/zato-common-holocene-3.2.1/src/zato/common/ext/_dataclasses.py | _dataclasses.py |
import imaplib
import io
import re
import email
import chardet
import base64
import quopri
import sys
import time
from datetime import datetime
from email.header import decode_header
from zato.common.ext.imbox.utils import str_encode, str_decode
import logging
logger = logging.getLogger(__name__)
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
def keys(self):
return self.__dict__.keys()
def __repr__(self):
return str(self.__dict__)
def decode_mail_header(value, default_charset='us-ascii'):
"""
Decode a header value into a unicode string.
"""
try:
headers = decode_header(value)
except email.errors.HeaderParseError:
return str_decode(str_encode(value, default_charset, 'replace'), default_charset)
else:
for index, (text, charset) in enumerate(headers):
logger.debug("Mail header no. {index}: {data} encoding {charset}".format(
index=index,
data=str_decode(text, charset or 'utf-8', 'replace'),
charset=charset))
try:
headers[index] = str_decode(text, charset or default_charset,
'replace')
except LookupError:
# if the charset is unknown, force default
headers[index] = str_decode(text, default_charset, 'replace')
return ''.join(headers)
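# Example (a sketch): an RFC 2047 encoded header such as
# '=?utf-8?q?Caf=C3=A9?=' decodes to 'Café'.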
def get_mail_addresses(message, header_name):
"""
Retrieve all email addresses from one message header.
"""
headers = message.get_all(header_name, [])
addresses = email.utils.getaddresses(headers)
for index, (address_name, address_email) in enumerate(addresses):
addresses[index] = {'name': decode_mail_header(address_name),
'email': address_email}
logger.debug("{} Mail address in message: <{}> {}".format(
header_name.upper(), address_name, address_email))
return addresses
def decode_param(param):
name, v = param.split('=', 1)
values = v.split('\n')
value_results = []
for value in values:
match = re.search(r'=\?((?:\w|-)+)\?([QB])\?(.+)\?=', value)
if match:
encoding, type_, code = match.groups()
if type_ == 'Q':
value = quopri.decodestring(code)
elif type_ == 'B':
value = base64.decodebytes(code.encode())
value = str_encode(value, encoding)
value_results.append(value)
if value_results:
v = ''.join(value_results)
logger.debug("Decoded parameter {} - {}".format(name, v))
return name, v
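# Example (a sketch): decode_param('name==?utf-8?B?Q2Fmw6k=?=')
# splits on the first '=', Base64-decodes the payload and returns
# ('name', 'Café').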
def parse_attachment(message_part):
# Check again if this is a valid attachment
content_disposition = message_part.get("Content-Disposition", None)
if content_disposition is not None and not message_part.is_multipart():
dispositions = [
disposition.strip()
for disposition in content_disposition.split(";")
if disposition.strip()
]
if dispositions[0].lower() in ["attachment", "inline"]:
file_data = message_part.get_payload(decode=True)
attachment = {
'content-type': message_part.get_content_type(),
'size': len(file_data),
'content': io.BytesIO(file_data),
'content-id': message_part.get("Content-ID", None)
}
filename = message_part.get_param('name')
if filename:
attachment['filename'] = filename
filename_parts = []
for param in dispositions[1:]:
if param:
name, value = decode_param(param)
# Check for split filename
s_name = name.split("*")
if s_name[0] == 'filename':
# If this is a split file name - use the number after the * as an index to insert this part
if len(s_name) > 1:
filename_parts.insert(int(s_name[1]), value[1:-1] if value.startswith('"') else value)
else:
filename_parts.insert(0, value[1:-1] if value.startswith('"') else value)
if 'create-date' in name:
attachment['create-date'] = value
attachment['filename'] = "".join(filename_parts)
return attachment
return None
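# When a part is recognised as an attachment, the returned mapping looks
# like this (illustrative values):
#
#     {'content-type': 'application/pdf', 'size': 1024,
#      'content': <BytesIO>, 'content-id': None, 'filename': 'report.pdf'}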
def decode_content(message):
content = message.get_payload(decode=True)
charset = message.get_content_charset('utf-8')
try:
return content.decode(charset, 'ignore')
except LookupError:
encoding = chardet.detect(content).get('encoding')
if encoding:
return content.decode(encoding, 'ignore')
return content
except AttributeError:
return content
def fetch_email_by_uid(uid, connection, parser_policy):
message, data = connection.uid('fetch', uid, '(BODY.PEEK[] FLAGS)')
logger.debug("Fetched message for UID {}".format(int(uid)))
raw_headers, raw_email = data[0]
email_object = parse_email(raw_email, policy=parser_policy)
flags = parse_flags(raw_headers.decode())
email_object.__dict__['flags'] = flags
return email_object
def parse_flags(headers):
"""Copied from https://github.com/girishramnani/gmail/blob/master/gmail/message.py"""
if len(headers) == 0:
return []
if sys.version_info[0] == 3:
headers = bytes(headers, "ascii")
return list(imaplib.ParseFlags(headers))
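# Example (a sketch; real input is the raw FETCH response from the server):
#
#     parse_flags('1 (FLAGS (\\Seen \\Answered))')
#     # -> [b'\\Seen', b'\\Answered']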
def parse_email(raw_email, policy=None):
if isinstance(raw_email, bytes):
raw_email = str_encode(raw_email, 'utf-8', errors='ignore')
if policy is not None:
email_parse_kwargs = dict(policy=policy)
else:
email_parse_kwargs = {}
try:
email_message = email.message_from_string(
raw_email, **email_parse_kwargs)
except UnicodeEncodeError:
# message_from_string() only accepts str under Python 3, so
# parse messages that cannot be handled as str from bytes instead.
email_message = email.message_from_bytes(
raw_email.encode('utf-8'), **email_parse_kwargs)
maintype = email_message.get_content_maintype()
parsed_email = {'raw_email': raw_email}
body = {
"plain": [],
"html": []
}
attachments = []
if maintype in ('multipart', 'image'):
logger.debug("Multipart message. Will process parts.")
for part in email_message.walk():
content_type = part.get_content_type()
part_maintype = part.get_content_maintype()
content_disposition = part.get('Content-Disposition', None)
if content_disposition or part_maintype != "text":
content = part.get_payload(decode=True)
else:
content = decode_content(part)
is_inline = content_disposition is None \
or content_disposition.startswith("inline")
if content_type == "text/plain" and is_inline:
body['plain'].append(content)
elif content_type == "text/html" and is_inline:
body['html'].append(content)
elif content_disposition:
attachment = parse_attachment(part)
if attachment:
attachments.append(attachment)
elif maintype == 'text':
payload = decode_content(email_message)
body['plain'].append(payload)
parsed_email['attachments'] = attachments
parsed_email['body'] = body
email_dict = dict(email_message.items())
parsed_email['sent_from'] = get_mail_addresses(email_message, 'from')
parsed_email['sent_to'] = get_mail_addresses(email_message, 'to')
parsed_email['cc'] = get_mail_addresses(email_message, 'cc')
parsed_email['bcc'] = get_mail_addresses(email_message, 'bcc')
value_headers_keys = ['subject', 'date', 'message-id']
key_value_header_keys = ['received-spf',
'mime-version',
'x-spam-status',
'x-spam-score',
'content-type']
parsed_email['headers'] = []
for key, value in email_dict.items():
if key.lower() in value_headers_keys:
valid_key_name = key.lower().replace('-', '_')
parsed_email[valid_key_name] = decode_mail_header(value)
if key.lower() in key_value_header_keys:
parsed_email['headers'].append({'Name': key,
'Value': value})
if parsed_email.get('date'):
timetuple = email.utils.parsedate(parsed_email['date'])
parsed_date = datetime.fromtimestamp(time.mktime(timetuple)) \
if timetuple else None
parsed_email['parsed_date'] = parsed_date
logger.info("Downloaded and parsed mail '{}' with {} attachments".format(
parsed_email.get('subject'), len(parsed_email.get('attachments'))))
    return Struct(**parsed_email)

# end of src/zato/common/ext/imbox/parser.py
import imaplib
from zato.common.ext.imbox.imap import ImapTransport
from zato.common.ext.imbox.messages import Messages
import logging
from zato.common.ext.imbox.vendors import GmailMessages, hostname_vendorname_dict, name_authentication_string_dict
logger = logging.getLogger(__name__)
class Imbox:
authentication_error_message = None
def __init__(self, hostname, username=None, password=None, ssl=True,
port=None, ssl_context=None, policy=None, starttls=False,
vendor=None):
self.server = ImapTransport(hostname, ssl=ssl, port=port,
ssl_context=ssl_context, starttls=starttls)
self.hostname = hostname
self.username = username
self.password = password
self.parser_policy = policy
self.vendor = vendor or hostname_vendorname_dict.get(self.hostname)
if self.vendor is not None:
self.authentication_error_message = name_authentication_string_dict.get(
self.vendor)
try:
self.connection = self.server.connect(username, password)
except imaplib.IMAP4.error as e:
if self.authentication_error_message is None:
raise
raise imaplib.IMAP4.error(
self.authentication_error_message + '\n' + str(e))
logger.info("Connected to IMAP Server with user {username} on {hostname}{ssl}".format(
hostname=hostname, username=username, ssl=(" over SSL" if ssl or starttls else "")))
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.logout()
def logout(self):
self.connection.close()
self.connection.logout()
logger.info("Disconnected from IMAP Server {username}@{hostname}".format(
hostname=self.hostname, username=self.username))
def mark_seen(self, uid):
logger.info("Mark UID {} with \\Seen FLAG".format(int(uid)))
self.connection.uid('STORE', uid, '+FLAGS', '(\\Seen)')
def mark_flag(self, uid):
logger.info("Mark UID {} with \\Flagged FLAG".format(int(uid)))
self.connection.uid('STORE', uid, '+FLAGS', '(\\Flagged)')
def delete(self, uid):
logger.info(
"Mark UID {} with \\Deleted FLAG and expunge.".format(int(uid)))
self.connection.uid('STORE', uid, '+FLAGS', '(\\Deleted)')
self.connection.expunge()
def copy(self, uid, destination_folder):
logger.info("Copy UID {} to {} folder".format(
int(uid), str(destination_folder)))
return self.connection.uid('COPY', uid, destination_folder)
def move(self, uid, destination_folder):
logger.info("Move UID {} to {} folder".format(
int(uid), str(destination_folder)))
if self.copy(uid, destination_folder):
self.delete(uid)
def messages(self, **kwargs):
folder = kwargs.get('folder', False)
messages_class = Messages
if self.vendor == 'gmail':
messages_class = GmailMessages
if folder:
self.connection.select(
messages_class.FOLDER_LOOKUP.get((folder.lower())) or folder)
msg = " from folder '{}'".format(folder)
del kwargs['folder']
else:
msg = " from inbox"
logger.info("Fetch list of messages{}".format(msg))
return messages_class(connection=self.connection,
parser_policy=self.parser_policy,
**kwargs)
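    # Hypothetical usage sketch - the host and credentials below are
    # illustrative only:
    #
    #   with Imbox('imap.example.com', username='user', password='secret') as imbox:
    #       for uid, message in imbox.messages(unread=True):
    #           print(message.subject)
    #           imbox.mark_seen(uid)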
def folders(self):
        return self.connection.list()

# end of src/zato/common/ext/imbox/imbox.py
import logging
from zato.common.ext.imbox.query import build_search_query
from zato.common.ext.imbox.parser import fetch_email_by_uid
logger = logging.getLogger(__name__)
class Messages:
IMAP_ATTRIBUTE_LOOKUP = {
'unread': '(UNSEEN)',
'flagged': '(FLAGGED)',
'unflagged': '(UNFLAGGED)',
'sent_from': '(FROM "{}")',
'sent_to': '(TO "{}")',
'date__gt': '(SINCE "{}")',
'date__lt': '(BEFORE "{}")',
'date__on': '(ON "{}")',
'subject': '(SUBJECT "{}")',
'uid__range': '(UID {})',
'text': '(TEXT "{}")',
}
FOLDER_LOOKUP = {}
def __init__(self,
connection,
parser_policy,
**kwargs):
self.connection = connection
self.parser_policy = parser_policy
self.kwargs = kwargs
self._uid_list = self._query_uids(**kwargs)
logger.debug("Fetch all messages for UID in {}".format(self._uid_list))
def _fetch_email(self, uid):
return fetch_email_by_uid(uid=uid,
connection=self.connection,
parser_policy=self.parser_policy)
def _query_uids(self, **kwargs):
query_ = build_search_query(self.IMAP_ATTRIBUTE_LOOKUP, **kwargs)
_, data = self.connection.uid('search', None, query_)
if data[0] is None:
return []
return data[0].split()
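    # For instance, keyword arguments such as unread=True or
    # sent_from='[email protected]' are mapped through IMAP_ATTRIBUTE_LOOKUP
    # above into an IMAP SEARCH query built by build_search_query,
    # conceptually: (UNSEEN) (FROM "[email protected]")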
def _fetch_email_list(self):
for uid in self._uid_list:
yield uid, self._fetch_email(uid)
def __repr__(self):
if len(self.kwargs) > 0:
return 'Messages({})'.format('\n'.join('{}={}'.format(key, value)
for key, value in self.kwargs.items()))
return 'Messages(ALL)'
def __iter__(self):
return self._fetch_email_list()
def __next__(self):
return self
def __len__(self):
return len(self._uid_list)
def __getitem__(self, index):
uids = self._uid_list[index]
if not isinstance(uids, list):
uid = uids
return uid, self._fetch_email(uid)
return [(uid, self._fetch_email(uid))
                for uid in uids]

# end of src/zato/common/ext/imbox/messages.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
import sys
# Zato
from zato.common.const import SECRETS
# ################################################################################################################################
# ################################################################################################################################
def resolve_secret_key(secret_key, _url_prefix=SECRETS.URL_PREFIX):
""" Finds a secret key among command line options or via environment variables.
"""
    # We always require a string; normalize bytes input up front
    # so that the prefix comparison below never mixes str with bytes.
    secret_key = secret_key or ''
    if isinstance(secret_key, bytes):
        secret_key = secret_key.decode('utf8')
    if isinstance(_url_prefix, bytes):
        _url_prefix = _url_prefix.decode('utf8')
# This is a direct value, to be used as-is
if not secret_key.startswith(_url_prefix):
return secret_key
else:
# We need to look it up somewhere
secret_key = secret_key.replace(_url_prefix, '', 1)
# Command line options
if secret_key.startswith('cli'):
# This will be used by check-config
for idx, elem in enumerate(sys.argv):
if elem == '--secret-key':
secret_key = sys.argv[idx+1]
break
# This will be used when components are invoked as subprocesses
else:
# To prevent circular imports
from zato.common.util.api import parse_cmd_line_options
cli_options = parse_cmd_line_options(sys.argv[1])
secret_key = cli_options['secret_key']
# Environment variables
elif secret_key.startswith('env'):
env_key = secret_key.replace('env.', '', 1)
secret_key = os.environ[env_key]
# Unknown scheme, we need to give up
else:
raise ValueError('Unknown secret key type `{}`'.format(secret_key))
# At this point, we have a secret key extracted in one way or another
return secret_key if isinstance(secret_key, bytes) else secret_key.encode('utf8')
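# A minimal usage sketch; it assumes that SECRETS.URL_PREFIX is the string
# 'zato+secret://' and the environment variable name is illustrative only:
#
#   os.environ['MY_ZATO_KEY'] = 'abcdef123456'
#   resolve_secret_key('zato+secret://env.MY_ZATO_KEY')  # -> b'abcdef123456'
#   resolve_secret_key('abcdef123456')                   # -> b'abcdef123456' (used as-is)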
# ################################################################################################################################
# ################################################################################################################################

# end of src/zato/common/crypto/secret_key.py
# stdlib
import base64
import logging
import os
from datetime import datetime
from math import ceil
# Bunch
from bunch import bunchify
# cryptography
from cryptography.fernet import Fernet, InvalidToken
# Python 2/3 compatibility
from builtins import bytes
# Zato
from zato.common.crypto.const import well_known_data, zato_stdin_prefix
from zato.common.ext.configobj_ import ConfigObj
from zato.common.json_internal import loads
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
class SecretKeyError(Exception):
pass
# ################################################################################################################################
class CryptoManager(object):
""" Used for encryption and decryption of secrets.
"""
def __init__(self, repo_dir=None, secret_key=None, stdin_data=None, well_known_data=None):
# We always get it on input rather than reading it directly because our caller
# may want to provide it to subprocesses in which case reading it in this process
# would consume it and the other process would not be able to access it.
self.stdin_data = stdin_data
# In case we have a repository directory on input, look up the secret keys and well known data here ..
if not secret_key:
if repo_dir:
secret_key, well_known_data = self.get_config(repo_dir)
# .. no matter if given on input or through repo_dir, we can set up crypto keys now.
self.set_config(secret_key, well_known_data)
# Callers will be able to register their hashing scheme which will end up in this dict by name
self.hash_scheme = {}
# ################################################################################################################################
def add_hash_scheme(self, name, rounds, salt_size):
""" Adds a new named PBKDF2 hashing scheme, i.e. a set of named variables and a hashing object.
"""
        # passlib
from passlib import hash as passlib_hash
self.hash_scheme[name] = passlib_hash.pbkdf2_sha512.using(rounds=rounds, salt_size=salt_size)
# ################################################################################################################################
def get_config(self, repo_dir):
raise NotImplementedError('Must be implemented by subclasses')
# ################################################################################################################################
def _find_secret_key(self, secret_key):
""" It's possible that what is in config files is not a secret key directly, but information where to find it,
e.g. in environment variables or stdin. This method looks it up in such cases.
"""
secret_key = secret_key.decode('utf8') if isinstance(secret_key, bytes) else secret_key
# Environment variables
if secret_key.startswith('$'):
try:
env_key = secret_key[1:].upper()
value = os.environ[env_key]
except KeyError:
raise SecretKeyError('Environment variable not found `{}`'.format(env_key))
# Read from stdin
elif secret_key.startswith(zato_stdin_prefix):
value = self.stdin_data
if not value:
raise SecretKeyError('No data provided on stdin')
elif not secret_key:
raise SecretKeyError('Secret key is missing')
# Use the value as it is
else:
value = secret_key
# Fernet keys always require encoding
value = value if isinstance(value, bytes) else value.encode('utf8')
# Create a transient key just to confirm that what we found was syntactically correct.
# Note that we use our own invalid backend which will not be used by Fernet for anything
# but we need to provide it to make sure Fernet.__init__ does not import its default backend.
try:
Fernet(value, backend='invalid')
except Exception as e:
raise SecretKeyError(e.args)
else:
return value
# ################################################################################################################################
def set_config(self, secret_key, well_known_data):
""" Sets crypto attributes and, to be double sure that they are correct,
decrypts well known data to itself in order to confirm that keys are valid / expected.
"""
key = self._find_secret_key(secret_key)
self.secret_key = Fernet(key)
self.well_known_data = well_known_data if well_known_data else None
if self.well_known_data:
self.check_consistency()
# ################################################################################################################################
def check_consistency(self):
""" Used as a consistency check to confirm that a given component's key can decrypt well-known data.
"""
try:
decrypted = self.decrypt(self.well_known_data)
except InvalidToken:
raise SecretKeyError('Invalid key, could not decrypt well-known data')
else:
if decrypted != well_known_data:
                raise SecretKeyError('Expected `{}` to be equal to `{}`'.format(decrypted, well_known_data))
# ################################################################################################################################
@staticmethod
def generate_key():
""" Creates a new random string for Fernet keys.
"""
return Fernet.generate_key()
# ################################################################################################################################
@staticmethod
def generate_secret(bits=256):
""" Generates a secret string of bits size.
"""
return base64.urlsafe_b64encode(os.urandom(int(bits / 8)))
# ################################################################################################################################
@staticmethod
def generate_password(bits=192, to_str=False):
""" Generates a string strong enough to be a password (default: 192 bits)
"""
# type: (int, bool) -> str
value = CryptoManager.generate_secret(bits)
return value.decode('utf8') if to_str else value
# ################################################################################################################################
@classmethod
def from_repo_dir(cls, secret_key, repo_dir, stdin_data):
""" Creates a new CryptoManager instance from a path to configuration file(s).
"""
return cls(secret_key=secret_key, repo_dir=repo_dir, stdin_data=stdin_data)
# ################################################################################################################################
@classmethod
def from_secret_key(cls, secret_key, well_known_data=None, stdin_data=None):
""" Creates a new CryptoManager instance from an already existing secret key.
"""
return cls(secret_key=secret_key, well_known_data=well_known_data, stdin_data=stdin_data)
# ################################################################################################################################
def encrypt(self, data):
""" Encrypts incoming data, which must be a string.
"""
if not isinstance(data, bytes):
data = data.encode('utf8')
return self.secret_key.encrypt(data)
# ################################################################################################################################
def decrypt(self, encrypted):
""" Returns input data in a clear-text, decrypted, form.
"""
if not isinstance(encrypted, bytes):
encrypted = encrypted.encode('utf8')
return self.secret_key.decrypt(encrypted).decode('utf8')
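    # Round-trip sketch:
    #
    #   cm = CryptoManager.from_secret_key(CryptoManager.generate_key())
    #   token = cm.encrypt('my-secret')
    #   cm.decrypt(token)  # -> 'my-secret'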
# ################################################################################################################################
def hash_secret(self, data, name='zato.default'):
""" Hashes input secret using a named configured (e.g. PBKDF2-SHA512, 100k rounds, salt 32 bytes).
"""
return self.hash_scheme[name].hash(data)
# ################################################################################################################################
def verify_hash(self, given, expected, name='zato.default'):
return self.hash_scheme[name].verify(given, expected)
# ################################################################################################################################
@staticmethod
def get_hash_rounds(goal, header_func=None, progress_func=None, footer_func=None):
return HashParamsComputer(goal, header_func, progress_func, footer_func).get_info()
# ################################################################################################################################
def get_config_entry(self, entry):
raise NotImplementedError('May be implemented by subclasses')
# ################################################################################################################################
class WebAdminCryptoManager(CryptoManager):
""" CryptoManager for web-admin instances.
"""
def get_config(self, repo_dir):
conf_path = os.path.join(repo_dir, 'web-admin.conf')
conf = bunchify(loads(open(conf_path).read()))
return conf['zato_secret_key'], conf['well_known_data']
# ################################################################################################################################
class SchedulerCryptoManager(CryptoManager):
""" CryptoManager for schedulers.
"""
def get_config(self, repo_dir):
conf_path = os.path.join(repo_dir, 'scheduler.conf')
conf = bunchify(ConfigObj(conf_path, use_zato=False))
return conf.secret_keys.key1, conf.crypto.well_known_data
# ################################################################################################################################
class ServerCryptoManager(CryptoManager):
""" CryptoManager for servers.
"""
def get_config(self, repo_dir):
conf_path = os.path.join(repo_dir, 'secrets.conf')
conf = bunchify(ConfigObj(conf_path, use_zato=False))
return conf.secret_keys.key1, conf.zato.well_known_data
# ################################################################################################################################
class HashParamsComputer(object):
""" Computes parameters for hashing purposes, e.g. number of rounds in PBKDF2.
"""
def __init__(self, goal, header_func=None, progress_func=None, footer_func=None, scheme='pbkdf2_sha512', loops=10,
iters_per_loop=10, salt_size=64, rounds_per_iter=25000):
        # passlib
from passlib import hash as passlib_hash
self.goal = goal
self.header_func = header_func
self.progress_func = progress_func
self.footer_func = footer_func
self.scheme = scheme
self.loops = loops
self.iters_per_loop = iters_per_loop
self.iters = self.loops * self.iters_per_loop
self.salt_size = salt_size
self.rounds_per_iter = rounds_per_iter
self.report_per_cent = 5.0
self.report_once_in = self.iters * self.report_per_cent / 100.0
self.hash_scheme = getattr(passlib_hash, scheme).using(salt_size=salt_size, rounds=rounds_per_iter)
self.cpu_info = self.get_cpu_info()
self._round_down_to_nearest = 1000
self._round_up_to_nearest = 5000
# ################################################################################################################################
def get_cpu_info(self):
""" Returns metadata about current CPU the computation is executed on.
"""
# py-cpuinfo
from cpuinfo import get_cpu_info
cpu_info = get_cpu_info()
return {
'brand': cpu_info['brand'],
'hz_actual': cpu_info['hz_actual']
}
# ################################################################################################################################
def get_info(self, _utcnow=datetime.utcnow):
if self.header_func:
self.header_func(self.cpu_info, self.goal)
all_results = []
current_iter = 0
current_loop = 0
        # We run several iterations to take into account sudden and unexpected CPU usage spikes;
        # outliers stemming from such spikes are rejected below by using the fastest loop only.
while current_loop < self.loops:
current_loop += 1
current_loop_iter = 0
current_loop_result = []
while current_loop_iter < self.iters_per_loop:
current_iter += 1
current_loop_iter += 1
start = _utcnow()
self.hash_scheme.hash(well_known_data)
current_loop_result.append((_utcnow() - start).total_seconds())
if self.progress_func:
if current_iter % self.report_once_in == 0:
per_cent = int((current_iter / self.iters) * 100)
self.progress_func(per_cent)
all_results.append(sum(current_loop_result) / len(current_loop_result))
        # This is the fastest (lowest) per-loop average of seconds needed to create a hash with self.rounds_per_iter rounds ..
sec_needed = min(all_results)
# .. we now need to extrapolate it to get the desired self.goal seconds.
rounds_per_second = int(self.rounds_per_iter / sec_needed)
rounds_per_second = self.round_down(rounds_per_second)
rounds = int(rounds_per_second * self.goal)
rounds = self.round_up(rounds)
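        # Worked example: if 25,000 rounds took 0.125 s per hash, then
        # rounds_per_second is 200,000 and, with a goal of 0.2 s,
        # rounds becomes 40,000 (already a multiple of 5,000 here).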
rounds_per_second_str = '{:,d}'.format(rounds_per_second)
rounds_str = '{:,d}'.format(rounds).rjust(len(rounds_per_second_str))
if self.footer_func:
self.footer_func(rounds_per_second_str, rounds_str)
return {
'rounds_per_second': int(rounds_per_second),
'rounds_per_second_str': rounds_per_second_str.strip(),
'rounds': int(rounds),
'rounds_str': rounds_str.strip(),
'cpu_info': self.cpu_info,
'algorithm': 'PBKDF2-SHA512',
'salt_size': self.salt_size,
}
# ################################################################################################################################
def round_down(self, value):
return int(round(value / self._round_down_to_nearest) * self._round_down_to_nearest)
# ################################################################################################################################
def round_up(self, value):
return int(ceil(value / self._round_up_to_nearest) * self._round_up_to_nearest)
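    # E.g. round_down(12345) -> 12000 while round_up(12345) -> 15000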
# ################################################################################################################################

# end of src/zato/common/crypto/api.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import errno
import logging
import os
import stat
import tempfile
from datetime import datetime, timedelta
from fcntl import fcntl
from io import StringIO
from traceback import format_exc
from uuid import uuid4
# gevent
from gevent import sleep
# pyrapidjson
from rapidjson import loads
# Python 2/3 compatibility
from builtins import bytes
# Zato
from zato.common.api import IPC
from zato.common.ipc.publisher import Publisher
from zato.common.ipc.subscriber import Subscriber
from zato.common.util.api import spawn_greenlet
from zato.common.util.file_system import fs_safe_name
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
fifo_create_mode = stat.S_IRUSR | stat.S_IWUSR
fifo_ignore_err = errno.EAGAIN, errno.EWOULDBLOCK
# On Linux, this is F_LINUX_SPECIFIC_BASE (1024) + 7
_F_SETPIPE_SZ = 1031
# ################################################################################################################################
class IPCAPI(object):
""" API through which IPC is performed.
"""
def __init__(self, name=None, on_message_callback=None, pid=None):
self.name = name
self.on_message_callback = on_message_callback
self.pid = pid
self.pid_publishers = {} # Target PID -> Publisher object connected to that target PID's subscriber socket
self.subscriber = None
# ################################################################################################################################
@staticmethod
def get_endpoint_name(cluster_name, server_name, target_pid):
return fs_safe_name('{}-{}-{}'.format(cluster_name, server_name, target_pid))
# ################################################################################################################################
def run(self):
self.subscriber = Subscriber(self.on_message_callback, self.name, self.pid)
spawn_greenlet(self.subscriber.serve_forever)
# ################################################################################################################################
def close(self):
if self.subscriber:
self.subscriber.close()
for publisher in self.pid_publishers.values():
publisher.close()
# ################################################################################################################################
def publish(self, payload):
self.publisher.publish(payload)
# ################################################################################################################################
def _get_pid_publisher(self, cluster_name, server_name, target_pid):
        # We do not have a publisher connected to that PID, so we need to create one ..
if target_pid not in self.pid_publishers:
# Create a publisher and sleep for a moment until it connects to the other socket
publisher = Publisher(self.get_endpoint_name(cluster_name, server_name, target_pid), self.pid)
# We can tolerate it because it happens only the very first time our PID invokes target_pid
sleep(0.1)
# We can now store it for later use
self.pid_publishers[target_pid] = publisher
# At this point we are sure we have a publisher for target PID
return self.pid_publishers[target_pid]
# ################################################################################################################################
def _get_response(self, fifo, buffer_size, read_size=21, fifo_ignore_err=fifo_ignore_err, empty=('', b'', None)):
try:
buff = StringIO()
data = object() # Use a sentinel because '' or None are expected from os.read
            # The most common use-case for IPC is publish/subscribe messages and the most
# common response is this: 'zs;{"r": {"r": null}}'
# which is 21 bytes.
while data not in empty:
data = os.read(fifo, read_size)
buff.write(data.decode('utf8') if isinstance(data, bytes) else data)
response = buff.getvalue()
status = response[:IPC.STATUS.LENGTH]
response = response[IPC.STATUS.LENGTH+1:] # Add 1 to account for the separator
is_success = status == IPC.STATUS.SUCCESS
if is_success:
response = loads(response) if response else ''
buff.close()
return is_success, response
except OSError as e:
if e.errno not in fifo_ignore_err:
raise
# ################################################################################################################################
def invoke_by_pid(self, service, payload, cluster_name, server_name, target_pid,
fifo_response_buffer_size, timeout=90, is_async=False, skip_response_elem=False):
""" Invokes a service through IPC, synchronously or in background. If target_pid is an exact PID then this one worker
process will be invoked if it exists at all.
"""
        # Create a FIFO pipe for replies to come back through
        fifo_path = os.path.join(tempfile.gettempdir(), 'zato-ipc-fifo-{}'.format(uuid4().hex))
os.mkfifo(fifo_path, fifo_create_mode)
try:
publisher = self._get_pid_publisher(cluster_name, server_name, target_pid)
publisher.publish(payload, service, target_pid, reply_to_fifo=fifo_path)
# Async = we do not need to wait for any response
if is_async:
return
is_success, response = False, None
try:
# Open the pipe for reading ..
fifo_fd = os.open(fifo_path, os.O_RDONLY | os.O_NONBLOCK)
fcntl(fifo_fd, _F_SETPIPE_SZ, 1000000)
# .. and wait for response ..
now = datetime.utcnow()
until = now + timedelta(seconds=timeout)
while now < until:
sleep(0.05)
is_success, response = self._get_response(fifo_fd, fifo_response_buffer_size)
if response:
break
else:
now = datetime.utcnow()
except Exception:
logger.warn('Exception in IPC FIFO, e:`%s`', format_exc())
finally:
os.close(fifo_fd)
return is_success, response
except Exception:
logger.warn(format_exc())
finally:
os.remove(fifo_path)
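    # Hypothetical usage sketch - all names, the PID and the buffer size
    # below are illustrative only:
    #
    #   api = IPCAPI(name='server1')
    #   is_success, response = api.invoke_by_pid(
    #       'zato.ping', {}, 'cluster1', 'server1', 4321,
    #       fifo_response_buffer_size=250000)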
# ################################################################################################################################

# end of src/zato/common/ipc/api.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
from datetime import datetime
from tempfile import gettempdir
# ZeroMQ
import zmq.green as zmq
# Zato
from zato.common.api import DATA_FORMAT, NO_DEFAULT_VALUE
from zato.common.util.api import get_logger_for_class, make_repr, new_cid, spawn_greenlet
# ################################################################################################################################
class Request(object):
def __init__(self, publisher_tag, publisher_pid, payload='', request_id=None):
self.publisher_tag = publisher_tag
self.publisher_pid = publisher_pid
self.action = NO_DEFAULT_VALUE
self.service = ''
self._payload = payload
self.payload_type = type(payload).__name__
self.data_format = DATA_FORMAT.DICT
self.request_id = request_id or 'ipc.{}'.format(new_cid())
self.target_pid = None
self.reply_to_tag = ''
self.reply_to_fifo = ''
self.in_reply_to = ''
self.creation_time_utc = datetime.utcnow()
@property
def payload(self):
return self._payload
@payload.setter
def payload(self, value):
self._payload = value
self.payload_type = type(self._payload)
def __repr__(self):
return make_repr(self)
# ################################################################################################################################
class IPCBase(object):
""" Base class for core IPC objects.
"""
def __init__(self, name, pid):
self.name = name
self.pid = pid
self.ctx = zmq.Context()
spawn_greenlet(self.set_up_sockets)
self.keep_running = True
self.logger = get_logger_for_class(self.__class__)
self.log_connected()
def __repr__(self):
return make_repr(self)
def set_up_sockets(self):
raise NotImplementedError('Needs to be implemented in subclasses')
def log_connected(self):
raise NotImplementedError('Needs to be implemented in subclasses')
def close(self):
raise NotImplementedError('Needs to be implemented in subclasses')
# ################################################################################################################################
class IPCEndpoint(IPCBase):
""" A participant in IPC conversations, i.e. either publisher or subscriber.
"""
socket_method = None
socket_type = None
def __init__(self, name, pid):
self.address = self.get_address(name)
super(IPCEndpoint, self).__init__(name, pid)
def get_address(self, address):
return 'ipc://{}'.format(os.path.join(gettempdir(), 'zato-ipc-{}'.format(address)))
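    # E.g. get_address('server1-4321') -> 'ipc:///tmp/zato-ipc-server1-4321',
    # with the exact directory coming from tempfile.gettempdir.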
def set_up_sockets(self):
self.socket = self.ctx.socket(getattr(zmq, self.socket_type.upper()))
self.socket.setsockopt(zmq.LINGER, 0)
getattr(self.socket, self.socket_method)(self.address)
def log_connected(self):
self.logger.info('Established %s/%s to %s (self.pid: %s)', self.socket_type, self.socket_method, self.address, self.pid)
def close(self):
self.keep_running = False
self.socket.close()
self.ctx.term()
# ################################################################################################################################

# end of src/zato/common/ipc/__init__.py
# ################################################################################################################################
# ################################################################################################################################
class FileTransferChannel(object):
def __init__(self):
self._config_attrs = []
self.id = None # type: int
self.name = None # type: str
self.is_active = None # type: bool
self.is_hot_deploy = None # type: bool
self.source_type = None # type: str
self.pickup_from = '' # type: str
self.parse_with = '' # type: str
self.ftp_source_id = None # type: int
self.line_by_line = None # type: bool
self.file_patterns = '' # type: str
self.service_list = None # type: list
self.topic_list = None # type: list
self.outconn_rest_list = None # type: list
self.read_on_pickup = None # type: bool
self.sftp_source_id = None # type: int
self.parse_on_pickup = None # type: bool
self.ftp_source_name = '' # type: str
self.sftp_source_name = '' # type: str
self.service_list_json = None # type: str
self.topic_list_json = None # type: str
self.outconn_rest_list_json = None # type: str
self.scheduler_job_id = None # type: int
self.move_processed_to = '' # type: str
self.delete_after_pickup = None # type: bool
# ################################################################################################################################
def to_dict(self):
out = {}
for name in self._config_attrs:
value = getattr(self, name)
out[name] = value
return out
# ################################################################################################################################
@staticmethod
def from_dict(config):
# type: (dict) -> FileTransferChannel
out = FileTransferChannel()
for k, v in config.items():
out._config_attrs.append(k)
setattr(out, k, v)
return out
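    # Round-trip sketch - the keys below are a small subset picked for
    # illustration:
    #
    #   channel = FileTransferChannel.from_dict({'id': 1, 'name': 'my.channel'})
    #   channel.to_dict()  # -> {'id': 1, 'name': 'my.channel'}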
# ################################################################################################################################
# ################################################################################################################################

# end of src/zato/common/model/file_transfer.py
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
# ################################################################################################################################
class BaseException(Exception):
pass
class AddressNotAllowed(BaseException):
pass
class RateLimitReached(BaseException):
pass
# ################################################################################################################################
# ################################################################################################################################
class Const:
from_any = '*'
rate_any = '*'
class Unit:
minute = 'm'
hour = 'h'
day = 'd'
@staticmethod
def all_units():
return set([Const.Unit.minute, Const.Unit.hour, Const.Unit.day])
# ################################################################################################################################
# ################################################################################################################################
class ObjectInfo(object):
""" Information about an individual object covered by rate limiting.
"""
__slots__ = 'type_', 'id', 'name'
def __init__(self):
self.type_ = None # type: str
self.id = None # type: int
self.name = None # type: str
# ################################################################################################################################
# ################################################################################################################################
class DefinitionItem(object):
__slots__ = 'config_line', 'from_', 'rate', 'unit', 'object_id', 'object_type', 'object_name'
def __init__(self):
self.config_line = None # type: int
self.from_ = None # type: object
self.rate = None # type: int
self.unit = None # type: str
self.object_id = None # type: int
self.object_type = None # type: str
self.object_name = None # type: str
def __repr__(self):
return '<{} at {}; line:{}, from:{}, rate:{}, unit:{} ({} {} {})>'.format(
self.__class__.__name__, hex(id(self)), self.config_line, self.from_, self.rate, self.unit,
self.object_id, self.object_name, self.object_type)
# ################################################################################################################################
# ################################################################################################################################

# end of src/zato/common/rate_limiting/common.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from logging import getLogger
# gevent
from gevent.lock import RLock
# netaddr
from netaddr import IPNetwork
# SQLAlchemy
from sqlalchemy import and_
# Zato
from zato.common.rate_limiting.common import Const, DefinitionItem, ObjectInfo
from zato.common.rate_limiting.limiter import Approximate, Exact, RateLimitStateDelete, RateLimitStateTable
# Python 2/3 compatibility
from past.builtins import unicode
# ################################################################################################################################
# Type checking
import typing
if typing.TYPE_CHECKING:
# stdlib
from typing import Callable
# Zato
from zato.common.rate_limiting.limiter import BaseLimiter
from zato.distlock import LockManager
# For pyflakes
BaseLimiter = BaseLimiter
Callable = Callable
LockManager = LockManager
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class DefinitionParser(object):
""" Parser for user-provided rate limiting definitions.
"""
@staticmethod
def get_lines(definition, object_id, object_type, object_name, parse_only=False):
# type: (unicode, int, unicode, unicode, bool) -> list
if not parse_only:
out = []
definition = definition if isinstance(definition, unicode) else definition.decode('utf8')
for idx, orig_line in enumerate(definition.splitlines(), 1): # type: int, unicode
line = orig_line.strip()
if (not line) or line.startswith('#'):
continue
line = line.split('=')
if len(line) != 2:
raise ValueError('Invalid definition line `{}`; (idx:{})'.format(orig_line, idx))
from_, rate_info = line # type: unicode, unicode
from_ = from_.strip()
if from_ != Const.from_any:
from_ = IPNetwork(from_)
rate_info = rate_info.strip()
if rate_info == Const.rate_any:
rate = Const.rate_any
unit = Const.Unit.day # This is arbitrary but it does not matter because there is no rate limit in effect
else:
rate, unit = rate_info.split('/') # type: unicode, unicode
rate = int(rate.strip())
unit = unit.strip()
all_units = Const.all_units()
if unit not in all_units:
raise ValueError('Unit `{}` is not one of `{}`'.format(unit, all_units))
# In parse-only mode we do not build any actual output
if parse_only:
continue
item = DefinitionItem()
item.config_line = idx
item.from_ = from_
item.rate = rate
item.unit = unit
item.object_id = object_id
item.object_type = object_type
item.object_name = object_name
out.append(item)
if not parse_only:
return out
# ################################################################################################################################
@staticmethod
def check_definition(definition):
# type: (unicode)
DefinitionParser.get_lines(definition.strip(), None, None, None, True)
# ################################################################################################################################
@staticmethod
def check_definition_from_input(input_data):
# type: (dict)
rate_limit_def = input_data.get('rate_limit_def') or ''
if rate_limit_def:
DefinitionParser.check_definition(rate_limit_def)
# ################################################################################################################################
def parse(self, definition, object_id, object_type, object_name):
# type: (unicode, int, unicode, unicode) -> list
return DefinitionParser.get_lines(definition.strip(), object_id, object_type, object_name)
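    # Example of the definition format parsed above - the addresses and rates
    # are illustrative:
    #
    #   parser = DefinitionParser()
    #   parser.parse('192.168.1.0/24 = 100/m\n* = 1000/h', 11, 'http_soap', 'My Channel')
    #   # -> [DefinitionItem(from_=IPNetwork('192.168.1.0/24'), rate=100, unit='m', ...),
    #   #     DefinitionItem(from_='*', rate=1000, unit='h', ...)]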
# ################################################################################################################################
# ################################################################################################################################
class RateLimiting(object):
""" Main API for the management of rate limiting functionality.
"""
__slots__ = 'parser', 'config_store', 'lock', 'sql_session_func', 'global_lock_func', 'cluster_id'
def __init__(self):
self.parser = DefinitionParser() # type: DefinitionParser
self.config_store = {} # type: dict
self.lock = RLock()
self.global_lock_func = None # type: LockManager
self.sql_session_func = None # type: Callable
self.cluster_id = None # type: int
# ################################################################################################################################
def _get_config_key(self, object_type, object_name):
# type: (unicode, unicode) -> unicode
return '{}:{}'.format(object_type, object_name)
# ################################################################################################################################
def _get_config_by_object(self, object_type, object_name):
# type: (unicode, unicode) -> BaseLimiter
return self.config_store.get(self._get_config_key(object_type, object_name))
# ################################################################################################################################
def _create_config(self, object_dict, definition, is_exact):
# type: (dict, unicode, bool) -> BaseLimiter
object_id = object_dict['id']
object_type = object_dict['type_']
object_name = object_dict['name']
info = ObjectInfo()
info.id = object_id
info.type_ = object_type
info.name = object_name
parsed = self.parser.parse(definition or '', object_id, object_type, object_name)
if parsed:
def_first = parsed[0]
has_from_any = def_first.from_ == Const.from_any
else:
has_from_any = False
config = Exact(self.cluster_id, self.sql_session_func) if is_exact else Approximate(self.cluster_id) # type: BaseLimiter
config.is_active = object_dict['is_active']
config.is_exact = is_exact
config.api = self
config.object_info = info
config.definition = parsed
config.parent_type = object_dict['parent_type']
config.parent_name = object_dict['parent_name']
if has_from_any:
config.has_from_any = has_from_any
config.from_any_rate = def_first.rate
config.from_any_unit = def_first.unit
config.from_any_object_id = object_id
config.from_any_object_type = object_type
config.from_any_object_name = object_name
return config
# ################################################################################################################################
def create(self, object_dict, definition, is_exact):
# type: (dict, unicode, bool)
config = self._create_config(object_dict, definition, is_exact)
self.config_store[config.get_config_key()] = config
# ################################################################################################################################
def check_limit(self, cid, object_type, object_name, from_, needs_warn=True):
""" Checks if input object has already reached its allotted usage limit.
"""
# type: (unicode, unicode, unicode, unicode)
with self.lock:
config = self._get_config_by_object(object_type, object_name)
# It is possible that we do not have configuration for such an object,
# in which case we will log a warning.
if config:
with config.lock:
config.check_limit(cid, from_)
else:
if needs_warn:
logger.warn('No such rate limiting object `%s` (%s)', object_name, object_type)
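    # Hypothetical end-to-end sketch - the object dictionary below is
    # illustrative only:
    #
    #   rate_limiting = RateLimiting()
    #   rate_limiting.cluster_id = 1
    #   rate_limiting.create({
    #       'id': 11, 'type_': 'http_soap', 'name': 'My Channel',
    #       'is_active': True, 'parent_type': None, 'parent_name': None,
    #   }, '* = 100/m', is_exact=False)
    #
    #   # Raises RateLimitReached once more than 100 requests arrive
    #   # within the current minute.
    #   rate_limiting.check_limit('cid1', 'http_soap', 'My Channel', '10.0.0.1')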
# ################################################################################################################################
def _delete_from_odb(self, object_type, object_id):
with closing(self.sql_session_func()) as session:
session.execute(RateLimitStateDelete().where(and_(
RateLimitStateTable.c.object_type==object_type,
RateLimitStateTable.c.object_id==object_id,
)))
session.commit()
# ################################################################################################################################
def _delete(self, object_type, object_name, remove_parent):
""" Deletes configuration for input data, optionally deleting references to it from all objects that depended on it.
Must be called with self.lock held.
"""
# type: (unicode, unicode, bool)
config_key = self._get_config_key(object_type, object_name)
limiter = self.config_store[config_key] # type: BaseLimiter
del self.config_store[config_key]
if limiter.is_exact:
self._delete_from_odb(object_type, limiter.object_info.id)
if remove_parent:
self._set_new_parent(object_type, object_name, None, None)
# ################################################################################################################################
def _set_new_parent(self, parent_type, old_parent_name, new_parent_type, new_parent_name):
""" Sets new parent for all configuration entries matching the old one. Must be called with self.lock held.
"""
# type: (unicode, unicode, unicode, unicode)
for child_config in self.config_store.values(): # type: BaseLimiter
object_info = child_config.object_info
# This is our own config
if object_info.type_ == parent_type and object_info.name == old_parent_name:
continue
# This object has a parent, possibly it is our very configuration
if child_config.has_parent:
# Yes, this is our config ..
if child_config.parent_type == parent_type and child_config.parent_name == old_parent_name:
# We typically want to change the parent's name but it is possible
# that both type and name will be None (in case we are removing a parent from a child object)
# which is why both are set here.
child_config.parent_type = new_parent_type
child_config.parent_name = new_parent_name
# ################################################################################################################################
def edit(self, object_type, old_object_name, object_dict, definition, is_exact):
""" Changes, in place, an existing configuration entry to input data.
"""
# type: (unicode, unicode, dict, unicode, bool)
# Note the whole of this operation is under self.lock to make sure the update is atomic
# from our callers' perspective.
with self.lock:
old_config = self._get_config_by_object(object_type, old_object_name)
if not old_config:
raise ValueError('Rate limiting object not found `{}` ({})'.format(old_object_name, object_type))
# Just to be sure we are doing the right thing, compare object types, old and new
if object_type != old_config.object_info.type_:
raise ValueError('Unexpected object_type, old:`{}`, new:`{}` ({}) ({})'.format(
old_config.object_info.type_, object_type, old_object_name, object_dict))
# Now, create a new config object ..
new_config = self._create_config(object_dict, definition, is_exact)
# .. in case it was a rename ..
if old_config.object_info.name != new_config.object_info.name:
# .. make all child objects depend on the new name, in case it changed
self._set_new_parent(object_type, old_object_name, new_config.object_info.type_, new_config.object_info.name)
# First, delete the old configuration, but do not delete any objects that depended on it
# because we are just editing the former, not deleting it altogether.
self._delete(object_type, old_object_name, False)
# Now, create a new key
self.config_store[new_config.get_config_key()] = new_config
# ################################################################################################################################
def delete(self, object_type, object_name):
""" Deletes configuration for input object and clears out parent references to it.
"""
# type: (unicode, unicode)
with self.lock:
self._delete(object_type, object_name, True)
# ################################################################################################################################
def _get_config(self, object_type, object_name):
""" Returns configuration for the input object, assumming we have it at all.
"""
# type: (unicode, unicode) -> BaseLimiter
config_key = self._get_config_key(object_type, object_name)
return self.config_store.get(config_key)
# ################################################################################################################################
def get_config(self, object_type, object_name):
# type: (unicode, unicode) -> BaseLimiter
with self.lock:
return self._get_config(object_type, object_name)
# ################################################################################################################################
def has_config(self, object_type, object_name):
# type: (unicode, unicode) -> bool
with self.lock:
return bool(self._get_config(object_type, object_name))
# ################################################################################################################################
def cleanup(self):
""" Invoked periodically by the scheduler - goes through all configuration elements and cleans up
all time periods that are no longer needed.
"""
for config in self.config_store.values(): # type: BaseLimiter
config.cleanup()
# ################################################################################################################################
# ################################################################################################################################

# end of src/zato/common/rate_limiting/__init__.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from contextlib import closing
from copy import deepcopy
from datetime import datetime
# gevent
from gevent.lock import RLock
# netaddr
from netaddr import IPAddress
# Zato
from zato.common.odb.model import RateLimitState
from zato.common.odb.query.rate_limiting import current_period_list, current_state as current_state_query
from zato.common.rate_limiting.common import Const, AddressNotAllowed, RateLimitReached
# Python 2/3 compatibility
from future.utils import iterkeys
# ################################################################################################################################
if 0:
# stdlib
from typing import Callable
# Zato
from zato.common.rate_limiting import Approximate as RateLimiterApproximate, RateLimiting
from zato.common.rate_limiting.common import DefinitionItem, ObjectInfo
# For pyflakes
Callable = Callable
DefinitionItem = DefinitionItem
ObjectInfo = ObjectInfo
RateLimiterApproximate = RateLimiterApproximate
RateLimiting = RateLimiting
# ################################################################################################################################
RateLimitStateTable = RateLimitState.__table__
RateLimitStateDelete = RateLimitStateTable.delete
# ################################################################################################################################
# ################################################################################################################################
class BaseLimiter(object):
""" A per-server, approximate, rate limiter object. It is approximate because it does not keep track
    of what the current rate limit state in other servers is.
"""
__slots__ = 'current_idx', 'lock', 'api', 'object_info', 'definition', 'has_from_any', 'from_any_rate', 'from_any_unit', \
'is_limit_reached', 'ip_address_cache', 'current_period_func', 'by_period', 'parent_type', 'parent_name', \
'is_exact', 'from_any_object_id', 'from_any_object_type', 'from_any_object_name', 'cluster_id', 'is_active', \
'invocation_no'
initial_state = {
'requests': 0,
'last_cid': None,
'last_request_time_utc': None,
'last_from': None,
'last_network': None,
}
def __init__(self, cluster_id):
# type: (int)
self.cluster_id = cluster_id
self.is_active = None
self.current_idx = 0
self.lock = RLock()
self.api = None # type: RateLimiting
self.object_info = None # type: ObjectInfo
self.definition = None # type: list
self.has_from_any = None # type: bool
self.from_any_rate = None # type: int
self.from_any_unit = None # type: str
self.ip_address_cache = {} # type: dict
self.by_period = {} # type: dict
self.parent_type = None # type: str
self.parent_name = None # type: str
self.is_exact = None # type: bool
self.invocation_no = 0 # type: int
self.from_any_object_id = None # type: int
self.from_any_object_type = None # type: str
self.from_any_object_name = None # type: str
self.current_period_func = {
Const.Unit.day: self._get_current_day,
Const.Unit.hour: self._get_current_hour,
Const.Unit.minute: self._get_current_minute,
}
# ################################################################################################################################
@property
def has_parent(self):
return self.parent_type and self.parent_name
# ################################################################################################################################
def cleanup(self):
""" Cleans up time periods that are no longer needed.
"""
with self.lock:
# First, periodically clear out the IP cache to limit its size to 1,000 items
if len(self.ip_address_cache) >= 1000:
self.ip_address_cache.clear()
now = datetime.utcnow()
current_minute = self._get_current_minute(now)
current_hour = self._get_current_hour(now)
current_day = self._get_current_day(now)
# We need a copy so as not to modify the dict in place
periods = self._get_current_periods()
to_delete = set()
current_periods_map = {
Const.Unit.minute: current_minute,
Const.Unit.hour: current_hour,
Const.Unit.day: current_day
}
for period in periods: # type: str
period_unit = period[0] # type: str # One of Const.Unit instances
current_period = current_periods_map[period_unit]
# If this period is in the past, add it to the ones to be deleted
if period < current_period:
to_delete.add(period)
if to_delete:
self._delete_periods(to_delete)
# ################################################################################################################################
def rewrite_rate_data(self, old_config):
""" Writes rate limiting information from old configuration to our own. Used by RateLimiting.edit action.
"""
# type: (RateLimiterApproximate)
# Already collected rate limits
self.by_period.clear()
self.by_period.update(old_config.by_period)
# ################################################################################################################################
def get_config_key(self):
# type: () -> str
return '{}:{}'.format(self.object_info.type_, self.object_info.name)
# ################################################################################################################################
def _get_rate_config_by_from(self, orig_from, _from_any=Const.from_any):
# type: (str, str) -> DefinitionItem
from_ = self.ip_address_cache.setdefault(orig_from, IPAddress(orig_from)) # type: IPAddress
found = None
for line in self.definition: # type: DefinitionItem
# A catch-all * pattern
if line.from_ == _from_any:
found = line
break
# A network match
elif from_ in line.from_:
found = line
break
# We did not match any line from configuration
if not found:
raise AddressNotAllowed('Address not allowed `{}`'.format(orig_from))
# We found a matching piece of from IP configuration
return found
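    # Matching sketch: given a definition line of '192.168.1.0/24 = 100/m',
    # an incoming address of '192.168.1.5' matches because
    # IPAddress('192.168.1.5') in IPNetwork('192.168.1.0/24') is True.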
# ################################################################################################################################
def _get_current_day(self, now, _prefix=Const.Unit.day, _format='%Y-%m-%d'):
# type: (datetime, str, str) -> str
return '{}.{}'.format(_prefix, now.strftime(_format))
def _get_current_hour(self, now, _prefix=Const.Unit.hour, _format='%Y-%m-%dT%H'):
# type: (datetime, str, str) -> str
return '{}.{}'.format(_prefix, now.strftime(_format))
def _get_current_minute(self, now, _prefix=Const.Unit.minute, _format='%Y-%m-%dT%H:%M'):
# type: (datetime, str, str) -> str
return '{}.{}'.format(_prefix, now.strftime(_format))
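    # The helpers above produce keys such as 'd.2021-01-26', 'h.2021-01-26T15'
    # and 'm.2021-01-26T15:30'. Within one unit the keys sort lexicographically
    # in chronological order, which is what the `period < current_period`
    # comparison in cleanup relies on.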
# ################################################################################################################################
def _format_last_info(self, current_state):
# type: (dict) -> str
return 'last_from:`{last_from}; last_request_time_utc:`{last_request_time_utc}; last_cid:`{last_cid}`;'.format(
**current_state)
# ################################################################################################################################
def _raise_rate_limit_exceeded(self, rate, unit, orig_from, network_found, current_state, cid,
def_object_id, def_object_name, def_object_type):
raise RateLimitReached('Max. rate limit of {}/{} reached; from:`{}`, network:`{}`; {} (cid:{}) (def:{} {} {})'.format(
rate, unit, orig_from, network_found, self._format_last_info(current_state), cid, def_object_id, def_object_type,
def_object_name))
# ################################################################################################################################
def _check_limit(self, cid, orig_from, network_found, rate, unit, def_object_id, def_object_name, def_object_type,
_rate_any=Const.rate_any, _utcnow=datetime.utcnow):
# type: (str, str, str, int, str, str, object, str, str)
# Increase invocation counter
self.invocation_no += 1
# Local aliases
now = _utcnow()
# Get current period, e.g. current day, hour or minute
current_period_func = self.current_period_func[unit]
current_period = current_period_func(now)
current_state = self._get_current_state(current_period, network_found)
# Unless we are allowed to have any rate ..
if rate != _rate_any:
# We may have reached the limit already ..
if current_state['requests'] >= rate:
self._raise_rate_limit_exceeded(rate, unit, orig_from, network_found, current_state, cid,
def_object_id, def_object_name, def_object_type)
# Update current metadata state
self._set_new_state(current_state, cid, orig_from, network_found, now, current_period)
# Above, we checked our own rate limit but it is still possible that we have a parent
# that also wants to check it.
if self.has_parent:
self.api.check_limit(cid, self.parent_type, self.parent_name, orig_from)
# Clean up old entries periodically
if self.invocation_no % 1000 == 0:
self.cleanup()
# ################################################################################################################################
def check_limit(self, cid, orig_from):
# type: (str, str)
with self.lock:
if self.has_from_any:
rate = self.from_any_rate
unit = self.from_any_unit
network_found = Const.from_any
def_object_id = None
def_object_type = None
def_object_name = None
else:
found = self._get_rate_config_by_from(orig_from)
rate = found.rate
unit = found.unit
network_found = found.from_
def_object_id = found.object_id
def_object_type = found.object_type
def_object_name = found.object_name
# Now, check actual rate limits
self._check_limit(cid, orig_from, network_found, rate, unit, def_object_id, def_object_name, def_object_type)
# ################################################################################################################################
def _get_current_periods(self):
raise NotImplementedError()
    # Methods that concrete subclasses are expected to override - the base versions merely raise NotImplementedError
    _get_current_state = _set_new_state = _delete_periods = _get_current_periods
# ################################################################################################################################
# ################################################################################################################################
class Approximate(BaseLimiter):
def _get_current_periods(self):
return list(iterkeys(self.by_period))
# ################################################################################################################################
def _delete_periods(self, to_delete):
for item in to_delete: # item: str
del self.by_period[item]
# ################################################################################################################################
def _get_current_state(self, current_period, network_found):
# type: (str, str) -> dict
# Get or create a dictionary of requests information for current period
period_dict = self.by_period.setdefault(current_period, {}) # type: dict
# Get information about already stored requests for that network in current period
return period_dict.setdefault(network_found, deepcopy(self.initial_state))
# ################################################################################################################################
def _set_new_state(self, current_state, cid, orig_from, network_found, now, *ignored):
current_state['requests'] += 1
current_state['last_cid'] = cid
current_state['last_request_time_utc'] = now.isoformat()
current_state['last_from'] = orig_from
current_state['last_network'] = str(network_found)
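    # After a few requests, self.by_period is a nested dict along these lines
    # (the period key and network below are illustrative values only):
    #
    #   {'d.2021-01-30': {
    #       '10.0.0.0/24': {
    #           'requests': 3,
    #           'last_cid': '8a61a1d2b49f4b63f2d94c57',
    #           'last_request_time_utc': '2021-01-30T10:42:00',
    #           'last_from': '10.0.0.7',
    #           'last_network': '10.0.0.0/24',
    #       }
    #   }}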
# ################################################################################################################################
# ################################################################################################################################
class Exact(BaseLimiter):
def __init__(self, cluster_id, sql_session_func):
# type: (int, Callable)
super(Exact, self).__init__(cluster_id)
self.sql_session_func = sql_session_func
# ################################################################################################################################
def _fetch_current_state(self, session, current_period, network_found):
        # type: (object, str, str) -> RateLimitState
# We have a complex Python object but for the query we just need its string representation
network_found = str(network_found)
return current_state_query(session, self.cluster_id, self.object_info.type_, self.object_info.id,
current_period, network_found).\
first()
# ################################################################################################################################
def _get_current_state(self, current_period, network_found):
# type: (str, str) -> dict
current_state = deepcopy(self.initial_state) # type: dict
with closing(self.sql_session_func()) as session:
item = self._fetch_current_state(session, current_period, network_found)
if item:
current_state.update(item.asdict())
return current_state
# ################################################################################################################################
def _set_new_state(self, current_state, cid, orig_from, network_found, now, current_period):
# We just need a string representation of this object
network_found = str(network_found)
with closing(self.sql_session_func()) as session:
item = self._fetch_current_state(session, current_period, network_found)
if item:
item.last_cid = cid
item.last_from = orig_from
item.last_request_time_utc = now
else:
item = RateLimitState()
item.cluster_id = self.cluster_id
item.object_type = self.object_info.type_
item.object_id = self.object_info.id
item.requests = 0
item.period = current_period
item.network = network_found
item.last_cid = cid
item.last_from = orig_from
item.last_network = network_found
item.last_request_time_utc = now
item.requests += 1
session.add(item)
session.commit()
# ################################################################################################################################
def _get_current_periods(self):
with closing(self.sql_session_func()) as session:
return [elem[0] for elem in current_period_list(session, self.cluster_id).\
all()]
# ################################################################################################################################
def _delete_periods(self, to_delete):
with closing(self.sql_session_func()) as session:
session.execute(RateLimitStateDelete().where(
RateLimitStateTable.c.period.in_(to_delete)
))
session.commit()
# ################################################################################################################################
# ################################################################################################################################ | zato-common-holocene | /zato-common-holocene-3.2.1.tar.gz/zato-common-holocene-3.2.1/src/zato/common/rate_limiting/limiter.py | limiter.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato - Cython
from zato.simpleio import BoolConfig, IntConfig, SecretConfig, SIOServerConfig
# Python 2/3 compatibility
from past.builtins import basestring, unicode
# ################################################################################################################################
if 0:
from zato.cy.simpleio import BoolConfig as PyBoolConfig
from zato.cy.simpleio import IntConfig as PyIntConfig
from zato.cy.simpleio import SecretConfig as PySecretConfig
from zato.cy.simpleio import SIOServerConfig as PySIOServerConfig
PyBoolConfig = PyBoolConfig
PyIntConfig = PyIntConfig
PySecretConfig = PySecretConfig
PySIOServerConfig = PySIOServerConfig
# ################################################################################################################################
# ################################################################################################################################
def get_bytes_to_str_encoding():
return 'utf8'
# ################################################################################################################################
default_input_required_name = 'input_required'
default_input_optional_name = 'input_optional'
default_output_required_name = 'output_required'
default_output_optional_name = 'output_optional'
default_value = 'default_value'
default_input_value = 'default_input_value'
default_output_value = 'default_output_value'
default_response_elem = 'response'
default_skip_empty_keys = False
default_skip_empty_request_keys = False
default_skip_empty_response_keys = False
default_prefix_as_is = 'a'
default_prefix_bool = 'b'
default_prefix_csv = 'c'
default_prefix_date = 'date'
default_prefix_date_time = 'dt'
default_prefix_dict = 'd'
default_prefix_dict_list = 'dl'
default_prefix_float = 'f'
default_prefix_int = 'i'
default_prefix_list = 'l'
default_prefix_text = 't'
default_prefix_uuid = 'u'
simple_io_conf_contents = f"""
[bool]
exact=
prefix=by_, has_, is_, may_, needs_, should_
suffix=
[int]
exact=id
prefix=
suffix=_count, _id, _size, _size_min, _size_max, _timeout
[secret]
exact=auth_data, auth_token, password, password1, password2, secret_key, tls_pem_passphrase, token
prefix=
suffix=
[bytes_to_str]
encoding={{bytes_to_str_encoding}}
[default]
default_value=
default_input_value=
default_output_value=
response_elem=response
skip_empty_keys = False
skip_empty_request_keys = False
skip_empty_response_keys = False
# Configuration below is reserved for future use
input_required_name = "input_required"
input_optional_name = "input_optional"
output_required_name = "output_required"
output_optional_name = "output_optional"
prefix_as_is = {default_prefix_as_is}
prefix_bool = {default_prefix_bool}
prefix_csv = {default_prefix_csv}
prefix_date = {default_prefix_date}
prefix_date_time = {default_prefix_date_time}
prefix_dict = {default_prefix_dict}
prefix_dict_list = {default_prefix_dict_list}
prefix_float = {default_prefix_float}
prefix_int = {default_prefix_int}
prefix_list = {default_prefix_list}
prefix_text = {default_prefix_text}
prefix_uuid = {default_prefix_uuid}
""".lstrip()
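# Note that simple_io_conf_contents is an f-string, which is why the doubled braces above
# survive as a literal '{bytes_to_str_encoding}' placeholder. A minimal sketch, assuming
# the template is later rendered with str.format (an illustrative assumption):
if 0:
    rendered = simple_io_conf_contents.format(bytes_to_str_encoding=get_bytes_to_str_encoding())
    # The [bytes_to_str] section of 'rendered' now reads 'encoding=utf8'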
# ################################################################################################################################
def c18n_sio_fs_config(sio_fs_config):
for name in 'bool', 'int', 'secret':
config_entry = sio_fs_config[name]
exact = config_entry.get('exact') or []
exact = exact if isinstance(exact, list) else [exact]
prefix = config_entry.get('prefix') or []
prefix = prefix if isinstance(prefix, list) else [prefix]
suffix = config_entry.get('suffix') or []
suffix = suffix if isinstance(suffix, list) else [suffix]
config_entry.exact = set(exact)
config_entry.prefix = set(prefix)
config_entry.suffix = set(suffix)
for key, value in sio_fs_config.get('default', {}).items():
if isinstance(value, basestring):
if not isinstance(value, unicode):
value = value.decode('utf8')
sio_fs_config.default[key] = value
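# A minimal sketch of what c18n_sio_fs_config does to its input - the Bunch class below is
# a tiny stand-in, for illustration only, for the dict-with-attribute-access objects that
# actual configuration is loaded into.
if 0:
    class Bunch(dict):
        __getattr__ = dict.__getitem__
        __setattr__ = dict.__setitem__
    sio_fs_config = Bunch()
    sio_fs_config['bool'] = Bunch(exact='', prefix=['by_', 'has_'], suffix='')
    sio_fs_config['int'] = Bunch(exact='id', prefix='', suffix=['_count', '_id'])
    sio_fs_config['secret'] = Bunch(exact=['password', 'token'], prefix='', suffix='')
    c18n_sio_fs_config(sio_fs_config)
    # sio_fs_config['bool'].prefix is now the set {'by_', 'has_'} and .exact is an empty set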
# ################################################################################################################################
def get_sio_server_config(sio_fs_config):
c18n_sio_fs_config(sio_fs_config)
sio_server_config = SIOServerConfig() # type: PySIOServerConfig
bool_config = BoolConfig() # type: PyBoolConfig
bool_config.exact = sio_fs_config.bool.exact
bool_config.prefixes = sio_fs_config.bool.prefix
bool_config.suffixes = sio_fs_config.bool.suffix
int_config = IntConfig() # type: PyIntConfig
int_config.exact = sio_fs_config.int.exact
int_config.prefixes = sio_fs_config.int.prefix
int_config.suffixes = sio_fs_config.int.suffix
secret_config = SecretConfig() # type: PySecretConfig
secret_config.exact = sio_fs_config.secret.exact
secret_config.prefixes = sio_fs_config.secret.prefix
secret_config.suffixes = sio_fs_config.secret.suffix
sio_server_config.bool_config = bool_config
sio_server_config.int_config = int_config
sio_server_config.secret_config = secret_config
sio_fs_config_default = sio_fs_config.get('default')
if sio_fs_config_default:
sio_server_config.input_required_name = sio_fs_config.default.get('input_required_name', default_input_required_name)
sio_server_config.input_optional_name = sio_fs_config.default.get('input_optional_name', default_input_optional_name)
sio_server_config.output_required_name = sio_fs_config.default.get('output_required_name', default_output_required_name)
sio_server_config.output_optional_name = sio_fs_config.default.get('output_optional_name', default_output_optional_name)
sio_server_config.default_value = sio_fs_config.default.get('default_value', default_value)
sio_server_config.default_input_value = sio_fs_config.default.get('default_input_value', default_input_value)
sio_server_config.default_output_value = sio_fs_config.default.get('default_output_value', default_output_value)
sio_server_config.response_elem = sio_fs_config.default.get('response_elem', default_response_elem)
sio_server_config.skip_empty_keys = sio_fs_config.default.get('skip_empty_keys', default_skip_empty_keys)
sio_server_config.skip_empty_request_keys = sio_fs_config.default.get(
'skip_empty_request_keys', default_skip_empty_request_keys)
sio_server_config.skip_empty_response_keys = sio_fs_config.default.get(
'skip_empty_response_keys', default_skip_empty_response_keys)
sio_server_config.prefix_as_is = sio_fs_config.default.get('prefix_as_is', default_prefix_as_is)
sio_server_config.prefix_bool = sio_fs_config.default.get('prefix_bool', default_prefix_bool)
sio_server_config.prefix_csv = sio_fs_config.default.get('prefix_csv', default_prefix_csv)
sio_server_config.prefix_date = sio_fs_config.default.get('prefix_date', default_prefix_date)
sio_server_config.prefix_date_time = sio_fs_config.default.get('prefix_date_time', default_prefix_date_time)
sio_server_config.prefix_dict = sio_fs_config.default.get('prefix_dict', default_prefix_dict)
sio_server_config.prefix_dict_list = sio_fs_config.default.get('prefix_dict_list', default_prefix_dict_list)
sio_server_config.prefix_float = sio_fs_config.default.get('prefix_float', default_prefix_float)
sio_server_config.prefix_int = sio_fs_config.default.get('prefix_int', default_prefix_int)
sio_server_config.prefix_list = sio_fs_config.default.get('prefix_list', default_prefix_list)
sio_server_config.prefix_text = sio_fs_config.default.get('prefix_text', default_prefix_text)
sio_server_config.prefix_uuid = sio_fs_config.default.get('prefix_uuid', default_prefix_uuid)
else:
sio_server_config.input_required_name = default_input_required_name
sio_server_config.input_optional_name = default_input_optional_name
sio_server_config.output_required_name = default_output_required_name
sio_server_config.output_optional_name = default_output_optional_name
sio_server_config.default_value = default_value
sio_server_config.default_input_value = default_input_value
sio_server_config.default_output_value = default_output_value
sio_server_config.response_elem = default_response_elem
sio_server_config.skip_empty_keys = default_skip_empty_keys
sio_server_config.skip_empty_request_keys = default_skip_empty_request_keys
sio_server_config.skip_empty_response_keys = default_skip_empty_response_keys
bytes_to_str_encoding = sio_fs_config.bytes_to_str.encoding
if not isinstance(bytes_to_str_encoding, unicode):
bytes_to_str_encoding = bytes_to_str_encoding.decode('utf8')
sio_server_config.bytes_to_str_encoding = bytes_to_str_encoding
sio_server_config.json_encoder.bytes_to_str_encoding = bytes_to_str_encoding
return sio_server_config
# ################################################################################################################################
def drop_sio_elems(elems, *to_drop):
    """ Returns a new list of SIO elements with the given ones dropped, de-duplicating input on the way.
    """
    out = list(set(elems))
    for elem in to_drop:
        out.remove(elem)
    return out
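# For instance, drop_sio_elems(['cluster_id', 'id', 'id', 'password'], 'password') returns
# ['cluster_id', 'id'] - note that the result's ordering is arbitrary because the input
# is de-duplicated through a set first.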
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/simpleio_.py | simpleio_.py |
# Zato
from datetime import datetime, timedelta
from zato.common.odb.model import KVData as KVDataModel
from zato.common.typing_ import dataclass, optional
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.odb.api import SessionWrapper
SessionWrapper = SessionWrapper
# ################################################################################################################################
# ################################################################################################################################
utcnow = datetime.utcnow
default_expiry_time = datetime(year=2345, month=12, day=31)
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class KeyCtx:
key: str
value: optional[str] = None
data_type: str = 'string'
creation_time: datetime = None
expiry_time: optional[datetime] = None
# ################################################################################################################################
# ################################################################################################################################
class KVDataAPI:
def __init__(self, cluster_id, session_wrapper):
# type: (int, SessionWrapper) -> None
self.cluster_id = cluster_id
self.session_wrapper = session_wrapper
# ################################################################################################################################
def _get_session(self):
return self.session_wrapper.session()
# ################################################################################################################################
def get(self, key):
# type: (str) -> optional[KeyCtx]
# We always operate on bytes
key = key.encode('utf8') if isinstance(key, str) else key
# Get a new SQL session ..
session = self._get_session()
# .. prepare the query ..
query = session.query(KVDataModel).\
filter(KVDataModel.cluster_id==self.cluster_id).\
filter(KVDataModel.key==key).\
filter(KVDataModel.expiry_time > utcnow())
# .. run it ..
result = query.first() # type: KVDataModel
# .. convert the result to a business object ..
if result:
ctx = KeyCtx()
ctx.key = result.key.decode('utf8')
ctx.value = result.value
ctx.data_type = result.data_type
ctx.creation_time = result.creation_time
ctx.expiry_time = result.expiry_time
if ctx.value:
ctx.value = ctx.value.decode('utf8')
return ctx
# ################################################################################################################################
def set(self, key, value, expiry_sec=None, expiry_time=None):
        # type: (str, str, int, datetime) -> None
        ctx = KeyCtx()
        ctx.key = key
        ctx.value = value
        # Either an explicit expiry time or a number of seconds from now is required on input
        ctx.expiry_time = expiry_time if expiry_time else utcnow() + timedelta(seconds=expiry_sec)
        self.set_with_ctx(ctx)
self.set_with_ctx(ctx)
# ################################################################################################################################
def set_with_ctx(self, ctx, data_type='string'):
# type: (KeyCtx, str) -> None
key = ctx.key.encode('utf8') if isinstance(ctx.key, str) else ctx.key
value = ctx.value.encode('utf8') if isinstance(ctx.value, str) else ctx.value
item = KVDataModel()
item.cluster_id = self.cluster_id
item.key = key
item.value = value
item.creation_time = ctx.creation_time or utcnow()
item.expiry_time = ctx.expiry_time or default_expiry_time
session = self._get_session()
session.add(item)
session.commit()
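# A minimal usage sketch - not executed - assuming an already built SessionWrapper whose
# .session() method returns SQLAlchemy sessions (all names below are illustrative only).
if 0:
    session_wrapper = None # Would be an actual SessionWrapper instance
    kv_data_api = KVDataAPI(cluster_id=1, session_wrapper=session_wrapper)
    # Store a value for one hour ..
    kv_data_api.set('my.key', 'my.value', expiry_sec=3600)
    # .. and read it back.
    ctx = kv_data_api.get('my.key')
    if ctx:
        print(ctx.key, ctx.value, ctx.expiry_time)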
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/kv_data.py | kv_data.py |
# stdlib
from logging import getLogger
# Python 2/3 compatibility
from past.builtins import unicode
# Zato
from zato.common.api import GENERIC
from zato.common.util.api import new_cid
from zato.common.util.time_ import utcnow_as_ms
# ################################################################################################################################
logger = getLogger('zato_pubsub.msg')
logger_zato = getLogger('zato')
# ################################################################################################################################
sk_lists = ('reply_to_sk', 'deliver_to_sk')
skip_to_external = ('delivery_status', 'topic_id', 'cluster_id', 'pub_pattern_matched', 'sub_pattern_matched',
    'published_by_id', 'data_prefix', 'data_prefix_short', 'pub_time', 'expiration_time', 'recv_time',
    'pub_msg_id', 'pub_correl_id', 'zato_ctx') + sk_lists
_data_keys = ('data', 'data_prefix', 'data_prefix_short')
msg_pub_attrs = ('topic', 'sub_key', 'pub_msg_id', 'pub_correl_id', 'in_reply_to', 'ext_client_id', 'group_id',
'position_in_group', 'pub_time', 'ext_pub_time', 'data', 'data_prefix', 'data_prefix_short', 'mime_type', 'priority',
'expiration', 'expiration_time', 'has_gd', 'delivery_status', 'size', 'published_by_id', 'topic_id',
'is_in_sub_queue', 'topic_name', 'cluster_id', 'pub_time_iso', 'ext_pub_time_iso', 'expiration_time_iso',
'recv_time', 'data_prefix_short', 'server_name', 'server_pid', 'pub_pattern_matched', 'sub_pattern_matched',
'delivery_count', 'user_ctx', 'zato_ctx')
class MSG_PREFIX:
GROUP_ID = 'zpsg'
MSG_ID = 'zpsm'
SUB_KEY = 'zpsk'
SERVICE_SK = 'zpsk.srv'
# ################################################################################################################################
def new_msg_id(_new_cid=new_cid, _prefix=MSG_PREFIX.MSG_ID):
return '%s%s' % (_prefix, _new_cid())
# ################################################################################################################################
def new_sub_key(endpoint_type, ext_client_id='', _new_cid=new_cid, _prefix=MSG_PREFIX.SUB_KEY):
    _ext_client_id = '.%s' % ext_client_id if ext_client_id else ''
return '%s.%s%s.%s' % (_prefix, endpoint_type, _ext_client_id, _new_cid(3))
# ################################################################################################################################
def new_group_id(_new_cid=new_cid, _prefix=MSG_PREFIX.GROUP_ID):
return '%s%s' % (_prefix, _new_cid())
# ################################################################################################################################
class PubSubMessage(object):
""" Base container class for pub/sub message wrappers.
"""
# We are not using __slots__ because they can't be inherited by subclasses
# and this class, as well as its subclasses, will be rewritten in Cython anyway.
pub_attrs = msg_pub_attrs + sk_lists
def __init__(self):
self.recv_time = utcnow_as_ms()
self.server_name = None
self.server_pid = None
self.topic = None
self.sub_key = None
self.pub_msg_id = None
self.pub_correl_id = None
self.in_reply_to = None
self.ext_client_id = None
self.group_id = None
self.position_in_group = None
self.pub_time = None
self.ext_pub_time = None
self.data = ''
self.data_prefix = ''
self.data_prefix_short = ''
self.mime_type = None
self.priority = None
self.expiration = None
self.expiration_time = None
self.has_gd = None
self.delivery_status = None
self.pub_pattern_matched = None
self.sub_pattern_matched = {}
self.size = None
self.published_by_id = None
self.topic_id = None
self.is_in_sub_queue = None
self.topic_name = None
self.cluster_id = None
self.delivery_count = 0
self.pub_time_iso = None
self.ext_pub_time_iso = None
self.expiration_time_iso = None
self.reply_to_sk = []
self.deliver_to_sk = []
self.user_ctx = None
self.zato_ctx = None
self.serialized = None # May be set by hooks to provide an explicitly serialized output for this message
setattr(self, GENERIC.ATTR_NAME, None) # To make this class look more like an SQLAlchemy one
def to_dict(self, skip=None, needs_utf8_encode=False, add_id_attrs=False, _data_keys=_data_keys):
""" Returns a dict representation of self.
"""
skip = skip or []
out = {}
for key in sorted(PubSubMessage.pub_attrs):
if key != 'topic' and key not in skip:
value = getattr(self, key)
if value is not None:
if needs_utf8_encode:
if key in _data_keys:
value = value.encode('utf8') if isinstance(value, unicode) else value
out[key] = value
if add_id_attrs:
out['msg_id'] = self.pub_msg_id
if self.pub_correl_id:
out['correl_id'] = self.pub_correl_id
# Append the generic opaque attribute to make the output look as though it was produced from an SQLAlchemy object
# but do it only if there is any value, otherwise skip it.
opaque_value = getattr(self, GENERIC.ATTR_NAME)
if opaque_value:
out[GENERIC.ATTR_NAME] = opaque_value
return out
# For compatibility with code that already expects dictalchemy objects with their .asdict method
def asdict(self):
out = self.to_dict()
out[GENERIC.ATTR_NAME] = getattr(self, GENERIC.ATTR_NAME)
return out
def to_external_dict(self, skip=skip_to_external, needs_utf8_encode=False):
""" Returns a dict representation of self ready to be delivered to external systems,
i.e. without internal attributes on output.
"""
out = self.to_dict(skip, needs_utf8_encode, True)
if self.reply_to_sk:
out['ctx'] = {
'reply_to_sk': self.reply_to_sk
}
return out
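# A short sketch of how the two dict representations differ - illustrative only.
if 0:
    msg = PubSubMessage()
    msg.pub_msg_id = new_msg_id()
    msg.data = 'abc'
    internal = msg.to_dict() # Includes internal attributes such as recv_time
    external = msg.to_external_dict() # Skips internal attributes and adds msg_id on output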
# ################################################################################################################################
class SkipDelivery(Exception):
""" Raised to indicate to delivery tasks that a given message should be skipped - but not deleted altogether,
the delivery will be attempted in the next iteration of the task.
"""
# ################################################################################################################################
class HandleNewMessageCtx(object):
""" Encapsulates information on new messages that a pubsub tool is about to process.
"""
__slots__ = ('cid', 'has_gd', 'sub_key_list', 'non_gd_msg_list', 'is_bg_call', 'pub_time_max')
def __init__(self, cid, has_gd, sub_key_list, non_gd_msg_list, is_bg_call, pub_time_max=None):
self.cid = cid
self.has_gd = has_gd
self.sub_key_list = sub_key_list
self.non_gd_msg_list = non_gd_msg_list
self.is_bg_call = is_bg_call
self.pub_time_max = pub_time_max
# ################################################################################################################################
class HookCtx(object):
""" Data and metadata that pub/sub hooks receive on input to their methods.
"""
__slots__ = ('msg', 'response', 'soap_suds_client')
def __init__(self, msg, soap_suds_client=None):
self.msg = msg
        self.soap_suds_client = soap_suds_client
self.response = None
# ################################################################################################################################
# PubSub's attributes listed separately for ease of making them part of SimpleIO definitions
pubsub_main_data = 'cluster_id', 'server_name', 'server_pid', 'server_api_address', 'keep_running', 'subscriptions_by_topic', \
'subscriptions_by_sub_key', 'sub_key_servers', 'endpoints', 'topics', 'sec_id_to_endpoint_id', \
'ws_channel_id_to_endpoint_id', 'service_id_to_endpoint_id', 'topic_name_to_id', 'pub_buffer_gd', 'pub_buffer_non_gd', \
'pubsub_tool_by_sub_key', 'pubsub_tools', 'sync_backlog', 'msg_pub_counter', 'has_meta_endpoint', \
'endpoint_meta_store_frequency', 'endpoint_meta_data_len', 'endpoint_meta_max_history', 'data_prefix_len', \
'data_prefix_short_len'
# ################################################################################################################################
class dict_keys:
endpoint = 'id', 'name', 'endpoint_type', 'role', 'is_active', 'is_internal', 'topic_patterns', \
'pub_topic_patterns', 'sub_topic_patterns'
subscription = 'id', 'creation_time', 'sub_key', 'endpoint_id', 'endpoint_name', 'topic_id', 'topic_name', \
'sub_pattern_matched', 'task_delivery_interval', 'unsub_on_wsx_close', 'ext_client_id'
topic = 'id', 'name', 'is_active', 'is_internal', 'max_depth_gd', 'max_depth_non_gd', 'has_gd', 'depth_check_freq',\
'pub_buffer_size_gd', 'task_delivery_interval', 'meta_store_frequency', 'task_sync_interval', 'msg_pub_counter', \
'msg_pub_counter_gd', 'msg_pub_counter_non_gd', 'last_synced', 'sync_has_gd_msg', 'sync_has_non_gd_msg', \
'gd_pub_time_max'
sks = 'sub_key', 'cluster_id', 'server_name', 'server_pid', 'endpoint_type', 'channel_name', 'pub_client_id', \
'ext_client_id', 'wsx_info', 'creation_time', 'endpoint_id'
all_dict_keys = dict_keys.endpoint + dict_keys.subscription + dict_keys.topic + dict_keys.sks
all_dict_keys = list(set(all_dict_keys))
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/pubsub.py | pubsub.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from inspect import isclass
# candv
from candv import Constants as _Constants, ValueConstant as _ValueConstant
# Python 2/3 compatibility
from future.utils import iteritems
from past.builtins import cmp
class Constants(_Constants):
values = _Constants.constants
class ValueConstant(_ValueConstant):
def __cmp__(self, other):
return cmp(self.value, (other.value if isinstance(other, ValueConstant) else other))
class MESSAGE:
MESSAGE_TYPE_LENGTH = 4
TOKEN_LENGTH = 32
TOKEN_START = MESSAGE_TYPE_LENGTH
TOKEN_END = MESSAGE_TYPE_LENGTH + TOKEN_LENGTH
PAYLOAD_START = MESSAGE_TYPE_LENGTH + TOKEN_LENGTH
NULL_TOKEN = '0' * TOKEN_LENGTH
class MESSAGE_TYPE:
TO_SCHEDULER = '0000'
TO_PARALLEL_ANY = '0001'
TO_PARALLEL_ALL = '0002'
TO_AMQP_PUBLISHING_CONNECTOR_ALL = '0003'
TO_AMQP_CONSUMING_CONNECTOR_ALL = '0004'
TO_AMQP_CONNECTOR_ALL = '0005'
TO_JMS_WMQ_PUBLISHING_CONNECTOR_ALL = '0006'
TO_JMS_WMQ_CONSUMING_CONNECTOR_ALL = '0007'
TO_JMS_WMQ_CONNECTOR_ALL = '0008'
USER_DEFINED_START = '5000'
TOPICS = {
MESSAGE_TYPE.TO_SCHEDULER: '/zato/to-scheduler',
MESSAGE_TYPE.TO_PARALLEL_ANY: '/zato/to-parallel/any',
MESSAGE_TYPE.TO_PARALLEL_ALL: '/zato/to-parallel/all',
MESSAGE_TYPE.TO_AMQP_PUBLISHING_CONNECTOR_ALL: '/zato/connector/amqp/publishing/all',
MESSAGE_TYPE.TO_AMQP_CONSUMING_CONNECTOR_ALL: '/zato/connector/amqp/consuming/all',
MESSAGE_TYPE.TO_AMQP_CONNECTOR_ALL: '/zato/connector/amqp/all',
MESSAGE_TYPE.TO_JMS_WMQ_PUBLISHING_CONNECTOR_ALL: '/zato/connector/jms-wmq/publishing/all',
MESSAGE_TYPE.TO_JMS_WMQ_CONSUMING_CONNECTOR_ALL: '/zato/connector/jms-wmq/consuming/all',
MESSAGE_TYPE.TO_JMS_WMQ_CONNECTOR_ALL: '/zato/connector/jms-wmq/all',
}
KEYS = {k:v.replace('/zato','').replace('/',':') for k,v in TOPICS.items()}
class SCHEDULER(Constants):
code_start = 100000
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
EXECUTE = ValueConstant('')
JOB_EXECUTED = ValueConstant('')
SET_JOB_INACTIVE = ValueConstant('')
class ZMQ_SOCKET(Constants):
code_start = 100200
CLOSE = ValueConstant('')
class SECURITY(Constants):
code_start = 100400
BASIC_AUTH_CREATE = ValueConstant('')
BASIC_AUTH_EDIT = ValueConstant('')
BASIC_AUTH_DELETE = ValueConstant('')
BASIC_AUTH_CHANGE_PASSWORD = ValueConstant('')
JWT_CREATE = ValueConstant('')
JWT_EDIT = ValueConstant('')
JWT_DELETE = ValueConstant('')
JWT_CHANGE_PASSWORD = ValueConstant('')
WSS_CREATE = ValueConstant('')
WSS_EDIT = ValueConstant('')
WSS_DELETE = ValueConstant('')
WSS_CHANGE_PASSWORD = ValueConstant('')
OAUTH_CREATE = ValueConstant('')
OAUTH_EDIT = ValueConstant('')
OAUTH_DELETE = ValueConstant('')
OAUTH_CHANGE_PASSWORD = ValueConstant('')
NTLM_CREATE = ValueConstant('')
NTLM_EDIT = ValueConstant('')
NTLM_DELETE = ValueConstant('')
NTLM_CHANGE_PASSWORD = ValueConstant('')
AWS_CREATE = ValueConstant('')
AWS_EDIT = ValueConstant('')
AWS_DELETE = ValueConstant('')
AWS_CHANGE_PASSWORD = ValueConstant('')
APIKEY_CREATE = ValueConstant('')
APIKEY_EDIT = ValueConstant('')
APIKEY_DELETE = ValueConstant('')
APIKEY_CHANGE_PASSWORD = ValueConstant('')
XPATH_SEC_CREATE = ValueConstant('')
XPATH_SEC_EDIT = ValueConstant('')
XPATH_SEC_DELETE = ValueConstant('')
XPATH_SEC_CHANGE_PASSWORD = ValueConstant('')
TLS_CA_CERT_CREATE = ValueConstant('')
TLS_CA_CERT_EDIT = ValueConstant('')
TLS_CA_CERT_DELETE = ValueConstant('')
TLS_CHANNEL_SEC_CREATE = ValueConstant('')
TLS_CHANNEL_SEC_EDIT = ValueConstant('')
TLS_CHANNEL_SEC_DELETE = ValueConstant('')
TLS_KEY_CERT_CREATE = ValueConstant('')
TLS_KEY_CERT_EDIT = ValueConstant('')
TLS_KEY_CERT_DELETE = ValueConstant('')
class DEFINITION(Constants):
code_start = 100600
AMQP_CREATE = ValueConstant('')
AMQP_EDIT = ValueConstant('')
AMQP_DELETE = ValueConstant('')
AMQP_CHANGE_PASSWORD = ValueConstant('')
WMQ_CREATE = ValueConstant('')
WMQ_EDIT = ValueConstant('')
WMQ_DELETE = ValueConstant('')
WMQ_CHANGE_PASSWORD = ValueConstant('')
WMQ_PING = ValueConstant('')
ZMQ_CREATE = ValueConstant('')
ZMQ_EDIT = ValueConstant('')
ZMQ_DELETE = ValueConstant('')
CASSANDRA_CREATE = ValueConstant('')
CASSANDRA_EDIT = ValueConstant('')
CASSANDRA_DELETE = ValueConstant('')
CASSANDRA_CHANGE_PASSWORD = ValueConstant('')
class OUTGOING(Constants):
code_start = 100800
AMQP_CREATE = ValueConstant('')
AMQP_EDIT = ValueConstant('')
AMQP_DELETE = ValueConstant('')
AMQP_PUBLISH = ValueConstant('')
WMQ_CREATE = ValueConstant('')
WMQ_EDIT = ValueConstant('')
WMQ_DELETE = ValueConstant('')
WMQ_SEND = ValueConstant('')
ZMQ_CREATE = ValueConstant('')
ZMQ_EDIT = ValueConstant('')
ZMQ_DELETE = ValueConstant('')
ZMQ_SEND = ValueConstant('')
SQL_CREATE_EDIT = ValueConstant('') # Same for creating and updating the pools
SQL_CHANGE_PASSWORD = ValueConstant('')
SQL_DELETE = ValueConstant('')
HTTP_SOAP_CREATE_EDIT = ValueConstant('') # Same for creating and updating
HTTP_SOAP_DELETE = ValueConstant('')
FTP_CREATE_EDIT = ValueConstant('') # Same for creating and updating
FTP_DELETE = ValueConstant('')
FTP_CHANGE_PASSWORD = ValueConstant('')
ODOO_CREATE = ValueConstant('')
ODOO_EDIT = ValueConstant('')
ODOO_DELETE = ValueConstant('')
ODOO_CHANGE_PASSWORD = ValueConstant('')
SAP_CREATE = ValueConstant('')
SAP_EDIT = ValueConstant('')
SAP_DELETE = ValueConstant('')
SAP_CHANGE_PASSWORD = ValueConstant('')
SFTP_CREATE = ValueConstant('')
SFTP_EDIT = ValueConstant('')
SFTP_DELETE = ValueConstant('')
SFTP_CHANGE_PASSWORD = ValueConstant('')
SFTP_EXECUTE = ValueConstant('')
SFTP_PING = ValueConstant('')
class CHANNEL(Constants):
code_start = 101000
AMQP_CREATE = ValueConstant('')
AMQP_EDIT = ValueConstant('')
AMQP_DELETE = ValueConstant('')
AMQP_MESSAGE_RECEIVED = ValueConstant('')
WMQ_CREATE = ValueConstant('')
WMQ_EDIT = ValueConstant('')
WMQ_DELETE = ValueConstant('')
WMQ_MESSAGE_RECEIVED = ValueConstant('')
ZMQ_CREATE = ValueConstant('')
ZMQ_EDIT = ValueConstant('')
ZMQ_DELETE = ValueConstant('')
ZMQ_MESSAGE_RECEIVED = ValueConstant('')
HTTP_SOAP_CREATE_EDIT = ValueConstant('') # Same for creating and updating
HTTP_SOAP_DELETE = ValueConstant('')
WEB_SOCKET_CREATE = ValueConstant('')
WEB_SOCKET_EDIT = ValueConstant('')
WEB_SOCKET_DELETE = ValueConstant('')
WEB_SOCKET_BROADCAST = ValueConstant('')
FTP_CREATE = ValueConstant('')
FTP_EDIT = ValueConstant('')
FTP_DELETE = ValueConstant('')
FTP_PING = ValueConstant('')
FTP_USER_CREATE = ValueConstant('')
FTP_USER_EDIT = ValueConstant('')
FTP_USER_DELETE = ValueConstant('')
FTP_USER_CHANGE_PASSWORD = ValueConstant('')
class AMQP_CONNECTOR(Constants):
""" Since 3.0, this is not used anymore.
"""
code_start = 101200
CLOSE = ValueConstant('')
class JMS_WMQ_CONNECTOR(Constants):
""" Since 3.0, this is not used anymore.
"""
code_start = 101400
CLOSE = ValueConstant('')
class ZMQ_CONNECTOR(Constants):
""" Since 3.0, this is not used anymore.
"""
code_start = 101600
CLOSE = ValueConstant('')
class SERVICE(Constants):
code_start = 101800
EDIT = ValueConstant('')
DELETE = ValueConstant('')
PUBLISH = ValueConstant('')
class STATS(Constants):
code_start = 102000
DELETE = ValueConstant('')
DELETE_DAY = ValueConstant('')
class HOT_DEPLOY(Constants):
code_start = 102200
CREATE_SERVICE = ValueConstant('')
CREATE_STATIC = ValueConstant('')
CREATE_USER_CONF = ValueConstant('')
AFTER_DEPLOY = ValueConstant('')
class SINGLETON(Constants):
code_start = 102400
CLOSE = ValueConstant('')
class MSG_NS(Constants):
code_start = 102600
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
class MSG_XPATH(Constants):
code_start = 102800
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
class MSG_JSON_POINTER(Constants):
code_start = 103000
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
class PUB_SUB_TOPIC(Constants):
code_start = 103200
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
ADD_DEFAULT_PRODUCER = ValueConstant('')
DELETE_DEFAULT_PRODUCER = ValueConstant('')
class PUB_SUB_PRODUCER(Constants):
code_start = 103400
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
class PUB_SUB_CONSUMER(Constants):
code_start = 103600
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
class CLOUD(Constants):
code_start = 103800
AWS_S3_CREATE_EDIT = ValueConstant('')
AWS_S3_DELETE = ValueConstant('')
class NOTIF(Constants):
code_start = 104000
RUN_NOTIFIER = ValueConstant('')
SQL_CREATE = ValueConstant('')
SQL_EDIT = ValueConstant('')
SQL_DELETE = ValueConstant('')
class SEARCH(Constants):
code_start = 104200
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
ES_CREATE = ValueConstant('')
ES_EDIT = ValueConstant('')
ES_DELETE = ValueConstant('')
ES_CHANGE_PASSWORD = ValueConstant('')
SOLR_CREATE = ValueConstant('')
SOLR_EDIT = ValueConstant('')
SOLR_DELETE = ValueConstant('')
SOLR_CHANGE_PASSWORD = ValueConstant('')
class QUERY(Constants):
code_start = 104400
CASSANDRA_CREATE = ValueConstant('')
CASSANDRA_EDIT = ValueConstant('')
CASSANDRA_DELETE = ValueConstant('')
CASSANDRA_CHANGE_PASSWORD = ValueConstant('')
class EMAIL(Constants):
code_start = 104800
SMTP_CREATE = ValueConstant('')
SMTP_EDIT = ValueConstant('')
SMTP_DELETE = ValueConstant('')
SMTP_CHANGE_PASSWORD = ValueConstant('')
IMAP_CREATE = ValueConstant('')
IMAP_EDIT = ValueConstant('')
IMAP_DELETE = ValueConstant('')
IMAP_CHANGE_PASSWORD = ValueConstant('')
class RBAC(Constants):
code_start = 105200
ROLE_CREATE = ValueConstant('')
ROLE_EDIT = ValueConstant('')
ROLE_DELETE = ValueConstant('')
CLIENT_ROLE_CREATE = ValueConstant('')
CLIENT_ROLE_DELETE = ValueConstant('')
PERMISSION_CREATE = ValueConstant('')
PERMISSION_EDIT = ValueConstant('')
PERMISSION_DELETE = ValueConstant('')
ROLE_PERMISSION_CREATE = ValueConstant('')
ROLE_PERMISSION_EDIT = ValueConstant('')
ROLE_PERMISSION_DELETE = ValueConstant('')
class VAULT(Constants):
code_start = 105400
CONNECTION_CREATE = ValueConstant('')
CONNECTION_EDIT = ValueConstant('')
CONNECTION_DELETE = ValueConstant('')
POLICY_CREATE = ValueConstant('')
POLICY_EDIT = ValueConstant('')
POLICY_DELETE = ValueConstant('')
class PUBSUB(Constants):
code_start = 105600
ENDPOINT_CREATE = ValueConstant('')
ENDPOINT_EDIT = ValueConstant('')
ENDPOINT_DELETE = ValueConstant('')
SUBSCRIPTION_CREATE = ValueConstant('')
SUBSCRIPTION_EDIT = ValueConstant('')
SUBSCRIPTION_DELETE = ValueConstant('')
TOPIC_CREATE = ValueConstant('')
TOPIC_EDIT = ValueConstant('')
TOPIC_DELETE = ValueConstant('')
SUB_KEY_SERVER_SET = ValueConstant('') # This is shared by WSX and other endpoint types
WSX_CLIENT_SUB_KEY_SERVER_REMOVE = ValueConstant('')
DELIVERY_SERVER_CHANGE = ValueConstant('')
class SMS(Constants):
code_start = 106000
TWILIO_CREATE = ValueConstant('')
TWILIO_EDIT = ValueConstant('')
TWILIO_DELETE = ValueConstant('')
class CACHE(Constants):
code_start = 106400
BUILTIN_CREATE = ValueConstant('')
BUILTIN_EDIT = ValueConstant('')
BUILTIN_DELETE = ValueConstant('')
BUILTIN_STATE_CHANGED_CLEAR = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_BY_PREFIX = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_BY_SUFFIX = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_BY_REGEX = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_CONTAINS = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_NOT_CONTAINS = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_CONTAINS_ALL = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_CONTAINS_ANY = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_BY_PREFIX = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_BY_SUFFIX = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_BY_REGEX = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_CONTAINS = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_NOT_CONTAINS = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_CONTAINS_ALL = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_CONTAINS_ANY = ValueConstant('')
BUILTIN_STATE_CHANGED_SET = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_BY_PREFIX = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_BY_SUFFIX = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_BY_REGEX = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_CONTAINS = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_NOT_CONTAINS = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_CONTAINS_ALL = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_CONTAINS_ANY = ValueConstant('')
MEMCACHED_CREATE = ValueConstant('')
MEMCACHED_EDIT = ValueConstant('')
MEMCACHED_DELETE = ValueConstant('')
class SERVER_STATUS(Constants):
code_start = 106800
STATUS_CHANGED = ValueConstant('')
class GENERIC(Constants):
code_start = 107000
CONNECTION_CREATE = ValueConstant('')
CONNECTION_EDIT = ValueConstant('')
CONNECTION_DELETE = ValueConstant('')
CONNECTION_CHANGE_PASSWORD = ValueConstant('')
class SSO(Constants):
code_start = 107200
USER_CREATE = ValueConstant('')
USER_EDIT = ValueConstant('')
LINK_AUTH_CREATE = ValueConstant('')
LINK_AUTH_DELETE = ValueConstant('')
class EVENT(Constants):
code_start = 107400
PUSH = ValueConstant('')
code_to_name = {}
# To prevent 'RuntimeError: dictionary changed size during iteration'
item_name, item = None, None
_globals = list(iteritems(globals()))
for item_name, item in _globals:
if isclass(item) and issubclass(item, Constants) and item is not Constants:
for idx, (attr, const) in enumerate(item.items()):
const.value = str(item.code_start + idx)
code_to_name[const.value] = '{}_{}'.format(item_name, attr) | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/broker_message.py | broker_message.py |
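# As an example of the assignment above, each class's first constant receives that class's
# code_start as its value, the next one code_start + 1, and so on, e.g.:
#
#   SCHEDULER.CREATE.value -> '100000'
#   SCHEDULER.EDIT.value   -> '100001'
#   code_to_name['100000'] -> 'SCHEDULER_CREATE'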
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from string import Template
from traceback import format_exc
# lxml
from lxml import etree
from lxml.objectify import ObjectPath as _ObjectPath
# Zato
from zato.common.exception import ParsingException
# ################################################################################################################################
# ################################################################################################################################
# XML namespace for use in all Zato's own services.
zato_namespace = 'https://zato.io/ns/v1'
zato_ns_map = {None: zato_namespace}
soapenv11_namespace = 'http://schemas.xmlsoap.org/soap/envelope/'
soapenv12_namespace = 'http://www.w3.org/2003/05/soap-envelope'
wsse_namespace = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd'
wsu_namespace = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd'
common_namespaces = {
'soapenv':soapenv11_namespace,
'wsse':wsse_namespace,
'wsu':wsu_namespace,
'zato':zato_namespace
}
soap_doc = Template("""<soap:Envelope xmlns:soap='%s'><soap:Body>$body</soap:Body></soap:Envelope>""" % soapenv11_namespace)
soap_body_path = '/soapenv:Envelope/soapenv:Body'
soap_body_xpath = etree.XPath(soap_body_path, namespaces=common_namespaces)
soap_fault_path = '/soapenv:Envelope/soapenv:Body/soapenv:Fault'
soap_fault_xpath = etree.XPath(soap_fault_path, namespaces=common_namespaces)
wsse_password_type_text = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText'
supported_wsse_password_types = (wsse_password_type_text,)
wsse_username_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Username'
wsse_username_xpath = etree.XPath(wsse_username_path, namespaces=common_namespaces)
wsse_password_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Password'
wsse_password_xpath = etree.XPath(wsse_password_path, namespaces=common_namespaces)
wsse_password_type_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Password/@Type'
wsse_password_type_xpath = etree.XPath(wsse_password_type_path, namespaces=common_namespaces)
wsse_nonce_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Nonce'
wsse_nonce_xpath = etree.XPath(wsse_nonce_path, namespaces=common_namespaces)
wsu_username_created_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsu:Created'
wsu_username_created_xpath = etree.XPath(wsu_username_created_path, namespaces=common_namespaces)
wsu_expires_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsu:Timestamp/wsu:Expires'
wsu_expires_xpath = etree.XPath(wsu_expires_path, namespaces=common_namespaces)
wsse_username_objectify = '{}Security'.format(wsse_namespace)
wsse_username_token_objectify = '{}UsernameToken'.format(wsse_namespace)
zato_data_path = soap_data_path = '/soapenv:Envelope/soapenv:Body/*[1]'
zato_data_xpath = soap_data_xpath = etree.XPath(zato_data_path, namespaces=common_namespaces)
zato_result_path = '//zato:zato_env/zato:result'
zato_result_xpath = etree.XPath(zato_result_path, namespaces=common_namespaces)
zato_cid_path = '//zato:zato_env/zato:cid'
zato_cid_xpath = etree.XPath(zato_cid_path, namespaces=common_namespaces)
zato_details_path = '//zato:zato_env/zato:details'
zato_details_xpath = etree.XPath(zato_details_path, namespaces=common_namespaces)
# ################################################################################################################################
# ################################################################################################################################
class path(object):
def __init__(self, path, raise_on_not_found=False, ns='', text_only=False):
self.path = path
self.ns = ns
self.raise_on_not_found = raise_on_not_found
self.text_only = text_only
self.children_only = False
self.children_only_idx = None
def get_from(self, elem):
if self.ns:
_path = '{{{}}}{}'.format(self.ns, self.path)
else:
_path = self.path
try:
if self.children_only:
elem = elem.getchildren()[self.children_only_idx]
value = _ObjectPath(_path)(elem)
if self.text_only:
return value.text
return value
        except (ValueError, AttributeError):
if self.raise_on_not_found:
raise ParsingException(None, format_exc())
else:
return None
# ################################################################################################################################
# ################################################################################################################################
class zato_path(path):
def __init__(self, path, raise_on_not_found=False, text_only=False):
super(zato_path, self).__init__(path, raise_on_not_found, zato_namespace, text_only)
self.children_only = True
self.children_only_idx = 1 # 0 is zato_env
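# A minimal usage sketch for the path class - not executed; the XML document below is
# illustrative only.
if 0:
    from lxml import objectify
    root = objectify.fromstring('<root><a><b>hello</b></a></root>')
    value = path('root.a.b', text_only=True).get_from(root) # Returns 'hello'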
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/xml_.py | xml_.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
from tempfile import NamedTemporaryFile
from traceback import format_exc
# Zato
from zato.common.util.api import make_repr, timeouting_popen
logger = getLogger(__name__)
# We'll wait up to that many seconds for HAProxy to validate the config file.
HAPROXY_VALIDATE_TIMEOUT = 0.6
# Statistics commands understood by HAProxy 1.3.x and newer. Note that the
# command numbers must be consecutively increasing across HAProxy versions.
haproxy_stats = {
("1", "3"): {
# A special command interpreted by the agent as a request for
# describing the commands available
0: ("ZATO_DESCRIBE_COMMANDS", "Describe commands"),
1: ("show info", "Show info"),
2: ("show stat", "Show stats"),
3: ("show errors", "Show errors"),
4: ("show sess", "Show sessions"),
},
("1", "4"): {
}
}
# timeout_id -> name, value in milliseconds
timeouts = {
1: (250, "250ms"),
2: (500, "500ms"),
3: (1000, "1s"),
4: (3000, "3s"),
    5: (5000, "5s"),
6: (30000, "30s")
}
http_log = {
1: ("nolog", "No log"),
2: ("httplog", "HTTP log"),
}
tcp_log = {
1: ("nolog", "No log"),
2: ("tcplog", "TCP log"),
}
reversed_http_log = dict((v[0],k) for k,v in http_log.items())
reversed_tcp_log = dict((v[0],k) for k,v in tcp_log.items())
class Config(object):
""" An object for representing a HAProxy configuration file.
"""
def __init__(self):
self.global_ = {}
self.defaults = {}
self.backend = {'bck_http_plain': {}}
self.frontend = {"front_http_plain": {}}
def __repr__(self):
return make_repr(self)
def set_value(self, name, data):
if name == 'global:log':
host, port, facility, level = data
self.global_['log'] = {}
self.global_['log']['host'] = host
self.global_['log']['port'] = port
self.global_['log']['facility'] = facility
self.global_['log']['level'] = level
elif name == 'global:stats_socket':
stats_socket = data[0]
self.global_['stats_socket'] = stats_socket
elif name == 'defaults:timeout connect':
timeout = data[0]
self.defaults['timeout_connect'] = timeout
elif name == 'defaults:timeout client':
timeout = data[0]
self.defaults['timeout_client'] = timeout
elif name == 'defaults:timeout server':
timeout = data[0]
self.defaults['timeout_server'] = timeout
elif name == 'defaults:stats uri':
stats_uri = data[0]
self.defaults['stats_uri'] = stats_uri
elif name.startswith('backend bck_http_plain:server'):
backend_name, address, port, extra = data
extra = extra.strip()
backend_name = backend_name.split('http_plain--')[1]
self.backend['bck_http_plain'][backend_name] = {}
self.backend['bck_http_plain'][backend_name]['address'] = address
self.backend['bck_http_plain'][backend_name]['port'] = port
self.backend['bck_http_plain'][backend_name]['extra'] = extra
elif name == 'backend bck_http_plain:option httpchk':
method, path = data
self.backend['bck_http_plain']['option_httpchk'] = {}
self.backend['bck_http_plain']['option_httpchk']['method'] = method
self.backend['bck_http_plain']['option_httpchk']['path'] = path
elif name == 'frontend front_http_plain:monitor-uri':
path = data[0]
self.frontend['front_http_plain']['monitor_uri'] = path
elif name == 'frontend front_http_plain:option log-http-requests':
option = reversed_http_log[data[0]]
self.frontend['front_http_plain']['log_http_requests'] = option
elif name == 'frontend front_http_plain:bind':
address, port = data
self.frontend['front_http_plain']['bind'] = {}
self.frontend['front_http_plain']['bind']['address'] = address
self.frontend['front_http_plain']['bind']['port'] = port
elif name == 'frontend front_http_plain:maxconn':
maxconn = data[0]
self.frontend['front_http_plain']['maxconn'] = maxconn
else:
msg = 'Could not parse config, name:[{name}], data:[{data}]'.format(name=name, data=data)
logger.error(msg)
raise Exception(msg)
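# A short sketch of feeding already parsed values into Config - the data below is
# illustrative only and would normally come from an HAProxy configuration file.
if 0:
    config = Config()
    config.set_value('global:log', ('127.0.0.1', '514', 'local0', 'info'))
    config.set_value('defaults:timeout connect', ('5000',))
    # config.global_['log']['host'] is now '127.0.0.1'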
def validate_haproxy_config(config_data, haproxy_command):
""" Writes the config into a temporary file and validates it using the HAProxy's
-c check mode.
"""
try:
with NamedTemporaryFile(prefix='zato-tmp') as tf:
tf.write(config_data.encode('utf8'))
tf.flush()
            common_msg = 'config_file:`{}`'
            with open(tf.name) as config_file:
                common_msg = common_msg.format(config_file.read())
timeout_msg = 'HAProxy didn\'t respond in `{}` seconds. '
rc_non_zero_msg = 'Failed to validate the config file using HAProxy. '
command = [haproxy_command, '-c', '-f', tf.name]
timeouting_popen(command, HAPROXY_VALIDATE_TIMEOUT, timeout_msg, rc_non_zero_msg, common_msg)
except Exception:
msg = 'Caught an exception, e:`{}`'.format(format_exc())
logger.error(msg)
raise Exception(msg) | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/haproxy.py | haproxy.py |
# stdlib
from datetime import datetime
# gevent
from gevent.lock import RLock
# ################################################################################################################################
# ################################################################################################################################
utcnow = datetime.utcnow
# ################################################################################################################################
# ################################################################################################################################
class InRAMStore:
""" Base class for stores keeping data in RAM, optionally synchronising it to persistent storage.
"""
def __init__(self, sync_threshold, sync_interval):
# type: (int, int) -> None
# Sync to storage once in that many events ..
self.sync_threshold = sync_threshold
# .. or once in that many seconds.
self.sync_interval = sync_interval
# Total events received since startup
self.total_events = 0
# How many events we have received since the last synchronisation with persistent storage
self.num_events_since_sync = 0
# Reset each time we synchronise in-RAM state with the persistent storage
self.last_sync_time = utcnow()
# Maps action opcodes to actual methods so that the latter do not have to be looked up in runtime
self.opcode_to_func = {}
# A coarse-grained update lock used while modifying the in-RAM database or DB key locks
self.update_lock = RLock()
# Maps DB keys to fine-grained locks
self.key_lock = {}
        # Internal usage counters and telemetry
self.telemetry = {}
# ################################################################################################################################
def get_lock(self, key):
# type: (str) -> RLock
with self.update_lock:
key_lock = self.key_lock.get(key)
if not key_lock:
key_lock = RLock()
self.key_lock[key] = key_lock
return key_lock
# ################################################################################################################################
def should_sync(self):
# type: () -> bool
sync_by_threshold = self.num_events_since_sync % self.sync_threshold == 0
sync_by_time = (utcnow() - self.last_sync_time).total_seconds() >= self.sync_interval
return sync_by_threshold or sync_by_time
# ################################################################################################################################
def sync_state(self):
raise NotImplementedError('InRAMStore.sync_state')
# ################################################################################################################################
def post_modify_state(self):
# .. update counters ..
self.num_events_since_sync += 1
self.total_events += 1
        # .. check if sync is enabled - a subclass may disable it by setting sync_state to None ..
if self.sync_state:
# .. check if we should sync RAM with persistent storage ..
if self.should_sync():
# .. save in persistent storage ..
self.sync_state()
# .. update metadata.
self.num_events_since_sync = 0
self.last_sync_time = utcnow()
# ################################################################################################################################
def access_state(self, opcode, data):
# type: (str, object) -> None
with self.update_lock:
# Maps the incoming upcode to an actual function to handle data ..
func = self.opcode_to_func[opcode]
# .. store in RAM ..
func(data)
# .. update metadata and, possibly, sync state (storage).
self.post_modify_state()
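# A minimal subclass sketch - illustrative only and not executed - showing how a concrete
# store could wire opcodes to handler methods and implement sync_state.
if 0:
    class InRAMCounterStore(InRAMStore):
        """ Keeps simple per-key counters in RAM, printing them out on each sync.
        """
        def __init__(self, sync_threshold, sync_interval):
            # type: (int, int) -> None
            super().__init__(sync_threshold, sync_interval)
            self.counters = {}
            self.opcode_to_func['incr'] = self.on_incr
        def on_incr(self, key):
            # type: (str) -> None
            with self.get_lock(key):
                self.counters[key] = self.counters.get(key, 0) + 1
        def sync_state(self):
            # A real implementation would write self.counters to persistent storage here
            print(self.counters)
    store = InRAMCounterStore(sync_threshold=100, sync_interval=30)
    store.access_state('incr', 'my.key')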
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/in_ram.py | in_ram.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
import os
import socket
# Zato
from zato.common.util.api import get_current_user
# Python 2/3 compatibility
from six import PY2
# ################################################################################################################################
logger = logging.getLogger(__name__)
logger_bzr = logging.getLogger('bzr')
logger_bzr.setLevel(logging.WARN)
logger_sh = logging.getLogger('sh.command')
logger_sh.setLevel(logging.WARN)
# ################################################################################################################################
# Bazaar used to be employed under Zato 3.0 with Python 2.7, which is why repository management is a no-op there.
# Any newer version of Zato, or Zato 3.0 with Python 3.x, uses git.
# ################################################################################################################################
# ################################################################################################################################
class _BaseRepoManager(object):
def __init__(self, repo_location='.'):
self.repo_location = os.path.abspath(os.path.expanduser(repo_location))
# ################################################################################################################################
# ################################################################################################################################
class NoneRepoManager(_BaseRepoManager):
def ensure_repo_consistency(self):
pass
# ################################################################################################################################
# ################################################################################################################################
class GitRepoManager(_BaseRepoManager):
def ensure_repo_consistency(self):
# Use sh for git commands
import sh
# Always work in the same directory as the repository is in
sh.cd(self.repo_location)
# (Re-)init the repository
sh.git.init(self.repo_location)
# Set user info
current_user = get_current_user()
sh.git.config('user.name', current_user)
sh.git.config('user.email', '{}@{}'.format(current_user, socket.getfqdn()))
# Default branch is called 'main'
sh.git.checkout('-B', 'main')
# Add all files
sh.git.add('-A', self.repo_location)
output = sh.git.status('--porcelain') # type: str
output = output.strip()
# And commit changes if there are any
if output:
sh.git.commit('-m', 'Committing latest changes')
# ################################################################################################################################
# ################################################################################################################################
if PY2:
RepoManager = NoneRepoManager
else:
RepoManager = GitRepoManager
# ################################################################################################################################
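# A minimal usage sketch (the repository path below is hypothetical). Under Python 2 this is a no-op;
# under Python 3 it requires the sh library and a git installation.
if __name__ == '__main__':
    manager = RepoManager('~/env/qs-1/server1/config/repo')
    manager.ensure_repo_consistency()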
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/repo.py | repo.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
from traceback import format_exc
# PyTDS
import pytds
# SQLAlchemy
from sqlalchemy.pool import QueuePool as SAQueuePool
from sqlalchemy.pool.dbapi_proxy import _DBProxy
# Zato
from zato.common.api import MS_SQL
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
def get_queue_pool(pool_kwargs):
class _QueuePool(SAQueuePool):
def __init__(self, creator, *args, **kwargs):
super(_QueuePool, self).__init__(creator, **pool_kwargs)
return _QueuePool
# ################################################################################################################################
class SimpleSession(object):
""" A simple object simulating SQLAlchemy sessions.
"""
def __init__(self, api):
        # type: (MSSQLDirectAPI) -> None
self.api = api
def __call__(self):
return self
def execute(self, *args, **kwargs):
return self.api.execute(*args, **kwargs)
def callproc(self, *args, **kwargs):
return self.api.callproc(*args, **kwargs)
def ping(self, *args, **kwargs):
return self.api.ping(*args, **kwargs)
# ################################################################################################################################
class MSSQLDirectAPI(object):
""" An object through which MS SQL connections can be obtained and stored procedures invoked.
"""
name = MS_SQL.ZATO_DIRECT
ping_query = 'SELECT 1'
def __init__(self, name, pool_size, connect_kwargs):
# type: (str, int, dict) -> None
self._name = name
self._connect_kwargs = connect_kwargs
self._pool_kwargs = {
'pool_size': pool_size,
'max_overflow': 0,
# This is a pool-level checkout timeout, not an SQL query-level one
# so we do not need to make it configurable
'timeout': 3
}
self._pool = _DBProxy(pytds, get_queue_pool(self._pool_kwargs))
# ################################################################################################################################
def connect(self):
return self._pool.connect(**self._connect_kwargs)
# ################################################################################################################################
def dispose(self):
self._pool.dispose()
# ################################################################################################################################
def execute(self, *args, **kwargs):
conn = None
try:
conn = self.connect()
with conn.cursor() as cursor:
cursor.execute(*args, **kwargs)
return cursor.fetchall()
finally:
if conn:
conn.close()
# ################################################################################################################################
def ping(self):
return self.execute(self.ping_query)
# ################################################################################################################################
def _return_proc_rows(self, conn, proc_name, params=None):
""" Calls a procedure and returns all the rows it produced as a single list.
"""
# Result to return
result = []
# This is optional in case getting a new cursor will fail
cursor = None
# Will be set to True in the exception block
has_exception = False
try:
            # The connection has already been obtained from the pool by our caller
# Get a new cursor
cursor = conn.cursor()
            # Call the procedure
cursor.callproc(proc_name, params or [])
while True:
result.append(cursor.fetchall())
if not cursor.nextset():
break
except Exception:
has_exception = True
logger.warn(format_exc())
raise
finally:
if cursor:
cursor.close()
conn.commit()
conn.close()
# Return the result only if there was no exception along the way
if not has_exception:
return result
# ################################################################################################################################
def _yield_proc_rows(self, conn, proc_name, params=None):
""" Calls a procedure and yields all the rows it produced, one by one.
"""
# This is optional in case getting a new cursor will fail
cursor = None
try:
# Get a new cursor
cursor = conn.cursor()
            # Call the procedure
cursor.callproc(proc_name, params or [])
while True:
yield cursor.fetchall()
if not cursor.nextset():
break
except Exception:
logger.warn(format_exc())
raise
finally:
if cursor:
cursor.close()
conn.commit()
conn.close()
# ################################################################################################################################
def callproc(self, name, params=None, use_yield=False):
params = params or []
# Obtain a connection from pool
conn = self.connect()
return self._yield_proc_rows(conn, name, params) if use_yield else self._return_proc_rows(conn, name, params)
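# A minimal usage sketch - all the connection details below are hypothetical
# and pytds must actually be able to reach such a server for this to run.
if __name__ == '__main__':
    connect_kwargs = {
        'dsn': 'localhost',
        'port': 1433,
        'database': 'mydb',
        'user': 'zato1',
        'password': 'secret1',
        'as_dict': True,
    }
    api = MSSQLDirectAPI('my.mssql', 20, connect_kwargs)
    # Confirm connectivity first
    api.ping()
    # Stored procedure results may be returned all at once or yielded set by set
    for row_set in api.callproc('my_procedure', ['param1'], use_yield=True):
        print(row_set)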
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/mssql_direct.py | mssql_direct.py |
from __future__ import absolute_import, division, print_function, unicode_literals
"""
Copyright (C) 2019 Zato Source s.r.o. https://zato.io
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
# stdlib
import logging
import subprocess
from datetime import datetime, timedelta
from traceback import format_exc
from uuid import uuid4
# gevent
from gevent import sleep, spawn
# six
from six import binary_type
from six.moves.http_client import OK
# ws4py
from ws4py.client.geventclient import WebSocketClient
# Zato
from zato.common.json_ import dumps
from zato.common.json_internal import loads
# ################################################################################################################################
logger = logging.getLogger('zato.wsx_client')
# ################################################################################################################################
class MSG_PREFIX:
_COMMON = 'zato.ws.client.{}'
INVOKE_SERVICE = _COMMON.format('invs.{}')
SEND_AUTH = _COMMON.format('auth.{}')
SEND_RESP = _COMMON.format('resp.{}')
# ################################################################################################################################
zato_keep_alive_ping = 'zato-keep-alive-ping'
_invalid = '_invalid.' + uuid4().hex
# ################################################################################################################################
class Config(object):
    def __init__(self, client_name=None, client_id=None, address=None, username=None, secret=None, on_request_callback=None,
        on_closed_callback=None, wait_time=5):
        self.client_name = client_name
        self.client_id = client_id
        self.address = address
        self.username = username
        self.secret = secret
        self.on_request_callback = on_request_callback
        self.on_closed_callback = on_closed_callback
        self.wait_time = wait_time
self.needs_auth = bool(self.username)
# ################################################################################################################################
class MessageToZato(object):
""" An individual message from a WebSocket client to Zato, either request or response to a previous request from Zato.
"""
action = _invalid
def __init__(self, msg_id, config, token=None):
self.config = config
self.msg_id = msg_id
self.token = token
def serialize(self, _now=datetime.utcnow):
return dumps(self.enrich({
'data': {},
'meta': {
'action': self.action,
'id': self.msg_id,
'timestamp': _now().isoformat(),
'token': self.token,
'client_id': self.config.client_id,
'client_name': self.config.client_name,
}
}))
def enrich(self, msg):
""" Implemented by subclasses that need to add extra information.
"""
return msg
# ################################################################################################################################
class AuthRequest(MessageToZato):
""" Logs a client into a WebSocket connection.
"""
action = 'create-session'
def enrich(self, msg):
msg['meta']['username'] = self.config.username
msg['meta']['secret'] = self.config.secret
return msg
# ################################################################################################################################
class ServiceInvokeRequest(MessageToZato):
""" Encapsulates information about an invocation of a Zato service.
"""
action = 'invoke-service'
def __init__(self, request_id, data, *args, **kwargs):
self.data = data
super(ServiceInvokeRequest, self).__init__(request_id, *args, **kwargs)
def enrich(self, msg):
msg['data'].update(self.data)
return msg
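# For illustration, given the Config from the __main__ section below, a ServiceInvokeRequest
# serializes to JSON along these lines (the ID and timestamp are examples only):
#
#   {
#       "data": {"service": "zato.ping"},
#       "meta": {
#           "action": "invoke-service",
#           "id": "zato.ws.client.invs.8a51bb8e6e1a4eef9f0a7d3b",
#           "timestamp": "2019-01-02T03:04:05.678901",
#           "token": null,
#           "client_id": "32351b3f5d16",
#           "client_name": "My Client"
#       }
#   }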
# ################################################################################################################################
class ResponseFromZato(object):
""" A response from Zato to a previous request by this client.
"""
__slots__ = ('id', 'timestamp', 'in_reply_to', 'status', 'is_ok', 'data', 'msg_impl')
def __init__(self):
self.id = None
self.timestamp = None
self.in_reply_to = None
self.status = None
self.is_ok = None
self.data = None
self.msg_impl = None
@staticmethod
def from_json(msg):
response = ResponseFromZato()
response.msg_impl = msg
meta = msg['meta']
response.id = meta['id']
response.timestamp = meta['timestamp']
response.in_reply_to = meta['in_reply_to']
response.status = meta['status']
response.is_ok = response.status == OK
response.data = msg.get('data')
return response
# ################################################################################################################################
class RequestFromZato(object):
""" A request from Zato to this client.
"""
__slots__ = ('id', 'timestamp', 'data', 'msg_impl')
def __init__(self):
self.id = None
self.timestamp = None
self.data = None
self.msg_impl = None
@staticmethod
def from_json(msg):
request = RequestFromZato()
request.msg_impl = msg
request.id = msg['meta']['id']
request.timestamp = msg['meta']['timestamp']
request.data = msg['data']
return request
# ################################################################################################################################
class ResponseToZato(MessageToZato):
""" A response from this client to a previous request from Zato.
"""
action = 'client-response'
def __init__(self, in_reply_to, data, *args, **kwargs):
self.in_reply_to = in_reply_to
self.data = data
super(ResponseToZato, self).__init__(*args, **kwargs)
def enrich(self, msg):
msg['meta']['in_reply_to'] = self.in_reply_to
msg['data']['response'] = self.data
return msg
# ################################################################################################################################
class _WSClient(WebSocketClient):
""" A low-level subclass of around ws4py's WebSocket client functionality.
"""
def __init__(self, on_connected_callback, on_message_callback, on_error_callback, on_closed_callback, *args, **kwargs):
self.on_connected_callback = on_connected_callback
self.on_message_callback = on_message_callback
self.on_error_callback = on_error_callback
self.on_closed_callback = on_closed_callback
super(_WSClient, self).__init__(*args, **kwargs)
def opened(self):
spawn(self.on_connected_callback)
def received_message(self, msg):
self.on_message_callback(msg)
def unhandled_error(self, error):
spawn(self.on_error_callback, error)
def closed(self, code, reason=None):
super(_WSClient, self).closed(code, reason)
self.on_closed_callback(code, reason)
# ################################################################################################################################
class Client(object):
""" A WebSocket client that knows how to invoke Zato services.
"""
def __init__(self, config):
# type: (Config)
self.config = config
self.conn = _WSClient(self.on_connected, self.on_message, self.on_error, self.on_closed, self.config.address)
self.keep_running = True
self.is_authenticated = False
self.is_connected = False
self.is_auth_needed = bool(self.config.username)
self.auth_token = None
self.on_request_callback = self.config.on_request_callback
self.on_closed_callback = self.config.on_closed_callback
self.needs_auth = self.config.needs_auth
# Keyed by IDs of requests sent from this client to Zato
self.requests_sent = {}
# Same key as self.requests_sent but the dictionary contains responses to previously sent requests
self.responses_received = {}
# Requests initiated by Zato, keyed by their IDs
self.requests_received = {}
# ################################################################################################################################
def send(self, msg_id, msg, wait_time=2):
""" Spawns a greenlet to send a message to Zato.
"""
spawn(self._send, msg_id, msg, msg.serialize(), wait_time)
# ################################################################################################################################
def _send(self, msg_id, msg, serialized, wait_time):
""" Sends a request to Zato and waits up to wait_time or self.config.wait_time seconds for a reply.
"""
logger.info('Sending msg `%s`', serialized)
# So that it can be correlated with a future response
self.requests_sent[msg_id] = msg
        # Actually send the message as a string now
self.conn.send(serialized)
# ################################################################################################################################
def _wait_for_response(self, request_id, wait_time=None, _now=datetime.utcnow, _delta=timedelta, _sleep=sleep):
""" Wait until a response arrives and return it
or return None if there is no response up to wait_time or self.config.wait_time.
"""
now = _now()
until = now + _delta(seconds=wait_time or self.config.wait_time)
while now < until:
response = self.responses_received.get(request_id)
if response:
return response
else:
_sleep(0.01)
now = _now()
# ################################################################################################################################
def authenticate(self, request_id):
""" Authenticates the client with Zato.
"""
logger.info('Authenticating as `%s` (%s %s)', self.config.username, self.config.client_name, self.config.client_id)
spawn(self.send, request_id, AuthRequest(request_id, self.config, self.auth_token))
# ################################################################################################################################
def on_connected(self):
""" Invoked upon establishing an initial connection - logs the client in with self.config's credentials
"""
logger.info('Connected to `%s` %s (%s %s)',
self.config.address,
'as `{}`'.format(self.config.username) if self.config.username else 'without credentials',
self.config.client_name, self.config.client_id)
request_id = MSG_PREFIX.SEND_AUTH.format(uuid4().hex)
self.authenticate(request_id)
response = self._wait_for_response(request_id)
if not response:
logger.warn('No response to authentication request `%s`', request_id)
else:
self.auth_token = response.data['token']
self.is_authenticated = True
del self.responses_received[request_id]
logger.info('Authenticated successfully as `%s` (%s %s)',
self.config.username, self.config.client_name, self.config.client_id)
# ################################################################################################################################
def on_message(self, msg, _uuid4=uuid4):
""" Invoked for each message received from Zato, both for responses to previous requests and for incoming requests.
"""
_msg = loads(msg.data.decode('utf-8') if isinstance(msg.data, binary_type) else msg.data)
logger.info('Received message `%s`', _msg)
in_reply_to = _msg['meta'].get('in_reply_to')
# Reply from Zato to one of our requests
if in_reply_to:
self.responses_received[in_reply_to] = ResponseFromZato.from_json(_msg)
# Request from Zato
else:
data = self.on_request_callback(RequestFromZato.from_json(_msg))
response_id = MSG_PREFIX.SEND_RESP.format(_uuid4().hex)
self.send(response_id, ResponseToZato(_msg['meta']['id'], data, response_id, self.config, self.auth_token))
# ################################################################################################################################
def on_closed(self, code, reason=None):
logger.info('Closed WSX client connection to `%s` (remote code:%s reason:%s)', self.config.address, code, reason)
if self.on_closed_callback:
self.on_closed_callback(code, reason)
# ################################################################################################################################
def on_error(self, error):
""" Invoked for each unhandled error in the lower-level ws4py library.
"""
logger.warn('Caught error %s', error)
# ################################################################################################################################
def _run(self, max_wait=10, _sleep_time=2):
needs_connect = True
start = now = datetime.utcnow()
# In the first few seconds, do not warn about socket errors in case
# the other end is intrinsically slow to connect to.
warn_from = start + timedelta(seconds=3)
use_warn = False
# Wait for max_wait seconds until we have the connection
until = now + timedelta(seconds=max_wait)
while self.keep_running and needs_connect and now < until:
try:
if self.conn.sock:
self.conn.connect()
else:
raise ValueError('No WSX connection to {} after {}'.format(self.config.address, now - start))
except Exception as e:
if use_warn:
log_func = logger.warn
else:
if now >= warn_from:
log_func = logger.warn
use_warn = True
else:
log_func = logger.debug
log_func('Exception caught `%s` while connecting to WSX `%s (%s)`', e, self.config.address, format_exc())
sleep(_sleep_time)
now = datetime.utcnow()
else:
needs_connect = False
self.is_connected = True
# ################################################################################################################################
    def run(self, max_wait=20):
        self._run(max_wait)
now = datetime.utcnow()
until = now + timedelta(seconds=max_wait)
while not self.is_connected:
sleep(0.01)
now = datetime.utcnow()
if now >= until:
return
# ################################################################################################################################
def stop(self, reason=''):
self.keep_running = False
self.conn.close(reason=reason)
self.is_connected = False
# ################################################################################################################################
def invoke(self, request, timeout=5):
if self.needs_auth and (not self.is_authenticated):
raise Exception('Client is not authenticated')
request_id = MSG_PREFIX.INVOKE_SERVICE.format(uuid4().hex)
spawn(self.send, request_id, ServiceInvokeRequest(request_id, request, self.config, self.auth_token))
response = self._wait_for_response(request_id, wait_time=timeout)
if not response:
logger.warn('No response to invocation request `%s`', request_id)
else:
return response
# ################################################################################################################################
if __name__ == '__main__':
def on_request_from_zato(msg):
try:
return subprocess.check_output(msg.data['cmd'])
except Exception as e:
            return format_exc()
config = Config()
config.client_name = 'My Client'
config.client_id = '32351b3f5d16'
address = 'ws://127.0.0.1:47043/zato.ws.apitests'
config.address = address
config.username = 'user1'
config.secret = 'secret1'
config.on_request_callback = on_request_from_zato
client = Client(config)
client.run()
client.invoke({'service':'zato.ping'})
logger.info('Press Ctrl-C to quit')
try:
        x = 0
        while x < 1000 and client.keep_running:
            x += 1
            sleep(0.2)
except KeyboardInterrupt:
client.stop()
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/wsx_client.py | wsx_client.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
from traceback import format_exc
# Zato
from zato.common.api import NotGiven
from zato.common.exception import BadRequest, InternalServerError
from zato.common.rate_limiting.common import RateLimitReached as RateLimitReachedError
# ################################################################################################################################
# Type checking
import typing
if typing.TYPE_CHECKING:
# stdlib
from typing import Callable
# Zato
from zato.common.json_schema import ValidationException as JSONSchemaValidationException
from zato.server.service import ChannelInfo
from zato.server.service.store import ServiceStore
# For pyflakes
Callable = Callable
ChannelInfo = ChannelInfo
JSONSchemaValidationException = JSONSchemaValidationException
ServiceStore = ServiceStore
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
json_rpc_version_supported = '2.0'
# ################################################################################################################################
# ################################################################################################################################
class RequestContext(object):
__slots__ = ('cid', 'orig_message', 'message')
def __init__(self):
self.cid = None # type: str
self.orig_message = None # type: object
self.message = None # type: str
# ################################################################################################################################
# ################################################################################################################################
class ErrorCtx(object):
__slots__ = ('cid', 'code', 'message')
def __init__(self):
self.cid = None # type: str
self.code = None # type: int
self.message = None # type: str
def to_dict(self):
# type: () -> dict
return {
'code': self.code,
'message': self.message,
'data': {
'ctx': {
'cid': self.cid
}
}
}
# ################################################################################################################################
# ################################################################################################################################
class ItemResponse(object):
__slots__ = ('id', 'cid', 'error', 'result')
def __init__(self):
self.id = None # type: int
self.cid = None # type: str
self.error = None # type: ErrorCtx
self.result = NotGiven # type: object
def to_dict(self, _json_rpc_version=json_rpc_version_supported):
# type: (str) -> dict
out = {
'jsonrpc': _json_rpc_version,
'id': self.id,
}
if self.result is not NotGiven:
out['result'] = self.result
else:
out['error'] = self.error.to_dict()
return out
# ################################################################################################################################
# ################################################################################################################################
class JSONRPCException(object):
code = -32000
# ################################################################################################################################
# ################################################################################################################################
class JSONRPCBadRequest(JSONRPCException, BadRequest):
def __init__(self, cid, message):
# type: (str, str)
BadRequest.__init__(self, cid, msg=message)
# ################################################################################################################################
# ################################################################################################################################
class InvalidRequest(JSONRPCBadRequest):
code = -32600
# ################################################################################################################################
# ################################################################################################################################
class MethodNotFound(JSONRPCBadRequest):
code = -32601
# ################################################################################################################################
# ################################################################################################################################
class InternalError(JSONRPCException, InternalServerError):
code = -32603
# ################################################################################################################################
# ################################################################################################################################
class ParseError(JSONRPCBadRequest):
code = -32700
# ################################################################################################################################
# ################################################################################################################################
class Forbidden(JSONRPCBadRequest):
code = -32403
# ################################################################################################################################
# ################################################################################################################################
class RateLimitReached(JSONRPCBadRequest):
code = -32429
# ################################################################################################################################
# ################################################################################################################################
class JSONRPCItem(object):
""" An object describing an individual JSON-RPC request.
"""
__slots__ = 'jsonrpc', 'method', 'params', 'id', 'needs_response'
# ################################################################################################################################
def __init__(self):
self.jsonrpc = None # type: str
self.method = None # type: str
self.params = None # type: object
self.id = None # type: str
self.needs_response = None # type: bool
# ################################################################################################################################
def to_dict(self):
# type: () -> dict
return {
'jsonrpc': self.jsonrpc,
'method': self.method,
'params': self.params,
'id': self.id
}
# ################################################################################################################################
@staticmethod
def from_dict(item):
# type: (dict) -> JSONRPCItem
# Our object to return
out = JSONRPCItem()
# At this stage we only create a Python-level object and input
# validation is performed by our caller.
out.jsonrpc = item.get('jsonrpc')
        # Use NotGiven as the default so that notifications, i.e. requests without an ID, can be told apart below
        out.id = item.get('id', NotGiven)
out.method = item.get('method')
out.params = item.get('params')
out.needs_response = out.id is not NotGiven
return out
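# For illustration, this is how a single JSON-RPC 2.0 request maps to a JSONRPCItem
# (the method name, parameters and ID below are hypothetical):
if __name__ == '__main__':
    item = JSONRPCItem.from_dict({
        'jsonrpc': '2.0',
        'method': 'my.service',
        'params': {'customer_id': 123},
        'id': 'abc-1'
    })
    print(item.needs_response) # True - an ID was given on input
    print(item.to_dict())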
# ################################################################################################################################
# ################################################################################################################################
class JSONRPCHandler(object):
def __init__(self, service_store, wsgi_environ, config, invoke_func, channel_info, JSONSchemaValidationException):
# type: (ServiceStore, dict, dict, Callable, ChannelInfo, JSONSchemaValidationException)
self.service_store = service_store
self.wsgi_environ = wsgi_environ
self.config = config
self.invoke_func = invoke_func
self.channel_info = channel_info
# Kept here and provided by the caller to remove circular imports between common/json_rpc.py and common/json_schema.py
self.JSONSchemaValidationException = JSONSchemaValidationException
# ################################################################################################################################
def handle(self, ctx):
# type: (RequestContext) -> object
if isinstance(ctx.message, list):
return self.handle_list(ctx)
else:
return self.handle_one_item(ctx)
# ################################################################################################################################
def can_handle(self, method):
# type: (str) -> bool
return method in self.config['service_whitelist']
# ################################################################################################################################
def _handle_one_item(self, cid, message, orig_message, _json_rpc_version=json_rpc_version_supported):
        # type: (str, dict, object, str) -> dict
try:
# Response to return
out = ItemResponse()
# Construct a Python object out of incoming data
item = JSONRPCItem.from_dict(message)
# We should have the ID at this point
out.id = item.id
# Confirm that we can handle the JSON-RPC version requested
            if item.jsonrpc != _json_rpc_version:
raise InvalidRequest(cid, 'Unsupported JSON-RPC version `{}` in `{}`'.format(
item.jsonrpc, orig_message.decode('utf8')))
# Confirm that method requested is one that we can handle
if not self.can_handle(item.method):
raise MethodNotFound(cid, 'Method not supported `{}` in `{}`'.format(item.method, orig_message.decode('utf8')))
# Try to invoke the service ..
skip_response_elem = self.service_store.has_sio(item.method)
service_response = self.invoke_func(item.method, item.params, channel_info=self.channel_info,
skip_response_elem=skip_response_elem, wsgi_environ=self.wsgi_environ)
# .. no exception here = invocation was successful
out.result = service_response
return out.to_dict() if item.needs_response else None
except Exception as e:
is_schema_error = isinstance(e, self.JSONSchemaValidationException)
is_rate_limit_error = isinstance(e, RateLimitReachedError)
error_ctx = ErrorCtx()
error_ctx.cid = cid
# JSON Schema validator error
if is_schema_error:
err_code = InvalidRequest.code
err_message = e.error_msg_details if e.needs_err_details else e.error_msg
elif is_rate_limit_error:
err_code = RateLimitReached.code
err_message = 'Too Many Requests'
else:
# Any JSON-RPC error
if isinstance(e, JSONRPCException):
err_code = e.code
err_message = e.args[0]
# Any other error
else:
err_code = -32000
err_message = 'Message could not be handled'
if is_schema_error:
logger.warn('JSON Schema validation error in JSON-RPC channel `%s` (%s); msg:`%s`, e:`%s`, details:`%s`',
self.config.name, cid, orig_message, format_exc(), e.error_msg_details)
else:
logger.warn('JSON-RPC exception in `%s` (%s); msg:`%s`, e:`%s`',
self.config.name, cid, orig_message, format_exc())
error_ctx.code = err_code
error_ctx.message = err_message
out.error = error_ctx
return out.to_dict()
# ################################################################################################################################
def handle_one_item(self, ctx, _json_rpc_version=json_rpc_version_supported):
# type: (RequestContext) -> dict
return self._handle_one_item(ctx.cid, ctx.message, ctx.orig_message)
# ################################################################################################################################
def handle_list(self, ctx):
# type: (RequestContext) -> list
out = []
for item in ctx.message: # type: dict
out.append(self._handle_one_item(ctx.cid, item, ctx.orig_message))
return out
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/json_rpc.py | json_rpc.py |
from __future__ import absolute_import, division, print_function, unicode_literals
"""
A set of settings kept in an SQLite database.
"""
# stdlib
import os
from logging import getLogger
# SQLAlchemy
from sqlalchemy import Column, create_engine, Integer, Sequence, String, Text, UniqueConstraint
from sqlalchemy.ext.declarative import declarative_base
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
Base = declarative_base()
# ################################################################################################################################
class Setting(Base):
__tablename__ = 'settings'
__table_args__ = (UniqueConstraint('name'), {})
id = Column(Integer, Sequence('settings_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(Text, nullable=True)
data_type = Column(String(20), nullable=False)
# ################################################################################################################################
class DATA_TYPE:
INTEGER = 'integer'
STRING = 'string'
data_type_handler = {
DATA_TYPE.INTEGER: int,
DATA_TYPE.STRING: lambda value: value,
}
# ################################################################################################################################
class SettingsDB(object):
""" Keeps simple settings in an SQLite database. It's new in 3.0 so to ease in migration from pre-3.0 releases
the class takes care itself of making sure that its underlying database actually exists - a future Zato version
will simply assume that it does.
"""
def __init__(self, db_path, session):
self.db_path = db_path
self.session = session
# Older environments did not have this database
if not os.path.exists(self.db_path):
self.create_db()
def get_engine(self):
return create_engine('sqlite:///{}'.format(self.db_path))
def create_db(self):
Base.metadata.create_all(self.get_engine())
def get(self, name, default=None, needs_object=False):
data = self.session.query(Setting).\
filter(Setting.name==name).\
first() or None
if needs_object:
return data
return data_type_handler[data.data_type](data.value) if data else default
def set(self, name, value, data_type=DATA_TYPE.INTEGER):
s = self.get(name, needs_object=True) or Setting()
s.name = name
s.value = value
s.data_type = data_type
self.session.add(s)
self.session.commit()
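# A minimal usage sketch (the database path and setting name below are hypothetical):
if __name__ == '__main__':
    from sqlalchemy.orm import sessionmaker
    db_path = '/tmp/zato-settings.db'
    session = sessionmaker(bind=create_engine('sqlite:///{}'.format(db_path)))()
    settings = SettingsDB(db_path, session)
    settings.set('cluster.max_connections', 25)
    print(settings.get('cluster.max_connections')) # 25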
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/settings_db.py | settings_db.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
from logging import getLogger
# JSON Schema
from jsonschema import validate as js_validate
from jsonschema.exceptions import ValidationError as JSValidationError
from jsonschema.validators import validator_for
# Zato
from zato.common.api import CHANNEL, NotGiven
from zato.common.json_internal import dumps, loads
from zato.common.json_rpc import ErrorCtx, JSONRPCBadRequest, ItemResponse
# ################################################################################################################################
# Type checking
import typing
if typing.TYPE_CHECKING:
# stdlib
from typing import Callable
# Bunch
from bunch import Bunch
# Zato
from zato.server.base.parallel import ParallelServer
# For pyflakes
Bunch = Bunch
Callable = Callable
ParallelServer = ParallelServer
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
def get_service_config(item, server):
# type: (Bunch, ParallelServer) -> dict
# By default services are allowed to validate input using JSON Schema
is_json_schema_enabled = item.get('is_json_schema_enabled', True)
# Unless configured per each service separately, we use server defaults here
needs_json_schema_err_details = item.get('needs_json_schema_err_details', NotGiven)
if needs_json_schema_err_details is NotGiven:
needs_json_schema_err_details = server.fs_server_config.misc.return_json_schema_errors
return {
'is_json_schema_enabled': is_json_schema_enabled,
'needs_json_schema_err_details': needs_json_schema_err_details
}
# ################################################################################################################################
# ################################################################################################################################
class ValidationException(Exception):
def __init__(self, cid, object_type, object_name, needs_err_details, error_msg, error_msg_details):
# type: (str, str, str, bool, str, str)
self.cid = cid
self.object_type = object_type
self.object_name = object_name
self.needs_err_details = needs_err_details
self.error_msg = error_msg
self.error_msg_details = error_msg_details
super(ValidationException, self).__init__('JSON Schema validation error in `{}` ({}), e:`{}`'.format(
self.object_name, cid, self.error_msg))
# ################################################################################################################################
# ################################################################################################################################
class ValidationError(object):
""" Base class for validation error-related classes.
"""
__slots__ = 'cid', 'needs_err_details', 'error_msg', 'error_extra', 'needs_prefix'
def __init__(self, cid, needs_err_details, error_msg, error_extra=None, needs_prefix=True):
# type: (str, bool, str, dict, bool)
self.cid = cid
self.needs_err_details = needs_err_details
self.error_msg = error_msg
self.error_extra = error_extra
self.needs_prefix = needs_prefix
def get_error_message(self, needs_error_msg=False):
# type: (bool) -> str
out = 'Invalid request' if self.needs_prefix else ''
if needs_error_msg or self.needs_err_details:
if out:
out += '; '
out += self.error_msg
return out
def serialize(self):
raise NotImplementedError('Must be overridden in subclasses')
# ################################################################################################################################
# ################################################################################################################################
class DictError(ValidationError):
""" An error reporter that serializes JSON Schema validation errors into Python dict responses.
"""
def serialize(self, to_string=False):
# type: (bool) -> object
out = {
'is_ok': False,
'cid': self.cid,
'message': self.get_error_message()
}
return dumps(out) if to_string else out
# ################################################################################################################################
# ################################################################################################################################
class JSONRPCError(ValidationError):
""" An error reporter that serializes JSON Schema validation errors into JSON-RPC responses.
"""
def serialize(self):
# type: () -> dict
error_ctx = ErrorCtx()
error_ctx.cid = self.cid
error_ctx.code = JSONRPCBadRequest.code
error_ctx.message = 'Invalid request'
# This may be optionally turned off
error_ctx.message = self.get_error_message()
out = ItemResponse()
out.id = self.error_extra['json_rpc_id']
out.error = error_ctx
return out.to_dict()
# ################################################################################################################################
channel_type_to_error_class = {
CHANNEL.HTTP_SOAP: DictError,
CHANNEL.JSON_RPC: JSONRPCError,
CHANNEL.SERVICE: DictError,
}
# ################################################################################################################################
# ################################################################################################################################
class ValidationConfig(object):
""" An individual set of configuration options - each object requiring validation (e.g. each channel)
will have its own instance of this class assigned to its validator.
"""
__slots__ = 'is_enabled', 'object_type', 'object_name', 'schema_path', 'schema', 'validator', 'needs_err_details'
def __init__(self):
self.is_enabled = None # type: bool
# Object type is channel type or, in the future, one of outgoing connections
# whose requests to external resources we may also want to validate.
self.object_type = None # type: str
self.object_name = None # type: str
self.schema_path = None # type: str
self.schema = None # type: dict
self.validator = None # type: object
self.needs_err_details = None # type: bool
# ################################################################################################################################
# ################################################################################################################################
class Result(object):
__slots__ = 'is_ok', 'cid', 'needs_err_details', 'error_msg', 'error_extra', 'object_type'
def __init__(self):
self.is_ok = None # type: bool
self.cid = None # type: str
self.needs_err_details = None # type: bool
self.error_msg = None # type: str
self.error_extra = None # type: dict
self.object_type = None # type: str
def __bool__(self):
return bool(self.is_ok)
__nonzero__ = __bool__
def get_error(self):
# type: () -> ValidationError
ErrorClass = channel_type_to_error_class[self.object_type]
error = ErrorClass(self.cid, self.needs_err_details, self.error_msg, self.error_extra) # type: ValidationError
return error
# ################################################################################################################################
# ################################################################################################################################
class Validator(object):
""" Validates JSON requests against a previously assigned schema and serializes errors according to the caller's type,
e.g. using REST or JSON-RPC.
"""
__slots__ = 'is_initialized', 'config'
def __init__(self):
self.is_initialized = False # type: bool
self.config = None # type: ValidationConfig
def init(self):
if not self.config.is_enabled:
logger.info('Skipped initialization of JSON Schema validation for `%s` (%s)',
self.config.object_name, self.config.object_type)
return
if not os.path.exists(self.config.schema_path):
            # ValidationException requires request-level details (e.g. CID) which do not exist yet
            # at initialization time, hence a plain ValueError here.
            raise ValueError('JSON schema not found `{}` ({})'.format(self.config.schema_path, self.config.object_name))
# The file is sure to exist
with open(self.config.schema_path) as f:
schema = f.read()
# Parse the contents as JSON
schema = loads(schema)
# Assign the schema and validator for the schema for later use
self.config.schema = schema
self.config.validator = validator_for(schema)
# Everything is set up = we are initialized
self.is_initialized = True
def validate(self, cid, data, object_type=None, object_name=None, needs_err_details=False, _validate=js_validate):
        # type: (str, object, str, str, bool, Callable) -> Result
# Result we will return
result = Result()
result.cid = cid
object_type = object_type or self.config.object_type
        object_name = object_name or self.config.object_name
needs_err_details = needs_err_details or self.config.needs_err_details
try:
            _validate(data, self.config.schema, self.config.validator)
except JSValidationError as e:
# These will be always used, no matter the object/channel type
result.is_ok = False
result.object_type = object_type
result.needs_err_details = needs_err_details
result.error_msg = str(e)
# This is applicable only to JSON-RPC
if object_type == CHANNEL.JSON_RPC:
result.error_extra = {'json_rpc_id': data.get('id')}
else:
result.is_ok = True
return result
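# A minimal usage sketch - the schema, its temporary location and the service name below are hypothetical:
if __name__ == '__main__':
    import tempfile
    with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as f:
        f.write('{"type": "object", "required": ["customer_id"]}')
    config = ValidationConfig()
    config.is_enabled = True
    config.object_type = CHANNEL.SERVICE
    config.object_name = 'my.service'
    config.schema_path = f.name
    config.needs_err_details = True
    validator = Validator()
    validator.config = config
    validator.init()
    result = validator.validate('cid-1', {})
    if not result:
        print(result.get_error().get_error_message(True))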
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/json_schema.py | json_schema.py |
# ################################################################################################################################
# ################################################################################################################################
# stdlib
from typing import Optional as optional
# dacite
from dacite import from_dict
# Be explicit about which import error we want to catch
try:
    import dataclasses # noqa: F401
# Python 3.6 - fall back to the bundled backport
except ImportError:
    from zato.common.ext.dataclasses import * # noqa: F401
# Python 3.7+ - use the standard library module
else:
    from dataclasses import * # noqa: F401
# ################################################################################################################################
# ################################################################################################################################
#
# TypedDict
#
try:
from typing import TypedDict
except ImportError:
from zato.common.ext.typing_extensions import TypedDict
# ################################################################################################################################
# ################################################################################################################################
# For flake8
from_dict = from_dict
optional = optional
TypedDict = TypedDict
# ################################################################################################################################
# ################################################################################################################################
def instance_from_dict(class_, data):
# type: (object, dict) -> object
instance = class_()
for key, value in data.items():
setattr(instance, key, value)
return instance
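# A minimal usage sketch (the class and its field values are illustrative only):
if __name__ == '__main__':
    class User:
        name = None
        email = None
    user = instance_from_dict(User, {'name': 'my.user', 'email': 'user@example.com'})
    print(user.name, user.email)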
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/typing_.py | typing_.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
from datetime import datetime
from itertools import groupby
from io import StringIO
from operator import attrgetter
from time import time
# psutil
from psutil import Process
# PyYAML
import yaml
# pytz
from pytz import UTC
# Texttable
from texttable import Texttable
# Zato
from zato.common.api import INFO_FORMAT, MISC, ZATO_INFO_FILE
from zato.common.json_internal import dumps as json_dumps, loads as json_loads
from zato.common.util.api import current_host
def format_connections(conns, format):
""" Formats a list of connections according to the output format.
"""
groups = (groupby(conns, key=attrgetter('status')))
out = {}
for status, items in groups:
items = list(items)
items.sort(key=attrgetter('raddr'))
out_items = out.setdefault(status, [])
for item in items:
laddr_str = ':'.join(str(elem) for elem in item.laddr).ljust(21)
raddr_str = ':'.join(str(elem) for elem in item.raddr).rjust(21)
out_item = {
'from': '{}:{}'.format(*item.laddr),
'to': None,
'formatted': None,
}
if item.raddr:
out_item['to'] = '{}:{}'.format(*item.raddr)
out_item['formatted'] = '{} -> {}'.format(laddr_str, raddr_str)
else:
out_item['formatted'] = '{}:{}'.format(*item.laddr)
out_items.append(out_item)
return out
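# For illustration, format_connections produces a dictionary keyed by connection status,
# along these lines (addresses and padding widths are examples only):
#
#   {
#       'ESTABLISHED': [
#           {'from': '10.0.0.1:17010', 'to': '10.0.0.2:5432',
#            'formatted': '10.0.0.1:17010        ->         10.0.0.2:5432'},
#       ],
#       'LISTEN': [
#           {'from': '0.0.0.0:17010', 'to': None, 'formatted': '0.0.0.0:17010'},
#       ]
#   }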
def get_worker_pids(component_path):
""" Returns PIDs of all workers of a given server, which must be already started.
"""
master_proc_pid = int(open(os.path.join(component_path, MISC.PIDFILE)).read())
return sorted(elem.pid for elem in Process(master_proc_pid).children())
def get_info(component_path, format, _now=datetime.utcnow):
component_details = open(os.path.join(component_path, ZATO_INFO_FILE)).read()
out = {
'component_details': component_details,
'component_full_path': component_path,
'component_host': current_host(),
'component_running': False,
'current_time': datetime.now().isoformat(),
'current_time_utc': datetime.utcnow().isoformat(),
'master_proc_connections': None,
'master_proc_pid': None,
'master_proc_name': None,
'master_proc_create_time': None,
'master_proc_create_time_utc': None,
'master_proc_username': None,
'master_proc_workers_no': None,
'master_proc_workers_pids': None,
}
master_proc_pid = None
try:
master_proc_pid = int(open(os.path.join(component_path, MISC.PIDFILE)).read())
except(IOError, ValueError):
# Ok, no such file or it's empty
pass
if master_proc_pid:
out['component_running'] = True
master_proc = Process(master_proc_pid)
workers_pids = sorted(elem.pid for elem in master_proc.children())
now = datetime.fromtimestamp(time(), UTC)
        master_proc_create_time = master_proc.create_time()
        master_proc_create_time_utc = datetime.fromtimestamp(master_proc_create_time, UTC)
        out['master_proc_uptime'] = now - master_proc_create_time_utc
        out['master_proc_uptime_seconds'] = int(out['master_proc_uptime'].total_seconds())
        out['master_proc_connections'] = format_connections(master_proc.connections(), format)
        out['master_proc_pid'] = master_proc.pid
        out['master_proc_create_time'] = datetime.fromtimestamp(master_proc_create_time).isoformat()
        out['master_proc_create_time_utc'] = master_proc_create_time_utc.isoformat()
out['master_proc_username'] = master_proc.username()
out['master_proc_name'] = master_proc.name()
out['master_proc_workers_no'] = len(workers_pids)
out['master_proc_workers_pids'] = workers_pids
for pid in workers_pids:
worker = Process(pid)
worker_create_time = worker.create_time()
worker_create_time_utc = datetime.fromtimestamp(worker_create_time, UTC)
out['worker_{}_uptime'.format(pid)] = now - worker_create_time_utc
out['worker_{}_uptime_seconds'.format(pid)] = int(out['worker_{}_uptime'.format(pid)].total_seconds())
out['worker_{}_create_time'.format(pid)] = datetime.fromtimestamp(worker_create_time).isoformat()
out['worker_{}_create_time_utc'.format(pid)] = worker_create_time_utc.isoformat()
out['worker_{}_connections'.format(pid)] = format_connections(worker.connections(), format)
return out
def format_info(value, format, cols_width=None, dumper=None):
if format in(INFO_FORMAT.DICT, INFO_FORMAT.JSON, INFO_FORMAT.YAML):
value['component_details'] = json_loads(value['component_details'])
if format == INFO_FORMAT.JSON:
return json_dumps(value)
elif format == INFO_FORMAT.YAML:
buff = StringIO()
yaml.dump_all([value], default_flow_style=False, indent=4, Dumper=dumper, stream=buff)
value = buff.getvalue()
buff.close()
return value
elif format == INFO_FORMAT.TEXT:
cols_width = (elem.strip() for elem in cols_width.split(','))
cols_width = [int(elem) for elem in cols_width]
table = Texttable()
table.set_cols_width(cols_width)
# Use text ('t') instead of auto so that boolean values don't get converted into ints
table.set_cols_dtype(['t', 't'])
rows = [['Key', 'Value']]
rows.extend(sorted(value.items()))
table.add_rows(rows)
return table.draw()
else:
return value | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/component_info.py | component_info.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import itertools
# ipaddress
from ipaddress import ip_address, ip_network
# netifaces
from netifaces import AF_INET, ifaddresses as net_ifaddresses, interfaces as net_ifaces
# Python 2/3 compatibility
from builtins import bytes
from future.moves.urllib.parse import urlparse
from six import PY2
# ################################################################################################################################
def to_ip_network(address):
    """ Converts address to a network object assuming it is feasible at all, otherwise returns None.
    """
    try:
        return ip_network(address)
    except ValueError:
        pass
# ################################################################################################################################
def ip_list_from_interface(interface, allow_loopback=False):
""" Return the list of IP address for the given interface, possibly including loopback addresses
"""
addresses = []
af_inet = net_ifaddresses(interface).get(AF_INET)
if af_inet:
_addresses = [elem.get('addr') for elem in af_inet]
if PY2:
_addresses = [elem.decode('utf8') for elem in _addresses]
for address in _addresses:
address = ip_address(address)
if address.is_loopback and not allow_loopback:
continue
addresses.append(address)
return addresses
# ################################################################################################################################
def get_preferred_ip(base_bind, user_prefs):
""" Given user preferences, iterate over all address in all interfaces and check if any matches what users prefer.
Note that preferences can include actual names of interfaces, not only IP or IP ranges.
"""
# First check out if the base address to bind does not already specify a concrete IP.
# If it does, then this will be the preferred one.
parsed = urlparse('https://{}'.format(base_bind))
if parsed.hostname != '0.0.0.0':
return parsed.hostname
# What is preferred
preferred = user_prefs.ip
# What actually exists in the system
current_ifaces = net_ifaces()
# Would be very weird not to have anything, even loopback, but oh well
if not current_ifaces:
return None
current_ifaces.sort()
current_addresses = [net_ifaddresses(elem).get(AF_INET) for elem in current_ifaces]
current_addresses = [[elem.get('addr') for elem in x] for x in current_addresses if x]
current_addresses = list(itertools.chain.from_iterable(current_addresses))
    # Preferences broken out into interfaces and network ranges/IP addresses
pref_interfaces = [elem for elem in preferred if elem in net_ifaces()]
pref_networks = [to_ip_network(elem) for elem in preferred]
pref_networks = [elem for elem in pref_networks if elem]
# If users prefer a named interface and we have it then we need to return its IP
for elem in pref_interfaces:
# If any named interface is found, returns its first IP, if there is any
ip_list = ip_list_from_interface(elem, user_prefs.allow_loopback)
if ip_list:
return str(ip_list[0])
# No address has been found by its interface but perhaps one has been specified explicitly
# or through a network range.
    for current in current_addresses:
        for preferred_network in pref_networks:
            if ip_address(current.decode('utf8') if isinstance(current, bytes) else current) in preferred_network:
                return current
# Ok, still nothing, so we need to find something ourselves
loopback_ip = None
# First let's try the first non-loopback interface.
    for elem in current_ifaces:
        for ip in ip_list_from_interface(elem, True):
            if ip.is_loopback:
                loopback_ip = ip
                continue
            return str(ip)
    # If there is only loopback and we are allowed to use it then so be it
    if user_prefs.allow_loopback and loopback_ip:
        return str(loopback_ip)
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/ipaddress_.py | ipaddress_.py |
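# An illustrative sketch only - it is not called anywhere in this module. The preferences object
# below is made up; real preferences come from server configuration and need to expose the .ip
# and .allow_loopback attributes that get_preferred_ip consults.
def _get_preferred_ip_example():
    from bunch import Bunch
    user_prefs = Bunch(ip=['eth0', '10.0.0.0/8'], allow_loopback=False)
    return get_preferred_ip('0.0.0.0:17010', user_prefs)
# ################################################################################################################################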
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
# ipaddress
from ipaddress import IPv4Address, IPv6Address
# Zato
from zato.common.json_internal import dumps
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
class AuditPII(object):
""" Audit log for personally identifiable information (PII).
"""
def __init__(self):
self._logger = logging.getLogger('zato_audit_pii')
# ################################################################################################################################
    def _log(self, func, cid, op, current_user='', target_user='', result='', extra='', _dumps=dumps):
        # Note that extra defaults to an empty string so we may look up remote_addr only once we know it is a dict.
        if isinstance(extra, dict):
            remote_addr = extra.get('remote_addr')
            if not remote_addr:
                extra['remote_addr'] = ''
            else:
                if isinstance(remote_addr, list) and remote_addr:
                    if isinstance(remote_addr[0], (IPv4Address, IPv6Address)):
                        extra['remote_addr'] = ';'.join(elem.exploded for elem in remote_addr)
entry = {
'cid': cid,
'op': op,
}
if current_user:
entry['current_user'] = current_user
if target_user:
entry['target_user'] = target_user
if result:
entry['result'] = result
if extra:
entry['extra'] = extra
        entry = _dumps(entry)
        func('%s' % entry)
# ################################################################################################################################
def info(self, *args, **kwargs):
self._log(self._logger.info, *args, **kwargs)
# ################################################################################################################################
def warn(self, *args, **kwargs):
        self._log(self._logger.warning, *args, **kwargs)
# ################################################################################################################################
def error(self, *args, **kwargs):
self._log(self._logger.error, *args, **kwargs)
# ################################################################################################################################
# A singleton available everywhere
audit_pii = AuditPII()
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/audit.py | audit.py |
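# An illustrative sketch only - it is not called anywhere in this module and all values below
# are made up. Positional arguments map to cid and op, everything else is keyword-based.
def _audit_pii_example():
    audit_pii.info('cid-123', 'sso.user.login', current_user='user1', result='ok',
        extra={'remote_addr': '127.0.0.1'})
# ################################################################################################################################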
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
# ################################################################################################################################
class SFTPOutput(object):
""" Represents output resulting from execution of SFTP command(s).
"""
__slots__ = 'is_ok', 'cid', 'command', 'command_no', 'stdout', 'stderr', 'details', 'response_time'
def __init__(self, cid, command_no, command=None, is_ok=None, stdout=None, stderr=None, details=None, response_time=None):
        # type: (str, int, str, bool, str, str, str, float) -> None
self.cid = cid
self.command_no = command_no
self.command = command
self.is_ok = is_ok
self.stdout = stdout
self.stderr = stderr
self.details = details
self.response_time = response_time
# ################################################################################################################################
def __str__(self):
return '<{} at {}, cid:{}, command_no:{}, is_ok:{}, rt:{}>'.format(self.__class__.__name__, hex(id(self)), self.cid,
self.command_no, self.is_ok, self.response_time)
# ################################################################################################################################
def strip_stdout_prefix(self):
if self.stdout:
out = []
for line in self.stdout.splitlines():
if not line.startswith('sftp>'):
out.append(line)
self.stdout = '\n'.join(out)
# ################################################################################################################################
def to_dict(self):
# type: () -> dict
return {
'is_ok': self.is_ok,
'cid': self.cid,
'command': self.command,
'command_no': self.command_no,
'stdout': self.stdout,
'stderr': self.stderr,
'details': self.details,
'response_time': self.response_time,
}
# ################################################################################################################################
@staticmethod
def from_dict(data):
# type: (dict) -> SFTPOutput
return SFTPOutput(**data)
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/sftp.py | sftp.py |
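# An illustrative sketch only - it is not called anywhere in this module and all values below
# are made up. It shows the 'sftp>' prefix being stripped and a to_dict/from_dict round-trip.
def _sftp_output_example():
    out = SFTPOutput('cid-123', 1, command='ls .', is_ok=True, stdout='sftp> ls .\nfile1.txt')
    out.strip_stdout_prefix()
    assert SFTPOutput.from_dict(out.to_dict()).stdout == 'file1.txt'
# ################################################################################################################################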
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from collections import OrderedDict
from io import StringIO
from numbers import Number
from sys import maxsize
# Bunch
from bunch import Bunch
# ################################################################################################################################
# SQL ODB
engine_def = '{engine}://{username}:{password}@{host}:{port}/{db_name}'
engine_def_sqlite = 'sqlite:///{sqlite_path}'
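# An illustrative sketch only - it is not called anywhere in this module and the credentials
# below are made up. It shows how engine_def expands into a typical SQLAlchemy URL.
def _engine_def_example():
    url = engine_def.format(
        engine='postgresql', username='zato', password='secret', host='localhost', port=5432, db_name='zatodb')
    assert url == 'postgresql://zato:secret@localhost:5432/zatodb'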
# Convenience access functions and constants.
megabyte = 10 ** 6
# Hook methods whose func.im_func.func_defaults contains this argument will be assumed to have not been overridden by users
# and ServiceStore will be allowed to override them with None so that they will not be called in Service.update_handle
# which significantly improves performance (~30%).
zato_no_op_marker = 'zato_no_op_marker'
SECRET_SHADOW = '******'
# TRACE1 logging level, even more details than DEBUG
TRACE1 = 6
SECONDS_IN_DAY = 86400 # 60 seconds * 60 minutes * 24 hours (and we ignore leap seconds)
scheduler_date_time_format = '%Y-%m-%d %H:%M:%S'
soap_date_time_format = '%Y-%m-%dT%H:%M:%S.%fZ'
# TODO: Classes that have this attribute defined (no matter the value) will not be deployed
# onto servers.
DONT_DEPLOY_ATTR_NAME = 'zato_dont_import'
# A convenient constant used in several places, simplifies passing around
# arguments which are, well, not given (as opposed to being None, an empty string etc.)
ZATO_NOT_GIVEN = b'ZATO_NOT_GIVEN'
# Separates command line arguments in shell commands.
CLI_ARG_SEP = 'ZATO_ZATO_ZATO'
# Also used in a couple of places.
ZATO_OK = 'ZATO_OK'
ZATO_ERROR = 'ZATO_ERROR'
ZATO_WARNING = 'ZATO_WARNING'
ZATO_NONE = 'ZATO_NONE'
ZATO_DEFAULT = 'ZATO_DEFAULT'
ZATO_SEC_USE_RBAC = 'ZATO_SEC_USE_RBAC'
DELEGATED_TO_RBAC = 'Delegated to RBAC'
# Default HTTP method outgoing connections use to ping resources
# TODO: Move it to MISC
DEFAULT_HTTP_PING_METHOD = 'HEAD'
# Default size of an outgoing HTTP connection's pool (plain, SOAP, any).
# This is a per-outconn setting
# TODO: Move it to MISC
DEFAULT_HTTP_POOL_SIZE = 20
# Used when there's a need for encrypting/decrypting well-known data.
# TODO: Move it to MISC
ZATO_CRYPTO_WELL_KNOWN_DATA = 'ZATO'
# Used if it could not be established what remote address a request came from
NO_REMOTE_ADDRESS = '(None)'
# Pattern matching order
TRUE_FALSE = 'true_false'
FALSE_TRUE = 'false_true'
simple_types = (bytes, str, dict, list, tuple, bool, Number)
# ################################################################################################################################
# ################################################################################################################################
generic_attrs = ('is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', 'rate_limit_check_parent_def',
'is_audit_log_sent_active', 'is_audit_log_received_active', 'max_len_messages_sent', 'max_len_messages_received',
'max_bytes_per_message_sent', 'max_bytes_per_message_received', 'hl7_version', 'json_path', 'data_encoding',
'max_msg_size', 'read_buffer_size', 'recv_timeout', 'logging_level', 'should_log_messages', 'start_seq', 'end_seq',
'max_wait_time')
# ################################################################################################################################
# ################################################################################################################################
# These are used by web-admin only because servers and scheduler use sql.conf
ping_queries = {
'db2': 'SELECT current_date FROM sysibm.sysdummy1',
'mssql': 'SELECT 1',
'mysql+pymysql': 'SELECT 1+1',
'oracle': 'SELECT 1 FROM dual',
'postgresql': 'SELECT 1',
'postgresql+pg8000': 'SELECT 1',
'sqlite': 'SELECT 1',
}
engine_display_name = {
'db2': 'DB2',
'mssql': 'MS SQL',
'zato+mssql1': 'MS SQL (Direct)',
'mysql+pymysql': 'MySQL',
'oracle': 'Oracle',
'postgresql': 'PostgreSQL',
'postgresql+pg8000': 'PostgreSQL',
'sqlite': 'SQLite',
}
# ################################################################################################################################
# ################################################################################################################################
# All URL types Zato understands.
class URL_TYPE:
SOAP = 'soap'
PLAIN_HTTP = 'plain_http'
def __iter__(self):
return iter((self.SOAP, self.PLAIN_HTTP))
# ################################################################################################################################
# ################################################################################################################################
# Whether WS-Security passwords are transmitted in clear-text or not.
ZATO_WSS_PASSWORD_CLEAR_TEXT = Bunch(name='clear_text', label='Clear text')
ZATO_WSS_PASSWORD_TYPES = {
ZATO_WSS_PASSWORD_CLEAR_TEXT.name:ZATO_WSS_PASSWORD_CLEAR_TEXT.label,
}
# ################################################################################################################################
# ################################################################################################################################
ZATO_FIELD_OPERATORS = {
'is-equal-to': '==',
'is-not-equal-to': '!=',
}
# ################################################################################################################################
# ################################################################################################################################
ZMQ_OUTGOING_TYPES = ('PUSH', 'PUB')
# ################################################################################################################################
# ################################################################################################################################
class ZMQ:
PULL = 'PULL'
PUSH = 'PUSH'
PUB = 'PUB'
SUB = 'SUB'
MDP = 'MDP'
MDP01 = MDP + '01'
MDP01_HUMAN = 'Majordomo 0.1 (MDP)'
class POOL_STRATEGY_NAME:
SINGLE = 'single'
UNLIMITED = 'unlimited'
class SERVICE_SOURCE_NAME:
ZATO = 'zato'
MDP01 = 'mdp01'
CHANNEL = OrderedDict({
PULL: 'Pull',
SUB: 'Sub',
MDP01: MDP01_HUMAN,
})
OUTGOING = OrderedDict({
PUSH: 'Push',
PUB: 'Pub',
})
class METHOD_NAME:
BIND = 'bind'
CONNECT = 'connect'
METHOD = {
METHOD_NAME.BIND: 'Bind',
METHOD_NAME.CONNECT: 'Connect',
}
POOL_STRATEGY = OrderedDict({
POOL_STRATEGY_NAME.SINGLE: 'Single',
POOL_STRATEGY_NAME.UNLIMITED: 'Unlimited',
})
SERVICE_SOURCE = OrderedDict({
SERVICE_SOURCE_NAME.ZATO: 'Zato',
SERVICE_SOURCE_NAME.MDP01: MDP01_HUMAN,
})
# ################################################################################################################################
# ################################################################################################################################
ZATO_ODB_POOL_NAME = 'ZATO_ODB'
# ################################################################################################################################
# ################################################################################################################################
SOAP_VERSIONS = ('1.1', '1.2')
SOAP_CHANNEL_VERSIONS = ('1.1',)
# ################################################################################################################################
# ################################################################################################################################
class SEARCH:
class ES:
class DEFAULTS:
BODY_AS = 'POST'
HOSTS = '127.0.0.1:9200\n'
class SOLR:
class DEFAULTS:
ADDRESS = 'http://127.0.0.1:8983/solr'
PING_PATH = '/solr/admin/ping'
TIMEOUT = '10'
POOL_SIZE = '5'
class ZATO:
class DEFAULTS:
PAGE_SIZE = 50
PAGINATE_THRESHOLD = PAGE_SIZE + 1
# ################################################################################################################################
# ################################################################################################################################
class SEC_DEF_TYPE:
APIKEY = 'apikey'
AWS = 'aws'
BASIC_AUTH = 'basic_auth'
JWT = 'jwt'
NTLM = 'ntlm'
OAUTH = 'oauth'
TLS_CHANNEL_SEC = 'tls_channel_sec'
TLS_KEY_CERT = 'tls_key_cert'
WSS = 'wss'
VAULT = 'vault_conn_sec'
XPATH_SEC = 'xpath_sec'
# ################################################################################################################################
# ################################################################################################################################
SEC_DEF_TYPE_NAME = {
SEC_DEF_TYPE.APIKEY: 'API key',
SEC_DEF_TYPE.AWS: 'AWS',
SEC_DEF_TYPE.BASIC_AUTH: 'HTTP Basic Auth',
SEC_DEF_TYPE.JWT: 'JWT',
SEC_DEF_TYPE.NTLM: 'NTLM',
SEC_DEF_TYPE.OAUTH: 'OAuth 1.0',
SEC_DEF_TYPE.TLS_CHANNEL_SEC: 'TLS channel',
SEC_DEF_TYPE.TLS_KEY_CERT: 'TLS key/cert',
SEC_DEF_TYPE.WSS: 'WS-Security',
SEC_DEF_TYPE.VAULT: 'Vault',
SEC_DEF_TYPE.XPATH_SEC: 'XPath',
}
# ################################################################################################################################
# ################################################################################################################################
class AUTH_RESULT:
class BASIC_AUTH:
INVALID_PREFIX = 'invalid-prefix'
NO_AUTH = 'no-auth'
# ################################################################################################################################
# ################################################################################################################################
DEFAULT_STATS_SETTINGS = {
'scheduler_per_minute_aggr_interval':60,
'scheduler_raw_times_interval':90,
'scheduler_raw_times_batch':99999,
'atttention_slow_threshold':2000,
'atttention_top_threshold':10,
}
# ################################################################################################################################
# ################################################################################################################################
class BATCH_DEFAULTS:
PAGE_NO = 1
SIZE = 25
MAX_SIZE = 1000
# ################################################################################################################################
# ################################################################################################################################
class MSG_SOURCE:
DUPLEX = 'duplex'
# ################################################################################################################################
# ################################################################################################################################
class NameId:
""" Wraps both an attribute's name and its ID.
"""
def __init__(self, name, id=None):
self.name = name
self.id = id or name
def __repr__(self):
return '<{} at {}; name={}; id={}>'.format(self.__class__.__name__, hex(id(self)), self.name, self.id)
# ################################################################################################################################
# ################################################################################################################################
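# An illustrative sketch only - it is not called anywhere in this module. NameId pairs a
# human-readable label with a wire-level ID, defaulting the ID to the name if none is given.
def _name_id_example():
    elem = NameId('HTTP Basic Auth', 'basic_auth')
    assert (elem.name, elem.id) == ('HTTP Basic Auth', 'basic_auth')
    assert NameId('SSLv23').id == 'SSLv23'
# ################################################################################################################################
# ################################################################################################################################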
class NotGiven:
pass # A marker for lazily-initialized attributes
# ################################################################################################################################
# ################################################################################################################################
class Attrs(type):
""" A container for class attributes that can be queried for an existence
of an attribute using the .has class-method.
"""
attrs = NotGiven
@classmethod
def has(cls, attr):
if cls.attrs is NotGiven:
cls.attrs = []
for cls_attr in dir(cls):
if cls_attr == cls_attr.upper():
cls.attrs.append(getattr(cls, cls_attr))
return attr in cls.attrs
# ################################################################################################################################
# ################################################################################################################################
class DATA_FORMAT(Attrs):
CSV = 'csv'
DICT = 'dict'
HL7 = 'hl7'
JSON = 'json'
POST = 'post'
SOAP = 'soap'
XML = 'xml'
def __iter__(self):
# Note that DICT and other attributes aren't included because they're never exposed to the external world as-is,
        # at most they may be used so that services can invoke each other directly
return iter((self.XML, self.JSON, self.CSV, self.POST, self.HL7))
# ################################################################################################################################
# ################################################################################################################################
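# An illustrative sketch only - it is not called anywhere in this module. The .has class-method
# comes from Attrs, which collects all the upper-case attributes of the class on first use.
def _data_format_example():
    assert DATA_FORMAT.has('json') is True
    assert DATA_FORMAT.has('yaml') is False
# ################################################################################################################################
# ################################################################################################################################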
class DEPLOYMENT_STATUS(Attrs):
DEPLOYED = 'deployed'
AWAITING_DEPLOYMENT = 'awaiting-deployment'
IGNORED = 'ignored'
# ################################################################################################################################
# ################################################################################################################################
class SERVER_JOIN_STATUS(Attrs):
ACCEPTED = 'accepted'
# ################################################################################################################################
# ################################################################################################################################
class SERVER_UP_STATUS(Attrs):
RUNNING = 'running'
CLEAN_DOWN = 'clean-down'
# ################################################################################################################################
# ################################################################################################################################
class CACHE:
API_USERNAME = 'pub.zato.cache'
class TYPE:
BUILTIN = 'builtin'
MEMCACHED = 'memcached'
class BUILTIN_KV_DATA_TYPE:
STR = NameId('String/unicode', 'str')
INT = NameId('Integer', 'int')
def __iter__(self):
return iter((self.STR, self.INT))
class STATE_CHANGED:
CLEAR = 'CLEAR'
DELETE = 'DELETE'
DELETE_BY_PREFIX = 'DELETE_BY_PREFIX'
        DELETE_BY_SUFFIX = 'DELETE_BY_SUFFIX'
DELETE_BY_REGEX = 'DELETE_BY_REGEX'
DELETE_CONTAINS = 'DELETE_CONTAINS'
DELETE_NOT_CONTAINS = 'DELETE_NOT_CONTAINS'
DELETE_CONTAINS_ALL = 'DELETE_CONTAINS_ALL'
DELETE_CONTAINS_ANY = 'DELETE_CONTAINS_ANY'
EXPIRE = 'EXPIRE'
EXPIRE_BY_PREFIX = 'EXPIRE_BY_PREFIX'
EXPIRE_BY_SUFFIX = 'EXPIRE_BY_SUFFIX'
EXPIRE_BY_REGEX = 'EXPIRE_BY_REGEX'
EXPIRE_CONTAINS = 'EXPIRE_CONTAINS'
EXPIRE_NOT_CONTAINS = 'EXPIRE_NOT_CONTAINS'
EXPIRE_CONTAINS_ALL = 'EXPIRE_CONTAINS_ALL'
EXPIRE_CONTAINS_ANY = 'EXPIRE_CONTAINS_ANY'
GET = 'GET'
SET = 'SET'
SET_BY_PREFIX = 'SET_BY_PREFIX'
SET_BY_SUFFIX = 'SET_BY_SUFFIX'
SET_BY_REGEX = 'SET_BY_REGEX'
SET_CONTAINS = 'SET_CONTAINS'
SET_NOT_CONTAINS = 'SET_NOT_CONTAINS'
SET_CONTAINS_ALL = 'SET_CONTAINS_ALL'
SET_CONTAINS_ANY = 'SET_CONTAINS_ANY'
class DEFAULT:
MAX_SIZE = 10000
MAX_ITEM_SIZE = 10000 # In characters for string/unicode, bytes otherwise
class PERSISTENT_STORAGE:
NO_PERSISTENT_STORAGE = NameId('No persistent storage', 'no-persistent-storage')
SQL = NameId('SQL', 'sql')
def __iter__(self):
return iter((self.NO_PERSISTENT_STORAGE, self.SQL))
class SYNC_METHOD:
NO_SYNC = NameId('No synchronization', 'no-sync')
IN_BACKGROUND = NameId('In background', 'in-background')
def __iter__(self):
return iter((self.NO_SYNC, self.IN_BACKGROUND))
# ################################################################################################################################
# ################################################################################################################################
class KVDB(Attrs):
SEPARATOR = ':::'
DICTIONARY_ITEM = 'zato:kvdb:data-dict:item'
    DICTIONARY_ITEM_ID = DICTIONARY_ITEM + ':id' # ID of the last created dictionary item, always increasing.
LOCK_PREFIX = 'zato:lock:'
LOCK_SERVER_PREFIX = '{}server:'.format(LOCK_PREFIX)
LOCK_SERVER_ALREADY_DEPLOYED = '{}already-deployed:'.format(LOCK_SERVER_PREFIX)
LOCK_SERVER_STARTING = '{}starting:'.format(LOCK_SERVER_PREFIX)
LOCK_PACKAGE_PREFIX = '{}package:'.format(LOCK_PREFIX)
LOCK_PACKAGE_UPLOADING = '{}uploading:'.format(LOCK_PACKAGE_PREFIX)
LOCK_PACKAGE_ALREADY_UPLOADED = '{}already-uploaded:'.format(LOCK_PACKAGE_PREFIX)
LOCK_DELIVERY = '{}delivery:'.format(LOCK_PREFIX)
LOCK_DELIVERY_AUTO_RESUBMIT = '{}auto-resubmit:'.format(LOCK_DELIVERY)
LOCK_SERVICE_PREFIX = '{}service:'.format(LOCK_PREFIX)
LOCK_CONFIG_PREFIX = '{}config:'.format(LOCK_PREFIX)
LOCK_FANOUT_PATTERN = '{}fanout:{{}}'.format(LOCK_PREFIX)
LOCK_PARALLEL_EXEC_PATTERN = '{}parallel-exec:{{}}'.format(LOCK_PREFIX)
LOCK_ASYNC_INVOKE_WITH_TARGET_PATTERN = '{}async-invoke-with-pattern:{{}}:{{}}'.format(LOCK_PREFIX)
TRANSLATION = 'zato:kvdb:data-dict:translation'
TRANSLATION_ID = TRANSLATION + ':id'
SERVICE_USAGE = 'zato:stats:service:usage:'
SERVICE_TIME_BASIC = 'zato:stats:service:time:basic:'
SERVICE_TIME_RAW = 'zato:stats:service:time:raw:'
SERVICE_TIME_RAW_BY_MINUTE = 'zato:stats:service:time:raw-by-minute:'
SERVICE_TIME_AGGREGATED_BY_MINUTE = 'zato:stats:service:time:aggr-by-minute:'
SERVICE_TIME_AGGREGATED_BY_HOUR = 'zato:stats:service:time:aggr-by-hour:'
SERVICE_TIME_AGGREGATED_BY_DAY = 'zato:stats:service:time:aggr-by-day:'
SERVICE_TIME_AGGREGATED_BY_MONTH = 'zato:stats:service:time:aggr-by-month:'
SERVICE_TIME_SLOW = 'zato:stats:service:time:slow:'
SERVICE_SUMMARY_PREFIX_PATTERN = 'zato:stats:service:summary:{}:'
SERVICE_SUMMARY_BY_DAY = 'zato:stats:service:summary:by-day:'
SERVICE_SUMMARY_BY_WEEK = 'zato:stats:service:summary:by-week:'
SERVICE_SUMMARY_BY_MONTH = 'zato:stats:service:summary:by-month:'
SERVICE_SUMMARY_BY_YEAR = 'zato:stats:service:summary:by-year:'
ZMQ_CONFIG_READY_PREFIX = 'zato:zmq.config.ready.{}'
REQ_RESP_SAMPLE = 'zato:req-resp:sample:'
RESP_SLOW = 'zato:resp:slow:'
DELIVERY_PREFIX = 'zato:delivery:'
DELIVERY_BY_TARGET_PREFIX = '{}by-target:'.format(DELIVERY_PREFIX)
FANOUT_COUNTER_PATTERN = 'zato:fanout:counter:{}'
FANOUT_DATA_PATTERN = 'zato:fanout:data:{}'
PARALLEL_EXEC_COUNTER_PATTERN = 'zato:parallel-exec:counter:{}'
PARALLEL_EXEC_DATA_PATTERN = 'zato:parallel-exec:data:{}'
ASYNC_INVOKE_PROCESSED_FLAG_PATTERN = 'zato:async-invoke-with-pattern:processed:{}:{}'
ASYNC_INVOKE_PROCESSED_FLAG = '1'
# ################################################################################################################################
# ################################################################################################################################
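# An illustrative sketch only - it is not called anywhere in this module and the CID is made up.
# It shows how the static lock prefixes and the pattern-based keys above expand in practice.
def _kvdb_key_example():
    assert KVDB.LOCK_SERVER_STARTING == 'zato:lock:server:starting:'
    assert KVDB.LOCK_FANOUT_PATTERN.format('abc123') == 'zato:lock:fanout:abc123'
# ################################################################################################################################
# ################################################################################################################################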
class SCHEDULER:
InitialSleepTime = 5
DefaultHost = '127.0.0.1'
DefaultPort = 31530
class JOB_TYPE(Attrs):
ONE_TIME = 'one_time'
INTERVAL_BASED = 'interval_based'
CRON_STYLE = 'cron_style'
class ON_MAX_RUNS_REACHED:
DELETE = 'delete'
INACTIVATE = 'inactivate'
# ################################################################################################################################
# ################################################################################################################################
class CHANNEL(Attrs):
AMQP = 'amqp'
DELIVERY = 'delivery'
FANOUT_CALL = 'fanout-call'
FANOUT_ON_FINAL = 'fanout-on-final'
FANOUT_ON_TARGET = 'fanout-on-target'
HTTP_SOAP = 'http-soap'
INTERNAL_CHECK = 'internal-check'
INVOKE = 'invoke'
INVOKE_ASYNC = 'invoke-async'
INVOKE_ASYNC_CALLBACK = 'invoke-async-callback'
IPC = 'ipc'
JSON_RPC = 'json-rpc'
NEW_INSTANCE = 'new-instance'
NOTIFIER_RUN = 'notifier-run'
NOTIFIER_TARGET = 'notifier-target'
PARALLEL_EXEC_CALL = 'parallel-exec-call'
PARALLEL_EXEC_ON_TARGET = 'parallel-exec-on-target'
PUBLISH = 'publish'
SCHEDULER = 'scheduler'
SCHEDULER_AFTER_ONE_TIME = 'scheduler-after-one-time'
SERVICE = 'service'
SSO_USER = 'sso-user'
STARTUP_SERVICE = 'startup-service'
URL_DATA = 'url-data'
WEB_SOCKET = 'web-socket'
IBM_MQ = 'websphere-mq'
WORKER = 'worker'
ZMQ = 'zmq'
# ################################################################################################################################
# ################################################################################################################################
class CONNECTION:
CHANNEL = 'channel'
OUTGOING = 'outgoing'
# ################################################################################################################################
# ################################################################################################################################
class INVOCATION_TARGET(Attrs):
CHANNEL_AMQP = 'channel-amqp'
CHANNEL_WMQ = 'channel-wmq'
CHANNEL_ZMQ = 'channel-zmq'
OUTCONN_AMQP = 'outconn-amqp'
OUTCONN_WMQ = 'outconn-wmq'
OUTCONN_ZMQ = 'outconn-zmq'
SERVICE = 'service'
# ################################################################################################################################
# ################################################################################################################################
class DELIVERY_STATE(Attrs):
IN_DOUBT = 'in-doubt'
IN_PROGRESS_ANY = 'in-progress-any' # A wrapper for all in-progress-* states
IN_PROGRESS_RESUBMITTED = 'in-progress-resubmitted'
IN_PROGRESS_RESUBMITTED_AUTO = 'in-progress-resubmitted-auto'
IN_PROGRESS_STARTED = 'in-progress'
IN_PROGRESS_TARGET_OK = 'in-progress-target-ok'
IN_PROGRESS_TARGET_FAILURE = 'in-progress-target-failure'
CONFIRMED = 'confirmed'
FAILED = 'failed'
UNKNOWN = 'unknown'
# ################################################################################################################################
# ################################################################################################################################
class DELIVERY_CALLBACK_INVOKER(Attrs):
SOURCE = 'source'
TARGET = 'target'
# ################################################################################################################################
# ################################################################################################################################
class BROKER:
DEFAULT_EXPIRATION = 15 # In seconds
# ################################################################################################################################
# ################################################################################################################################
class MISC:
    DEFAULT_HTTP_TIMEOUT = 10
OAUTH_SIG_METHODS = ['HMAC-SHA1', 'PLAINTEXT']
PIDFILE = 'pidfile'
SEPARATOR = ':::'
# ################################################################################################################################
# ################################################################################################################################
class HTTP_SOAP:
UNUSED_MARKER = 'unused'
class ACCEPT:
ANY = '*/*'
ANY_INTERNAL = 'haany'
class METHOD:
ANY_INTERNAL = 'hmany'
# ################################################################################################################################
# ################################################################################################################################
class ADAPTER_PARAMS:
APPLY_AFTER_REQUEST = 'apply-after-request'
APPLY_BEFORE_REQUEST = 'apply-before-request'
# ################################################################################################################################
# ################################################################################################################################
class INFO_FORMAT:
DICT = 'dict'
TEXT = 'text'
JSON = 'json'
YAML = 'yaml'
# ################################################################################################################################
# ################################################################################################################################
class MSG_MAPPER:
DICT_TO_DICT = 'dict-to-dict'
DICT_TO_XML = 'dict-to-xml'
XML_TO_DICT = 'xml-to-dict'
XML_TO_XML = 'xml-to-xml'
# ################################################################################################################################
# ################################################################################################################################
class CLOUD:
class AWS:
class S3:
class STORAGE_CLASS:
STANDARD = 'STANDARD'
REDUCED_REDUNDANCY = 'REDUCED_REDUNDANCY'
GLACIER = 'GLACIER'
DEFAULT = STANDARD
def __iter__(self):
return iter((self.STANDARD, self.REDUCED_REDUNDANCY, self.GLACIER))
class DEFAULTS:
ADDRESS = 'https://s3.amazonaws.com/'
CONTENT_TYPE = 'application/octet-stream' # Taken from boto.s3.key.Key.DefaultContentType
DEBUG_LEVEL = 0
POOL_SIZE = 5
PROVIDER = 'aws'
# ################################################################################################################################
# ################################################################################################################################
class URL_PARAMS_PRIORITY:
PATH_OVER_QS = 'path-over-qs'
QS_OVER_PATH = 'qs-over-path'
DEFAULT = QS_OVER_PATH
    def __iter__(self):
        return iter((self.PATH_OVER_QS, self.QS_OVER_PATH, self.DEFAULT))
# ################################################################################################################################
# ################################################################################################################################
class PARAMS_PRIORITY:
CHANNEL_PARAMS_OVER_MSG = 'channel-params-over-msg'
MSG_OVER_CHANNEL_PARAMS = 'msg-over-channel-params'
DEFAULT = CHANNEL_PARAMS_OVER_MSG
def __iter__(self):
return iter((self.CHANNEL_PARAMS_OVER_MSG, self.MSG_OVER_CHANNEL_PARAMS, self.DEFAULT))
# ################################################################################################################################
# ################################################################################################################################
class NONCE_STORE:
KEY_PATTERN = 'zato:nonce-store:{}:{}' # E.g. zato:nonce-store:oauth:27
DEFAULT_MAX_LOG = 25000
# ################################################################################################################################
# ################################################################################################################################
class MSG_PATTERN_TYPE:
JSON_POINTER = NameId('JSONPointer', 'json-pointer')
XPATH = NameId('XPath', 'xpath')
def __iter__(self):
return iter((self.JSON_POINTER, self.XPATH))
# ################################################################################################################################
# ################################################################################################################################
class HTTP_SOAP_SERIALIZATION_TYPE:
STRING_VALUE = NameId('String', 'string')
SUDS = NameId('Suds', 'suds')
DEFAULT = STRING_VALUE
def __iter__(self):
return iter((self.STRING_VALUE, self.SUDS))
# ################################################################################################################################
# ################################################################################################################################
class PUBSUB:
SKIPPED_PATTERN_MATCHING = '<skipped>'
    # All float values are converted to strings of this precision
    # to make sure pg8000 does not round the floats, losing precision in the process.
FLOAT_STRING_CONVERT = '{:.7f}'
class DATA_FORMAT:
CSV = NameId('CSV', DATA_FORMAT.CSV)
DICT = NameId('Dict', DATA_FORMAT.DICT)
JSON = NameId('JSON', DATA_FORMAT.JSON)
POST = NameId('POST', DATA_FORMAT.POST)
SOAP = NameId('SOAP', DATA_FORMAT.SOAP)
XML = NameId('XML', DATA_FORMAT.XML)
def __iter__(self):
return iter((self.CSV, self.DICT, self.JSON, self.POST, self.SOAP, self.XML))
class HOOK_TYPE:
BEFORE_PUBLISH = 'pubsub_before_publish'
BEFORE_DELIVERY = 'pubsub_before_delivery'
ON_OUTGOING_SOAP_INVOKE = 'pubsub_on_topic_outgoing_soap_invoke'
ON_SUBSCRIBED = 'pubsub_on_subscribed'
ON_UNSUBSCRIBED = 'pubsub_on_unsubscribed'
class HOOK_ACTION:
SKIP = 'skip'
DELETE = 'delete'
DELIVER = 'deliver'
def __iter__(self):
return iter((self.SKIP, self.DELETE, self.DELIVER))
class DELIVER_BY:
PRIORITY = 'priority'
EXT_PUB_TIME = 'ext_pub_time'
PUB_TIME = 'pub_time'
def __iter__(self):
return iter((self.PRIORITY, self.EXT_PUB_TIME, self.PUB_TIME))
class ON_NO_SUBS_PUB:
ACCEPT = NameId('Accept', 'accept')
DROP = NameId('Drop', 'drop')
class DEFAULT:
DATA_FORMAT = 'text'
MIME_TYPE = 'text/plain'
TOPIC_MAX_DEPTH_GD = 10000
TOPIC_MAX_DEPTH_NON_GD = 1000
DEPTH_CHECK_FREQ = 100
EXPIRATION = 2147483647 * 1000 # (2 ** 31 - 1) * 1000 milliseconds = around 70 years
GET_BATCH_SIZE = 50
DELIVERY_BATCH_SIZE = 500
DELIVERY_MAX_RETRY = 123456789
DELIVERY_MAX_SIZE = 500000 # 500 kB
PUB_BUFFER_SIZE_GD = 0
TASK_SYNC_INTERVAL = 500
TASK_DELIVERY_INTERVAL = 2000
WAIT_TIME_SOCKET_ERROR = 10
WAIT_TIME_NON_SOCKET_ERROR = 3
INTERNAL_ENDPOINT_NAME = 'zato.pubsub.default.internal.endpoint'
ON_NO_SUBS_PUB = 'accept'
SK_OPAQUE = ('deliver_to_sk', 'reply_to_sk')
class SERVICE_SUBSCRIBER:
NAME = 'zato.pubsub.service.endpoint'
TOPICS_ALLOWED = 'sub=/zato/s/to/*'
class TOPIC_PATTERN:
TO_SERVICE = '/zato/s/to/{}'
class QUEUE_TYPE:
STAGING = 'staging'
CURRENT = 'current'
def __iter__(self):
return iter((self.STAGING, self.CURRENT))
class GD_CHOICE:
DEFAULT_PER_TOPIC = NameId('----------', 'default-per-topic')
YES = NameId('Yes', 'true')
NO = NameId('No', 'false')
def __iter__(self):
return iter((self.DEFAULT_PER_TOPIC, self.YES, self.NO))
class QUEUE_ACTIVE_STATUS:
FULLY_ENABLED = NameId('Pub and sub', 'pub-sub')
PUB_ONLY = NameId('Pub only', 'pub-only')
SUB_ONLY = NameId('Sub only', 'sub-only')
DISABLED = NameId('Disabled', 'disabled')
def __iter__(self):
return iter((self.FULLY_ENABLED, self.PUB_ONLY, self.SUB_ONLY, self.DISABLED))
class DELIVERY_METHOD:
NOTIFY = NameId('Notify', 'notify')
PULL = NameId('Pull', 'pull')
WEB_SOCKET = NameId('WebSocket', 'web-socket')
def __iter__(self):
# Note that WEB_SOCKET is not included because it's not shown in GUI for subscriptions
return iter((self.NOTIFY, self.PULL))
class DELIVERY_STATUS:
DELIVERED = 1
INITIALIZED = 2
TO_DELETE = 3
WAITING_FOR_CONFIRMATION = 4
class PRIORITY:
DEFAULT = 5
MIN = 1
MAX = 9
class ROLE:
PUBLISHER = NameId('Publisher', 'pub-only')
SUBSCRIBER = NameId('Subscriber', 'sub-only')
PUBLISHER_SUBSCRIBER = NameId('Publisher/subscriber', 'pub-sub')
def __iter__(self):
return iter((self.PUBLISHER, self.SUBSCRIBER, self.PUBLISHER_SUBSCRIBER))
class RunDeliveryStatus:
class StatusCode:
OK = 1
Warning = 2
Error = 3
class ReasonCode:
Error_IO = 1
Error_Other = 2
No_Msg = 3
class ENDPOINT_TYPE:
AMQP = NameId('AMQP', 'amqp')
FILES = NameId('Files', 'files')
FTP = NameId('FTP', 'ftp')
IMAP = NameId('IMAP', 'imap')
INTERNAL = NameId('Internal', 'internal')
REST = NameId('REST', 'rest')
SERVICE = NameId('Service', 'srv')
SMS_TWILIO = NameId('SMS - Twilio', 'smstw')
SMTP = NameId('SMTP', 'smtp')
SOAP = NameId('SOAP', 'soap')
SQL = NameId('SQL', 'sql')
WEB_SOCKETS = NameId('WebSockets', 'wsx')
def __iter__(self):
            return iter((self.AMQP.id, self.INTERNAL.id, self.REST.id, self.SERVICE.id, self.SOAP.id, self.WEB_SOCKETS.id))
class REDIS:
META_TOPIC_LAST_KEY = 'zato.ps.meta.topic.last.%s.%s'
META_ENDPOINT_PUB_KEY = 'zato.ps.meta.endpoint.pub.%s.%s'
META_ENDPOINT_SUB_KEY = 'zato.ps.meta.endpoint.sub.%s.%s'
class MIMEType:
Zato = 'application/vnd.zato.ps.msg'
# ################################################################################################################################
# ################################################################################################################################
class _PUBSUB_SUBSCRIBE_CLASS:
classes = {
PUBSUB.ENDPOINT_TYPE.AMQP.id: 'zato.pubsub.subscription.subscribe-amqp',
PUBSUB.ENDPOINT_TYPE.REST.id: 'zato.pubsub.subscription.subscribe-rest',
PUBSUB.ENDPOINT_TYPE.SERVICE.id: 'zato.pubsub.subscription.subscribe-service',
PUBSUB.ENDPOINT_TYPE.SOAP.id: 'zato.pubsub.subscription.subscribe-soap',
PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id: 'zato.pubsub.subscription.create-wsx-subscription',
}
@staticmethod
def get(name):
return _PUBSUB_SUBSCRIBE_CLASS.classes[name]
# ################################################################################################################################
# ################################################################################################################################
PUBSUB.SUBSCRIBE_CLASS = _PUBSUB_SUBSCRIBE_CLASS
# ################################################################################################################################
# ################################################################################################################################
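# An illustrative sketch only - it is not called anywhere in this module. It shows the lookup of
# the subscribe service for an endpoint type as well as the fixed-precision float conversion
# that PUBSUB.FLOAT_STRING_CONVERT provides.
def _pubsub_example():
    assert PUBSUB.SUBSCRIBE_CLASS.get(PUBSUB.ENDPOINT_TYPE.REST.id) == 'zato.pubsub.subscription.subscribe-rest'
    assert PUBSUB.FLOAT_STRING_CONVERT.format(2.5) == '2.5000000'
# ################################################################################################################################
# ################################################################################################################################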
# Not to be made available externally yet.
skip_endpoint_types = (
PUBSUB.ENDPOINT_TYPE.FTP.id,
PUBSUB.ENDPOINT_TYPE.INTERNAL.id,
PUBSUB.ENDPOINT_TYPE.IMAP.id,
PUBSUB.ENDPOINT_TYPE.SERVICE.id,
PUBSUB.ENDPOINT_TYPE.SMS_TWILIO.id,
PUBSUB.ENDPOINT_TYPE.SMTP.id,
PUBSUB.ENDPOINT_TYPE.SQL.id,
    PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id, # This will never be made available because WSX clients need to use APIs to subscribe
)
# ################################################################################################################################
# ################################################################################################################################
class EMAIL:
class DEFAULT:
TIMEOUT = 10
PING_ADDRESS = 'invalid@invalid'
GET_CRITERIA = 'UNSEEN'
IMAP_DEBUG_LEVEL = 0
class IMAP:
class MODE:
PLAIN = 'plain'
SSL = 'ssl'
def __iter__(self):
return iter((self.PLAIN, self.SSL))
class SMTP:
class MODE:
PLAIN = 'plain'
SSL = 'ssl'
STARTTLS = 'starttls'
def __iter__(self):
return iter((self.PLAIN, self.SSL, self.STARTTLS))
# ################################################################################################################################
# ################################################################################################################################
class NOTIF:
class DEFAULT:
CHECK_INTERVAL = 5 # In seconds
CHECK_INTERVAL_SQL = 600 # In seconds
NAME_PATTERN = '**'
GET_DATA_PATTERN = '**'
class TYPE:
SQL = 'sql'
# ################################################################################################################################
# ################################################################################################################################
class CASSANDRA:
class DEFAULT:
CONTACT_POINTS = '127.0.0.1\n'
EXEC_SIZE = 2
PORT = 9042
PROTOCOL_VERSION = 4
KEYSPACE = 'not-set'
class COMPRESSION:
DISABLED = 'disabled'
ENABLED_NEGOTIATED = 'enabled-negotiated'
ENABLED_LZ4 = 'enabled-lz4'
ENABLED_SNAPPY = 'enabled-snappy'
# ################################################################################################################################
# ################################################################################################################################
class TLS:
# All the BEGIN/END blocks we don't want to store in logs.
# Taken from https://github.com/openssl/openssl/blob/master/crypto/pem/pem.h
# Note that the last one really is empty to denote 'BEGIN PRIVATE KEY' alone.
BEGIN_END = ('ANY ', 'RSA ', 'DSA ', 'EC ', 'ENCRYPTED ', '')
# Directories in a server's config/tls directory keeping the material
DIR_CA_CERTS = 'ca-certs'
DIR_KEYS_CERTS = 'keys-certs'
class DEFAULT:
VERSION = 'SSLv23'
CIPHERS = 'ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:' \
'ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:' \
'ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256'
class VERSION:
SSLv23 = NameId('SSLv23')
TLSv1 = NameId('TLSv1')
TLSv1_1 = NameId('TLSv1_1')
TLSv1_2 = NameId('TLSv1_2')
def __iter__(self):
return iter((self.SSLv23, self.TLSv1, self.TLSv1_1, self.TLSv1_2))
class CERT_VALIDATE:
CERT_NONE = NameId('Disabled', 'CERT_NONE')
CERT_OPTIONAL = NameId('Optional', 'CERT_OPTIONAL')
CERT_REQUIRED = NameId('Required', 'CERT_REQUIRED')
def __iter__(self):
return iter((self.CERT_NONE, self.CERT_OPTIONAL, self.CERT_REQUIRED))
class RATE_LIMIT:
class TYPE:
APPROXIMATE = NameId('Approximate', 'APPROXIMATE')
EXACT = NameId('Exact', 'EXACT')
def __iter__(self):
return iter((self.APPROXIMATE, self.EXACT))
class OBJECT_TYPE:
HTTP_SOAP = 'http_soap'
SERVICE = 'service'
SEC_DEF = 'sec_def'
SSO_USER = 'sso_user'
# ################################################################################################################################
# ################################################################################################################################
class ODOO:
class CLIENT_TYPE:
OPENERP_CLIENT_LIB = 'openerp-client-lib'
class DEFAULT:
PORT = 8069
POOL_SIZE = 3
class PROTOCOL:
XML_RPC = NameId('XML-RPC', 'xmlrpc')
XML_RPCS = NameId('XML-RPCS', 'xmlrpcs')
JSON_RPC = NameId('JSON-RPC', 'jsonrpc')
JSON_RPCS = NameId('JSON-RPCS', 'jsonrpcs')
def __iter__(self):
return iter((self.XML_RPC, self.XML_RPCS, self.JSON_RPC, self.JSON_RPCS))
# ################################################################################################################################
# ################################################################################################################################
class SAP:
class DEFAULT:
INSTANCE = '00'
POOL_SIZE = 1
# ################################################################################################################################
# ################################################################################################################################
class STOMP:
class DEFAULT:
ADDRESS = '127.0.0.1:61613'
PROTOCOL = '1.0'
TIMEOUT = 10 # In seconds
USERNAME = 'guest'
ACK_MODE = 'client-individual'
# ################################################################################################################################
# ################################################################################################################################
CONTENT_TYPE = Bunch(
JSON = 'application/json',
PLAIN_XML = 'application/xml',
SOAP11 = 'text/xml',
SOAP12 = 'application/soap+xml; charset=utf-8',
)
# ################################################################################################################################
# ################################################################################################################################
class IPC:
class ACTION:
INVOKE_SERVICE = 'invoke-service'
INVOKE_WORKER_STORE = 'invoke-worker-store'
class STATUS:
SUCCESS = 'zs'
FAILURE = 'zf'
LENGTH = 2 # Length of either success or failure messages
class CONNECTOR:
class USERNAME:
FTP = 'zato.connector.ftp'
IBM_MQ = 'zato.connector.wmq'
SFTP = 'zato.connector.sftp'
# ################################################################################################################################
# ################################################################################################################################
class WEB_SOCKET:
AUDIT_KEY = 'wsx-connection'
class DEFAULT:
NEW_TOKEN_TIMEOUT = 5
TOKEN_TTL = 3600
FQDN_UNKNOWN = '(Unknown)'
        INTERACT_UPDATE_INTERVAL = 60 # In minutes, i.e. once an hour
PINGS_MISSED_THRESHOLD = 2
PING_INTERVAL = 30
class PATTERN:
BY_EXT_ID = 'zato.by-ext-id.{}'
BY_CHANNEL = 'zato.by-channel.{}'
MSG_BROWSER_PREFIX = 'zato.msg-browser.' # This is used as a prefix in SQL queries
MSG_BROWSER = MSG_BROWSER_PREFIX + '{}'
class ACTION:
CLIENT_RESPONSE = 'client-response'
CREATE_SESSION = 'create-session'
INVOKE_SERVICE = 'invoke-service'
class OUT_MSG_TYPE:
CONNECT = 'connect'
MESSAGE = 'message'
CLOSE = 'close'
class HOOK_TYPE:
ON_CONNECTED = 'wsx_on_connected'
ON_DISCONNECTED = 'wsx_on_disconnected'
ON_PUBSUB_RESPONSE = 'wsx_on_pubsub_response'
ON_VAULT_MOUNT_POINT_NEEDED = 'wsx_on_vault_mount_point_needed'
# ################################################################################################################################
# ################################################################################################################################
class APISPEC:
OPEN_API_V3 = 'openapi_v3'
SOAP_12 = 'soap_12'
NAMESPACE_NULL = ''
DEFAULT_TAG = 'public'
GENERIC_INVOKE_PATH = '/zato/api/invoke/{service_name}' # OpenAPI
SOAP_INVOKE_PATH = '/zato/api/soap/invoke' # SOAP
# ################################################################################################################################
# ################################################################################################################################
class PADDING:
LEFT = 'left'
RIGHT = 'right'
# ################################################################################################################################
# ################################################################################################################################
class AMQP:
class DEFAULT:
POOL_SIZE = 10
PRIORITY = 5
PREFETCH_COUNT = 0
class ACK_MODE:
ACK = NameId('Ack', 'ack')
REJECT = NameId('Reject', 'reject')
def __iter__(self):
return iter((self.ACK, self.REJECT))
# ################################################################################################################################
# ################################################################################################################################
class REDIS:
class DEFAULT:
PORT = 6379
DB = 0
# ################################################################################################################################
# ################################################################################################################################
class SERVER_STARTUP:
class PHASE:
FS_CONFIG_ONLY = 'fs-config-only'
IMPL_BEFORE_RUN = 'impl-before-run'
ON_STARTING = 'on-starting'
BEFORE_POST_FORK = 'before-post-fork'
AFTER_POST_FORK = 'after-post-fork'
IN_PROCESS_FIRST = 'in-process-first'
IN_PROCESS_OTHER = 'in-process-other'
AFTER_STARTED = 'after-started'
# ################################################################################################################################
# ################################################################################################################################
class GENERIC:
ATTR_NAME = 'opaque1'
class CONNECTION:
class TYPE:
CHANNEL_FILE_TRANSFER = 'channel-file-transfer'
CHANNEL_HL7_MLLP = 'channel-hl7-mllp'
CLOUD_DROPBOX = 'cloud-dropbox'
DEF_KAFKA = 'def-kafka'
OUTCONN_HL7_MLLP = 'outconn-hl7-mllp'
OUTCONN_IM_SLACK = 'outconn-im-slack'
OUTCONN_IM_TELEGRAM = 'outconn-im-telegram'
OUTCONN_LDAP = 'outconn-ldap'
OUTCONN_MONGODB = 'outconn-mongodb'
OUTCONN_SFTP = 'outconn-sftp'
OUTCONN_WSX = 'outconn-wsx'
# ################################################################################################################################
# ################################################################################################################################
class AuditLog:
class Direction:
received = 'received'
sent = 'sent'
class Default:
max_len_messages = 50
max_data_stored_per_message = 500 # In kilobytes
# ################################################################################################################################
# ################################################################################################################################
class TOTP:
default_label = '<default-label>'
# ################################################################################################################################
# ################################################################################################################################
class LDAP:
class DEFAULT:
CONNECT_TIMEOUT = 10
POOL_EXHAUST_TIMEOUT = 5
POOL_KEEP_ALIVE = 30
POOL_LIFETIME = 3600
POOL_MAX_CYCLES = 1
POOL_SIZE = 10
class AUTH_TYPE:
NTLM = NameId('NTLM', 'NTLM')
SIMPLE = NameId('Simple', 'SIMPLE')
def __iter__(self):
return iter((self.SIMPLE, self.NTLM))
class AUTO_BIND:
DEFAULT = NameId('Default', 'DEFAULT')
NO_TLS = NameId('No TLS', 'NO_TLS')
NONE = NameId('None', 'NONE')
TLS_AFTER_BIND = NameId('Bind -> TLS', 'TLS_AFTER_BIND')
TLS_BEFORE_BIND = NameId('TLS -> Bind', 'TLS_BEFORE_BIND')
def __iter__(self):
return iter((self.DEFAULT, self.NONE, self.NO_TLS, self.TLS_AFTER_BIND, self.TLS_BEFORE_BIND))
class GET_INFO:
ALL = NameId('All', 'ALL')
DSA = NameId('DSA', 'DSA')
NONE = NameId('None', 'NONE')
SCHEMA = NameId('Schema', 'SCHEMA')
OFFLINE_EDIR_8_8_8 = NameId('EDIR 8.8.8', 'OFFLINE_EDIR_8_8_8')
OFFLINE_AD_2012_R2 = NameId('AD 2012.R2', 'OFFLINE_AD_2012_R2')
OFFLINE_SLAPD_2_4 = NameId('SLAPD 2.4', 'OFFLINE_SLAPD_2_4')
OFFLINE_DS389_1_3_3 = NameId('DS 389.1.3.3', 'OFFLINE_DS389_1_3_3')
def __iter__(self):
return iter((self.NONE, self.ALL, self.SCHEMA, self.DSA,
self.OFFLINE_EDIR_8_8_8, self.OFFLINE_AD_2012_R2, self.OFFLINE_SLAPD_2_4, self.OFFLINE_DS389_1_3_3))
class IP_MODE:
IP_V4_ONLY = NameId('Only IPv4', 'IP_V4_ONLY')
IP_V6_ONLY = NameId('Only IPv6', 'IP_V6_ONLY')
IP_V4_PREFERRED = NameId('Prefer IPv4', 'IP_V4_PREFERRED')
IP_V6_PREFERRED = NameId('Prefer IPv6', 'IP_V6_PREFERRED')
IP_SYSTEM_DEFAULT = NameId('System default', 'IP_SYSTEM_DEFAULT')
def __iter__(self):
return iter((self.IP_V4_ONLY, self.IP_V6_ONLY, self.IP_V4_PREFERRED, self.IP_V6_PREFERRED, self.IP_SYSTEM_DEFAULT))
class POOL_HA_STRATEGY:
FIRST = NameId('First', 'FIRST')
RANDOM = NameId('Random', 'RANDOM')
ROUND_ROBIN = NameId('Round robin', 'ROUND_ROBIN')
def __iter__(self):
return iter((self.FIRST, self.RANDOM, self.ROUND_ROBIN))
class SASL_MECHANISM:
GSSAPI = NameId('GSS-API', 'GSSAPI')
EXTERNAL = NameId('External', 'EXTERNAL')
def __iter__(self):
return iter((self.EXTERNAL, self.GSSAPI))
# ################################################################################################################################
# ################################################################################################################################
class MONGODB:
class DEFAULT:
AUTH_SOURCE = 'admin'
HB_FREQUENCY = 10
MAX_IDLE_TIME = 600
MAX_STALENESS = -1
POOL_SIZE_MIN = 0
POOL_SIZE_MAX = 5
SERVER_LIST = '127.0.0.1:27017'
WRITE_TO_REPLICA = ''
WRITE_TIMEOUT = 5
ZLIB_LEVEL = -1
class TIMEOUT:
CONNECT = 10
SERVER_SELECT = 5
SOCKET = 30
WAIT_QUEUE = 10
class READ_PREF:
PRIMARY = NameId('Primary', 'primary')
PRIMARY_PREFERRED = NameId('Primary pref.', 'primaryPreferred')
SECONDARY = NameId('Secondary', 'secondary')
SECONDARY_PREFERRED = NameId('Secondary pref.', 'secondaryPreferred')
NEAREST = NameId('Nearest', 'nearest')
def __iter__(self):
return iter((self.PRIMARY, self.PRIMARY_PREFERRED, self.SECONDARY, self.SECONDARY_PREFERRED, self.NEAREST))
class AUTH_MECHANISM:
SCRAM_SHA_1 = NameId('SCRAM-SHA-1')
SCRAM_SHA_256 = NameId('SCRAM-SHA-256')
def __iter__(self):
return iter((self.SCRAM_SHA_1, self.SCRAM_SHA_256))
# ################################################################################################################################
# ################################################################################################################################
class KAFKA:
class DEFAULT:
BROKER_VERSION = '0.9.0'
SERVER_LIST = '127.0.0.1:2181'
class TIMEOUT:
SOCKET = 1
OFFSETS = 10
# ################################################################################################################################
# ################################################################################################################################
class TELEGRAM:
class DEFAULT:
ADDRESS = 'https://api.telegram.org/bot{token}/{method}'
class TIMEOUT:
CONNECT = 5
INVOKE = 10
# ################################################################################################################################
# ################################################################################################################################
class SFTP:
class DEFAULT:
BANDWIDTH_LIMIT = 10
BUFFER_SIZE = 32768
COMMAND_SFTP = 'sftp'
COMMAND_PING = 'ls .'
PORT = 22
class LOG_LEVEL:
LEVEL0 = NameId('0', '0')
LEVEL1 = NameId('1', '1')
LEVEL2 = NameId('2', '2')
LEVEL3 = NameId('3', '3')
LEVEL4 = NameId('4', '4')
def __iter__(self):
return iter((self.LEVEL0, self.LEVEL1, self.LEVEL2, self.LEVEL3, self.LEVEL4))
def is_valid(self, value):
return value in (elem.id for elem in self)
class IP_TYPE:
IPV4 = NameId('IPv4', 'ipv4')
IPV6 = NameId('IPv6', 'ipv6')
def __iter__(self):
return iter((self.IPV4, self.IPV6))
def is_valid(self, value):
return value in (elem.id for elem in self)
# ################################################################################################################################
# ################################################################################################################################
class DROPBOX:
class DEFAULT:
MAX_RETRIES_ON_ERROR = 5
MAX_RETRIES_ON_RATE_LIMIT = None
OAUTH2_ACCESS_TOKEN_EXPIRATION = None
POOL_SIZE = 10
TIMEOUT = 60
USER_AGENT = None
# ################################################################################################################################
# ################################################################################################################################
class JSON_RPC:
class PREFIX:
CHANNEL = 'json.rpc.channel'
OUTGOING = 'json.rpc.outconn'
# ################################################################################################################################
# ################################################################################################################################
class CONFIG_FILE:
USER_DEFINED = 'user-defined'
# We need to use such a constant because we can sometimes be interested in setting
# default values which evaluate to boolean False.
NO_DEFAULT_VALUE = 'NO_DEFAULT_VALUE'
PLACEHOLDER = 'zato_placeholder'
# ################################################################################################################################
# ################################################################################################################################
class MS_SQL:
ZATO_DIRECT = 'zato+mssql1'
EXTRA_KWARGS = 'login_timeout', 'appname', 'blocksize', 'use_mars', 'readonly', 'use_tz', 'bytes_to_unicode', \
'cafile', 'validate_host'
# ################################################################################################################################
# ################################################################################################################################
class FILE_TRANSFER:
SCHEDULER_SERVICE = 'pub.zato.channel.file-transfer.handler'
class DEFAULT:
FILE_PATTERNS = '*'
ENCODING = 'utf-8'
class SOURCE_TYPE:
LOCAL = NameId('Local', 'local')
FTP = NameId('FTP', 'ftp')
SFTP = NameId('SFTP', 'sftp')
def __iter__(self):
return iter((self.LOCAL, self.FTP, self.SFTP))
class SOURCE_TYPE_IMPL:
LOCAL_INOTIFY = 'local-inotify'
LOCAL_SNAPSHOT = 'local-snapshot'
# ################################################################################################################################
# ################################################################################################################################
class HL7:
class Default:
""" Default values for HL7 objects.
"""
        # Default TCP bind address and port for MLLP connections
address = '0.0.0.0:30901'
# Assume that UTF-8 is sent in by default
data_encoding = 'utf-8'
# Each message may be of at most that many bytes
max_msg_size = '1_000_000'
# How many seconds to wait for HL7 MLLP responses when invoking a remote end
max_wait_time = 60
# At most that many bytes will be read from a socket at a time
read_buffer_size = 2048
# We wait at most that many milliseconds for data from a socket in each iteration of the main loop
recv_timeout = 250
# At what level to log messages (Python logging)
logging_level = 'INFO'
# Should we store the contents of messages in logs (Python logging)
should_log_messages = False
# How many concurrent outgoing connections we allow
pool_size = 10
# An MLLP message may begin with these bytes ..
start_seq = '0b'
# .. and end with these below.
end_seq = '1c 0d'
class Const:
""" Various HL7-related constants.
"""
class Version:
# A generic v2 message, without an indication of a specific release.
v2 = NameId('HL7 v2', 'hl7-v2')
def __iter__(self):
return iter((self.v2,))
class LoggingLevel:
Info = NameId('INFO', 'INFO')
Debug = NameId('DEBUG', 'DEBUG')
def __iter__(self):
return iter((self.Info, self.Debug))
class ImplClass:
hl7apy = 'hl7apy'
zato = 'Zato'
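# A minimal sketch of how the hex-encoded MLLP framing sequences above can be
# turned into actual bytes (illustrative only - the production parser may do
# this differently; bytes.fromhex ignores whitespace between byte pairs):
#
#   start_seq = bytes.fromhex(HL7.Default.start_seq)   # b'\x0b'
#   end_seq   = bytes.fromhex(HL7.Default.end_seq)     # b'\x1c\r'
#   mllp_message = start_seq + b'MSH|^~\\&|...' + end_seq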
# ################################################################################################################################
# ################################################################################################################################
# TODO: SIMPLE_IO.FORMAT should be removed in favour of plain DATA_FORMAT
class SIMPLE_IO:
class FORMAT(Attrs):
JSON = DATA_FORMAT.JSON
XML = DATA_FORMAT.XML
COMMON_FORMAT = OrderedDict()
COMMON_FORMAT[DATA_FORMAT.JSON] = 'JSON'
COMMON_FORMAT[DATA_FORMAT.XML] = 'XML'
HTTP_SOAP_FORMAT = OrderedDict()
HTTP_SOAP_FORMAT[DATA_FORMAT.JSON] = 'JSON'
HTTP_SOAP_FORMAT[DATA_FORMAT.XML] = 'XML'
HTTP_SOAP_FORMAT[HL7.Const.Version.v2.id] = HL7.Const.Version.v2.name
# ################################################################################################################################
# ################################################################################################################################
class UNITTEST:
SQL_ENGINE = 'zato+unittest'
HTTP = 'zato+unittest'
VAULT_URL = 'https://zato+unittest'
class HotDeploy:
UserPrefix = 'hot-deploy.user'
# ################################################################################################################################
# ################################################################################################################################
class ZatoKVDB:
SlowResponsesName = 'zato.service.slow_responses'
UsageSamplesName = 'zato.service.usage_samples'
CurrentUsageName = 'zato.service.current_usage'
PubSubMetadataName = 'zato.pubsub.metadata'
SlowResponsesPath = SlowResponsesName + '.json'
UsageSamplesPath = UsageSamplesName + '.json'
CurrentUsagePath = CurrentUsageName + '.json'
PubSubMetadataPath = PubSubMetadataName + '.json'
DefaultSyncThreshold = 3_000
DefaultSyncInterval = 3
# ################################################################################################################################
# ################################################################################################################################
class Stats:
    # This is in milliseconds - for how long we keep old statistics in persistent storage. Defaults to two years.
    # 1000 ms * 60 s * 60 min * 24 hours * 365 days * 2 years = 63_072_000_000 milliseconds (or two years).
    # We use milliseconds because that makes it easier to construct tests.
MaxRetention = 1000 * 60 * 60 * 24 * 365 * 2
# By default, statistics will be aggregated into time buckets of that duration
DefaultAggrTimeFreq = '5min' # Five minutes
# We always tabulate by object_id (e.g. service name)
TabulateAggr = 'object_id'
# ################################################################################################################################
# ################################################################################################################################
class StatsKey:
CurrentValue = 'current_value'
PerKeyMin = 'min'
PerKeyMax = 'max'
PerKeyMean = 'mean'
PerKeyValue = 'value'
PerKeyLastTimestamp = 'last_timestamp'
PerKeyLastDuration = 'last_duration'
# ################################################################################################################################
# ################################################################################################################################
class SSO:
class EmailTemplate:
SignupConfirm = 'signup-confirm.txt'
SignupWelcome = 'signup-welcome.txt'
PasswordResetLink = 'password-reset-link.txt'
# ################################################################################################################################
# ################################################################################################################################
ZATO_INFO_FILE = '.zato-info'
# ################################################################################################################################
# ################################################################################################################################
class SourceCodeInfo:
""" A bunch of attributes dealing the service's source code.
"""
__slots__ = 'source', 'source_html', 'len_source', 'path', 'hash', 'hash_method', 'server_name'
def __init__(self):
self.source = '' # type: str
self.source_html = '' # type: str
self.len_source = 0 # type: int
self.path = None # type: str
self.hash = None # type: str
self.hash_method = None # type: str
self.server_name = None # type: str
# ################################################################################################################################
# ################################################################################################################################
class StatsElem:
""" A single element of a statistics query result concerning a particular service.
    All values make sense only within the time interval of the original query, e.g. 'min_resp_time'
    may be 18 ms in this element because it covers, say, the last hour, while in a different
    period 'min_resp_time' may be a completely different value. Likewise, 'all' in the
    descriptions of parameters below means 'all that matched the given query criteria'
    rather than 'all that ever existed'.
service_name - name of the service this element describes
usage - how many times the service has been invoked
mean - an arithmetical average of all the mean response times (in ms)
rate - usage rate in requests/s (up to 1 decimal point)
time - time spent by this service on processing the messages (in ms)
usage_trend - a CSV list of values representing the service usage
usage_trend_int - a list of integers representing the service usage
mean_trend - a CSV list of values representing mean response times (in ms)
mean_trend_int - a list of integers representing mean response times (in ms)
min_resp_time - minimum service response time (in ms)
max_resp_time - maximum service response time (in ms)
all_services_usage - how many times all the services have been invoked
all_services_time - how much time all the services spent on processing the messages (in ms)
mean_all_services - an arithmetical average of all the mean response times of all services (in ms)
usage_perc_all_services - this service's usage as a percentage of all_services_usage (up to 2 decimal points)
time_perc_all_services - this service's share as a percentage of all_services_time (up to 2 decimal points)
expected_time_elems - an OrderedDict of all the time slots mapped to a mean time and rate
temp_rate - a temporary place for keeping request rates, needed to get a weighted mean of uneven execution periods
temp_mean - just like temp_rate but for mean response times
temp_mean_count - how many periods containing a mean rate there were
"""
def __init__(self, service_name=None, mean=None):
self.service_name = service_name
self.usage = 0
self.mean = mean
self.rate = 0.0
self.time = 0
self.usage_trend_int = []
self.mean_trend_int = []
        self.min_resp_time = maxsize # Assuming there will surely be at least one response time lower than this
self.max_resp_time = 0
self.all_services_usage = 0
self.all_services_time = 0
self.mean_all_services = 0
self.usage_perc_all_services = 0
self.time_perc_all_services = 0
self.expected_time_elems = OrderedDict()
self.temp_rate = 0
self.temp_mean = 0
self.temp_mean_count = 0
    def get_attrs(self, ignore=()):
for attr in dir(self):
if attr.startswith('__') or attr.startswith('temp_') or callable(getattr(self, attr)) or attr in ignore:
continue
yield attr
def to_dict(self, ignore=None):
if not ignore:
ignore = ['expected_time_elems', 'mean_trend_int', 'usage_trend_int']
return {attr: getattr(self, attr) for attr in self.get_attrs(ignore)}
@staticmethod
def from_json(item):
stats_elem = StatsElem()
for k, v in item.items():
setattr(stats_elem, k, v)
return stats_elem
@staticmethod
def from_xml(item):
stats_elem = StatsElem()
for child in item.getchildren():
setattr(stats_elem, child.xpath('local-name()'), child.pyval)
return stats_elem
def __repr__(self):
buff = StringIO()
buff.write('<{} at {} '.format(self.__class__.__name__, hex(id(self))))
attrs = ('{}=[{}]'.format(attr, getattr(self, attr)) for attr in self.get_attrs())
buff.write(', '.join(attrs))
buff.write('>')
value = buff.getvalue()
buff.close()
return value
def __iadd__(self, other):
self.max_resp_time = max(self.max_resp_time, other.max_resp_time)
self.min_resp_time = min(self.min_resp_time, other.min_resp_time)
self.usage += other.usage
return self
def __bool__(self):
return bool(self.service_name) # Empty stats_elems won't have a service name set
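# A minimal usage sketch (illustrative only - real elements come from statistics queries):
#
#   elem1 = StatsElem('my.service', mean=10)
#   elem1.usage, elem1.min_resp_time, elem1.max_resp_time = 5, 12, 40
#
#   elem2 = StatsElem('my.service', mean=20)
#   elem2.usage, elem2.min_resp_time, elem2.max_resp_time = 3, 8, 55
#
#   elem1 += elem2   # Now elem1.usage == 8, min_resp_time == 8, max_resp_time == 55
#   as_dict = elem1.to_dict()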
# ################################################################################################################################
# ################################################################################################################################
class SMTPMessage:
def __init__(self, from_=None, to=None, subject='', body='', attachments=None, cc=None, bcc=None, is_html=False, headers=None,
charset='utf8', is_rfc2231=True):
self.from_ = from_
self.to = to
self.subject = subject
self.body = body
self.attachments = attachments or []
self.cc = cc
self.bcc = bcc
self.is_html = is_html
self.headers = headers or {}
self.charset = charset
self.is_rfc2231 = is_rfc2231
def attach(self, name, contents):
self.attachments.append({'name':name, 'contents':contents})
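# A minimal usage sketch (illustrative only - addresses and contents are made up):
#
#   msg = SMTPMessage(from_='a@example.com', to='b@example.com', subject='Hello', body='Hi there')
#   msg.attach('report.csv', 'col1,col2\n1,2')
#
# The message would then typically be handed over to an outgoing SMTP connection for delivery.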
# ################################################################################################################################
# ################################################################################################################################
class IDEDeploy:
Username = 'ide_publisher'
# ################################################################################################################################
# ################################################################################################################################
class IMAPMessage:
def __init__(self, uid, conn, data):
self.uid = uid
self.conn = conn
self.data = data
def __repr__(self):
return '<{} at {}, uid:`{}`, conn.config:`{}`>'.format(
self.__class__.__name__, hex(id(self)), self.uid, self.conn.config_no_sensitive)
def delete(self):
self.conn.delete(self.uid)
def mark_seen(self):
self.conn.mark_seen(self.uid)
# ################################################################################################################################
# ################################################################################################################################
class IBMMQCallData:
""" Metadata for information returned by IBM MQ in response to underlying MQPUT calls.
"""
__slots__ = ('msg_id', 'correlation_id')
def __init__(self, msg_id, correlation_id):
self.msg_id = msg_id
self.correlation_id = correlation_id
# For compatibility with Zato < 3.2
WebSphereMQCallData = IBMMQCallData
# ################################################################################################################################
# ################################################################################################################################
default_internal_modules = {
'zato.server.service.internal': True,
'zato.server.service.internal.apispec': True,
'zato.server.service.internal.audit_log': True,
'zato.server.service.internal.cache.builtin': True,
'zato.server.service.internal.cache.builtin.entry': True,
'zato.server.service.internal.cache.builtin.pubapi': True,
'zato.server.service.internal.cache.memcached': True,
'zato.server.service.internal.channel.amqp_': True,
'zato.server.service.internal.channel.file_transfer': True,
'zato.server.service.internal.channel.jms_wmq': True,
'zato.server.service.internal.channel.json_rpc': True,
'zato.server.service.internal.channel.web_socket': True,
'zato.server.service.internal.channel.web_socket.cleanup': True,
'zato.server.service.internal.channel.web_socket.client': True,
'zato.server.service.internal.channel.web_socket.subscription': True,
'zato.server.service.internal.channel.zmq': True,
'zato.server.service.internal.cloud.aws.s3': True,
'zato.server.service.internal.connector.amqp_': True,
'zato.server.service.internal.crypto': True,
'zato.server.service.internal.definition.amqp_': True,
'zato.server.service.internal.definition.cassandra': True,
'zato.server.service.internal.definition.jms_wmq': True,
'zato.server.service.internal.email.imap': True,
'zato.server.service.internal.email.smtp': True,
'zato.server.service.internal.generic.connection': True,
'zato.server.service.internal.helpers': True,
'zato.server.service.internal.hot_deploy': True,
'zato.server.service.internal.ide_deploy': True,
'zato.server.service.internal.info': True,
'zato.server.service.internal.http_soap': True,
'zato.server.service.internal.kv_data': True,
'zato.server.service.internal.kvdb': True,
'zato.server.service.internal.kvdb.data_dict.dictionary': True,
'zato.server.service.internal.kvdb.data_dict.impexp': True,
'zato.server.service.internal.kvdb.data_dict.translation': True,
'zato.server.service.internal.message.namespace': True,
'zato.server.service.internal.message.xpath': True,
'zato.server.service.internal.message.json_pointer': True,
'zato.server.service.internal.notif': True,
'zato.server.service.internal.notif.sql': True,
'zato.server.service.internal.outgoing.amqp_': True,
'zato.server.service.internal.outgoing.ftp': True,
'zato.server.service.internal.outgoing.jms_wmq': True,
'zato.server.service.internal.outgoing.odoo': True,
'zato.server.service.internal.outgoing.redis': True,
'zato.server.service.internal.outgoing.sql': True,
'zato.server.service.internal.outgoing.sap': True,
'zato.server.service.internal.outgoing.sftp': True,
'zato.server.service.internal.outgoing.zmq': True,
'zato.server.service.internal.pattern': True,
'zato.server.service.internal.pickup': True,
'zato.server.service.internal.pattern.invoke_retry': True,
'zato.server.service.internal.pubsub': True,
'zato.server.service.internal.pubsub.delivery': True,
'zato.server.service.internal.pubsub.endpoint': True,
'zato.server.service.internal.pubsub.hook': True,
'zato.server.service.internal.pubsub.message': True,
'zato.server.service.internal.pubsub.migrate': True,
'zato.server.service.internal.pubsub.pubapi': True,
'zato.server.service.internal.pubsub.publish': True,
'zato.server.service.internal.pubsub.subscription': True,
'zato.server.service.internal.pubsub.queue': True,
'zato.server.service.internal.pubsub.task': True,
'zato.server.service.internal.pubsub.task.delivery': True,
'zato.server.service.internal.pubsub.task.delivery.message': True,
'zato.server.service.internal.pubsub.task.delivery.server': True,
'zato.server.service.internal.pubsub.task.sync': True,
'zato.server.service.internal.pubsub.topic': True,
'zato.server.service.internal.query.cassandra': True,
'zato.server.service.internal.scheduler': True,
'zato.server.service.internal.search.es': True,
'zato.server.service.internal.search.solr': True,
'zato.server.service.internal.security': True,
'zato.server.service.internal.security.apikey': True,
'zato.server.service.internal.security.aws': True,
'zato.server.service.internal.security.basic_auth': True,
'zato.server.service.internal.security.jwt': True,
'zato.server.service.internal.security.ntlm': True,
'zato.server.service.internal.security.oauth': True,
'zato.server.service.internal.security.rbac': True,
'zato.server.service.internal.security.rbac.client_role': True,
'zato.server.service.internal.security.rbac.permission': True,
'zato.server.service.internal.security.rbac.role': True,
'zato.server.service.internal.security.rbac.role_permission': True,
'zato.server.service.internal.security.tls.ca_cert': True,
'zato.server.service.internal.security.tls.channel': True,
'zato.server.service.internal.security.tls.key_cert': True,
'zato.server.service.internal.security.wss': True,
'zato.server.service.internal.security.vault.connection': True,
'zato.server.service.internal.security.vault.policy': True,
'zato.server.service.internal.security.xpath': True,
'zato.server.service.internal.server': True,
'zato.server.service.internal.service': True,
'zato.server.service.internal.sms': True,
'zato.server.service.internal.sms.twilio': True,
'zato.server.service.internal.sso': True,
'zato.server.service.internal.sso.cleanup': True,
'zato.server.service.internal.sso.password_reset': True,
'zato.server.service.internal.sso.session': True,
'zato.server.service.internal.sso.session_attr': True,
'zato.server.service.internal.sso.signup': True,
'zato.server.service.internal.sso.user': True,
'zato.server.service.internal.sso.user_attr': True,
'zato.server.service.internal.stats': True,
'zato.server.service.internal.stats.summary': True,
'zato.server.service.internal.stats.trends': True,
'zato.server.service.internal.updates': True,
}
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/api.py | api.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
# globre
from globre import match as globre_match
# Paste
from paste.util.converters import asbool
# Zato
from zato.common.api import FALSE_TRUE, TRUE_FALSE
logger = logging.getLogger(__name__)
class Matcher(object):
def __init__(self):
self.config = None
self.items = {True:[], False:[]}
self.order1 = None
self.order2 = None
self.is_allowed_cache = {}
self.special_case = None
def read_config(self, config):
self.config = config
order = config.get('order', FALSE_TRUE)
self.order1, self.order2 = (True, False) if order == TRUE_FALSE else (False, True)
for key, value in config.items():
# Ignore meta key(s)
if key == 'order':
continue
value = asbool(value)
# Add new items
self.items[value].append(key)
# Now sort everything lexicographically, the way it will be used in run-time
for key in self.items:
self.items[key] = list(reversed(sorted(self.items[key])))
for empty, non_empty in ((True, False), (False, True)):
if not self.items[empty] and '*' in self.items[non_empty]:
self.special_case = non_empty
break
def is_allowed(self, value):
logger.debug('Cache:`%s`, value:`%s`', self.is_allowed_cache, value)
if self.special_case is not None:
return self.special_case
try:
return self.is_allowed_cache[value]
except KeyError:
_match = globre_match
is_allowed = None
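            # Patterns are checked in both orders - a match found for self.order2
            # overrides any match previously found for self.order1.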
for order in self.order1, self.order2:
for pattern in self.items[order]:
if _match(pattern, value):
is_allowed = order
# No match at all - we don't allow it in that case
is_allowed = is_allowed if (is_allowed is not None) else False
self.is_allowed_cache[value] = is_allowed
return is_allowed | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/match.py | match.py |
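# A minimal usage sketch (illustrative only - the patterns below are made up):
#
#   config = {'order': FALSE_TRUE, 'zato.*': False, 'zato.ping*': True}
#   matcher = Matcher()
#   matcher.read_config(config)
#   matcher.is_allowed('zato.ping')        # True  - allowed by 'zato.ping*'
#   matcher.is_allowed('zato.internal')    # False - denied by 'zato.*'
#   matcher.is_allowed('my.service')       # False - no match at all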
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from http.client import BAD_REQUEST, CONFLICT, FORBIDDEN, INTERNAL_SERVER_ERROR, METHOD_NOT_ALLOWED, NOT_FOUND, \
SERVICE_UNAVAILABLE, UNAUTHORIZED
# Zato
from zato.common.http_ import HTTP_RESPONSES
# ################################################################################################################################
# ################################################################################################################################
# https://tools.ietf.org/html/rfc6585
TOO_MANY_REQUESTS = 429
# ################################################################################################################################
# ################################################################################################################################
class ZatoException(Exception):
""" Base class for all Zato custom exceptions.
"""
def __init__(self, cid=None, msg=None):
super(ZatoException, self).__init__(msg)
self.cid = cid
self.msg = msg
def __repr__(self):
return '<{} at {} cid:`{}`, msg:`{}`>'.format(
self.__class__.__name__, hex(id(self)), self.cid, self.msg)
__str__ = __repr__
# ################################################################################################################################
class ClientSecurityException(ZatoException):
""" An exception for signalling errors stemming from security problems
on the client side, such as invalid username or password.
"""
# ################################################################################################################################
class ConnectionException(ZatoException):
""" Encountered a problem with an external connections, such as to AMQP brokers.
"""
# ################################################################################################################################
class TimeoutException(ConnectionException):
pass
# ################################################################################################################################
class StatusAwareException(ZatoException):
""" Raised when the underlying error condition can be easily expressed
as one of the HTTP status codes.
"""
def __init__(self, cid, msg, status):
super(StatusAwareException, self).__init__(cid, msg)
self.status = status
self.reason = HTTP_RESPONSES[status]
def __repr__(self):
return '<{} at {} cid:`{}`, status:`{}`, msg:`{}`>'.format(
self.__class__.__name__, hex(id(self)), self.cid, self.status, self.msg)
# ################################################################################################################################
class HTTPException(StatusAwareException):
pass
# ################################################################################################################################
class ParsingException(ZatoException):
""" Raised when the error is to do with parsing of documents, such as an input
XML document.
"""
# ################################################################################################################################
class NoDistributionFound(ZatoException):
""" Raised when an attempt is made to import services from a Distutils2 archive
or directory but they don't contain a proper Distutils2 distribution.
"""
def __init__(self, path):
        super(NoDistributionFound, self).__init__(None, 'No Distutils distribution in path:[{}]'.format(path))
# ################################################################################################################################
class Inactive(ZatoException):
""" Raised when an attempt was made to use an inactive resource, such
as an outgoing connection or a channel.
"""
def __init__(self, name):
super(Inactive, self).__init__(None, '`{}` is inactive'.format(name))
# ################################################################################################################################
# ################################################################################################################################
# Below are HTTP exceptions
class Reportable(HTTPException):
def __init__(self, cid, msg, status):
        super(Reportable, self).__init__(cid, msg, status)
# Backward compatibility with pre 3.0
ClientHTTPError = Reportable
# ################################################################################################################################
class BadRequest(Reportable):
def __init__(self, cid, msg='Received a bad request'):
super(BadRequest, self).__init__(cid, msg, BAD_REQUEST)
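# A minimal usage sketch (illustrative only - the CID would normally come from
# the current request and the condition below is a made-up placeholder):
#
#   if not request_has_expected_fields:
#       raise BadRequest('K012345', 'Expected a JSON object with an "id" field')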
# ################################################################################################################################
class Conflict(Reportable):
def __init__(self, cid, msg):
super(Conflict, self).__init__(cid, msg, CONFLICT)
# ################################################################################################################################
class Forbidden(Reportable):
def __init__(self, cid, msg='You are not allowed to access this resource', *ignored_args, **ignored_kwargs):
super(Forbidden, self).__init__(cid, msg, FORBIDDEN)
# ################################################################################################################################
class MethodNotAllowed(Reportable):
def __init__(self, cid, msg):
super(MethodNotAllowed, self).__init__(cid, msg, METHOD_NOT_ALLOWED)
# ################################################################################################################################
class NotFound(Reportable):
def __init__(self, cid, msg):
super(NotFound, self).__init__(cid, msg, NOT_FOUND)
# ################################################################################################################################
class Unauthorized(Reportable):
def __init__(self, cid, msg, challenge):
super(Unauthorized, self).__init__(cid, msg, UNAUTHORIZED)
self.challenge = challenge
# ################################################################################################################################
class TooManyRequests(Reportable):
def __init__(self, cid, msg):
super(TooManyRequests, self).__init__(cid, msg, TOO_MANY_REQUESTS)
# ################################################################################################################################
class InternalServerError(Reportable):
def __init__(self, cid, msg='Internal server error'):
super(InternalServerError, self).__init__(cid, msg, INTERNAL_SERVER_ERROR)
# ################################################################################################################################
class ServiceUnavailable(Reportable):
def __init__(self, cid, msg):
super(ServiceUnavailable, self).__init__(cid, msg, SERVICE_UNAVAILABLE)
# ################################################################################################################################
class PubSubSubscriptionExists(BadRequest):
pass
# ################################################################################################################################
class ConnectorClosedException(Exception):
def __init__(self, exc, message):
self.inner_exc = exc
super().__init__(message)
# ################################################################################################################################
class IBMMQException(Exception):
pass
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/exception.py | exception.py |
# ################################################################################################################################
# ################################################################################################################################
def get_sys_info():
import platform
system = platform.system()
is_linux = 'linux' in system.lower()
is_windows = 'windows' in system.lower()
is_mac = 'darwin' in system.lower()
if is_linux:
try:
import distro
info = distro.info()
codename = info['codename'].lower()
codename = codename.replace('/', '')
out = '{}.{}'.format(info['id'], info['version'])
if codename:
out += '-{}'.format(codename)
except ImportError:
out = 'linux'
elif is_windows:
_platform = platform.platform().lower()
_edition = platform.win32_edition()
out = '{}-{}'.format(_platform, _edition)
elif is_mac:
out = 'mac'
else:
out = 'os.unrecognised'
return out
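# Sample (illustrative) values returned by get_sys_info:
#
#   'ubuntu.20.04-focal'             - on Linux, with the 'distro' package available
#   'linux'                          - on Linux, without the 'distro' package
#   'windows-10-10.0.19041-sp0-core' - on Windows (platform string plus edition)
#   'mac'                            - on macOS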
# ################################################################################################################################
# ################################################################################################################################
def get_version():
# stdlib
import os
import sys
from sys import version_info as py_version_info
# Python 2/3 compatibility
from past.builtins import execfile
try:
# Make sure the underlying git command runs in our git repository ..
code_dir = os.path.dirname(sys.executable)
os.chdir(code_dir)
curdir = os.path.dirname(os.path.abspath(__file__))
_version_py = os.path.normpath(os.path.join(curdir, '..', '..', '..', '..', '.version.py'))
_locals = {}
execfile(_version_py, _locals)
version = 'Zato {}'.format(_locals['version'])
except IOError:
version = '3.2'
finally:
sys_info = get_sys_info()
version = '{}-py{}.{}.{}-{}'.format(
version,
py_version_info.major,
py_version_info.minor,
py_version_info.micro,
sys_info)
return version
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/version.py | version.py |
# stdlib
from collections import deque
from datetime import datetime
from logging import getLogger
# gevent
from gevent.lock import RLock
# Zato
from zato.common.api import AuditLog as CommonAuditLog, CHANNEL, GENERIC, WEB_SOCKET
from zato.common.util.api import new_cid
# ################################################################################################################################
# ################################################################################################################################
_sent = CommonAuditLog.Direction.sent
_received = CommonAuditLog.Direction.received
event_attrs = 'direction', 'data', 'event_id', 'timestamp', 'msg_id', 'in_reply_to', 'type_', 'object_id', 'conn_id'
transfer_attrs = 'total_bytes_received', 'total_messages_received', 'avg_msg_size_received', 'first_received', 'last_received', \
'total_bytes_sent', 'total_messages_sent', 'avg_msg_size_sent', 'first_sent', 'last_sent', \
'data', 'messages'
config_attrs = 'type_', 'object_id', 'max_len_messages_received', 'max_len_messages_sent', \
'max_bytes_per_message_received', 'max_bytes_per_message_sent', \
'max_bytes_per_message'
# ################################################################################################################################
# ################################################################################################################################
def new_event_id(prefix='zae', _new_cid=new_cid):
return '{}{}'.format(prefix, _new_cid())
# ################################################################################################################################
# ################################################################################################################################
class DataEvent:
def __init__(self, direction, _utcnow=datetime.utcnow, _new_event_id=new_event_id):
self.direction = direction
self.event_id = _new_event_id()
self.data = ''
self.timestamp = _utcnow()
self.msg_id = ''
self.in_reply_to = ''
self.type_ = ''
self.object_id = ''
self.conn_id = ''
# This will be the other half of a request or response,
        # e.g. it will link DataSent to DataReceived or the other way around.
self.counterpart = None # type: DataEvent
# ################################################################################################################################
def to_dict(self):
out = {}
for name in event_attrs:
out[name] = getattr(self, name)
return out
# ################################################################################################################################
# ################################################################################################################################
class DataSent(DataEvent):
""" An individual piece of data sent by Zato to a remote end.
This can be a request or a reply to a previous one sent by an API client.
"""
__slots__ = event_attrs
def __init__(self, _direction=_sent):
super().__init__(_direction)
# ################################################################################################################################
# ################################################################################################################################
class DataReceived(DataEvent):
""" An individual piece of data received by Zato from a remote end.
This can be a request or a reply to a previous one sent by an API client.
"""
__slots__ = event_attrs
def __init__(self, _direction=_received):
super().__init__(_direction)
# ################################################################################################################################
# ################################################################################################################################
class LogContainerConfig:
""" Data retention configuration for a specific object.
"""
__slots__ = config_attrs
def __init__(self):
self.type_ = '<log-container-config-type_-not-set>'
self.object_id = '<log-container-config-object_id-not-set>'
self.max_len_messages_received = 0
self.max_len_messages_sent = 0
self.max_bytes_per_message_received = 0
self.max_bytes_per_message_sent = 0
# ################################################################################################################################
# ################################################################################################################################
class LogContainer:
""" Stores messages for a specific object, e.g. an individual REST or HL7 channel.
"""
__slots__ = config_attrs + transfer_attrs + ('lock',)
def __init__(self, config, _sent=_sent, _received=_received):
# type: (LogContainerConfig)
# To serialise access to the underlying storage
self.lock = {
_sent: RLock(),
_received: RLock(),
}
self.type_ = config.type_
self.object_id = config.object_id
self.max_len_messages_sent = config.max_len_messages_sent
self.max_len_messages_received = config.max_len_messages_received
self.max_bytes_per_message = {
_sent: config.max_bytes_per_message_sent,
_received: config.max_bytes_per_message_received,
}
self.total_bytes_sent = 0
self.total_messages_sent = 0
self.avg_msg_size_sent = 0
self.first_sent = None # type: datetime
self.last_sent = None # type: datetime
self.total_bytes_received = 0
self.total_messages_received = 0
self.avg_msg_size_received = 0
self.first_received = None # type: datetime
self.last_received = None # type: datetime
# These two deques are where the actual data is kept
self.messages = {}
self.messages[_sent] = deque(maxlen=self.max_len_messages_sent)
self.messages[_received] = deque(maxlen=self.max_len_messages_received)
# ################################################################################################################################
def store(self, data_event):
with self.lock[data_event.direction]:
# Make sure we do not exceed our limit of bytes stored
max_len = self.max_bytes_per_message[data_event.direction]
data_event.data = data_event.data[:max_len]
storage = self.messages[data_event.direction] # type: deque
storage.append(data_event)
# ################################################################################################################################
def to_dict(self, _sent=_sent, _received=_received):
out = {
_sent: [],
_received: []
}
for name in (_sent, _received):
messages = out[name]
with self.lock[name]:
for message in self.messages[name]: # type: DataEvent
messages.append(message.to_dict())
return out
# ################################################################################################################################
# ################################################################################################################################
class AuditLog:
""" Stores a log of messages for channels, outgoing connections or other objects.
"""
def __init__(self):
# Update lock
self.lock = RLock()
# The main log - keys are object types, values are dicts mapping object IDs to LogContainer objects
self._log = {
CHANNEL.HTTP_SOAP: {},
CHANNEL.WEB_SOCKET: {},
GENERIC.CONNECTION.TYPE.CHANNEL_HL7_MLLP: {},
WEB_SOCKET.AUDIT_KEY: {},
}
# Python logging
self.logger = getLogger('zato')
# ################################################################################################################################
def get_container(self, type_, object_id):
# type: (str, str) -> LogContainer
# Note that below we ignore any key errors, effectively silently dropping invalid requests.
return self._log.get(type_, {}).get(object_id)
# ################################################################################################################################
def _create_container(self, config):
# type: (LogContainerConfig)
# Make sure the object ID is a string (it can be an int)
config.object_id = str(config.object_id)
# Get the mapping of object types to object IDs ..
container_dict = self._log.setdefault(config.type_, {})
# .. make sure we do not have such an object already ..
if config.object_id in container_dict:
raise ValueError('Container already found `{}` ({})'.format(config.object_id, config.type_))
# .. if we are here, it means that we are really adding a new container ..
container = LogContainer(config)
# .. finally, we can attach it to the log by the object's ID.
container_dict[config.object_id] = container
# ################################################################################################################################
def create_container(self, config):
# type: (LogContainerConfig)
with self.lock:
self._create_container(config)
# ################################################################################################################################
def _delete_container(self, type_, object_id):
# type: (str, str)
# Make sure the object ID is a string (it can be an int)
object_id = str(object_id)
# Get the mapping of object types to object IDs ..
try:
container_dict = self._log[type_] # type: dict
except KeyError:
raise ValueError('Container type not found `{}` among `{}` ({})'.format(type_, sorted(self._log), object_id))
# No KeyError = we recognised that type ..
# .. so we can now try to delete that container by its object's ID.
# Note that we use .pop on purpose - e.g. when a server has just started,
        # it may not have any such object yet but the user may already try to edit
# the object this log is attached to. Using .pop ignores non-existing keys.
container_dict.pop(object_id, None)
# ################################################################################################################################
def delete_container(self, type_, object_id):
# type: (str, str)
with self.lock:
self._delete_container(type_, object_id)
# ################################################################################################################################
def edit_container(self, config):
# type: (LogContainerConfig)
with self.lock:
self._delete_container(config.type_, config.object_id)
self._create_container(config)
# ################################################################################################################################
def store_data(self, data_event):
# type: (DataEvent) -> None
# We always store IDs as string objects
data_event.object_id = str(data_event.object_id)
# At this point we assume that all the dicts and containers already exist
container_dict = self._log[data_event.type_]
container = container_dict[data_event.object_id] # type: LogContainer
container.store(data_event)
# ################################################################################################################################
def store_data_received(self, data_event):
# type: (DataReceived) -> None
self.store_data(data_event)
# ################################################################################################################################
def store_data_sent(self, data_event):
# type: (DataSent) -> None
self.store_data(data_event)
# ################################################################################################################################
# ################################################################################################################################
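# A minimal usage sketch (illustrative only - the object ID and limits are made up):
#
#   config = LogContainerConfig()
#   config.type_ = CHANNEL.HTTP_SOAP
#   config.object_id = '123'
#   config.max_len_messages_received = 50
#   config.max_len_messages_sent = 50
#   config.max_bytes_per_message_received = 500
#   config.max_bytes_per_message_sent = 500
#
#   audit_log = AuditLog()
#   audit_log.create_container(config)
#
#   event = DataReceived()
#   event.type_ = CHANNEL.HTTP_SOAP
#   event.object_id = '123'
#   event.data = 'My request'
#   audit_log.store_data_received(event)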
if __name__ == '__main__':
pass | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/audit_log.py | audit_log.py |
# ################################################################################################################################
# stdlib
from calendar import timegm
from importlib import import_module
from logging import getLogger
from time import gmtime
# Cryptography
from cryptography.fernet import InvalidToken
# Python 2/3 compatibility
from past.builtins import basestring
# Zato
from zato.common.api import KVDB as _KVDB, NONCE_STORE
from zato.common.util import spawn_greenlet
from zato.common.util.kvdb import has_redis_sentinels
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class KVDB(object):
""" A wrapper around the Zato's key-value database.
"""
def __init__(self, config=None, decrypt_func=None):
self.conn = None
self.config = config
self.decrypt_func = decrypt_func
self.conn_class = None # Introduced so it's easier to test the class
self.has_sentinel = False
# ################################################################################################################################
def _get_connection_class(self):
""" Returns a concrete class to create Redis connections off basing on whether we use Redis sentinels or not.
Abstracted out to a separate method so it's easier to test the whole class in separation.
"""
if self.has_sentinel:
from redis.sentinel import Sentinel
return Sentinel
else:
from redis import StrictRedis
return StrictRedis
# ################################################################################################################################
def _parse_sentinels(self, item):
if item:
if isinstance(item, basestring):
item = [item]
out = []
for elem in item:
elem = elem.split(':')
# This will always exist ..
host = elem[0]
# .. which is why we can always use it ..
to_append = [host]
# .. but port can be optional ..
if len(elem) > 1:
port = elem[1]
port = int(port)
to_append.append(port)
out.append(tuple(to_append))
return out
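# A minimal sketch of _parse_sentinels input/output (illustrative values only):
#
#   _parse_sentinels('localhost:26379')                          -> [('localhost', 26379)]
#   _parse_sentinels(['10.0.0.1:26379', 'sentinel.example.com']) -> [('10.0.0.1', 26379), ('sentinel.example.com',)]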
# ################################################################################################################################
def init(self):
config = {}
self.has_sentinel = has_redis_sentinels(self.config)
if self.has_sentinel:
sentinels = self._parse_sentinels(self.config.get('redis_sentinels'))
if not sentinels:
raise ValueError('kvdb.redis_sentinels must be provided')
sentinel_master = self.config.get('redis_sentinels_master', None)
if not sentinel_master:
raise ValueError('kvdb.redis_sentinels_master must be provided')
config['sentinels'] = sentinels
config['sentinel_master'] = sentinel_master
else:
if self.config.get('host'):
config['host'] = self.config.host
if self.config.get('port'):
config['port'] = int(self.config.port)
if self.config.get('unix_socket_path'):
config['unix_socket_path'] = self.config.unix_socket_path
if self.config.get('db'):
config['db'] = int(self.config.db)
if self.config.get('password'):
            # Heuristics - 'gA' is the prefix of encrypted secrets so there is a chance
            # that we need to decrypt it. If the decryption fails, this is fine - we assume
            # in such a case that it was an actual password starting with this prefix.
if self.config.password.startswith('gA'):
try:
config['password'] = self.decrypt_func(self.config.password)
except InvalidToken:
config['password'] = self.config.password
else:
config['password'] = self.config.password
if self.config.get('socket_timeout'):
config['socket_timeout'] = float(self.config.socket_timeout)
if self.config.get('connection_pool'):
split = self.config.connection_pool.split('.')
            module, class_name = '.'.join(split[:-1]), split[-1]
            mod = import_module(module)
config['connection_pool'] = getattr(mod, class_name)
if self.config.get('charset'):
config['charset'] = self.config.charset
if self.config.get('errors'):
config['errors'] = self.config.errors
self.conn_class = self._get_connection_class()
if self.has_sentinel:
instance = self.conn_class(config['sentinels'], min_other_sentinels=0, password=config.get('password'),
socket_timeout=config.get('socket_timeout'), decode_responses=True)
self.conn = instance.master_for(config['sentinel_master'])
else:
self.conn = self.conn_class(charset='utf-8', decode_responses=True, **config)
# Confirm whether we can connect
self.ping()
# ################################################################################################################################
def pubsub(self):
return self.conn.pubsub()
# ################################################################################################################################
def publish(self, *args, **kwargs):
return self.conn.publish(*args, **kwargs)
# ################################################################################################################################
def subscribe(self, *args, **kwargs):
return self.conn.subscribe(*args, **kwargs)
# ################################################################################################################################
def translate(self, system1, key1, value1, system2, key2, default=''):
return self.conn.hget(
_KVDB.SEPARATOR.join(
(_KVDB.TRANSLATION, system1, key1, value1, system2, key2)), 'value2') or default
# ################################################################################################################################
def reconfigure(self, config):
# type: (dict) -> None
self.config = config
self.init()
# ################################################################################################################################
def set_password(self, password):
# type: (dict) -> None
self.config['password'] = password
self.init()
# ################################################################################################################################
def copy(self):
""" Returns an KVDB with the configuration copied over from self. Note that
the object returned isn't initialized, in particular, the connection to the
database won't have been initialized.
"""
kvdb = KVDB()
kvdb.config = self.config
kvdb.decrypt_func = self.decrypt_func
return kvdb
# ################################################################################################################################
def close(self):
self.conn.connection_pool.disconnect()
# ################################################################################################################################
def ping(self):
try:
spawn_greenlet(self.conn.ping)
except Exception as e:
            logger.warning('Could not ping %s due to `%s`', self.conn, e.args[0])
else:
logger.info('Redis ping OK -> %s', self.conn)
# ################################################################################################################################
@staticmethod
def is_config_enabled(config):
""" Returns True if the configuration indicates that Redis is enabled.
"""
# type: (dict) -> bool
        return bool(config.get('host') and config.get('port'))
# ################################################################################################################################
# OAuth
def add_oauth_nonce(self, username, nonce, max_nonce_log):
""" Adds an OAuth to the set containing last N used ones for a given username.
"""
key = NONCE_STORE.KEY_PATTERN.format('oauth', username)
# This lets us trim the set to top (last) N nonces
score = timegm(gmtime())
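        # Note: this call uses the pre-3.0 redis-py convention of zadd(name, score, member);
        # redis-py 3.0+ expects a mapping instead, i.e. zadd(name, {member: score}).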
self.conn.zadd(key, score, nonce)
self.conn.zremrangebyrank(key, 0, -max_nonce_log)
def has_oauth_nonce(self, username, nonce):
""" Returns a boolean flag indicating if there's an OAuth nonce for a given
username stored in KVDB.
"""
return self.conn.zscore(NONCE_STORE.KEY_PATTERN.format('oauth', username), nonce)
# ################################################################################################################################
# ################################################################################################################################
'''
# -*- coding: utf-8 -*-
# Zato
from zato.common.util import get_config
from zato.server.service import AsIs, Bool, Int, Service, SIOElem
from zato.server.service.internal import AdminService
# ################################################################################################################################
# ################################################################################################################################
if 0:
from typing import Union as union
from zato.server.base.parallel import ParallelServer
ParallelServer = ParallelServer
# ################################################################################################################################
# ################################################################################################################################
class MyService(AdminService):
name = 'kvdb1.get-list'
class SimpleIO:
input_optional = 'id', 'name'
output_optional = AsIs('id'), 'is_active', 'name', 'host', Int('port'), 'db', Bool('use_redis_sentinels'), \
'redis_sentinels', 'redis_sentinels_master'
default_value = None
# ################################################################################################################################
def get_data(self):
# Response to produce
out = []
# For now, we only return one item containing data read from server.conf
item = {
'id': 'default',
'name': 'default',
'is_active': True,
}
repo_location = self.server.repo_location
config_name = 'server.conf'
config = get_config(repo_location, config_name, bunchified=False)
config = config['kvdb']
for elem in self.SimpleIO.output_optional:
# Extract the embedded name or use it as is
name = elem.name if isinstance(elem, SIOElem) else elem
# These will not exist in server.conf
if name in ('id', 'is_active', 'name'):
continue
# Add it to output
item[name] = config[name]
# Add our only item to response
out.append(item)
return out
# ################################################################################################################################
def handle(self):
self.response.payload[:] = self.get_data()
# ################################################################################################################################
# ################################################################################################################################
class Edit(AdminService):
name = 'kvdb1.edit'
class SimpleIO:
input_optional = AsIs('id'), 'name', Bool('use_redis_sentinels')
input_required = 'host', 'port', 'db', 'redis_sentinels', 'redis_sentinels_master'
output_optional = 'name'
def handle(self):
# Local alias
input = self.request.input
# If provided, turn sentinels configuration into a format expected by the underlying KVDB object
redis_sentinels = input.redis_sentinels or '' # type: str
if redis_sentinels:
redis_sentinels = redis_sentinels.splitlines()
redis_sentinels = ', '.join(redis_sentinels)
# Assign new server-wide configuration ..
self.server.fs_server_config.kvdb.host = input.host
self.server.fs_server_config.kvdb.port = int(input.port)
self.server.fs_server_config.kvdb.redis_sentinels = redis_sentinels
self.server.fs_server_config.kvdb.redis_sentinels_master = input.redis_sentinels_master or ''
# .. and rebuild the Redis connection object.
self.server.kvdb.reconfigure(self.server.fs_server_config.kvdb)
self.response.payload.name = self.request.input.name
# ################################################################################################################################
# ################################################################################################################################
''' | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/kvdb/api.py | api.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
# stdlib
from logging import getLogger
from string import punctuation
# PyParsing
from pyparsing import alphanums, oneOf, OneOrMore, Optional, White, Word
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
# Redis PyParsing grammar
quot = Optional(oneOf(('"', "'")))
command = oneOf((
'CONFIG', 'DBSIZE', 'DECR', 'DECRBY', 'DEL', 'DUMP', 'ECHO',
'EXISTS', 'EXPIRE', 'EXPIREAT', 'FLUSHDB', 'GET',
'HDEL', 'HEXISTS', 'HGET', 'HGETALL', 'HINCRBY', 'HKEYS', 'HLEN', 'HSET', 'HSETNX',
'HVALS', 'INCR', 'INCRBY', 'INFO', 'KEYS', 'LLEN', 'LPOP', 'LPUSH', 'LPUSHX',
'LRANGE', 'LREM', 'LSET', 'LTRIM', 'MGET', 'MSET', 'MSETNX', 'OBJECT', 'PERSIST',
'PEXPIRE', 'PEXPIREAT', 'PING', 'PSETEX', 'PTTL', 'RANDOMKEY', 'RENAME', 'RENAMENX',
'RESTORE', 'RPOP', 'SADD', 'SET', 'SISMEMBER', 'SMEMBERS', 'SREM', 'TIME', 'TTL', 'TYPE',
'ZADD', 'ZRANGE', 'ZREM'), caseless=True).setResultsName('command')
parameters = (OneOrMore(Word(alphanums + '-' + punctuation))).setResultsName('parameters')
redis_grammar = command + Optional(White().suppress() + parameters)
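# A minimal usage sketch (illustrative only):
#
#   result = redis_grammar.parseString('SET my-key my-value')
#   result.command            # 'SET'
#   list(result.parameters)   # ['my-key', 'my-value']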
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/kvdb/parsing.py | parsing.py |
# Python 2/3 compatibility
from six import PY2
# ################################################################################################################################
# ################################################################################################################################
"""
Copyright 2006-2011 SpringSource (http://springsource.com), All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# ################################################################################################################################
# ################################################################################################################################
# stdlib
import http.client as http_client
import socket
import ssl
class CAValidatingHTTPSConnection(http_client.HTTPConnection):
""" This class allows communication via SSL/TLS and takes Certificate Authorities
into account.
"""
def __init__(self, host, port=None, ca_certs=None, keyfile=None, certfile=None,
cert_reqs=None, strict=None, ssl_version=None,
timeout=None):
        if PY2:
            http_client.HTTPConnection.__init__(self, host, port, strict, timeout)
        else:
            # The 'strict' parameter does not exist under Python 3
            http_client.HTTPConnection.__init__(self, host, port, timeout)
self.ca_certs = ca_certs
self.keyfile = keyfile
self.certfile = certfile
self.cert_reqs = cert_reqs
self.ssl_version = ssl_version
def connect(self):
""" Connect to a host on a given (SSL/TLS) port.
"""
sock = socket.create_connection((self.host, self.port), self.timeout)
if self._tunnel_host:
self.sock = sock
self._tunnel()
self.sock = self.wrap_socket(sock)
def wrap_socket(self, sock):
""" Gets a socket object and wraps it into an SSL/TLS-aware one. May be
overridden in subclasses if the wrapping process needs to be customized.
"""
return ssl.wrap_socket(sock, self.keyfile, self.certfile,
ca_certs=self.ca_certs, cert_reqs=self.cert_reqs,
ssl_version=self.ssl_version)
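
# A hedged sketch of the customization mentioned in the docstring above - a subclass
# whose wrap_socket uses ssl.SSLContext instead of the module-level ssl.wrap_socket,
# which newer Python versions deprecate. This is an illustration, not part of the API:
#
#   class ContextHTTPSConnection(CAValidatingHTTPSConnection):
#       def wrap_socket(self, sock):
#           context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
#           if self.ca_certs:
#               context.load_verify_locations(self.ca_certs)
#           if self.certfile:
#               context.load_cert_chain(self.certfile, self.keyfile)
#           return context.wrap_socket(sock, server_hostname=self.host)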
class CAValidatingHTTPS(http_client.HTTPConnection):
""" A subclass of http.client.HTTPConnection which is aware of Certificate Authorities
used in SSL/TLS transactions.
"""
_connection_class = CAValidatingHTTPSConnection
def __init__(self, host=None, port=None, strict=None, ca_certs=None, keyfile=None, certfile=None,
cert_reqs=None, ssl_version=None, timeout=None):
self._setup(self._connection_class(host, port, ca_certs, keyfile, certfile,
cert_reqs, strict, ssl_version, timeout))
# ################################################################################################################################
# ################################################################################################################################
"""
Copyright 2006-2011 SpringSource (http://springsource.com), All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# stdlib
import logging
import sys
import traceback
if PY2:
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
from xmlrpclib import ServerProxy, Transport
else:
from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
from xmlrpc.client import ServerProxy, Transport
class VerificationException(Exception):
""" Raised when the verification of a certificate's fields fails.
"""
# ##############################################################################
# Server
# ##############################################################################
class RequestHandler(SimpleXMLRPCRequestHandler):
rpc_paths = ("/", "/RPC2",)
    def setup(self):
        self.connection = self.request # for doPOST
        if PY2:
            self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
            self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
        else:
            # socket._fileobject does not exist under Python 3 - makefile is the equivalent
            self.rfile = self.request.makefile("rb", buffering=self.rbufsize)
            self.wfile = self.request.makefile("wb", buffering=self.wbufsize)
class SSLServer(SimpleXMLRPCServer):
def __init__(self, host=None, port=None, keyfile=None, certfile=None,
ca_certs=None, cert_reqs=ssl.CERT_NONE, ssl_version=ssl.PROTOCOL_TLSv1,
do_handshake_on_connect=True, suppress_ragged_eofs=True, ciphers=None,
log_requests=True, **kwargs):
if PY2:
SimpleXMLRPCServer.__init__(self, (host, port), requestHandler=RequestHandler)
else:
SimpleXMLRPCServer.__init__(self, (host, port))
self.keyfile = keyfile
self.certfile = certfile
self.ca_certs = ca_certs
self.cert_reqs = cert_reqs
self.ssl_version = ssl_version
self.do_handshake_on_connect = do_handshake_on_connect
self.suppress_ragged_eofs = suppress_ragged_eofs
        self.ciphers = ciphers

        # Both get_request and verify_request below make use of this logger.
        self.logger = logging.getLogger(self.__class__.__name__)
# Looks awkward to use camelCase here but that's what SimpleXMLRPCRequestHandler
# expects.
self.logRequests = log_requests
# 'verify_fields' is taken from kwargs to allow for adding more keywords
# in future versions.
self.verify_fields = kwargs.get("verify_fields")
def get_request(self):
""" Overridden from SocketServer.TCPServer.get_request, wraps the socket in
an SSL context.
"""
sock, from_addr = self.socket.accept()
# 'ciphers' argument is new in 2.7 and we must support 2.6 so add it
# to kwargs conditionally, depending on the Python version.
kwargs = {"keyfile":self.keyfile, "certfile":self.certfile,
"server_side":True, "cert_reqs":self.cert_reqs, "ssl_version":self.ssl_version,
"ca_certs":self.ca_certs, "do_handshake_on_connect":self.do_handshake_on_connect,
"suppress_ragged_eofs":self.suppress_ragged_eofs}
if sys.version_info >= (2, 7):
kwargs["ciphers"] = self.ciphers
sock = ssl.wrap_socket(sock, **kwargs)
if self.logger.isEnabledFor(logging.DEBUG):
self.logger.debug("get_request cert='%s', from_addr='%s'" % (sock.getpeercert(), from_addr))
return sock, from_addr
def verify_request(self, sock, from_addr):
""" Overridden from SocketServer.TCPServer.verify_request, adds validation of the
other side's certificate fields.
"""
try:
if self.verify_fields:
cert = sock.getpeercert()
if not cert:
msg = "Couldn't verify fields, peer didn't send the certificate, from_addr='%s'" % (from_addr,)
raise VerificationException(msg)
allow_peer, reason = self.verify_peer(cert, from_addr)
if not allow_peer:
self.logger.error(reason)
sock.close()
return False
except Exception:
# It was either an error on our side or the client didn't send the
# certificate even though self.cert_reqs was CERT_OPTIONAL (it couldn't
# have been CERT_REQUIRED because we wouldn't have got so far, the
# session would've been terminated much earlier in ssl.wrap_socket call).
# Regardless of the reason we cannot accept the client in that case.
msg = "Verification error='%s', cert='%s', from_addr='%s'" % (
traceback.format_exc(), sock.getpeercert(), from_addr)
self.logger.error(msg)
sock.close()
return False
return True
def verify_peer(self, cert, from_addr):
""" Verifies the other side's certificate. May be overridden in subclasses
if the verification process needs to be customized.
"""
subject = cert.get("subject")
if not subject:
msg = "Peer certificate doesn't have the 'subject' field, cert='%s'" % cert
raise VerificationException(msg)
subject = dict(elem[0] for elem in subject)
for verify_field in self.verify_fields:
expected_value = self.verify_fields[verify_field]
cert_value = subject.get(verify_field, None)
if not cert_value:
reason = "Peer didn't send the '%s' field, subject fields received '%s'" % (
verify_field, subject)
return False, reason
if expected_value != cert_value:
reason = "Expected the subject field '%s' to have value '%s' instead of '%s', subject='%s'" % (
verify_field, expected_value, cert_value, subject)
return False, reason
return True, None
def register_functions(self):
raise NotImplementedError("Must be overridden by subclasses")
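
# A hedged usage sketch - verify_fields maps the peer certificate's subject fields
# to the values they must equal. All the names and file paths below are placeholders
# for illustration only:
#
#   class MyServer(SSLServer):
#       def register_functions(self):
#           self.register_function(pow)
#
#   server = MyServer(host='localhost', port=15100, keyfile='server.key.pem',
#       certfile='server.cert.pem', ca_certs='ca.cert.pem',
#       cert_reqs=ssl.CERT_REQUIRED, verify_fields={'commonName': 'My Client'})
#   server.register_functions()
#   server.serve_forever()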
# ##############################################################################
# Client
# ##############################################################################
class SSLClientTransport(Transport):
""" Handles an HTTPS transaction to an XML-RPC server.
"""
user_agent = "SSL XML-RPC Client (by http://springpython.webfactional.com)"
def __init__(self, ca_certs=None, keyfile=None, certfile=None, cert_reqs=None,
ssl_version=None, timeout=None, strict=None):
self.ca_certs = ca_certs
self.keyfile = keyfile
self.certfile = certfile
self.cert_reqs = cert_reqs
self.ssl_version = ssl_version
self.timeout = timeout
self.strict = strict
Transport.__init__(self)
def make_connection(self, host):
return CAValidatingHTTPS(host, strict=self.strict, ca_certs=self.ca_certs,
keyfile=self.keyfile, certfile=self.certfile, cert_reqs=self.cert_reqs,
ssl_version=self.ssl_version, timeout=self.timeout)
class SSLClient(ServerProxy):
def __init__(self, uri=None, ca_certs=None, keyfile=None, certfile=None,
cert_reqs=ssl.CERT_REQUIRED, ssl_version=ssl.PROTOCOL_TLSv1,
transport=None, encoding=None, verbose=0, allow_none=0, use_datetime=0,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT, strict=None):
if not transport:
            _transport = SSLClientTransport(ca_certs, keyfile, certfile, cert_reqs,
                ssl_version, timeout, strict)
        else:
            _transport = transport(ca_certs, keyfile, certfile, cert_reqs, ssl_version, timeout, strict)
ServerProxy.__init__(self, uri, _transport, encoding, verbose, allow_none, use_datetime)
self.logger = logging.getLogger(self.__class__.__name__)
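
# A matching client-side sketch - the URL and file paths are placeholders and the
# pow call assumes the server-side example above:
#
#   client = SSLClient('https://localhost:15100/RPC2', ca_certs='ca.cert.pem',
#       keyfile='client.key.pem', certfile='client.cert.pem')
#   client.pow(2, 10)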
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/py23_/spring_.py | spring_.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpoint
# ################################################################################################################################
# Type checking
if 0:
from zato.common.odb.model import Cluster
Cluster = Cluster
# ################################################################################################################################
# ################################################################################################################################
class ODBPostProcess(object):
""" SQL post-processing functionality, e.g. creation of objects only after aserver has started.
"""
def __init__(self, session, cluster, cluster_id):
# type: (object, Cluster, int)
if not (cluster or cluster_id):
            raise ValueError('At least one of cluster or cluster_id is required, given: `{}` `{}`'.format(
                cluster, cluster_id))
self.session = session
self.cluster = cluster
self.cluster_id = cluster_id
# ################################################################################################################################
def run(self):
self.add_pubsub_service_endpoint()
self.session.commit()
# ################################################################################################################################
def add_pubsub_service_endpoint(self, _name=PUBSUB.SERVICE_SUBSCRIBER.NAME):
existing = self.session.query(PubSubEndpoint.id).\
filter(PubSubEndpoint.name==_name).\
first()
if not existing:
endpoint = PubSubEndpoint()
endpoint.name = _name
endpoint.is_internal = True
endpoint.role = PUBSUB.ROLE.SUBSCRIBER.id
endpoint.topic_patterns = PUBSUB.SERVICE_SUBSCRIBER.TOPICS_ALLOWED
endpoint.endpoint_type = PUBSUB.ENDPOINT_TYPE.SERVICE.id
if self.cluster:
endpoint.cluster = self.cluster
else:
endpoint.cluster_id = self.cluster_id
self.session.add(endpoint)
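
# A minimal usage sketch, assuming an already opened SQLAlchemy session - both
# values below are illustrative:
#
#   post_process = ODBPostProcess(session, None, cluster_id=1)
#   post_process.run()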
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/odb/post_process.py | post_process.py |
# stdlib
import logging
from contextlib import closing
from copy import deepcopy
from datetime import datetime
from io import StringIO
from logging import DEBUG, getLogger
from threading import RLock
from time import time
from traceback import format_exc
# SQLAlchemy
from sqlalchemy import and_, create_engine, event, select
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.orm.query import Query
from sqlalchemy.pool import NullPool
from sqlalchemy.sql.expression import true
from sqlalchemy.sql.type_api import TypeEngine
# Bunch
from bunch import Bunch, bunchify
# Zato
from zato.common.api import DEPLOYMENT_STATUS, GENERIC, HTTP_SOAP, MS_SQL, NotGiven, PUBSUB, SEC_DEF_TYPE, SECRET_SHADOW, \
SERVER_UP_STATUS, UNITTEST, ZATO_NONE, ZATO_ODB_POOL_NAME
from zato.common.exception import Inactive
from zato.common.mssql_direct import MSSQLDirectAPI, SimpleSession
from zato.common.odb import query
from zato.common.odb.ping import get_ping_query
from zato.common.odb.model import APIKeySecurity, Cluster, DeployedService, DeploymentPackage, DeploymentStatus, HTTPBasicAuth, \
JWT, OAuth, PubSubEndpoint, SecurityBase, Server, Service, TLSChannelSecurity, XPathSecurity, \
WSSDefinition, VaultConnection
from zato.common.odb.testing import UnittestEngine
from zato.common.odb.query.pubsub import subscription as query_ps_subscription
from zato.common.odb.query import generic as query_generic
from zato.common.util.api import current_host, get_component_name, get_engine_url, new_cid, parse_extra_into_dict, \
parse_tls_channel_security_definition, spawn_greenlet
from zato.common.util.sql import ElemsWithOpaqueMaker, elems_with_opaque
from zato.common.util.url_dispatcher import get_match_target
from zato.sso.odb.query import get_rate_limiting_info as get_sso_user_rate_limiting_info
# ################################################################################################################################
# Type checking
import typing
if typing.TYPE_CHECKING:
from zato.server.base.parallel import ParallelServer
# For pyflakes
ParallelServer = ParallelServer
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
rate_limit_keys = 'is_rate_limit_active', 'rate_limit_def', 'rate_limit_type', 'rate_limit_check_parent_def'
unittest_fs_sql_config = {
UNITTEST.SQL_ENGINE: {
'ping_query': 'SELECT 1+1'
}
}
# ################################################################################################################################
ServiceTable = Service.__table__
ServiceTableInsert = ServiceTable.insert
DeployedServiceTable = DeployedService.__table__
DeployedServiceInsert = DeployedServiceTable.insert
DeployedServiceDelete = DeployedServiceTable.delete
# ################################################################################################################################
# ################################################################################################################################
# Based on https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/WriteableTuple
class WritableKeyedTuple(object):
def __init__(self, elem):
object.__setattr__(self, '_elem', elem)
# ################################################################################################################################
def __getattr__(self, key):
return getattr(self._elem, key)
# ################################################################################################################################
def __getitem__(self, idx):
return self._elem.__getitem__(idx)
# ################################################################################################################################
def __setitem__(self, idx, value):
return self._elem.__setitem__(idx, value)
# ################################################################################################################################
    def __nonzero__(self):
        return bool(self._elem)

    # Python 3 calls __bool__ rather than __nonzero__
    __bool__ = __nonzero__
# ################################################################################################################################
def __repr__(self):
return '<WritableKeyedTuple at {}>'.format(hex(id(self)))
# ################################################################################################################################
def get_value(self):
return self._elem._asdict()
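
# An illustrative sketch of how this class is used - WritableTupleQuery below wraps
# each multi-column row in a WritableKeyedTuple, assuming the session was created
# with query_cls=WritableTupleQuery, as SessionWrapper does further below:
#
#   for item in session.query(Service.id, Service.name).all():
#       item.name           # Attribute access is delegated to the underlying row
#       item.is_seen = True # Unlike a plain row, new attributes can be assigned
#       item.get_value()    # The row converted to a dict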
# ################################################################################################################################
# ################################################################################################################################
class SessionWrapper(object):
""" Wraps an SQLAlchemy session.
"""
def __init__(self):
self.session_initialized = False
self.pool = None # type: SQLConnectionPool
self.config = None # type: dict
self.is_sqlite = None # type: bool
self.logger = logging.getLogger(self.__class__.__name__)
def init_session(self, *args, **kwargs):
spawn_greenlet(self._init_session, *args, **kwargs)
def _init_session(self, name, config, pool, use_scoped_session=True):
# type: (str, dict, SQLConnectionPool, bool)
self.config = config
self.fs_sql_config = config['fs_sql_config']
self.pool = pool
try:
self.pool.ping(self.fs_sql_config)
except Exception as e:
msg = 'Could not ping:`%s`, session will be left uninitialised, e:`%s`'
if self.config['is_active']:
err_details = format_exc()
else:
err_details = e.args[0]
self.logger.warn(msg, name, err_details)
else:
if config['engine'] == MS_SQL.ZATO_DIRECT:
self._Session = SimpleSession(self.pool.engine)
else:
if use_scoped_session:
self._Session = scoped_session(sessionmaker(bind=self.pool.engine, query_cls=WritableTupleQuery))
else:
self._Session = sessionmaker(bind=self.pool.engine, query_cls=WritableTupleQuery)
self._session = self._Session()
self.session_initialized = True
self.is_sqlite = self.pool.engine and self.pool.engine.name == 'sqlite'
def session(self):
return self._Session()
def close(self):
self._session.close()
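
# A typical access pattern, as used throughout ODBManager further below - each call
# site checks out its own session and closes it when done:
#
#   with closing(wrapper.session()) as session:
#       ... # Issue queries here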
# ################################################################################################################################
# ################################################################################################################################
class WritableTupleQuery(Query):
def __iter__(self):
out = super(WritableTupleQuery, self).__iter__()
columns_desc = self.column_descriptions
first_type = columns_desc[0]['type']
len_columns_desc = len(columns_desc)
# This is a simple result of a query such as session.query(ObjectName).count()
if len_columns_desc == 1 and isinstance(first_type, TypeEngine):
return out
# A list of objects, e.g. from .all()
elif len_columns_desc > 1:
return (WritableKeyedTuple(elem) for elem in out)
# Anything else
else:
return out
# ################################################################################################################################
# ################################################################################################################################
class SQLConnectionPool(object):
""" A pool of SQL connections wrapping an SQLAlchemy engine.
"""
def __init__(self, name, config, config_no_sensitive, should_init=True):
# type: (str, dict, dict) -> None
self.name = name
self.config = config
self.config_no_sensitive = config_no_sensitive
self.logger = getLogger(self.__class__.__name__)
self.has_debug = self.logger.isEnabledFor(DEBUG)
self.engine = None
self.engine_name = config['engine'] # self.engine.name is 'mysql' while 'self.engine_name' is mysql+pymysql
if should_init:
self.init()
def init(self):
_extra = {
'pool_pre_ping': True, # Make sure SQLAlchemy 1.2+ can refresh connections on transient errors
}
# MySQL only
if self.engine_name.startswith('mysql'):
_extra['pool_recycle'] = 600
# Postgres-only
elif self.engine_name.startswith('postgres'):
_extra['connect_args'] = {'application_name': get_component_name()}
extra = self.config.get('extra') # Optional, hence .get
_extra.update(parse_extra_into_dict(extra))
# SQLite has no pools
if self.engine_name != 'sqlite':
_extra['pool_size'] = int(self.config.get('pool_size', 1))
if _extra['pool_size'] == 0:
_extra['poolclass'] = NullPool
engine_url = get_engine_url(self.config)
try:
self.engine = self._create_engine(engine_url, self.config, _extra)
except Exception as e:
self.logger.warn('Could not create SQL connection `%s`, e:`%s`', self.config['name'], e.args[0])
if self.engine and (not self._is_unittest_engine(engine_url)) and self._is_sa_engine(engine_url):
event.listen(self.engine, 'checkin', self.on_checkin)
event.listen(self.engine, 'checkout', self.on_checkout)
event.listen(self.engine, 'connect', self.on_connect)
event.listen(self.engine, 'first_connect', self.on_first_connect)
        self.checkins = 0
        self.checkouts = 0
# ################################################################################################################################
def __str__(self):
return '<{} at {}, config:[{}]>'.format(self.__class__.__name__, hex(id(self)), self.config_no_sensitive)
# ################################################################################################################################
__repr__ = __str__
# ################################################################################################################################
def _is_sa_engine(self, engine_url):
# type: (str)
return 'zato+mssql1' not in engine_url
# ################################################################################################################################
def _is_unittest_engine(self, engine_url):
# type: (str)
return 'zato+unittest' in engine_url
# ################################################################################################################################
def _create_unittest_engine(self, engine_url, config):
# type: (str, dict)
return UnittestEngine(engine_url, config)
# ################################################################################################################################
def _create_engine(self, engine_url, config, extra):
if self._is_unittest_engine(engine_url):
return self._create_unittest_engine(engine_url, config)
elif self._is_sa_engine(engine_url):
return create_engine(engine_url, **extra)
else:
# This is a direct MS SQL connection
connect_kwargs = {
'dsn': config['host'],
'port': config['port'],
'database': config['db_name'],
'user': config['username'],
'password': config['password'],
'login_timeout': 3,
'as_dict': True,
}
for name in MS_SQL.EXTRA_KWARGS:
value = extra.get(name, NotGiven)
if value is not NotGiven:
connect_kwargs[name] = value
return MSSQLDirectAPI(config['name'], config['pool_size'], connect_kwargs)
# ################################################################################################################################
def on_checkin(self, dbapi_conn, conn_record):
if self.has_debug:
self.logger.debug('Checked in dbapi_conn:%s, conn_record:%s', dbapi_conn, conn_record)
self.checkins += 1
# ################################################################################################################################
def on_checkout(self, dbapi_conn, conn_record, conn_proxy):
if self.has_debug:
self.logger.debug('Checked out dbapi_conn:%s, conn_record:%s, conn_proxy:%s',
dbapi_conn, conn_record, conn_proxy)
self.checkouts += 1
self.logger.debug('co-cin-diff %d-%d-%d', self.checkouts, self.checkins, self.checkouts - self.checkins)
# ################################################################################################################################
def on_connect(self, dbapi_conn, conn_record):
if self.has_debug:
self.logger.debug('Connect dbapi_conn:%s, conn_record:%s', dbapi_conn, conn_record)
# ################################################################################################################################
def on_first_connect(self, dbapi_conn, conn_record):
if self.has_debug:
self.logger.debug('First connect dbapi_conn:%s, conn_record:%s', dbapi_conn, conn_record)
# ################################################################################################################################
def ping(self, fs_sql_config):
""" Pings the SQL database and returns the response time, in milliseconds.
"""
if not self.engine:
return
if hasattr(self.engine, 'ping'):
func = self.engine.ping
query = self.engine.ping_query
args = []
else:
func = self.engine.connect().execute
query = get_ping_query(fs_sql_config, self.config)
args = [query]
self.logger.debug('About to ping the SQL connection pool:`%s`, query:`%s`', self.config_no_sensitive, query)
start_time = time()
func(*args)
response_time = time() - start_time
self.logger.debug('Ping OK, pool:`%s`, response_time:`%s` s', self.config_no_sensitive, response_time)
return response_time
# ################################################################################################################################
def _conn(self):
""" Returns an SQLAlchemy connection object.
"""
return self.engine.connect()
# ################################################################################################################################
conn = property(fget=_conn, doc=_conn.__doc__)
# ################################################################################################################################
def _impl(self):
""" Returns the underlying connection's implementation, the SQLAlchemy engine.
"""
return self.engine
# ################################################################################################################################
impl = property(fget=_impl, doc=_impl.__doc__)
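
# An illustrative configuration dict for the pool - the keys mirror what init and
# _create_engine read, while all the values are placeholders:
#
#   config = {
#       'name': 'My Connection',
#       'engine': 'postgresql+pg8000',
#       'host': 'localhost',
#       'port': 5432,
#       'db_name': 'zato1',
#       'username': 'zato1',
#       'password': '<secret>',
#       'pool_size': 1,
#       'extra': None, # Optional 'key=value' lines parsed into engine kwargs
#   }
#   pool = SQLConnectionPool('My Connection', config, dict(config, password=SECRET_SHADOW))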
# ################################################################################################################################
class PoolStore(object):
""" A main class for accessing all of the SQL connection pools. Each server
thread has its own store.
"""
def __init__(self, sql_conn_class=SQLConnectionPool):
self.sql_conn_class = sql_conn_class
self._lock = RLock()
self.wrappers = {}
self.logger = getLogger(self.__class__.__name__)
# ################################################################################################################################
def __getitem__(self, name, enforce_is_active=True):
""" Checks out the connection pool. If enforce_is_active is False,
the pool's is_active flag will be ignored.
"""
with self._lock:
if enforce_is_active:
wrapper = self.wrappers[name]
if wrapper.config['is_active']:
return wrapper
raise Inactive(name)
else:
return self.wrappers[name]
# ################################################################################################################################
get = __getitem__
# ################################################################################################################################
def __setitem__(self, name, config):
""" Stops a connection pool if it exists and replaces it with a new one
using updated settings.
"""
with self._lock:
if name in self.wrappers:
del self[name]
config_no_sensitive = {}
for key in config:
if key != 'callback_func':
config_no_sensitive[key] = config[key]
config_no_sensitive['password'] = SECRET_SHADOW
pool = self.sql_conn_class(name, config, config_no_sensitive)
wrapper = SessionWrapper()
wrapper.init_session(name, config, pool)
self.wrappers[name] = wrapper
set_item = __setitem__
# ################################################################################################################################
def add_unittest_item(self, name, fs_sql_config=unittest_fs_sql_config):
self.set_item(name, {
            'password': 'password.{}'.format(new_cid()),
'engine': UNITTEST.SQL_ENGINE,
'fs_sql_config': fs_sql_config,
'is_active': True,
})
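
# PoolStore is dict-like - a hedged sketch, assuming config is shaped like the
# example shown under SQLConnectionPool above:
#
#   store = PoolStore()
#   store['My Connection'] = config # Builds the pool and its session wrapper
#   store['My Connection'].pool.ping(fs_sql_config)
#   del store['My Connection']      # Disposes of the underlying engine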
# ################################################################################################################################
def __delitem__(self, name):
""" Stops a pool and deletes it from the store.
"""
with self._lock:
engine = self.wrappers[name].pool.engine
if engine:
engine.dispose()
del self.wrappers[name]
# ################################################################################################################################
def __str__(self):
out = StringIO()
out.write('<{} at {} wrappers:['.format(self.__class__.__name__, hex(id(self))))
out.write(', '.join(sorted(self.wrappers.keys())))
out.write(']>')
return out.getvalue()
# ################################################################################################################################
__repr__ = __str__
# ################################################################################################################################
def change_password(self, name, password):
""" Updates the password which means recreating the pool using the new
password.
"""
with self._lock:
# Do not check if the connection is active when changing the password,
# sometimes it is desirable to change it even if it is Inactive.
item = self.get(name, enforce_is_active=False)
item.pool.engine.dispose()
config = deepcopy(self.wrappers[name].pool.config)
config['password'] = password
self[name] = config
# ################################################################################################################################
def cleanup_on_stop(self):
""" Invoked when the server is stopping.
"""
with self._lock:
for name, wrapper in self.wrappers.items():
wrapper.pool.engine.dispose()
# ################################################################################################################################
class _Server(object):
""" A plain Python object which is used instead of an SQLAlchemy model so the latter is not tied to a session
    for as long as the server is up.
"""
def __init__(self, odb_server, odb_cluster):
self.id = odb_server.id
self.name = odb_server.name
self.last_join_status = odb_server.last_join_status
self.token = odb_server.token
self.cluster_id = odb_cluster.id
self.cluster = odb_cluster
# ################################################################################################################################
class ODBManager(SessionWrapper):
""" Manages connections to a given component's Operational Database.
"""
def __init__(self, parallel_server=None, well_known_data=None, token=None, crypto_manager=None, server_id=None,
server_name=None, cluster_id=None, pool=None, decrypt_func=None):
# type: (ParallelServer, str, str, object, int, str, int, object, object)
super(ODBManager, self).__init__()
self.parallel_server = parallel_server
self.well_known_data = well_known_data
self.token = token
self.crypto_manager = crypto_manager
self.server_id = server_id
self.server_name = server_name
self.cluster_id = cluster_id
self.pool = pool
self.decrypt_func = decrypt_func
# ################################################################################################################################
def on_deployment_finished(self):
""" Commits all the implicit BEGIN blocks opened by SELECTs.
"""
self._session.commit()
# ################################################################################################################################
def fetch_server(self, odb_config):
""" Fetches the server from the ODB. Also sets the 'cluster' attribute
to the value pointed to by the server's .cluster attribute.
"""
if not self.session_initialized:
self.init_session(ZATO_ODB_POOL_NAME, odb_config, self.pool, False)
with closing(self.session()) as session:
try:
server = session.query(Server).\
filter(Server.token == self.token).\
one()
self.server = _Server(server, server.cluster)
self.server_id = server.id
self.cluster = server.cluster
self.cluster_id = server.cluster.id
return self.server
except Exception:
msg = 'Could not find server in ODB, token:`{}`'.format(
self.token)
logger.error(msg)
raise
# ################################################################################################################################
def get_servers(self, up_status=SERVER_UP_STATUS.RUNNING, filter_out_self=True):
""" Returns all servers matching criteria provided on input.
"""
with closing(self.session()) as session:
query = session.query(Server).\
filter(Server.cluster_id == self.cluster_id)
if up_status:
query = query.filter(Server.up_status == up_status)
if filter_out_self:
query = query.filter(Server.id != self.server_id)
return query.all()
# ################################################################################################################################
def get_default_internal_pubsub_endpoint(self):
with closing(self.session()) as session:
return session.query(PubSubEndpoint).\
filter(PubSubEndpoint.name==PUBSUB.DEFAULT.INTERNAL_ENDPOINT_NAME).\
filter(PubSubEndpoint.endpoint_type==PUBSUB.ENDPOINT_TYPE.INTERNAL.id).\
filter(PubSubEndpoint.cluster_id==self.cluster_id).\
one()
# ################################################################################################################################
def get_missing_services(self, server, locally_deployed):
""" Returns services deployed on the server given on input that are not among locally_deployed.
"""
missing = set()
with closing(self.session()) as session:
server_services = session.query(
Service.id, Service.name,
DeployedService.source_path, DeployedService.source).\
join(DeployedService, Service.id==DeployedService.service_id).\
join(Server, DeployedService.server_id==Server.id).\
filter(Service.is_internal!=true()).\
all()
for item in server_services:
if item.name not in locally_deployed:
missing.add(item)
return missing
# ################################################################################################################################
def server_up_down(self, token, status, update_host=False, bind_host=None, bind_port=None, preferred_address=None,
crypto_use_tls=None):
""" Updates the information regarding the server is RUNNING or CLEAN_DOWN etc.
and what host it's running on.
"""
with closing(self.session()) as session:
server = session.query(Server).\
filter(Server.token==token).\
first()
# It may be the case that the server has been deleted from web-admin before it shut down,
# in which case during the shut down it will not be able to find itself in ODB anymore.
if not server:
logger.info('No server found for token `%s`, status:`%s`', token, status)
return
server.up_status = status
server.up_mod_date = datetime.utcnow()
if update_host:
server.host = current_host()
server.bind_host = bind_host
server.bind_port = bind_port
server.preferred_address = preferred_address
server.crypto_use_tls = crypto_use_tls
session.add(server)
session.commit()
# ################################################################################################################################
def _copy_rate_limiting_config(self, copy_from, copy_to, _keys=rate_limit_keys):
for key in _keys:
copy_to[key] = copy_from.get(key)
# ################################################################################################################################
def get_url_security(self, cluster_id, connection=None, any_internal=HTTP_SOAP.ACCEPT.ANY_INTERNAL):
""" Returns the security configuration of HTTP URLs.
"""
        # Temporary cache of security definitions visited so as not to
        # look up the same ones repeatedly for each HTTP object that uses them.
sec_def_cache = {}
with closing(self.session()) as session:
# What DB class to fetch depending on the string value of the security type.
sec_type_db_class = {
SEC_DEF_TYPE.APIKEY: APIKeySecurity,
SEC_DEF_TYPE.BASIC_AUTH: HTTPBasicAuth,
SEC_DEF_TYPE.JWT: JWT,
SEC_DEF_TYPE.OAUTH: OAuth,
SEC_DEF_TYPE.TLS_CHANNEL_SEC: TLSChannelSecurity,
SEC_DEF_TYPE.WSS: WSSDefinition,
SEC_DEF_TYPE.VAULT: VaultConnection,
SEC_DEF_TYPE.XPATH_SEC: XPathSecurity,
}
result = {}
q = query.http_soap_security_list(session, cluster_id, connection)
columns = Bunch()
# So ConfigDict has its data in the format it expects
for c in q.statement.columns:
columns[c.name] = None
for item in elems_with_opaque(q):
target = get_match_target({
'http_accept': item.get('http_accept'),
'http_method': item.get('method'),
'soap_action': item.soap_action,
'url_path': item.url_path,
}, http_methods_allowed_re=self.parallel_server.http_methods_allowed_re)
result[target] = Bunch()
result[target].is_active = item.is_active
result[target].transport = item.transport
result[target].data_format = item.data_format
result[target].sec_use_rbac = item.sec_use_rbac
if item.security_id:
# For later use
result[target].sec_def = Bunch()
# We either have already seen this security definition ..
if item.security_id in sec_def_cache:
sec_def = sec_def_cache[item.security_id]
# .. or we have not, in which case we need to look it up
# and then cache it for later use.
else:
# Will raise KeyError if the DB gets somehow misconfigured.
db_class = sec_type_db_class[item.sec_type]
sec_def_item = session.query(db_class).\
filter(db_class.id==item.security_id).\
one()
sec_def = bunchify(sec_def_item.asdict())
ElemsWithOpaqueMaker.process_config_dict(sec_def)
sec_def_cache[item.security_id] = sec_def
# Common things first
result[target].sec_def.id = sec_def.id
result[target].sec_def.name = sec_def.name
result[target].sec_def.password = self.decrypt_func(sec_def.password or '')
result[target].sec_def.sec_type = item.sec_type
if item.sec_type == SEC_DEF_TYPE.BASIC_AUTH:
result[target].sec_def.username = sec_def.username
result[target].sec_def.realm = sec_def.realm
self._copy_rate_limiting_config(sec_def, result[target].sec_def)
elif item.sec_type == SEC_DEF_TYPE.JWT:
result[target].sec_def.username = sec_def.username
self._copy_rate_limiting_config(sec_def, result[target].sec_def)
elif item.sec_type == SEC_DEF_TYPE.APIKEY:
result[target].sec_def.username = 'HTTP_{}'.format(sec_def.username.upper().replace('-', '_'))
self._copy_rate_limiting_config(sec_def, result[target].sec_def)
elif item.sec_type == SEC_DEF_TYPE.WSS:
result[target].sec_def.username = sec_def.username
result[target].sec_def.password_type = sec_def.password_type
result[target].sec_def.reject_empty_nonce_creat = sec_def.reject_empty_nonce_creat
result[target].sec_def.reject_stale_tokens = sec_def.reject_stale_tokens
result[target].sec_def.reject_expiry_limit = sec_def.reject_expiry_limit
result[target].sec_def.nonce_freshness_time = sec_def.nonce_freshness_time
elif item.sec_type == SEC_DEF_TYPE.TLS_CHANNEL_SEC:
result[target].sec_def.value = dict(parse_tls_channel_security_definition(sec_def.value))
elif item.sec_type == SEC_DEF_TYPE.XPATH_SEC:
result[target].sec_def.username = sec_def.username
result[target].sec_def.username_expr = sec_def.username_expr
result[target].sec_def.password_expr = sec_def.password_expr
else:
result[target].sec_def = ZATO_NONE
return result, columns
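
# A sketch of how the result is typically consumed - the actual dict keys are match
# targets produced by get_match_target, and cluster_id below is illustrative:
#
#   url_sec, columns = odb.get_url_security(cluster_id=1, connection='channel')
#   for target, info in url_sec.items():
#       if info.sec_def is not ZATO_NONE:
#           info.sec_def.sec_type # E.g. SEC_DEF_TYPE.BASIC_AUTH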
# ################################################################################################################################
def get_sql_internal_service_list(self, cluster_id):
""" Returns a list of service name and IDs for input cluster ID. It represents what is currently found in the ODB
and is used during server startup to decide if any new services should be added from what is found in the filesystem.
"""
with closing(self.session()) as session:
return session.query(
Service.id,
Service.impl_name,
Service.is_active,
Service.slow_threshold,
).\
filter(Service.cluster_id==cluster_id).\
all()
# ################################################################################################################################
def get_basic_data_service_list(self, session):
""" Returns basic information about all the services in ODB.
"""
query = select([
ServiceTable.c.id,
ServiceTable.c.name,
ServiceTable.c.impl_name,
]).where(
ServiceTable.c.cluster_id==self.cluster_id
)
return session.execute(query).\
fetchall()
# ################################################################################################################################
def get_basic_data_deployed_service_list(self):
""" Returns basic information about all the deployed services in ODB.
"""
with closing(self.session()) as session:
query = select([
ServiceTable.c.name,
DeployedServiceTable.c.source,
]).where(and_(
DeployedServiceTable.c.service_id==ServiceTable.c.id,
DeployedServiceTable.c.server_id==self.server_id
))
return session.execute(query).\
fetchall()
# ################################################################################################################################
def add_services(self, session, data):
        # type: (object, list) -> None
try:
session.execute(ServiceTableInsert().values(data))
except IntegrityError:
# This can be ignored because it is possible that there will be
# more than one server trying to insert rows related to services
# that are hot-deployed from web-admin or another source.
logger.debug('Ignoring IntegrityError with `%s`', data)
# ################################################################################################################################
def add_deployed_services(self, session, data):
        # type: (object, list) -> None
session.execute(DeployedServiceInsert().values(data))
# ################################################################################################################################
def drop_deployed_services_by_name(self, session, service_id_list):
session.execute(
DeployedServiceDelete().\
where(DeployedService.service_id.in_(service_id_list))
)
# ################################################################################################################################
def drop_deployed_services(self, server_id):
""" Removes all the deployed services from a server.
"""
with closing(self.session()) as session:
session.execute(
DeployedServiceDelete().\
where(DeployedService.server_id==server_id)
)
session.commit()
# ################################################################################################################################
def is_service_active(self, service_id):
""" Returns whether the given service is active or not.
"""
with closing(self.session()) as session:
return session.query(Service.is_active).\
filter(Service.id==service_id).\
one()[0]
# ################################################################################################################################
def hot_deploy(self, deployment_time, details, payload_name, payload, server_id):
""" Inserts hot-deployed data into the DB along with setting the preliminary
AWAITING_DEPLOYMENT status for each of the servers this server's cluster
is aware of.
"""
with closing(self.session()) as session:
# Create the deployment package info ..
dp = DeploymentPackage()
dp.deployment_time = deployment_time
dp.details = details
dp.payload_name = payload_name
dp.payload = payload
dp.server_id = server_id
# .. add it to the session ..
session.add(dp)
# .. for each of the servers in this cluster set the initial status ..
servers = session.query(Cluster).\
filter(Cluster.id == self.server.cluster_id).\
one().servers
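
            # Note that the query above autoflushes the session, which is what
            # assigns dp.id before it is referenced below.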
for server in servers:
ds = DeploymentStatus()
ds.package_id = dp.id
ds.server_id = server.id
ds.status = DEPLOYMENT_STATUS.AWAITING_DEPLOYMENT
ds.status_change_time = datetime.utcnow()
session.add(ds)
session.commit()
return dp.id
# ################################################################################################################################
def add_delivery(self, deployment_time, details, service, source_info):
""" Adds information about the server's deployed service into the ODB.
"""
raise NotImplementedError()
# ################################################################################################################################
def get_internal_channel_list(self, cluster_id, needs_columns=False):
""" Returns the list of internal HTTP/SOAP channels, that is,
channels pointing to internal services.
"""
with closing(self.session()) as session:
return query.internal_channel_list(session, cluster_id, needs_columns)
def get_http_soap_list(self, cluster_id, connection=None, transport=None, needs_columns=False):
""" Returns the list of all HTTP/SOAP connections.
"""
with closing(self.session()) as session:
return query.http_soap_list(session, cluster_id, connection, transport, True, None, needs_columns)
# ################################################################################################################################
def get_job_list(self, cluster_id, needs_columns=False):
""" Returns a list of jobs defined on the given cluster.
"""
with closing(self.session()) as session:
return query.job_list(session, cluster_id, None, needs_columns)
# ################################################################################################################################
def get_service_list(self, cluster_id, needs_columns=False):
""" Returns a list of services defined on the given cluster.
"""
with closing(self.session()) as session:
return elems_with_opaque(query.service_list(session, cluster_id, needs_columns=needs_columns))
# ################################################################################################################################
def get_service_id_list(self, session, cluster_id, name_list):
""" Returns a list of IDs matching input service names.
"""
# type: (object, int, list)
return query.service_id_list(session, cluster_id, name_list)
# ################################################################################################################################
def get_service_list_with_include(self, session, cluster_id, include_list, needs_columns=False):
""" Returns a list of all services from the input include_list.
"""
# type: (object, int, list)
return query.service_list_with_include(session, cluster_id, include_list, needs_columns)
# ################################################################################################################################
def get_apikey_security_list(self, cluster_id, needs_columns=False):
""" Returns a list of API keys existing on the given cluster.
"""
with closing(self.session()) as session:
return elems_with_opaque(query.apikey_security_list(session, cluster_id, needs_columns))
# ################################################################################################################################
def get_aws_security_list(self, cluster_id, needs_columns=False):
""" Returns a list of AWS definitions existing on the given cluster.
"""
with closing(self.session()) as session:
return query.aws_security_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_basic_auth_list(self, cluster_id, cluster_name, needs_columns=False):
""" Returns a list of HTTP Basic Auth definitions existing on the given cluster.
"""
with closing(self.session()) as session:
return elems_with_opaque(query.basic_auth_list(session, cluster_id, cluster_name, needs_columns))
# ################################################################################################################################
def get_jwt_list(self, cluster_id, cluster_name, needs_columns=False):
""" Returns a list of JWT definitions existing on the given cluster.
"""
with closing(self.session()) as session:
return elems_with_opaque(query.jwt_list(session, cluster_id, cluster_name, needs_columns))
# ################################################################################################################################
def get_ntlm_list(self, cluster_id, needs_columns=False):
""" Returns a list of NTLM definitions existing on the given cluster.
"""
with closing(self.session()) as session:
return query.ntlm_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_oauth_list(self, cluster_id, needs_columns=False):
""" Returns a list of OAuth accounts existing on the given cluster.
"""
with closing(self.session()) as session:
return query.oauth_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_tls_ca_cert_list(self, cluster_id, needs_columns=False):
""" Returns a list of TLS CA certs on the given cluster.
"""
with closing(self.session()) as session:
return query.tls_ca_cert_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_tls_channel_sec_list(self, cluster_id, needs_columns=False):
""" Returns a list of definitions for securing TLS channels.
"""
with closing(self.session()) as session:
return query.tls_channel_sec_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_tls_key_cert_list(self, cluster_id, needs_columns=False):
""" Returns a list of TLS key/cert pairs on the given cluster.
"""
with closing(self.session()) as session:
return query.tls_key_cert_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_wss_list(self, cluster_id, needs_columns=False):
""" Returns a list of WS-Security definitions on the given cluster.
"""
with closing(self.session()) as session:
return query.wss_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_vault_connection_list(self, cluster_id, needs_columns=False):
""" Returns a list of Vault connections on the given cluster.
"""
with closing(self.session()) as session:
return query.vault_connection_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_xpath_sec_list(self, cluster_id, needs_columns=False):
""" Returns a list of XPath-based security definitions on the given cluster.
"""
with closing(self.session()) as session:
return query.xpath_sec_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_definition_amqp(self, cluster_id, def_id):
""" Returns an AMQP definition's details.
"""
with closing(self.session()) as session:
return query.definition_amqp(session, cluster_id, def_id)
# ################################################################################################################################
def get_definition_amqp_list(self, cluster_id, needs_columns=False):
""" Returns a list of AMQP definitions on the given cluster.
"""
with closing(self.session()) as session:
return query.definition_amqp_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_amqp(self, cluster_id, out_id):
""" Returns an outgoing AMQP connection's details.
"""
with closing(self.session()) as session:
return query.out_amqp(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_amqp_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing AMQP connections.
"""
with closing(self.session()) as session:
return query.out_amqp_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_channel_amqp(self, cluster_id, channel_id):
""" Returns a particular AMQP channel.
"""
with closing(self.session()) as session:
return query.channel_amqp(session, cluster_id, channel_id)
# ################################################################################################################################
def get_channel_amqp_list(self, cluster_id, needs_columns=False):
""" Returns a list of AMQP channels.
"""
with closing(self.session()) as session:
return query.channel_amqp_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_def_wmq(self, cluster_id, def_id):
""" Returns an IBM MQ definition's details.
"""
with closing(self.session()) as session:
return query.definition_wmq(session, cluster_id, def_id)
# ################################################################################################################################
def get_definition_wmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of IBM MQ definitions on the given cluster.
"""
with closing(self.session()) as session:
return query.definition_wmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_wmq(self, cluster_id, out_id):
""" Returns an outgoing IBM MQ connection's details.
"""
with closing(self.session()) as session:
return query.out_wmq(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_wmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing IBM MQ connections.
"""
with closing(self.session()) as session:
return query.out_wmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_channel_wmq(self, cluster_id, channel_id):
""" Returns a particular IBM MQ channel.
"""
with closing(self.session()) as session:
return query.channel_wmq(session, cluster_id, channel_id)
# ################################################################################################################################
def get_channel_wmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of IBM MQ channels.
"""
with closing(self.session()) as session:
return query.channel_wmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_zmq(self, cluster_id, out_id):
""" Returns an outgoing ZMQ connection's details.
"""
with closing(self.session()) as session:
return query.out_zmq(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_zmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing ZMQ connections.
"""
with closing(self.session()) as session:
return query.out_zmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_channel_zmq(self, cluster_id, channel_id):
""" Returns a particular ZMQ channel.
"""
with closing(self.session()) as session:
return query.channel_zmq(session, cluster_id, channel_id)
# ################################################################################################################################
def get_channel_zmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of ZMQ channels.
"""
with closing(self.session()) as session:
return query.channel_zmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_channel_file_transfer_list(self, cluster_id, needs_columns=False):
""" Returns a list of file transfer channels.
"""
with closing(self.session()) as session:
return query_generic.connection_list(
session, cluster_id, GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER, needs_columns)
# ################################################################################################################################
def get_channel_web_socket(self, cluster_id, channel_id):
""" Returns a particular WebSocket channel.
"""
with closing(self.session()) as session:
return query.channel_web_socket(session, cluster_id, channel_id)
# ################################################################################################################################
def get_channel_web_socket_list(self, cluster_id, needs_columns=False):
""" Returns a list of WebSocket channels.
"""
with closing(self.session()) as session:
return query.channel_web_socket_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_sql(self, cluster_id, out_id):
""" Returns an outgoing SQL connection's details.
"""
with closing(self.session()) as session:
return query.out_sql(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_sql_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing SQL connections.
"""
with closing(self.session()) as session:
return query.out_sql_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_odoo(self, cluster_id, out_id):
""" Returns an outgoing Odoo connection's details.
"""
with closing(self.session()) as session:
return query.out_odoo(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_odoo_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing Odoo connections.
"""
with closing(self.session()) as session:
return query.out_odoo_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_sap(self, cluster_id, out_id):
""" Returns an outgoing SAP RFC connection's details.
"""
with closing(self.session()) as session:
return query.out_sap(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_sap_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing SAP RFC connections.
"""
with closing(self.session()) as session:
return query.out_sap_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_sftp_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing SFTP connections.
"""
with closing(self.session()) as session:
return query_generic.connection_list(session, cluster_id, GENERIC.CONNECTION.TYPE.OUTCONN_SFTP, needs_columns)
# ################################################################################################################################
def get_out_ftp(self, cluster_id, out_id):
""" Returns an outgoing FTP connection's details.
"""
with closing(self.session()) as session:
return query.out_ftp(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_ftp_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing FTP connections.
"""
with closing(self.session()) as session:
return query.out_ftp_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cache_builtin(self, cluster_id, id):
""" Returns a built-in cache definition's details.
"""
with closing(self.session()) as session:
return query.cache_builtin(session, cluster_id, id)
# ################################################################################################################################
def get_cache_builtin_list(self, cluster_id, needs_columns=False):
""" Returns a list of built-in cache definitions.
"""
with closing(self.session()) as session:
return query.cache_builtin_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cache_memcached(self, cluster_id, id):
""" Returns a Memcached-based definition's details.
"""
with closing(self.session()) as session:
return query.cache_memcached(session, cluster_id, id)
# ################################################################################################################################
def get_cache_memcached_list(self, cluster_id, needs_columns=False):
""" Returns a list of Memcached-based cache definitions.
"""
with closing(self.session()) as session:
return query.cache_memcached_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_namespace_list(self, cluster_id, needs_columns=False):
""" Returns a list of XML namespaces.
"""
with closing(self.session()) as session:
return query.namespace_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_xpath_list(self, cluster_id, needs_columns=False):
""" Returns a list of XPath expressions.
"""
with closing(self.session()) as session:
return query.xpath_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_json_pointer_list(self, cluster_id, needs_columns=False):
""" Returns a list of JSON Pointer expressions.
"""
with closing(self.session()) as session:
return query.json_pointer_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cloud_aws_s3_list(self, cluster_id, needs_columns=False):
""" Returns a list of AWS S3 connections.
"""
with closing(self.session()) as session:
return query.cloud_aws_s3_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_pubsub_topic_list(self, cluster_id, needs_columns=False):
""" Returns a list of pub/sub topics defined in a cluster.
"""
return elems_with_opaque(query.pubsub_topic_list(self._session, cluster_id, needs_columns))
# ################################################################################################################################
def get_pubsub_subscription_list(self, cluster_id, needs_columns=False):
""" Returns a list of pub/sub subscriptions defined in a cluster.
"""
return query_ps_subscription.pubsub_subscription_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_notif_sql_list(self, cluster_id, needs_columns=False):
""" Returns a list of SQL notification definitions.
"""
return query.notif_sql_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cassandra_conn_list(self, cluster_id, needs_columns=False):
""" Returns a list of Cassandra connections.
"""
return query.cassandra_conn_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cassandra_query_list(self, cluster_id, needs_columns=False):
""" Returns a list of Cassandra queries.
"""
return query.cassandra_query_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_search_es_list(self, cluster_id, needs_columns=False):
""" Returns a list of ElasticSearch connections.
"""
return query.search_es_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_search_solr_list(self, cluster_id, needs_columns=False):
""" Returns a list of Solr connections.
"""
return query.search_solr_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_sms_twilio_list(self, cluster_id, needs_columns=False):
""" Returns a list of Twilio connections.
"""
return query.sms_twilio_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_email_smtp_list(self, cluster_id, needs_columns=False):
""" Returns a list of SMTP connections.
"""
return query.email_smtp_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_email_imap_list(self, cluster_id, needs_columns=False):
""" Returns a list of IMAP connections.
"""
return query.email_imap_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_rbac_permission_list(self, cluster_id, needs_columns=False):
""" Returns a list of RBAC permissions.
"""
return query.rbac_permission_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_rbac_role_list(self, cluster_id, needs_columns=False):
""" Returns a list of RBAC roles.
"""
return query.rbac_role_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_rbac_client_role_list(self, cluster_id, needs_columns=False):
""" Returns a list of RBAC roles assigned to clients.
"""
return query.rbac_client_role_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_rbac_role_permission_list(self, cluster_id, needs_columns=False):
""" Returns a list of RBAC permissions for roles.
"""
return query.rbac_role_permission_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_pubsub_endpoint_list(self, cluster_id, needs_columns=False):
""" Returns a list of pub/sub endpoints.
"""
return query.pubsub_endpoint_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_generic_connection_list(self, cluster_id, needs_columns=False):
""" Returns a list of generic connections.
"""
return query_generic.connection_list(self._session, cluster_id, needs_columns=needs_columns)
# ################################################################################################################################
def get_sso_user_rate_limiting_info(self):
""" Returns a list of SSO users that have rate limiting enabled.
"""
with closing(self.session()) as session:
return get_sso_user_rate_limiting_info(session)
# ################################################################################################################################
def _migrate_30_encrypt_sec_base(self, session, id, attr_name, encrypted_value):
""" Sets an encrypted value of a named attribute in a security definition.
"""
item = session.query(SecurityBase).\
filter(SecurityBase.id==id).\
one()
setattr(item, attr_name, encrypted_value)
session.add(item)
_migrate_30_encrypt_sec_apikey = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_aws = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_basic_auth = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_jwt = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_ntlm = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_oauth = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_vault_conn_sec = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_wss = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_xpath_sec = _migrate_30_encrypt_sec_base
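# ################################################################################################################################
# A minimal sketch, not part of the original module, showing how the aliases
# above can be dispatched dynamically by security type during the 3.0
# migration. The sec_type value and the calling convention are assumptions
# made for illustration only.
def _demo_migrate_30_dispatch(odb_manager, session, sec_type, def_id, encrypted_value):
    # E.g. sec_type == 'basic_auth' resolves to _migrate_30_encrypt_sec_basic_auth,
    # all of which share the _migrate_30_encrypt_sec_base implementation above.
    func = getattr(odb_manager, '_migrate_30_encrypt_sec_{}'.format(sec_type))
    func(session, def_id, 'password', encrypted_value)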
# ################################################################################################################################
# File: src/zato/common/odb/api.py
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
def ping_database(params, ping_query):
connection = None
try:
#
# MySQL
#
if params['engine'].startswith('mysql'):
import pymysql
connection = pymysql.connect(
host = params['host'],
port = int(params['port']),
user = params['username'],
password = params['password'],
db = params['db_name'],
)
#
# PostgreSQL
#
elif params['engine'].startswith('postgres'):
import pg8000
connection = pg8000.connect(
host = params['host'],
port = int(params['port']),
user = params['username'],
password = params['password'],
database = params['db_name'],
)
#
# SQLite
#
elif params['engine'].startswith('sqlite'):
pass
#
# Unrecognised
#
else:
            raise ValueError('Unrecognised database `{}`'.format(params['engine']))
        # Confirm that the connection actually works by running the ping query
        if connection:
            cursor = connection.cursor()
            cursor.execute(ping_query)
            cursor.close()
    finally:
        if connection:
            connection.close()
# ################################################################################################################################
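# A minimal usage sketch for ping_database above - the connection parameters
# are illustrative assumptions, not values shipped with this module.
def _demo_ping_database():
    params = {
        'engine': 'postgresql+pg8000',
        'host': 'localhost',
        'port': 5432,
        'username': 'zato',
        'password': 'zato',
        'db_name': 'zato',
    }
    ping_database(params, 'SELECT 1') # 'SELECT 1' works for PostgreSQL and MySQL
# ################################################################################################################################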
def create_pool(engine_params, ping_query, query_class=None):
# stdlib
import copy
# SQLAlchemy
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
# Zato
from zato.common.util.api import get_engine_url
engine_params = copy.deepcopy(engine_params)
if engine_params['engine'] != 'sqlite':
engine_params['password'] = str(engine_params['password'])
engine_params['extra']['pool_size'] = engine_params.pop('pool_size')
engine = create_engine(get_engine_url(engine_params), **engine_params.get('extra', {}))
engine.execute(ping_query)
Session = sessionmaker()
Session.configure(bind=engine, query_cls=query_class)
session = Session()
return session
# ################################################################################################################################
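# A minimal usage sketch for create_pool above - all values are illustrative
# assumptions. Note that 'pool_size' is moved into 'extra', whose contents are
# passed straight to SQLAlchemy's create_engine.
def _demo_create_pool():
    engine_params = {
        'engine': 'postgresql+pg8000',
        'host': 'localhost',
        'port': 5432,
        'username': 'zato',
        'password': 'zato',
        'db_name': 'zato',
        'pool_size': 10,
        'extra': {},
    }
    return create_pool(engine_params, 'SELECT 1')
# ################################################################################################################################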
# Taken from http://www.siafoo.net/snippet/85
# Licensed under BSD2 - http://opensource.org/licenses/bsd-license.php
def drop_all(engine):
""" Drops all tables and sequences (but not VIEWS) from a Postgres database
"""
# stdlib
import logging
from traceback import format_exc
# SQLAlchemy
from sqlalchemy.sql import text
logger = logging.getLogger('zato')
sequence_sql="""SELECT sequence_name FROM information_schema.sequences
WHERE sequence_schema='public'
"""
table_sql="""SELECT table_name FROM information_schema.tables
WHERE table_schema='public' AND table_type != 'VIEW' AND table_name NOT LIKE 'pg_ts_%%'
"""
for table in [name for (name,) in engine.execute(text(table_sql))]:
try:
engine.execute(text('DROP TABLE %s CASCADE' % table))
except Exception:
            logger.warning(format_exc())
for seq in [name for (name,) in engine.execute(text(sequence_sql))]:
try:
engine.execute(text('DROP SEQUENCE %s CASCADE' % seq))
except Exception:
            logger.warning(format_exc())
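# A minimal usage sketch for drop_all above. It is destructive, hence it is
# shown against a throwaway database URL which is an assumption only.
def _demo_drop_all():
    from sqlalchemy import create_engine
    engine = create_engine('postgresql+pg8000://zato:zato@localhost:5432/zato-test')
    drop_all(engine)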
# ################################################################################################################################
# File: src/zato/common/odb/__init__.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# SQLAlchemy
from sqlalchemy import and_, exists, insert, update
# Zato
from zato.common.api import GENERIC, FILE_TRANSFER
from zato.common.odb.model import GenericConn as ModelGenericConn, GenericObject as ModelGenericObject
from zato.common.odb.query import query_wrapper
from zato.common.util.sql import get_dict_with_opaque
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
_generic_attr_name = GENERIC.ATTR_NAME
ModelGenericObjectTable = ModelGenericObject.__table__
# ################################################################################################################################
# ################################################################################################################################
class GenericObjectWrapper:
""" Wraps access to generic objects.
"""
type_ = None
subtype = None
model_class = ModelGenericObject
def __init__(self, session, cluster_id):
# type: (object, int)
self.session = session
self.cluster_id = cluster_id
# ################################################################################################################################
def _build_get_where_query(self, name):
# type: (str) -> object
return and_(
self.model_class.name==name,
self.model_class.type_==self.type_,
self.model_class.cluster_id==self.cluster_id,
)
# ################################################################################################################################
def get(self, name):
# type: (str) -> object
item = self.session.query(self.model_class).\
filter(self.model_class.name==name).\
filter(self.model_class.type_==self.type_).\
filter(self.model_class.cluster_id==self.cluster_id).\
first()
return get_dict_with_opaque(item) if item else None
# ################################################################################################################################
def exists(self, name):
""" Returns a boolean flag indicating whether the input name is already stored in the ODB. False otherwise.
"""
where_query = self._build_get_where_query(name)
exists_query = exists().where(where_query)
return self.session.query(exists_query).\
scalar()
# ################################################################################################################################
def create(self, name, opaque):
""" Creates a new row for input data.
"""
return insert(self.model_class).values(**{
'name': name,
'type_': self.type_,
'subtype': self.subtype,
'cluster_id': self.cluster_id,
_generic_attr_name: opaque,
        })
# ################################################################################################################################
    def update(self, name, opaque):
""" Updates an already existing object.
"""
# type: (str, str) -> object
return update(ModelGenericObjectTable).\
values({
_generic_attr_name: opaque,
}).\
where(and_(
ModelGenericObjectTable.c.name==name,
ModelGenericObjectTable.c.type_==self.type_,
ModelGenericObjectTable.c.cluster_id==self.cluster_id,
))
# ################################################################################################################################
def store(self, name, opaque):
""" Inserts new data or updates an already existing row matching the input.
"""
# type: (str, str)
already_exists = self.exists(name)
query = self.update(name, opaque) if already_exists else self.create(name, opaque)
self.session.execute(query)
self.session.commit()
# ################################################################################################################################
# ################################################################################################################################
class FileTransferWrapper(GenericObjectWrapper):
type_ = GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER
class FTPFileTransferWrapper(FileTransferWrapper):
subtype = FILE_TRANSFER.SOURCE_TYPE.FTP.id
class SFTPFileTransferWrapper(FileTransferWrapper):
subtype = FILE_TRANSFER.SOURCE_TYPE.SFTP.id
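# A minimal sketch, an assumption rather than part of the original module,
# showing the intended use of the wrappers above - store() inserts a new row
# or updates an existing one, keyed by name, type_ and cluster_id.
def _demo_store_file_transfer_object(session, cluster_id):
    wrapper = FTPFileTransferWrapper(session, cluster_id)
    wrapper.store('my.ftp.channel', '{"is_active": true}')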
# ################################################################################################################################
# ################################################################################################################################
@query_wrapper
def connection_list(session, cluster_id, type_=None, needs_columns=False):
""" A list of generic connections by their type.
"""
q = session.query(ModelGenericConn).\
filter(ModelGenericConn.cluster_id==cluster_id)
if type_:
q = q.filter(ModelGenericConn.type_==type_)
q = q.order_by(ModelGenericConn.name)
return q
# ################################################################################################################################
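# A minimal usage sketch for connection_list above; the connection type is an
# illustrative choice from GENERIC.CONNECTION.TYPE and the last positional
# argument is needs_columns.
def _demo_connection_list(session, cluster_id):
    return connection_list(session, cluster_id, GENERIC.CONNECTION.TYPE.OUTCONN_SFTP, False)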
# ################################################################################################################################
# File: src/zato/common/odb/query/generic.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
from functools import wraps
# Bunch
from bunch import bunchify
# SQLAlchemy
from sqlalchemy import and_, func, not_, or_
from sqlalchemy.orm import aliased
from sqlalchemy.sql.expression import case
# Zato
from zato.common.api import CACHE, DEFAULT_HTTP_PING_METHOD, DEFAULT_HTTP_POOL_SIZE, GENERIC, HTTP_SOAP_SERIALIZATION_TYPE, \
PARAMS_PRIORITY, PUBSUB, URL_PARAMS_PRIORITY
from zato.common.json_internal import loads
from zato.common.odb.model import AWSS3, APIKeySecurity, AWSSecurity, Cache, CacheBuiltin, CacheMemcached, CassandraConn, \
CassandraQuery, ChannelAMQP, ChannelWebSocket, ChannelWMQ, ChannelZMQ, Cluster, ConnDefAMQP, ConnDefWMQ, \
CronStyleJob, ElasticSearch, HTTPBasicAuth, HTTPSOAP, IMAP, IntervalBasedJob, Job, JSONPointer, JWT, \
MsgNamespace, NotificationSQL as NotifSQL, NTLM, OAuth, OutgoingOdoo, \
OutgoingAMQP, OutgoingFTP, OutgoingWMQ, OutgoingZMQ, PubSubEndpoint, \
PubSubEndpointTopic, PubSubEndpointEnqueuedMessage, PubSubMessage, PubSubSubscription, PubSubTopic, RBACClientRole, \
RBACPermission, RBACRole, RBACRolePermission, SecurityBase, Server, Service, SMSTwilio, SMTP, Solr, SQLConnectionPool, \
TLSCACert, TLSChannelSecurity, TLSKeyCertSecurity, WebSocketClient, WebSocketClientPubSubKeys, WebSocketSubscription, \
WSSDefinition, VaultConnection, XPath, XPathSecurity, OutgoingSAP
from zato.common.util.search import SearchResults as _SearchResults
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
_not_given = object()
_no_page_limit = 2 ** 24 # ~16.7 million results, tops
_gen_attr = GENERIC.ATTR_NAME
# ################################################################################################################################
def count(session, q):
_q = q.statement.with_only_columns([func.count()]).order_by(None)
return session.execute(_q).scalar()
# ################################################################################################################################
class _QueryConfig:
@staticmethod
def supports_kwargs(query_func):
""" Returns True if the given query func supports kwargs, False otherwise.
"""
return query_func in (
http_soap_list,
)
# ################################################################################################################################
class _SearchWrapper(object):
""" Wraps results in pagination and/or filters out objects by their name or other attributes.
"""
def __init__(self, q, default_page_size=_no_page_limit, **config):
# Apply WHERE conditions
where = config.get('where') or _not_given
if where is not _not_given:
q = q.filter(where)
else:
# If there are multiple filters, they are by default OR-joined
# to ease in look ups over more than one column.
filter_op = and_ if config.get('filter_op') == 'and' else or_
filters = []
for filter_by in config.get('filter_by', []):
for criterion in config.get('query', []):
filters.append(filter_by.contains(criterion))
q = q.filter(filter_op(*filters))
# Total number of results
total_q = q.statement.with_only_columns([func.count()]).order_by(None)
self.total = q.session.execute(total_q).scalar()
# Pagination
page_size = config.get('page_size', default_page_size)
cur_page = config.get('cur_page', 0)
slice_from = cur_page * page_size
slice_to = slice_from + page_size
self.q = q.slice(slice_from, slice_to)
# ################################################################################################################################
def query_wrapper(func):
""" A decorator for queries which works out whether a given query function should return the result only
or a column list retrieved in addition to the result. This is useful because some callers prefer the former
and some need the latter. Also, paginates the results if requested to by the caller.
"""
@wraps(func)
def inner(*args, **kwargs):
# Each query function will have the last argument either False or True
# depending on whether columns are needed or not.
needs_columns = args[-1]
if _QueryConfig.supports_kwargs(func):
result = func(*args, **kwargs)
else:
result = func(*args)
tool = _SearchWrapper(result, **kwargs)
result = _SearchResults(tool.q, tool.q.all(), tool.q.statement.columns, tool.total)
if needs_columns:
return result, result.columns
return result
return inner
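# A minimal sketch, not part of the original module, of how callers invoke
# a @query_wrapper-decorated query defined below. The filter value and paging
# numbers are assumptions; 'filter_by' columns are substring-matched and
# OR-joined by _SearchWrapper, and the last positional argument is the
# needs_columns flag that query_wrapper reads from args[-1].
def _demo_paged_query(session, cluster_id):
    result = out_ftp_list(
        session, cluster_id, False,
        cur_page=0, page_size=50, filter_by=[OutgoingFTP.name], query=['ftp'])
    return result.total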
# ################################################################################################################################
def bunch_maker(func):
""" Turns SQLAlchemy rows into bunch instances, taking opaque elements into account.
"""
@wraps(func)
def inner(*args, **kwargs):
result = func(*args, **kwargs)
out = bunchify(result._asdict())
opaque = out.pop(_gen_attr, None)
if opaque:
opaque = loads(opaque)
out.update(opaque)
return out
return inner
# ################################################################################################################################
def internal_channel_list(session, cluster_id):
""" All the HTTP/SOAP channels that point to internal services.
"""
return session.query(
HTTPSOAP.soap_action, Service.name).\
filter(HTTPSOAP.cluster_id==Cluster.id).\
filter(HTTPSOAP.service_id==Service.id).\
filter(Service.is_internal==True).\
        filter(Cluster.id==cluster_id) # noqa: E712
# ################################################################################################################################
def _job(session, cluster_id):
return session.query(
Job.id,
Job.name,
Job.is_active,
Job.job_type,
Job.start_date,
Job.extra,
Service.name.label('service_name'),
Service.impl_name.label('service_impl_name'),
Service.id.label('service_id'),
IntervalBasedJob.weeks,
IntervalBasedJob.days,
IntervalBasedJob.hours,
IntervalBasedJob.minutes,
IntervalBasedJob.seconds,
IntervalBasedJob.repeats,
CronStyleJob.cron_definition
).\
outerjoin(IntervalBasedJob, Job.id==IntervalBasedJob.job_id).\
outerjoin(CronStyleJob, Job.id==CronStyleJob.job_id).\
filter(Job.cluster_id==Cluster.id).\
filter(Job.service_id==Service.id).\
filter(Cluster.id==cluster_id)
@query_wrapper
def job_list(session, cluster_id, service_name=None, needs_columns=False):
""" All the scheduler's jobs defined in the ODB.
"""
q = _job(session, cluster_id)
if service_name:
q = q.filter(Service.name==service_name)
return q.\
order_by(Job.name)
def job_by_id(session, cluster_id, job_id):
""" A scheduler's job fetched by its ID.
"""
return _job(session, cluster_id).\
filter(Job.id==job_id).\
one()
def job_by_name(session, cluster_id, name):
""" A scheduler's job fetched by its name.
"""
return _job(session, cluster_id).\
filter(Job.name==name).\
one()
# ################################################################################################################################
def _sec_base(session, cluster_id):
return session.query(
SecurityBase.id,
SecurityBase.is_active,
SecurityBase.sec_type,
SecurityBase.name,
SecurityBase.username).\
filter(SecurityBase.cluster_id==Cluster.id).\
filter(Cluster.id==cluster_id)
def sec_base(session, cluster_id, sec_base_id):
return _sec_base(session, cluster_id).\
filter(SecurityBase.id==sec_base_id).\
one()
@query_wrapper
def apikey_security_list(session, cluster_id, needs_columns=False):
""" All the API keys.
"""
return session.query(
APIKeySecurity.id,
APIKeySecurity.name,
APIKeySecurity.is_active,
APIKeySecurity.username,
APIKeySecurity.password,
APIKeySecurity.sec_type,
APIKeySecurity.opaque1,
).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==APIKeySecurity.cluster_id).\
filter(SecurityBase.id==APIKeySecurity.id).\
order_by(SecurityBase.name)
@query_wrapper
def aws_security_list(session, cluster_id, needs_columns=False):
""" All the Amazon security definitions.
"""
return session.query(
AWSSecurity.id, AWSSecurity.name,
AWSSecurity.is_active,
AWSSecurity.username,
AWSSecurity.password, AWSSecurity.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==AWSSecurity.cluster_id).\
filter(SecurityBase.id==AWSSecurity.id).\
order_by(SecurityBase.name)
@query_wrapper
def basic_auth_list(session, cluster_id, cluster_name, needs_columns=False):
""" All the HTTP Basic Auth definitions.
"""
q = session.query(
HTTPBasicAuth.id,
HTTPBasicAuth.name,
HTTPBasicAuth.is_active,
HTTPBasicAuth.username,
HTTPBasicAuth.realm,
HTTPBasicAuth.password,
HTTPBasicAuth.sec_type,
HTTPBasicAuth.password_type,
HTTPBasicAuth.opaque1,
Cluster.id.label('cluster_id'), Cluster.name.label('cluster_name')).\
filter(Cluster.id==HTTPBasicAuth.cluster_id)
if cluster_id:
q = q.filter(Cluster.id==cluster_id)
else:
q = q.filter(Cluster.name==cluster_name)
q = q.filter(SecurityBase.id==HTTPBasicAuth.id).\
order_by(SecurityBase.name)
return q
def _jwt(session, cluster_id, cluster_name, needs_columns=False):
""" All the JWT definitions.
"""
q = session.query(
JWT.id,
JWT.name,
JWT.is_active,
JWT.username,
JWT.password,
JWT.ttl,
JWT.sec_type,
JWT.password_type,
JWT.opaque1,
Cluster.id.label('cluster_id'),
Cluster.name.label('cluster_name')).\
filter(Cluster.id==JWT.cluster_id)
if cluster_id:
q = q.filter(Cluster.id==cluster_id)
else:
q = q.filter(Cluster.name==cluster_name)
q = q.filter(SecurityBase.id==JWT.id).\
order_by(SecurityBase.name)
return q
@query_wrapper
def jwt_list(*args, **kwargs):
return _jwt(*args, **kwargs)
def jwt_by_username(session, cluster_id, username, needs_columns=False):
""" An individual JWT definition by its username.
"""
return _jwt(session, cluster_id, None, needs_columns).\
filter(JWT.username==username).\
one()
@query_wrapper
def ntlm_list(session, cluster_id, needs_columns=False):
""" All the NTLM definitions.
"""
return session.query(
NTLM.id, NTLM.name,
NTLM.is_active,
NTLM.username,
NTLM.password, NTLM.sec_type,
NTLM.password_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==NTLM.cluster_id).\
filter(SecurityBase.id==NTLM.id).\
order_by(SecurityBase.name)
@query_wrapper
def oauth_list(session, cluster_id, needs_columns=False):
""" All the OAuth definitions.
"""
return session.query(
OAuth.id, OAuth.name,
OAuth.is_active,
OAuth.username, OAuth.password,
OAuth.proto_version, OAuth.sig_method,
OAuth.max_nonce_log, OAuth.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==OAuth.cluster_id).\
filter(SecurityBase.id==OAuth.id).\
order_by(SecurityBase.name)
@query_wrapper
def tls_ca_cert_list(session, cluster_id, needs_columns=False):
""" TLS CA certs.
"""
return session.query(TLSCACert).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==TLSCACert.cluster_id).\
order_by(TLSCACert.name)
@query_wrapper
def tls_channel_sec_list(session, cluster_id, needs_columns=False):
""" TLS-based channel security.
"""
return session.query(
TLSChannelSecurity.id, TLSChannelSecurity.name,
TLSChannelSecurity.is_active, TLSChannelSecurity.value,
TLSChannelSecurity.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==TLSChannelSecurity.cluster_id).\
filter(SecurityBase.id==TLSChannelSecurity.id).\
order_by(SecurityBase.name)
@query_wrapper
def tls_key_cert_list(session, cluster_id, needs_columns=False):
""" TLS key/cert pairs.
"""
return session.query(
TLSKeyCertSecurity.id, TLSKeyCertSecurity.name,
TLSKeyCertSecurity.is_active, TLSKeyCertSecurity.info,
TLSKeyCertSecurity.auth_data, TLSKeyCertSecurity.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==TLSKeyCertSecurity.cluster_id).\
filter(SecurityBase.id==TLSKeyCertSecurity.id).\
order_by(SecurityBase.name)
@query_wrapper
def wss_list(session, cluster_id, needs_columns=False):
""" All the WS-Security definitions.
"""
return session.query(
WSSDefinition.id, WSSDefinition.name, WSSDefinition.is_active,
WSSDefinition.username, WSSDefinition.password, WSSDefinition.password_type,
WSSDefinition.reject_empty_nonce_creat, WSSDefinition.reject_stale_tokens,
WSSDefinition.reject_expiry_limit, WSSDefinition.nonce_freshness_time,
WSSDefinition.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==WSSDefinition.cluster_id).\
filter(SecurityBase.id==WSSDefinition.id).\
order_by(SecurityBase.name)
@query_wrapper
def xpath_sec_list(session, cluster_id, needs_columns=False):
""" All the XPath security definitions.
"""
return session.query(
XPathSecurity.id, XPathSecurity.name, XPathSecurity.is_active, XPathSecurity.username, XPathSecurity.username_expr,
XPathSecurity.password_expr, XPathSecurity.password, XPathSecurity.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==XPathSecurity.cluster_id).\
filter(SecurityBase.id==XPathSecurity.id).\
order_by(SecurityBase.name)
# ################################################################################################################################
def _definition_amqp(session, cluster_id):
return session.query(
ConnDefAMQP.name, ConnDefAMQP.id, ConnDefAMQP.host,
ConnDefAMQP.port, ConnDefAMQP.vhost, ConnDefAMQP.username,
ConnDefAMQP.frame_max, ConnDefAMQP.heartbeat, ConnDefAMQP.password).\
filter(Cluster.id==ConnDefAMQP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ConnDefAMQP.name)
def definition_amqp(session, cluster_id, id):
""" A particular AMQP definition
"""
return _definition_amqp(session, cluster_id).\
filter(ConnDefAMQP.id==id).\
one()
@query_wrapper
def definition_amqp_list(session, cluster_id, needs_columns=False):
""" AMQP connection definitions.
"""
return _definition_amqp(session, cluster_id)
# ################################################################################################################################
def _def_wmq(session, cluster_id):
return session.query(
ConnDefWMQ.id, ConnDefWMQ.name, ConnDefWMQ.host,
ConnDefWMQ.port, ConnDefWMQ.queue_manager, ConnDefWMQ.channel,
ConnDefWMQ.cache_open_send_queues, ConnDefWMQ.cache_open_receive_queues,
ConnDefWMQ.use_shared_connections, ConnDefWMQ.ssl, ConnDefWMQ.ssl_cipher_spec,
ConnDefWMQ.ssl_key_repository, ConnDefWMQ.needs_mcd, ConnDefWMQ.max_chars_printed,
ConnDefWMQ.username, ConnDefWMQ.password, ConnDefWMQ.use_jms).\
filter(Cluster.id==ConnDefWMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ConnDefWMQ.name)
def definition_wmq(session, cluster_id, id):
""" A particular IBM MQ definition
"""
return _def_wmq(session, cluster_id).\
filter(ConnDefWMQ.id==id).\
one()
@query_wrapper
def definition_wmq_list(session, cluster_id, needs_columns=False):
""" IBM MQ connection definitions.
"""
return _def_wmq(session, cluster_id)
# ################################################################################################################################
def _out_amqp(session, cluster_id):
return session.query(
OutgoingAMQP.id, OutgoingAMQP.name, OutgoingAMQP.is_active,
OutgoingAMQP.delivery_mode, OutgoingAMQP.priority, OutgoingAMQP.content_type,
OutgoingAMQP.content_encoding, OutgoingAMQP.expiration, OutgoingAMQP.pool_size, OutgoingAMQP.user_id,
OutgoingAMQP.app_id, ConnDefAMQP.name.label('def_name'), OutgoingAMQP.def_id).\
        filter(OutgoingAMQP.def_id==ConnDefAMQP.id).\
filter(Cluster.id==ConnDefAMQP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(OutgoingAMQP.name)
def out_amqp(session, cluster_id, id):
""" An outgoing AMQP connection.
"""
return _out_amqp(session, cluster_id).\
filter(OutgoingAMQP.id==id).\
one()
@query_wrapper
def out_amqp_list(session, cluster_id, needs_columns=False):
""" Outgoing AMQP connections.
"""
return _out_amqp(session, cluster_id)
# ################################################################################################################################
def _out_wmq(session, cluster_id):
return session.query(
OutgoingWMQ.id, OutgoingWMQ.name, OutgoingWMQ.is_active,
OutgoingWMQ.delivery_mode, OutgoingWMQ.priority, OutgoingWMQ.expiration,
ConnDefWMQ.name.label('def_name'), OutgoingWMQ.def_id).\
        filter(OutgoingWMQ.def_id==ConnDefWMQ.id).\
filter(Cluster.id==ConnDefWMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(OutgoingWMQ.name)
def out_wmq(session, cluster_id, id):
""" An outgoing IBM MQ connection (by ID).
"""
return _out_wmq(session, cluster_id).\
filter(OutgoingWMQ.id==id).\
one()
def out_wmq_by_name(session, cluster_id, name):
""" An outgoing IBM MQ connection (by name).
"""
return _out_wmq(session, cluster_id).\
filter(OutgoingWMQ.name==name).\
first()
@query_wrapper
def out_wmq_list(session, cluster_id, needs_columns=False):
""" Outgoing IBM MQ connections.
"""
return _out_wmq(session, cluster_id)
# ################################################################################################################################
def _channel_amqp(session, cluster_id):
return session.query(
ChannelAMQP.id, ChannelAMQP.name, ChannelAMQP.is_active,
ChannelAMQP.queue, ChannelAMQP.consumer_tag_prefix,
ConnDefAMQP.name.label('def_name'), ChannelAMQP.def_id,
ChannelAMQP.pool_size, ChannelAMQP.ack_mode, ChannelAMQP.prefetch_count,
ChannelAMQP.data_format,
Service.name.label('service_name'),
Service.impl_name.label('service_impl_name')).\
filter(ChannelAMQP.def_id==ConnDefAMQP.id).\
filter(ChannelAMQP.service_id==Service.id).\
filter(Cluster.id==ConnDefAMQP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ChannelAMQP.name)
def channel_amqp(session, cluster_id, id):
""" A particular AMQP channel.
"""
return _channel_amqp(session, cluster_id).\
filter(ChannelAMQP.id==id).\
one()
@query_wrapper
def channel_amqp_list(session, cluster_id, needs_columns=False):
""" AMQP channels.
"""
return _channel_amqp(session, cluster_id)
# ################################################################################################################################
def _channel_wmq(session, cluster_id):
return session.query(
ChannelWMQ.id, ChannelWMQ.name, ChannelWMQ.is_active,
ChannelWMQ.queue, ConnDefWMQ.name.label('def_name'), ChannelWMQ.def_id,
ChannelWMQ.data_format, Service.name.label('service_name'),
Service.impl_name.label('service_impl_name')).\
filter(ChannelWMQ.def_id==ConnDefWMQ.id).\
filter(ChannelWMQ.service_id==Service.id).\
filter(Cluster.id==ConnDefWMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ChannelWMQ.name)
def channel_wmq(session, cluster_id, id):
""" A particular IBM MQ channel.
"""
return _channel_wmq(session, cluster_id).\
filter(ChannelWMQ.id==id).\
one()
@query_wrapper
def channel_wmq_list(session, cluster_id, needs_columns=False):
""" IBM MQ channels.
"""
return _channel_wmq(session, cluster_id)
# ################################################################################################################################
def _out_zmq(session, cluster_id):
return session.query(
OutgoingZMQ.id, OutgoingZMQ.name, OutgoingZMQ.is_active,
OutgoingZMQ.address, OutgoingZMQ.socket_type, OutgoingZMQ.socket_method).\
filter(Cluster.id==OutgoingZMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(OutgoingZMQ.name)
def out_zmq(session, cluster_id, id):
""" An outgoing ZeroMQ connection.
"""
return _out_zmq(session, cluster_id).\
filter(OutgoingZMQ.id==id).\
one()
@query_wrapper
def out_zmq_list(session, cluster_id, needs_columns=False):
""" Outgoing ZeroMQ connections.
"""
return _out_zmq(session, cluster_id)
# ################################################################################################################################
def _channel_zmq(session, cluster_id):
return session.query(
ChannelZMQ.id, ChannelZMQ.name, ChannelZMQ.is_active,
ChannelZMQ.address, ChannelZMQ.socket_type, ChannelZMQ.socket_method, ChannelZMQ.sub_key,
ChannelZMQ.pool_strategy, ChannelZMQ.service_source, ChannelZMQ.data_format,
Service.name.label('service_name'), Service.impl_name.label('service_impl_name')).\
filter(Service.id==ChannelZMQ.service_id).\
filter(Cluster.id==ChannelZMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ChannelZMQ.name)
def channel_zmq(session, cluster_id, id):
""" An incoming ZeroMQ connection.
"""
return _channel_zmq(session, cluster_id).\
filter(ChannelZMQ.id==id).\
one()
@query_wrapper
def channel_zmq_list(session, cluster_id, needs_columns=False):
""" Incoming ZeroMQ connections.
"""
return _channel_zmq(session, cluster_id)
# ################################################################################################################################
def _http_soap(session, cluster_id):
return session.query(
HTTPSOAP.id,
HTTPSOAP.name,
HTTPSOAP.is_active,
HTTPSOAP.is_internal,
HTTPSOAP.transport,
HTTPSOAP.host,
HTTPSOAP.url_path,
HTTPSOAP.method,
HTTPSOAP.soap_action,
HTTPSOAP.soap_version,
HTTPSOAP.data_format,
HTTPSOAP.security_id,
HTTPSOAP.has_rbac,
HTTPSOAP.connection,
HTTPSOAP.content_type,
case([(HTTPSOAP.ping_method != None, HTTPSOAP.ping_method)], else_=DEFAULT_HTTP_PING_METHOD).label('ping_method'), # noqa
case([(HTTPSOAP.pool_size != None, HTTPSOAP.pool_size)], else_=DEFAULT_HTTP_POOL_SIZE).label('pool_size'),
case([(HTTPSOAP.merge_url_params_req != None, HTTPSOAP.merge_url_params_req)], else_=True).label('merge_url_params_req'),
case([(HTTPSOAP.url_params_pri != None, HTTPSOAP.url_params_pri)], else_=URL_PARAMS_PRIORITY.DEFAULT).label('url_params_pri'),
case([(HTTPSOAP.params_pri != None, HTTPSOAP.params_pri)], else_=PARAMS_PRIORITY.DEFAULT).label('params_pri'),
case([(
HTTPSOAP.serialization_type != None, HTTPSOAP.serialization_type)],
else_=HTTP_SOAP_SERIALIZATION_TYPE.DEFAULT.id).label('serialization_type'),
HTTPSOAP.timeout,
HTTPSOAP.sec_tls_ca_cert_id,
HTTPSOAP.sec_use_rbac,
HTTPSOAP.cache_id,
HTTPSOAP.cache_expiry,
HTTPSOAP.content_encoding,
HTTPSOAP.opaque1,
Cache.name.label('cache_name'),
Cache.cache_type,
TLSCACert.name.label('sec_tls_ca_cert_name'),
SecurityBase.sec_type,
Service.name.label('service_name'),
Service.id.label('service_id'),
Service.impl_name.label('service_impl_name'),
SecurityBase.name.label('security_name'),
SecurityBase.username.label('username'),
SecurityBase.password.label('password'),
SecurityBase.password_type.label('password_type'),).\
outerjoin(Service, Service.id==HTTPSOAP.service_id).\
outerjoin(Cache, Cache.id==HTTPSOAP.cache_id).\
outerjoin(TLSCACert, TLSCACert.id==HTTPSOAP.sec_tls_ca_cert_id).\
outerjoin(SecurityBase, HTTPSOAP.security_id==SecurityBase.id).\
filter(Cluster.id==HTTPSOAP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(HTTPSOAP.name)
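# A minimal sketch, not part of the original module, of the case()-based
# defaulting used in _http_soap above - when a nullable column is NULL,
# the database substitutes a default value at query time.
def _demo_case_default(column, default):
    # Behaves like COALESCE(column, default) for this simple NULL check
    return case([(column != None, column)], else_=default) # noqa: E711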
def http_soap_security_list(session, cluster_id, connection=None):
""" HTTP/SOAP security definitions.
"""
q = _http_soap(session, cluster_id)
if connection:
q = q.filter(HTTPSOAP.connection==connection)
return q
def http_soap(session, cluster_id, item_id=None, name=None):
""" An HTTP/SOAP connection.
"""
q = _http_soap(session, cluster_id)
if item_id:
q = q.filter(HTTPSOAP.id==item_id)
elif name:
q = q.filter(HTTPSOAP.name==name)
else:
raise Exception('Exactly one of \'id\' or \'name\' is required')
return q.one()
@query_wrapper
def http_soap_list(session, cluster_id, connection=None, transport=None, return_internal=True, data_format=None,
needs_columns=False, *args, **kwargs):
""" HTTP/SOAP connections, both channels and outgoing ones.
"""
q = _http_soap(session, cluster_id)
if connection:
q = q.filter(HTTPSOAP.connection==connection)
if transport:
q = q.filter(HTTPSOAP.transport==transport)
if not return_internal:
q = q.filter(not_(HTTPSOAP.name.startswith('zato')))
if data_format:
q = q.filter(HTTPSOAP.data_format.startswith(data_format))
return q
# ################################################################################################################################
def _out_sql(session, cluster_id):
return session.query(SQLConnectionPool).\
filter(Cluster.id==SQLConnectionPool.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(SQLConnectionPool.name)
def out_sql(session, cluster_id, id):
""" An outgoing SQL connection.
"""
return _out_sql(session, cluster_id).\
filter(SQLConnectionPool.id==id).\
one()
@query_wrapper
def out_sql_list(session, cluster_id, needs_columns=False):
""" Outgoing SQL connections.
"""
return _out_sql(session, cluster_id)
# ################################################################################################################################
def _out_ftp(session, cluster_id):
return session.query(
OutgoingFTP.id,
OutgoingFTP.name,
OutgoingFTP.is_active,
OutgoingFTP.host,
OutgoingFTP.port,
OutgoingFTP.user,
OutgoingFTP.password,
OutgoingFTP.acct,
OutgoingFTP.timeout,
OutgoingFTP.dircache,
OutgoingFTP.opaque1,
).\
filter(Cluster.id==OutgoingFTP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(OutgoingFTP.name)
def out_ftp(session, cluster_id, id):
""" An outgoing FTP connection.
"""
return _out_ftp(session, cluster_id).\
filter(OutgoingFTP.id==id).\
one()
@query_wrapper
def out_ftp_list(session, cluster_id, needs_columns=False):
""" Outgoing FTP connections.
"""
return _out_ftp(session, cluster_id)
# ################################################################################################################################
def _service(session, cluster_id):
return session.query(
Service.id,
Service.name,
Service.is_active,
Service.impl_name,
Service.is_internal,
Service.slow_threshold,
Service.opaque1,
).\
filter(Cluster.id==Service.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(Service.name)
def service(session, cluster_id, id=None, name=None):
""" A service.
"""
q = _service(session, cluster_id)
if name:
q = q.filter(Service.name==name)
elif id:
q = q.filter(Service.id==id)
return q.one()
@query_wrapper
def service_list(session, cluster_id, return_internal=True, include_list=None, needs_columns=False):
""" All services.
"""
q = _service(session, cluster_id)
if include_list:
        q = q.filter(Service.name.in_(include_list))
else:
if not return_internal:
q = q.filter(not_(Service.name.startswith('zato')))
return q
@query_wrapper
def service_list_with_include(session, cluster_id, include_list, needs_columns=False):
q = _service(session, cluster_id)
return q.filter(Service.name.in_(include_list))
def service_id_list(session, cluster_id, name_list=None):
return session.query(
Service.id,
Service.impl_name).\
filter(Cluster.id==Service.cluster_id).\
filter(Cluster.id==cluster_id).\
filter(Service.name.in_(name_list))
# ################################################################################################################################
def _msg_list(class_, order_by, session, cluster_id, needs_columns=False):
""" All the namespaces.
"""
return session.query(
class_.id, class_.name,
class_.value).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==class_.cluster_id).\
order_by(order_by)
@query_wrapper
def namespace_list(session, cluster_id, needs_columns=False):
""" All the namespaces.
"""
    return _msg_list(MsgNamespace, MsgNamespace.name, session, cluster_id, needs_columns)
@query_wrapper
def xpath_list(session, cluster_id, needs_columns=False):
""" All the XPaths.
"""
    return _msg_list(XPath, XPath.name, session, cluster_id, needs_columns)
@query_wrapper
def json_pointer_list(session, cluster_id, needs_columns=False):
""" All the JSON Pointers.
"""
    return _msg_list(JSONPointer, JSONPointer.name, session, cluster_id, needs_columns)
# ################################################################################################################################
def _cloud_aws_s3(session, cluster_id):
return session.query(
AWSS3.id, AWSS3.name, AWSS3.is_active, AWSS3.pool_size, AWSS3.address, AWSS3.debug_level, AWSS3.suppr_cons_slashes,
AWSS3.content_type, AWSS3.metadata_, AWSS3.security_id, AWSS3.bucket, AWSS3.encrypt_at_rest, AWSS3.storage_class,
SecurityBase.username, SecurityBase.password).\
filter(Cluster.id==cluster_id).\
filter(AWSS3.security_id==SecurityBase.id).\
order_by(AWSS3.name)
def cloud_aws_s3(session, cluster_id, id):
""" An AWS S3 connection.
"""
return _cloud_aws_s3(session, cluster_id).\
filter(AWSS3.id==id).\
one()
@query_wrapper
def cloud_aws_s3_list(session, cluster_id, needs_columns=False):
""" AWS S3 connections.
"""
return _cloud_aws_s3(session, cluster_id)
# ################################################################################################################################
def _pubsub_endpoint(session, cluster_id):
return session.query(
PubSubEndpoint.id,
PubSubEndpoint.name,
PubSubEndpoint.endpoint_type,
PubSubEndpoint.is_active,
PubSubEndpoint.is_internal,
PubSubEndpoint.role,
PubSubEndpoint.tags,
PubSubEndpoint.topic_patterns,
PubSubEndpoint.pub_tag_patterns,
PubSubEndpoint.message_tag_patterns,
PubSubEndpoint.security_id,
PubSubEndpoint.ws_channel_id,
SecurityBase.sec_type,
SecurityBase.name.label('sec_name'),
Service.id.label('service_id'),
Service.name.label('service_name'),
ChannelWebSocket.name.label('ws_channel_name'),
).\
outerjoin(SecurityBase, SecurityBase.id==PubSubEndpoint.security_id).\
        outerjoin(Service, Service.id==PubSubEndpoint.service_id).\
outerjoin(ChannelWebSocket, ChannelWebSocket.id==PubSubEndpoint.ws_channel_id).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==PubSubEndpoint.cluster_id).\
order_by(PubSubEndpoint.id)
def pubsub_endpoint(session, cluster_id, id):
""" An individual pub/sub endpoint.
"""
return _pubsub_endpoint(session, cluster_id).\
filter(PubSubEndpoint.id==id).\
one()
@query_wrapper
def pubsub_endpoint_list(session, cluster_id, needs_columns=False):
""" A list of pub/sub endpoints.
"""
return _pubsub_endpoint(session, cluster_id)
# ################################################################################################################################
def _pubsub_topic(session, cluster_id):
return session.query(
PubSubTopic.id,
PubSubTopic.name,
PubSubTopic.is_active,
PubSubTopic.is_internal,
PubSubTopic.max_depth_gd,
PubSubTopic.max_depth_non_gd,
PubSubTopic.has_gd,
PubSubTopic.is_api_sub_allowed,
PubSubTopic.depth_check_freq,
PubSubTopic.hook_service_id,
PubSubTopic.pub_buffer_size_gd,
PubSubTopic.task_sync_interval,
PubSubTopic.task_delivery_interval,
PubSubTopic.opaque1,
Service.name.label('hook_service_name'),
).\
outerjoin(Service, Service.id==PubSubTopic.hook_service_id).\
filter(Cluster.id==PubSubTopic.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(PubSubTopic.name)
@bunch_maker
def pubsub_topic(session, cluster_id, id):
""" A pub/sub topic.
"""
return _pubsub_topic(session, cluster_id).\
filter(PubSubTopic.id==id).\
one()
@query_wrapper
def pubsub_topic_list(session, cluster_id, needs_columns=False):
""" All pub/sub topics.
"""
return _pubsub_topic(session, cluster_id)
# ################################################################################################################################
def pubsub_publishers_for_topic(session, cluster_id, topic_id):
return session.query(
PubSubEndpoint.service_id, PubSubEndpoint.security_id,
PubSubEndpoint.ws_channel_id, PubSubEndpoint.name,
PubSubEndpoint.is_active, PubSubEndpoint.is_internal,
PubSubEndpoint.last_seen, PubSubEndpoint.last_pub_time,
PubSubEndpointTopic.last_pub_time,
PubSubEndpointTopic.pub_msg_id.label('last_msg_id'),
PubSubEndpointTopic.pub_correl_id.label('last_correl_id'),
PubSubEndpointTopic.in_reply_to.label('last_in_reply_to'),
PubSubEndpointTopic.ext_client_id,
Service.name.label('service_name'),
SecurityBase.name.label('sec_name'),
ChannelWebSocket.name.label('ws_channel_name'),
).\
outerjoin(Service, Service.id==PubSubEndpoint.service_id).\
outerjoin(SecurityBase, SecurityBase.id==PubSubEndpoint.security_id).\
outerjoin(ChannelWebSocket, ChannelWebSocket.id==PubSubEndpoint.ws_channel_id).\
filter(PubSubEndpointTopic.topic_id==PubSubTopic.id).\
filter(PubSubEndpointTopic.topic_id==topic_id).\
filter(PubSubEndpointTopic.endpoint_id==PubSubEndpoint.id).\
filter(PubSubEndpointTopic.cluster_id==cluster_id)
# ################################################################################################################################
def _pubsub_topic_message(session, cluster_id, needs_sub_queue_check):
q = session.query(
PubSubMessage.pub_msg_id.label('msg_id'),
PubSubMessage.pub_correl_id.label('correl_id'),
PubSubMessage.in_reply_to,
PubSubMessage.pub_time, PubSubMessage.data_prefix_short,
PubSubMessage.pub_pattern_matched, PubSubMessage.priority,
PubSubMessage.ext_pub_time, PubSubMessage.size,
PubSubMessage.data_format, PubSubMessage.mime_type,
PubSubMessage.data, PubSubMessage.expiration,
PubSubMessage.expiration_time, PubSubMessage.has_gd,
PubSubMessage.ext_client_id,
PubSubEndpoint.id.label('endpoint_id'),
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.service_id,
PubSubEndpoint.security_id,
PubSubEndpoint.ws_channel_id,
PubSubTopic.id.label('topic_id'),
PubSubTopic.name.label('topic_name'),
).\
filter(PubSubMessage.published_by_id==PubSubEndpoint.id).\
filter(PubSubMessage.cluster_id==cluster_id).\
filter(PubSubMessage.topic_id==PubSubTopic.id)
if needs_sub_queue_check:
q = q.\
filter(~PubSubMessage.is_in_sub_queue)
return q
# ################################################################################################################################
def pubsub_message(session, cluster_id, pub_msg_id, needs_sub_queue_check=True):
return _pubsub_topic_message(session, cluster_id, needs_sub_queue_check).\
filter(PubSubMessage.pub_msg_id==pub_msg_id)
# ################################################################################################################################
def _pubsub_endpoint_queue(session, cluster_id):
return session.query(
PubSubSubscription.id.label('sub_id'),
PubSubSubscription.active_status,
PubSubSubscription.is_internal,
PubSubSubscription.creation_time,
PubSubSubscription.sub_key,
PubSubSubscription.has_gd,
PubSubSubscription.delivery_method,
PubSubSubscription.delivery_data_format,
PubSubSubscription.delivery_endpoint,
PubSubSubscription.is_staging_enabled,
PubSubSubscription.ext_client_id,
PubSubTopic.id.label('topic_id'),
PubSubTopic.name.label('topic_name'),
PubSubTopic.name.label('name'), # Currently queue names are the same as their originating topics
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.id.label('endpoint_id'),
WebSocketSubscription.ext_client_id.label('ws_ext_client_id'),
).\
outerjoin(WebSocketSubscription, WebSocketSubscription.sub_key==PubSubSubscription.sub_key).\
filter(PubSubSubscription.topic_id==PubSubTopic.id).\
filter(PubSubSubscription.cluster_id==cluster_id).\
filter(PubSubSubscription.endpoint_id==PubSubEndpoint.id)
# ################################################################################################################################
@query_wrapper
def pubsub_endpoint_queue_list(session, cluster_id, endpoint_id, needs_columns=False):
return _pubsub_endpoint_queue(session, cluster_id).\
filter(PubSubSubscription.endpoint_id==endpoint_id).\
order_by(PubSubSubscription.creation_time.desc())
# ################################################################################################################################
def pubsub_endpoint_queue_list_by_sub_keys(session, cluster_id, sub_key_list):
return _pubsub_endpoint_queue(session, cluster_id).\
filter(PubSubSubscription.sub_key.in_(sub_key_list)).\
all()
# ################################################################################################################################
def pubsub_endpoint_queue(session, cluster_id, sub_id):
return _pubsub_endpoint_queue(session, cluster_id).\
filter(PubSubSubscription.id==sub_id).\
one()
# ################################################################################################################################
@query_wrapper
def pubsub_messages_for_topic(session, cluster_id, topic_id, needs_columns=False):
return _pubsub_topic_message(session, cluster_id, True).\
filter(PubSubMessage.topic_id==topic_id).\
order_by(PubSubMessage.pub_time.desc())
# ################################################################################################################################
def _pubsub_queue_message(session, cluster_id):
return session.query(
PubSubMessage.pub_msg_id.label('msg_id'),
PubSubMessage.pub_correl_id.label('correl_id'),
PubSubMessage.in_reply_to,
PubSubMessage.data_prefix_short,
PubSubMessage.priority,
PubSubMessage.ext_pub_time,
PubSubMessage.size,
PubSubMessage.data_format,
PubSubMessage.mime_type,
PubSubMessage.data,
PubSubMessage.expiration,
PubSubMessage.expiration_time,
PubSubMessage.ext_client_id,
PubSubMessage.published_by_id,
PubSubMessage.pub_pattern_matched,
PubSubTopic.id.label('topic_id'),
PubSubTopic.name.label('topic_name'),
PubSubTopic.name.label('queue_name'), # Currently, queue name = name of its underlying topic
PubSubEndpointEnqueuedMessage.creation_time.label('recv_time'),
PubSubEndpointEnqueuedMessage.delivery_count,
PubSubEndpointEnqueuedMessage.last_delivery_time,
PubSubEndpointEnqueuedMessage.is_in_staging,
PubSubEndpointEnqueuedMessage.endpoint_id.label('subscriber_id'),
PubSubEndpointEnqueuedMessage.sub_key,
PubSubEndpoint.name.label('subscriber_name'),
PubSubSubscription.sub_pattern_matched,
).\
filter(PubSubEndpointEnqueuedMessage.pub_msg_id==PubSubMessage.pub_msg_id).\
filter(PubSubEndpointEnqueuedMessage.topic_id==PubSubTopic.id).\
filter(PubSubEndpointEnqueuedMessage.endpoint_id==PubSubEndpoint.id).\
filter(PubSubEndpointEnqueuedMessage.sub_key==PubSubSubscription.sub_key).\
filter(PubSubEndpointEnqueuedMessage.cluster_id==cluster_id)
# ################################################################################################################################
def pubsub_queue_message(session, cluster_id, msg_id):
return _pubsub_queue_message(session, cluster_id).\
filter(PubSubMessage.pub_msg_id==msg_id)
# ################################################################################################################################
@query_wrapper
def pubsub_messages_for_queue(session, cluster_id, sub_key, skip_delivered=False, needs_columns=False):
q = _pubsub_queue_message(session, cluster_id).\
filter(PubSubEndpointEnqueuedMessage.sub_key==sub_key)
if skip_delivered:
q = q.filter(PubSubEndpointEnqueuedMessage.delivery_status != PUBSUB.DELIVERY_STATUS.DELIVERED)
return q.order_by(PubSubEndpointEnqueuedMessage.creation_time.desc())
# ################################################################################################################################
def pubsub_hook_service(session, cluster_id, endpoint_id, model_class):
return session.query(
Service.id,
Service.name,
).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==Service.cluster_id).\
filter(Service.id==model_class.hook_service_id).\
first()
# ################################################################################################################################
def _notif_sql(session, cluster_id, needs_password):
""" SQL notifications.
"""
columns = [NotifSQL.id, NotifSQL.is_active, NotifSQL.name, NotifSQL.query, NotifSQL.notif_type, NotifSQL.interval,
NotifSQL.def_id, SQLConnectionPool.name.label('def_name'), Service.name.label('service_name')]
if needs_password:
columns.append(SQLConnectionPool.password)
return session.query(*columns).\
filter(Cluster.id==NotifSQL.cluster_id).\
filter(SQLConnectionPool.id==NotifSQL.def_id).\
filter(Service.id==NotifSQL.service_id).\
filter(Cluster.id==cluster_id)
@query_wrapper
def notif_sql_list(session, cluster_id, needs_password=False, needs_columns=False):
""" All the SQL notifications.
"""
return _notif_sql(session, cluster_id, needs_password)
# ################################################################################################################################
def _search_es(session, cluster_id):
""" ElasticSearch connections.
"""
return session.query(ElasticSearch).\
filter(Cluster.id==ElasticSearch.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ElasticSearch.name)
@query_wrapper
def search_es_list(session, cluster_id, needs_columns=False):
""" All the ElasticSearch connections.
"""
return _search_es(session, cluster_id)
# ################################################################################################################################
def _search_solr(session, cluster_id):
""" Solr sonnections.
"""
return session.query(Solr).\
filter(Cluster.id==Solr.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(Solr.name)
@query_wrapper
def search_solr_list(session, cluster_id, needs_columns=False):
""" All the Solr connections.
"""
return _search_solr(session, cluster_id)
# ################################################################################################################################
def _server(session, cluster_id, cluster_name):
q = session.query(
Server.id, Server.name, Server.bind_host, Server.bind_port, Server.last_join_status, Server.last_join_mod_date,
Server.last_join_mod_by, Server.up_status, Server.up_mod_date, Server.preferred_address,
Server.crypto_use_tls,
Cluster.id.label('cluster_id'), Cluster.name.label('cluster_name')).\
filter(Cluster.id==Server.cluster_id)
if cluster_id:
q = q.filter(Cluster.id==cluster_id)
else:
q = q.filter(Cluster.name==cluster_name)
q = q.order_by(Server.name)
return q
@query_wrapper
def server_list(session, cluster_id, cluster_name, up_status=None, needs_columns=False):
""" All the servers defined on a cluster.
"""
q = _server(session, cluster_id, cluster_name)
if up_status:
q = q.filter(Server.up_status==up_status)
return q
def server_by_name(session, cluster_id, cluster_name, server_name):
return _server(session, cluster_id, cluster_name).\
filter(Server.name==server_name).\
all()
def server_by_id(session, cluster_id, server_id):
return _server(session, cluster_id, None).\
filter(Server.id==server_id).\
one()
# ################################################################################################################################
def _cassandra_conn(session, cluster_id):
return session.query(CassandraConn).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==CassandraConn.cluster_id).\
order_by(CassandraConn.name)
def cassandra_conn(session, cluster_id, id):
""" A Cassandra connection definition.
"""
return _cassandra_conn(session, cluster_id).\
filter(CassandraConn.id==id).\
one()
@query_wrapper
def cassandra_conn_list(session, cluster_id, needs_columns=False):
""" A list of Cassandra connection definitions.
"""
return _cassandra_conn(session, cluster_id)
# ################################################################################################################################
def _cassandra_query(session, cluster_id):
return session.query(
CassandraQuery.id, CassandraQuery.name, CassandraQuery.value,
CassandraQuery.is_active, CassandraQuery.cluster_id,
CassandraConn.name.label('def_name'),
CassandraConn.id.label('def_id'),
).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==CassandraQuery.cluster_id).\
filter(CassandraConn.id==CassandraQuery.def_id).\
order_by(CassandraQuery.name)
def cassandra_query(session, cluster_id, id):
""" A Cassandra prepared statement.
"""
return _cassandra_query(session, cluster_id).\
filter(CassandraQuery.id==id).\
one()
@query_wrapper
def cassandra_query_list(session, cluster_id, needs_columns=False):
""" A list of Cassandra prepared statements.
"""
return _cassandra_query(session, cluster_id)
# ################################################################################################################################
def _email_smtp(session, cluster_id):
return session.query(SMTP).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==SMTP.cluster_id).\
order_by(SMTP.name)
def email_smtp(session, cluster_id, id):
""" An SMTP connection.
"""
return _email_smtp(session, cluster_id).\
filter(SMTP.id==id).\
one()
@query_wrapper
def email_smtp_list(session, cluster_id, needs_columns=False):
""" A list of SMTP connections.
"""
return _email_smtp(session, cluster_id)
# ################################################################################################################################
def _email_imap(session, cluster_id):
return session.query(IMAP).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==IMAP.cluster_id).\
order_by(IMAP.name)
def email_imap(session, cluster_id, id):
""" An IMAP connection.
"""
return _email_imap(session, cluster_id).\
filter(IMAP.id==id).\
one()
@query_wrapper
def email_imap_list(session, cluster_id, needs_columns=False):
""" A list of IMAP connections.
"""
return _email_imap(session, cluster_id)
# ################################################################################################################################
def _rbac_permission(session, cluster_id):
return session.query(RBACPermission).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==RBACPermission.cluster_id).\
order_by(RBACPermission.name)
def rbac_permission(session, cluster_id, id=None, name=None):
""" An RBAC permission.
"""
q = _rbac_permission(session, cluster_id)
if name:
q = q.filter(RBACPermission.name==name)
elif id:
q = q.filter(RBACPermission.id==id)
return q.one()
@query_wrapper
def rbac_permission_list(session, cluster_id, needs_columns=False):
""" A list of RBAC permissions.
"""
return _rbac_permission(session, cluster_id)
# ################################################################################################################################
def _rbac_role(session, cluster_id):
rbac_parent = aliased(RBACRole)
return session.query(RBACRole.id, RBACRole.name, RBACRole.parent_id, rbac_parent.name.label('parent_name')).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==RBACRole.cluster_id).\
outerjoin(rbac_parent, rbac_parent.id==RBACRole.parent_id).\
order_by(RBACRole.name)
def rbac_role(session, cluster_id, id=None, name=None):
""" An RBAC role.
"""
q = _rbac_role(session, cluster_id)
if name:
q = q.filter(RBACRole.name==name)
elif id:
q = q.filter(RBACRole.id==id)
return q.one()
@query_wrapper
def rbac_role_list(session, cluster_id, needs_columns=False):
""" A list of RBAC roles.
"""
return _rbac_role(session, cluster_id)
# ################################################################################################################################
def _rbac_client_role(session, cluster_id):
return session.query(RBACClientRole).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==RBACClientRole.cluster_id).\
order_by(RBACClientRole.client_def)
def rbac_client_role(session, cluster_id, id):
""" An individual mapping between a client and role.
"""
return _rbac_client_role(session, cluster_id).\
filter(RBACClientRole.id==id).\
one()
@query_wrapper
def rbac_client_role_list(session, cluster_id, needs_columns=False):
""" A list of mappings between clients and roles.
"""
return _rbac_client_role(session, cluster_id)
# ################################################################################################################################
def _rbac_role_permission(session, cluster_id):
return session.query(RBACRolePermission).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==RBACRolePermission.cluster_id).\
order_by(RBACRolePermission.role_id)
def rbac_role_permission(session, cluster_id, id):
""" An individual permission for a given role against a service.
"""
return _rbac_role_permission(session, cluster_id).\
filter(RBACRolePermission.id==id).\
one()
@query_wrapper
def rbac_role_permission_list(session, cluster_id, needs_columns=False):
""" A list of permissions for roles against services.
"""
return _rbac_role_permission(session, cluster_id)
# ################################################################################################################################
def cache_by_id(session, cluster_id, cache_id):
return session.query(Cache).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==Cache.cluster_id).\
filter(Cache.id==cache_id).\
one()
# ################################################################################################################################
def _cache_builtin(session, cluster_id):
return session.query(CacheBuiltin).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==CacheBuiltin.cluster_id).\
filter(Cache.id==CacheBuiltin.cache_id).\
filter(Cache.cache_type==CACHE.TYPE.BUILTIN).\
order_by(CacheBuiltin.name)
def cache_builtin(session, cluster_id, id):
""" An individual built-in cache definition.
"""
return _cache_builtin(session, cluster_id).\
filter(CacheBuiltin.id==id).\
one()
@query_wrapper
def cache_builtin_list(session, cluster_id, needs_columns=False):
""" A list of built-in cache definitions.
"""
return _cache_builtin(session, cluster_id)
# ################################################################################################################################
def _cache_memcached(session, cluster_id):
return session.query(
CacheMemcached.cache_id, CacheMemcached.name, CacheMemcached.is_active,
CacheMemcached.is_default, CacheMemcached.is_debug,
CacheMemcached.servers, CacheMemcached.extra,
CacheMemcached.cache_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==CacheMemcached.cluster_id).\
filter(Cache.id==CacheMemcached.cache_id).\
filter(Cache.cache_type==CACHE.TYPE.MEMCACHED).\
order_by(CacheMemcached.name)
def cache_memcached(session, cluster_id, id):
""" An individual Memcached cache definition.
"""
return _cache_memcached(session, cluster_id).\
filter(CacheMemcached.id==id).\
one()
@query_wrapper
def cache_memcached_list(session, cluster_id, needs_columns=False):
""" A list of Memcached cache definitions.
"""
return _cache_memcached(session, cluster_id)
# ################################################################################################################################
def _out_odoo(session, cluster_id):
return session.query(OutgoingOdoo).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==OutgoingOdoo.cluster_id).\
order_by(OutgoingOdoo.name)
def out_odoo(session, cluster_id, id):
""" An individual Odoo connection.
"""
return _out_odoo(session, cluster_id).\
filter(OutgoingOdoo.id==id).\
one()
@query_wrapper
def out_odoo_list(session, cluster_id, needs_columns=False):
""" A list of Odoo connections.
"""
return _out_odoo(session, cluster_id)
# ################################################################################################################################
def _out_sap(session, cluster_id):
return session.query(OutgoingSAP).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==OutgoingSAP.cluster_id).\
order_by(OutgoingSAP.name)
def out_sap(session, cluster_id, id):
""" An individual SAP RFC connection.
"""
return _out_sap(session, cluster_id).\
filter(OutgoingSAP.id==id).\
one()
@query_wrapper
def out_sap_list(session, cluster_id, needs_columns=False):
""" A list of SAP RFC connections.
"""
return _out_sap(session, cluster_id)
# ################################################################################################################################
def _channel_web_socket(session, cluster_id):
""" WebSocket channels
"""
return session.query(
ChannelWebSocket.id,
ChannelWebSocket.name,
ChannelWebSocket.is_active,
ChannelWebSocket.is_internal,
ChannelWebSocket.address,
ChannelWebSocket.data_format,
ChannelWebSocket.service_id,
ChannelWebSocket.security_id,
ChannelWebSocket.new_token_wait_time,
ChannelWebSocket.token_ttl,
ChannelWebSocket.is_out,
ChannelWebSocket.opaque1,
SecurityBase.sec_type,
VaultConnection.default_auth_method.label('vault_conn_default_auth_method'),
SecurityBase.name.label('sec_name'),
Service.name.label('service_name'),
).\
outerjoin(Service, Service.id==ChannelWebSocket.service_id).\
outerjoin(SecurityBase, SecurityBase.id==ChannelWebSocket.security_id).\
outerjoin(VaultConnection, SecurityBase.id==VaultConnection.id).\
filter(Cluster.id==ChannelWebSocket.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ChannelWebSocket.name)
def channel_web_socket(session, cluster_id, id):
""" An incoming WebSocket connection.
"""
return _channel_web_socket(session, cluster_id).\
filter(ChannelWebSocket.id==id).\
one()
@query_wrapper
def channel_web_socket_list(session, cluster_id, needs_columns=False):
""" All the WebSocket channel connections.
"""
return _channel_web_socket(session, cluster_id)
# ################################################################################################################################
def web_socket_client_by_pub_id(session, pub_client_id):
""" An individual WebSocket connection by its public ID.
"""
return session.query(
WebSocketClient.id,
ChannelWebSocket.id.label('channel_id'),
ChannelWebSocket.name.label('channel_name')
).\
filter(WebSocketClient.pub_client_id==pub_client_id).\
outerjoin(ChannelWebSocket, ChannelWebSocket.id==WebSocketClient.channel_id).\
one()
# ################################################################################################################################
def web_socket_client_by_ext_id(session, ext_client_id, needs_one_or_none=False):
""" An individual WebSocket connection by its external client ID.
"""
query = session.query(
WebSocketClient,
ChannelWebSocket.id.label('channel_id'),
ChannelWebSocket.name.label('channel_name')
).\
filter(WebSocketClient.ext_client_id==ext_client_id).\
outerjoin(ChannelWebSocket, ChannelWebSocket.id==WebSocketClient.channel_id)
return query.one_or_none() if needs_one_or_none else query.all()
# ################################################################################################################################
def web_socket_clients_by_server_id(session, server_id, server_pid):
""" A list of WebSocket clients attached to a particular server by the latter's ID.
"""
query = session.query(WebSocketClient).\
filter(WebSocketClient.server_id==server_id)
if server_pid:
query = query.\
filter(WebSocketClient.server_proc_pid==server_pid)
return query
# ################################################################################################################################
def _web_socket_client(session, cluster_id, channel_id):
return session.query(WebSocketClient).\
filter(WebSocketClient.cluster_id==cluster_id).\
filter(WebSocketClient.channel_id==channel_id).\
order_by(WebSocketClient.connection_time.desc())
# ################################################################################################################################
def web_socket_client(session, cluster_id, channel_id, pub_client_id=None, ext_client_id=None, use_first=True):
query = _web_socket_client(session, cluster_id, channel_id)
if pub_client_id:
query = query.filter(WebSocketClient.pub_client_id==pub_client_id)
elif ext_client_id:
query = query.filter(WebSocketClient.ext_client_id==ext_client_id)
else:
raise ValueError('Either pub_client_id or ext_client_id is required on input')
return query.first() if use_first else query.all()
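# A minimal usage sketch (hypothetical values) - exactly one of pub_client_id
# or ext_client_id must be given, otherwise ValueError is raised:
#
#   client = web_socket_client(session, cluster_id=1, channel_id=2,
#       pub_client_id='zwsc.abc123')  # use_first=True returns one row or None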
# ################################################################################################################################
@query_wrapper
def web_socket_client_list(session, cluster_id, channel_id, needs_columns=False):
""" A list of subscriptions to a particular pattern.
"""
return _web_socket_client(session, cluster_id, channel_id)
# ################################################################################################################################
def _web_socket_sub_key_data(session, cluster_id, pub_client_id):
return session.query(
WebSocketClientPubSubKeys.sub_key,
PubSubSubscription.topic_id,
PubSubSubscription.id.label('sub_id'),
PubSubSubscription.creation_time,
PubSubSubscription.endpoint_id,
PubSubSubscription.sub_pattern_matched,
PubSubSubscription.ext_client_id,
PubSubEndpoint.name.label('endpoint_name'),
PubSubTopic.name.label('topic_name')
).\
filter(WebSocketClient.pub_client_id==pub_client_id).\
filter(WebSocketClient.id==WebSocketClientPubSubKeys.client_id).\
filter(WebSocketClientPubSubKeys.sub_key==WebSocketSubscription.sub_key).\
filter(WebSocketClientPubSubKeys.sub_key==PubSubSubscription.sub_key).\
filter(PubSubSubscription.topic_id==PubSubTopic.id).\
filter(PubSubSubscription.endpoint_id==PubSubEndpoint.id)
@query_wrapper
def web_socket_sub_key_data_list(session, cluster_id, pub_client_id, needs_columns=False):
return _web_socket_sub_key_data(session, cluster_id, pub_client_id)
# ################################################################################################################################
def _vault_connection(session, cluster_id):
return session.query(VaultConnection.id, VaultConnection.is_active, VaultConnection.name,
VaultConnection.url, VaultConnection.token, VaultConnection.default_auth_method,
VaultConnection.timeout, VaultConnection.allow_redirects, VaultConnection.tls_verify,
VaultConnection.tls_ca_cert_id, VaultConnection.tls_key_cert_id, VaultConnection.sec_type,
Service.name.label('service_name'), Service.id.label('service_id')).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==VaultConnection.cluster_id).\
outerjoin(Service, Service.id==VaultConnection.service_id).\
order_by(VaultConnection.name)
def vault_connection(session, cluster_id, id):
""" An individual Vault connection.
"""
return _vault_connection(session, cluster_id).\
filter(VaultConnection.id==id).\
one()
@query_wrapper
def vault_connection_list(session, cluster_id, needs_columns=False):
""" A list of Vault connections.
"""
return _vault_connection(session, cluster_id)
# ################################################################################################################################
def _sms_twilio(session, cluster_id):
""" SMS Twilio connections.
"""
return session.query(SMSTwilio).\
filter(Cluster.id==SMSTwilio.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(SMSTwilio.name)
def sms_twilio(session, cluster_id, id):
""" An individual SMS Twilio connection.
"""
return _sms_twilio(session, cluster_id).\
filter(SMSTwilio.id==id).\
one()
@query_wrapper
def sms_twilio_list(session, cluster_id, needs_columns=False):
""" All the SMS Twilio connections.
"""
return _sms_twilio(session, cluster_id)
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/odb/query/__init__.py | __init__.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import and_, exists, insert, update
from sqlalchemy.sql import expression as expr
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpointEnqueuedMessage, PubSubMessage, PubSubSubscription, WebSocketSubscription
from zato.common.util.time_ import utcnow_as_ms
# ################################################################################################################################
MsgTable = PubSubMessage.__table__
# ################################################################################################################################
_initialized = PUBSUB.DELIVERY_STATUS.INITIALIZED
# ################################################################################################################################
def has_subscription(session, cluster_id, topic_id, endpoint_id):
""" Returns a boolean flag indicating whether input endpoint has subscription to a given topic.
"""
return session.query(exists().where(and_(
PubSubSubscription.endpoint_id==endpoint_id,
PubSubSubscription.topic_id==topic_id,
PubSubSubscription.cluster_id==cluster_id,
))).\
scalar()
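# A minimal usage sketch (hypothetical caller code) - the query compiles to
# SELECT EXISTS(...), so a plain boolean is returned:
#
#   if has_subscription(session, cluster_id, topic_id, endpoint_id):
#       pass  # E.g. reject a duplicate subscription request here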
# ################################################################################################################################
def add_wsx_subscription(session, cluster_id, is_internal, sub_key, ext_client_id, ws_channel_id, sub_id):
""" Adds an object representing a subscription of a WebSockets client.
"""
wsx_sub = WebSocketSubscription()
wsx_sub.is_internal = is_internal or False
wsx_sub.sub_key = sub_key
wsx_sub.ext_client_id = ext_client_id
wsx_sub.channel_id = ws_channel_id
wsx_sub.cluster_id = cluster_id
wsx_sub.subscription_id = sub_id
session.add(wsx_sub)
return wsx_sub
# ################################################################################################################################
def add_subscription(session, cluster_id, sub_key, ctx):
""" Adds an object representing a subscription regardless of the underlying protocol.
"""
# Common
ps_sub = PubSubSubscription()
ps_sub.cluster_id = ctx.cluster_id
ps_sub.server_id = ctx.server_id
ps_sub.topic_id = ctx.topic.id
ps_sub.is_internal = ctx.is_internal
ps_sub.is_staging_enabled = ctx.is_staging_enabled
ps_sub.creation_time = ctx.creation_time
ps_sub.sub_key = sub_key
ps_sub.sub_pattern_matched = ctx.sub_pattern_matched
ps_sub.has_gd = ctx.has_gd
ps_sub.active_status = ctx.active_status
ps_sub.endpoint_type = ctx.endpoint_type
ps_sub.endpoint_id = ctx.endpoint_id
ps_sub.delivery_method = ctx.delivery_method
ps_sub.delivery_data_format = ctx.delivery_data_format
ps_sub.delivery_batch_size = ctx.delivery_batch_size
ps_sub.wrap_one_msg_in_list = ctx.wrap_one_msg_in_list if ctx.wrap_one_msg_in_list is not None else True
ps_sub.delivery_max_retry = ctx.delivery_max_retry
ps_sub.delivery_err_should_block = ctx.delivery_err_should_block if ctx.delivery_err_should_block is not None else True
ps_sub.wait_sock_err = ctx.wait_sock_err
ps_sub.wait_non_sock_err = ctx.wait_non_sock_err
ps_sub.ext_client_id = ctx.ext_client_id
# AMQP
ps_sub.amqp_exchange = ctx.amqp_exchange
ps_sub.amqp_routing_key = ctx.amqp_routing_key
ps_sub.out_amqp_id = ctx.out_amqp_id
# Local files
ps_sub.files_directory_list = ctx.files_directory_list
# FTP
ps_sub.ftp_directory_list = ctx.ftp_directory_list
# REST/SOAP
ps_sub.security_id = ctx.security_id
ps_sub.out_http_soap_id = ctx.out_http_soap_id
ps_sub.out_http_method = ctx.out_http_method
# Services
ps_sub.service_id = ctx.service_id
# SMS - Twilio
ps_sub.sms_twilio_from = ctx.sms_twilio_from
ps_sub.sms_twilio_to_list = ctx.sms_twilio_to_list
ps_sub.smtp_is_html = ctx.smtp_is_html
ps_sub.smtp_subject = ctx.smtp_subject
ps_sub.smtp_from = ctx.smtp_from
ps_sub.smtp_to_list = ctx.smtp_to_list
ps_sub.smtp_body = ctx.smtp_body
# WebSockets
ps_sub.ws_channel_id = ctx.ws_channel_id
session.add(ps_sub)
return ps_sub
# ################################################################################################################################
def move_messages_to_sub_queue(session, cluster_id, topic_id, endpoint_id, sub_pattern_matched, sub_key, pub_time_max,
_initialized=_initialized):
""" Move all unexpired messages from topic to a given subscriber's queue. This method must be called with a global lock
held for topic because it carries out its job through a couple of non-atomic queries.
"""
enqueued_id_subquery = session.query(
PubSubEndpointEnqueuedMessage.pub_msg_id
).\
filter(PubSubEndpointEnqueuedMessage.sub_key==sub_key)
now = utcnow_as_ms()
# SELECT statement used by the INSERT below finds all messages for that topic
# that haven't expired yet.
select_messages = session.query(
PubSubMessage.pub_msg_id,
PubSubMessage.topic_id,
expr.bindparam('creation_time', now),
expr.bindparam('endpoint_id', endpoint_id),
expr.bindparam('sub_pattern_matched', sub_pattern_matched),
expr.bindparam('sub_key', sub_key),
expr.bindparam('is_in_staging', False),
expr.bindparam('cluster_id', cluster_id),
).\
filter(PubSubMessage.topic_id==topic_id).\
filter(PubSubMessage.cluster_id==cluster_id).\
filter(PubSubMessage.expiration_time > pub_time_max).\
filter(~PubSubMessage.is_in_sub_queue).\
filter(PubSubMessage.pub_msg_id.notin_(enqueued_id_subquery))
# All message IDs that are available in the topic for that subscriber, if there are any at all.
# In theory, it is not required to pull all the messages to build the list in Python, but this is a relatively
# efficient operation because not much data will be returned, and it lets us make sure
# the INSERT and UPDATE below are issued only if truly needed.
msg_ids = [elem.pub_msg_id for elem in select_messages.all()]
if msg_ids:
# INSERT references to topic's messages in the subscriber's queue.
insert_messages = insert(PubSubEndpointEnqueuedMessage).\
from_select((
PubSubEndpointEnqueuedMessage.pub_msg_id,
PubSubEndpointEnqueuedMessage.topic_id,
expr.column('creation_time'),
expr.column('endpoint_id'),
expr.column('sub_pattern_matched'),
expr.column('sub_key'),
expr.column('is_in_staging'),
expr.column('cluster_id'),
), select_messages)
# Move messages to subscriber's queue
session.execute(insert_messages)
# Indicate that all the messages are being delivered to the subscriber which means that no other
# subscriber will ever receive them. Note that we are changing the status only for the messages pertaining
# to the current subscriber without ever touching messages received by any other one.
session.execute(
update(MsgTable).\
values({
'is_in_sub_queue': True,
}).\
where(and_(
MsgTable.c.pub_msg_id.in_(msg_ids),
~MsgTable.c.is_in_sub_queue
))
)
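# A usage sketch with hypothetical names - topic_lock and session_factory are
# stand-ins, not part of this module. It only illustrates the contract from
# the docstring above: a topic-wide lock must be held around the call because
# the SELECT, INSERT and UPDATE above are not atomic as a whole.
#
#   with topic_lock('zato.pubsub.topic.my.topic'):
#       session = session_factory()
#       move_messages_to_sub_queue(session, cluster_id=1, topic_id=123,
#           endpoint_id=45, sub_pattern_matched='sub=/my/topic',
#           sub_key='zpsk.rest.abc123', pub_time_max=utcnow_as_ms())
#       session.commit()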
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/odb/query/pubsub/subscribe.py | subscribe.py |
# SQLAlchemy
from sqlalchemy import and_, func, select
from sqlalchemy.sql.expression import false as sa_false
# Zato
from zato.common.odb.model import PubSubMessage, PubSubTopic, PubSubSubscription
from zato.common.odb.query import count
# ################################################################################################################################
MsgTable = PubSubMessage.__table__
# ################################################################################################################################
def get_topics_by_sub_keys(session, cluster_id, sub_keys):
""" Returns (topic_id, sub_key) for each input sub_key.
"""
return session.query(
PubSubTopic.id,
PubSubSubscription.sub_key).\
filter(PubSubSubscription.topic_id==PubSubTopic.id).\
filter(PubSubSubscription.sub_key.in_(sub_keys)).\
all()
# ################################################################################################################################
def get_gd_depth_topic(session, cluster_id, topic_id):
""" Returns current depth of input topic by its ID.
"""
q = session.query(MsgTable.c.id).\
filter(MsgTable.c.topic_id==topic_id).\
filter(MsgTable.c.cluster_id==cluster_id).\
filter(~MsgTable.c.is_in_sub_queue)
return count(session, q)
# ################################################################################################################################
def get_gd_depth_topic_list(session, cluster_id, topic_id_list):
""" Returns topics matching the input list as long as they have any messages undelivered to their queues.
"""
q = select([
MsgTable.c.topic_id,
func.count(MsgTable.c.topic_id).label('depth')]).\
where(and_(
MsgTable.c.cluster_id == cluster_id,
MsgTable.c.is_in_sub_queue == sa_false(),
MsgTable.c.topic_id.in_(topic_id_list),
)).\
group_by('topic_id')
return session.execute(q).fetchall()
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/odb/query/pubsub/topic.py | topic.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import func
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import Cluster, PubSubTopic, PubSubEndpoint, PubSubSubscription
from zato.common.odb.query import query_wrapper
# ################################################################################################################################
_subscriber_role = (PUBSUB.ROLE.PUBLISHER_SUBSCRIBER.id, PUBSUB.ROLE.SUBSCRIBER.id)
# ################################################################################################################################
def _pubsub_subscription(session, cluster_id):
return session.query(
PubSubSubscription.id,
PubSubSubscription.id.label('sub_id'),
PubSubSubscription.id.label('name'), # A unique 'name' attribute is needed by ConfigDict
PubSubSubscription.active_status,
PubSubSubscription.server_id,
PubSubSubscription.is_internal,
PubSubSubscription.is_staging_enabled,
PubSubSubscription.creation_time,
PubSubSubscription.last_interaction_time,
PubSubSubscription.last_interaction_type,
PubSubSubscription.last_interaction_details,
PubSubSubscription.sub_key,
PubSubSubscription.is_durable,
PubSubSubscription.has_gd,
PubSubSubscription.topic_id,
PubSubSubscription.endpoint_id,
PubSubSubscription.delivery_method,
PubSubSubscription.delivery_data_format,
PubSubSubscription.delivery_batch_size,
PubSubSubscription.wrap_one_msg_in_list,
PubSubSubscription.delivery_max_retry,
PubSubSubscription.ext_client_id,
PubSubSubscription.delivery_err_should_block,
PubSubSubscription.wait_sock_err,
PubSubSubscription.wait_non_sock_err,
PubSubSubscription.sub_pattern_matched,
PubSubSubscription.out_amqp_id,
PubSubSubscription.amqp_exchange,
PubSubSubscription.amqp_routing_key,
PubSubSubscription.files_directory_list,
PubSubSubscription.ftp_directory_list,
PubSubSubscription.sms_twilio_from,
PubSubSubscription.sms_twilio_to_list,
PubSubSubscription.smtp_is_html,
PubSubSubscription.smtp_subject,
PubSubSubscription.smtp_from,
PubSubSubscription.smtp_to_list,
PubSubSubscription.smtp_body,
PubSubSubscription.out_http_soap_id,
PubSubSubscription.out_http_soap_id.label('out_rest_http_soap_id'),
PubSubSubscription.out_http_soap_id.label('out_soap_http_soap_id'),
PubSubSubscription.out_http_method,
PubSubSubscription.delivery_endpoint,
PubSubSubscription.ws_channel_id,
PubSubSubscription.cluster_id,
PubSubTopic.name.label('topic_name'),
PubSubTopic.task_delivery_interval,
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.endpoint_type,
PubSubEndpoint.service_id,
).\
outerjoin(PubSubTopic, PubSubTopic.id==PubSubSubscription.topic_id).\
filter(PubSubEndpoint.id==PubSubSubscription.endpoint_id).\
filter(Cluster.id==PubSubSubscription.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(PubSubSubscription.id.desc())
# ################################################################################################################################
def pubsub_subscription(session, cluster_id, id):
""" A pub/sub subscription.
"""
return _pubsub_subscription(session, cluster_id).\
filter(PubSubSubscription.id==id).\
one()
# ################################################################################################################################
@query_wrapper
def pubsub_subscription_list(session, cluster_id, needs_columns=False):
""" All pub/sub subscriptions.
"""
return _pubsub_subscription(session, cluster_id)
# ################################################################################################################################
@query_wrapper
def pubsub_subscription_list_by_endpoint_id(session, cluster_id, endpoint_id, needs_columns=False):
""" A list of all pub/sub subscriptions for a given endpoint with a search results wrapper.
"""
return _pubsub_subscription(session, cluster_id).\
filter(PubSubSubscription.endpoint_id==endpoint_id)
# ################################################################################################################################
def pubsub_subscription_list_by_endpoint_id_no_search(session, cluster_id, endpoint_id):
""" A list of all pub/sub subscriptions for a given endpoint without a search results wrapper.
"""
return _pubsub_subscription(session, cluster_id).\
filter(PubSubSubscription.endpoint_id==endpoint_id)
# ################################################################################################################################
@query_wrapper
def pubsub_endpoint_summary_list(session, cluster_id, needs_columns=False):
return session.query(
PubSubEndpoint.id,
PubSubEndpoint.is_active,
PubSubEndpoint.is_internal,
PubSubEndpoint.role,
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.endpoint_type,
PubSubEndpoint.last_seen,
PubSubEndpoint.last_deliv_time,
func.count(PubSubSubscription.id).label('subscription_count'),
).\
group_by(PubSubEndpoint.id).\
outerjoin(PubSubSubscription, PubSubEndpoint.id==PubSubSubscription.endpoint_id).\
filter(Cluster.id==PubSubEndpoint.cluster_id).\
filter(Cluster.id==cluster_id).\
filter(PubSubEndpoint.role.in_(_subscriber_role)).\
order_by(PubSubEndpoint.id)
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/odb/query/pubsub/subscription.py | subscription.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import func
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import Cluster, PubSubEndpoint, PubSubSubscription
from zato.common.odb.query import query_wrapper
# ################################################################################################################################
_subscriber_role = (PUBSUB.ROLE.PUBLISHER_SUBSCRIBER.id, PUBSUB.ROLE.SUBSCRIBER.id)
# ################################################################################################################################
def _pubsub_endpoint_summary(session, cluster_id, topic_id):
q = session.query(
PubSubEndpoint.id,
PubSubEndpoint.is_active,
PubSubEndpoint.is_internal,
PubSubEndpoint.role,
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.endpoint_type,
PubSubEndpoint.last_seen,
PubSubEndpoint.last_deliv_time,
func.count(PubSubSubscription.id).label('subscription_count'),
).\
group_by(PubSubEndpoint.id).\
outerjoin(PubSubSubscription, PubSubEndpoint.id==PubSubSubscription.endpoint_id).\
filter(Cluster.id==PubSubEndpoint.cluster_id).\
filter(Cluster.id==cluster_id).\
filter(PubSubEndpoint.role.in_(_subscriber_role))
if topic_id:
q = q.\
filter(PubSubSubscription.topic_id==topic_id)
return q
@query_wrapper
def pubsub_endpoint_summary_list(session, cluster_id, topic_id=None, needs_columns=False):
return _pubsub_endpoint_summary(session, cluster_id, topic_id).\
order_by(PubSubEndpoint.id)
def pubsub_endpoint_summary(session, cluster_id, endpoint_id, topic_id=None):
return _pubsub_endpoint_summary(session, cluster_id, topic_id).\
filter(PubSubEndpoint.id==endpoint_id).\
one()
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/odb/query/pubsub/endpoint.py | endpoint.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import func, update
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpointEnqueuedMessage, PubSubMessage, PubSubSubscription
from zato.common.odb.query import count, _pubsub_queue_message
from zato.common.util.time_ import utcnow_as_ms
# ################################################################################################################################
PubSubEnqMsg = PubSubEndpointEnqueuedMessage
# ################################################################################################################################
_delivered = PUBSUB.DELIVERY_STATUS.DELIVERED
_initialized = PUBSUB.DELIVERY_STATUS.INITIALIZED
_to_delete = PUBSUB.DELIVERY_STATUS.TO_DELETE
_waiting = PUBSUB.DELIVERY_STATUS.WAITING_FOR_CONFIRMATION
# ################################################################################################################################
def get_messages(session, cluster_id, sub_key, batch_size, now, _initialized=_initialized, _waiting=_waiting):
""" Returns up to batch_size messages for input sub_key and mark them as being delivered.
"""
# First, get all messages but note it is SELECT FOR UPDATE
messages = _pubsub_queue_message(session, cluster_id).\
filter(PubSubSubscription.sub_key==sub_key).\
filter(PubSubEnqMsg.delivery_status==_initialized).\
filter(PubSubMessage.expiration_time>=now).\
with_for_update().\
order_by(PubSubMessage.ext_pub_time.desc()).\
limit(batch_size).\
all()
# Now, within the same transaction, update their delivery status to indicate they are being delivered
msg_id_list = [elem.msg_id for elem in messages]
if msg_id_list:
session.execute(
update(PubSubEnqMsg).\
values({
'delivery_status': _waiting,
'delivery_time': now,
'delivery_count': PubSubEnqMsg.__table__.c.delivery_count + 1,
}).\
where(PubSubEnqMsg.cluster_id==cluster_id).\
where(PubSubEnqMsg.pub_msg_id.in_(msg_id_list))
)
# Return all messages fetched - our caller will commit the transaction thus releasing the FOR UPDATE lock
return messages
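# A caller-side sketch (hypothetical session and delivery callable) - the
# commit is what releases the SELECT ... FOR UPDATE row locks taken above,
# so it needs to happen promptly after the fetch:
#
#   batch = get_messages(session, cluster_id=1, sub_key='zpsk.rest.abc123',
#       batch_size=50, now=utcnow_as_ms())
#   session.commit()  # Releases the FOR UPDATE locks
#   deliver(batch)    # Hypothetical delivery step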
# ################################################################################################################################
def _set_delivery_status(session, cluster_id, sub_key, msg_id_list, now, status):
session.execute(
update(PubSubEnqMsg).\
values({
'delivery_status': status,
'delivery_time': now,
}).\
where(PubSubSubscription.sub_key==sub_key).\
where(PubSubEnqMsg.cluster_id==cluster_id).\
where(PubSubEnqMsg.sub_key==PubSubSubscription.sub_key).\
where(PubSubEnqMsg.pub_msg_id.in_(msg_id_list))
)
# ################################################################################################################################
def set_to_delete(session, cluster_id, sub_key, msg_id_list, now, status=_to_delete):
""" Marks all input messages as to be deleted.
"""
_set_delivery_status(session, cluster_id, sub_key, msg_id_list, now, status)
# ################################################################################################################################
def acknowledge_delivery(session, cluster_id, sub_key, msg_id_list, now, status=_delivered):
""" Confirms delivery of all messages from msg_id_list.
"""
_set_delivery_status(session, cluster_id, sub_key, msg_id_list, now, status)
# ################################################################################################################################
def get_queue_depth_by_sub_key(session, cluster_id, sub_key, now):
""" Returns queue depth for a given sub_key - does not include messages expired, in staging, or already delivered.
"""
current_q = session.query(PubSubEnqMsg.id).\
filter(PubSubSubscription.sub_key==PubSubEnqMsg.sub_key).\
filter(PubSubEnqMsg.is_in_staging != True).\
filter(PubSubEnqMsg.pub_msg_id==PubSubMessage.pub_msg_id).\
filter(PubSubMessage.expiration_time>=now).\
filter(PubSubSubscription.sub_key==sub_key).\
filter(PubSubEnqMsg.cluster_id==cluster_id) # noqa: E712
return count(session, current_q)
# ################################################################################################################################
def get_queue_depth_by_topic_id_list(session, cluster_id, topic_id_list):
""" Returns queue depth for a given sub_key - does not include messages expired, in staging, or already delivered.
"""
return session.query(PubSubEnqMsg.topic_id, func.count(PubSubEnqMsg.topic_id)).\
filter(PubSubEnqMsg.topic_id.in_(topic_id_list)).\
filter(PubSubEnqMsg.cluster_id==cluster_id).\
filter(PubSubEnqMsg.delivery_status==_initialized).\
filter(PubSubEnqMsg.pub_msg_id==PubSubMessage.pub_msg_id).\
filter(PubSubMessage.expiration_time>=utcnow_as_ms()).\
group_by(PubSubMessage.topic_id).\
all()
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/odb/query/pubsub/queue.py | queue.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# SQLAlchemy
from sqlalchemy import update
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpoint, PubSubMessage, PubSubEndpointEnqueuedMessage, PubSubSubscription, Server, \
WebSocketClient, WebSocketClientPubSubKeys
logger = getLogger('zato_pubsub.sql')
# ################################################################################################################################
_initialized = PUBSUB.DELIVERY_STATUS.INITIALIZED
_delivered = PUBSUB.DELIVERY_STATUS.DELIVERED
_wsx = PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id
# ################################################################################################################################
sql_messages_columns = (
PubSubMessage.pub_msg_id,
PubSubMessage.pub_correl_id,
PubSubMessage.in_reply_to,
PubSubMessage.published_by_id,
PubSubMessage.ext_client_id,
PubSubMessage.group_id,
PubSubMessage.position_in_group,
PubSubMessage.pub_time,
PubSubMessage.ext_pub_time,
PubSubMessage.data,
PubSubMessage.mime_type,
PubSubMessage.priority,
PubSubMessage.expiration,
PubSubMessage.expiration_time,
PubSubMessage.size,
PubSubMessage.user_ctx,
PubSubMessage.zato_ctx,
PubSubMessage.opaque1,
PubSubEndpointEnqueuedMessage.id.label('endp_msg_queue_id'),
PubSubEndpointEnqueuedMessage.sub_key,
PubSubEndpointEnqueuedMessage.sub_pattern_matched,
)
sql_msg_id_columns = (
PubSubMessage.pub_msg_id,
)
# ################################################################################################################################
def _get_base_sql_msg_query(session, columns, sub_key_list, pub_time_max, cluster_id, _float_str=PUBSUB.FLOAT_STRING_CONVERT):
return session.query(*columns).\
filter(PubSubEndpointEnqueuedMessage.pub_msg_id==PubSubMessage.pub_msg_id).\
filter(PubSubEndpointEnqueuedMessage.sub_key.in_(sub_key_list)).\
filter(PubSubEndpointEnqueuedMessage.delivery_status==_initialized).\
filter(PubSubMessage.expiration_time > _float_str.format(pub_time_max)).\
filter(PubSubMessage.cluster_id==cluster_id)
# ################################################################################################################################
def _get_sql_msg_data_by_sub_key(session, cluster_id, sub_key_list, last_sql_run, pub_time_max, columns, ignore_list=None,
needs_result=True, _initialized=_initialized, _float_str=PUBSUB.FLOAT_STRING_CONVERT):
""" Returns all SQL messages queued up for a given sub_key that are not being delivered
or have not been delivered already.
"""
logger.info('Getting GD messages for `%s` last_run:%r pub_time_max:%r needs_result:%d', sub_key_list, last_sql_run,
pub_time_max, int(needs_result))
query = _get_base_sql_msg_query(session, columns, sub_key_list, pub_time_max, cluster_id)
# If there is the last SQL run time given, it means that we have to fetch all messages
# enqueued for that subscriber since that time ..
if last_sql_run:
query = query.\
filter(PubSubEndpointEnqueuedMessage.creation_time > _float_str.format(last_sql_run))
query = query.\
filter(PubSubEndpointEnqueuedMessage.creation_time <= _float_str.format(pub_time_max))
if ignore_list:
query = query.\
filter(PubSubEndpointEnqueuedMessage.id.notin_(ignore_list))
query = query.\
order_by(PubSubMessage.priority.desc()).\
order_by(PubSubMessage.ext_pub_time).\
order_by(PubSubMessage.pub_time)
return query.all() if needs_result else query
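# A sketch of how the incremental fetch can be driven (hypothetical caller
# code; utcnow_as_ms would come from zato.common.util.time_). The first run
# passes last_sql_run=None to fetch everything enqueued up to pub_time_max,
# while subsequent runs pass the previous pub_time_max to fetch only newer rows:
#
#   last_run = None
#   while True:
#       now = utcnow_as_ms()
#       msgs = get_sql_messages_by_sub_key(session, 1, ['zpsk.rest.abc123'],
#           last_run, now, ignore_list=None)
#       last_run = now  # The next iteration picks up from this point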
# ################################################################################################################################
def get_sql_messages_by_sub_key(session, cluster_id, sub_key_list, last_sql_run, pub_time_max, ignore_list):
return _get_sql_msg_data_by_sub_key(session, cluster_id, sub_key_list, last_sql_run, pub_time_max,
sql_messages_columns, ignore_list)
# ################################################################################################################################
def get_sql_messages_by_msg_id_list(session, cluster_id, sub_key, pub_time_max, msg_id_list):
query = _get_base_sql_msg_query(session, sql_messages_columns, [sub_key], pub_time_max, cluster_id)
return query.\
filter(PubSubEndpointEnqueuedMessage.pub_msg_id.in_(msg_id_list))
# ################################################################################################################################
def get_sql_msg_ids_by_sub_key(session, cluster_id, sub_key, last_sql_run, pub_time_max):
return _get_sql_msg_data_by_sub_key(session, cluster_id, [sub_key], last_sql_run, pub_time_max, sql_msg_id_columns,
needs_result=False)
# ################################################################################################################################
def confirm_pubsub_msg_delivered(session, cluster_id, sub_key, delivered_pub_msg_id_list, now, _delivered=_delivered):
""" Returns all SQL messages queued up for a given sub_key.
"""
session.execute(
update(PubSubEndpointEnqueuedMessage).\
values({
'delivery_status': _delivered,
'delivery_time': now
}).\
where(PubSubEndpointEnqueuedMessage.pub_msg_id.in_(delivered_pub_msg_id_list)).\
where(PubSubEndpointEnqueuedMessage.sub_key==sub_key)
)
# ################################################################################################################################
def get_delivery_server_for_sub_key(session, cluster_id, sub_key, is_wsx):
""" Returns information about which server handles delivery tasks for input sub_key, the latter must exist in DB.
Assumes that sub_key belongs to a non-WSX endpoint and then checks WebSockets in case the former query founds
no matching server.
"""
# Sub key belongs to a WebSockets client ..
if is_wsx:
return session.query(
Server.id.label('server_id'),
Server.name.label('server_name'),
Server.cluster_id,
).\
filter(WebSocketClient.server_id==Server.id).\
filter(WebSocketClient.cluster_id==cluster_id).\
filter(WebSocketClient.id==WebSocketClientPubSubKeys.client_id).\
filter(WebSocketClientPubSubKeys.sub_key==sub_key).\
first()
# .. otherwise, it is a REST, SOAP or another kind of client, but for sure it's not WebSockets.
else:
return session.query(
Server.id.label('server_id'),
Server.name.label('server_name'),
Server.cluster_id,
PubSubEndpoint.endpoint_type,
).\
filter(Server.id==PubSubSubscription.server_id).\
filter(PubSubSubscription.sub_key==sub_key).\
filter(PubSubSubscription.endpoint_id==PubSubEndpoint.id).\
filter(PubSubSubscription.cluster_id==cluster_id).\
first()
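# A usage sketch (hypothetical values) - the caller decides upfront whether
# the sub_key belongs to a WebSocket client and passes is_wsx accordingly;
# the result is a row with server_id, server_name and cluster_id, or None
# if no delivery server has been assigned yet:
#
#   server = get_delivery_server_for_sub_key(session, cluster_id=1,
#       sub_key='zpsk.rest.abc123', is_wsx=False)
#   if server:
#       pass  # E.g. invoke that server to start a delivery task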
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/odb/query/pubsub/delivery.py | delivery.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import true as sa_true
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpointEnqueuedMessage, PubSubMessage
# ################################################################################################################################
_delivered = PUBSUB.DELIVERY_STATUS.DELIVERED
_to_delete = PUBSUB.DELIVERY_STATUS.TO_DELETE
# ################################################################################################################################
def delete_msg_delivered(session, cluster_id, topic_id):
""" Deletes from topics all messages that have been delivered from their queues.
"""
# When a message is published and there are subscribers for it, its PubSubMessage.is_in_sub_queue attribute
# is set to True and a reference to that message is stored in PubSubEndpointEnqueuedMessage. Then, once the message
# is delivered to all subscribers, a background process calling delete_enq_delivered deletes all the references.
# Therefore, we can delete all PubSubMessage that have is_in_sub_queue = True because it means that there must have
# been subscribers to it and, seeing as there are no references to it anymore, it means that they must have been
# already deleted, so we can safely delete the PubSubMessage itself.
enqueued_subquery = session.query(PubSubMessage.pub_msg_id).\
filter(PubSubMessage.cluster_id==cluster_id).\
filter(PubSubMessage.topic_id==topic_id).\
filter(PubSubMessage.is_in_sub_queue==sa_true()).\
filter(PubSubMessage.pub_msg_id==PubSubEndpointEnqueuedMessage.pub_msg_id)
return session.query(PubSubMessage).\
filter(PubSubMessage.cluster_id==cluster_id).\
filter(PubSubMessage.topic_id==topic_id).\
filter(PubSubMessage.is_in_sub_queue==sa_true()).\
filter(PubSubMessage.pub_msg_id.notin_(enqueued_subquery)).\
delete(synchronize_session=False)
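# A sketch of the intended cleanup order (hypothetical scheduler code, not
# part of this module) - queue references are deleted first, otherwise the
# NOT IN subquery above would still see them and keep the topic messages:
#
#   delete_enq_delivered(session, cluster_id, topic_id)  # 1. Queue references
#   delete_msg_delivered(session, cluster_id, topic_id)  # 2. Topic messages
#   session.commit()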
# ################################################################################################################################
def delete_msg_expired(session, cluster_id, topic_id, now):
""" Deletes all expired messages from all topics.
"""
q = session.query(PubSubMessage).\
filter(PubSubMessage.cluster_id==cluster_id).\
filter(PubSubMessage.expiration_time<=now)
if topic_id:
q = q.filter(PubSubMessage.topic_id==topic_id)
return q.delete()
# ################################################################################################################################
def _delete_enq_msg_by_status(session, cluster_id, topic_id, status):
""" Deletes all messages already delivered or the ones that have been explicitly marked for deletion from delivery queues.
"""
q = session.query(PubSubEndpointEnqueuedMessage).\
filter(PubSubEndpointEnqueuedMessage.cluster_id==cluster_id).\
filter(PubSubEndpointEnqueuedMessage.delivery_status==status)
if topic_id:
q = q.filter(PubSubEndpointEnqueuedMessage.topic_id==topic_id)
return q.delete()
# ################################################################################################################################
def delete_enq_delivered(session, cluster_id, topic_id, status=_delivered):
""" Deletes all messages already delivered or the ones that have been explicitly marked for deletion from delivery queues.
"""
return _delete_enq_msg_by_status(session, cluster_id, topic_id, status)
# ################################################################################################################################
def delete_enq_marked_deleted(session, cluster_id, topic_id, status=_to_delete):
""" Deletes all messages that have been explicitly marked for deletion from delivery queues.
"""
return _delete_enq_msg_by_status(session, cluster_id, topic_id, status)
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/odb/query/pubsub/cleanup.py | cleanup.py |
# stdlib
from logging import DEBUG, getLogger
from traceback import format_exc
# SQLAlchemy
from sqlalchemy.exc import IntegrityError
# Zato
from zato.common.api import PUBSUB
from zato.common.exception import BadRequest
from zato.common.odb.model import PubSubEndpoint, PubSubEndpointEnqueuedMessage, PubSubEndpointTopic, PubSubMessage, PubSubTopic
from zato.common.util.sql import sql_op_with_deadlock_retry
# ################################################################################################################################
logger_zato = getLogger('zato')
logger_pubsub = getLogger('zato_pubsub')
has_debug = logger_zato.isEnabledFor(DEBUG) or logger_pubsub.isEnabledFor(DEBUG)
# ################################################################################################################################
MsgInsert = PubSubMessage.__table__.insert
EndpointTopicInsert = PubSubEndpointTopic.__table__.insert
EnqueuedMsgInsert = PubSubEndpointEnqueuedMessage.__table__.insert
MsgTable = PubSubMessage.__table__
TopicTable = PubSubTopic.__table__
EndpointTable = PubSubEndpoint.__table__
EndpointTopicTable = PubSubEndpointTopic.__table__
# ################################################################################################################################
_initialized=PUBSUB.DELIVERY_STATUS.INITIALIZED
# ################################################################################################################################
def _sql_publish_with_retry(session, cid, cluster_id, topic_id, subscriptions_by_topic, gd_msg_list, now):
""" A low-level implementation of sql_publish_with_retry.
"""
# Publish messages - INSERT rows, each representing an individual message
topic_messages_inserted = insert_topic_messages(session, cid, gd_msg_list)
if has_debug:
sub_keys_by_topic = sorted(elem.sub_key for elem in subscriptions_by_topic)
logger_zato.info('With topic_messages_inserted `%s` `%s` `%s` `%s` `%s` `%s` `%s`',
cid, topic_messages_inserted, cluster_id, topic_id, sub_keys_by_topic, gd_msg_list, now)
if topic_messages_inserted:
# Move messages to each subscriber's queue
if subscriptions_by_topic:
try:
insert_queue_messages(session, cluster_id, subscriptions_by_topic, gd_msg_list, topic_id, now, cid)
if has_debug:
logger_zato.info('Inserted queue messages `%s` `%s` `%s` `%s` `%s` `%s`', cid, cluster_id,
sub_keys_by_topic, gd_msg_list, topic_id, now)
# No integrity error / no deadlock = all good
return True
except IntegrityError:
if has_debug:
logger_zato.info('Caught IntegrityError (_sql_publish_with_retry) `%s` `%s`', cid, format_exc())
# If we have an integrity error here it means that our transaction, the whole of it,
# was rolled back - this will happen on MySQL in case of deadlocks, which may
# occur because delivery tasks update the table that insert_queue_messages wants to insert into.
# We need to return False for our caller to understand that the whole transaction needs
# to be repeated.
return False
else:
if has_debug:
logger_zato.info('No subscribers in `%s`', cid)
# No subscribers, also good
return True
# ################################################################################################################################
def sql_publish_with_retry(*args):
""" Populates SQL structures with new messages for topics and their counterparts in subscriber queues.
    In case of a deadlock, retries the whole transaction - per MySQL's requirements, a deadlock rolls back
    the entire transaction rather than only the deadlocking statement.
"""
is_ok = False
while not is_ok:
if has_debug:
logger_zato.info('sql_publish_with_retry -> is_ok.1:`%s`', is_ok)
is_ok = _sql_publish_with_retry(*args)
if has_debug:
logger_zato.info('sql_publish_with_retry -> is_ok.2:`%s`', is_ok)
# ################################################################################################################################
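# A minimal usage sketch, assuming the caller already has an SQLAlchemy session and the full publication
# context at hand - every name below is a hypothetical placeholder rather than part of this module's API.
def _example_publish_caller(session, cid, cluster_id, topic_id, subscriptions_by_topic, gd_msg_list, now):
    """ Illustrative only - the deadlock-retry loop runs entirely inside sql_publish_with_retry
    and the caller's sole responsibility is to commit once it returns.
    """
    sql_publish_with_retry(session, cid, cluster_id, topic_id, subscriptions_by_topic, gd_msg_list, now)
    session.commit()

# ################################################################################################################################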
def _insert_topic_messages(session, msg_list):
""" A low-level implementation for insert_topic_messages.
"""
session.execute(MsgInsert().values(msg_list))
# ################################################################################################################################
def insert_topic_messages(session, cid, msg_list):
""" Publishes messages to a topic, i.e. runs an INSERT that inserts rows, one for each message.
"""
try:
return sql_op_with_deadlock_retry(cid, 'insert_topic_messages', _insert_topic_messages, session, msg_list)
# Catch duplicate MsgId values sent by clients
except IntegrityError as e:
if has_debug:
logger_zato.info('Caught IntegrityError (insert_topic_messages) `%s` `%s`', cid, format_exc())
str_e = str(e)
if 'pubsb_msg_pubmsg_id_idx' in str_e:
raise BadRequest(cid, 'Duplicate msg_id:`{}`'.format(str_e))
else:
raise
# ################################################################################################################################
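# A hedged sketch of how a publishing layer could react to the duplicate-message BadRequest raised above -
# the wrapper function is an assumption for illustration, not an API that this module exposes.
def _example_handle_duplicate(session, cid, msg_list):
    """ Illustrative only - insert_topic_messages raises BadRequest on duplicate pub_msg_id values,
    which a caller will usually want to surface to the publishing client instead of retrying.
    """
    try:
        return insert_topic_messages(session, cid, msg_list)
    except BadRequest:
        logger_zato.warning('Rejecting duplicate message IDs in `%s`', cid)
        raise

# ################################################################################################################################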
def _insert_queue_messages(session, queue_msgs):
""" A low-level call to enqueue messages.
"""
session.execute(EnqueuedMsgInsert().values(queue_msgs))
# ################################################################################################################################
def insert_queue_messages(session, cluster_id, subscriptions_by_topic, msg_list, topic_id, now, cid, _initialized=_initialized,
_float_str=PUBSUB.FLOAT_STRING_CONVERT):
""" Moves messages to each subscriber's queue, i.e. runs an INSERT that adds relevant references to the topic message.
Also, updates each message's is_in_sub_queue flag to indicate that it is no longer available for other subscribers.
"""
queue_msgs = []
for sub in subscriptions_by_topic:
for msg in msg_list:
# Enqueues the message for each subscriber
queue_msgs.append({
'creation_time': _float_str.format(now),
'pub_msg_id': msg['pub_msg_id'],
'endpoint_id': sub.endpoint_id,
'topic_id': topic_id,
'sub_key': sub.sub_key,
'cluster_id': cluster_id,
'sub_pattern_matched': msg['sub_pattern_matched'][sub.sub_key],
})
# Move the message to endpoint queues
return sql_op_with_deadlock_retry(cid, 'insert_queue_messages', _insert_queue_messages, session, queue_msgs)
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/odb/query/pubsub/publish.py | publish.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import Boolean, Column, DateTime, false as sa_false, ForeignKey, Index, Integer, Sequence, String, Text, \
UniqueConstraint
from sqlalchemy.ext.declarative import declared_attr
# Zato
from zato.common.odb.model.base import Base, _JSON
# ################################################################################################################################
class _SSOGroup(Base):
__tablename__ = 'zato_sso_group'
__table_args__ = (
UniqueConstraint('name', 'source', name='zato_g_name_uq'),
UniqueConstraint('group_id', name='zato_g_gid_uq'),
{})
# Not exposed publicly, used only for SQL joins
id = Column(Integer, Sequence('zato_sso_group_id_seq'), primary_key=True)
is_active = Column(Boolean(), nullable=False) # Currently unused and always set to True
is_internal = Column(Boolean(), nullable=False, default=False)
# Publicly visible
group_id = Column(String(191), nullable=False)
name = Column(String(191), nullable=False)
source = Column(String(191), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# Groups may be optionally nested
parent_id = Column(Integer, ForeignKey('zato_sso_group.id', ondelete='CASCADE'), nullable=True)
# ################################################################################################################################
class _SSOUser(Base):
__tablename__ = 'zato_sso_user'
__table_args__ = (
UniqueConstraint('username', name='zato_u_usrn_uq'),
UniqueConstraint('user_id', name='zato_user_id_uq'),
Index('zato_u_email_idx', 'email', unique=False, mysql_length={'email':767}),
Index('zato_u_appr_stat_idx', 'approval_status', unique=False),
Index('zato_u_dspn_idx', 'display_name_upper', unique=False),
Index('zato_u_alln_idx', 'first_name_upper', 'middle_name_upper', 'last_name_upper', unique=False),
Index('zato_u_lastn_idx', 'last_name_upper', unique=False),
Index('zato_u_sigst_idx', 'sign_up_status', unique=False),
Index('zato_u_sigctok_idx', 'sign_up_confirm_token', unique=True),
{})
# Not exposed publicly, used only for SQL joins
id = Column(Integer, Sequence('zato_sso_user_id_seq'), primary_key=True)
# Publicly visible
user_id = Column(String(191), nullable=False)
is_active = Column(Boolean(), nullable=False) # Currently unused and always set to True
is_internal = Column(Boolean(), nullable=False, default=False)
is_super_user = Column(Boolean(), nullable=False, default=False)
is_locked = Column(Boolean(), nullable=False, default=False)
locked_time = Column(DateTime(), nullable=True)
# Creation metadata, e.g. what this user's remote IP was
creation_ctx = Column(Text(), nullable=False)
# Note that this is not an FK - this is on purpose to keep this information around
# even if parent row is deleted.
locked_by = Column(String(191), nullable=True)
approval_status = Column(String(191), nullable=False)
approval_status_mod_time = Column(DateTime(), nullable=False) # When user was approved or rejected
approval_status_mod_by = Column(String(191), nullable=False) # Same comment as in locked_by
# Basic information, always required
username = Column(String(191), nullable=False)
password = Column(Text(), nullable=False)
password_is_set = Column(Boolean(), nullable=False)
password_must_change = Column(Boolean(), nullable=False)
password_last_set = Column(DateTime(), nullable=False)
password_expiry = Column(DateTime(), nullable=False)
# Sign-up information, possibly used in API workflows
sign_up_status = Column(String(191), nullable=False)
sign_up_time = Column(DateTime(), nullable=False)
sign_up_confirm_time = Column(DateTime(), nullable=True)
sign_up_confirm_token = Column(String(191), nullable=False)
# Won't be always needed
email = Column(Text(), nullable=True)
# Various cultures don't have a notion of first or last name and display_name is the one that can be used in that case.
display_name = Column(String(191), nullable=True)
first_name = Column(String(191), nullable=True)
middle_name = Column(String(191), nullable=True)
last_name = Column(String(191), nullable=True)
# Same as above but upper-cased for look-up / indexing purposes
display_name_upper = Column(String(191), nullable=True)
first_name_upper = Column(String(191), nullable=True)
middle_name_upper = Column(String(191), nullable=True)
last_name_upper = Column(String(191), nullable=True)
# Rate limiting
is_rate_limit_active = Column(Boolean(), nullable=True)
rate_limit_type = Column(String(40), nullable=True)
rate_limit_def = Column(Text(), nullable=True)
rate_limit_check_parent_def = Column(Boolean(), nullable=True)
# TOTP
is_totp_enabled = Column(Boolean(), nullable=False, server_default=sa_false())
totp_key = Column(Text(), nullable=True)
totp_label = Column(Text(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# ################################################################################################################################
class _SSOUserGroup(Base):
""" An N:N mapping of users to their groups.
"""
__tablename__ = 'zato_sso_user_group'
__table_args__ = (
UniqueConstraint('user_id', 'group_id', name='zato_ug_id_uq'),
{})
# Not exposed publicly, used only to have a natural FK
id = Column(Integer, Sequence('zato_sso_ug_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
user_id = Column(Integer, ForeignKey('zato_sso_user.id', ondelete='CASCADE'), nullable=False)
group_id = Column(Integer, ForeignKey('zato_sso_group.id', ondelete='CASCADE'), nullable=False)
# ################################################################################################################################
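# A minimal sketch of populating the N:N mapping above, assuming an already-open SQLAlchemy session
# and pre-existing user and group rows - the function itself is illustrative, not part of this module.
def _example_assign_user_to_group(session, user, group):
    """ Illustrative only - links an _SSOUser row to an _SSOGroup row through _SSOUserGroup,
    using the internal integer IDs that the mapping table's FKs point to.
    """
    mapping = _SSOUserGroup()
    mapping.user_id = user.id
    mapping.group_id = group.id
    session.add(mapping)
    session.commit()

# ################################################################################################################################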
class _SSOSession(Base):
__tablename__ = 'zato_sso_session'
__table_args__ = (
Index('zato_sso_sust_idx', 'ust', unique=True),
Index('zato_sso_extsi_idx', 'ext_session_id', unique=False, mysql_length={'ext_session_id':767}),
{})
# Not exposed publicly, used only for SQL joins
id = Column(Integer, Sequence('zato_sso_sid_seq'), primary_key=True)
# Publicly visible session identifier (user session token)
ust = Column(String(191), nullable=False)
creation_time = Column(DateTime(), nullable=False)
expiration_time = Column(DateTime(), nullable=False)
remote_addr = Column(Text(), nullable=False)
user_agent = Column(Text(), nullable=False)
auth_type = Column(Text(), nullable=False)
auth_principal = Column(Text(), nullable=False)
# ID of a session external to SSO that is linked to this one,
# where external may still mean JWT or Basic Auth,
# but it is not a built-in SSO one.
ext_session_id = Column(Text(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
@declared_attr
def user_id(cls):
return Column(Integer, ForeignKey('zato_sso_user.id', ondelete='CASCADE'), nullable=False)
# ################################################################################################################################
class _SSOAttr(Base):
__tablename__ = 'zato_sso_attr'
__table_args__ = (
UniqueConstraint('name', 'is_session_attr', 'user_id', '_ust_string', name='zato_attr_name_uq'),
Index('zato_attr_usr', 'user_id', unique=False),
Index('zato_attr_usr_ust', 'user_id', 'ust', unique=False),
Index('zato_attr_usr_name', 'user_id', 'name', unique=False),
Index('zato_attr_usr_ust_name', 'user_id', 'ust', 'name', unique=True),
{})
# Not exposed publicly, used only because SQLAlchemy requires an FK
id = Column(Integer, Sequence('zato_sso_attr_seq'), primary_key=True)
creation_time = Column(DateTime(), nullable=False)
last_modified = Column(DateTime(), nullable=True)
expiration_time = Column(DateTime(), nullable=True)
is_session_attr = Column(Boolean(), nullable=False)
is_encrypted = Column(Boolean(), nullable=False, default=False)
serial_method = Column(String(20), nullable=False, default='json')
name = Column(String(191), nullable=False)
value = Column(Text(), nullable=True)
    # Unlike ust, this cannot be NULL, so it may be used in the unique constraint 'zato_attr_name_uq' -
    # otherwise, all NULL values would be considered distinct (or at least incomparable) and, API-wise,
    # it would not be possible to construct a sensible unique constraint.
_ust_string = Column(String(191), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
user_id = Column(String(191), ForeignKey('zato_sso_user.user_id', ondelete='CASCADE'), nullable=False)
ust = Column(String(191), ForeignKey('zato_sso_session.ust', ondelete='CASCADE'), nullable=True)
# ################################################################################################################################
class _SSOLinkedAuth(Base):
__tablename__ = 'zato_sso_linked_auth'
__table_args__ = (
Index('auth_idx', 'auth_type', 'user_id', 'auth_id', 'auth_principal', unique=True,
mysql_length={'auth_type':191, 'user_id':191, 'auth_principal':191}),
{})
# Not exposed publicly, used only because SQLAlchemy requires an FK
id = Column(Integer, Sequence('zato_sso_linked_auth_seq'), primary_key=True)
is_active = Column(Boolean(), nullable=False) # Currently unused and always set to True
is_internal = Column(Boolean(), nullable=False, default=False)
creation_time = Column(DateTime(), nullable=False)
last_modified = Column(DateTime(), nullable=True)
# If True, auth_principal will point to an account/user defined externally to Zato,
# e.g. in a system that Zato has no direct authentication support for.
# Otherwise, if False, auth_id will be filled in.
has_ext_principal = Column(Boolean(), nullable=False)
# A label describing authentication type
auth_type = Column(Text(191), nullable=False)
#
# Will be provided if has_ext_principal is False, in which case it will point to one of sec_base.id definitions.
#
# Note that if the SSO ODB is installed in a standalone database, this column will not be an FK
# because there will be no parent sec_base.id column to point to. The Alembic logic to add
# the FK after the table is created is implemented in cli/create_odb.py:Create.
auth_id = Column(Integer, nullable=True)
# Will be given if auth_id is not provided.
auth_principal = Column(Text(191), nullable=True)
# E.g. name of an environment this link is valid in - useful in cases when the same user
# has multiple linked accounts, different in different auth sources (environments).
auth_source = Column(Text(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# SSO user this entry links to
user_id = Column(String(191), ForeignKey('zato_sso_user.user_id', ondelete='CASCADE'), nullable=False)
# ################################################################################################################################
class _SSOPasswordReset(Base):
__tablename__ = 'zato_sso_password_reset'
__table_args__ = (
Index('zato_prt_value_type', 'token', 'type_', unique=True),
{})
# Not exposed publicly, used only because SQLAlchemy requires an FK
id = Column(Integer, Sequence('zato_sso_flow_prt_seq'), primary_key=True)
creation_time = Column(DateTime(), nullable=False)
expiration_time = Column(DateTime(), nullable=False)
# Creation metadata in JSON
creation_ctx = Column(_JSON(), nullable=False)
# The actual PRT (password reset token)
token = Column(String(191), nullable=False)
    # PRT type - what kind of token this is, e.g. a Zato built-in one or an external one
type_ = Column(String(191), nullable=False)
# This key is used to reset the password after the PRT has been accessed
reset_key = Column(String(191), nullable=False)
# This is set when the PRT is accessed in order to set a time limit
# for the password reset procedure (using prt.password_change_session_duration from sso.conf)
reset_key_exp_time = Column(DateTime(), nullable=False)
# Will be set to True when the PRT has been accessed in any way,
# e.g. a user clicks on a link.
has_been_accessed = Column(Boolean(), nullable=False, default=False)
# When was the PRT accessed
access_time = Column(DateTime(), nullable=True)
# Access metadata in JSON
access_ctx = Column(_JSON(), nullable=True)
# Will be set to True when a password is reset using this PRT and reset_key
is_password_reset = Column(Boolean(), nullable=False, default=False)
# When was the password reset
password_reset_time = Column(DateTime(), nullable=True)
# Password reset metadata in JSON
password_reset_ctx = Column(_JSON(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# SSO user this entry links to
user_id = Column(String(191), ForeignKey('zato_sso_user.user_id', ondelete='CASCADE'), nullable=False)
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/odb/model/sso.py | sso.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from ftplib import FTP_PORT
# SQLAlchemy
from sqlalchemy import BigInteger, Boolean, Column, DateTime, Enum, false as sa_false, ForeignKey, Index, Integer, LargeBinary, \
Numeric, Sequence, SmallInteger, String, Text, true as sa_true, UniqueConstraint
from sqlalchemy.orm import backref, relationship
# Zato
from zato.common.api import AMQP, CASSANDRA, CLOUD, DATA_FORMAT, HTTP_SOAP_SERIALIZATION_TYPE, MISC, NOTIF, ODOO, SAP, PUBSUB, \
SCHEDULER, STOMP, PARAMS_PRIORITY, URL_PARAMS_PRIORITY
from zato.common.json_internal import json_dumps
from zato.common.odb.const import WMQ_DEFAULT_PRIORITY
from zato.common.odb.model.base import Base, _JSON
from zato.common.odb.model.sso import _SSOAttr, _SSOPasswordReset, _SSOGroup, _SSOLinkedAuth, _SSOSession, _SSOUser
# ################################################################################################################################
def to_json(model, return_as_dict=False):
""" Returns a JSON representation of an SQLAlchemy-backed object.
"""
out = {}
out['fields'] = {}
out['pk'] = getattr(model, 'id')
for col in model._sa_class_manager.mapper.mapped_table.columns:
out['fields'][col.name] = getattr(model, col.name)
if return_as_dict:
return out
else:
return json_dumps([out])
# ################################################################################################################################
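# A short usage sketch for to_json above - the session and query are hypothetical, shown only
# to illustrate the two output shapes the helper supports.
def _example_to_json_usage(session):
    """ Illustrative only - to_json returns a JSON string by default
    or a plain dict when return_as_dict is True.
    """
    cluster = session.query(Cluster).first()
    as_string = to_json(cluster)                    # '[{"fields": {...}, "pk": ...}]'
    as_dict = to_json(cluster, return_as_dict=True) # {'fields': {...}, 'pk': ...}
    return as_string, as_dict

# ################################################################################################################################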
class SSOGroup(_SSOGroup):
pass
# ################################################################################################################################
class SSOUser(_SSOUser):
pass
# ################################################################################################################################
class SSOSession(_SSOSession):
pass
# ################################################################################################################################
class SSOAttr(_SSOAttr):
pass
# ################################################################################################################################
class SSOLinkedAuth(_SSOLinkedAuth):
pass
# ################################################################################################################################
class SSOPasswordReset(_SSOPasswordReset):
pass
# ################################################################################################################################
class AlembicRevision(Base):
""" A table for Alembic to store its revision IDs for SQL migrations.
    Note that Alembic, as of version 0.6.0, the latest release at the time of this writing (Sun, Jun 8 2014),
    doesn't declare 'version_num' as a primary key, but we do because SQLAlchemy always requires one.
"""
__tablename__ = 'alembic_version'
version_num = Column(String(32), primary_key=True)
def __init__(self, version_num=None):
self.version_num = version_num
# ################################################################################################################################
class ZatoInstallState(Base):
""" Contains a row for each Zato installation belonging to that particular
    ODB. For instance, installing Zato 1.0 will add a new row and installing 1.1 will add another one.
"""
__tablename__ = 'install_state'
id = Column(Integer, Sequence('install_state_seq'), primary_key=True)
version = Column(Integer, unique=True, nullable=False)
install_time = Column(DateTime(), nullable=False)
source_host = Column(String(200), nullable=False)
source_user = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
def __init__(self, id=None, version=None, install_time=None, source_host=None, source_user=None):
self.id = id
self.version = version
self.install_time = install_time
self.source_host = source_host
self.source_user = source_user
# ################################################################################################################################
class Cluster(Base):
""" Represents a Zato cluster.
"""
__tablename__ = 'cluster'
id = Column(Integer, Sequence('cluster_id_seq'), primary_key=True)
name = Column(String(200), unique=True, nullable=False)
description = Column(String(1000), nullable=True)
odb_type = Column(String(30), nullable=False)
odb_host = Column(String(200), nullable=True)
odb_port = Column(Integer(), nullable=True)
odb_user = Column(String(200), nullable=True)
odb_db_name = Column(String(200), nullable=True)
odb_schema = Column(String(200), nullable=True)
broker_host = Column(String(200), nullable=False)
broker_port = Column(Integer(), nullable=False)
lb_host = Column(String(200), nullable=False)
lb_port = Column(Integer(), nullable=False)
lb_agent_port = Column(Integer(), nullable=False)
cw_srv_id = Column(Integer(), nullable=True)
cw_srv_keep_alive_dt = Column(DateTime(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
def __init__(self, id=None, name=None, description=None, odb_type=None, odb_host=None, odb_port=None, odb_user=None,
odb_db_name=None, odb_schema=None, broker_host=None, broker_port=None, lb_host=None, lb_port=None,
lb_agent_port=None, cw_srv_id=None, cw_srv_keep_alive_dt=None):
self.id = id
self.name = name
self.description = description
self.odb_type = odb_type
self.odb_host = odb_host
self.odb_port = odb_port
self.odb_user = odb_user
self.odb_db_name = odb_db_name
self.odb_schema = odb_schema
self.broker_host = broker_host
self.broker_port = broker_port
self.lb_host = lb_host
self.lb_agent_port = lb_agent_port
self.lb_port = lb_port
self.cw_srv_id = cw_srv_id
self.cw_srv_keep_alive_dt = cw_srv_keep_alive_dt
def to_json(self):
return to_json(self)
# ################################################################################################################################
class Server(Base):
""" Represents a Zato server.
"""
__tablename__ = 'server'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('server_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
host = Column(String(400), nullable=True)
bind_host = Column(String(400), nullable=True)
bind_port = Column(Integer(), nullable=True)
preferred_address = Column(String(400), nullable=True)
crypto_use_tls = Column(Boolean(), nullable=True)
    # Whether the server's request to join a cluster has been accepted - for now,
    # it always will be.
last_join_status = Column(String(40), nullable=True)
last_join_mod_date = Column(DateTime(), nullable=True)
last_join_mod_by = Column(String(200), nullable=True)
# Whether the server's up or not
up_status = Column(String(40), nullable=True)
up_mod_date = Column(DateTime(), nullable=True)
token = Column(String(32), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('servers', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, cluster=None, token=None, last_join_status=None, last_join_mod_date=None,
last_join_mod_by=None):
self.id = id
self.name = name
self.cluster = cluster
self.token = token
self.last_join_status = last_join_status
self.last_join_mod_date = last_join_mod_date
self.last_join_mod_by = last_join_mod_by
self.has_lb_config = False # Not used by the database
self.in_lb = False # Not used by the database
self.lb_state = None # Not used by the database
self.lb_address = None # Not used by the database
self.may_be_deleted = None # Not used by the database
self.up_mod_date_user = None # Not used by the database
# ################################################################################################################################
class SecurityBase(Base):
""" A base class for all the security definitions.
"""
__tablename__ = 'sec_base'
__table_args__ = (UniqueConstraint('cluster_id', 'name'),
UniqueConstraint('cluster_id', 'username', 'sec_type'), {})
__mapper_args__ = {'polymorphic_on': 'sec_type'}
id = Column(Integer, Sequence('sec_base_seq'), primary_key=True)
name = Column(String(200), nullable=False)
    # It's nullable because some child classes do not use usernames
username = Column(String(200), nullable=True)
password = Column(String(1000), nullable=True)
password_type = Column(String(45), nullable=True)
is_active = Column(Boolean(), nullable=False)
sec_type = Column(String(45), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('security_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
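# A brief sketch of how the polymorphic mapping above behaves - assuming an open session, querying
# the base class yields instances of the concrete subclasses, with sec_type as the discriminator.
def _example_query_security(session, cluster_id):
    """ Illustrative only - rows come back as their concrete classes,
    e.g. HTTPBasicAuth or JWT, depending on each row's sec_type.
    """
    return session.query(SecurityBase).\
        filter(SecurityBase.cluster_id==cluster_id).\
        order_by(SecurityBase.name).\
        all()

# ################################################################################################################################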
class MultiSecurity(Base):
""" An N:N mapping between security definitions and objects making use of them.
"""
__tablename__ = 'sec_multi'
__table_args__ = (UniqueConstraint('cluster_id', 'conn_id', 'conn_type', 'security_id', 'is_channel', 'is_outconn'), {})
id = Column(Integer, Sequence('sec_multi_seq'), primary_key=True)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False)
priority = Column(Integer(), nullable=False)
conn_id = Column(String(100), nullable=False)
conn_type = Column(String(100), nullable=False)
is_channel = Column(Boolean(), nullable=False)
is_outconn = Column(Boolean(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=False)
security = relationship(SecurityBase, backref=backref('sec_multi_list', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('sec_multi_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class HTTPBasicAuth(SecurityBase):
""" An HTTP Basic Auth definition.
"""
__tablename__ = 'sec_basic_auth'
__mapper_args__ = {'polymorphic_identity': 'basic_auth'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
realm = Column(String(200), nullable=False)
def __init__(self, id=None, name=None, is_active=None, username=None, realm=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.realm = realm
self.password = password
self.cluster = cluster
# ################################################################################################################################
class JWT(SecurityBase):
""" A set of JavaScript Web Token (JWT) credentials.
"""
__tablename__ = 'sec_jwt'
__mapper_args__ = {'polymorphic_identity': 'jwt'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
ttl = Column(Integer, nullable=False)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, ttl=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.ttl = ttl
self.cluster = cluster
# ################################################################################################################################
class WSSDefinition(SecurityBase):
""" A WS-Security definition.
"""
__tablename__ = 'sec_wss_def'
__mapper_args__ = {'polymorphic_identity':'wss'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
reject_empty_nonce_creat = Column(Boolean(), nullable=False)
reject_stale_tokens = Column(Boolean(), nullable=True)
reject_expiry_limit = Column(Integer(), nullable=False)
nonce_freshness_time = Column(Integer(), nullable=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, password_type=None,
reject_empty_nonce_creat=None, reject_stale_tokens=None, reject_expiry_limit=None, nonce_freshness_time=None,
cluster=None, password_type_raw=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.password_type = password_type
self.reject_empty_nonce_creat = reject_empty_nonce_creat
self.reject_stale_tokens = reject_stale_tokens
self.reject_expiry_limit = reject_expiry_limit
self.nonce_freshness_time = nonce_freshness_time
self.cluster = cluster
self.password_type_raw = password_type_raw
# ################################################################################################################################
class OAuth(SecurityBase):
""" Stores OAuth credentials.
"""
__tablename__ = 'sec_oauth'
__mapper_args__ = {'polymorphic_identity':'oauth'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
proto_version = Column(String(32), nullable=False)
sig_method = Column(String(32), nullable=False) # HMAC-SHA1 or PLAINTEXT
max_nonce_log = Column(Integer(), nullable=False)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, proto_version=None, sig_method=None,
max_nonce_log=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.proto_version = proto_version
self.sig_method = sig_method
self.max_nonce_log = max_nonce_log
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class NTLM(SecurityBase):
""" Stores NTLM definitions.
"""
__tablename__ = 'sec_ntlm'
__mapper_args__ = {'polymorphic_identity': 'ntlm'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
        self.username = username
        self.password = password
        self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class AWSSecurity(SecurityBase):
""" Stores Amazon credentials.
"""
__tablename__ = 'sec_aws'
__mapper_args__ = {'polymorphic_identity': 'aws'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class OpenStackSecurity(SecurityBase):
""" Stores OpenStack credentials (no longer used, to be removed).
"""
__tablename__ = 'sec_openstack'
__mapper_args__ = {'polymorphic_identity': 'openstack'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
        self.username = username
        self.password = password
        self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class APIKeySecurity(SecurityBase):
""" Stores API keys.
"""
__tablename__ = 'sec_apikey'
__mapper_args__ = {'polymorphic_identity': 'apikey'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class XPathSecurity(SecurityBase):
""" Stores XPath-based credentials.
"""
__tablename__ = 'sec_xpath'
__mapper_args__ = {'polymorphic_identity':'xpath_sec'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
username_expr = Column(String(200), nullable=False)
password_expr = Column(String(200), nullable=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, username_expr=None, password_expr=None,
cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.username_expr = username_expr
self.password_expr = password_expr
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class TLSKeyCertSecurity(SecurityBase):
""" Stores information regarding TLS key/cert pairs used in outgoing connections.
"""
__tablename__ = 'sec_tls_key_cert'
__mapper_args__ = {'polymorphic_identity':'tls_key_cert'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
info = Column(LargeBinary(200000), nullable=False)
auth_data = Column(LargeBinary(200000), nullable=False)
# ################################################################################################################################
class TLSChannelSecurity(SecurityBase):
""" Stores information regarding TLS client certificate-based security definitions.
"""
__tablename__ = 'sec_tls_channel'
__mapper_args__ = {'polymorphic_identity':'tls_channel_sec'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
value = Column(LargeBinary(200000), nullable=False)
# ################################################################################################################################
class VaultConnection(SecurityBase):
""" Stores information on how to connect to Vault and how to authenticate against it by default.
"""
__tablename__ = 'sec_vault_conn'
__mapper_args__ = {'polymorphic_identity':'vault_conn_sec'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
url = Column(String(200), nullable=False)
token = Column(String(200), nullable=True)
default_auth_method = Column(String(200), nullable=True)
timeout = Column(Integer, nullable=False)
allow_redirects = Column(Boolean(), nullable=False)
tls_verify = Column(Boolean(), nullable=False)
tls_key_cert_id = Column(Integer, ForeignKey('sec_tls_key_cert.id', ondelete='CASCADE'), nullable=True)
tls_ca_cert_id = Column(Integer, ForeignKey('sec_tls_ca_cert.id', ondelete='CASCADE'), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
service = relationship('Service', backref=backref('vault_conn_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class TLSCACert(Base):
""" Stores information regarding CA certs.
"""
__tablename__ = 'sec_tls_ca_cert'
id = Column(Integer, Sequence('sec_tls_ca_cert_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(LargeBinary(200000), nullable=False)
info = Column(LargeBinary(200000), nullable=False)
is_active = Column(Boolean(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('ca_cert_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class HTTPSOAP(Base):
""" An incoming or outgoing HTTP/SOAP connection.
"""
__tablename__ = 'http_soap'
__table_args__ = (
UniqueConstraint('name', 'connection', 'transport', 'cluster_id'),
Index('path_host_conn_act_clus_idx', 'url_path', 'host', 'connection', 'soap_action', 'cluster_id', unique=False), {})
id = Column(Integer, Sequence('http_soap_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False)
connection = Column(String(20), nullable=False)
transport = Column(String(200), nullable=False)
host = Column(String(200), nullable=True)
url_path = Column(String(200), nullable=False)
method = Column(String(200), nullable=True)
content_encoding = Column(String(200), nullable=True)
soap_action = Column(String(200), nullable=False)
soap_version = Column(String(20), nullable=True)
data_format = Column(String(20), nullable=True)
content_type = Column(String(200), nullable=True)
ping_method = Column(String(60), nullable=True)
pool_size = Column(Integer, nullable=True)
serialization_type = Column(String(200), nullable=False, default=HTTP_SOAP_SERIALIZATION_TYPE.SUDS.id)
timeout = Column(Integer(), nullable=False, default=MISC.DEFAULT_HTTP_TIMEOUT)
merge_url_params_req = Column(Boolean, nullable=True, default=True)
url_params_pri = Column(String(200), nullable=True, default=URL_PARAMS_PRIORITY.DEFAULT)
params_pri = Column(String(200), nullable=True, default=PARAMS_PRIORITY.DEFAULT)
has_rbac = Column(Boolean, nullable=False, default=False)
sec_use_rbac = Column(Boolean(), nullable=False, default=False)
cache_expiry = Column(Integer, nullable=True, default=0)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=True)
security = relationship(SecurityBase, backref=backref('http_soap_list', order_by=name, cascade='all, delete, delete-orphan'))
sec_tls_ca_cert_id = Column(Integer, ForeignKey('sec_tls_ca_cert.id', ondelete='CASCADE'), nullable=True)
sec_tls_ca_cert = relationship('TLSCACert', backref=backref('http_soap', order_by=name, cascade='all, delete, delete-orphan'))
cache_id = Column(Integer, ForeignKey('cache.id', ondelete='CASCADE'), nullable=True)
cache = relationship('Cache', backref=backref('http_soap_list', order_by=name, cascade='all, delete, delete-orphan'))
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
service = relationship('Service', backref=backref('http_soap', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('http_soap_list', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, is_internal=None, connection=None, transport=None, host=None,
url_path=None, method=None, soap_action=None, soap_version=None, data_format=None, ping_method=None,
pool_size=None, merge_url_params_req=None, url_params_pri=None, params_pri=None, serialization_type=None,
timeout=None, sec_tls_ca_cert_id=None, service_id=None, service=None, security=None, cluster_id=None,
cluster=None, service_name=None, security_id=None, has_rbac=None, security_name=None, content_type=None,
cache_id=None, cache_type=None, cache_expiry=None, cache_name=None, content_encoding=None, match_slash=None,
http_accept=None, opaque=None, **kwargs):
super(HTTPSOAP, self).__init__(**kwargs)
self.id = id
self.name = name
self.is_active = is_active
self.is_internal = is_internal
self.connection = connection
self.transport = transport
self.host = host
self.url_path = url_path
self.method = method
self.soap_action = soap_action
self.soap_version = soap_version
self.data_format = data_format
self.ping_method = ping_method
self.pool_size = pool_size
self.merge_url_params_req = merge_url_params_req
self.url_params_pri = url_params_pri
self.params_pri = params_pri
self.serialization_type = serialization_type
self.timeout = timeout
self.sec_tls_ca_cert_id = sec_tls_ca_cert_id
self.service_id = service_id
self.service = service
self.security = security
self.cluster_id = cluster_id
self.cluster = cluster
self.service_name = service_name # Not used by the DB
self.security_id = security_id
self.has_rbac = has_rbac
self.security_name = security_name
self.content_type = content_type
self.cache_id = cache_id
self.cache_type = cache_type
self.cache_expiry = cache_expiry
self.cache_name = cache_name # Not used by the DB
self.content_encoding = content_encoding
self.match_slash = match_slash # Not used by the DB
self.http_accept = http_accept # Not used by the DB
self.opaque1 = opaque
self.is_rate_limit_active = None
self.rate_limit_type = None
self.rate_limit_def = None
self.rate_limit_check_parent_def = None
# ################################################################################################################################
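# A hedged sketch of creating an HTTP channel row with the model above - all literal values are examples
# only and both the session and the parent rows are assumed to exist already.
def _example_create_http_channel(session, cluster, service):
    """ Illustrative only - a plain HTTP channel pointing at an existing service.
    """
    channel = HTTPSOAP(
        name='example.channel', is_active=True, is_internal=False, connection='channel',
        transport='plain_http', url_path='/example', soap_action='', cluster=cluster, service=service)
    session.add(channel)
    session.commit()

# ################################################################################################################################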
class SQLConnectionPool(Base):
""" An SQL connection pool.
"""
__tablename__ = 'sql_pool'
__table_args__ = (UniqueConstraint('cluster_id', 'name'), {})
id = Column(Integer, Sequence('sql_pool_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
username = Column(String(200), nullable=False)
password = Column(String(200), nullable=False)
db_name = Column(String(200), nullable=False)
engine = Column(String(200), nullable=False)
extra = Column(LargeBinary(20000), nullable=True)
host = Column(String(200), nullable=False)
port = Column(Integer(), nullable=False)
pool_size = Column(Integer(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('sql_pools', order_by=name, cascade='all, delete, delete-orphan'))
engine_display_name = None # For auto-completion, not used by DB
def __init__(self, id=None, name=None, is_active=None, db_name=None, username=None, engine=None, extra=None, host=None,
port=None, pool_size=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.db_name = db_name
self.username = username
self.engine = engine
self.extra = extra
self.host = host
self.port = port
self.pool_size = pool_size
self.cluster = cluster
# ################################################################################################################################
class Service(Base):
""" A set of basic informations about a service available in a given cluster.
"""
__tablename__ = 'service'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('service_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
impl_name = Column(String(2000), nullable=False)
is_internal = Column(Boolean(), nullable=False)
wsdl = Column(LargeBinary(5000000), nullable=True)
wsdl_name = Column(String(200), nullable=True)
slow_threshold = Column(Integer, nullable=False, default=99999)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('services', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, impl_name=None, is_internal=None, cluster=None, wsdl=None,
wsdl_name=None):
self.id = id
self.name = name
self.is_active = is_active
self.impl_name = impl_name
self.is_internal = is_internal
self.cluster = cluster
self.wsdl = wsdl
self.wsdl_name = wsdl_name
self.plain_http_channels = [] # Not used by the database
self.soap_channels = [] # Not used by the database
self.amqp_channels = [] # Not used by the database
self.wmq_channels = [] # Not used by the database
self.zmq_channels = [] # Not used by the database
self.scheduler_jobs = [] # Not used by the database
self.deployment_info = [] # Not used by the database
self.source_info = None # Not used by the database
self.may_be_deleted = False # Not used by the database
self.sample_cid = None # Not used by the database
self.sample_req_timestamp = None # Not used by the database
self.sample_resp_timestamp = None # Not used by the database
self.sample_req = None # Not used by the database
self.sample_resp = None # Not used by the database
self.sample_req_resp_freq = None # Not used by the database
self.sample_req_html = None # Not used by the database
self.sample_resp_html = None # Not used by the database
self.usage = None # Not used by the database
self.time_last = None # Not used by the database
self.time_min_all_time = None # Not used by the database
self.time_max_all_time = None # Not used by the database
self.time_mean_all_time = None # Not used by the database
self.time_usage_1h = None # Not used by the database
self.time_min_1h = None # Not used by the database
self.time_max_1h = None # Not used by the database
self.time_trend_mean_1h = None # Not used by the database
self.time_trend_rate_1h = None # Not used by the database
self.docs_summary = None # Not used by the database
self.docs_description = None # Not used by the database
self.invokes = None # Not used by the database
self.invoked_by = None # Not used by the database
self.last_timestamp = None # Not used by the database
self.last_timestamp_utc = None # Not used by the database
# ################################################################################################################################
class DeployedService(Base):
""" A service living on a given server.
"""
__tablename__ = 'deployed_service'
__table_args__ = (UniqueConstraint('server_id', 'service_id'), {})
deployment_time = Column(DateTime(), nullable=False)
details = Column(String(2000), nullable=False)
source = Column(LargeBinary(500000), nullable=True)
source_path = Column(String(2000), nullable=True)
source_hash = Column(String(512), nullable=True)
source_hash_method = Column(String(20), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=False, primary_key=True)
server = relationship(Server, backref=backref('deployed_services', order_by=deployment_time, cascade='all, delete, delete-orphan'))
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False, primary_key=True)
service = relationship(Service, backref=backref('deployment_data', order_by=deployment_time, cascade='all, delete, delete-orphan'))
def __init__(self, deployment_time, details, server_id, service_id, source, source_path, source_hash, source_hash_method):
self.deployment_time = deployment_time
self.details = details
self.server_id = server_id
self.service_id = service_id
self.source = source
self.source_path = source_path
self.source_hash = source_hash
self.source_hash_method = source_hash_method
# ################################################################################################################################
class Job(Base):
""" A scheduler's job. Stores all the information needed to execute a job
    if it's a one-time job; otherwise, the information is kept in related tables.
"""
__tablename__ = 'job'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('job_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
job_type = Column(Enum(SCHEDULER.JOB_TYPE.ONE_TIME, SCHEDULER.JOB_TYPE.INTERVAL_BASED,
SCHEDULER.JOB_TYPE.CRON_STYLE, name='job_type'), nullable=False)
start_date = Column(DateTime(), nullable=False)
extra = Column(LargeBinary(500000), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('jobs', order_by=name, cascade='all, delete, delete-orphan'))
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('jobs', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, job_type=None, start_date=None, extra=None, cluster=None,
cluster_id=None, service=None, service_id=None, service_name=None, interval_based=None, cron_style=None,
definition_text=None, job_type_friendly=None):
self.id = id
self.name = name
self.is_active = is_active
self.job_type = job_type
self.start_date = start_date
self.extra = extra
self.cluster = cluster
self.cluster_id = cluster_id
self.service = service
self.service_id = service_id
self.service_name = service_name # Not used by the database
self.interval_based = interval_based
self.cron_style = cron_style
self.definition_text = definition_text # Not used by the database
self.job_type_friendly = job_type_friendly # Not used by the database
# ################################################################################################################################
class IntervalBasedJob(Base):
""" A Cron-style scheduler's job.
"""
__tablename__ = 'job_interval_based'
__table_args__ = (UniqueConstraint('job_id'), {})
id = Column(Integer, Sequence('job_intrvl_seq'), primary_key=True)
weeks = Column(Integer, nullable=True)
days = Column(Integer, nullable=True)
hours = Column(Integer, nullable=True)
minutes = Column(Integer, nullable=True)
seconds = Column(Integer, nullable=True)
repeats = Column(Integer, nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
job_id = Column(Integer, ForeignKey('job.id', ondelete='CASCADE'), nullable=False)
job = relationship(Job, backref=backref('interval_based', uselist=False, cascade='all, delete, delete-orphan', single_parent=True))
def __init__(self, id=None, job=None, weeks=None, days=None, hours=None, minutes=None, seconds=None, repeats=None,
definition_text=None):
self.id = id
self.job = job
self.weeks = weeks
self.days = days
self.hours = hours
self.minutes = minutes
self.seconds = seconds
self.repeats = repeats
self.definition_text = definition_text # Not used by the database
# ################################################################################################################################
class CronStyleJob(Base):
""" A Cron-style scheduler's job.
"""
__tablename__ = 'job_cron_style'
__table_args__ = (UniqueConstraint('job_id'), {})
id = Column(Integer, Sequence('job_cron_seq'), primary_key=True)
cron_definition = Column(String(4000), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
job_id = Column(Integer, ForeignKey('job.id', ondelete='CASCADE'), nullable=False)
job = relationship(
Job, backref=backref('cron_style', uselist=False, cascade='all, delete, delete-orphan', single_parent=True))
def __init__(self, id=None, job=None, cron_definition=None):
self.id = id
self.job = job
self.cron_definition = cron_definition
# ################################################################################################################################
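# A minimal sketch of defining a cron-style job with the two models above, assuming pre-existing
# cluster and service rows - the names and the cron expression are illustrative placeholders.
def _example_create_cron_job(session, cluster, service, start_date):
    """ Illustrative only - a Job row of type CRON_STYLE plus its CronStyleJob details row.
    """
    job = Job(name='example.job', is_active=True, job_type=SCHEDULER.JOB_TYPE.CRON_STYLE,
        start_date=start_date, cluster=cluster, service=service)
    details = CronStyleJob(job=job, cron_definition='0 * * * *')
    session.add_all([job, details])
    session.commit()

# ################################################################################################################################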
class Cache(Base):
""" Base class for all cache definitions.
"""
__tablename__ = 'cache'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
__mapper_args__ = {'polymorphic_on': 'cache_type'}
id = Column(Integer, Sequence('cache_builtin_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_default = Column(Boolean(), nullable=False)
cache_type = Column(String(45), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('cache_list', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self):
self.current_size = 0 # Not used by the DB
# ################################################################################################################################
class CacheBuiltin(Cache):
""" Cache definitions using mechanisms built into Zato.
"""
__tablename__ = 'cache_builtin'
__mapper_args__ = {'polymorphic_identity':'builtin'}
cache_id = Column(Integer, ForeignKey('cache.id'), primary_key=True)
max_size = Column(Integer(), nullable=False)
max_item_size = Column(Integer(), nullable=False)
extend_expiry_on_get = Column(Boolean(), nullable=False)
extend_expiry_on_set = Column(Boolean(), nullable=False)
sync_method = Column(String(20), nullable=False)
persistent_storage = Column(String(40), nullable=False)
def __init__(self, cluster=None):
self.cluster = cluster
# ################################################################################################################################
class CacheMemcached(Cache):
""" Cache definitions using Memcached.
"""
__tablename__ = 'cache_memcached'
__mapper_args__ = {'polymorphic_identity':'memcached'}
cache_id = Column(Integer, ForeignKey('cache.id'), primary_key=True)
servers = Column(Text, nullable=False)
is_debug = Column(Boolean(), nullable=False)
extra = Column(LargeBinary(20000), nullable=True)
def __init__(self, cluster=None):
self.cluster = cluster
# ################################################################################################################################
class ConnDefAMQP(Base):
""" An AMQP connection definition.
"""
__tablename__ = 'conn_def_amqp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('conn_def_amqp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
host = Column(String(200), nullable=False)
port = Column(Integer(), nullable=False)
vhost = Column(String(200), nullable=False)
username = Column(String(200), nullable=False)
password = Column(String(200), nullable=False)
frame_max = Column(Integer(), nullable=False)
heartbeat = Column(Integer(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('amqp_conn_defs', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, host=None, port=None, vhost=None, username=None, password=None, frame_max=None,
heartbeat=None, cluster_id=None, cluster=None):
self.id = id
self.name = name
self.host = host
self.port = port
self.vhost = vhost
self.username = username
self.password = password
self.frame_max = frame_max
self.heartbeat = heartbeat
self.cluster_id = cluster_id
self.cluster = cluster
# ################################################################################################################################
class ConnDefWMQ(Base):
""" A IBM MQ connection definition.
"""
__tablename__ = 'conn_def_wmq'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('conn_def_wmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
# TODO is_active = Column(Boolean(), nullable=False)
host = Column(String(200), nullable=False)
port = Column(Integer, nullable=False)
queue_manager = Column(String(200), nullable=True)
channel = Column(String(200), nullable=False)
cache_open_send_queues = Column(Boolean(), nullable=False)
cache_open_receive_queues = Column(Boolean(), nullable=False)
use_shared_connections = Column(Boolean(), nullable=False)
dynamic_queue_template = Column(String(200), nullable=False, server_default='SYSTEM.DEFAULT.MODEL.QUEUE') # We're not actually using it yet
ssl = Column(Boolean(), nullable=False)
ssl_cipher_spec = Column(String(200))
ssl_key_repository = Column(String(200))
needs_mcd = Column(Boolean(), nullable=False)
use_jms = Column(Boolean(), nullable=False)
max_chars_printed = Column(Integer, nullable=False)
username = Column(String(100), nullable=True)
password = Column(String(200), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('wmq_conn_defs', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, host=None, port=None, queue_manager=None, channel=None, cache_open_send_queues=None,
cache_open_receive_queues=None, use_shared_connections=None, ssl=None, ssl_cipher_spec=None, ssl_key_repository=None,
needs_mcd=None, max_chars_printed=None, cluster_id=None, cluster=None, username=None, password=None, use_jms=None):
self.id = id
self.name = name
self.host = host
self.queue_manager = queue_manager
self.channel = channel
self.port = port
self.cache_open_receive_queues = cache_open_receive_queues
self.cache_open_send_queues = cache_open_send_queues
self.use_shared_connections = use_shared_connections
self.ssl = ssl
self.ssl_cipher_spec = ssl_cipher_spec
self.ssl_key_repository = ssl_key_repository
self.needs_mcd = needs_mcd
self.max_chars_printed = max_chars_printed
self.cluster_id = cluster_id
self.cluster = cluster
self.username = username
self.password = password
self.use_jms = use_jms
# ################################################################################################################################
class OutgoingAMQP(Base):
""" An outgoing AMQP connection.
"""
__tablename__ = 'out_amqp'
__table_args__ = (UniqueConstraint('name', 'def_id'), {})
id = Column(Integer, Sequence('out_amqp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
delivery_mode = Column(SmallInteger(), nullable=False)
priority = Column(SmallInteger(), server_default=str(AMQP.DEFAULT.PRIORITY), nullable=False)
content_type = Column(String(200), nullable=True)
content_encoding = Column(String(200), nullable=True)
expiration = Column(Integer(), nullable=True)
user_id = Column(String(200), nullable=True)
app_id = Column(String(200), nullable=True)
pool_size = Column(SmallInteger(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
def_id = Column(Integer, ForeignKey('conn_def_amqp.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(ConnDefAMQP, backref=backref('out_conns_amqp', cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, delivery_mode=None, priority=None, content_type=None,
content_encoding=None, expiration=None, user_id=None, app_id=None, def_id=None, delivery_mode_text=None,
def_name=None):
self.id = id
self.name = name
self.is_active = is_active
self.delivery_mode = delivery_mode
self.priority = priority
self.content_type = content_type
self.content_encoding = content_encoding
self.expiration = expiration
self.user_id = user_id
self.app_id = app_id
self.def_id = def_id
self.delivery_mode_text = delivery_mode_text # Not used by the DB
self.def_name = def_name # Not used by the DB
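# For context - in AMQP 0-9-1, delivery_mode 1 denotes transient and 2 persistent
# messages, while priority ranges from 0 (lowest) to 9 (highest); this is stated
# for illustration only, the exact semantics depend on the protocol version and broker.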
# ################################################################################################################################
class OutgoingFTP(Base):
""" An outgoing FTP connection.
"""
__tablename__ = 'out_ftp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_ftp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(200), nullable=False)
user = Column(String(200), nullable=True)
password = Column(String(200), nullable=True)
acct = Column(String(200), nullable=True)
timeout = Column(Integer, nullable=True)
port = Column(Integer, server_default=str(FTP_PORT), nullable=False)
dircache = Column(Boolean(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_ftp', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, host=None, user=None, password=None, acct=None, timeout=None,
port=None, dircache=None, cluster_id=None):
self.id = id
self.name = name
self.is_active = is_active
self.host = host
self.user = user
self.password = password
self.acct = acct
self.timeout = timeout
self.port = port
self.dircache = dircache
self.cluster_id = cluster_id
# ################################################################################################################################
class OutgoingOdoo(Base):
""" An outgoing Odoo connection.
"""
__tablename__ = 'out_odoo'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_odoo_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(200), nullable=False)
port = Column(Integer(), nullable=False, server_default=str(ODOO.DEFAULT.PORT))
user = Column(String(200), nullable=False)
database = Column(String(200), nullable=False)
protocol = Column(String(200), nullable=False)
pool_size = Column(Integer(), nullable=False, server_default=str(ODOO.DEFAULT.POOL_SIZE))
password = Column(String(400), nullable=False)
client_type = Column(String(40), nullable=False, server_default=str(ODOO.CLIENT_TYPE.OPENERP_CLIENT_LIB))
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_odoo', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, cluster=None):
self.cluster = cluster
self.protocol_name = None # Not used by the DB
# ################################################################################################################################
class OutgoingSAP(Base):
""" An outgoing SAP RFC connection.
"""
__tablename__ = 'out_sap'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_sap_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(200), nullable=False)
sysnr = Column(String(3), nullable=True, server_default=str(SAP.DEFAULT.INSTANCE))
user = Column(String(200), nullable=False)
client = Column(String(4), nullable=False)
sysid = Column(String(4), nullable=False)
password = Column(String(400), nullable=False)
pool_size = Column(Integer(), nullable=False, server_default=str(SAP.DEFAULT.POOL_SIZE))
router = Column(String(400), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_sap', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, cluster=None):
self.cluster = cluster
# ################################################################################################################################
class OutgoingSTOMP(Base):
""" An outgoing STOMP connection.
"""
__tablename__ = 'out_stomp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_stomp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
username = Column(String(200), nullable=True, server_default=STOMP.DEFAULT.USERNAME)
password = Column(String(200), nullable=True)
address = Column(String(200), nullable=False, server_default=STOMP.DEFAULT.ADDRESS)
proto_version = Column(String(20), nullable=False, server_default=STOMP.DEFAULT.PROTOCOL)
timeout = Column(Integer(), nullable=False, server_default=str(STOMP.DEFAULT.TIMEOUT))
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_stomp', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, cluster=None):
self.cluster = cluster
# ################################################################################################################################
class OutgoingWMQ(Base):
""" An outgoing IBM MQ connection.
"""
__tablename__ = 'out_wmq'
__table_args__ = (UniqueConstraint('name', 'def_id'), {})
id = Column(Integer, Sequence('out_wmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
delivery_mode = Column(SmallInteger(), nullable=False)
priority = Column(SmallInteger(), server_default=str(WMQ_DEFAULT_PRIORITY), nullable=False)
expiration = Column(String(20), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
def_id = Column(Integer, ForeignKey('conn_def_wmq.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(ConnDefWMQ, backref=backref('out_conns_wmq', cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, delivery_mode=None, priority=None, expiration=None, def_id=None,
cluster=None, delivery_mode_text=None, def_name=None):
self.id = id
self.name = name
self.is_active = is_active
self.delivery_mode = delivery_mode
self.priority = priority
self.expiration = expiration
self.def_id = def_id
self.cluster = cluster
self.delivery_mode_text = delivery_mode_text # Not used by the DB
self.def_name = def_name # Not used by DB
self.def_name_full_text = None # Not used by DB
# ################################################################################################################################
class OutgoingZMQ(Base):
""" An outgoing Zero MQ connection.
"""
__tablename__ = 'out_zmq'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_zmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
address = Column(String(200), nullable=False)
socket_type = Column(String(20), nullable=False)
socket_method = Column(String(20), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_zmq', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, address=None, socket_type=None, cluster_id=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.socket_type = socket_type
self.address = address
self.cluster_id = cluster_id
self.cluster = cluster
# ################################################################################################################################
class ChannelAMQP(Base):
""" An incoming AMQP connection.
"""
__tablename__ = 'channel_amqp'
__table_args__ = (UniqueConstraint('name', 'def_id'), {})
id = Column(Integer, Sequence('channel_amqp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
queue = Column(String(200), nullable=False)
consumer_tag_prefix = Column(String(200), nullable=False)
pool_size = Column(Integer, nullable=False)
ack_mode = Column(String(20), nullable=False)
prefetch_count = Column(Integer, nullable=False)
data_format = Column(String(20), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('channels_amqp', order_by=name, cascade='all, delete, delete-orphan'))
def_id = Column(Integer, ForeignKey('conn_def_amqp.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(ConnDefAMQP, backref=backref('channels_amqp', cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, queue=None, consumer_tag_prefix=None, def_id=None, def_name=None,
service_name=None, data_format=None):
self.id = id
self.name = name
self.is_active = is_active
self.queue = queue
self.consumer_tag_prefix = consumer_tag_prefix
self.def_id = def_id
self.def_name = def_name # Not used by the DB
self.service_name = service_name # Not used by the DB
self.data_format = data_format
# ################################################################################################################################
class ChannelSTOMP(Base):
""" An incoming STOMP connection.
"""
__tablename__ = 'channel_stomp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('channel_stomp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
username = Column(String(200), nullable=True, server_default=STOMP.DEFAULT.USERNAME)
password = Column(String(200), nullable=True)
address = Column(String(200), nullable=False, server_default=STOMP.DEFAULT.ADDRESS)
proto_version = Column(String(20), nullable=False, server_default=STOMP.DEFAULT.PROTOCOL)
timeout = Column(Integer(), nullable=False, server_default=str(STOMP.DEFAULT.TIMEOUT))
sub_to = Column(Text, nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('channels_stomp', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('channels_stomp', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class ChannelWMQ(Base):
""" An incoming IBM MQ connection.
"""
__tablename__ = 'channel_wmq'
__table_args__ = (UniqueConstraint('name', 'def_id'), {})
id = Column(Integer, Sequence('channel_wmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
queue = Column(String(200), nullable=False)
data_format = Column(String(20), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('channels_wmq', order_by=name, cascade='all, delete, delete-orphan'))
def_id = Column(Integer, ForeignKey('conn_def_wmq.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(ConnDefWMQ, backref=backref('channels_wmq', cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, queue=None, def_id=None, def_name=None, service_name=None,
data_format=None):
self.id = id
self.name = name
self.is_active = is_active
self.queue = queue
self.def_id = def_id
self.def_name = def_name # Not used by the DB
self.service_name = service_name # Not used by the DB
self.data_format = data_format
# ################################################################################################################################
class ChannelZMQ(Base):
""" An incoming Zero MQ connection.
"""
__tablename__ = 'channel_zmq'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('channel_zmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
address = Column(String(200), nullable=False)
socket_type = Column(String(20), nullable=False)
sub_key = Column(String(200), nullable=True)
data_format = Column(String(20), nullable=True)
socket_method = Column(String(20), nullable=False)
pool_strategy = Column(String(20), nullable=False)
service_source = Column(String(20), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('channels_zmq', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('channels_zmq', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, address=None, socket_type=None, socket_type_text=None, sub_key=None,
service_name=None, data_format=None):
self.id = id
self.name = name
self.is_active = is_active
self.address = address
self.socket_type = socket_type
self.socket_type_text = socket_type_text # Not used by the DB
self.sub_key = sub_key
self.service_name = service_name # Not used by the DB
self.data_format = data_format
# ################################################################################################################################
class DeploymentPackage(Base):
""" A package to be deployed onto a server, either a plain .py/.pyw or
a Distutils2 archive.
"""
__tablename__ = 'deployment_package'
id = Column(Integer, Sequence('depl_package_seq'), primary_key=True)
deployment_time = Column(DateTime(), nullable=False)
details = Column(String(2000), nullable=False)
payload_name = Column(String(200), nullable=False)
payload = Column(LargeBinary(5000000), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=False, primary_key=False)
server = relationship(
Server, backref=backref('originating_deployment_packages',
order_by=deployment_time, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, deployment_time=None, details=None, payload_name=None, payload=None):
self.id = id
self.deployment_time = deployment_time
self.details = details
self.payload_name = payload_name
self.payload = payload
# ################################################################################################################################
class DeploymentStatus(Base):
""" Whether a server has already deployed a given package.
"""
__tablename__ = 'deployment_status'
__table_args__ = (UniqueConstraint('package_id', 'server_id'), {})
id = Column(Integer, Sequence('depl_status_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
package_id = Column(
Integer, ForeignKey('deployment_package.id', ondelete='CASCADE'), nullable=False, primary_key=False)
package = relationship(
DeploymentPackage, backref=backref('deployment_status_list', order_by=package_id, cascade='all, delete, delete-orphan'))
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=False, primary_key=False)
server = relationship(
Server, backref=backref('deployment_status_list', order_by=server_id, cascade='all, delete, delete-orphan'))
# See zato.common.DEPLOYMENT_STATUS
status = Column(String(20), nullable=False)
status_change_time = Column(DateTime(), nullable=False)
def __init__(self, package_id=None, server_id=None, status=None, status_change_time=None):
self.package_id = package_id
self.server_id = server_id
self.status = status
self.status_change_time = status_change_time
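# A usage sketch, not part of the schema - it shows one way to find deployment
# packages for which a given server has no DeploymentStatus row yet. The session
# object is assumed to come from the caller's own SQLAlchemy setup.
def _example_packages_without_status(session, server_id):
    already_seen = session.query(DeploymentStatus.package_id).\
        filter(DeploymentStatus.server_id==server_id)
    return session.query(DeploymentPackage).\
        filter(~DeploymentPackage.id.in_(already_seen)).\
        all()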
# ################################################################################################################################
class MsgNamespace(Base):
""" A message namespace, used in XPath, for instance.
"""
__tablename__ = 'msg_ns'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('msg_ns_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(String(500), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('namespaces', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, value=None, cluster_id=None):
self.id = id
self.name = name
self.value = value
self.cluster_id = cluster_id
# ################################################################################################################################
class XPath(Base):
""" An XPath expression to run against XML messages.
"""
__tablename__ = 'msg_xpath'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('msg_xpath_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(String(1500), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('xpaths', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, value=None, cluster_id=None):
self.id = id
self.name = name
self.value = value
self.cluster_id = cluster_id
# ################################################################################################################################
class JSONPointer(Base):
""" An XPath-list expression to run against JSON messages.
"""
__tablename__ = 'msg_json_pointer'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('msg_json_pointer_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(String(1500), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('json_pointers', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, value=None, cluster_id=None):
self.id = id
self.name = name
self.value = value
self.cluster_id = cluster_id
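# For illustration only - a JSONPointer row could associate a name such as
# 'customer-id' with a value like '/order/customer/id', i.e. an RFC 6901
# JSON Pointer locating an element of incoming JSON messages.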
# ################################################################################################################################
class OpenStackSwift(Base):
""" A connection to OpenStack's Swift (no longer used, to be removed).
"""
__tablename__ = 'os_swift'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('os_swift_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
pool_size = Column(Integer, nullable=False)
auth_url = Column(String(200), nullable=False)
auth_version = Column(String(200), nullable=False)
user = Column(String(200), nullable=True)
secret_key = Column(String(200), nullable=True)
retries = Column(Integer, nullable=False)
is_snet = Column(Boolean(), nullable=False)
starting_backoff = Column(Integer, nullable=False)
max_backoff = Column(Integer, nullable=False)
tenant_name = Column(String(200), nullable=True)
should_validate_cert = Column(Boolean(), nullable=False)
cacert = Column(String(200), nullable=True)
should_retr_ratelimit = Column(Boolean(), nullable=False)
needs_tls_compr = Column(Boolean(), nullable=False)
custom_options = Column(String(2000), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('openstack_swift_conns', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, auth_url=None, auth_version=None, user=None, key=None, retries=None,
is_snet=None, starting_backoff=None, max_backoff=None, tenant_name=None, should_validate_cert=None,
cacert=None, should_retr_ratelimit=None, needs_tls_compr=None, custom_options=None):
self.id = id
self.name = name
self.is_active = is_active
self.auth_url = auth_url
self.auth_version = auth_version
self.user = user
        self.key = key # Note: not a mapped column - the schema's column is named 'secret_key'
self.retries = retries
self.is_snet = is_snet
self.starting_backoff = starting_backoff
self.max_backoff = max_backoff
self.tenant_name = tenant_name
self.should_validate_cert = should_validate_cert
self.cacert = cacert
self.should_retr_ratelimit = should_retr_ratelimit
self.needs_tls_compr = needs_tls_compr
self.custom_options = custom_options
# ################################################################################################################################
class AWSS3(Base):
""" An outgoing connection to AWS S3.
"""
__tablename__ = 'aws_s3'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('aws_s3_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
pool_size = Column(Integer, nullable=False, default=CLOUD.AWS.S3.DEFAULTS.POOL_SIZE)
address = Column(String(200), nullable=False, default=CLOUD.AWS.S3.DEFAULTS.ADDRESS)
debug_level = Column(Integer, nullable=False, default=CLOUD.AWS.S3.DEFAULTS.DEBUG_LEVEL)
suppr_cons_slashes = Column(Boolean(), nullable=False, default=True)
content_type = Column(String(200), nullable=False, default=CLOUD.AWS.S3.DEFAULTS.CONTENT_TYPE)
    metadata_ = Column(String(2000), nullable=True) # Cannot be 'metadata' because that name is reserved by SQLAlchemy
bucket = Column(String(2000), nullable=True)
encrypt_at_rest = Column(Boolean(), nullable=False, default=False)
storage_class = Column(String(200), nullable=False, default=CLOUD.AWS.S3.STORAGE_CLASS.DEFAULT)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=False)
security = relationship(SecurityBase, backref=backref('aws_s3_conns', order_by=is_active, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('aws_s3_conns', order_by=name, cascade='all, delete, delete-orphan'))
def to_json(self):
return to_json(self)
# ################################################################################################################################
class Notification(Base):
""" A base class for all notifications, be it cloud, FTP-based or others.
"""
__tablename__ = 'notif'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
__mapper_args__ = {'polymorphic_on': 'notif_type'}
id = Column(Integer, Sequence('sec_base_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False, default=True)
notif_type = Column(String(45), nullable=False)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
interval = Column(Integer, nullable=False, default=NOTIF.DEFAULT.CHECK_INTERVAL)
name_pattern = Column(String(2000), nullable=True, default=NOTIF.DEFAULT.NAME_PATTERN)
name_pattern_neg = Column(Boolean(), nullable=True, default=False)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
get_data = Column(Boolean(), nullable=True, default=False)
get_data_patt = Column(String(2000), nullable=True, default=NOTIF.DEFAULT.GET_DATA_PATTERN)
get_data_patt_neg = Column(Boolean(), nullable=True, default=False)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('notification_list', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('notification_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class NotificationOpenStackSwift(Notification):
""" Stores OpenStack Swift notifications (no longer used).
"""
__tablename__ = 'notif_os_swift'
__mapper_args__ = {'polymorphic_identity': 'openstack_swift'}
id = Column(Integer, ForeignKey('notif.id'), primary_key=True)
containers = Column(String(16380), nullable=False)
def_id = Column(Integer, ForeignKey('os_swift.id'), primary_key=True)
definition = relationship(
OpenStackSwift, backref=backref('notif_oss_list', order_by=id, cascade='all, delete, delete-orphan'))
def to_json(self):
return to_json(self)
# ################################################################################################################################
class NotificationSQL(Notification):
""" Stores SQL notifications.
"""
__tablename__ = 'notif_sql'
__mapper_args__ = {'polymorphic_identity': 'sql'}
id = Column(Integer, ForeignKey('notif.id'), primary_key=True)
query = Column(Text, nullable=False)
def_id = Column(Integer, ForeignKey('sql_pool.id'), primary_key=True)
definition = relationship(
SQLConnectionPool, backref=backref('notif_sql_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class CassandraConn(Base):
""" Connections to Cassandra.
"""
__tablename__ = 'conn_def_cassandra'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('conn_def_cassandra_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
contact_points = Column(String(400), nullable=False, default=CASSANDRA.DEFAULT.CONTACT_POINTS)
port = Column(Integer, nullable=False, default=CASSANDRA.DEFAULT.PORT)
exec_size = Column(Integer, nullable=False, default=CASSANDRA.DEFAULT.EXEC_SIZE)
proto_version = Column(Integer, nullable=False, default=CASSANDRA.DEFAULT.PROTOCOL_VERSION)
cql_version = Column(Integer, nullable=True)
default_keyspace = Column(String(400), nullable=False)
username = Column(String(200), nullable=True)
password = Column(String(200), nullable=True)
tls_ca_certs = Column(String(200), nullable=True)
tls_client_cert = Column(String(200), nullable=True)
tls_client_priv_key = Column(String(200), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('cassandra_conn_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class ElasticSearch(Base):
__tablename__ = 'search_es'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('search_es_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False, default=True)
hosts = Column(String(400), nullable=False)
timeout = Column(Integer(), nullable=False)
body_as = Column(String(45), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('search_es_conns', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class Solr(Base):
__tablename__ = 'search_solr'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('search_solr_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False, default=True)
address = Column(String(400), nullable=False)
timeout = Column(Integer(), nullable=False)
ping_path = Column(String(40), nullable=False)
options = Column(String(800), nullable=True)
pool_size = Column(Integer(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('search_solr_conns', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class CassandraQuery(Base):
""" Cassandra query templates.
"""
__tablename__ = 'query_cassandra'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('query_cassandra_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
value = Column(LargeBinary(40000), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('cassandra_queries', order_by=name, cascade='all, delete, delete-orphan'))
def_id = Column(Integer, ForeignKey('conn_def_cassandra.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(CassandraConn, backref=backref('cassandra_queries', cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class SMTP(Base):
__tablename__ = 'email_smtp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('email_smtp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(400), nullable=False)
port = Column(Integer(), nullable=False)
timeout = Column(Integer(), nullable=False)
is_debug = Column(Boolean(), nullable=False)
username = Column(String(400), nullable=True)
password = Column(String(400), nullable=True)
mode = Column(String(20), nullable=False)
ping_address = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('smtp_conns', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class IMAP(Base):
__tablename__ = 'email_imap'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('email_imap_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(400), nullable=False)
port = Column(Integer(), nullable=False)
timeout = Column(Integer(), nullable=False)
debug_level = Column(Integer(), nullable=False)
username = Column(String(400), nullable=True)
password = Column(String(400), nullable=True)
mode = Column(String(20), nullable=False)
get_criteria = Column(String(2000), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('imap_conns', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RBACRole(Base):
""" All the roles known within a particular cluster.
"""
__tablename__ = 'rbac_role'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('rbac_role_seq'), primary_key=True)
name = Column(String(200), nullable=False)
parent_id = Column(Integer, ForeignKey('rbac_role.id', ondelete='CASCADE'), nullable=True)
parent = relationship('RBACRole', backref=backref('children'), remote_side=[id])
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rbac_roles', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RBACPermission(Base):
""" Permissions defined in a given cluster.
"""
__tablename__ = 'rbac_perm'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('rbac_perm_seq'), primary_key=True)
name = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rbac_permissions', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RBACClientRole(Base):
""" Mappings between clients and roles they have.
"""
__tablename__ = 'rbac_client_role'
__table_args__ = (UniqueConstraint('client_def', 'role_id', 'cluster_id'), {})
id = Column(Integer, Sequence('rbac_cli_rol_seq'), primary_key=True)
name = Column(String(400), nullable=False)
client_def = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
role_id = Column(Integer, ForeignKey('rbac_role.id', ondelete='CASCADE'), nullable=False)
role = relationship(RBACRole, backref=backref('rbac_client_roles', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rbac_client_roles', order_by=client_def, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RBACRolePermission(Base):
""" Mappings between roles and permissions they have on given services.
"""
__tablename__ = 'rbac_role_perm'
__table_args__ = (UniqueConstraint('role_id', 'perm_id', 'service_id', 'cluster_id'), {})
id = Column(Integer, Sequence('rbac_role_perm_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
role_id = Column(Integer, ForeignKey('rbac_role.id', ondelete='CASCADE'), nullable=False)
role = relationship(RBACRole, backref=backref('rbac_role_perms', order_by=id, cascade='all, delete, delete-orphan'))
perm_id = Column(Integer, ForeignKey('rbac_perm.id', ondelete='CASCADE'), nullable=False)
perm = relationship(RBACPermission, backref=backref('rbac_role_perms', order_by=id, cascade='all, delete, delete-orphan'))
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship('Service', backref=backref('role_perm', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rbac_role_permissions', order_by=id, cascade='all, delete, delete-orphan'))
def get_name(self):
return '{}/{}/{}/{}'.format(self.id, self.role_id, self.perm_id, self.service_id)
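# For illustration - get_name above builds a composite identifier out of the row's
# own ID followed by the role, permission and service IDs, e.g. '7/2/3/1984'.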
# ################################################################################################################################
class KVData(Base):
""" Key/value data table.
"""
__tablename__ = 'kv_data'
__table_args__ = (Index('key_clust_id_idx', 'key', 'cluster_id', unique=True, mysql_length={'key':767}),)
id = Column(Integer, Sequence('kv_data_id_seq'), primary_key=True)
key = Column(LargeBinary(), nullable=False)
value = Column(LargeBinary(), nullable=True)
data_type = Column(String(200), nullable=False, default='text')
creation_time = Column(DateTime(), nullable=False)
expiry_time = Column(DateTime(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=True)
cluster = relationship(Cluster, backref=backref('kv_data', order_by=key, cascade='all, delete, delete-orphan'))
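# An illustrative sketch, assuming the caller provides a SQLAlchemy session -
# it stores a key/value pair with a one-hour expiry. Both columns are LargeBinary,
# which is why the values are encoded to bytes first.
def _example_store_kv_data(session, cluster_id, key, value):
    from datetime import datetime, timedelta
    now = datetime.utcnow()
    item = KVData()
    item.key = key.encode('utf8')
    item.value = value.encode('utf8')
    item.creation_time = now
    item.expiry_time = now + timedelta(hours=1)
    item.cluster_id = cluster_id
    session.add(item)
    session.commit()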
# ################################################################################################################################
class ChannelWebSocket(Base):
""" A WebSocket connection definition.
"""
__tablename__ = 'channel_web_socket'
__table_args__ = (UniqueConstraint('name', 'cluster_id'),
UniqueConstraint('address', 'cluster_id'), {})
id = Column(Integer, Sequence('web_socket_chan_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False)
is_out = Column(Boolean(), nullable=False, default=sa_false())
address = Column(String(200), nullable=False)
data_format = Column(String(20), nullable=False)
new_token_wait_time = Column(Integer(), nullable=False)
token_ttl = Column(Integer(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
service = relationship('Service', backref=backref('web_socket', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('web_socket_list', order_by=name, cascade='all, delete, delete-orphan'))
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=True)
def __init__(self, id=None, name=None, is_active=None, is_internal=None, address=None, data_format=None,
new_token_wait_time=None, token_ttl=None, service_id=None, service=None, cluster_id=None, cluster=None,
security_id=None, security=None):
self.id = id
self.name = name
self.is_active = is_active
self.is_internal = is_internal
self.address = address
self.data_format = data_format
self.new_token_wait_time = new_token_wait_time
self.token_ttl = token_ttl
self.service_id = service_id
self.service = service
self.cluster_id = cluster_id
self.cluster = cluster
self.security_id = security_id
self.security = security
self.service_name = None # Not used by DB
self.sec_type = None # Not used by DB
# ################################################################################################################################
class WebSocketClient(Base):
""" An active WebSocket client - currently connected to a Zato server process.
"""
__tablename__ = 'web_socket_client'
__table_args__ = (
Index('wscl_pub_client_idx', 'cluster_id', 'pub_client_id', unique=True),
Index('wscl_cli_ext_n_idx', 'cluster_id', 'ext_client_name', unique=False),
Index('wscl_cli_ext_i_idx', 'cluster_id', 'ext_client_id', unique=False),
Index('wscl_pr_addr_idx', 'cluster_id', 'peer_address', unique=False),
Index('wscl_pr_fqdn_idx', 'cluster_id', 'peer_fqdn', unique=False),
{})
# This ID is for SQL
id = Column(Integer, Sequence('web_socket_cli_seq'), primary_key=True)
is_internal = Column(Boolean(), nullable=False)
# This one is assigned by Zato
pub_client_id = Column(String(200), nullable=False)
# These are assigned by clients themselves
ext_client_id = Column(String(200), nullable=False)
ext_client_name = Column(String(200), nullable=True)
local_address = Column(String(400), nullable=False)
peer_address = Column(String(400), nullable=False)
peer_fqdn = Column(String(400), nullable=False)
connection_time = Column(DateTime, nullable=False)
last_seen = Column(DateTime, nullable=False)
server_proc_pid = Column(Integer, nullable=False)
server_name = Column(String(200), nullable=False) # References server.name
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
channel_id = Column(Integer, ForeignKey('channel_web_socket.id', ondelete='CASCADE'), nullable=False)
channel = relationship(
ChannelWebSocket, backref=backref('clients', order_by=local_address, cascade='all, delete, delete-orphan'))
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=False)
server = relationship(
Server, backref=backref('server_web_socket_clients', order_by=local_address, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(
Cluster, backref=backref('web_socket_client_list', order_by=last_seen, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class WebSocketClientPubSubKeys(Base):
""" Associates currently active WebSocket clients with subscription keys.
"""
__tablename__ = 'web_socket_cli_ps_keys'
__table_args__ = (
Index('wscl_psk_cli', 'cluster_id', 'client_id', unique=False),
Index('wscl_psk_sk', 'cluster_id', 'sub_key', unique=False),
{})
id = Column(Integer, Sequence('web_socket_cli_ps_seq'), primary_key=True)
# The same as in web_socket_sub.sub_key
sub_key = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
client_id = Column(Integer, ForeignKey('web_socket_client.id', ondelete='CASCADE'), nullable=False)
client = relationship(
WebSocketClient, backref=backref('web_socket_cli_ps_keys', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref(
'web_socket_cli_ps_keys', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class WebSocketSubscription(Base):
""" Persistent subscriptions pertaining to a given long-running, possibly restartable, WebSocket connection.
"""
__tablename__ = 'web_socket_sub'
__table_args__ = (
Index('wssub_channel_idx', 'cluster_id', 'channel_id', unique=False),
Index('wssub_subkey_idx', 'cluster_id', 'sub_key', unique=True),
Index('wssub_extcli_idx', 'cluster_id', 'ext_client_id', unique=False),
Index('wssub_subkey_chan_idx', 'cluster_id', 'sub_key', 'channel_id', unique=True),
{})
id = Column(Integer, Sequence('web_socket_sub_seq'), primary_key=True)
is_internal = Column(Boolean(), nullable=False)
ext_client_id = Column(String(200), nullable=False)
    # Each transient, per-connection web_socket_cli_ps_keys.sub_key refers to this column
sub_key = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
channel_id = Column(Integer, ForeignKey('channel_web_socket.id', ondelete='CASCADE'), nullable=True)
channel = relationship(
ChannelWebSocket, backref=backref('web_socket_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
subscription_id = Column(Integer, ForeignKey('pubsub_sub.id', ondelete='CASCADE'), nullable=False)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('web_socket_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class PubSubEndpoint(Base):
""" An individual endpoint participating in publish/subscribe scenarios.
"""
__tablename__ = 'pubsub_endpoint'
__table_args__ = (
Index('pubsb_endp_clust_idx', 'cluster_id', unique=False),
Index('pubsb_endp_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_endp_name_idx', 'cluster_id', 'name', unique=True),
UniqueConstraint('cluster_id', 'security_id'),
UniqueConstraint('cluster_id', 'service_id'),
UniqueConstraint('cluster_id', 'ws_channel_id'),
{})
id = Column(Integer, Sequence('pubsub_endp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_internal = Column(Boolean(), nullable=False, server_default=sa_false())
    is_active = Column(Boolean(), nullable=False, server_default=sa_true()) # Unused for now
endpoint_type = Column(String(40), nullable=False) # WSX, REST, AMQP and other types
last_seen = Column(BigInteger(), nullable=True)
last_pub_time = Column(BigInteger(), nullable=True)
last_sub_time = Column(BigInteger(), nullable=True)
last_deliv_time = Column(BigInteger(), nullable=True)
# Endpoint's role, e.g. publisher, subscriber or both
role = Column(String(40), nullable=False)
# Tags describing this endpoint
    tags = Column(Text, nullable=True) # Unused for now
# Patterns for topics that this endpoint may subscribe to
topic_patterns = Column(Text, nullable=True)
# Patterns for tags of publishers
pub_tag_patterns = Column(Text, nullable=True) # Unused for now
# Patterns for tags of messages
message_tag_patterns = Column(Text, nullable=True) # Unused for now
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# Endpoint is a service
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
# Identifies the endpoint through its security definition, e.g. a username/password combination.
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=True)
security = relationship(SecurityBase, backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
# Identifies the endpoint through a reference to a generic connection
gen_conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=True)
gen_conn = relationship('GenericConn', backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
# Identifies the endpoint through a long-running WebSockets channel
ws_channel_id = Column(Integer, ForeignKey('channel_web_socket.id', ondelete='CASCADE'), nullable=True)
ws_channel = relationship(
ChannelWebSocket, backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
sec_type = None # Not used by DB
sec_name = None # Not used by DB
ws_channel_name = None # Not used by DB
service_name = None # Not used by DB
# ################################################################################################################################
class PubSubTopic(Base):
""" A topic in pub/sub.
"""
__tablename__ = 'pubsub_topic'
__table_args__ = (
Index('pubsb_tp_clust_idx', 'cluster_id', unique=False),
Index('pubsb_tp_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_tp_name_idx', 'cluster_id', 'name', unique=True),
{})
id = Column(Integer, Sequence('pubsub_topic_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False, default=False)
max_depth_gd = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.TOPIC_MAX_DEPTH_GD)
max_depth_non_gd = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.TOPIC_MAX_DEPTH_NON_GD)
depth_check_freq = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.DEPTH_CHECK_FREQ)
has_gd = Column(Boolean(), nullable=False) # Guaranteed delivery
is_api_sub_allowed = Column(Boolean(), nullable=False)
# How many messages to buffer in RAM before they are actually saved in SQL / pushed to tasks
pub_buffer_size_gd = Column(Integer(), nullable=False, server_default=str(PUBSUB.DEFAULT.PUB_BUFFER_SIZE_GD))
task_sync_interval = Column(Integer(), nullable=False, server_default=str(PUBSUB.DEFAULT.TASK_SYNC_INTERVAL))
task_delivery_interval = Column(Integer(), nullable=False, server_default=str(PUBSUB.DEFAULT.TASK_DELIVERY_INTERVAL))
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# A hook service invoked during publications to this specific topic
hook_service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('pubsub_topics', order_by=name, cascade='all, delete, delete-orphan'))
# Not used by DB
ext_client_id = None
last_pub_time = None
pub_time = None
ext_pub_time = None
last_pub_msg_id = None
last_endpoint_id = None
last_endpoint_name = None
last_pub_has_gd = None
last_pub_server_pid = None
last_pub_server_name = None
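# An illustrative sketch, assuming an externally provided session - it creates
# a topic with guaranteed delivery enabled, relying on column defaults
# for depths, buffers and intervals.
def _example_create_pubsub_topic(session, cluster_id, name):
    topic = PubSubTopic()
    topic.name = name
    topic.is_active = True
    topic.is_internal = False
    topic.has_gd = True # Guaranteed delivery for this topic
    topic.is_api_sub_allowed = True
    topic.cluster_id = cluster_id
    session.add(topic)
    session.commit()
    return topic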
# ################################################################################################################################
class PubSubEndpointTopic(Base):
""" A list of topics to which a given endpoint has ever published along with metadata about the latest publication.
    There is one row for each publisher/topic combination ever in use.
"""
__tablename__ = 'pubsub_endp_topic'
__table_args__ = (
Index('pubsb_endpt_clust_idx', 'cluster_id', unique=False),
Index('pubsb_endpt_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_endpt_msgid_idx', 'cluster_id', 'pub_msg_id', unique=True),
Index('pubsb_endpt_clsendtp_idx', 'cluster_id', 'endpoint_id', 'topic_id', unique=True),
{})
id = Column(Integer, Sequence('pubsub_endpt_seq'), primary_key=True)
pub_pattern_matched = Column(Text, nullable=False)
last_pub_time = Column(Numeric(20, 7, asdecimal=False), nullable=False)
pub_msg_id = Column(String(200), nullable=False)
pub_correl_id = Column(String(200), nullable=True)
in_reply_to = Column(String(200), nullable=True)
ext_client_id = Column(Text(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
endpoint_id = Column(Integer, ForeignKey('pubsub_endpoint.id', ondelete='CASCADE'), nullable=True)
endpoint = relationship(
PubSubEndpoint, backref=backref('pubsub_endpoint_topics', order_by=endpoint_id, cascade='all, delete, delete-orphan'))
topic_id = Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=False)
topic = relationship(
PubSubTopic, backref=backref('pubsub_endpoint_topics', order_by=topic_id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('pubsub_endpoint_topics', order_by=cluster_id,
cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class PubSubMessage(Base):
""" An individual message published to a topic.
"""
__tablename__ = 'pubsub_message'
__table_args__ = (
# This index is needed for FKs from other tables,
# otherwise with MySQL we get error 1215 'Cannot add foreign key constraint'
Index('pubsb_msg_pubmsg_id_idx', 'pub_msg_id', unique=True),
Index('pubsb_msg_pubmsg_clu_id_idx', 'cluster_id', 'pub_msg_id', unique=True),
Index('pubsb_msg_inreplyto_id_idx', 'cluster_id', 'in_reply_to', unique=False),
Index('pubsb_msg_correl_id_idx', 'cluster_id', 'pub_correl_id', unique=False),
{})
# For SQL joins
id = Column(Integer, Sequence('pubsub_msg_seq'), primary_key=True)
# Publicly visible message identifier
pub_msg_id = Column(String(200), nullable=False)
# Publicly visible correlation ID
pub_correl_id = Column(String(200), nullable=True)
    # Publicly visible ID of the message that the current message is a response to
in_reply_to = Column(String(200), nullable=True)
# ID of an external client on whose behalf the endpoint published the message
ext_client_id = Column(Text(), nullable=True)
    # Groups messages that logically belong together - useful if multiple messages
    # are published with the same timestamp by the same client
    # but still need to be correctly ordered.
group_id = Column(Text(), nullable=True)
position_in_group = Column(Integer, nullable=True)
# What matching pattern allowed an endpoint to publish this message
pub_pattern_matched = Column(Text, nullable=False)
pub_time = Column(Numeric(20, 7, asdecimal=False), nullable=False) # When the row was created
    ext_pub_time = Column(Numeric(20, 7, asdecimal=False), nullable=True) # When the message was created by the publisher
expiration_time = Column(Numeric(20, 7, asdecimal=False), nullable=True)
last_updated = Column(Numeric(20, 7, asdecimal=False), nullable=True)
data = Column(Text(2 * 10 ** 9), nullable=False) # 2 GB to prompt a promotion to LONGTEXT under MySQL
data_prefix = Column(Text(), nullable=False)
data_prefix_short = Column(String(200), nullable=False)
data_format = Column(String(200), nullable=False, server_default=PUBSUB.DEFAULT.DATA_FORMAT)
mime_type = Column(String(200), nullable=False, server_default=PUBSUB.DEFAULT.MIME_TYPE)
size = Column(Integer, nullable=False)
priority = Column(Integer, nullable=False, server_default=str(PUBSUB.PRIORITY.DEFAULT))
expiration = Column(BigInteger, nullable=False, server_default='0')
has_gd = Column(Boolean(), nullable=False, server_default=sa_true()) # Guaranteed delivery
    # Is the message in at least one delivery queue? If so, there is at least one
    # subscriber to whom it will be sent, so the message is no longer considered
    # available in the topic for other subscribers to receive - in other words,
    # it has already been transported to all subscriber queues (possibly to one only).
is_in_sub_queue = Column(Boolean(), nullable=False, server_default=sa_false())
# User-defined arbitrary context data
user_ctx = Column(_JSON(), nullable=True)
# Zato-defined arbitrary context data
zato_ctx = Column(_JSON(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
published_by_id = Column(Integer, ForeignKey('pubsub_endpoint.id', ondelete='CASCADE'), nullable=False)
published_by = relationship(
PubSubEndpoint, backref=backref('pubsub_msg_list', order_by=id, cascade='all, delete, delete-orphan'))
topic_id = Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=True)
topic = relationship(
PubSubTopic, backref=backref('pubsub_msg_list', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('pubsub_messages', order_by=id, cascade='all, delete, delete-orphan'))
pub_time_utc = None # Not used by DB
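# A sketch only - it shows how PubSubMessage's derived columns could be filled in
# from a message's business data; the longer prefix length is an assumption made
# for illustration, while data_prefix_short must fit in its String(200) column.
def _example_fill_pubsub_msg_data(msg, data):
    msg.data = data
    msg.data_prefix = data[:2000] # An assumed, illustrative prefix length
    msg.data_prefix_short = data[:200] # Matches the String(200) column limit
    msg.size = len(data)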
# ################################################################################################################################
class PubSubSubscription(Base):
""" Stores high-level information about topics an endpoint subscribes to.
"""
__tablename__ = 'pubsub_sub'
__table_args__ = (
Index('pubsb_sub_clust_idx', 'cluster_id', unique=False),
Index('pubsb_sub_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_sub_clust_endpt_idx', 'cluster_id', 'endpoint_id', 'topic_id', unique=False),
Index('pubsb_sub_clust_subk', 'sub_key', unique=True),
{})
id = Column(Integer, Sequence('pubsub_sub_seq'), primary_key=True)
is_internal = Column(Boolean(), nullable=False, default=False)
creation_time = Column(Numeric(20, 7, asdecimal=False), nullable=False)
sub_key = Column(String(200), nullable=False) # Externally visible ID of this subscription
sub_pattern_matched = Column(Text, nullable=False)
deliver_by = Column(Text, nullable=True) # Delivery order, e.g. by priority, date etc.
ext_client_id = Column(Text, nullable=True) # Subscriber's ID as it is stored by that external system
is_durable = Column(Boolean(), nullable=False, default=True) # For now always True = survives cluster restarts
has_gd = Column(Boolean(), nullable=False) # Guaranteed delivery
active_status = Column(String(200), nullable=False, default=PUBSUB.QUEUE_ACTIVE_STATUS.FULLY_ENABLED.id)
is_staging_enabled = Column(Boolean(), nullable=False, default=False)
delivery_method = Column(String(200), nullable=False, default=PUBSUB.DELIVERY_METHOD.NOTIFY.id)
delivery_data_format = Column(String(200), nullable=False, default=DATA_FORMAT.JSON)
delivery_endpoint = Column(Text, nullable=True)
# This is updated only periodically, e.g. once an hour, rather than each time the subscriber is seen,
# so the value is not the exact time of the last interaction with the subscriber but rather a time,
# accurate to within a certain range (default=60 minutes), when any action was last carried out with the subscriber.
# For WSX subscribers, this value will never be less than their ping timeout.
last_interaction_time = Column(Numeric(20, 7, asdecimal=False), nullable=True)
last_interaction_type = Column(String(200), nullable=True)
last_interaction_details = Column(Text, nullable=True)
# How many messages to deliver in a single batch for that endpoint
delivery_batch_size = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.DELIVERY_BATCH_SIZE)
# If delivery_batch_size is 1, whether such a single message delivered to the endpoint
# should be sent as-is or wrapped in a single-element list.
wrap_one_msg_in_list = Column(Boolean(), nullable=False)
# How many bytes to send at most in a single delivery
delivery_max_size = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.DELIVERY_MAX_SIZE) # Unused for now
# How many times to retry delivery for a single message
delivery_max_retry = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.DELIVERY_MAX_RETRY)
# Should a failed delivery of a single message block the entire delivery queue
# until that particular message has been successfully delivered.
delivery_err_should_block = Column(Boolean(), nullable=False)
# How many seconds to wait on a TCP socket error
wait_sock_err = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.WAIT_TIME_SOCKET_ERROR)
# How many seconds to wait on an error other than a TCP socket one
wait_non_sock_err = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.WAIT_TIME_NON_SOCKET_ERROR)
# A hook service invoked before messages are delivered for this specific subscription
hook_service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
# REST/POST
out_http_method = Column(Text, nullable=True, default='POST') # E.g. POST or PATCH
# AMQP
amqp_exchange = Column(Text, nullable=True)
amqp_routing_key = Column(Text, nullable=True)
# Flat files
files_directory_list = Column(Text, nullable=True)
# FTP
ftp_directory_list = Column(Text, nullable=True)
# SMS - Twilio
sms_twilio_from = Column(Text, nullable=True)
sms_twilio_to_list = Column(Text, nullable=True)
# SMTP
smtp_subject = Column(Text, nullable=True)
smtp_from = Column(Text, nullable=True)
smtp_to_list = Column(Text, nullable=True)
smtp_body = Column(Text, nullable=True)
smtp_is_html = Column(Boolean(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
topic_id = Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=False)
topic = relationship(
PubSubTopic, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
endpoint_id = Column(Integer, ForeignKey('pubsub_endpoint.id', ondelete='CASCADE'), nullable=True)
endpoint = relationship(
PubSubEndpoint, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_job_id = Column(Integer, ForeignKey('job.id', ondelete='CASCADE'), nullable=True)
out_job = relationship(
Job, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_http_soap_id = Column(Integer, ForeignKey('http_soap.id', ondelete='CASCADE'), nullable=True)
out_http_soap = relationship(
HTTPSOAP, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_smtp_id = Column(Integer, ForeignKey('email_smtp.id', ondelete='CASCADE'), nullable=True)
out_smtp = relationship(
SMTP, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_amqp_id = Column(Integer, ForeignKey('out_amqp.id', ondelete='CASCADE'), nullable=True)
out_amqp = relationship(
OutgoingAMQP, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_gen_conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=True)
out_gen_conn = relationship(
'GenericConn', backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
ws_channel_id = Column(Integer, ForeignKey('channel_web_socket.id', ondelete='CASCADE'), nullable=True)
ws_channel = relationship(
ChannelWebSocket, backref=backref('pubsub_ws_subs', order_by=id, cascade='all, delete, delete-orphan'))
# Server that will run the delivery task for this subscription
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=True)
server = relationship(
Server, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=True)
cluster = relationship(
Cluster, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
name = None # Not used by DB
topic_name = None # Not used by DB
total_depth = None # Not used by DB
current_depth_gd = None # Not used by DB
current_depth_non_gd = None # Not used by DB
# ################################################################################################################################
class PubSubEndpointEnqueuedMessage(Base):
""" A queue of messages for an individual endpoint subscribed to a topic.
"""
__tablename__ = 'pubsub_endp_msg_queue'
__table_args__ = (
Index('pubsb_enms_q_pubmid_idx', 'cluster_id', 'pub_msg_id', unique=False),
Index('pubsb_enms_q_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_enms_q_endp_idx', 'cluster_id', 'endpoint_id', unique=False),
Index('pubsb_enms_q_subs_idx', 'cluster_id', 'sub_key', unique=False),
Index('pubsb_enms_q_endptp_idx', 'cluster_id', 'endpoint_id', 'topic_id', unique=False),
{})
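# Without confirm_deleted_rows below, SQLAlchemy would warn whenever a DELETE matches
# fewer rows than expected - presumably this can legitimately happen here when queue
# messages are deleted concurrently by delivery tasks.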
__mapper_args__ = {
'confirm_deleted_rows': False
}
id = Column(Integer, Sequence('pubsub_msg_seq'), primary_key=True)
creation_time = Column(Numeric(20, 7, asdecimal=False), nullable=False) # When was the message enqueued
delivery_count = Column(Integer, nullable=False, server_default='0')
last_delivery_time = Column(Numeric(20, 7, asdecimal=False), nullable=True)
is_in_staging = Column(Boolean(), nullable=False, server_default=sa_false())
sub_pattern_matched = Column(Text, nullable=False)
# A flag indicating whether this message is deliverable at all - will be set to False
# after delivery_count reaches the subscription's max retries or if a hook service decides so.
is_deliverable = Column(Boolean(), nullable=False, server_default=sa_true())
delivery_status = Column(Integer, nullable=False, server_default=str(PUBSUB.DELIVERY_STATUS.INITIALIZED))
delivery_time = Column(Numeric(20, 7, asdecimal=False), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
pub_msg_id = Column(String(200), ForeignKey('pubsub_message.pub_msg_id', ondelete='CASCADE'), nullable=False)
endpoint_id = Column(Integer, ForeignKey('pubsub_endpoint.id', ondelete='CASCADE'), nullable=False)
endpoint = relationship(PubSubEndpoint,
backref=backref('pubsub_endp_q_list', order_by=id, cascade='all, delete, delete-orphan'))
topic_id = Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=False)
topic = relationship(PubSubTopic, backref=backref('pubsub_endp_q_list', order_by=id, cascade='all, delete, delete-orphan'))
sub_key = Column(String(200), ForeignKey('pubsub_sub.sub_key', ondelete='CASCADE'), nullable=False)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('pubsub_endpoint_queues', order_by=id, cascade='all, delete, delete-orphan'))
queue_name = None # Not used by DB
# ################################################################################################################################
class PubSubEndpointQueueInteraction(Base):
""" A series of interactions with a message queue's endpoint.
"""
__tablename__ = 'pubsub_endp_msg_q_inter'
__table_args__ = (
Index('pubsb_enms_qi_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_enms_qi_endptp_idx', 'cluster_id', 'queue_id', unique=False),
{})
id = Column(Integer, Sequence('pubsub_msg_seq'), primary_key=True)
entry_timestamp = Column(Numeric(20, 7, asdecimal=False), nullable=False) # When the row was created
inter_type = Column(String(200), nullable=False)
inter_details = Column(Text, nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
queue_id = Column(Integer, ForeignKey('pubsub_endp_msg_queue.id', ondelete='CASCADE'), nullable=False)
queue = relationship(
PubSubEndpointEnqueuedMessage, backref=backref(
'pubsub_endpoint_queue_interactions', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(
Cluster, backref=backref('pubsub_endpoint_queue_interactions', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class PubSubChannel(Base):
""" An N:N mapping between arbitrary channels and topics to which their messages should be sent.
"""
__tablename__ = 'pubsub_channel'
__table_args__ = (
UniqueConstraint('cluster_id', 'conn_id', 'conn_type', 'topic_id'),
{})
id = Column(Integer, Sequence('pubsub_channel_seq'), primary_key=True)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False)
conn_id = Column(String(100), nullable=False)
conn_type = Column(String(100), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
topic_id = Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=False)
topic = relationship(
PubSubTopic, backref=backref('pubsub_channel_list', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('pubsub_channel_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class SMSTwilio(Base):
""" Outgoing SMS connections with Twilio.
"""
__tablename__ = 'sms_twilio'
__table_args__ = (
UniqueConstraint('name', 'cluster_id'),
{})
id = Column(Integer, Sequence('sms_twilio_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False, default=False)
account_sid = Column(String(200), nullable=False)
auth_token = Column(String(200), nullable=False)
default_from = Column(String(200), nullable=True)
default_to = Column(String(200), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('sms_twilio_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericObject(Base):
""" A generic data object.
"""
__tablename__ = 'generic_object'
__table_args__ = (
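# Note that mysql_length below caps each indexed Text column at 191 characters -
# with MySQL's utf8mb4 (4 bytes per character) that is 764 bytes, which fits in
# InnoDB's 767-byte index prefix limit on older MySQL versions.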
Index('gen_obj_uq_name_type', 'name', 'type_', 'cluster_id', unique=True,
mysql_length={'name':191, 'type_':191}),
Index('gen_obj_par_id', 'cluster_id', 'parent_id', 'parent_type', unique=False,
mysql_length={'parent_id':191, 'parent_type':191}),
Index('gen_obj_cat_id', 'cluster_id', 'category_id', unique=False,
mysql_length={'category_id':191}),
Index('gen_obj_cat_subcat_id', 'cluster_id', 'category_id', 'subcategory_id', unique=False,
mysql_length={'category_id':191, 'subcategory_id':191}),
Index('gen_obj_cat_name', 'cluster_id', 'category_name', unique=False,
mysql_length={'category_name':191}),
Index('gen_obj_cat_subc_name', 'cluster_id', 'category_name', 'subcategory_name', unique=False,
mysql_length={'category_name':191, 'subcategory_name':191}),
Index('gen_obj_par_obj_id', 'cluster_id', 'parent_object_id', unique=False),
{})
id = Column(Integer, Sequence('generic_object_seq'), primary_key=True)
name = Column(Text(191), nullable=False)
type_ = Column(Text(191), nullable=False)
subtype = Column(Text(191), nullable=True)
category_id = Column(Text(191), nullable=True)
subcategory_id = Column(Text(191), nullable=True)
creation_time = Column(DateTime, nullable=False)
last_modified = Column(DateTime, nullable=False)
category_name = Column(Text(191), nullable=True)
subcategory_name = Column(Text(191), nullable=True)
# This references back to generic objects
parent_object_id = Column(Integer, nullable=True)
# This may reference objects other than the current model
parent_id = Column(Text(191), nullable=True)
parent_type = Column(Text(191), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
generic_conn_def_id = Column(Integer, ForeignKey('generic_conn_def.id', ondelete='CASCADE'), nullable=True)
generic_conn_def_sec_id = Column(Integer, ForeignKey('generic_conn_def_sec.id', ondelete='CASCADE'), nullable=True)
generic_conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=True)
generic_conn_sec_id = Column(Integer, ForeignKey('generic_conn_sec.id', ondelete='CASCADE'), nullable=True)
generic_conn_client_id = Column(Integer, ForeignKey('generic_conn_client.id', ondelete='CASCADE'), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_object_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConnDef(Base):
""" Generic connection definitions - with details kept in JSON.
"""
__tablename__ = 'generic_conn_def'
__table_args__ = (
UniqueConstraint('name', 'type_', 'cluster_id'),
{})
id = Column(Integer, Sequence('generic_conn_def_seq'), primary_key=True)
name = Column(String(200), nullable=False)
type_ = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False, default=False)
cache_expiry = Column(Integer, nullable=True, default=0)
address = Column(Text(), nullable=True)
port = Column(Integer, nullable=True)
timeout = Column(Integer, nullable=True)
data_format = Column(String(60), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# Both are needed because some connections can be duplex
is_channel = Column(Boolean(), nullable=False)
is_outconn = Column(Boolean(), nullable=False)
version = Column(String(200), nullable=True)
extra = Column(Text(), nullable=True)
pool_size = Column(Integer(), nullable=False)
# This can be used if only one security definition should be assigned to the object
username = Column(String(1000), nullable=True)
username_type = Column(String(45), nullable=True)
secret = Column(String(1000), nullable=True)
secret_type = Column(String(45), nullable=True)
# Is RBAC enabled for the object
sec_use_rbac = Column(Boolean(), nullable=False, default=False)
cache_id = Column(Integer, ForeignKey('cache.id', ondelete='CASCADE'), nullable=True)
cache = relationship('Cache', backref=backref('generic_conn_def_list', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_conn_def_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConnDefSec(Base):
""" N:N security mappings for generic connection definitions.
"""
__tablename__ = 'generic_conn_def_sec'
__table_args__ = (
UniqueConstraint('conn_def_id', 'sec_base_id', 'cluster_id'),
{})
id = Column(Integer, Sequence('generic_conn_def_sec_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
conn_def_id = Column(Integer, ForeignKey('generic_conn_def.id', ondelete='CASCADE'), nullable=False)
conn_def = relationship(GenericConnDef, backref=backref('generic_conn_def_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
sec_base_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=False)
sec_base = relationship(SecurityBase, backref=backref('generic_conn_def_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_conn_def_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConn(Base):
""" Generic connections - with details kept in JSON.
"""
__tablename__ = 'generic_conn'
__table_args__ = (
UniqueConstraint('name', 'type_', 'cluster_id'),
{})
id = Column(Integer, Sequence('generic_conn_def_seq'), primary_key=True)
name = Column(String(200), nullable=False)
type_ = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False, default=False)
cache_expiry = Column(Integer, nullable=True, default=0)
address = Column(Text(), nullable=True)
port = Column(Integer, nullable=True)
timeout = Column(Integer, nullable=True)
data_format = Column(String(60), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# Both are needed because some connections can be duplex
is_channel = Column(Boolean(), nullable=False)
is_outconn = Column(Boolean(), nullable=False)
version = Column(String(200), nullable=True)
extra = Column(Text(), nullable=True)
pool_size = Column(Integer(), nullable=False)
# This can be used if only one security definition should be assigned to the object
username = Column(String(1000), nullable=True)
username_type = Column(String(45), nullable=True)
secret = Column(String(1000), nullable=True)
secret_type = Column(String(45), nullable=True)
# Is RBAC enabled for the object
sec_use_rbac = Column(Boolean(), nullable=False, default=False)
# Some connections will have a connection definition assigned
conn_def_id = Column(Integer, ForeignKey('generic_conn_def.id', ondelete='CASCADE'), nullable=True)
conn_def = relationship(GenericConnDef, backref=backref('generic_conn_def_list',
order_by=id, cascade='all, delete, delete-orphan'))
cache_id = Column(Integer, ForeignKey('cache.id', ondelete='CASCADE'), nullable=True)
cache = relationship('Cache', backref=backref('generic_conn_list', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_conn_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConnSec(Base):
""" N:N security mappings for generic connections.
"""
__tablename__ = 'generic_conn_sec'
__table_args__ = (
UniqueConstraint('conn_id', 'sec_base_id', 'cluster_id'),
{})
id = Column(Integer, Sequence('generic_conn_sec_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=False)
conn = relationship(GenericConn, backref=backref('generic_conn_list', order_by=id,
cascade='all, delete, delete-orphan'))
sec_base_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=False)
sec_base = relationship(SecurityBase, backref=backref('generic_conn_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_conn_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConnClient(Base):
""" A live client connection.
"""
__tablename__ = 'generic_conn_client'
__table_args__ = (
Index('gen_conn_cli_idx', 'cluster_id', 'pub_client_id', unique=False),
Index('gen_conn_cli_ext_n_idx', 'cluster_id', 'ext_client_name', unique=False),
Index('gen_conn_cli_ext_i_idx', 'cluster_id', 'ext_client_id', unique=False),
Index('gen_conn_cli_pr_addr_idx', 'cluster_id', 'peer_address', unique=False),
Index('gen_conn_cli_pr_fqdn_idx', 'cluster_id', 'peer_fqdn', unique=False),
{})
# This ID is for SQL
id = Column(Integer, Sequence('generic_conn_client_seq'), primary_key=True)
is_internal = Column(Boolean(), nullable=False)
# This one is assigned by Zato
pub_client_id = Column(String(200), nullable=False)
# These are assigned by clients themselves
ext_client_id = Column(String(200), nullable=False)
ext_client_name = Column(String(200), nullable=True)
local_address = Column(String(400), nullable=False)
peer_address = Column(String(400), nullable=False)
peer_fqdn = Column(String(400), nullable=False)
connection_time = Column(DateTime, nullable=False)
last_seen = Column(DateTime, nullable=False)
server_proc_pid = Column(Integer, nullable=True)
server_name = Column(String(200), nullable=True) # References server.name
conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=False)
conn = relationship(
GenericConn, backref=backref('clients', order_by=local_address, cascade='all, delete, delete-orphan'))
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=True)
server = relationship(
Server, backref=backref('gen_conn_clients', order_by=local_address, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(
Cluster, backref=backref('gen_conn_clients', order_by=last_seen, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RateLimitState(Base):
""" Rate limiting persistent storage for exact definitions.
"""
__tablename__ = 'rate_limit_state'
__table_args__ = (
Index('rate_lim_obj_idx', 'object_type', 'object_id', 'period', 'last_network', unique=True,
mysql_length={'object_type':191, 'object_id':191, 'period':191, 'last_network':191}),
{})
id = Column(Integer(), Sequence('rate_limit_state_seq'), primary_key=True)
object_type = Column(Text(191), nullable=False)
object_id = Column(Text(191), nullable=False)
period = Column(Text(), nullable=False)
requests = Column(Integer(), nullable=False, server_default='0')
last_cid = Column(Text(), nullable=False)
last_request_time_utc = Column(DateTime(), nullable=False)
last_from = Column(Text(), nullable=False)
last_network = Column(Text(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rate_limit_state_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/odb/model/__init__.py | __init__.py |
# stdlib
from typing import Optional as optional
# Zato
from zato.common.typing_ import dataclass
# ################################################################################################################################
# ################################################################################################################################
class Default:
# This is relative to server.conf's main.work_dir
fs_data_path = 'events'
# Sync database to disk once in that many events ..
sync_threshold = 30_000
# .. or once in that many seconds.
sync_interval = 30
# ################################################################################################################################
# ################################################################################################################################
class EventInfo:
class EventType:
service_request = 1_000_000
service_response = 1_000_001
class ObjectType:
service = 2_000_000
# ################################################################################################################################
# ################################################################################################################################
# All event actions possible
class Action:
Ping = b'01'
PingReply = b'02'
Push = b'03'
GetTable = b'04'
GetTableReply = b'05'
SyncState = b'06'
LenAction = len(Ping)
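# On the wire, each frame is a two-byte action code followed by an optional payload
# and a terminating b'\n' byte - see Client.send in zato.common.events.client.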
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class PushCtx:
id: str
cid: str
timestamp: str
event_type: int
source_type: optional[str] = None
source_id: optional[str] = None
object_type: int
object_id: str
recipient_type: optional[str] = None
recipient_id: optional[str] = None
total_time_ms: int
def __hash__(self):
return hash(self.id)
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/events/common.py | common.py |
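# A minimal usage sketch - because PushCtx is declared with init=False, its attributes
# are assigned one by one after instantiation. All values below are made up for
# illustration only.
if __name__ == '__main__':

    from uuid import uuid4

    ctx = PushCtx()
    ctx.id = uuid4().hex
    ctx.cid = uuid4().hex
    ctx.timestamp = '2021-01-01T00:00:00'
    ctx.event_type = EventInfo.EventType.service_request
    ctx.object_type = EventInfo.ObjectType.service
    ctx.object_id = '123'
    ctx.total_time_ms = 7

    print(ctx)

# ################################################################################################################################
# ################################################################################################################################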
# stdlib
import socket
from datetime import datetime
from logging import getLogger
# gevent
from gevent import sleep
from gevent.lock import RLock
# orjson
from orjson import dumps
# simdjson
from simdjson import loads
# Zato
from zato.common.events.common import Action
from zato.common.typing_ import asdict
from zato.common.util.api import new_cid
from zato.common.util.tcp import read_from_socket, SocketReaderCtx, wait_until_port_taken
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.events.common import PushCtx
PushCtx = PushCtx
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
utcnow = datetime.utcnow
# ################################################################################################################################
# ################################################################################################################################
class Client:
def __init__(self, host, port):
# type: (str, int) -> None
self.host = host
self.port = port
self.remote_addr_str = '{}:{}'.format(self.host, self.port)
self.socket = None # type: socket.socket
self.peer_name = '<Client-peer_name-default>'
self.peer_name_str = '<Client-peer_name_str-default>'
self.conn_id = 'zstrcl' + new_cid(bytes=4)
self.max_wait_time = 30
self.max_msg_size = 30_000_000
self.read_buffer_size = 30_000_000
self.recv_timeout = 30
self.should_log_messages = False
self.is_connected = False
self.lock = RLock()
# ################################################################################################################################
def connect(self):
# For later use
start = utcnow()
with self.lock:
if self.is_connected:
return
self.socket = socket.socket(type=socket.SOCK_STREAM)
while not self.is_connected:
logger.info('Connecting to %s', self.remote_addr_str)
try:
self.socket.connect((self.host, self.port))
self.peer_name = self.socket.getpeername()
self.peer_name_str = '{}:{}'.format(*self.peer_name)
except Exception as e:
logger.info('Connection error `%s` (%s) -> %s', e.args, utcnow() - start, self.remote_addr_str)
sleep(1)
else:
logger.info('Connected to %s after %s', self.remote_addr_str, utcnow() - start)
self.is_connected = True
# ################################################################################################################################
def send(self, action, data=b''):
# type: (bytes, bytes) -> None
with self.lock:
try:
self.socket.sendall(action + data + b'\n')
except Exception as e:
self.is_connected = False
logger.warning('Socket send error `%s` -> %s', e.args, self.remote_addr_str)
self.close()
self.connect()
# ################################################################################################################################
def read(self):
# type: () -> bytes
with self.lock:
# Build a receive context ..
ctx = SocketReaderCtx(
self.conn_id,
self.socket,
self.max_wait_time,
self.max_msg_size,
self.read_buffer_size,
self.recv_timeout,
self.should_log_messages
)
# .. wait for the reply and return it.
return read_from_socket(ctx)
# ################################################################################################################################
def ping(self):
logger.info('Pinging %s (%s)', self.peer_name_str, self.conn_id)
# Send the ping message ..
self.send(Action.Ping)
# .. wait for the reply ..
response = self.read()
# .. and raise an exception in case of any error.
if response and response != Action.PingReply:
raise ValueError('Unexpected response received from `{}` -> `{}`'.format(self.peer_name, response))
# ################################################################################################################################
def push(self, ctx):
# type: (PushCtx) -> None
# Serialise the context to dict ..
data = asdict(ctx)
# .. now to JSON ..
data = dumps(data)
# .. and send it across (there will be no response).
self.send(Action.Push, data)
# ################################################################################################################################
def get_table(self):
# Request the tabulated data ..
self.send(Action.GetTable)
# .. wait for the reply ..
response = self.read()
# .. and raise an exception in case of any error.
if response and (not response.startswith(Action.GetTableReply)):
raise ValueError('Unexpected response received from `{}` -> `{}`'.format(self.peer_name, response))
table = response[Action.LenAction:]
return loads(table) if table else None
# ################################################################################################################################
def sync_state(self):
# Request that the database sync its state with persistent storage ..
self.send(Action.SyncState)
# .. wait for the reply
self.read()
# ################################################################################################################################
def close(self):
self.socket.close()
# ################################################################################################################################
def run(self):
# Make sure that we have a port to connect to ..
wait_until_port_taken(self.port, 5)
# .. do connect now ..
self.connect()
# .. and ping the remote end to confirm that we have connectivity.
self.ping()
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/events/client.py | client.py |
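# A minimal usage sketch, assuming an events server is already listening at the address
# below - the host and port are made up for illustration only.
if __name__ == '__main__':

    client = Client('127.0.0.1', 34567)

    # Wait until the port is taken, connect and ping the remote end
    client.run()

    # Request the current tabulated statistics, if any are available
    table = client.get_table()
    print(table)

    client.close()

# ################################################################################################################################
# ################################################################################################################################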
# stdlib
from tempfile import gettempdir
from threading import Thread
# pyftpdlib
from pyftpdlib.authorizers import DummyAuthorizer as _DummyAuthorizer
from pyftpdlib.handlers import FTPHandler as _FTPHandler
from pyftpdlib.servers import FTPServer as _ImplFTPServer
# ################################################################################################################################
# ################################################################################################################################
class config:
port = 11021
username = '111'
password = '222'
directory = gettempdir()
# ################################################################################################################################
# ################################################################################################################################
def create_ftp_server():
# type: () -> _ImplFTPServer
authorizer = _DummyAuthorizer()
authorizer.add_user(config.username, config.password, config.directory, 'elradfmwMT')
handler = _FTPHandler
handler.authorizer = authorizer
handler.banner = 'Welcome to Zato'
handler.log_prefix = '[%(username)s]@%(remote_ip)s'
address = ('', config.port)
server = _ImplFTPServer(address, handler)
server.max_cons = 10
server.max_cons_per_ip = 10
return server
# ################################################################################################################################
# ################################################################################################################################
class FTPServer(Thread):
def __init__(self):
self.impl = create_ftp_server()
Thread.__init__(self, target=self.impl.serve_forever)
self.setDaemon(True)
def stop(self):
self.impl.close_all()
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/test/ftp.py | ftp.py |
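# A minimal usage sketch - the server runs in a daemon thread, serving config.directory
# with the test-only credentials defined in the config class above.
if __name__ == '__main__':

    server = FTPServer()
    server.start()

    # Test code could now connect to ftp://111:222@localhost:11021

    server.stop()

# ################################################################################################################################
# ################################################################################################################################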
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import socket
import ssl
from http.client import OK
from tempfile import NamedTemporaryFile
from threading import Thread
from http.server import BaseHTTPRequestHandler, HTTPServer
# psutil
import psutil
# Python 2/3 compatibility
from past.builtins import xrange
# Zato
from zato.common.api import ZATO_OK
from zato.common.test.tls_material import ca_cert, server1_cert, server1_key
def get_free_port(start=20001, end=50000):
taken = []
for c in psutil.net_connections(kind='inet'):
if c.status == psutil.CONN_LISTEN:
taken.append(c.laddr[1])
for port in xrange(start, end):
if port not in taken:
return port
class _HTTPHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(OK)
self.send_header('Content-Type', 'application/json')
self.end_headers()
self.wfile.write(b'\n')
self.wfile.write('"{}"'.format(ZATO_OK).encode('utf8'))
do_DELETE = do_OPTIONS = do_POST = do_PUT = do_PATCH = do_GET
def log_message(self, *ignored_args, **ignored_kwargs):
pass # Base class logs to stderr and we want to silence it
class _TLSServer(HTTPServer):
def __init__(self, cert_reqs, ca_cert):
self.port = get_free_port()
self.cert_reqs = cert_reqs
self.ca_cert = ca_cert
HTTPServer.__init__(self, ('0.0.0.0', self.port), _HTTPHandler)
def server_bind(self):
with NamedTemporaryFile(prefix='zato-tls', delete=False) as server1_key_tf:
server1_key_tf.write(server1_key)
server1_key_tf.flush()
with NamedTemporaryFile(prefix='zato-tls', delete=False) as server1_cert_tf:
server1_cert_tf.write(server1_cert)
server1_cert_tf.flush()
with NamedTemporaryFile(prefix='zato-tls', delete=False) as ca_cert_tf:
ca_cert_tf.write(ca_cert)
ca_cert_tf.flush()
self.socket = ssl.wrap_socket(
self.socket, server1_key_tf.name, server1_cert_tf.name, True, self.cert_reqs, ca_certs=ca_cert_tf.name)
if self.allow_reuse_address:
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
self.socket.bind(self.server_address)
self.server_address = self.socket.getsockname()
class TLSServer(Thread):
def __init__(self, cert_reqs=ssl.CERT_NONE, ca_cert=None):
Thread.__init__(self)
self.setDaemon(True)
self.server = None
self.cert_reqs = cert_reqs
self.ca_cert = ca_cert
def get_port(self):
return self.server.port
def stop(self):
self.server.server_close()
def run(self):
self.server = _TLSServer(self.cert_reqs, self.ca_cert)
self.server.serve_forever() | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/test/tls.py | tls.py |
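# A minimal usage sketch - the server picks a free port itself, which is why tests read
# it back through .get_port once the background thread has had a moment to start up.
if __name__ == '__main__':

    from time import sleep

    server = TLSServer()
    server.start()

    sleep(0.2) # Give the background thread time to bind its socket

    print('Listening on port {}'.format(server.get_port()))
    server.stop()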
from __future__ import absolute_import, division, print_function, unicode_literals
# # ##############################################################################################################################
#
# ** WARNING ** WARNING ** WARNING **
#
# Crypto material below is not safe for use outside of Zato's own unittests. Don't use it anywhere else.
#
# # ##############################################################################################################################
ca_key = """
-----BEGIN RSA PRIVATE KEY-----
MIIJKAIBAAKCAgEAvx4Np3z+u6MJkXfqRby7nNk5ucqDFHY0ZB4Tj+0xM1AKQP80
4YtPAkTrGSnpjJqGl8LlG+NYy8WWrYggObuwXpgkcPjG2TkzlCDXW2gnzFUuI/iM
StTl7dZMH6/MG89eTnWeruglkH4Dp3kx+MfkpFLcnPr2IkL4/drfZsFXhYMdLTj/
zkq5mkx32kzh77AIdlO2COZ3qTIF9LtZS9X6RwXSowWkT+KPCortI79mjJyiQ2K+
H18N75adcdWmDVHQL+HlEVee1NR2THRVaYsf9yGFcjD0EOJPZsv2GGKzIR0eiQOa
4nK5ZS40uqoGsB7hj+j3aC+2QncXNYPm9Rzjp/NQBG5RCczFefJ6X5Fu1VTSf4AX
b9Qln4UsWHbqbkHpuFivVtgg8yyjQTJmqZme62xdyZv+B58pXbPQf7H/eVHxJaO2
LFV0tIrYNIc9VDFiRPmUjGIXb2zX5X5p2vcy5/osmd4UJQA+cJLSOsEbPntfDDaN
zKEzTMH4dI+5qO/8raoIhTSv7CbvUzI6sb3C8NltGOs+l24UhI/apjDJs3YAwiq8
PZomOgXSCHqVYkGE9dUK9x5SRNH8dLXOv3TFoqxvWBvZ0moaRIiaUWNEyKlyUB3/
ePXZSOvJT+4bwB9j/eFFzxseaC1TSNlKHz0SLkGqW2rfV6TZYiUtq0RQOVsCAwEA
AQKCAgEAkBIbwPLdJJ3XDK4VENUhqS+n8ILVJYumGwMBRDJOMJxZ3n2EY7IcsV06
zRFnWfLzG1x0Uf2kZphg6hgAEwWGAwk2D2O6ou4YD8ZiEioGNmbQDZXETHUJj61I
XWqstxovwX2xTbD7AF2+a3VVUnF9ztIYNM6K1XEfOl7QoimFzMP2Lq0VSXHTUJns
j8f9Wi6dcnXQeA0kj4uCKedBfYWk0f11uCb8oqvroMrx0UzsBXveZmX9ZLDHVKF5
tuKT9t6Bzla/0771oQM42pGoAZQ7WJUQf/CfTEsOCDQhJGjjGEdXSXpKPAK396pJ
XZ3mxMXCzDWWrBerkZctC86PQJ+yjQ2viLsLaF/pHMe4g6vn8yqalJDOOzpal+Jx
XFAD10oslzzfBrSaL5kl8Gem/qitFAO032hPW3lUVRgsSJ9ilHYqdIrERqROeDED
yVntTTnqCjyNaHhkZl483z+wWam7skGp6G/OWI8ZveSMKRj2g+8mFcEv1ocJ+Z8h
gAS4YLQqWhtWr2zFZ6YK/Vd3PmNwyaFjZIQ5vpOESyAiQzo/SXj4MQ4FFCLUOEH7
z39ZL6GmWwSEgOBq850yPAGGflcR7dwTDIZTvffZ81wpup9IJaaginkkoan6GT7t
LCtcDqXJpoNhA/lLLQVD2E6QQE3YM5ztkFvqqhgLRMr4f7JU5IECggEBAPdYD1Lw
FukDO1wuGpvZygyum3q8CFhucRPy0PFICYRuyx3ZeZC8Y1s3Dydl1QmDX//9a8+e
E3ae3/zbbS8tLjkNpHPTUkXrRsZ3f4Pkju2efkpvdLxCBXScGhRNE4jv0d/6gniM
7EgvvutELoxBRE3GFiORhSpa+vWOdMD/aKb7uJ6QNmzpfVHzPo9KfQqDHf4cn3wr
Kd8AYpGXn8n0xEsAZMVtrpxRHII3kigCw/9N6GX+jeCPP6IiaoeSWYfC8Iza6YNI
St5XDpI8bFs5MPIV8rlM+6IJoz+3z5nh/wb92h48N0znsLWUqR0bciAP1vmSJMSw
MTLJrwMwhlobyrECggEBAMXOQ0atRNPKjW21JsdM9cuaWaZ0PC5vhC9OYyONlk8R
Ve91xqBJXeklrIgda/SCyFYMfRHBNM1yT42JmS0ubQtWu6wpUQfx6U2XT4ZEEZCq
fQG5LpVUlLdHZN/yp5VIuWITh2BFFGB+jYPvZOmX76kuuvbfDjOACh5aSuPjSIgf
X22WeNah06a6m2Qh27nIWOh4glk3xMrnHuHGj/GgvrTucjcIcs2elkzM92T0P3rU
wuJlp5VgQXCSoPikvShvArh1PBO042kQ28SYbya/mjW47RspiAJQQzvm1DVsi8FI
FXm/vu0fSHjWs18ypBYQHeyJeu/qWLxxlt7Dp3sQL8sCggEAHjY2YPYMhlert322
KFU41cW6Hgq7pjmPPFWLaf1XlEKIMtQughxQsoKOKkzI8cmHP1hwA8MWM4YCa/tN
YdbN75AYB0HHqdysH2/XNoADaUjTujnU823JBs5ObS5g9Xf9lbMenqTv831Jf6kr
WlxagHlymNOchWjpgHbvEefgm4zhpxSMYU8/zHO+r3f0wAT18+UBIgSPr7p3T7tK
fDuWgmbA6FCWZGeP6OPqyVJVKGkWuuaV49j7d81mX7rjjq6j/UB8B1ocMv5FPF1/
CsF4lglSRYn+rnMo6o6EIBK3uN3m94x5YL5oGjXXVkPU88+bfY55SUEQMVjrNKOH
tZfxcQKCAQAmdIwlwGfCGP3X10D7vB2JAK/vKWfNy0ZSgBXMAqm3I3KmhCoiXUER
o45gRAAJ4Ccce38RJZOjYVbP+HE8FGuEqc8AkGO9fK1TtVfzjWYwzsRQwnSo+XGU
FCArXZxw7FuGEq/d6nAktlXC0Za3xx8DsB8PAZxcLMdK0Vj/5t7h/99oibliWMGy
B1NQazixbJ7ESzFkMPBkVfxt/lFbs1mACV9RDaZsDSnBMpPiH437zkM5CnRDGRx/
yzHaRQS1SKepvrj4R9FySqG/Hbd2PAe57ALEphVYBcycZ6rX3Atrfx0Vt05iARPw
0iS7HDhERcvbgXrSC6hGsnqXQkhcJ3BzAoIBAEd3ZQEWhPu/ChO8AUbtXK3p+G8n
s6C7lv8eh+S39+rWYNeuzyvXfVtHSseWG7TWDnDne3WmbQr4nlsLCcsp4wZYxTB+
ysfQnv1qXOQeR4FGGrJ2x9co2rXuIqLiECWY+4IAo5vsjMVi8ZZ4Alj1l78Lg02W
WYI7lUgWGFaz6QfMZBCQ08Xnys6ueRZuR8SvI2SvStvehVeCpqOHP8uLxjBkLmSA
uosil5GtOP9pgnn+W1tkscTSsTIgsCF3i9qDD7XYdtEDZel80ugDn3SUYbkLRgpi
q39wvU1nNWuOlUvW4Eg0ofYIWdgffJvRGLJNJ6+KhBovnkA54JJg1Stwokc=
-----END RSA PRIVATE KEY-----
""".strip()
ca_cert = """
-----BEGIN CERTIFICATE-----
MIIFoDCCA4igAwIBAgIJAMpUuR9ijhIRMA0GCSqGSIb3DQEBBQUAMBsxCzAJBgNV
BAYTAkFVMQwwCgYDVQQDEwNDQTIwHhcNMTQwNzIwMTgyMTU2WhcNMjQwNzE3MTgy
MTU2WjAbMQswCQYDVQQGEwJBVTEMMAoGA1UEAxMDQ0EyMIICIjANBgkqhkiG9w0B
AQEFAAOCAg8AMIICCgKCAgEAnMEaU26+UqOtQkiDkCiJdfB/Pv4sL7yef3iE9Taf
bpuTPdheqzkeR9NHxklyjKMjrAlVrIDu1D4ZboIDmgcq1Go4OCWhTwriFrATJYlp
LZhOlzd5/hC0SCJ1HljR4/mOWsVj/KanftMYzSNADjQ0cxVtPguj/H8Y7CDlQxQ4
d6I1+JPGCUIwG3HfSwC5Lxqp/QLUC6OuKqatwDetaE7+t9Ei6LXrFvOg6rPb4cuQ
jymzWnql0Q1NEOGyifbhXaQgO6mM5DaT/q3XtakqviUZDLbIo4IWJAmvlB8tbcbP
wzku+6jEBhkdTAzAb6K6evTK4wUUSrHTE6vF/PHq5+KLrGReX/NrCgdTH/LB/Aux
817IF2St4ohiI8XVtWoC/Ye94c1ju/LBWIFPZAxFoNJJ5zvlLwJN8/o1wuIVNQ3p
4FWTXVArmSOGEmQL48UTUFq/VKJeoDstUoyIsKnBn4uRMcYPIsMh1VF6Heayq1T9
eO2Uwkw75IZVLVA9WaXnCIc07peDREFbyWtyKzpDa2Bh8bLVQ/tyB+sBJkO2lGPb
PMRZl50IhdD7JENNfTG89LCBNioPDNQXN9q3XQYSZgQ9H70Zp+Y3/ipXvIAelPwq
Uyg7YoIjOTqFF25g2c/XSrwSpKCr22lb1vkCLUT7pA0tslMVdULo1FkkkfIDDiHs
FC8CAwEAAaOB5jCB4zAdBgNVHQ4EFgQUmh+yIUO2PG/fMMMjXjestsQPg48wSwYD
VR0jBEQwQoAUmh+yIUO2PG/fMMMjXjestsQPg4+hH6QdMBsxCzAJBgNVBAYTAkFV
MQwwCgYDVQQDEwNDQTKCCQDKVLkfYo4SETAPBgNVHRMBAf8EBTADAQH/MBEGCWCG
SAGG+EIBAQQEAwIBBjAJBgNVHRIEAjAAMCsGCWCGSAGG+EIBDQQeFhxUaW55Q0Eg
R2VuZXJhdGVkIENlcnRpZmljYXRlMAkGA1UdEQQCMAAwDgYDVR0PAQH/BAQDAgEG
MA0GCSqGSIb3DQEBBQUAA4ICAQBJsjzUBFUOpuOGz3n2MwcH8IaDtHyKNTsvhuT8
rS2zVpnexUdRgMzdG0K0mgfKLc+t/oEt8oe08ZtRnpj1FVJLvz68cPbEBxsqkTWi
Kf65vtTtoZidVBnpIC4Tq7Kx0XQXg8h+3iykqFF6ObqxZix/V9hs3QDRnTNiWGE7
thGCAWWVy1r56nkS91uhQhSWt471FevmdxOdf7+4Df8OsQGcPF6sH/TQcOVgDc20
EiapNMpRxQmhyOI7HBZdYGmHM6okGTf/mtUFhBLKDfdLfBHoGhUINiv939O6M6X3
LFserZ9DEd9IIOTsvYQyWhJDijekEtvBfehwp1NjQcity/l/pwUajw/NUok56Dj7
jHBjHJSSgb5xJ9EMrtJ2Qm2a5pUZXwF2cJIxBjQR5bufJpgiYPRjzxbncStuibps
JjSGwiGvoyGbg2xLw7sSI7C2G9KGMtwbS4Di1/e0M1WfFg/ibT3Z1VhqtEL6Yr+m
CG6rI1BBiPfJqqeryLg8q9a4CQFA+vhXSzvly/pT7jZcLyXc/6pCU6GqjFZDaiGI
sBQseOvrJQ1CouAMnwc9Z8vxOOThqtMTZsGGawi+16+5NmpLwW53V/wtHUZuk39F
29ICmBRa3wrCyhNMb+AFhaPjO34jtRGqeOJA98eS29GooycDnh/Uwi3txZu6DNmZ
NVRV1A==
-----END CERTIFICATE-----
""".strip()
ca_cert_invalid = """
-----BEGIN CERTIFICATE-----
MIIF4TCCA8mgAwIBAgIJAOnUIE1WsqOoMA0GCSqGSIb3DQEBBQUAMDAxCzAJBgNV
BAYTAkRFMSEwHwYDVQQDExh6YXRvLnVuaXR0ZXN0LmNhLmludmFsaWQwIBcNMTQw
ODAxMTYyNTMwWhgPMjExNDA3MDgxNjI1MzBaMDAxCzAJBgNVBAYTAkRFMSEwHwYD
VQQDExh6YXRvLnVuaXR0ZXN0LmNhLmludmFsaWQwggIiMA0GCSqGSIb3DQEBAQUA
A4ICDwAwggIKAoICAQCevYnCOWEf3ez1utRrUuoBDxRI8VhokIg+q6QcUQyuoTsg
ofxgVTnJC9rO/S3xXRUN0cfAbA3wzPvctTvRCcZP+3KZvL58mOfGK6GTIq2Fe2LW
tD7YPIaQRsWCWTTy/jKr9CLRqyJ+TVQLjU/CG4MCyUZ/I9+XATPMLy5ew8l24G99
Q1hYk0aB2jEtOGFV3zH4JHD2SlDgrZozcVIkVSRUPMVL8tqNZpLwohV8D4mr58ZB
0ll8SnnT4nZAGb4pgOEUgjials38gBHp3PhNhLG1BN6MdZGDqHjpI3n8T9VX3uhm
wv6nYeKG8/SqqjKgq30pEqH/gGjOBAqjdAOi7DTbsq+n6Xk0bDWEUGJG+2D8Odfu
AntUm1xpfEEKABQ/JO91HdMIi6bU+Rp30cAxBJqFl3GJt2ypADqh+h3q2vWbZtR1
XgW3j/GzhxzzgGfJ0bqZmDq/bOlcm1zbB43jiUdjay3C+HKUDmnYEkKY0+Ar9gXm
QKBgFYEnstNt2ceJiMXhrInFMMLdmHnuiQsGYHPXUvQJQqWcr1a8BSP11AXqf55p
wyONLNcKsPIqS8q0OOK89CLzsqUso7tpDYFy237nOKE9ZBMn8NtlRd9UfCLQPC6p
5lFo3/QZsuucVmKZzD2iSSIeCeTDzZsozycOkj/Cr5m4V1S4TmBQl0eA4lIlWQID
AQABo4H7MIH4MB0GA1UdDgQWBBRU926sfA4IdgQogtv3jPjcj6dYBTBgBgNVHSME
WTBXgBRU926sfA4IdgQogtv3jPjcj6dYBaE0pDIwMDELMAkGA1UEBhMCREUxITAf
BgNVBAMTGHphdG8udW5pdHRlc3QuY2EuaW52YWxpZIIJAOnUIE1WsqOoMA8GA1Ud
EwEB/wQFMAMBAf8wEQYJYIZIAYb4QgEBBAQDAgEGMAkGA1UdEgQCMAAwKwYJYIZI
AYb4QgENBB4WHFRpbnlDQSBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwCQYDVR0RBAIw
ADAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggIBAAQY6PF59X5eZ0ju
1J3KwqD41AZPymcNUAn423vQjwyN6h/gavyjyO/a9ZRw/Duk0AZ9Oca7U2ufOG5c
QtDksi2Ukk5hQNBo/UDqguPM/+buvNtGF/ibWzzIKj6YxMfFRzrTic+qAEBMli3z
UrcrgUQo//iBVx5EYMM/xZm+49Cl6wFJntzM3AL3uvphcR8vvRs9ieEpm11KtMtt
G9j/6gsOGH7INX3VRUM9MdxXF45gt/R7Xm915Juh3Qt4ZYrD+eKjuL3JypB56Tb9
7cWaLffzHKGwePYedJXczvQb5nPkgrSYN1SZQoOxaN+f3q3tkn9RcL4zsteoOHSm
PJkYTdXkUMluopXFjPOPolNKljEs8Ys0ow+6QT/PLSlGBgH7L/gUWtgxzOpd6NNK
8NES9aZtL+xpmmLkciWH/tXt9s+9+vzCUwEuXF8uvPieJgjgW6hVxFofJpyGy2Vz
ZNxG+oBSP8fXDQyNM1PFTVSdP2xVzX2VIhnZOoqUTPAbFHYlsyvXnybcourP2Jtv
Ytm+X6SGyQR4eo8wGtXHqu1H8R4/LyFuLy7Xb/ILk/Sp9F1MklNWYUA59d3PlG/a
Ds0Vj2YzSEyStP1a+HaahUZEj0+3/W/x+f8068HyWsVDGa2/9U8IZwn7+C7pK9fN
wSQh3r/cB+X3alAbvPwTlzyNXFu1
-----END CERTIFICATE-----
""".strip()
client1_key = """
-----BEGIN RSA PRIVATE KEY-----
MIIJKQIBAAKCAgEAzBsxWVTPDi8jBQFVofwMBoSdKvE+VYe+S6w+bTSUekL+pvzf
pirRGD7owGcySKgzgZ4Jj8yGEk4tjVxCwq5epEL06XLP5XMEKzk0TMYu+aINcZ2v
YCrW3Sr6/GZ9PWw3oHK2pul7g+o1sMPFtOcM1sRfVG5LdXXDXclRd5QTPO2FTrDP
cTr5LoC1jtOAJhJ0XqQd/LOV/95j4+k0ypOCCkFI5kl9caZSnaG7xMrSsssLkBrk
a99hSN4CB+1/A0vZUsPRIb4yzudlVzn/w7aWKMOQYxLrFE/NJ+4fTiJ9bBpL20jE
yiq87kCgVRbx1tVo2Rzyp+bQcDvcKZ6YXrZqj4I9s5JPiygvdnxB5dPegYBFWYkK
eosZO8gTif53vZz7yQIiYU768FYpbRW7UVWY2fk+MGBIj0hlCsclPUh66SZYiRlm
OaxaufMC4m5ofS4zJNs5HryynvnyTwxqde4iPvukPxuQKASs+z25kSWjqv8R9HqW
ct6i0GQNKO1FbcuiX8vlRjXB6bMYEJbSgccESe1yZTSIWvnw0ihFA0K+7K3NsERs
IAEdbRxREzgW6eDbTMU8wsaudQsyKHzvZD+blejkgXUEyFO554u9m8VINk/JCmdA
P95a+XumFnoNrUQ9n2c4kHfFgT6dzB5i5okXMm1GFrx7d2VLdVBCSAYKIO0CAwEA
AQKCAgEAlmu3+9j328E7ctXf8Uha6HbVia9PPCOVSBnBzCPDBTPYjuKwRLsrbE34
pMupEEj95Jm+/D5D1LvO8G49OVLepvo9msqlkrkoKl63P5mTRyB5/fCzLhGEVmz1
mgxCYoEdod7I48wQ3lA+j25Ih6D8Ik+I3iWG8SL//1997b2wS+fUpgDCcPWAbRgo
NgGDYQuavaEABJupgW+5eF8HLAB4BuzEOAuTKq3kFw353veHPoNLm0FmdGWlQdlz
77nFMH22xTtRJigRM9DvK9CvwOIQWix+fbWUkFybmsDwS1o5yvC6VPqVJVVH9eKl
BvCo/KY85j1iTAFcPkqvX/Dk5HBVqOrmx4NQU5o/9eJnSknfcGAdsWr3952wrHxa
kGjjkwsp6fBb/NkVqJuODgzSC7XwJR0D4OwnzTuzcoi2uXwjDohAJEYd6M8rITP1
6RckzXu9upM3bh4cFnv76TF9Dbca0paBb9VPeXSUZYMZazwsXYlETWDLZjhX9RLv
CA2pk1gBSorMyqx8KOLfH2Lx8ZbB9QBdqU6WAUz00cO72TiVw2dbU8Gp34BO78N2
mpahflg98WnRLQhzb6iwcCXHzfVdHUYsHcALq5vBh4RkDK74xzXp4sjE0za3BiqA
MaO+0+Tsfw7loyXMWXimXFazxD3FZ/YLWQPNlEGJMOma/94DBEECggEBAObaShP9
9RzbpiHltH6/JIOI5f61agc7vyCnHQ9TUejOsUbXrgcsWnVcqSdNa0azpGpqtwKO
S2haF+DviKF+zM6znJJ41AyBjqIyBipDAKcF8Tervq2dPP/16SEMO/D1CX0IwFUd
M2Si1eWU49bk/y7fkH5zw/0xJXLXrKyDSBTaiyRj6+KGj6h2uJPmRhStlgvuyufu
PD0TcffBOP9tx5HfkWcGmnPJrZZ+ehe4Kn5q8BR4W11V64/a03ALbx+2f6DcOU48
8m3O9tXucExjOuDUOC9JZXMQucUEtrOMADnIMLXEjYjW/VbV5jP+QYCj+Er028Ip
xoNXjSwyFgduYd0CggEBAOJXCJ0eo9EUSJgSH1ZKPyziRCmhmnEXBVwQOPOYOO73
rPHWdpjG+HUkQSWOsFxa3Yjia9r9z3DA8ynVzPnmbG8Otc4i2IN/S4ksbFFjHtjs
F0hQBFmYI21VqkUqK8iFOOwQacFmyYs8lqg7PnQhS7GoQsnbnHE0HOpe9zjQr6Fl
T5AY6bJ9cdhXPnap/2LLP08wpNcaW0XbKWRT0+hAl5WvZry3ftn7ubNstF/HAUTU
bxLBn0CYMtTg/jAGyYtj5MvNLFGUFGx3Lg51mBS3TZWstOeF/7sAD5w453VjVWKy
Qkj4OkWJRxxbB5fuJVGrqTXc/SNh/+z25iuUX0EAMlECggEAVklhRve1loPDJQhm
3rkzPLb+wKWua+W5Gstb4U6TXyFiwcf8FFJPvW5VC4u0fUjIO76HiT0GkoqaQklG
GJb8loYsD9N57vK+DYIFK+a/Z66g6t4W922+Ty3rZZ7dCMOOOF39BdNUUllK+fUc
9EXD3BFUQO+kYg7soHBc6l5nouPM/l0a3iDNsXouo5l+uFvpqawny2kQuwN5pdFj
LJYr8ipOfuPI9156s7WyjQsZVwdBlWUnQUvMMIjqXwbnEkN0kPu/r664LrMdL/lf
oC225DJujb4xXUDzLuEEKTg7HV3mVwqQnIU/TCXHVcfDVAH13I6JVZmnyZAABHT0
JvLrQQKCAQEAmiRboWU0ezctGSN+Y+28iHyvnwqHe20KIWCK6JpKa7QQ+8HqkrEu
k9hU5Zb/VGYtaQOKIGGp3EgLUfpg1e+u+RMzjWb9vM/8STcPrX2rjF98m6qiy8Fo
nxUwGFpX5v+TfHDRFP1DVKe2kmuGZOAoBJ1qnr4JFK9A4fw6sV6tvWSZgrD0trHn
zkXcLEQpwMZaHzwphrRUZIaU8daFAi67DR2fAfaVVS6xkRf+3xtQKefinQtvwTXl
qERx15NHvr4RGxpnjEckgZnIq+A56iHLnJs5uFLxjhDEkMfQGYnEpKpxqfAi/yg2
XYFA8p8kmzIk0qHlYytid6bNqfApzsKrgQKCAQAqDHO2DSVZEiqpG9ony4WcRTMY
lZ85e3S1gCWDwDHfhGBFLgg7JgmqVsM6De1s6+gcSRK8wXVJzRbF4XWkBLmXU2Nr
FS4ZCFoSPDUFrETtd7X5a6UL14gkpmFxNp3NEfIkGHFemti2U2Iv+v2E/A23sQbR
oAhWdJru5/ToQEGSS2lThIxebj8laedmKoyI2c4muxwvkB3grrSN1FNDs7bmUSTP
CKyAiZSy8T+YHPL4r9Up5M86LRbUvHmIVy7kJaYjQTGeqNJFPX0WMqb6xTm3VA7G
4Zfx4Q3uMFdRgGHQIhwIIYe14sw8ImHbAyRKuXT0Noo/ETmWCaVZzi8pil9M
-----END RSA PRIVATE KEY-----
""".strip()
client1_cert = """
-----BEGIN CERTIFICATE-----
MIIF0DCCA7igAwIBAgIBBzANBgkqhkiG9w0BAQUFADAbMQswCQYDVQQGEwJBVTEM
MAoGA1UEAxMDQ0EyMB4XDTE0MDkxMzEyNDIwOVoXDTIxMTIxNTEyNDIwOVowNzEL
MAkGA1UEBhMCQVUxEDAOBgNVBAMTB0NsaWVudDQxFjAUBgkqhkiG9w0BCQEWB0Ns
aWVudDQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDMGzFZVM8OLyMF
AVWh/AwGhJ0q8T5Vh75LrD5tNJR6Qv6m/N+mKtEYPujAZzJIqDOBngmPzIYSTi2N
XELCrl6kQvTpcs/lcwQrOTRMxi75og1xna9gKtbdKvr8Zn09bDegcram6XuD6jWw
w8W05wzWxF9Ubkt1dcNdyVF3lBM87YVOsM9xOvkugLWO04AmEnRepB38s5X/3mPj
6TTKk4IKQUjmSX1xplKdobvEytKyywuQGuRr32FI3gIH7X8DS9lSw9EhvjLO52VX
Of/DtpYow5BjEusUT80n7h9OIn1sGkvbSMTKKrzuQKBVFvHW1WjZHPKn5tBwO9wp
nphetmqPgj2zkk+LKC92fEHl096BgEVZiQp6ixk7yBOJ/ne9nPvJAiJhTvrwVilt
FbtRVZjZ+T4wYEiPSGUKxyU9SHrpJliJGWY5rFq58wLibmh9LjMk2zkevLKe+fJP
DGp17iI++6Q/G5AoBKz7PbmRJaOq/xH0epZy3qLQZA0o7UVty6Jfy+VGNcHpsxgQ
ltKBxwRJ7XJlNIha+fDSKEUDQr7src2wRGwgAR1tHFETOBbp4NtMxTzCxq51CzIo
fO9kP5uV6OSBdQTIU7nni72bxUg2T8kKZ0A/3lr5e6YWeg2tRD2fZziQd8WBPp3M
HmLmiRcybUYWvHt3ZUt1UEJIBgog7QIDAQABo4IBATCB/jAJBgNVHRMEAjAAMBEG
CWCGSAGG+EIBAQQEAwIEsDArBglghkgBhvhCAQ0EHhYcVGlueUNBIEdlbmVyYXRl
ZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUffrp+KrDJFGTgUARU2M+RvvRlJkwSwYD
VR0jBEQwQoAUmh+yIUO2PG/fMMMjXjestsQPg4+hH6QdMBsxCzAJBgNVBAYTAkFV
MQwwCgYDVQQDEwNDQTKCCQDKVLkfYo4SETAJBgNVHRIEAjAAMBIGA1UdEQQLMAmB
B0NsaWVudDQwDgYDVR0PAQH/BAQDAgWgMBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMC
MA0GCSqGSIb3DQEBBQUAA4ICAQAuspxaskmlZSNIaK4qE2gUWLm37Otr6hwJdP4P
s6B4jkjMW5n2gQ0ZjtWVXEG2xA771pTqL9XNtqBdUGRNBs3tj2lSp5n7KTuxilVX
S79EaoOVr7/vbEGscgrpRcIhYhXxS9JxdL64drWJybjMBuw945lxPmYA8G3bW3LN
R40raEN//gui9Hb0hIW+2uu/WM8Hw60Gmc50q5mQh3A3n8ZUocFxKkUfb3tLqeG3
cgqCYgUctTqISJsHbQkTI594rRhQeYyaGirg0t2OgeVaXBX+7HBnDAomR1VPxahU
hhqxc8cE6l6ufIKusljOYljWydcgcinJnwGyH/gxSdMCItolPj4gAiVvCbJ/Pu38
GNlgCPc1pfJ2vSgzoUeMr5HLTx/jwfNpHDE3on/qtiaYCWWkZqKJOC/0Nq2Jz9lM
jvbWTSnQ+oRq7B/5cH02u+M2dcuZFrrmosQq680Ov8K/f4/jBjwGgFXg46fCXzsR
mNc0s6Dx3nS2ecIocDQfR7cy+oqYVHQOhvBrp6zSbb2H265D8i82jV/i5j6DbZ6P
s/Ab7xtyW6AwGr6O+s9Wix4w6vVKds7uq5lTUIjjl5dw6JcHjpBmlmPsKvQH2izx
1fLOfvz9aFHvvXEKFqpptwd9ZQL2KpmNIrOp7jrnpQ1e18zbL8HnX6W4V0rKUAn4
svkkFA==
-----END CERTIFICATE-----
""".strip()
server1_key = """
-----BEGIN RSA PRIVATE KEY-----
MIIJKgIBAAKCAgEAtvCbou2IcBSHbMdFWJ4PzPYsxrsprli027OLFEPXs6a3X7L9
z2gNL9BuK7Zh/XK9XNAPYYsjYWVkP0O4JbyK4rH2kOPXuUIYGFztz2BwXPvDLjlr
uqNVWAbil3g7EIUqcRJfxkx6aZRG6KlWOfGsJHGd46pUDRF79WupkSauC3t0EgqH
C18WcDuQtCkYVxoFiRflfkLdjVl2TD2RcXOBvDnxj1N5668HyVHsEU32l0xfOByq
LeLl5z4uk+DrgvmwOFVi/4ij2uSm/+oa2rKFFPLlWUbeUtdiEHQ3Sw+6aY0+95gH
sUjMXfqzIF6/Yo/nlk6JjGh4FLaJyDCyOj8MGdG7kgvDl5Ho1cmJip63Y/z95aRf
4gtrZq0nD7upwyZC6XlWS7jr6V7Pd0KrRT9bLbrLeCZEZ1rWiM4KItM8GViolRSY
aRyJgQOMh5F0jIV9w9Ai9Oxta72jmCaSFozwQyjWL3CqCxCUsvIFiEQEdiGaGFRs
3DehWI1dHpCmgrTtChCIu1+bEMogl75d1VPYKAHhRcblFySkwjbgpatajxEkKxmb
tKeg2kuH8josU+3hxCyj+66JwzfiYt1UNh1uHzrOvheosl3p+5JpotBuVAm4+ofP
anEydEXEg9ORxYD08Ddqql62QGO8QUMLt+SwcdWRQRQkjAxvX0lFotMI/eUCAwEA
AQKCAgEApgyTWDm+o+0eVzAw05T0xpeUYPY1iRjfYKQBU22Y9moW+/hdxMqvXX0U
4vxyyThChWIc8+71OExtx7bSCP6wGcBrC2yjvHYvpL2E5bylgODMcsKP9CKZLoNh
XRc2lXIp6eRBpp54Zii+jCRYLdQc6h9urt1F2W7LUyJcEXJIfAecfVus5Dd1CH4o
hD7g5v6pk5xrJEXRD6HqbJ1dzNqJIa5+ghfFDJYcvTFs0vAvKXma3DW4ilnvUAvy
/ysi2gmFWDy41TTTdbYhlxyJL4TmovMuFfDrj8oMKt8x6SHnlDMuulH2eYaYaZ1K
xdD6ap4wGRBEbXvNsw9U1K7Ot2vOsH+AUK46bZfkw+Oe28j6i342gL/o29z6BwSe
GP+an+VeCS87WUuYCzGugucVBU7UnbGkXyYXbSpYS1h0FrSxElqCTxXBmteo4KJL
uWo3iQXg7ik8gpPG89Xo5c8tylEVEvA9wLB7lZNPURsY9QNXLyYGffJuW8AYFJyv
ymhdiVtLNV5rBUgXmjl+g8g416u6Oj/sx5NfcCQTCw04q5LbCeiHW/KsvIhV3eHz
mj7kQ/OrAtdwZA7ER3mhm7cXqw0EutA+p+HZ87BWYi5HBV7eOgxrxHTw9SK4OIFt
OhKH6l0nghsI/P7PNBR3b+yySFkrn06ctttYCLm6NRYqRoWFrmkCggEBAOOYJOHw
bT/EgJM3vugXl6DKQs4OnfmRdQ2T08HWHCu6tMzEp7veNtE2oAz39XezpOK+gclJ
VGnGBLiZC2eTAsyb7WxAbmRW2Q17+K9NC1SXpYvFDFFaWI65sQciiZBdDZlDzUJw
NlIXgKfJSuAuqXx78slcQuV69Ii7CYys3AbbPeGgKVEqOHGn74hFhUlmWpoE2lM9
tr2p5pZMdKBIe98dyFnzPbBB81dbIfILzH5wSWJLGPuSWhB28a0cY21OAczd59Eq
FyYMTItdk5X8bZLjj0NZ803WWq1usl+X5z3Kr/2aQvV/FRJH26/UBz8z2Pqdk67D
WhBLavhTrj1k68sCggEBAM3Ftj5fr2BKV7TiGroZnLDi+9UdOE344K6OI/sM85/m
YcUJWGxJFTVgOIpMtIJQ9CxHc9xhTabFSGzJ6VOLYW4r5EbiBFY3WrL4lrUeOIzF
XAxBJQR8vt1d/wQD7h0WKDSimpToM4wOcFzEHEkyB9bVbyw2sWj+bM+sD8O5Q0gv
a5Z1W406Ssn+z1gvVBM3MDbUqrrzTTXqHvWOwdDvkxb1eIY++Kco5FIhy7NecdT1
oV+8GfOUCFMqLXTRrHg7atQgS7vcehsILuQqhXs0y3PSbbemVgLLG9E0CZ+w/zbX
HBu14Hhjj4XogSJi+HC5uyUTafNmq0bYhL29wCax5w8CggEBANAC7CK8VX1koYbr
+kWx2lmQwsIFxgilEvCX3YBZqmGlQT2ttwgTrtJENL/lmKoQvHCoYYKQzN/npcT5
y9ycFoDfOn4n3T1Dyxlx5vaBWgu0lg9Kx1lLU4kO2meE/2m8QoOD3oQMfvlElcfE
R/ThcPJfbqTu+A049WpKWA4Epwx1MPeYJGsURYZLULehopJVRBVkvg46Z1ytfhx8
QFOGLADd/ZGIqScA/+ElX78TXZFqGwgFTw4O0tYdgAER4yWxmB+f6RHYgFO8BfGS
UyNQFO2dogCSo7bOZQ4CEHEiKqzlJTiJ1wz9W0rb9kObbAwt3PAhOSsPTK973oac
JLHkHUUCggEAa3ZfsL9j5ZOtrkeO0bXigPZpsmiqKP5ayI5u+ANRkCZO1QoGZbbd
Hpz7qi5Y7t28Rwuh1Gv0k63gHwBrnDfkUBcYBnSu8x/BfEoa2sfHnKzNX5D99hP3
0b/vGHe8+O/DW4m31SBXG0PHJos8gnVgZq/ceWiuyjhlNyeSrBKqsp4hP9hWUbEp
scgjHNjKvaZKxbfW2f+KSSfVt0QwsB8N4CWeJe3pCdNvOf1wVmJybFdDSa4Al5at
qlESoDmIKtpM9i9PnfKMymVBp+MVBr0Rq5Evv4Nc0+SiyGS2yfEzt74rbcVUT0sf
fz1ngz/Qo3474Cb9ZCIwPLWCzVy1Zv/tvQKCAQEAv8uxjmM/CqtKDW9c/z4Z4y6O
squI4AjCgbml8VzC2aS1zQwbCsq0KmGYVgYALKT4dSH+B+6koy+J5GPpVX9xL0Zq
MZJlo1Hmi2hDW+gi/w+Q62iRdqO+SoqbFZJ5aX4iF3dyX9rvDyOzRFr+kddtuQ6y
tru00ATHMp2hix8LoKDo8dLY9bX6Y9RmgWAVOYbFHm4OB9wE2fya3feo6O3znJY9
EqlYKE0bzcHQQzeT0+Lh9+1KLBg6B6jfyAscVKmSgJyEHLW7gzgF/h10py8XMEVj
syS6C3/DMznzpQSyjdTHqdiGuLfagF9oHxRaRacXaxLP2CzILIUFIaEIvJevYg==
-----END RSA PRIVATE KEY-----
""".strip()
server1_cert = """
-----BEGIN CERTIFICATE-----
MIIFwTCCA6mgAwIBAgIBBjANBgkqhkiG9w0BAQUFADAbMQswCQYDVQQGEwJBVTEM
MAoGA1UEAxMDQ0EyMB4XDTE0MDkxMzEyNDEzN1oXDTIxMTIxNTEyNDEzN1owOTEL
MAkGA1UEBhMCQVUxEjAQBgNVBAMTCWxvY2FsaG9zdDEWMBQGCSqGSIb3DQEJARYH
U2VydmVyNDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALbwm6LtiHAU
h2zHRVieD8z2LMa7Ka5YtNuzixRD17Omt1+y/c9oDS/Qbiu2Yf1yvVzQD2GLI2Fl
ZD9DuCW8iuKx9pDj17lCGBhc7c9gcFz7wy45a7qjVVgG4pd4OxCFKnESX8ZMemmU
RuipVjnxrCRxneOqVA0Re/VrqZEmrgt7dBIKhwtfFnA7kLQpGFcaBYkX5X5C3Y1Z
dkw9kXFzgbw58Y9TeeuvB8lR7BFN9pdMXzgcqi3i5ec+LpPg64L5sDhVYv+Io9rk
pv/qGtqyhRTy5VlG3lLXYhB0N0sPummNPveYB7FIzF36syBev2KP55ZOiYxoeBS2
icgwsjo/DBnRu5ILw5eR6NXJiYqet2P8/eWkX+ILa2atJw+7qcMmQul5Vku46+le
z3dCq0U/Wy26y3gmRGda1ojOCiLTPBlYqJUUmGkciYEDjIeRdIyFfcPQIvTsbWu9
o5gmkhaM8EMo1i9wqgsQlLLyBYhEBHYhmhhUbNw3oViNXR6QpoK07QoQiLtfmxDK
IJe+XdVT2CgB4UXG5RckpMI24KWrWo8RJCsZm7SnoNpLh/I6LFPt4cQso/uuicM3
4mLdVDYdbh86zr4XqLJd6fuSaaLQblQJuPqHz2pxMnRFxIPTkcWA9PA3aqpetkBj
vEFDC7fksHHVkUEUJIwMb19JRaLTCP3lAgMBAAGjgfEwge4wCQYDVR0TBAIwADAR
BglghkgBhvhCAQEEBAMCBkAwKwYJYIZIAYb4QgENBB4WHFRpbnlDQSBHZW5lcmF0
ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFHewxkloJzmR2Cj0/za/ZHS0yl92MEsG
A1UdIwREMEKAFJofsiFDtjxv3zDDI143rLbED4OPoR+kHTAbMQswCQYDVQQGEwJB
VTEMMAoGA1UEAxMDQ0EyggkAylS5H2KOEhEwCQYDVR0SBAIwADASBgNVHREECzAJ
gQdTZXJ2ZXI0MBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMBMA0GCSqGSIb3DQEBBQUA
A4ICAQA+EAj846j4u/PZvLITPX/kI1+8Y9JIULKwdQ2v8O5mMf9In2Pk9MQ+81RP
rpDZo3ZsfkkdoAR7j5ZeTdMargFAeyErfJpZ5Fv4LryaNotJB0/iG8vcWpOJ7qa7
bae+5hQ0vzAoeIxg7kRXN2noSyHHhd3riddOp3/TxetKoFdWSjjnMXqBvZbYzUcf
asdKMXKcvZlan01f+zV8CkR7+Scd+5uW33lNHnUmCzeGA5G8z1vA05u9TVAkwU5r
XbdJbUjCE3d+X/jkaS5IvhBu6tKSA1YFcD9Brh8CmMjtCWLk8ETv+78WJzqyjiaT
OisFTUI/jC18dKgFyyehEeeYo5SZO7BIsNgplDX2UOumQwZYdUX4M3ObRt2n33Fb
ReVhPf39oCDSOGEckRGeJX6ydVRjWJHC/qT3gDKaMPZd5lN0M1BOqyAFakM0oU/7
VPf9dUQsw/BeUvm+34hE382JIefzBA32SsyfQjNf6L6tV1JYEfeaebSI+cIny9me
lfvTgPmoabqCXVN03hyppf7/0tD8BpitC9ghFrN61oJLEgJOJ9tLuQz0h5gbxeZP
mOAkPcQs5FMuzNmP/amLSfCFfdUT5iIqZ3uIAsqnw0ftp8OOEAdyoC4/vgVx3y6b
BOX+H+pK1aZXjNzcacyPSawHJTvqexNJFWV167okb1BmOFJL9w==
-----END CERTIFICATE-----
""".strip() | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/test/tls_material.py | tls_material.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
from datetime import datetime
from tempfile import NamedTemporaryFile
from random import choice, randint
from unittest import TestCase
from uuid import uuid4
# Bunch
from bunch import Bunch, bunchify
# mock
from mock import MagicMock, Mock
# nose
from nose.tools import eq_
# six
from six import string_types
# SQLAlchemy
from sqlalchemy import create_engine
# Zato
from zato.common.api import CHANNEL, DATA_FORMAT, SIMPLE_IO
from zato.common.ext.configobj_ import ConfigObj
from zato.common.json_internal import loads
from zato.common.log_message import CID_LENGTH
from zato.common.odb import model
from zato.common.odb.model import Cluster, ElasticSearch
from zato.common.odb.api import SessionWrapper, SQLConnectionPool
from zato.common.odb.query import search_es_list
from zato.common.simpleio_ import get_bytes_to_str_encoding, get_sio_server_config, simple_io_conf_contents
from zato.common.py23_ import maxint
from zato.common.util.api import is_port_taken, new_cid
from zato.server.service import Service
# Zato - Cython
from zato.simpleio import CySimpleIO
# Python 2/3 compatibility
from past.builtins import basestring, cmp, unicode, xrange
# ################################################################################################################################
if 0:
from zato.common.util.search import SearchResults
SearchResults = SearchResults
# ################################################################################################################################
test_class_name = '<my-test-class>'
# ################################################################################################################################
class test_odb_data:
cluster_id = 1
name = 'my.name'
is_active = True
es_hosts = 'my.hosts'
es_timeout = 111
es_body_as = 'my.body_as'
# ################################################################################################################################
def rand_bool():
return choice((True, False))
# ################################################################################################################################
def rand_csv(count=3):
return ','.join(str(elem) for elem in rand_int(count=count))
# ################################################################################################################################
def rand_dict():
out = {}
funcs = [rand_bool, rand_int, rand_string]
for x in range(rand_int(30)):
out[choice(funcs)()] = choice(funcs)()
return out
# ################################################################################################################################
def rand_list():
out = []
funcs = [rand_bool, rand_int, rand_string]
for x in range(rand_int(30)):
out.append(choice(funcs)())
return out
# ################################################################################################################################
def rand_list_of_dicts():
out = []
for x in range(rand_int(30)):
out.append(rand_dict())
return out
# ################################################################################################################################
def rand_opaque():
return rand_object()
rand_nested = rand_opaque
# ################################################################################################################################
def rand_datetime(to_string=True):
    value = datetime.utcnow() # Current time is as random as any other
return value.isoformat() if to_string else value
# ################################################################################################################################
def rand_int(start=1, stop=100, count=1):
if count == 1:
return randint(start, stop)
else:
return [randint(start, stop) for x in range(count)]
# ################################################################################################################################
def rand_float(start=1.0, stop=100.0):
return float(rand_int(start, stop))
# ################################################################################################################################
def rand_string(count=1, prefix=''):
prefix = ('-' + prefix + '-') if prefix else ''
if count == 1:
return 'a' + prefix + uuid4().hex
else:
return ['a' + prefix + uuid4().hex for x in range(count)]
# ################################################################################################################################
def rand_unicode():
return u'ϠϡϢϣϤϥϦϧϨϩϪϫϬϭ'
# ################################################################################################################################
def rand_object():
return object()
# ################################################################################################################################
def rand_date_utc(as_string=False):
value = datetime.utcnow() # Now is as random as any other date
if as_string:
return value.isoformat()
return value
# ################################################################################################################################
def is_like_cid(cid):
""" Raises ValueError if the cid given on input does not look like a genuine CID
produced by zato.common.util.new_cid
"""
if not isinstance(cid, string_types):
        raise ValueError('CID `{}` should be a string-like object instead of `{}`'.format(cid, type(cid)))
len_given = len(cid)
if len_given != CID_LENGTH:
raise ValueError('CID `{}` should have length `{}` instead of `{}`'.format(cid, CID_LENGTH, len_given))
return True
# ################################################################################################################################
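# A minimal usage sketch - the output of new_cid passes the check while other strings raise ValueError.
# The sample input below is illustrative and not part of the module's API.
def _example_is_like_cid(): # pragma: no cover
    assert is_like_cid(new_cid()) is True
    try:
        is_like_cid('not-a-cid')
    except ValueError:
        pass # Expected - the input does not have the length of a genuine CID

# ################################################################################################################################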
def get_free_tcp_port(start=40000, stop=40500):
""" Iterates between start and stop, returning first free TCP port. Must not be used except for tests because
it comes with a race condition - another process may want to bind the port we find before our caller does.
"""
for port in xrange(start, stop):
if not is_port_taken(port):
return port
else:
raise Exception('Could not find any free TCP port between {} and {}'.format(start, stop))
# ################################################################################################################################
def enrich_with_static_config(object_):
""" Adds to an object (service instance or class) all attributes that are added by service store.
Useful during tests since there is no service store around to do it.
"""
object_.component_enabled_ibm_mq = True
object_.component_enabled_zeromq = True
object_.component_enabled_patterns = True
object_.component_enabled_target_matcher = True
object_.component_enabled_invoke_matcher = True
object_.component_enabled_sms = True
object_.get_name()
def target_match(*args, **kwargs):
return True
is_allowed = target_match
object_._worker_config = Bunch(out_odoo=None, out_soap=None)
object_._worker_store = Bunch(
sql_pool_store=None, outgoing_web_sockets=None, cassandra_api=None,
cassandra_query_api=None, email_smtp_api=None, email_imap_api=None, search_es_api=None, search_solr_api=None,
target_matcher=Bunch(target_match=target_match, is_allowed=is_allowed), invoke_matcher=Bunch(is_allowed=is_allowed),
vault_conn_api=None, sms_twilio_api=None)
# ################################################################################################################################
class Expected(object):
""" A container for the data a test expects the service to return.
"""
def __init__(self):
self.data = []
def add(self, item):
self.data.append(item)
def get_data(self):
if not self.data or len(self.data) > 1:
return self.data
else:
return self.data[0]
# ################################################################################################################################
class FakeBrokerClient(object):
def __init__(self):
self.publish_args = []
self.publish_kwargs = []
self.invoke_async_args = []
self.invoke_async_kwargs = []
def publish(self, *args, **kwargs):
raise NotImplementedError()
def invoke_async(self, *args, **kwargs):
self.invoke_async_args.append(args)
self.invoke_async_kwargs.append(kwargs)
# ################################################################################################################################
class FakeKVDB(object):
class FakeConn(object):
def __init__(self):
self.setnx_args = None
self.setnx_return_value = True
self.expire_args = None
self.delete_args = None
def return_none(self, *ignored_args, **ignored_kwargs):
return None
get = hget = return_none
def setnx(self, *args):
self.setnx_args = args
return self.setnx_return_value
def expire(self, *args):
self.expire_args = args
def delete(self, args):
self.delete_args = args
def __init__(self):
self.conn = self.FakeConn()
def translate(self, *ignored_args, **ignored_kwargs):
raise NotImplementedError()
# ################################################################################################################################
class FakeServices(object):
def __getitem__(self, ignored):
return {'slow_threshold': 1234}
# ################################################################################################################################
class FakeServiceStore(object):
def __init__(self, name_to_impl_name=None, impl_name_to_service=None):
self.services = FakeServices()
self.name_to_impl_name = name_to_impl_name or {}
self.impl_name_to_service = impl_name_to_service or {}
def new_instance(self, impl_name, is_active=True):
return self.impl_name_to_service[impl_name](), is_active
# ################################################################################################################################
class FakeServer(object):
""" A fake mock server used in test cases.
"""
def __init__(self, service_store_name_to_impl_name=None, service_store_impl_name_to_service=None, worker_store=None):
self.kvdb = FakeKVDB()
self.service_store = FakeServiceStore(service_store_name_to_impl_name, service_store_impl_name_to_service)
self.worker_store = worker_store
self.fs_server_config = Bunch()
self.fs_server_config.misc = Bunch()
self.fs_server_config.misc.zeromq_connect_sleep = 0.1
self.fs_server_config.misc.internal_services_may_be_deleted = False
self.repo_location = rand_string()
self.delivery_store = None
self.user_config = Bunch()
self.static_config = Bunch()
self.time_util = Bunch()
self.servers = []
self.ipc_api = None
self.component_enabled = Bunch()
# ################################################################################################################################
class SIOElemWrapper(object):
""" Makes comparison between two SIOElem elements use their names.
"""
def __init__(self, value):
self.value = value
def __cmp__(self, other):
# Compare to either other's name or to other directly. In the latter case it means it's a plain string name
# of a SIO attribute.
return cmp(self.value.name, getattr(other, 'name', other))
# ################################################################################################################################
class ServiceTestCase(TestCase):
def __init__(self, *args, **kwargs):
self.maxDiff = None
super(ServiceTestCase, self).__init__(*args, **kwargs)
def invoke(self, class_, request_data, expected, mock_data={}, channel=CHANNEL.HTTP_SOAP, job_type=None,
data_format=DATA_FORMAT.JSON, service_store_name_to_impl_name=None, service_store_impl_name_to_service=None):
""" Sets up a service's invocation environment, then invokes and returns
an instance of the service.
"""
class_.component_enabled_cassandra = True
class_.component_enabled_email = True
class_.component_enabled_search = True
class_.component_enabled_msg_path = True
class_.has_sio = getattr(class_, 'SimpleIO', False)
instance = class_()
server = MagicMock()
server.component_enabled.stats = False
worker_store = MagicMock()
        worker_store.worker_config = MagicMock()
worker_store.worker_config.outgoing_connections = MagicMock(return_value=(None, None, None, None))
worker_store.worker_config.cloud_aws_s3 = MagicMock(return_value=None)
worker_store.invoke_matcher.is_allowed = MagicMock(return_value=True)
simple_io_config = {
'int_parameters': SIMPLE_IO.INT_PARAMETERS.VALUES,
'int_parameter_suffixes': SIMPLE_IO.INT_PARAMETERS.SUFFIXES,
'bool_parameter_prefixes': SIMPLE_IO.BOOL_PARAMETERS.SUFFIXES,
}
class_.update(
instance, channel, FakeServer(service_store_name_to_impl_name, service_store_impl_name_to_service, worker_store),
None, worker_store, new_cid(), request_data, request_data, simple_io_config=simple_io_config,
data_format=data_format, job_type=job_type)
def get_data(self, *ignored_args, **ignored_kwargs):
return expected.get_data()
instance.get_data = get_data
for attr_name, mock_path_data_list in mock_data.items():
setattr(instance, attr_name, Mock())
attr = getattr(instance, attr_name)
for mock_path_data in mock_path_data_list:
for path, value in mock_path_data.items():
split = path.split('.')
new_path = '.return_value.'.join(elem for elem in split) + '.return_value'
attr.configure_mock(**{new_path:value})
broker_client_publish = getattr(self, 'broker_client_publish', None)
if broker_client_publish:
instance.broker_client = FakeBrokerClient()
instance.broker_client.publish = broker_client_publish
def set_response_func(*args, **kwargs):
pass
instance.handle()
instance.update_handle(
set_response_func, instance, request_data, channel, data_format, None, server, None, worker_store, new_cid(),
None)
return instance
def _check_sio_request_input(self, instance, request_data):
for k, v in request_data.items():
            self.assertEqual(getattr(instance.request.input, k), v)
sio_keys = set(getattr(instance.SimpleIO, 'input_required', []))
sio_keys.update(set(getattr(instance.SimpleIO, 'input_optional', [])))
given_keys = set(request_data.keys())
diff = sio_keys ^ given_keys
self.assertFalse(diff, 'There should be no difference between sio_keys {} and given_keys {}, diff {}'.format(
sio_keys, given_keys, diff))
def check_impl(self, service_class, request_data, response_data, response_elem, mock_data={}):
expected_data = sorted(response_data.items())
instance = self.invoke(service_class, request_data, None, mock_data)
self._check_sio_request_input(instance, request_data)
if response_data:
if not isinstance(instance.response.payload, basestring):
response = loads(instance.response.payload.getvalue())[response_elem] # Raises KeyError if 'response_elem' doesn't match
else:
response = loads(instance.response.payload)[response_elem]
self.assertEqual(sorted(response.items()), expected_data)
def check_impl_list(self, service_class, item_class, request_data, # noqa
response_data, request_elem, response_elem, mock_data={}): # noqa
expected_keys = response_data.keys()
expected_data = tuple(response_data for x in range(rand_int(10)))
expected = Expected()
for datum in expected_data:
item = item_class()
for key in expected_keys:
value = getattr(datum, key)
setattr(item, key, value)
expected.add(item)
instance = self.invoke(service_class, request_data, expected, mock_data)
response = loads(instance.response.payload.getvalue())[response_elem]
for idx, item in enumerate(response):
expected = expected_data[idx]
given = Bunch(item)
for key in expected_keys:
given_value = getattr(given, key)
expected_value = getattr(expected, key)
eq_(given_value, expected_value)
self._check_sio_request_input(instance, request_data)
def wrap_force_type(self, elem):
return SIOElemWrapper(elem)
# ################################################################################################################################
class ODBTestCase(TestCase):
def setUp(self):
engine_url = 'sqlite:///:memory:'
pool_name = 'ODBTestCase.pool'
config = {
'engine': 'sqlite',
'sqlite_path': ':memory:',
'fs_sql_config': {
'engine': {
'ping_query': 'SELECT 1'
}
}
}
# Create a standalone engine ..
self.engine = create_engine(engine_url)
# .. all ODB objects for that engine..
model.Base.metadata.create_all(self.engine)
# .. an SQL pool too ..
self.pool = SQLConnectionPool(pool_name, config, config)
# .. a session wrapper on top of everything ..
self.session_wrapper = SessionWrapper()
self.session_wrapper.init_session(pool_name, config, self.pool)
# .. and all ODB objects for that wrapper's engine too ..
model.Base.metadata.create_all(self.session_wrapper.pool.engine)
# Unrelated to the above, used in individual tests
self.ODBTestModelClass = ElasticSearch
def tearDown(self):
model.Base.metadata.drop_all(self.engine)
self.ODBTestModelClass = None
def get_session(self):
return self.session_wrapper.session()
def get_sample_odb_orm_result(self, is_list):
# type: (bool) -> object
cluster = Cluster()
cluster.id = test_odb_data.cluster_id
cluster.name = 'my.cluster'
cluster.odb_type = 'sqlite'
cluster.broker_host = 'my.broker.host'
cluster.broker_port = 1234
cluster.lb_host = 'my.lb.host'
cluster.lb_port = 5678
cluster.lb_agent_port = 9012
es = self.ODBTestModelClass()
es.name = test_odb_data.name
es.is_active = test_odb_data.is_active
es.hosts = test_odb_data.es_hosts
es.timeout = test_odb_data.es_timeout
es.body_as = test_odb_data.es_body_as
es.cluster_id = test_odb_data.cluster_id
session = self.session_wrapper._session
session.add(cluster)
session.add(es)
session.commit()
session = self.session_wrapper._session
result = search_es_list(session, test_odb_data.cluster_id) # type: tuple
result = result[0] # type: SearchResults
# This is a one-element tuple of ElasticSearch ORM objects
result = result.result # type: tuple
return result if is_list else result[0]
# ################################################################################################################################
class MyODBService(Service):
class SimpleIO:
output = 'cluster_id', 'is_active', 'name'
# ################################################################################################################################
class MyODBServiceWithResponseElem(MyODBService):
class SimpleIO(MyODBService.SimpleIO):
response_elem = 'my_response_elem'
# ################################################################################################################################
class MyZatoClass:
def to_zato(self):
return {
'cluster_id': test_odb_data.cluster_id,
'is_active': test_odb_data.is_active,
'name': test_odb_data.name,
}
# ################################################################################################################################
# ################################################################################################################################
class BaseSIOTestCase(TestCase):
# ################################################################################################################################
def setUp(self):
self.maxDiff = maxint
# ################################################################################################################################
def get_server_config(self, needs_response_elem=False):
with NamedTemporaryFile(delete=False) as f:
contents = simple_io_conf_contents.format(bytes_to_str_encoding=get_bytes_to_str_encoding())
if isinstance(contents, unicode):
contents = contents.encode('utf8')
f.write(contents)
f.flush()
            temporary_file_name = f.name
sio_fs_config = ConfigObj(temporary_file_name)
sio_fs_config = bunchify(sio_fs_config)
        os.remove(temporary_file_name)
sio_server_config = get_sio_server_config(sio_fs_config)
if not needs_response_elem:
sio_server_config.response_elem = None
return sio_server_config
# ################################################################################################################################
def get_sio(self, declaration, class_):
sio = CySimpleIO(self.get_server_config(), declaration)
sio.build(class_)
return sio
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/test/__init__.py | __init__.py |
# stdlib
from operator import itemgetter
# Zato
from zato.common.odb.query import pubsub_endpoint_queue_list_by_sub_keys
# ################################################################################################################################
if 0:
from typing import Union as union
from zato.server.base.parallel import ParallelServer
ParallelServer = ParallelServer
union = union
# ################################################################################################################################
def make_short_msg_copy_from_dict(msg, data_prefix_len, data_prefix_short_len):
out_msg = {}
out_msg['msg_id'] = msg['pub_msg_id']
out_msg['in_reply_to'] = msg.get('in_reply_to')
out_msg['data'] = msg['data'][:data_prefix_len]
out_msg['data_prefix_short'] = out_msg['data'][:data_prefix_short_len]
out_msg['size'] = msg['size']
out_msg['pub_pattern_matched'] = msg['pub_pattern_matched']
out_msg['sub_pattern_matched'] = msg['sub_pattern_matched']
out_msg['pub_time'] = msg['pub_time']
out_msg['expiration'] = msg['expiration']
out_msg['expiration_time'] = msg['expiration_time']
out_msg['topic_id'] = msg['topic_id']
out_msg['topic_name'] = msg['topic_name']
out_msg['cluster_id'] = msg['cluster_id']
out_msg['published_by_id'] = msg['published_by_id']
out_msg['delivery_status'] = msg['delivery_status']
out_msg['server_name'] = msg['server_name']
out_msg['server_pid'] = msg['server_pid']
out_msg['has_gd'] = msg['has_gd']
out_msg['recv_time'] = msg['recv_time']
out_msg['sub_key'] = msg['sub_key']
return out_msg
# ################################################################################################################################
def make_short_msg_copy_from_msg(msg, data_prefix_len, data_prefix_short_len):
out_msg = {}
out_msg['msg_id'] = msg.pub_msg_id
out_msg['in_reply_to'] = msg.in_reply_to
out_msg['data'] = msg.data[:data_prefix_len]
out_msg['data_prefix_short'] = out_msg['data'][:data_prefix_short_len]
out_msg['size'] = msg.size
out_msg['pub_pattern_matched'] = msg.pub_pattern_matched
out_msg['sub_pattern_matched'] = msg.sub_pattern_matched
out_msg['pub_time'] = msg.pub_time
out_msg['expiration'] = msg.expiration
out_msg['expiration_time'] = msg.expiration_time
out_msg['topic_id'] = msg.topic_id
out_msg['topic_name'] = msg.topic_name
out_msg['cluster_id'] = msg.cluster_id
out_msg['published_by_id'] = msg.published_by_id
out_msg['delivery_status'] = msg.delivery_status
out_msg['server_name'] = msg.server_name
out_msg['server_pid'] = msg.server_pid
out_msg['has_gd'] = msg.has_gd
out_msg['recv_time'] = msg.recv_time
out_msg['sub_key'] = msg.sub_key
return out_msg
# ################################################################################################################################
def get_last_topics(topic_list, as_list=True):
# type: (list, bool) -> union[dict, list]
# Response to produce
out = {}
    for item in topic_list: # type: dict
# Local alias
topic_id = item['topic_id'] # type: int
# .. we may have visited this topic already ..
previous = out.get(topic_id, {}) # type: dict
# .. if we have ..
if previous:
if item['pub_time'] > previous['pub_time']:
out[topic_id] = item
# .. otherwise, we can just set the current one ..
else:
out[topic_id] = item
if as_list:
out = sorted(out.values(), key=itemgetter('pub_time'), reverse=True)
return out
else:
return out
# ################################################################################################################################
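# A minimal sketch of get_last_topics - only the newest publication per topic_id is kept
# and, with the default as_list=True, the newest entries come first. The values are illustrative.
def _example_get_last_topics(): # pragma: no cover
    topic_list = [
        {'topic_id': 1, 'pub_time': '2021-01-01T10:00:00'},
        {'topic_id': 1, 'pub_time': '2021-01-01T12:00:00'},
        {'topic_id': 2, 'pub_time': '2021-01-01T11:00:00'},
    ]
    out = get_last_topics(topic_list)
    assert out[0] == {'topic_id': 1, 'pub_time': '2021-01-01T12:00:00'}
    assert out[1] == {'topic_id': 2, 'pub_time': '2021-01-01T11:00:00'}

# ################################################################################################################################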
def get_last_pub_metadata(server, topic_id_list):
# type: (ParallelServer, list) -> dict
# Make sure we have a list on input
if isinstance(topic_id_list, list):
input_topic_id = None
is_single_topic = False
else:
input_topic_id = int(topic_id_list)
is_single_topic = True
topic_id_list = [topic_id_list]
# Always use integers for topic IDs
topic_id_list = [int(elem) for elem in topic_id_list]
# Look up topic metadata in all the servers ..
response = server.rpc.invoke_all('zato.pubsub.topic.get-topic-metadata', {'topic_id_list':topic_id_list})
# Produce our response
out = get_last_topics(response.data, as_list=False)
if is_single_topic:
return out.get(input_topic_id) or {}
else:
return out
# ################################################################################################################################
def get_endpoint_metadata(server, endpoint_id):
# type: (ParallelServer, int) -> dict
# All topics from all PIDs
topic_list = []
response = server.rpc.invoke_all('zato.pubsub.endpoint.get-endpoint-metadata', {'endpoint_id':endpoint_id})
for pid_response in response.data: # type: dict
for pid_topic_list in pid_response.values(): # type: list
for topic_data in pid_topic_list: # type: dict
topic_list.append(topic_data)
return get_last_topics(topic_list, as_list=True)
# ################################################################################################################################
def get_topic_sub_keys_from_sub_keys(session, cluster_id, sub_key_list):
topic_sub_keys = {}
for item in pubsub_endpoint_queue_list_by_sub_keys(session, cluster_id, sub_key_list):
sub_keys = topic_sub_keys.setdefault(item.topic_name, [])
sub_keys.append(item.sub_key)
return topic_sub_keys
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/pubsub.py | pubsub.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from collections import deque
from datetime import datetime
from itertools import count
# gevent
from gevent.lock import RLock
# ################################################################################################################################
class default:
max_size = 1000
# ################################################################################################################################
class Event(object):
""" An individual event emitted to an event log.
"""
__slots__ = 'log_id', 'event_id', 'name', 'timestamp', 'ctx'
def __init__(self, log_id, event_id, name, ctx, _utcnow=datetime.utcnow):
self.log_id = log_id
self.event_id = event_id
self.name = name
self.ctx = ctx
self.timestamp = _utcnow()
def __repr__(self):
return '<{} at {} log:{} id:{} n:{} t:{}>'.format(self.__class__.__name__, hex(id(self)),
self.log_id, self.event_id, self.name, self.timestamp)
def to_dict(self):
return {
'log_id': self.log_id,
'event_id': self.event_id,
'name': self.name,
'timestamp': self.timestamp.isoformat(),
'ctx': None if self.ctx is None else repr(self.ctx)
}
# ################################################################################################################################
class EventLog(object):
""" A backlog of max_size events of arbitrary nature described by attributes such as ID, name, timestamp and opaque context.
"""
def __init__(self, log_id, max_size=default.max_size):
self.log_id = log_id
self.event_id_counter = count(1)
self.lock = RLock()
self.events = deque(maxlen=max_size)
# ################################################################################################################################
def emit(self, name, ctx=None):
self.events.append(Event(self.log_id, next(self.event_id_counter), name, ctx))
# ################################################################################################################################
def get_event_list(self):
return [elem.to_dict() for elem in self.events]
# ################################################################################################################################
if __name__ == '__main__':
el = EventLog('aaa')
for x in range(1, 50):
el.emit('aaa-{}'.format(x))
print(list(reversed(el.events))) | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/event.py | event.py |
# ################################################################################################################################
# ################################################################################################################################
if 0:
from typing import Callable
Callable = Callable
# ################################################################################################################################
# ################################################################################################################################
_search_attrs = 'num_pages', 'cur_page', 'prev_page', 'next_page', 'has_prev_page', 'has_next_page', 'page_size', 'total'
# ################################################################################################################################
# ################################################################################################################################
class SearchResults(object):
def __init__(self, q, result, columns, total):
# type: (object, object, object, int) -> None
self.q = q
self.result = result
self.total = total
self.columns = columns # type: list
self.num_pages = 0
self.cur_page = 0
self.prev_page = 0
self.next_page = 0
self.has_prev_page = False
self.has_next_page = False
self.page_size = None # type: int
# ################################################################################################################################
def __iter__(self):
return iter(self.result)
# ################################################################################################################################
def __repr__(self):
        # To avoid circular imports - this is OK because we very rarely repr(self) anyway
from zato.common.util.api import make_repr
return make_repr(self)
# ################################################################################################################################
def set_data(self, cur_page, page_size):
num_pages, rest = divmod(self.total, page_size)
        # A non-zero remainder means there is one more, partially filled page
if rest:
num_pages += 1
self.num_pages = num_pages
self.cur_page = cur_page + 1 # Adding 1 because, again, the external API is 1-indexed
self.prev_page = self.cur_page - 1 if self.cur_page > 1 else 0
self.next_page = self.cur_page + 1 if self.cur_page < self.num_pages else None
self.has_prev_page = self.prev_page >= 1
self.has_next_page = bool(self.next_page and self.next_page <= self.num_pages) or False
self.page_size = page_size
# ################################################################################################################################
@staticmethod
def from_list(
data_list, # type: list
cur_page, # type: int
page_size, # type: int
needs_sort=False, # type: bool
post_process_func=None, # type: Callable
sort_key=None, # type: object
needs_reverse=True # type: bool
):
cur_page = cur_page - 1 if cur_page else 0 # We index lists from 0
        # Record the total here because data_list may be truncated to page_size elements below
total = len(data_list)
        # Proceed only if we have actually collected any data
if data_list:
# We need to sort the output ..
if needs_sort:
data_list.sort(key=sort_key, reverse=needs_reverse)
# .. the output may be already sorted but we may perhaps need to reverse it.
else:
if needs_reverse:
data_list.reverse()
start = cur_page * page_size
end = start + page_size
data_list = data_list[start:end]
if post_process_func:
post_process_func(data_list)
search_results = SearchResults(None, data_list, None, total)
search_results.set_data(cur_page, page_size)
return search_results
# ################################################################################################################################
def to_dict(self, _search_attrs=_search_attrs):
out = {}
out['result'] = self.result
for name in _search_attrs:
out[name] = getattr(self, name, None)
return out
# ################################################################################################################################
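# A minimal sketch of SearchResults.from_list - the external API is 1-indexed
# and the input list below is illustrative.
def _example_from_list(): # pragma: no cover
    data = list(range(1, 26)) # 25 elements
    results = SearchResults.from_list(data, 1, 10, needs_reverse=False)
    assert results.total == 25
    assert results.num_pages == 3       # Two full pages and one partial one
    assert results.cur_page == 1
    assert results.next_page == 2
    assert results.has_next_page is True
    assert len(results.result) == 10    # Only the current page's elements remain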
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/search.py | search.py |
# stdlib
import math
from datetime import timedelta
from operator import itemgetter
# Humanize
from humanize import precisedelta
# numpy
import numpy as np
# Zato
from zato.common.api import StatsKey
# ################################################################################################################################
# ################################################################################################################################
float_stats = ('item_max', 'item_min', 'item_mean', 'item_total_time')
# ################################################################################################################################
# ################################################################################################################################
def tmean(data, limit_from=None, limit_to=None):
""" Trimmed mean - includes only elements up to the input limit, if it is given at all.
"""
data = data if isinstance(data, list) else [data]
if limit_from or limit_to:
_data = []
for elem in data:
if limit_from:
if elem < limit_from:
continue
if limit_to:
if elem > limit_to:
continue
_data.append(elem)
data = _data[:]
count = len(data)
total = sum(data)
return total / count if count else 0
# ################################################################################################################################
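# A minimal usage sketch of tmean - values outside the optional limits are excluded before averaging.
def _example_tmean(): # pragma: no cover
    assert tmean([1, 2, 3, 100]) == 26.5           # Plain mean of all the elements
    assert tmean([1, 2, 3, 100], limit_to=10) == 2 # The outlier of 100 is trimmed away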
# ################################################################################################################################
#
# Taken from https://code.activestate.com/recipes/511478-finding-the-percentile-of-the-values/
#
# Original code by Wai Yip Tung, licensed under the Python Foundation License
#
def percentile(data, percent, key=lambda x:x):
"""
Find the percentile of a list of values.
@parameter data - a list of values
@parameter percent - a float value from 0.0 to 1.0.
@parameter key - optional key function to compute value from each element of data.
@return - the percentile of the values
"""
if not data:
return 0
data.sort()
k = (len(data)-1) * percent
f = math.floor(k)
c = math.ceil(k)
if f == c:
return key(data[int(k)])
d0 = key(data[int(f)]) * (c-k)
d1 = key(data[int(c)]) * (k-f)
return d0 + d1
# ################################################################################################################################
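# A minimal usage sketch of percentile with illustrative values.
def _example_percentile(): # pragma: no cover
    assert percentile([1, 2, 3, 4], 0.5) == 2.5 # Median, interpolated between 2 and 3
    assert percentile([1, 2, 3, 4], 1.0) == 4   # The 100th percentile is the maximum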
# ################################################################################################################################
def collect_current_usage(data):
# type: (list) -> dict
# For later use
usage = 0
last_duration = None
last_timestamp = ''
usage_min = None
usage_max = None
usage_mean = None
# Make sure we always have a list to iterate over (rather than None)
data = data or []
for elem in data:
if elem is None:
continue
usage += elem[StatsKey.PerKeyValue]
if elem[StatsKey.PerKeyLastTimestamp] > last_timestamp:
last_timestamp = elem[StatsKey.PerKeyLastTimestamp]
last_duration = elem[StatsKey.PerKeyLastDuration]
if usage_min:
usage_min = min([usage_min, elem[StatsKey.PerKeyMin]])
else:
usage_min = elem[StatsKey.PerKeyMin]
if usage_max:
usage_max = max([usage_max, elem[StatsKey.PerKeyMax]])
else:
usage_max = elem[StatsKey.PerKeyMax]
if usage_mean:
usage_mean = np.mean([usage_mean, elem[StatsKey.PerKeyMean]])
else:
usage_mean = elem[StatsKey.PerKeyMean]
usage_mean = round(usage_mean, 3)
return {
StatsKey.PerKeyValue: usage,
StatsKey.PerKeyLastDuration: last_duration,
StatsKey.PerKeyLastTimestamp: last_timestamp,
StatsKey.PerKeyMin: usage_min,
StatsKey.PerKeyMax: usage_max,
StatsKey.PerKeyMean: usage_mean,
}
# ################################################################################################################################
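# A minimal sketch of collect_current_usage with two illustrative per-PID entries.
def _example_collect_current_usage(): # pragma: no cover
    data = [
        {StatsKey.PerKeyValue: 3, StatsKey.PerKeyLastTimestamp: '2021-01-01T10:00:00',
         StatsKey.PerKeyLastDuration: 12.5, StatsKey.PerKeyMin: 1.0, StatsKey.PerKeyMax: 5.0,
         StatsKey.PerKeyMean: 2.0},
        {StatsKey.PerKeyValue: 7, StatsKey.PerKeyLastTimestamp: '2021-01-01T11:00:00',
         StatsKey.PerKeyLastDuration: 20.0, StatsKey.PerKeyMin: 0.5, StatsKey.PerKeyMax: 9.0,
         StatsKey.PerKeyMean: 4.0},
    ]
    out = collect_current_usage(data)
    assert out[StatsKey.PerKeyValue] == 10          # Usage is summed up ..
    assert out[StatsKey.PerKeyLastDuration] == 20.0 # .. the newest timestamp's duration wins ..
    assert out[StatsKey.PerKeyMin] == 0.5           # .. and min/max/mean are combined.
    assert out[StatsKey.PerKeyMax] == 9.0
    assert out[StatsKey.PerKeyMean] == 3.0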
# ################################################################################################################################
def should_include_in_table_stats(service_name):
# type: (str) -> bool
if service_name.startswith('pub.zato'):
return False
elif service_name.startswith('zato'):
return False
else:
return True
# ################################################################################################################################
# ################################################################################################################################
def combine_table_data(data, round_digits=2):
# type: (list, int) -> dict
# Response to return
out = []
# How many objects we have seen, e.g. how many individual services
total_object_id = 0
# Total usage across all events
total_usage = 0
# Total time spent in all the events (in ms)
total_time = 0
# Total mean time across all objects
total_mean = 0
# First pass, filter out objects with known unneeded names
# and collect total usage of each object and of objects as a whole.
for pid_response in data: # type: dict
if pid_response:
for object_name, stats in pid_response.items(): # type: (str, dict)
if should_include_in_table_stats(object_name):
# Update per object counters
# Total usage needs to be an integer
stats['item_total_usage'] = int(stats['item_total_usage'])
# These are always floats that we need to round up
for name in float_stats:
stats[name] = round(stats[name], round_digits)
# Add to totals
total_usage += stats['item_total_usage']
total_mean += stats['item_mean']
total_time += stats['item_total_time']
total_object_id += 1
# Finally, add the results so that they can be used in further steps
item = dict(stats)
item['name'] = object_name
out.append(item)
    # We know how many objects we have so we can now compute the mean across all of them
if total_object_id:
total_mean = total_mean / total_object_id
# In this pass, we can attach additional per-object statistics
for item in out: # type: dict
item_usage_share = item['item_total_usage'] / total_usage * 100
item_usage_share = round(item_usage_share, round_digits)
item_time_share = item['item_total_time'] / total_time * 100
item_time_share = round(item_time_share, round_digits)
item['item_usage_share'] = item_usage_share
item['item_time_share'] = item_time_share
item['item_total_usage_human'] = item['item_total_usage'] # Currently, this is the same
total_time_delta_min_unit = 'milliseconds' if item['item_total_time'] < 1 else 'seconds'
total_time_delta = timedelta(milliseconds=item['item_total_time'])
total_time_delta = precisedelta(total_time_delta, minimum_unit=total_time_delta_min_unit)
item['item_total_time_human'] = total_time_delta
# Sort by the most interesting attribute
out.sort(key=itemgetter('item_time_share'), reverse=True)
return out
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/stats.py | stats.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.common.util.api import is_func_overridden
# ################################################################################################################################
class HookTool(object):
def __init__(self, server, hook_ctx_class, hook_type_to_method, invoke_func):
self.server = server
self.hook_ctx_class = hook_ctx_class
self.hook_type_to_method = hook_type_to_method
self.invoke_func = invoke_func
# ################################################################################################################################
def is_hook_overridden(self, service_name, hook_type):
impl_name = self.server.service_store.name_to_impl_name[service_name]
service_class = self.server.service_store.service_data(impl_name)['service_class']
func_name = self.hook_type_to_method[hook_type]
func = getattr(service_class, func_name)
return is_func_overridden(func)
# ################################################################################################################################
def get_hook_service_invoker(self, service_name, hook_type):
""" Returns a function that will invoke ooks or None if a given service does not implement input hook_type.
"""
# Do not continue if we already know that user did not override the hook method
if not self.is_hook_overridden(service_name, hook_type):
return
def _invoke_hook_service(*args, **kwargs):
""" A function to invoke hook services.
"""
ctx = self.hook_ctx_class(hook_type, *args, **kwargs)
return self.invoke_func(service_name, {'ctx':ctx}, serialize=False).getvalue(serialize=False)['response']
return _invoke_hook_service
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/hook.py | hook.py |
# stdlib
from datetime import datetime, timedelta
from time import time
import logging
# Arrow
import arrow
# tzlocal
from tzlocal import get_localzone
# Python 2/3 compatibility
from past.builtins import unicode
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
_epoch = datetime.utcfromtimestamp(0) # Start of UNIX epoch
local_tz = get_localzone()
# ################################################################################################################################
def datetime_to_ms(dt):
""" Converts a datetime object to a number of milliseconds since UNIX epoch.
"""
return (dt - _epoch).total_seconds() * 1000
# ################################################################################################################################
def utcnow_as_ms(_time=time):
""" Returns current UTC time in milliseconds since epoch. As of now, uses time.time but may eventually choose
to use alternative implementations on different systems.
"""
return _time()
# ################################################################################################################################
def datetime_from_ms(ms, isoformat=True):
""" Converts a number of milliseconds since UNIX epoch to a datetime object.
"""
value = _epoch + timedelta(milliseconds=ms)
return value.isoformat() if isoformat else value
# ################################################################################################################################
class TimeUtil(object):
""" A thin layer around Arrow's date/time handling library customized for our needs.
Default format is always taken from ISO 8601 (so it's sorted lexicographically)
and default timezone is always UTC.
"""
# ################################################################################################################################
def now(self, format='YYYY-MM-DD HH:mm:ss', tz=local_tz.zone, needs_format=True, delta=None):
""" Returns now in a specified timezone.
"""
now = arrow.now(tz=tz)
if delta:
now = now + delta
if needs_format:
return now.format(format)
return now
# ################################################################################################################################
def yesterday(self, format='YYYY-MM-DD HH:mm:ss', tz=local_tz.zone, needs_format=True):
return self.now(format, tz, needs_format, delta=timedelta(days=-1))
# ################################################################################################################################
def tomorrow(self, format='YYYY-MM-DD HH:mm:ss', tz=local_tz.zone, needs_format=True):
return self.now(format, tz, needs_format, delta=timedelta(days=1))
# ################################################################################################################################
def utcnow(self, format='YYYY-MM-DD HH:mm:ss', needs_format=True):
""" Returns now in UTC formatted as given in 'format'.
"""
return self.now(format, 'UTC', needs_format)
# ################################################################################################################################
def today(self, format='YYYY-MM-DD', tz=local_tz.zone, needs_format=True):
""" Returns current day in a given timezone.
"""
now = arrow.now(tz=tz)
today = arrow.Arrow(year=now.year, month=now.month, day=now.day)
if tz != 'UTC':
today = today.to(tz)
if needs_format:
return today.format(format)
else:
return today
# ################################################################################################################################
def isonow(self, tz=local_tz.zone, needs_format=True, _format='YYYY-MM-DDTHH:mm:ss.SSSSSS'):
return self.now(_format, tz, needs_format)
# ################################################################################################################################
def isoutcnow(self, needs_format=True, _format='YYYY-MM-DDTHH:mm:ss.SSSSSS'):
        return self.utcnow(_format, needs_format)
# ################################################################################################################################
def reformat(self, value, from_, to):
""" Reformats value from one datetime format to another, for instance
from 23-03-2013 to 03/23/13 (MM-DD-YYYY to DD/MM/YY).
"""
try:
# Arrow compares to str, not basestring
value = str(value) if isinstance(value, unicode) else value
from_ = str(from_) if isinstance(from_, unicode) else from_
return arrow.get(value, from_).format(to)
except Exception:
logger.error('Could not reformat value:`%s` from:`%s` to:`%s`',
value, from_, to)
raise
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/time_.py | time_.py |
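# A minimal usage sketch of TimeUtil.reformat - both formats use Arrow's tokens
# and the sample values are illustrative.
def _example_reformat(): # pragma: no cover
    assert TimeUtil().reformat('23-03-2013', 'DD-MM-YYYY', 'MM/DD/YY') == '03/23/13'

# ################################################################################################################################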
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
# Bunch
from bunch import Bunch
# Python 2/3 compatibility
from builtins import bytes
from past.builtins import basestring
# Zato
from zato.common.const import SECRETS
# ################################################################################################################################
def resolve_value(key, value, decrypt_func=None, _default=object(), _secrets=SECRETS):
""" Resolves final value of a given variable by looking it up in environment if applicable.
"""
# Skip non-resolvable items
if not isinstance(value, basestring):
return value
if not value:
return value
value = value.decode('utf8') if isinstance(value, bytes) else value
# It may be an environment variable ..
if value.startswith('$'):
# .. but not if it's $$ which is a signal to skip this value ..
if value.startswith('$$'):
return value
# .. a genuine pointer to an environment variable.
else:
env_key = value[1:].strip().upper()
value = os.environ.get(env_key, _default)
# Use a placeholder if the actual environment key is missing
if value is _default:
value = 'ENV_KEY_MISSING_{}'.format(env_key)
# It may be an encrypted value
elif key in _secrets.PARAMS and value.startswith(_secrets.PREFIX):
value = decrypt_func(value)
    # Pre-processed, the value can now be returned to the caller
return value
# ################################################################################################################################
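# A minimal sketch of resolve_value - a single leading '$' points to an environment
# variable while '$$' opts out of resolution; the names below are illustrative.
def _example_resolve_value(): # pragma: no cover
    os.environ['MY_BACKEND_HOST'] = 'example.com'
    assert resolve_value('host', '$my_backend_host') == 'example.com' # Dereferenced ..
    assert resolve_value('host', '$$literal') == '$$literal'          # .. skipped ..
    assert resolve_value('host', 'abc') == 'abc'                      # .. and used as-is.

# ################################################################################################################################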
def resolve_env_variables(data):
""" Given a Bunch instance on input, iterates over all items and resolves all keys/values to ones extracted
from environment variables.
"""
out = Bunch()
for key, value in data.items():
out[key] = resolve_value(None, value)
return out | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/config.py | config.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# Requests
from requests import Session as RequestsSession
# Zato
from zato.common.api import CACHE, NotGiven
from zato.common.crypto.api import ServerCryptoManager
from zato.common.json_internal import dumps
from zato.common.util.api import as_bool, get_config, get_odb_session_from_server_dir, get_repo_dir_from_component_dir
from zato.common.odb.model import Cluster, HTTPBasicAuth, Server
# ################################################################################################################################
if 0:
from requests import Response as RequestsResponse
RequestsResponse = RequestsResponse
# ################################################################################################################################
# Maps cache operations to HTTP verbs
op_verb_map = {
'get': 'GET',
'set': 'POST',
'delete': 'DELETE'
}
# ################################################################################################################################
# ################################################################################################################################
class CommandConfig(object):
__slots__ = 'command', 'modifier', 'key', 'value', 'is_string_key', 'is_int_key', 'is_string_value', 'is_int_value', \
'is_bool_value', 'format'
def __init__(self):
self.command = None # type: str
self.modifier = None # type: str
self.key = None # type: str
self.value = None # type: str
self.is_string_key = None # type: bool
self.is_int_key = None # type: bool
self.is_string_value = None # type: bool
self.is_int_value = None # type: bool
self.is_bool_value = None # type: bool
self.format = None # type: str
def to_dict(self):
out = {}
for name in self.__slots__:
out[name] = getattr(self, name)
return out
# ################################################################################################################################
# ################################################################################################################################
class CommandResponse(object):
__slots__ = 'key', 'text', 'has_value'
def __init__(self):
self.key = None # type: object
self.text = None # type: str
self.has_value = None # type: bool
# ################################################################################################################################
# ################################################################################################################################
class Client(object):
""" An HTTP-based Zato cache client.
"""
__slots__ = 'address', 'username', 'password', 'cache_name', 'session'
def __init__(self):
self.address = None # type: str
self.username = None # type: str
self.password = None # type: str
self.cache_name = None # type: str
self.session = None # type: RequestsSession
# ################################################################################################################################
@staticmethod
def from_server_conf(server_dir, cache_name, is_https):
# type: (str, str, bool) -> Client
repo_dir = get_repo_dir_from_component_dir(server_dir)
cm = ServerCryptoManager.from_repo_dir(None, repo_dir, None)
secrets_conf = get_config(repo_dir, 'secrets.conf', needs_user_config=False)
config = get_config(repo_dir, 'server.conf', crypto_manager=cm, secrets_conf=secrets_conf)
session = None
password = None
try:
session = get_odb_session_from_server_dir(server_dir)
cluster = session.query(Server).\
filter(Server.token == config.main.token).\
one().cluster # type: Cluster
security = session.query(HTTPBasicAuth).\
filter(Cluster.id == HTTPBasicAuth.cluster_id).\
filter(HTTPBasicAuth.username == CACHE.API_USERNAME).\
filter(HTTPBasicAuth.cluster_id == cluster.id).\
first() # type: HTTPBasicAuth
if security:
password = security.password
finally:
if session:
session.close()
return Client.from_dict({
'username': CACHE.API_USERNAME,
'password': password,
'address': config.main.gunicorn_bind,
'cache_name': cache_name,
'is_https': is_https,
})
# ################################################################################################################################
@staticmethod
def from_dict(config):
# type: (dict) -> Client
client = Client()
client.username = config['username']
client.password = config['password']
client.cache_name = config['cache_name']
if config['address'].startswith('http'):
address = config['address']
else:
address = 'http{}://{}'.format('s' if config['is_https'] else '', config['address'])
client.address = address
session = RequestsSession()
if client.password:
session.auth = (client.username, client.password)
client.session = session
return client
# ################################################################################################################################
def _request(self, op, key, value=NotGiven, pattern='/zato/cache/{}', op_verb_map=op_verb_map):
        # type: (str, str, object, str, dict) -> str
# Build a full address
path = pattern.format(key)
address = '{}{}'.format(self.address, path)
# Get the HTTP verb to use in the request
verb = op_verb_map[op] # type: str
data = {
'cache': self.cache_name,
'return_prev': True
}
if value is not NotGiven:
data['value'] = value
data = dumps(data)
response = self.session.request(verb, address, data=data) # type: RequestsResponse
return response.text
# ################################################################################################################################
def run_command(self, config):
# type: (CommandConfig) -> CommandResponse
if config.value is not NotGiven:
if config.is_int_value:
value = int(config.value)
elif config.is_bool_value:
value = as_bool(config.value)
else:
value = config.value
else:
value = config.value
raw_response = self._request(config.command, config.key, value)
_response = CommandResponse()
_response.key = config.key
_response.text = raw_response
return _response
# ################################################################################################################################
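# A minimal, hypothetical usage sketch - it assumes a server is listening at the
# given address and that the credentials below are valid; all values are illustrative.
def _example_cache_client(): # pragma: no cover
    client = Client.from_dict({
        'username': 'my.username',
        'password': 'my.password',
        'address': 'localhost:17010',
        'cache_name': 'default',
        'is_https': False,
    })
    config = CommandConfig()
    config.command = 'get'
    config.key = 'my.key'
    config.value = NotGiven # No value is needed to read a key
    response = client.run_command(config)
    print(response.text)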
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/cache.py | cache.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import Formatter
# ################################################################################################################################
# ################################################################################################################################
# Based on http://stackoverflow.com/questions/384076/how-can-i-make-the-python-logging-output-to-be-colored
class ColorFormatter(Formatter):
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
RESET_SEQ = '\033[0m'
COLOR_SEQ = '\033[1;%dm'
BOLD_SEQ = '\033[1m'
COLORS = {
'WARNING': YELLOW,
'INFO': WHITE,
'DEBUG': BLUE,
'CRITICAL': YELLOW,
'ERROR': RED,
'TRACE1': YELLOW
}
def __init__(self, fmt):
self.use_color = True
super(ColorFormatter, self).__init__(fmt)
# ################################################################################################################################
def formatter_msg(self, msg, use_color=True):
if use_color:
msg = msg.replace('$RESET', self.RESET_SEQ).replace('$BOLD', self.BOLD_SEQ)
else:
msg = msg.replace('$RESET', '').replace('$BOLD', '')
return msg
# ################################################################################################################################
def format(self, record):
levelname = record.levelname
if self.use_color and levelname in self.COLORS:
fore_color = 30 + self.COLORS[levelname]
levelname_color = self.COLOR_SEQ % fore_color + levelname + self.RESET_SEQ
record.levelname = levelname_color
return Formatter.format(self, record)
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/logging_.py | logging_.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
from base64 import b64decode, b64encode
# Python 2/3 compatibility
from past.builtins import unicode
from six import PY2
# Zato
from zato.common.api import AUTH_RESULT
logger = getLogger('zato')
def parse_basic_auth(auth, prefix='Basic '):
""" Parses username/password out of incoming HTTP Basic Auth data.
"""
if not auth:
raise ValueError('No auth received in `{}` ({})'.format(auth, AUTH_RESULT.BASIC_AUTH.NO_AUTH))
if not auth.startswith(prefix):
raise ValueError('Invalid prefix in `{}` ({})'.format(auth, AUTH_RESULT.BASIC_AUTH.NO_AUTH))
_, auth = auth.split(prefix)
auth = b64decode(auth.strip())
auth = auth if PY2 else auth.decode('utf8')
return auth.split(':', 1)
# ################################################################################################################################
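# Illustrative usage only - the credentials below are made up for the example:
#
#   from base64 import b64encode
#
#   header = 'Basic ' + b64encode(b'my.user:my.secret').decode('utf8')
#   username, password = parse_basic_auth(header)
#   # username -> 'my.user', password -> 'my.secret'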
# ################################################################################################################################
# Code below comes from another project - it will be moved elsewhere at one point, hence the location of imports and definitions
# ################################################################################################################################
# ################################################################################################################################
# stdlib
from hashlib import sha1
from datetime import datetime
# Python 2/3 compatibility
from future.moves.urllib.parse import quote_plus
# lxml
from lxml import etree
# PyYAML
from yaml import dump
try:
from yaml import CDumper as Dumper
except ImportError: # pragma: no cover
from yaml import Dumper # pragma: no cover
# ################################################################################################################################
# ################################################################################################################################
class AuthResult(object):
""" Represents the result of validating a URL against the config. 'status' is the main boolean flag indicating
whether the validation was successful. 'code' equal to '0' means success and any other value is a failure;
note that 'code' may be a multi-character string including punctuation. 'description' is an optional attribute holding
any additional textual information a callee might wish to pass to the calling layer. 'auth_info' is either
an empty string or information regarding the authorization data presented by the calling application.
Instances of this class are considered True or False in boolean comparisons
according to the boolean value of self.status.
"""
def __init__(self, status=False, code='-1', description=''):
self.status = status
self.code = code
self.description = description
self._auth_info = b''
@property
def auth_info(self):
return self._auth_info
@auth_info.setter
def auth_info(self, value):
self._auth_info = dump(value, Dumper=Dumper)
def __repr__(self):
return '<{0} at {1} status={2} code={3} description={4} auth_info={5}>'.format(
self.__class__.__name__, hex(id(self)), self.status, self.code,
self.description, self.auth_info)
def __bool__(self):
""" Returns the boolean value of self.status. Useful when an instance
must be compared in a boolean context.
"""
return bool(self.status)
__nonzero__ = __bool__
# ################################################################################################################################
# ################################################################################################################################
class SecurityException(Exception):
""" Indicates problems with validating incoming requests. The 'description'
attribute holds textual information suitable for showing to human users.
"""
def __init__(self, description):
self.description = description
# ################################################################################################################################
# ################################################################################################################################
AUTH_WSSE_NO_DATA = '0003.0001'
AUTH_WSSE_VALIDATION_ERROR = '0003.0002'
AUTH_BASIC_NO_AUTH = '0004.0001'
AUTH_BASIC_INVALID_PREFIX = '0004.0002'
AUTH_BASIC_USERNAME_OR_PASSWORD_MISMATCH = '0004.0003'
# ################################################################################################################################
# ################################################################################################################################
def on_wsse_pwd(wsse, url_config, data, needs_auth_info=True):
""" Visit _RequestApp._on_wsse_pwd method's docstring.
"""
if not data:
return AuthResult(False, AUTH_WSSE_NO_DATA)
request = etree.fromstring(data)
try:
ok, wsse_username = wsse.validate(request, url_config)
except SecurityException as e:
return AuthResult(False, AUTH_WSSE_VALIDATION_ERROR, e.description)
else:
auth_result = AuthResult(True, '0')
if needs_auth_info:
auth_result.auth_info = {b'wsse-pwd-username': str(wsse_username)}
return auth_result
# ################################################################################################################################
# ################################################################################################################################
def _on_basic_auth(auth, expected_username, expected_password):
""" A low-level call for checking the HTTP Basic Auth credentials.
"""
if not auth:
return AUTH_BASIC_NO_AUTH
prefix = 'Basic '
if not auth.startswith(prefix):
return AUTH_BASIC_INVALID_PREFIX
_, auth = auth.split(prefix)
auth = auth.strip()
auth = b64decode(auth)
auth = auth if isinstance(auth, unicode) else auth.decode('utf8')
username, password = auth.split(':', 1)
if username == expected_username and password == expected_password:
return True
else:
return AUTH_BASIC_USERNAME_OR_PASSWORD_MISMATCH
# ################################################################################################################################
# ################################################################################################################################
def on_basic_auth(env, url_config, needs_auth_info=True):
""" Visit _RequestApp._on_basic_auth method's docstring.
"""
username = url_config['basic-auth-username']
result = _on_basic_auth(env.get('HTTP_AUTHORIZATION', ''), username, url_config['basic-auth-password'])
is_success = result is True # Yes, need to check for True
auth_result = AuthResult(is_success)
if is_success:
if needs_auth_info:
auth_result.auth_info = {b'basic-auth-username': quote_plus(username).encode('utf-8')}
else:
auth_result.code = result
return auth_result
# ################################################################################################################################
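# Illustrative usage only - a minimal sketch with a WSGI-like environ
# and a hypothetical URL config; the Base64 value decodes to 'my.user:my.secret':
#
#   env = {'HTTP_AUTHORIZATION': 'Basic bXkudXNlcjpteS5zZWNyZXQ='}
#   url_config = {'basic-auth-username': 'my.user', 'basic-auth-password': 'my.secret'}
#
#   result = on_basic_auth(env, url_config)
#   if result: # AuthResult instances are truthy on success
#       ...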
# ################################################################################################################################
soap_date_time_format = '%Y-%m-%dT%H:%M:%S.%fZ'
soapenv_namespace = 'http://schemas.xmlsoap.org/soap/envelope/'
soap_body_path = '/soapenv:Envelope/soapenv:Body'
soap_body_xpath = etree.XPath(soap_body_path, namespaces={'soapenv':soapenv_namespace})
wsse_namespace = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd'
wsu_namespace = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd'
wss_namespaces = {'soapenv':soapenv_namespace, 'wsse':wsse_namespace, 'wsu':wsu_namespace}
wsse_password_type_text = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText'
wsse_password_type_digest = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordDigest'
supported_wsse_password_types = (wsse_password_type_text, wsse_password_type_digest)
wsse_username_token_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken'
wsse_username_token_xpath = etree.XPath(wsse_username_token_path, namespaces=wss_namespaces)
wsse_username_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Username'
wsse_username_xpath = etree.XPath(wsse_username_path, namespaces=wss_namespaces)
wsse_password_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Password'
wsse_password_xpath = etree.XPath(wsse_password_path, namespaces=wss_namespaces)
wsse_password_type_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Password/@Type'
wsse_password_type_xpath = etree.XPath(wsse_password_type_path, namespaces=wss_namespaces)
wsse_nonce_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Nonce'
wsse_nonce_xpath = etree.XPath(wsse_nonce_path, namespaces=wss_namespaces)
wsu_username_created_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsu:Created'
wsu_username_created_xpath = etree.XPath(wsu_username_created_path, namespaces=wss_namespaces)
class WSSE(object):
""" Implements authentication using WS-Security.
"""
def _replace_username_token_elem(self, soap, old_elem, attr_name):
""" A utility function for replacing passwords and nonces with '***'
for the purpose of logging the messages without worrying of disclosing
any data known to be secret.
"""
old_elem = old_elem[0]
attr = old_elem.get(attr_name)
username_token = wsse_username_token_xpath(soap)
if not username_token:
self.error(expected_element=wsse_username_token_path)
username_token = username_token[0]
elem_idx = username_token.index(old_elem)
username_token.remove(old_elem)
new_elem = etree.Element(old_elem.tag)
new_elem.set(attr_name, attr)
new_elem.text = '***'
username_token.insert(elem_idx, new_elem)
return old_elem.text, attr
def _get_digest(self, password, nonce, created):
""" Returns the password's expected digest.
"""
        nonce = b64decode(nonce)
        # Encode the text parts upfront so that concatenation and hashing
        # work the same way under both Python 2 and 3.
        concat = nonce + (created + password).encode('utf8')
        h = sha1()
        h.update(concat)
        return b64encode(h.digest()).decode('utf8').rstrip('\n')
def error(self, description='', expected_element='', soap=None):
""" A utility function for exceptions in erronous situations. May be
subclassed if error reporting needs to be customized. The 'soap'
parameter is guaranteed to have WSSE password and token replaced
with '***' characters. Note that default implementation doesn't use
the 'soap' parameter however the subclasses are free to do so.
"""
msg = description
if expected_element:
if description:
msg += '. '
msg += 'Element [{0}] doesn\'t exist'.format(expected_element)
raise SecurityException(msg)
def check_nonce(self, wsse_nonce, now, nonce_freshness_time):
""" Checks whether the nonce has been already seen. Default implementation
lets all nonces in. More sophisticated subclasses may wish to override
this method and check the nonce against a cache of some sort.
"""
return False
def on_invalid_username(self, config, given, message):
""" Invoked when the expected and given usernames don't match.
"""
self.error('Invalid username or password')
def on_invalid_password(self, config, given_username, given_password, message):
""" Invoked when the expected and given passwords don't match.
"""
self.error('Invalid username or password')
def on_username_token_expired(self, config, elapsed, message):
""" Invoked when the username token has been found to be expired.
"""
self.error('UsernameToken has expired')
def on_nonce_non_unique(self, config, nonce, now, message):
""" Invoked when the nonce has been found not to be unique.
"""
self.error('Nonce [{0}] is not unique'.format(nonce))
def validate(self, soap, config):
        # Initialized upfront so that the password type check further below
        # does not raise a NameError if no password element was sent at all.
        wsse_password_type = None
        # Shadow the password and a nonce before any processing, getting
        # their values along the way.
        wsse_password = wsse_password_xpath(soap)
if wsse_password:
wsse_password, wsse_password_type = self._replace_username_token_elem(soap, wsse_password, 'Type')
wsse_nonce = wsse_nonce_xpath(soap)
if wsse_nonce:
wsse_nonce, wsse_encoding_type = self._replace_username_token_elem(soap, wsse_nonce, 'EncodingType')
wsse_username = wsse_username_xpath(soap)
if not wsse_username:
self.error('No username sent', wsse_username_path, soap)
wsse_username = wsse_username[0].text
if config['wsse-pwd-username'] != wsse_username:
self.on_invalid_username(config, wsse_username, soap)
if not wsse_password_type:
self.error('No password type sent', wsse_password_type_path, soap)
        if wsse_password_type not in supported_wsse_password_types:
msg = 'Unsupported password type=[{0}], not in [{1}]'.format(wsse_password_type, supported_wsse_password_types)
self.error(msg, soap=soap)
now = datetime.utcnow()
if config['wsse-pwd-reject-empty-nonce-creation']:
wsu_username_created = wsu_username_created_xpath(soap)
if not all((wsse_nonce, wsu_username_created)):
self.error('Both nonce and creation timestamp must be given', soap=soap)
else:
if wsu_username_created:
wsu_username_created = wsu_username_created[0].text
# Check nonce freshness and report error if the UsernameToken is stale.
token_created = datetime.strptime(wsu_username_created, soap_date_time_format)
elapsed = (now - token_created)
if config['wsse-pwd-reject-stale-tokens'] and elapsed.seconds > config['wsse-pwd-reject-expiry-limit']:
self.on_username_token_expired(config, elapsed, soap)
if config.get('wsse-pwd-password-digest'):
expected_password = self._get_digest(config['wsse-pwd-password'], wsse_nonce, wsu_username_created)
else:
expected_password = config.get('wsse-pwd-password')
if wsse_password != expected_password:
self.on_invalid_password(config, wsse_username, wsse_password, soap)
# Have we already seen such a nonce?
if self.check_nonce(wsse_nonce, now, config.get('wsse-pwd-nonce-freshness-time')):
self.on_nonce_non_unique(config, wsse_nonce, now, soap)
# All good, we let the client in.
return True, wsse_username
# ################################################################################################################################
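# Illustrative usage only - a sketch of a subclass overriding check_nonce
# with a simple in-memory set; a real implementation would also need to expire
# entries based on nonce_freshness_time:
#
#   class CachingWSSE(WSSE):
#
#       seen_nonces = set()
#
#       def check_nonce(self, wsse_nonce, now, nonce_freshness_time):
#           if wsse_nonce in self.seen_nonces:
#               return True
#           self.seen_nonces.add(wsse_nonce)
#           return False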
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/auth.py | auth.py |
# stdlib
from datetime import datetime, timedelta
from logging import getLogger
from mmap import mmap
from time import sleep
from traceback import format_exc
# posix-ipc
import posix_ipc as ipc
# Zato
from zato.common.json_internal import dumps, loads
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
_shmem_pattern = '/zato-shmem-{}'
# ################################################################################################################################
# ################################################################################################################################
class SharedMemoryIPC(object):
""" An IPC object which Zato processes use to communicate with each other using mmap files
backed by shared memory. All data in shared memory is kept as a dictionary and serialized as JSON
each time any read or write is needed.
"""
key_name = '<invalid>'
def __init__(self):
self.shmem_name = ''
self.size = -1
self._mmap = None
self.running = False
# ################################################################################################################################
def create(self, shmem_suffix, size, needs_create):
""" Creates all IPC structures.
"""
self.shmem_name = _shmem_pattern.format(shmem_suffix)
self.size = size
        # Create or open shared memory
logger.debug('%s shmem `%s` (%s %s)', 'Creating' if needs_create else 'Opening', self.shmem_name,
self.size, self.key_name)
try:
self._mem = ipc.SharedMemory(self.shmem_name, ipc.O_CREAT if needs_create else 0, size=self.size)
except ipc.ExistentialError:
            raise ValueError('Could not create or open shmem `{}` ({}), e:`{}`'.format(self.shmem_name, self.key_name, format_exc()))
# Map memory to mmap
self._mmap = mmap(self._mem.fd, self.size)
# Write initial data so that JSON .loads always succeeds
self.store_initial()
self.running = True
# ################################################################################################################################
def store(self, data):
""" Serializes input data as JSON and stores it in RAM, overwriting any previous data.
"""
self._mmap.seek(0)
self._mmap.write(dumps(data).encode('utf8'))
self._mmap.flush()
# ################################################################################################################################
def store_initial(self):
""" Stores initial data in shmem unless there is already data in there.
"""
if self.load(False):
return
else:
self.store({})
# ################################################################################################################################
def load(self, needs_loads=True):
""" Reads in all data from RAM and, optionally, loads it as JSON.
"""
self._mmap.seek(0)
data = self._mmap.read(self.size).strip(b'\x00')
return loads(data.decode('utf8')) if needs_loads else data
# ################################################################################################################################
def close(self):
""" Closes all underlying in-RAM structures.
"""
if not self.running:
logger.debug('Skipped close, IPC not running (%s)', self.key_name)
return
else:
logger.info('Closing IPC (%s)', self.key_name)
self._mmap.close()
try:
self._mem.unlink()
except ipc.ExistentialError:
pass
# ################################################################################################################################
def get_parent(self, parent_path, needs_data=True):
""" Returns element pointed to by parent_path, creating all elements along the way, if neccessary.
"""
data = self.load()
parent_path = [elem for elem in parent_path.split('/') if elem]
# Find or create element that is parent of input key
current = data
while parent_path:
            next_elem = parent_path.pop(0)
            current = current.setdefault(next_elem, {})
return (data, current) if needs_data else current
# ################################################################################################################################
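# Illustrative behaviour of get_parent - given an already created IPC object
# holding empty data, all intermediate dictionaries are created along the way:
#
#   ipc_.store({})
#   data, current = ipc_.get_parent('/a/b/c')
#   # data    -> {'a': {'b': {'c': {}}}}
#   # current -> {} (the innermost element)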
def set_key(self, parent, key, value):
""" Set key to value under element called 'parent'.
"""
# Get parent to add our key to - will create it if needed
data, parent = self.get_parent(parent)
# Set key to value
parent[key] = value
# Save it all back
self.store(data)
# ################################################################################################################################
def _get_key(self, parent, key):
""" Low-level implementation of get_key which does not handle timeouts.
"""
parent = self.get_parent(parent, False)
return parent[key]
# ################################################################################################################################
def get_key(self, parent, key, timeout=None, _sleep=sleep, _utcnow=datetime.utcnow):
""" Returns a specific key from parent dictionary.
"""
try:
return self._get_key(parent, key)
except KeyError:
if timeout:
now = _utcnow()
start = now
until = now + timedelta(seconds=timeout)
idx = 0
while now <= until:
try:
value = self._get_key(parent, key)
if value:
msg = 'Returning value `%s` for parent/key `%s` `%s` after %s'
logger.info(msg, value, parent, key, now - start)
return value
except KeyError:
_sleep(0.1)
idx += 1
if idx % 10 == 0:
logger.info('Waiting for parent/key `%s` `%s` (timeout: %ss)', parent, key, timeout)
now = _utcnow()
# We get here if we did not return the key within timeout seconds,
# in which case we need to log an error and raise an exception.
# Same message for logger and exception
msg = 'Could not get parent/key `{}` `{}` after {}s'.format(parent, key, timeout)
logger.warn(msg)
raise KeyError(msg)
            # No timeout given = re-raise the exception immediately
else:
raise
# ################################################################################################################################
# ################################################################################################################################
class ServerStartupIPC(SharedMemoryIPC):
""" A shared memory-backed IPC object for server startup initialization.
"""
key_name = '/pubsub/pid'
def create(self, deployment_key, size, needs_create=True):
super(ServerStartupIPC, self).create('server-{}'.format(deployment_key), size, needs_create)
def set_pubsub_pid(self, pid):
self.set_key(self.key_name, 'current', pid)
def get_pubsub_pid(self, timeout=60):
return self.get_key(self.key_name, 'current', timeout)
# ################################################################################################################################
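# Illustrative usage only - the deployment key and size below are made-up values:
#
#   ipc_ = ServerStartupIPC()
#   ipc_.create('abcdef123', 100_000)
#
#   ipc_.set_pubsub_pid(1234)
#   pid = ipc_.get_pubsub_pid(timeout=5) # -> 1234
#
#   ipc_.close()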
# ################################################################################################################################
class ConnectorConfigIPC(SharedMemoryIPC):
""" A shared memory-backed IPC object for configuration of subprocess-based containers.
"""
needs_create = False
key_name = '/connector/config'
def create(self, deployment_key, size, needs_create=True):
super(ConnectorConfigIPC, self).create('connector-config-{}'.format(deployment_key), size, needs_create)
def set_config(self, connector_key, config):
self.set_key(self.key_name, connector_key, config)
def get_config(self, connector_key, timeout=60, as_dict=False):
response = self.get_key(self.key_name, connector_key, timeout)
if response:
return loads(response) if as_dict else response
# ################################################################################################################################
# ################################################################################################################################
class CommandStoreIPC(SharedMemoryIPC):
""" A shared memory-backed IPC object for CLI commands used by Zato.
"""
needs_create = False
key_name = '/cli/command/store'
def create(self, size=100_000, needs_create=True):
super(CommandStoreIPC, self).create('cli-command-store', size, needs_create)
def add_parser(self, parser_data):
self.set_key(self.key_name, 'parser', parser_data)
def get_config(self, timeout=3):
return self.get_key(self.key_name, 'parser', timeout)
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/posix_ipc_.py | posix_ipc_.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import sys
import traceback
from logging import getLogger
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
def get_current_stack():
sep = '*' * 80
out = ['\n', sep]
for line in traceback.format_stack():
out.append(line.strip())
out.append(sep)
return '\n'.join(out)
# ################################################################################################################################
# ################################################################################################################################
def log_current_stack():
logger.info(get_current_stack())
# ################################################################################################################################
# ################################################################################################################################
# Taken from https://stackoverflow.com/a/16589622
def get_full_stack():
exc = sys.exc_info()[0]
stack = traceback.extract_stack()[:-1] # last one would be full_stack()
if exc is not None: # i.e. if an exception is present
del stack[-1] # remove call of full_stack, the printed exception will contain the caught exception caller instead
trace = 'Traceback (most recent call last):\n'
stack_string = trace + ''.join(traceback.format_list(stack))
    if exc is not None:
        # Append the formatted exception sans its own 'Traceback ...' header,
        # which would otherwise be duplicated. Note that str.lstrip cannot be
        # used here because it strips a set of characters rather than a prefix.
        exc_string = traceback.format_exc()
        if exc_string.startswith(trace):
            exc_string = exc_string[len(trace):]
        stack_string += ' ' + exc_string
return stack_string
# ################################################################################################################################
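# Illustrative usage only - logging the full stack from within an except block:
#
#   try:
#       1 / 0
#   except ZeroDivisionError:
#       logger.warning(get_full_stack())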
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/python_.py | python_.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from itertools import chain
from logging import DEBUG, getLogger
# Bunch
from bunch import bunchify
# gevent
from gevent import sleep
# SQLAlchemy
from sqlalchemy.exc import InternalError as SAInternalError, OperationalError as SAOperationalError
# Zato
from zato.common.api import GENERIC, SEARCH
from zato.common.json_internal import dumps, loads
from zato.common.odb.model import Base, SecurityBase
from zato.common.util.search import SearchResults
# ################################################################################################################################
logger_zato = getLogger('zato')
logger_pubsub = getLogger('zato_pubsub')
has_debug = logger_zato.isEnabledFor(DEBUG) or logger_pubsub.isEnabledFor(DEBUG)
# ################################################################################################################################
_default_page_size = SEARCH.ZATO.DEFAULTS.PAGE_SIZE
_max_page_size = _default_page_size * 5
# All exceptions that can be raised when deadlocks occur
_DeadlockException = (SAInternalError, SAOperationalError)
# In MySQL, 1213 = 'Deadlock found when trying to get lock; try restarting transaction'
# but the underlying PyMySQL library returns only a string rather than an integer code.
_deadlock_code = 'Deadlock found when trying to get lock'
_zato_opaque_skip_attrs=set(['needs_details', 'paginate', 'cur_page', 'query'])
# ################################################################################################################################
def search(search_func, config, filter_by, session=None, cluster_id=None, *args, **kwargs):
""" Adds search criteria to an SQLAlchemy query based on current search configuration.
"""
try:
cur_page = int(config.get('cur_page', 1))
except(ValueError, TypeError):
cur_page = 1
try:
page_size = min(int(config.get('page_size', _default_page_size)), _max_page_size)
except(ValueError, TypeError):
page_size = _default_page_size
    # We need to subtract 1 because externally our API exposes human-readable numbers,
# i.e. starting from 1, not 0, but internally the database needs 0-based slices.
if cur_page > 0:
cur_page -= 1
kwargs = {
'cur_page': cur_page,
'page_size': page_size,
'filter_by': filter_by,
'where': kwargs.get('where'),
'filter_op': kwargs.get('filter_op'),
'data_filter': kwargs.get('data_filter'),
}
query = config.get('query')
if query:
query = query.strip().split()
if query:
kwargs['query'] = query
result = search_func(session, cluster_id, *args, **kwargs)
# Fills out all the search-related information
result.set_data(cur_page, page_size)
return result
# ################################################################################################################################
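# Illustrative usage only - `http_soap_list` is a hypothetical query function
# following the (session, cluster_id, *args, **kwargs) signature that `search`
# expects, returning a SearchResults instance:
#
#   config = {'cur_page': 2, 'page_size': 25, 'query': 'billing invoice'}
#   result = search(http_soap_list, config, ['name'], session, cluster_id)
#   # result.cur_page and related attributes are filled out by set_data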
def sql_op_with_deadlock_retry(cid, name, func, *args, **kwargs):
cid = cid or None
attempts = 0
while True:
attempts += 1
if has_debug:
logger_zato.info('In sql_op_with_deadlock_retry, %s %s %s %s %r %r', attempts, cid, name, func, args, kwargs)
try:
# Call the SQL function that will possibly result in a deadlock
func(*args, **kwargs)
if has_debug:
logger_zato.info('In sql_op_with_deadlock_retry, returning True')
# This will return only if there is no exception in calling the SQL function
return True
# Catch deadlocks - it may happen because both this function and delivery tasks update the same tables
except _DeadlockException as e:
if has_debug:
logger_zato.warn('Caught _DeadlockException `%s` `%s`', cid, e)
if _deadlock_code not in e.args[0]:
raise
else:
if attempts % 50 == 0:
msg = 'Still in deadlock for `{}` after %d attempts cid:%s args:%s'.format(name)
logger_zato.warn(msg, attempts, cid, args)
logger_pubsub.warn(msg, attempts, cid, args)
                # Sleep for a while until the next attempt; note that the attempts
                # counter has already been incremented at the top of the loop.
                sleep(0.005)
# ################################################################################################################################
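# Illustrative usage only - `update_task` is a hypothetical function that may
# deadlock; the call below keeps retrying it until it goes through:
#
#   def update_task(session, task_id):
#       ...
#
#   sql_op_with_deadlock_retry(cid, 'update_task', update_task, session, task_id)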
# ################################################################################################################################
class ElemsWithOpaqueMaker(object):
def __init__(self, elems):
self.elems = elems
# ################################################################################################################################
@staticmethod
def get_opaque_data(elem):
return elem.get(GENERIC.ATTR_NAME)
has_opaque_data = get_opaque_data
# ################################################################################################################################
@staticmethod
def _set_opaque(elem, drop_opaque=False):
opaque = ElemsWithOpaqueMaker.get_opaque_data(elem)
opaque = loads(opaque) if opaque else {}
elem.update(opaque)
if drop_opaque:
del elem[GENERIC.ATTR_NAME]
# ################################################################################################################################
@staticmethod
def process_config_dict(config, drop_opaque=False):
ElemsWithOpaqueMaker._set_opaque(config, drop_opaque)
# ################################################################################################################################
def _process_elems(self, out, elems, _skip_class=(Base, list)):
for elem in elems:
if hasattr(elem, '_sa_class_manager'):
data = {}
for (name, _) in elem._sa_class_manager._all_sqla_attributes():
value = getattr(elem, name)
if name.startswith('__'):
continue
if isinstance(value, _skip_class):
continue
data[name] = value
else:
data = elem._asdict()
elem = bunchify(data)
ElemsWithOpaqueMaker._set_opaque(elem)
out.append(elem)
return out
# ################################################################################################################################
def _elems_with_opaque_search(self):
""" Resolves all opaque elements in search results.
"""
search_result = self.elems[0]
new_result = self._process_elems([], search_result.result)
search_result.result = new_result
return self.elems
# ################################################################################################################################
def get(self):
if isinstance(self.elems, tuple) and isinstance(self.elems[0], SearchResults):
return self._elems_with_opaque_search()
else:
return self._process_elems([], self.elems)
# ################################################################################################################################
# ################################################################################################################################
def elems_with_opaque(elems):
""" Turns a list of SQLAlchemy elements into a list of Bunch instances,
each possibly with its opaque elements already extracted to the level of each Bunch.
"""
return ElemsWithOpaqueMaker(elems).get()
# ################################################################################################################################
def parse_instance_opaque_attr(instance):
opaque = getattr(instance, GENERIC.ATTR_NAME)
opaque = loads(opaque) if opaque else None
if not opaque:
return {}
ElemsWithOpaqueMaker.process_config_dict(opaque)
return bunchify(opaque)
# ################################################################################################################################
def get_dict_with_opaque(instance, to_bunch=False):
opaque = parse_instance_opaque_attr(instance)
out = instance._asdict() if hasattr(instance, '_asdict') else instance.asdict()
for k, v in opaque.items():
out[k] = v
return bunchify(out) if to_bunch else out
# ################################################################################################################################
def set_instance_opaque_attrs(instance, input, skip=None, only=None, _zato_skip=_zato_opaque_skip_attrs):
""" Given an SQLAlchemy object instance and incoming SimpleIO-based input,
populates all opaque values of that instance.
"""
only = only or []
instance_opaque_attrs = None
instance_attrs = set(instance.asdict())
input_attrs = set(input)
if only:
input_attrs = set([elem for elem in input_attrs if elem in only])
instance_attrs = set([elem for elem in instance_attrs if elem not in only])
# Any extra input attributes will be treated as opaque ones
input_opaque_attrs = input_attrs - instance_attrs
# Skip attributes related to pagination
for name in chain(skip or [], _zato_skip):
input_opaque_attrs.discard(name)
# Prepare generic attributes for instance
if GENERIC.ATTR_NAME in instance_attrs:
instance_opaque_attrs = getattr(instance, GENERIC.ATTR_NAME)
if instance_opaque_attrs:
instance_opaque_attrs = loads(instance_opaque_attrs)
else:
instance_opaque_attrs = {}
for name in input_opaque_attrs:
instance_opaque_attrs[name] = input[name]
# Set generic attributes for instance
if instance_opaque_attrs is not None:
setattr(instance, GENERIC.ATTR_NAME, dumps(instance_opaque_attrs))
# ################################################################################################################################
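# Illustrative usage only - any input attributes that do not map to the
# instance's own columns end up in its opaque JSON attribute; `instance`
# is a hypothetical SQLAlchemy object with an asdict method:
#
#   input = {'name': 'My Object', 'custom_flag': True}
#   set_instance_opaque_attrs(instance, input)
#   # 'custom_flag' is now serialized under the instance's GENERIC.ATTR_NAME column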
def get_security_by_id(session, security_id):
return session.query(SecurityBase).\
filter(SecurityBase.id==security_id).\
one()
# ################################################################################################################################
def get_instance_by_id(session, model_class, id):
return session.query(model_class).\
filter(model_class.id==id).\
one()
# ################################################################################################################################
def get_instance_by_name(session, model_class, type_, name):
return session.query(model_class).\
filter(model_class.type_==type_).\
filter(model_class.name==name).\
one()
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/sql.py | sql.py |
# stdlib
import copy
import errno
import gc
import imp
import inspect
import linecache
import logging
import os
import random
import re
import signal
import threading
import socket
import sys
import unicodedata
from ast import literal_eval
from base64 import b64decode
from binascii import hexlify as binascii_hexlify
from contextlib import closing
from datetime import datetime, timedelta
from getpass import getuser as getpass_getuser
from glob import glob
from hashlib import sha256
from inspect import isfunction, ismethod
from itertools import tee
from io import StringIO
from operator import itemgetter
from os.path import abspath, isabs, join
from pathlib import Path
from pprint import pprint as _pprint, PrettyPrinter
from string import Template
from subprocess import Popen, PIPE
from tempfile import NamedTemporaryFile
from threading import current_thread
from time import sleep
from traceback import format_exc
# Bunch
from bunch import Bunch, bunchify
from dateutil.parser import parse as dt_parse
# gevent
from gevent import sleep as gevent_sleep, spawn, Timeout
from gevent.greenlet import Greenlet
from gevent.hub import Hub
# lxml
from lxml import etree, objectify
# OpenSSL
from OpenSSL import crypto
# portalocker
import portalocker
# psutil
import psutil
# pytz
import pytz
# requests
import requests
# SQLAlchemy
import sqlalchemy as sa
from sqlalchemy import orm
# Texttable
from texttable import Texttable
# Python 2/3 compatibility
from builtins import bytes
from future.moves.itertools import zip_longest
from future.utils import iteritems, raise_
from past.builtins import basestring, cmp, reduce, unicode
from six import PY3
from six.moves.urllib.parse import urlparse
from zato.common.py23_ import ifilter, izip
from zato.common.py23_.spring_ import CAValidatingHTTPSConnection, SSLClientTransport
if PY3:
from functools import cmp_to_key
# Zato
from zato.common.api import CHANNEL, CLI_ARG_SEP, DATA_FORMAT, engine_def, engine_def_sqlite, HL7, KVDB, MISC, \
SECRET_SHADOW, SIMPLE_IO, TLS, TRACE1, zato_no_op_marker, ZATO_NOT_GIVEN, ZMQ
from zato.common.broker_message import SERVICE
from zato.common.const import SECRETS
from zato.common.crypto.api import CryptoManager
from zato.common.exception import ZatoException
from zato.common.ext.configobj_ import ConfigObj
from zato.common.ext.validate_ import is_boolean, is_integer, VdtTypeError
from zato.common.json_internal import dumps, loads
from zato.common.odb.model import Cluster, HTTPBasicAuth, HTTPSOAP, IntervalBasedJob, Job, Server, Service
from zato.common.util.tcp import get_free_port, is_port_taken, wait_for_zato_ping, wait_until_port_free, wait_until_port_taken
from zato.common.util.eval_ import as_bool, as_list
from zato.common.util.file_system import fs_safe_name
from zato.common.util.logging_ import ColorFormatter
from zato.common.xml_ import soap_body_path, soap_body_xpath
from zato.hl7.parser import get_payload_from_request as hl7_get_payload_from_request
# ################################################################################################################################
if 0:
from typing import Iterable as iterable
from simdjson import Parser as SIMDJSONParser
iterable = iterable
SIMDJSONParser = SIMDJSONParser
# ################################################################################################################################
random.seed()
# ################################################################################################################################
logger = logging.getLogger(__name__)
logging.addLevelName(TRACE1, "TRACE1")
_repr_template = Template('<$class_name at $mem_loc$attrs>')
_uncamelify_re = re.compile(r'((?<=[a-z])[A-Z]|(?<!\A)[A-Z](?=[a-z]))')
_epoch = datetime.utcfromtimestamp(0) # Start of UNIX epoch
cid_symbols = '0123456789abcdefghjkmnpqrstvwxyz'
encode_cid_symbols = {idx: elem for (idx, elem) in enumerate(cid_symbols)}
cid_base = len(cid_symbols)
# ################################################################################################################################
# For pyflakes
ColorFormatter = ColorFormatter
# ################################################################################################################################
asbool = as_bool
aslist = as_list
# ################################################################################################################################
_data_format_json = DATA_FORMAT.JSON
_data_format_json_like = DATA_FORMAT.JSON, DATA_FORMAT.DICT
_data_format_xml = DATA_FORMAT.XML
_data_format_hl7_v2 = HL7.Const.Version.v2.id
# ################################################################################################################################
# Kept here for backward compatibility
get_free_port = get_free_port
is_port_taken = is_port_taken
wait_until_port_free = wait_until_port_free
wait_until_port_taken = wait_until_port_taken
# ################################################################################################################################
# We can initialize it once per process here
_hostname = socket.gethostname()
_fqdn = socket.getfqdn()
_current_host = '{}/{}'.format(_hostname, _fqdn)
_current_user = getpass_getuser()
# ################################################################################################################################
TLS_KEY_TYPE = {
crypto.TYPE_DSA: 'DSA',
crypto.TYPE_RSA: 'RSA'
}
# ################################################################################################################################
def is_method(class_, func=isfunction if PY3 else ismethod):
return func(class_)
# ################################################################################################################################
def absjoin(base, path):
""" Turns a path into an absolute path if it's relative to the base location. If the path is already an absolute path,
it is returned as-is.
"""
if isabs(path):
return path
return abspath(join(base, path))
# ################################################################################################################################
def absolutize(path, base=''):
""" Turns a relative path into an absolute one or returns it as is if it's already absolute.
"""
if not isabs(path):
path = os.path.expanduser(path)
if not isabs(path):
path = os.path.normpath(os.path.join(base, path))
return path
# ################################################################################################################################
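# Illustrative usage only:
#
#   absjoin('/opt/zato', 'config/repo') # -> '/opt/zato/config/repo'
#   absjoin('/opt/zato', '/tmp/abc')    # -> '/tmp/abc' (already absolute)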
def current_host():
return _current_host
# ################################################################################################################################
def current_user(_getpass_getuser=getpass_getuser):
return _getpass_getuser()
# ################################################################################################################################
def pprint(obj):
""" Pretty-print an object into a string buffer.
"""
# Get dicts' items.
if hasattr(obj, "items"):
obj = sorted(obj.items())
buf = StringIO()
_pprint(obj, buf)
value = buf.getvalue()
buf.close()
return value
# ################################################################################################################################
def get_zato_command():
""" Returns the full path to the 'zato' command' in a buildout environment.
"""
return os.path.join(os.path.dirname(sys.executable), 'zato')
# ################################################################################################################################
def object_attrs(_object, ignore_double_underscore, to_avoid_list, sort):
attrs = dir(_object)
if ignore_double_underscore:
attrs = ifilter(lambda elem: not elem.startswith("__"), attrs)
_to_avoid_list = getattr(_object, to_avoid_list, None) # Don't swallow exceptions
if _to_avoid_list is not None:
        attrs = ifilter(lambda elem: elem not in _to_avoid_list, attrs)
if sort:
attrs = sorted(attrs)
return attrs
# ################################################################################################################################
def make_repr(_object, ignore_double_underscore=True, to_avoid_list='repr_to_avoid', sort=True):
""" Makes a nice string representation of an object, suitable for logging purposes.
"""
attrs = object_attrs(_object, ignore_double_underscore, to_avoid_list, sort)
buff = StringIO()
for attr in attrs:
attr_obj = getattr(_object, attr)
if not callable(attr_obj):
buff.write('; %s:%r' % (attr, attr_obj))
out = _repr_template.safe_substitute(
class_name=_object.__class__.__name__, mem_loc=hex(id(_object)), attrs=buff.getvalue())
buff.close()
return out
# ################################################################################################################################
def to_form(_object):
""" Reads public attributes of an object and creates a dictionary out of it;
handy for providing initial data to a Django form which isn't backed by
a true Django model.
"""
out = {}
attrs = object_attrs(_object, True, "repr_to_avoid", False)
for attr in attrs:
out[attr] = getattr(_object, attr)
return out
# ################################################################################################################################
def get_lb_client(is_tls_enabled, lb_host, lb_agent_port, ssl_ca_certs, ssl_key_file, ssl_cert_file, timeout):
""" Returns an SSL XML-RPC client to the load-balancer.
"""
from zato.agent.load_balancer.client import LoadBalancerAgentClient, TLSLoadBalancerAgentClient
http_proto = 'https' if is_tls_enabled else 'http'
agent_uri = '{}://{}:{}/RPC2'.format(http_proto, lb_host, lb_agent_port)
if is_tls_enabled:
if sys.version_info >= (2, 7):
class Python27CompatTransport(SSLClientTransport):
def make_connection(self, host):
return CAValidatingHTTPSConnection(
host, strict=self.strict, ca_certs=self.ca_certs,
keyfile=self.keyfile, certfile=self.certfile, cert_reqs=self.cert_reqs,
ssl_version=self.ssl_version, timeout=self.timeout)
transport = Python27CompatTransport
else:
transport = None
return TLSLoadBalancerAgentClient(
agent_uri, ssl_ca_certs, ssl_key_file, ssl_cert_file, transport=transport, timeout=timeout)
else:
return LoadBalancerAgentClient(agent_uri)
# ################################################################################################################################
def tech_account_password(password_clear, salt):
    # Hash bytes rather than text so that this also works under Python 3
    data = password_clear + ':' + salt
    data = data if isinstance(data, bytes) else data.encode('utf8')
    return sha256(data).hexdigest()
# ################################################################################################################################
def new_cid(bytes=12, _random=random.getrandbits):
""" Returns a new 96-bit correlation identifier. It is *not* safe to use the ID
    for any cryptographic purposes; it is only meant to be used as a conveniently
formatted ticket attached to each of the requests processed by Zato servers.
"""
# Note that we need to convert bytes to bits here.
return hex(_random(bytes * 8))[2:]
# ################################################################################################################################
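# Illustrative usage only - the exact value is random and, because hex() drops
# leading zeroes, its length may occasionally be shorter than 24 digits:
#
#   cid = new_cid() # e.g. '7f2c4a1be03d9c55f0a1b2c3'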
def get_user_config_name(file_name):
return file_name.split('.')[0]
# ################################################################################################################################
def _get_config(conf, bunchified, needs_user_config, repo_location=None):
    # type: (ConfigObj, bool, bool, str) -> Bunch
conf = bunchify(conf) if bunchified else conf
if needs_user_config:
conf.user_config_items = {}
user_config = conf.get('user_config')
if user_config:
for name, path in user_config.items():
path = absolutize(path, repo_location)
if not os.path.exists(path):
logger.warn('User config not found `%s`, name:`%s`', path, name)
else:
user_conf = ConfigObj(path)
user_conf = bunchify(user_conf) if bunchified else user_conf
conf.user_config_items[name] = user_conf
return conf
# ################################################################################################################################
def get_config(repo_location, config_name, bunchified=True, needs_user_config=True, crypto_manager=None, secrets_conf=None,
raise_on_error=False, log_exception=True):
""" Returns the configuration object. Will load additional user-defined config files, if any are available.
"""
    # type: (str, str, bool, bool, object, object, bool, bool) -> Bunch
# Default output to produce
result = Bunch()
try:
conf_location = os.path.join(repo_location, config_name)
conf = ConfigObj(conf_location, zato_crypto_manager=crypto_manager, zato_secrets_conf=secrets_conf)
result = _get_config(conf, bunchified, needs_user_config, repo_location)
except Exception:
if log_exception:
logger.warn('Error while reading %s from %s; e:`%s`', config_name, repo_location, format_exc())
if raise_on_error:
raise
else:
return result
else:
return result
# ################################################################################################################################
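# Illustrative usage only - the path below is a made-up example:
#
#   config = get_config('/opt/zato/server1/config/repo', 'server.conf')
#   # config is a Bunch and any user-defined config files are merged
#   # into config.user_config_items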
def get_config_from_string(data):
""" A simplified version of get_config which creates a config object from string, skipping any user-defined config files.
"""
# type: (str) -> Bunch
buff = StringIO()
buff.write(data)
buff.seek(0)
conf = ConfigObj(buff)
out = _get_config(conf, True, False)
buff.close()
return out
# ################################################################################################################################
def _get_ioc_config(location, config_class):
""" Instantiates an Inversion of Control container from the given location if the location exists at all.
"""
stat = os.stat(location)
if stat.st_size:
config = config_class(location)
else:
config = None
return config
# ################################################################################################################################
def get_current_user():
return _current_user
# ################################################################################################################################
def service_name_from_impl(impl_name):
""" Turns a Zato internal service's implementation name into a shorter
service name
"""
return impl_name.replace('server.service.internal.', '')
# ################################################################################################################################
def deployment_info(method, object_, timestamp, fs_location, remote_host='', remote_user=''):
""" Returns a JSON document containing information who deployed a service
onto a server, where from and when it was.
"""
return {
'method': method,
'object': object_,
'timestamp': timestamp,
'fs_location':fs_location,
'remote_host': remote_host or os.environ.get('SSH_CONNECTION', ''),
'remote_user': remote_user,
'current_host': current_host(),
'current_user': get_current_user(),
}
# ################################################################################################################################
def get_body_payload(body):
body_children_count = body[0].countchildren()
if body_children_count == 0:
body_payload = None
elif body_children_count == 1:
body_payload = body[0].getchildren()[0]
else:
body_payload = body[0].getchildren()
return body_payload
# ################################################################################################################################
def payload_from_request(json_parser, cid, request, data_format, transport, channel_item=None):
""" Converts a raw request to a payload suitable for usage with SimpleIO.
"""
# type: (SIMDJSONParser, str, object, str, str, object)
if request is not None:
#
# JSON and dicts
#
if data_format in _data_format_json_like:
# It is possible that we have an XML request converted
# to an ObjectifiedElement instance on input and sent
# using the data format of dict. This happens in IBM MQ channels.
if isinstance(request, objectify.ObjectifiedElement):
return request
if not request:
return ''
if isinstance(request, basestring) and data_format == _data_format_json:
try:
request_bytes = request if isinstance(request, bytes) else request.encode('utf8')
try:
payload = json_parser.parse(request_bytes)
except ValueError:
payload = request_bytes
if hasattr(payload, 'as_dict'):
payload = payload.as_dict()
except ValueError:
logger.warn('Could not parse request as JSON:`%s`, (%s), e:`%s`', request, type(request), format_exc())
raise
else:
payload = request
#
# XML
#
elif data_format == _data_format_xml:
if transport == 'soap':
if isinstance(request, objectify.ObjectifiedElement):
soap = request
else:
soap = objectify.fromstring(request)
body = soap_body_xpath(soap)
if not body:
raise ZatoException(cid, 'Client did not send `{}` element'.format(soap_body_path))
payload = get_body_payload(body)
else:
if isinstance(request, objectify.ObjectifiedElement):
payload = request
elif len(request) == 0:
payload = objectify.fromstring('<empty/>')
else:
payload = objectify.fromstring(request)
#
# HL7 v2
#
elif data_format == _data_format_hl7_v2:
payload = hl7_get_payload_from_request(
request,
channel_item['data_encoding'],
channel_item['hl7_version'],
channel_item['json_path'],
channel_item['should_parse_on_input'],
channel_item['should_validate']
)
#
# Other data formats
#
else:
payload = request
else:
payload = request
return payload
# ################################################################################################################################
BZ2_EXTENSIONS = ('.tar.bz2', '.tbz')
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma')
ZIP_EXTENSIONS = ('.zip', '.whl')
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar')
ARCHIVE_EXTENSIONS = (ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS)
def splitext(path):
"""Like os.path.splitext, but take off .tar too"""
base, ext = os.path.splitext(path)
if base.lower().endswith('.tar'):
ext = base[-4:] + ext
base = base[:-4]
return base, ext
def is_archive_file(name):
"""Return True if `name` is a considered as an archive file."""
ext = splitext(name)[1].lower()
if ext in ARCHIVE_EXTENSIONS:
return True
return False
# ################################################################################################################################
def is_python_file(name):
""" Is it a Python file we can import Zato services from?
"""
    # Compare against full extensions, including the dot, so that names
    # merely ending in 'py' do not match by accident.
    for suffix in ('.py', '.pyw'):
        if name.endswith(suffix):
            return True
# ################################################################################################################################
class _DummyLink(object):
""" A dummy class for staying consistent with pip's API in certain places
below.
"""
def __init__(self, url):
self.url = url
# ################################################################################################################################
class ModuleInfo(object):
def __init__(self, file_name, module):
self.file_name = file_name
self.module = module
# ################################################################################################################################
def import_module_from_path(file_name, base_dir=None):
if not os.path.isabs(file_name):
file_name = os.path.normpath(os.path.join(base_dir, file_name))
if not os.path.exists(file_name):
raise ValueError("Module could not be imported, path:`{}` doesn't exist".format(file_name))
_, mod_file = os.path.split(file_name)
mod_name, _ = os.path.splitext(mod_file)
# Delete compiled bytecode if it exists so that imp.load_source actually picks up the source module
    for suffix in ('c', 'o'):
path = file_name + suffix
if os.path.exists(path):
os.remove(path)
return ModuleInfo(file_name, imp.load_source(mod_name, file_name))
# ################################################################################################################################
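# Illustrative usage only - the paths below are made-up examples:
#
#   info = import_module_from_path('my_service.py', '/opt/zato/services')
#   # info.module is the imported module object, info.file_name its full path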
def visit_py_source(dir_name):
    for pattern in ('*.py', '*.pyw'):
glob_path = os.path.join(dir_name, pattern)
for py_path in sorted(glob(glob_path)):
yield py_path
# ################################################################################################################################
def _os_remove(path):
""" A helper function so it's easier to mock it in unittests.
"""
return os.remove(path)
# ################################################################################################################################
def hot_deploy(parallel_server, file_name, path, delete_path=True, notify=True):
""" Hot-deploys a package if it looks like a Python module or archive.
"""
logger.debug('About to hot-deploy `%s`', path)
now = datetime.utcnow()
di = dumps(deployment_info('hot-deploy', file_name, now.isoformat(), path))
# Insert the package into the DB ..
package_id = parallel_server.odb.hot_deploy(
now, di, file_name, open(path, 'rb').read(), parallel_server.id)
# .. and optionally notify all the servers they're to pick up a delivery
if notify:
parallel_server.notify_new_package(package_id)
if delete_path:
_os_remove(path)
return package_id
# ################################################################################################################################
# As taken from http://wiki.python.org/moin/SortingListsOfDictionaries
def multikeysort(items, columns):
comparers = [((itemgetter(col[1:].strip()), -1) if col.startswith('-') else (itemgetter(col.strip()), 1)) for col in columns]
def comparer(left, right):
for fn, mult in comparers:
result = cmp(fn(left), fn(right))
if result:
return mult * result
else:
return 0
if PY3:
return sorted(items, key=cmp_to_key(comparer))
else:
return sorted(items, cmp=comparer)
# ################################################################################################################################
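# Illustrative usage only - sorting by last_name ascending and age descending:
#
#   items = [{'last_name': 'Smith', 'age': 30}, {'last_name': 'Smith', 'age': 40}]
#   multikeysort(items, ['last_name', '-age'])
#   # -> [{'last_name': 'Smith', 'age': 40}, {'last_name': 'Smith', 'age': 30}]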
# From http://docs.python.org/release/2.7/library/itertools.html#recipes
def grouper(n, iterable, fillvalue=None):
"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
args = [iter(iterable)] * n
return zip_longest(fillvalue=fillvalue, *args)
# ################################################################################################################################
def translation_name(system1, key1, value1, system2, key2):
return KVDB.SEPARATOR.join((KVDB.TRANSLATION, system1, key1, value1, system2, key2))
# ################################################################################################################################
def dict_item_name(system, key, value):
return KVDB.SEPARATOR.join((system, key, value))
# ################################################################################################################################
# From http://docs.python.org/release/2.7/library/itertools.html#recipes
def pairwise(iterable):
"s -> (s0,s1), (s1,s2), (s2, s3), ..."
a, b = tee(iterable)
next(b, None)
return izip(a, b)
# ################################################################################################################################
def from_local_to_utc(dt, tz_name, dayfirst=True):
""" What is the UTC time given the local time and the timezone's name?
"""
if not isinstance(dt, datetime):
dt = dt_parse(dt, dayfirst=dayfirst)
dt = pytz.timezone(tz_name).localize(dt)
utc_dt = pytz.utc.normalize(dt.astimezone(pytz.utc))
return utc_dt
# ################################################################################################################################
def from_utc_to_local(dt, tz_name):
""" What is the local time in the user-provided time zone name?
"""
if not isinstance(dt, datetime):
dt = dt_parse(dt)
local_tz = pytz.timezone(tz_name)
dt = local_tz.normalize(dt.astimezone(local_tz))
return dt
# ################################################################################################################################
def _utcnow():
""" See zato.common.util.utcnow for docstring.
"""
return datetime.utcnow()
# ################################################################################################################################
def utcnow():
""" A thin wrapper around datetime.utcnow added so that tests can mock it
out and return their own timestamps at will.
"""
return _utcnow()
# ################################################################################################################################
def _now(tz):
""" See zato.common.util.utcnow for docstring.
"""
return datetime.now(tz)
# ################################################################################################################################
def now(tz=None):
""" A thin wrapper around datetime.now added so that tests can mock it
out and return their own timestamps at will.
"""
return _now(tz)
# ################################################################################################################################
def datetime_to_seconds(dt):
""" Converts a datetime object to a number of seconds since UNIX epoch.
"""
return (dt - _epoch).total_seconds()
# ################################################################################################################################
# Inspired by http://stackoverflow.com/a/9283563
def uncamelify(s, separator='-', elem_func=unicode.lower):
""" Converts a CamelCaseName into a more readable one, e.g.
will turn ILikeToReadWSDLDocsNotReallyNOPENotMeQ into
i-like-to-read-wsdl-docs-not-really-nope-not-me-q or a similar one,
depending on the value of separator and elem_func.
"""
return separator.join(elem_func(elem) for elem in re.sub(_uncamelify_re, r' \1', s).split())
# ################################################################################################################################
def get_component_name(prefix='parallel'):
""" Returns a name of the component issuing a given request so it's possible
to trace which Zato component issued it.
"""
return '{}/{}/{}/{}'.format(prefix, current_host(), os.getpid(), current_thread().name)
# ################################################################################################################################
def dotted_getattr(o, path):
return reduce(getattr, path.split('.'), o)
# ################################################################################################################################
def wait_for_odb_service(session, cluster_id, service_name):
# type: (object, int, str) -> Service
# Assume we do not have it
service = None
while not service:
# Try to look it up ..
service = session.query(Service).\
filter(Service.name==service_name).\
filter(Cluster.id==cluster_id).\
first()
# .. if not found, sleep for a moment.
if not service:
sleep(1)
logger.info('Waiting for ODB service `%s`', service_name)
# If we are here, it means that the service was found so we can return it
return service
# ################################################################################################################################
def add_startup_jobs(cluster_id, odb, jobs, stats_enabled):
""" Adds internal jobs to the ODB. Note that it isn't being added
directly to the scheduler because we want users to be able to fine-tune the job's
settings.
"""
with closing(odb.session()) as session:
now = datetime.utcnow()
for item in jobs:
            if not stats_enabled and item['name'].startswith('zato.stats'):
                continue
try:
extra = item.get('extra', '')
if isinstance(extra, basestring):
extra = extra.encode('utf-8')
else:
if item.get('is_extra_list'):
extra = '\n'.join(extra)
else:
extra = dumps(extra)
if extra:
if not isinstance(extra, bytes):
extra = extra.encode('utf8')
#
# This will block as long as this service is not available in the ODB.
# It is required to do it because the scheduler may start before servers
# in which case services will not be in the ODB yet and we need to wait for them.
#
service = wait_for_odb_service(session, cluster_id, item['service'])
cluster = session.query(Cluster).\
filter(Cluster.id==cluster_id).\
one()
existing_one = session.query(Job).\
filter(Job.name==item['name']).\
filter(Job.cluster_id==cluster_id).\
first()
if existing_one:
continue
job = Job(None, item['name'], True, 'interval_based', now, cluster=cluster, service=service, extra=extra)
kwargs = {}
for name in('seconds', 'minutes'):
if name in item:
kwargs[name] = item[name]
ib_job = IntervalBasedJob(None, job, **kwargs)
session.add(job)
session.add(ib_job)
session.commit()
except Exception:
logger.warn(format_exc())
else:
logger.info('Initial job added `%s`', job.name)
# ################################################################################################################################
def hexlify(item, _hexlify=binascii_hexlify):
""" Returns a nice hex version of a string given on input.
"""
item = item if isinstance(item, unicode) else item.decode('utf8')
return ' '.join(hex(ord(elem)) for elem in item)
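# A usage sketch - each character is rendered as its hex ordinal:
#
#   >>> hexlify('abc')
#   '0x61 0x62 0x63'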
# ################################################################################################################################
def validate_input_dict(cid, *validation_info):
""" Checks that input belongs is one of allowed values.
"""
for key_name, key, source in validation_info:
if not source.has(key):
msg = 'Invalid {}:[{}]'.format(key_name, key)
log_msg = '{} (attrs: {})'.format(msg, source.attrs)
logger.warn(log_msg)
raise ZatoException(cid, msg)
# ################################################################################################################################
# Code below taken from tripod https://github.com/shayne/tripod/blob/master/tripod/sampler.py and slightly modified
# under the terms of LGPL (see LICENSE.txt file for details).
class SafePrettyPrinter(PrettyPrinter, object):
def format(self, obj, context, maxlevels, level):
try:
return super(SafePrettyPrinter, self).format(
obj, context, maxlevels, level)
except Exception:
return object.__repr__(obj)[:-1] + ' (bad repr)>', True, False
def spformat(obj, depth=None):
return SafePrettyPrinter(indent=1, width=76, depth=depth).pformat(obj)
def formatvalue(v):
s = spformat(v, depth=1).replace('\n', '')
if len(s) > 12500:
s = object.__repr__(v)[:-1] + ' (really long repr)>'
return '=' + s
def get_stack(f, with_locals=False):
limit = getattr(sys, 'tracebacklimit', None)
frames = []
n = 0
while f is not None and (limit is None or n < limit):
lineno, co = f.f_lineno, f.f_code
name, filename = co.co_name, co.co_filename
args = inspect.getargvalues(f)
linecache.checkcache(filename)
line = linecache.getline(filename, lineno, f.f_globals)
if line:
line = line.strip()
else:
line = None
frames.append((filename, lineno, name, line, f.f_locals, args))
f = f.f_back
n += 1
frames.reverse()
out = []
for filename, lineno, name, line, localvars, args in frames:
out.append(' File "%s", line %d, in %s' % (filename, lineno, name))
if line:
out.append(' %s' % line.strip())
if with_locals:
args = inspect.formatargvalues(formatvalue=formatvalue, *args)
out.append('\n Arguments: %s%s' % (name, args))
if with_locals and localvars:
out.append(' Local variables:\n')
try:
reprs = spformat(localvars)
except Exception:
reprs = "failed to format local variables"
out += [' ' + line for line in reprs.splitlines()]
out.append('')
return '\n'.join(out)
# ################################################################################################################################
def get_threads_traceback(pid):
result = {}
id_name = dict([(th.ident, th.name) for th in threading.enumerate()])
for thread_id, frame in iteritems(sys._current_frames()):
key = '{}:{}'.format(pid, id_name.get(thread_id, '(No name)'))
result[key] = get_stack(frame, True)
return result
# ################################################################################################################################
def get_greenlets_traceback(pid):
result = {}
for item in gc.get_objects():
if not isinstance(item, (Greenlet, Hub)):
continue
if not item:
continue
key = '{}:{}'.format(pid, repr(item))
result[key] = ''.join(get_stack(item.gr_frame, True))
return result
# ################################################################################################################################
def dump_stacks(*ignored):
pid = os.getpid()
table = Texttable()
table.set_cols_width((30, 90))
table.set_cols_dtype(['t', 't'])
rows = [['Proc:Thread/Greenlet', 'Traceback']]
rows.extend(sorted(iteritems(get_threads_traceback(pid))))
rows.extend(sorted(iteritems(get_greenlets_traceback(pid))))
table.add_rows(rows)
logger.info('\n' + table.draw())
# ################################################################################################################################
def register_diag_handlers():
""" Registers diagnostic handlers dumping stacks, threads and greenlets on receiving a signal.
"""
signal.signal(signal.SIGURG, dump_stacks)
# ################################################################################################################################
def parse_simple_type(value, convert_bool=True):
if convert_bool:
try:
value = is_boolean(value)
except VdtTypeError:
# It's cool, not a boolean
pass
try:
value = is_integer(value)
except VdtTypeError:
# OK, not an integer
pass
# Could be a dict or another simple type then
try:
value = literal_eval(value)
except Exception:
pass
# Either parsed out or as it was received
return value
# ################################################################################################################################
def parse_extra_into_dict(lines, convert_bool=True):
""" Creates a dictionary out of key=value lines.
"""
_extra = {}
if lines:
extra = ';'.join(lines.splitlines())
for line in extra.split(';'):
original_line = line
if line:
line = line.strip()
if line.startswith('#'):
continue
line = line.split('=', 1)
if not len(line) == 2:
raise ValueError('Each line must be a single key=value entry, not `{}`'.format(original_line))
key, value = line
value = value.strip()
value = parse_simple_type(value, convert_bool)
# OK, let's just treat it as string
_extra[key.strip()] = value
return _extra
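# A usage sketch - lines may be separated with newlines or semicolons and values
# are parsed into simple types where possible (the input below is illustrative):
#
#   >>> parse_extra_into_dict('a=1\nb=True\nc=hello')
#   {'a': 1, 'b': True, 'c': 'hello'}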
# ################################################################################################################################
# Taken from http://plumberjack.blogspot.cz/2009/09/how-to-treat-logger-like-output-stream.html
class LoggerWriter:
def __init__(self, logger, level):
self.logger = logger
self.level = level
def write(self, message):
if message != '\n':
self.logger.log(self.level, message)
# ################################################################################################################################
def validate_xpath(expr):
""" Evaluates an XPath expression thus confirming it is correct.
"""
etree.XPath(expr)
return True
# ################################################################################################################################
def get_haproxy_agent_pidfile(component_dir):
json_config = loads(open(os.path.join(component_dir, 'config', 'repo', 'lb-agent.conf')).read())
    return os.path.abspath(os.path.join(component_dir, json_config['pid_file']))
# ################################################################################################################################
def store_pidfile(component_dir, pidfile=MISC.PIDFILE):
open(os.path.join(component_dir, pidfile), 'w').write('{}'.format(os.getpid()))
# ################################################################################################################################
def get_kvdb_config_for_log(config):
config = copy.deepcopy(config)
if config.shadow_password_in_logs:
config.password = SECRET_SHADOW
return config
# ################################################################################################################################
def validate_tls_from_payload(payload, is_key=False):
with NamedTemporaryFile(prefix='zato-tls-') as tf:
payload = payload.encode('utf8') if isinstance(payload, unicode) else payload
tf.write(payload)
tf.flush()
pem = open(tf.name).read()
cert_info = crypto.load_certificate(crypto.FILETYPE_PEM, pem)
cert_info = sorted(cert_info.get_subject().get_components())
cert_info = '; '.join('{}={}'.format(k.decode('utf8'), v.decode('utf8')) for k, v in cert_info)
if is_key:
key_info = crypto.load_privatekey(crypto.FILETYPE_PEM, pem)
key_info = '{}; {} bits'.format(TLS_KEY_TYPE[key_info.type()], key_info.bits())
return '{}; {}'.format(key_info, cert_info)
else:
return cert_info
get_tls_from_payload = validate_tls_from_payload
# ################################################################################################################################
def get_tls_full_path(root_dir, component, info):
return os.path.join(root_dir, component, fs_safe_name(info) + '.pem')
# ################################################################################################################################
def get_tls_ca_cert_full_path(root_dir, info):
return get_tls_full_path(root_dir, TLS.DIR_CA_CERTS, info)
# ################################################################################################################################
def get_tls_key_cert_full_path(root_dir, info):
return get_tls_full_path(root_dir, TLS.DIR_KEYS_CERTS, info)
# ################################################################################################################################
def store_tls(root_dir, payload, is_key=False):
# Raises exception if it's not really a certificate.
info = get_tls_from_payload(payload, is_key)
pem_file_path = get_tls_full_path(root_dir, TLS.DIR_KEYS_CERTS if is_key else TLS.DIR_CA_CERTS, info)
pem_file = open(pem_file_path, 'w')
try:
portalocker.lock(pem_file, portalocker.LOCK_EX)
pem_file.write(payload)
pem_file.close()
os.chmod(pem_file_path, 0o640)
return pem_file_path
except portalocker.LockException:
pass # It's OK, something else is doing the same thing right now
# ################################################################################################################################
def replace_private_key(orig_payload):
if isinstance(orig_payload, basestring):
for item in TLS.BEGIN_END:
begin = '-----BEGIN {}PRIVATE KEY-----'.format(item)
if begin in orig_payload:
end = '-----END {}PRIVATE KEY-----'.format(item)
begin_last_idx = orig_payload.find(begin) + len(begin) + 1
                end_preceding_idx = orig_payload.find(end) - 1
                return orig_payload[0:begin_last_idx] + SECRET_SHADOW + orig_payload[end_preceding_idx:]
# No private key at all in payload
return orig_payload
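# A usage sketch - assuming that 'RSA ' is among the TLS.BEGIN_END markers, everything
# between the BEGIN/END lines is replaced with the SECRET_SHADOW placeholder so private
# keys never show up in logs:
#
#   >>> pem = '-----BEGIN RSA PRIVATE KEY-----\nMIIEpA...\n-----END RSA PRIVATE KEY-----'
#   >>> replace_private_key(pem)  # The BEGIN/END lines stay, the body becomes SECRET_SHADOW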
# ################################################################################################################################
def delete_tls_material_from_fs(server, info, full_path_func):
try:
os.remove(full_path_func(server.tls_dir, info))
except OSError as e:
if e.errno == errno.ENOENT:
# It's ok - some other worker must have deleted it already
pass
else:
raise
# ################################################################################################################################
def ping_solr(config):
result = urlparse(config.address)
requests.get('{}://{}{}'.format(result.scheme, result.netloc, config.ping_path))
# ################################################################################################################################
def ping_odoo(conn):
user_model = conn.get_model('res.users')
ids = user_model.search([('login', '=', conn.login)])
user_model.read(ids, ['login'])[0]['login']
# ################################################################################################################################
def ping_sap(conn):
conn.ping()
# ################################################################################################################################
class StaticConfig(Bunch):
def __init__(self, base_dir):
# type: (str) -> None
super(StaticConfig, self).__init__()
self.base_dir = base_dir
def read_file(self, full_path, file_name):
# type: (str, str) -> None
f = open(full_path)
file_contents = f.read()
f.close()
        # Convert to a Path object to prepare for manipulations ..
full_path = Path(full_path)
# .. this is the path to the directory containing the file
# relative to the base directory, e.g. the "config/repo/static" part
# in "/home/zato/server1/config/repo/static" ..
relative_dir = Path(full_path.parent).relative_to(self.base_dir)
# .. now, convert all the components from relative_dir into a nested Bunch of Bunch instances ..
relative_dir_elems = list(relative_dir.parts)
# .. start with ourselves ..
_bunch = self
# .. if there are no directories leading to the file, simply assign
# its name to self and return ..
if not relative_dir_elems:
_bunch[file_name] = file_contents
return
# .. otherwise, if there are directories leading to the file,
# iterate until they exist and convert their names to Bunch keys ..
while relative_dir_elems:
# .. name of a directory = a Bunch key ..
elem = relative_dir_elems.pop(0)
# .. attach to the parent Bunch as a new Bunch instance ..
_bunch = _bunch.setdefault(elem, Bunch())
# .. this was the last directory to visit so we can now attach the file name and its contents
# to the Bunch instance representing this directory.
if not relative_dir_elems:
_bunch[file_name] = file_contents
def read_directory(self, root_dir):
for elem in Path(root_dir).rglob('*'): # type: Path
full_path = str(elem)
try:
if elem.is_file():
self.read_file(full_path, elem.name)
except Exception as e:
logger.warn('Could not read file `%s`, e:`%s`', full_path, e.args)
# ################################################################################################################################
def add_scheduler_jobs(api, odb, cluster_id, spawn=True):
for(id, name, is_active, job_type, start_date, extra, service_name, _,
_, weeks, days, hours, minutes, seconds, repeats, cron_definition)\
in odb.get_job_list(cluster_id):
job_data = Bunch({'id':id, 'name':name, 'is_active':is_active,
'job_type':job_type, 'start_date':start_date,
'extra':extra, 'service':service_name, 'weeks':weeks,
'days':days, 'hours':hours, 'minutes':minutes,
'seconds':seconds, 'repeats':repeats,
'cron_definition':cron_definition})
if is_active:
api.create_edit('create', job_data, spawn=spawn)
else:
logger.info('Not adding an inactive job `%s`', job_data)
# ################################################################################################################################
def get_basic_auth_credentials(auth):
if not auth:
return None, None
prefix = 'Basic '
if not auth.startswith(prefix):
return None, None
_, auth = auth.split(prefix)
    auth = b64decode(auth.strip())
    # b64decode returns bytes under Python 3 so we decode it first to be able to split on a string below
    auth = auth if isinstance(auth, unicode) else auth.decode('utf8')
    return auth.split(':', 1)
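# A usage sketch - the header below encodes the illustrative credentials 'user:secret':
#
#   >>> get_basic_auth_credentials('Basic dXNlcjpzZWNyZXQ=')
#   ['user', 'secret']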
# ################################################################################################################################
def parse_tls_channel_security_definition(value):
# type: (bytes) -> iterable(str, str)
if not value:
raise ValueError('No definition given `{}`'.format(repr(value)))
else:
if isinstance(value, bytes):
value = value.decode('utf8')
for line in value.splitlines():
line = line.strip()
if not line:
continue
if not '=' in line:
raise ValueError("Line `{}` has no '=' key/value separator".format(line))
# It's possible we will have multiple '=' symbols.
sep_index = line.find('=')
key, value = line[:sep_index], line[sep_index+1:]
if not key:
raise ValueError('Key missing in line `{}`'.format(line))
if not value:
raise ValueError('Value missing in line `{}`'.format(line))
yield 'HTTP_X_ZATO_TLS_{}'.format(key.upper()), value
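# A usage sketch with an illustrative two-line definition:
#
#   >>> list(parse_tls_channel_security_definition(b'CN=My Client\nO=My Org'))
#   [('HTTP_X_ZATO_TLS_CN', 'My Client'), ('HTTP_X_ZATO_TLS_O', 'My Org')]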
# ################################################################################################################################
def get_http_json_channel(name, service, cluster, security):
return HTTPSOAP(None, '{}.json'.format(name), True, True, 'channel', 'plain_http', None, '/zato/json/{}'.format(name),
None, '', None, SIMPLE_IO.FORMAT.JSON, service=service, cluster=cluster, security=security)
# ################################################################################################################################
def get_http_soap_channel(name, service, cluster, security):
return HTTPSOAP(None, name, True, True, 'channel', 'soap', None, '/zato/soap', None, name, '1.1',
SIMPLE_IO.FORMAT.XML, service=service, cluster=cluster, security=security)
# ################################################################################################################################
def get_engine(args):
return sa.create_engine(get_engine_url(args))
# ################################################################################################################################
def get_session(engine):
session = orm.sessionmaker() # noqa
session.configure(bind=engine)
return session()
# ################################################################################################################################
def get_crypto_manager_from_server_config(config, repo_dir):
priv_key_location = os.path.abspath(os.path.join(repo_dir, config.crypto.priv_key_location))
cm = CryptoManager(priv_key_location=priv_key_location)
cm.load_keys()
return cm
# ################################################################################################################################
def get_odb_session_from_server_config(config, cm, odb_password_encrypted):
engine_args = Bunch()
engine_args.odb_type = config.odb.engine
engine_args.odb_user = config.odb.username
engine_args.odb_host = config.odb.host
engine_args.odb_port = config.odb.port
engine_args.odb_db_name = config.odb.db_name
if odb_password_encrypted:
engine_args.odb_password = cm.decrypt(config.odb.password) if config.odb.password else ''
else:
engine_args.odb_password = config.odb.password
return get_session(get_engine(engine_args))
# ################################################################################################################################
def get_odb_session_from_component_dir(component_dir, config_file, CryptoManagerClass):
repo_dir = get_repo_dir_from_component_dir(component_dir)
cm = CryptoManagerClass.from_repo_dir(None, repo_dir, None)
secrets_conf = get_config(repo_dir, 'secrets.conf', needs_user_config=False)
config = get_config(repo_dir, config_file, crypto_manager=cm, secrets_conf=secrets_conf)
return get_odb_session_from_server_config(config, None, False)
# ################################################################################################################################
def get_odb_session_from_server_dir(server_dir):
# Zato
from zato.common.crypto.api import ServerCryptoManager
return get_odb_session_from_component_dir(server_dir, 'server.conf', ServerCryptoManager)
# ################################################################################################################################
def get_server_client_auth(config, repo_dir, cm, odb_password_encrypted):
""" Returns credentials to authenticate with against Zato's own /zato/admin/invoke channel.
"""
session = get_odb_session_from_server_config(config, cm, odb_password_encrypted)
with closing(session) as session:
cluster = session.query(Server).\
filter(Server.token == config.main.token).\
one().cluster
channel = session.query(HTTPSOAP).\
filter(HTTPSOAP.cluster_id == cluster.id).\
filter(HTTPSOAP.url_path == '/zato/admin/invoke').\
filter(HTTPSOAP.connection== 'channel').\
one()
if channel.security_id:
security = session.query(HTTPBasicAuth).\
filter(HTTPBasicAuth.id == channel.security_id).\
first()
if security:
password = security.password.replace(SECRETS.PREFIX, '')
if password.startswith(SECRETS.EncryptedMarker):
password = cm.decrypt(password)
return (security.username, password)
# ################################################################################################################################
def get_client_from_server_conf(server_dir, require_server=True, stdin_data=None):
# Imports go here to avoid circular dependencies
from zato.client import get_client_from_server_conf as client_get_client_from_server_conf
# Get the client object ..
client = client_get_client_from_server_conf(server_dir, get_server_client_auth, get_config, stdin_data=stdin_data)
# .. make sure the server is available ..
if require_server:
wait_for_zato_ping(client.address)
# .. return the client to our caller now.
return client
# ################################################################################################################################
def get_repo_dir_from_component_dir(component_dir):
# type: (str) -> str
return os.path.join(os.path.abspath(os.path.join(component_dir)), 'config', 'repo')
# ################################################################################################################################
django_sa_mappings = {
'NAME': 'db_name',
'HOST': 'host',
'PORT': 'port',
'USER': 'username',
'PASSWORD': 'password',
'odb_type': 'engine',
'db_type': 'engine',
}
cli_sa_mappings = {
'odb_db_name': 'db_name',
'odb_host': 'host',
'odb_port': 'port',
'odb_user': 'username',
'odb_password': 'password',
'odb_type': 'engine',
}
# ################################################################################################################################
def get_engine_url(args):
attrs = {}
is_sqlite = False
is_django = 'NAME' in args
has_get = getattr(args, 'get', False)
odb_type = getattr(args, 'odb_type', None)
if odb_type:
is_sqlite = odb_type == 'sqlite'
else:
is_sqlite = args.get('engine') == 'sqlite' or args.get('db_type') == 'sqlite'
names = ('engine', 'username', 'password', 'host', 'port', 'name', 'db_name', 'db_type', 'sqlite_path', 'odb_type',
'odb_user', 'odb_password', 'odb_host', 'odb_port', 'odb_db_name', 'odb_type', 'ENGINE', 'NAME', 'HOST', 'USER',
'PASSWORD', 'PORT')
for name in names:
if has_get:
attrs[name] = args.get(name, '')
else:
attrs[name] = getattr(args, name, '')
# Re-map Django params into SQLAlchemy params
if is_django:
for name in django_sa_mappings:
value = attrs.get(name, ZATO_NOT_GIVEN)
if value != ZATO_NOT_GIVEN:
                    if not value and name in ('db_type', 'odb_type'):
continue
attrs[django_sa_mappings[name]] = value
# Zato CLI to SQLAlchemy
if not attrs.get('engine'):
for name in cli_sa_mappings:
value = attrs.get(name, ZATO_NOT_GIVEN)
if value != ZATO_NOT_GIVEN:
attrs[cli_sa_mappings[name]] = value
# Re-map server ODB params into SQLAlchemy params
if attrs['engine'] == 'sqlite':
db_name = attrs.get('db_name')
sqlite_path = attrs.get('sqlite_path')
if db_name:
attrs['sqlite_path'] = db_name
if sqlite_path:
attrs['db_name'] = sqlite_path
return (engine_def_sqlite if is_sqlite else engine_def).format(**attrs)
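# A usage sketch - assuming engine_def follows the usual
# '{engine}://{username}:{password}@{host}:{port}/{db_name}' shape, a dict with
# illustrative CLI-style values produces a typical SQLAlchemy URL:
#
#   >>> args = {'engine': 'postgresql', 'username': 'zato', 'password': 'secret',
#   ...         'host': 'localhost', 'port': 5432, 'db_name': 'zatodb'}
#   >>> get_engine_url(args)
#   'postgresql://zato:secret@localhost:5432/zatodb'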
# ################################################################################################################################
def startup_service_payload_from_path(name, value, repo_location):
""" Reads payload from a local file. Abstracted out to ease in testing.
"""
orig_path = value.replace('file://', '')
if not os.path.isabs(orig_path):
path = os.path.normpath(os.path.join(repo_location, orig_path))
else:
path = orig_path
try:
payload = open(path).read()
except Exception:
logger.warn(
'Could not open payload path:`%s` `%s`, skipping startup service:`%s`, e:`%s`', orig_path, path, name, format_exc())
else:
return payload
# ################################################################################################################################
def invoke_startup_services(source, key, fs_server_config, repo_location, broker_client=None, service_name=None,
skip_include=True, worker_store=None, is_sso_enabled=False):
""" Invoked when we are the first worker and we know we have a broker client and all the other config is ready
so we can publish the request to execute startup services. In the worst case the requests will get back to us but it's
also possible that other workers are already running. In short, there is no guarantee that any server or worker in particular
will receive the requests, only that there will be exactly one.
"""
for name, payload in iteritems(fs_server_config.get(key, {})):
# Don't invoke SSO services if the feature is not enabled
if not is_sso_enabled:
if 'zato.sso' in name:
continue
if service_name:
# We are to skip this service:
if skip_include:
if name == service_name:
continue
# We are to include this service only, any other is rejected
else:
if name != service_name:
continue
if isinstance(payload, basestring) and payload.startswith('file://'):
payload = startup_service_payload_from_path(name, payload, repo_location)
if not payload:
continue
cid = new_cid()
msg = {}
msg['action'] = SERVICE.PUBLISH.value
msg['service'] = name
msg['payload'] = payload
msg['cid'] = cid
msg['channel'] = CHANNEL.STARTUP_SERVICE
if broker_client:
broker_client.invoke_async(msg)
else:
worker_store.on_message_invoke_service(msg, msg['channel'], msg['action'])
# ################################################################################################################################
def timeouting_popen(command, timeout, timeout_msg, rc_non_zero_msg, common_msg=''):
""" Runs a command in background and returns its return_code, stdout and stderr.
stdout and stderr will be None if return code = 0
"""
stdout, stderr = None, None
# Run the command
p = Popen(command, stdout=PIPE, stderr=PIPE)
# Sleep as long as requested and poll for results
sleep(timeout)
p.poll()
if p.returncode is None:
msg = timeout_msg + common_msg + 'command:[{}]'.format(command)
raise Exception(msg.format(timeout))
else:
if p.returncode != 0:
stdout, stderr = p.communicate()
msg = rc_non_zero_msg + common_msg + 'command:[{}], return code:[{}], stdout:[{}], stderr:[{}] '.format(
command, p.returncode, stdout, stderr)
raise Exception(msg)
return p.returncode
# ################################################################################################################################
def spawn_greenlet(callable, *args, **kwargs):
""" Spawns a new greenlet and wait up to timeout seconds for its response. It is expected that the response never arrives
because if it does, it means that there were some errors.
"""
try:
timeout = kwargs.pop('timeout', 0.2)
g = spawn(callable, *args, **kwargs)
gevent_sleep(0)
g.join(timeout)
if g.exception:
type_, value, traceback = g.exc_info
raise_(type_(value, str(g.exception)), None, traceback)
except Timeout:
pass # Timeout = good = no errors
else:
return g
# ################################################################################################################################
def get_logger_for_class(class_):
return logging.getLogger('{}.{}'.format(inspect.getmodule(class_).__name__, class_.__name__))
# ################################################################################################################################
def get_worker_pids():
""" Returns all sibling worker PIDs of the server process we are being invoked on, including our own worker too.
"""
return sorted(elem.pid for elem in psutil.Process(psutil.Process().ppid()).children())
# ################################################################################################################################
def update_bind_port(data, idx):
address_info = urlparse(data.address)
base, port = address_info.netloc.split(':')
port = int(port) + idx
data.address = '{}://{}:{}{}'.format(address_info.scheme, base, port, address_info.path)
data.bind_port = port
# ################################################################################################################################
def start_connectors(worker_store, service_name, data):
for idx, pid in enumerate(get_worker_pids()):
if 'socket_method' in data and data.socket_method == ZMQ.METHOD_NAME.BIND:
update_bind_port(data, idx)
worker_store.server.invoke(service_name, data, pid=pid, is_async=True, data_format=DATA_FORMAT.DICT)
# ################################################################################################################################
def require_tcp_port(address):
if not ':' in address:
raise Exception('No TCP port in {}'.format(address))
port = address.split(':')[-1]
if not port.strip():
raise Exception('No TCP port in {}'.format(address))
try:
int(port)
except ValueError:
raise Exception('Invalid TCP port in {}'.format(address))
# ################################################################################################################################
def update_apikey_username_to_channel(config):
config.username = 'HTTP_{}'.format(config.get('username', '').upper().replace('-', '_'))
# ################################################################################################################################
def get_response_value(response):
""" Extracts the actual response string from a response object produced by services.
"""
return (response.payload.getvalue() if hasattr(response.payload, 'getvalue') else response.payload) or ''
# ################################################################################################################################
def get_lb_agent_json_config(repo_dir):
return loads(open(os.path.join(repo_dir, 'lb-agent.conf')).read())
# ################################################################################################################################
def parse_cmd_line_options(argv):
options = argv.split(CLI_ARG_SEP)
options = '\n'.join(options)
return parse_extra_into_dict(options)
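# A usage sketch - CLI_ARG_SEP is the separator used when packing key=value options
# into a single argv element (the values below are illustrative):
#
#   >>> parse_cmd_line_options(CLI_ARG_SEP.join(['fg=True', 'sync_internal=False']))
#   {'fg': True, 'sync_internal': False}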
# ################################################################################################################################
def get_sa_model_columns(model):
""" Returns all columns (as string) of an input SQLAlchemy model.
"""
return [elem.key for elem in model.__table__.columns]
# ################################################################################################################################
def is_class_pubsub_hook(class_):
""" Returns True if input class subclasses PubSubHook.
"""
# Imported here to avoid circular dependencies
from zato.server.service import PubSubHook
return issubclass(class_, PubSubHook) and (class_ is not PubSubHook)
# ################################################################################################################################
def ensure_pubsub_hook_is_valid(self, input, instance, attrs):
""" An instance hook that validates if an optional pub/sub hook given on input actually subclasses PubSubHook.
"""
if input.get('hook_service_id'):
impl_name = self.server.service_store.id_to_impl_name[input.hook_service_id]
details = self.server.service_store.services[impl_name]
if not is_class_pubsub_hook(details['service_class']):
raise ValueError('Service `{}` is not a PubSubHook subclass'.format(details['name']))
# ################################################################################################################################
def is_func_overridden(func):
""" Returns True if input func was overridden by user in a subclass - used to decide
whether users implemented a given hook. If there is a special internal marker in input arguments,
it means that it is an internal function from parent class, not a user-defined one.
"""
if func and is_method(func):
func_defaults = func.__defaults__ if PY3 else func.im_func.func_defaults
# Only internally defined methods will fulfill conditions that they have default arguments
# and one of them is our no-op marker, hence if we negate it and the result is True,
# it means it must have been a user-defined method.
if not (func_defaults and isinstance(func_defaults, tuple) and zato_no_op_marker in func_defaults):
return True
# ################################################################################################################################
def get_sql_engine_display_name(engine, fs_sql_config):
display_name = None
for key, value in fs_sql_config.items():
if key == engine:
display_name = value.get('display_name')
break
if not display_name:
raise ValueError('Could not find display name for engine `{}` in config `{}`'.format(
engine, fs_sql_config))
else:
return display_name
# ################################################################################################################################
def pretty_format_float(value):
return ('%f' % value).rstrip('0').rstrip('.') if value else value
# ################################################################################################################################
# The slugify function below is taken from Django:
"""
Copyright (c) Django Software Foundation and individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Django nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
def slugify(value, allow_unicode=False):
""" Convert to ASCII if 'allow_unicode' is False. Convert spaces to underscores.
Remove characters that aren't alphanumerics, underscores, or hyphens.
Convert to lowercase. Also strip leading and trailing whitespace.
"""
if allow_unicode:
value = unicodedata.normalize('NFKC', value)
        value = re.sub(r'[^\w\s-]', '', value, flags=re.U).strip().lower()
        return re.sub(r'[-\s]+', '_', value, flags=re.U)
    value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
    value = re.sub(r'[^\w\s-]', '', value).strip().lower()
    return re.sub(r'[-\s]+', '_', value)
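# A usage sketch:
#
#   >>> slugify('Hello World! How are you?')
#   'hello_world_how_are_you'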
# ################################################################################################################################
def wait_for_predicate(predicate_func, timeout, interval, *args, **kwargs):
# type: (object, int, float, *object, **object) -> bool
is_fulfilled = predicate_func(*args, **kwargs)
if not is_fulfilled:
start = datetime.utcnow()
wait_until = start + timedelta(seconds=timeout)
while not is_fulfilled:
gevent_sleep(interval)
is_fulfilled = predicate_func(*args, **kwargs)
if datetime.utcnow() > wait_until:
break
return is_fulfilled
# ################################################################################################################################
def wait_for_dict_key(_dict, key, timeout=30, interval=0.01):
# type: (dict, object, int, float) -> bool
def _predicate_dict_key(*_ignored_args, **_ignored_kwargs):
return key in _dict
return wait_for_predicate(_predicate_dict_key, timeout, interval)
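# A usage sketch - this blocks until another greenlet populates the key or the timeout
# elapses; with an empty dict and a short timeout, the call simply returns False:
#
#   >>> wait_for_dict_key({}, 'is_ready', timeout=1)
#   False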
# ################################################################################################################################
def hex_sequence_to_bytes(elems):
# type: (str) -> bytes
elems = [int(elem.strip(), 16) for elem in elems.split()]
elems = [chr(elem) for elem in elems]
elems = [bytes(elem, 'utf8') for elem in elems]
return b''.join(elems)
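# A usage sketch - input is a string of space-separated hex numbers:
#
#   >>> hex_sequence_to_bytes('7a 61 74 6f')
#   b'zato'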
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/api.py | api.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
import sys
from logging import getLogger
from tempfile import mkstemp
from time import time, sleep
# Sarge
from sarge import run as sarge_run, shell_format
# Python 2/3 compatibility
from six import PY2
# Zato
from zato.common.api import CLI_ARG_SEP
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
stderr_sleep_fg = 0.9
stderr_sleep_bg = 1.2
# ################################################################################################################################
# This is for convenience of switching to a newer version of sarge in the future. Newer versions use async_ instead of async.
async_keyword = 'async' if PY2 else 'async_'
# ################################################################################################################################
import platform
system = platform.system()
is_windows = 'windows' in system.lower()
# ################################################################################################################################
def get_executable():
""" Returns the wrapper which buildout uses for executing Zato commands,
the one with all the dependencies added to PYTHONPATH.
"""
if is_windows:
return os.path.join(os.path.dirname(sys.executable), 'python.exe')
return os.path.join(os.path.dirname(sys.executable), 'py')
# ################################################################################################################################
class _StdErr(object):
# Some log messages (like the ones produced by PyKafka) go to stderr but they are not really errors,
# in which case we need to ignore them.
ignored = [
'Could not load pykafka.rdkafka extension.'
]
def __init__(self, path, timeout):
self.path = path
self.timeout = timeout
# ################################################################################################################################
def wait_for_error(self):
now = time()
while time() - now < self.timeout:
sleep(0.1)
            # A context manager makes sure the file is closed in each iteration, including the one that returns
            with open(self.path) as _stderr:
                _err = _stderr.read()
            if _err and (not self.should_ignore(_err)):
                return _err
# ################################################################################################################################
def should_ignore(self, err):
for item in self.ignored:
if err.endswith(item):
return True
# ################################################################################################################################
def start_process(component_name, executable, run_in_fg, cli_options, extra_cli_options='', on_keyboard_interrupt=None,
failed_to_start_err=-100, extra_options=None, stderr_path=None, stdin_data=None, async_keyword=async_keyword):
""" Starts a new process from a given Python path, either in background or foreground (run_in_fg).
"""
stderr_path = stderr_path or mkstemp('-zato-start-{}.txt'.format(component_name.replace(' ','')))[1]
stdout_redirect = ''
stderr_redirect = ''
if not is_windows:
if not run_in_fg:
stdout_redirect = '1> /dev/null'
stderr_redirect = '2> {}'.format(stderr_path)
program = '{} {} {} {}'.format(executable, extra_cli_options, stdout_redirect, stderr_redirect)
try:
_stderr = _StdErr(stderr_path, stderr_sleep_fg if run_in_fg else stderr_sleep_bg)
run_kwargs = {
async_keyword: False if run_in_fg else True,
}
# Do not send input if it does not really exist because it prevents pdb from attaching to a service's stdin
if stdin_data:
run_kwargs['input'] = stdin_data
sarge_run(program, **run_kwargs)
# Wait a moment for any potential errors
_err = _stderr.wait_for_error()
if _err:
if 'Could not load pykafka.rdkafka extension.' not in _err:
logger.warn('Stderr received from program `%s` e:`%s`, kw:`%s`', program, _err, run_kwargs)
sys.exit(failed_to_start_err)
except KeyboardInterrupt:
if on_keyboard_interrupt:
on_keyboard_interrupt()
sys.exit(0)
# ################################################################################################################################
def start_python_process(component_name, run_in_fg, py_path, program_dir, on_keyboard_interrupt=None, failed_to_start_err=-100,
extra_options=None, stderr_path=None, stdin_data=None):
""" Starts a new process from a given Python path, either in background or foreground (run_in_fg).
"""
options = {
'fg': run_in_fg,
}
if extra_options:
options.update(extra_options)
options = CLI_ARG_SEP.join('{}={}'.format(k, v) for k, v in options.items())
py_path_option = shell_format('-m {0}', py_path)
program_dir_option = shell_format('{0}', program_dir) if program_dir else ''
    extra_cli_options = '{} '.format(py_path_option)
if program_dir_option:
extra_cli_options += '{} '.format(program_dir_option)
extra_cli_options += '{}'.format(options)
return start_process(component_name, get_executable(), run_in_fg, None, extra_cli_options, on_keyboard_interrupt,
failed_to_start_err, extra_options, stderr_path, stdin_data)
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/proc.py | proc.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# Zato
from zato.common.api import WEB_SOCKET
# ################################################################################################################################
logger_zato = getLogger('zato')
logger_wsx = getLogger('zato_web_socket')
# ################################################################################################################################
msg_cleanup_error = 'WSX cleanup error, wcr:`%d`, si:`%s`, pci:`%s`, sk_list:`%s`, h:`%r`, hs:`%r`, hr:`%r`, ofl:`%s`, e:`%s`'
# ################################################################################################################################
_on_disconnected = WEB_SOCKET.HOOK_TYPE.ON_DISCONNECTED
# ################################################################################################################################
def find_wsx_environ(service, raise_if_not_found=True):
wsx_environ = service.wsgi_environ.get('zato.request_ctx.async_msg', {}).get('environ')
if not wsx_environ:
if raise_if_not_found:
raise Exception('Could not find `[\'zato.request_ctx.async_msg\'][\'environ\']` in WSGI environ `{}`'.format(
service.wsgi_environ))
else:
return wsx_environ
# ################################################################################################################################
def cleanup_wsx_client(wsx_cleanup_required, service_invoker, pub_client_id, sub_keys, hook, hook_service, hook_request,
opaque_func_list=None):
""" Cleans up information about a WSX client that has disconnected.
"""
try:
        # Sometimes it will not be needed at all, e.g. when we clean up a half-opened connection that never
        # successfully authenticated.
if wsx_cleanup_required:
# Deletes state from SQL
service_invoker('zato.channel.web-socket.client.delete-by-pub-id', {
'pub_client_id': pub_client_id,
})
if sub_keys:
# Deletes across all workers the in-RAM pub/sub state about the client that is disconnecting
service_invoker('zato.channel.web-socket.client.unregister-ws-sub-key', {
'sub_key_list': sub_keys,
})
# An opaque list of functions to invoke - each caller may decide what else should be carried out here
for func in opaque_func_list or []:
func()
# Run the relevant on_disconnected hook, if any is available (even if the session was never opened)
if hook:
hook(_on_disconnected, hook_service, **hook_request)
except Exception as e:
for logger in logger_zato, logger_wsx:
logger.info(msg_cleanup_error, wsx_cleanup_required, service_invoker, pub_client_id, sub_keys, hook,
hook_service, hook_request, opaque_func_list, e)
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/wsx.py | wsx.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import errno
from datetime import datetime, timedelta
from logging import getLogger
from socket import timeout as SocketTimeoutException
from time import sleep
from traceback import format_exc
# gevent
from gevent import socket
from gevent.server import StreamServer
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
class SocketReaderCtx:
""" Configuration and context used to read that from sockets via read_from_socket.
"""
__slots__ = 'conn_id', 'socket', 'max_wait_time', 'max_msg_size', 'read_buffer_size', 'recv_timeout', \
'should_log_messages', 'buffer', 'is_ok', 'reason'
def __init__(self, conn_id, socket, max_wait_time, max_msg_size, read_buffer_size, recv_timeout, should_log_messages):
# type: (str, socket, int, int, int, int, object)
self.conn_id = conn_id
self.socket = socket
self.max_wait_time = max_wait_time
self.max_msg_size = max_msg_size
self.read_buffer_size = read_buffer_size
self.recv_timeout = recv_timeout
self.should_log_messages = should_log_messages
self.buffer = []
self.is_ok = False
self.reason = ''
# ################################################################################################################################
# ################################################################################################################################
def get_free_port(start=30000):
port = start
while is_port_taken(port):
port += 1
return port
# ################################################################################################################################
# Taken from http://grodola.blogspot.com/2014/04/reimplementing-netstat-in-cpython.html
def is_port_taken(port):
# psutil
import psutil
# Zato
from .platform_ import is_linux
# Shortcut for Linux so as not to bind to a socket which in turn means waiting until it's closed by OS
if is_linux:
for conn in psutil.net_connections(kind='tcp'):
if conn.laddr[1] == port and conn.status == psutil.CONN_LISTEN:
return True
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.bind(('', port))
sock.close()
except socket.error as e:
if e.args[0] == errno.EADDRINUSE:
return True
raise
# ################################################################################################################################
def _is_port_ready(port, needs_taken):
taken = is_port_taken(port)
return taken if needs_taken else not taken
# ################################################################################################################################
def _wait_for_port(port, timeout, interval, needs_taken):
port_ready = _is_port_ready(port, needs_taken)
if not port_ready:
start = datetime.utcnow()
wait_until = start + timedelta(seconds=timeout)
while not port_ready:
sleep(interval)
port_ready = _is_port_ready(port, needs_taken)
if datetime.utcnow() > wait_until:
break
return port_ready
# ################################################################################################################################
def wait_for_zato(address, url_path, timeout=60, interval=0.1):
""" Waits until a Zato server responds.
"""
# Requests
from requests import get as requests_get
# Imported here to avoid circular imports
from zato.common.util.api import wait_for_predicate
# Full URL to check a Zato server under
url = address + url_path
def _predicate_zato_ping(*ignored_args, **ignored_kwargs):
try:
requests_get(url, timeout=interval)
except Exception as e:
logger.warn('Waiting for `%s` (%s)', url, e)
else:
return True
return wait_for_predicate(_predicate_zato_ping, timeout, interval, address)
# ################################################################################################################################
def wait_for_zato_ping(address, timeout=60, interval=0.1):
""" Waits for timeout seconds until address replies to a request sent to /zato/ping.
"""
wait_for_zato(address, '/zato/ping', timeout, interval)
# ################################################################################################################################
def wait_until_port_taken(port, timeout=2, interval=0.1):
""" Waits until a given TCP port becomes taken, i.e. a process binds to a TCP socket.
"""
return _wait_for_port(port, timeout, interval, True)
# ################################################################################################################################
def wait_until_port_free(port, timeout=2, interval=0.1):
""" Waits until a given TCP port becomes free, i.e. a process releases a TCP socket.
"""
return _wait_for_port(port, timeout, interval, False)
# ################################################################################################################################
def get_fqdn_by_ip(ip_address, default, log_msg_prefix):
# type: (str, str) -> str
try:
host = socket.gethostbyaddr(ip_address)[0]
return socket.getfqdn(host)
except Exception:
logger.warn('%s exception in FQDN lookup `%s`', log_msg_prefix, format_exc())
return '(unknown-{}-fqdn)'.format(default)
# ################################################################################################################################
def read_from_socket(ctx, _utcnow=datetime.utcnow, _timedelta=timedelta):
""" Reads data from an already connected TCP socket.
"""
# type: (SocketReaderCtx) -> bytes
# Local aliases
_should_log_messages = ctx.should_log_messages
_log_info = logger.warn
_log_debug = logger.warn
_conn_id = ctx.conn_id
_max_msg_size = ctx.max_msg_size
_read_buffer_size = ctx.read_buffer_size
_recv_timeout = ctx.recv_timeout
_socket_recv = ctx.socket.recv
_socket_settimeout = ctx.socket.settimeout
# Wait for that many seconds
    wait_until = _utcnow() + _timedelta(seconds=ctx.max_wait_time)
# How many bytes have we read so far
msg_size = 0
# Buffer to accumulate data in
buffer = []
# No data received yet
data = '<initial-no-data>'
# Run the main loop
while _utcnow() < wait_until:
# Check whether reading the data would not exceed our message size limit
new_size = msg_size + _read_buffer_size
if new_size > _max_msg_size:
reason = 'Message would exceed max. size allowed `{}` > `{}`'.format(new_size, _max_msg_size)
raise ValueError(reason)
try:
_socket_settimeout(_recv_timeout)
data = _socket_recv(_read_buffer_size)
if _should_log_messages:
_log_debug('Data received by `%s` (%d) -> `%s`', _conn_id, len(data), data)
except SocketTimeoutException:
# This is fine, we just iterate until wait_until time.
pass
        else:
            # Some data was received ..
            if data:
                buffer.append(data)
                msg_size += len(data)
            # .. otherwise, the remote end disconnected so we can end.
            else:
                break
    # We get here either because the remote end disconnected or because we ran out of time - return what we have
result = b''.join(buffer)
if _should_log_messages:
_log_info('Returning result from `%s` (%d) -> `%s`', _conn_id, len(result), result)
return result
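# Illustrative sketch of the context object expected above - the attribute
# names are taken from the local aliases at the top of read_from_socket,
# while SocketReaderCtx itself is defined elsewhere:
#
#   ctx.socket              - an already connected TCP socket
#   ctx.conn_id             - connection ID used in log messages
#   ctx.max_wait_time       - total time budget, in seconds
#   ctx.max_msg_size        - maximum message size allowed, in bytes
#   ctx.read_buffer_size    - how many bytes to request per .recv call
#   ctx.recv_timeout        - timeout of each .recv call, in seconds
#   ctx.should_log_messages - whether to log each chunk received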
# ################################################################################################################################
def parse_address(address):
# type: (str) -> (str, int)
# First, let's reverse it in case input contains an IPv6 address ..
address = address[::-1] # type: str
# .. now, split on the first colon to give the information we seek ..
port, host = address.split(':', 1)
# .. reverse the values back
host = host[::-1]
port = port[::-1]
# .. port needs to be an integer ..
port = int(port)
# .. and now we can return the result.
return host, port
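# For instance, because the string is reversed and split on the first colon,
# it is effectively split on the *last* colon of the original input, so IPv6
# literals with embedded colons parse correctly too:
#
#   parse_address('127.0.0.1:8080') -> ('127.0.0.1', 8080)
#   parse_address('[::1]:8080')     -> ('[::1]', 8080)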
# ################################################################################################################################
# ################################################################################################################################
class ZatoStreamServer(StreamServer):
# ################################################################################################################################
def shutdown(self):
self.close()
# ################################################################################################################################
# These two methods are reimplemented from gevent.server to make it possible to use SO_REUSEPORT.
    @classmethod
    def get_listener(cls, address, backlog=None, family=None):
        if backlog is None:
            backlog = cls.backlog
        return ZatoStreamServer._make_socket(address, backlog=backlog, reuse_addr=cls.reuse_addr, family=family)
@staticmethod
def _make_socket(address, backlog=50, reuse_addr=None, family=socket.AF_INET):
sock = socket.socket(family=family)
if reuse_addr is not None:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, reuse_addr)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
try:
sock.bind(address)
except socket.error as e:
strerror = getattr(e, 'strerror', None)
if strerror is not None:
e.strerror = strerror + ': ' + repr(address)
raise
sock.listen(backlog)
sock.setblocking(0)
return sock
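    # Illustrative usage (the port and handler are hypothetical). Because
    # SO_REUSEPORT is always set in _make_socket, several such servers may
    # bind the same address and the kernel will distribute incoming
    # connections between them:
    #
    #   def handle(sock, client_address):
    #       ...  # read from / write to the accepted socket
    #
    #   server = ZatoStreamServer(('0.0.0.0', 17010), handle)
    #   server.serve_forever()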
# ################################################################################################################################
# ################################################################################################################################ | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/util/tcp.py | tcp.py |
__version__ = '1.0.1'
__all__ = (
'__version__',
'dottedQuadToNum',
'numToDottedQuad',
'ValidateError',
'VdtUnknownCheckError',
'VdtParamError',
'VdtTypeError',
'VdtValueError',
'VdtValueTooSmallError',
'VdtValueTooBigError',
'VdtValueTooShortError',
'VdtValueTooLongError',
'VdtMissingValue',
'Validator',
'is_integer',
'is_float',
'is_boolean',
'is_list',
'is_tuple',
'is_ip_addr',
'is_string',
'is_int_list',
'is_bool_list',
'is_float_list',
'is_string_list',
'is_ip_addr_list',
'is_mixed_list',
'is_option',
'__docformat__',
)
import re
import sys
from pprint import pprint
#TODO - #21 - six is part of the repo now, but we didn't switch over to it here
# this could be replaced if six is used for compatibility, or there are no
# more assertions about items being a string
if sys.version_info < (3,):
string_type = basestring
else:
string_type = str
# so tests that care about unicode on 2.x can specify unicode, and the same
# tests when run on 3.x won't complain about an undefined name "unicode"
# since all strings are unicode on 3.x we just want to pass it through
# unchanged
unicode = lambda x: x
# in python 3, all ints are equivalent to python 2 longs, and they'll
# never show "L" in the repr
long = int
_list_arg = re.compile(r'''
(?:
([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*list\(
(
(?:
\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)
\s*,\s*
)*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)? # last one
)
\)
)
''', re.VERBOSE | re.DOTALL) # two groups
_list_members = re.compile(r'''
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?) # unquoted
)
(?:
(?:\s*,\s*)|(?:\s*$) # comma
)
''', re.VERBOSE | re.DOTALL) # one group
_paramstring = r'''
(?:
(
(?:
[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*list\(
(?:
\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)
\s*,\s*
)*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s\)][^,\)]*?) # unquoted
)? # last one
\)
)|
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?)| # unquoted
(?: # keyword argument
[a-zA-Z_][a-zA-Z0-9_]*\s*=\s*
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\s=][^,=]*?) # unquoted
)
)
)
)
(?:
(?:\s*,\s*)|(?:\s*$) # comma
)
)
'''
_matchstring = '^%s*' % _paramstring
# Python pre 2.2.1 doesn't have bool
try:
bool
except NameError:
def bool(val):
"""Simple boolean equivalent function. """
if val:
return 1
else:
return 0
def dottedQuadToNum(ip):
"""
Convert decimal dotted quad string to long integer
>>> int(dottedQuadToNum('1 '))
1
>>> int(dottedQuadToNum(' 1.2'))
16777218
>>> int(dottedQuadToNum(' 1.2.3 '))
16908291
>>> int(dottedQuadToNum('1.2.3.4'))
16909060
>>> dottedQuadToNum('255.255.255.255')
4294967295
>>> dottedQuadToNum('255.255.255.256')
Traceback (most recent call last):
ValueError: Not a good dotted-quad IP: 255.255.255.256
"""
# import here to avoid it when ip_addr values are not used
import socket, struct
try:
return struct.unpack('!L',
socket.inet_aton(ip.strip()))[0]
except socket.error:
raise ValueError('Not a good dotted-quad IP: %s' % ip)
def numToDottedQuad(num):
"""
Convert int or long int to dotted quad string
>>> numToDottedQuad(long(-1))
Traceback (most recent call last):
ValueError: Not a good numeric IP: -1
>>> numToDottedQuad(long(1))
'0.0.0.1'
>>> numToDottedQuad(long(16777218))
'1.0.0.2'
>>> numToDottedQuad(long(16908291))
'1.2.0.3'
>>> numToDottedQuad(long(16909060))
'1.2.3.4'
>>> numToDottedQuad(long(4294967295))
'255.255.255.255'
>>> numToDottedQuad(long(4294967296))
Traceback (most recent call last):
ValueError: Not a good numeric IP: 4294967296
>>> numToDottedQuad(-1)
Traceback (most recent call last):
ValueError: Not a good numeric IP: -1
>>> numToDottedQuad(1)
'0.0.0.1'
>>> numToDottedQuad(16777218)
'1.0.0.2'
>>> numToDottedQuad(16908291)
'1.2.0.3'
>>> numToDottedQuad(16909060)
'1.2.3.4'
>>> numToDottedQuad(4294967295)
'255.255.255.255'
>>> numToDottedQuad(4294967296)
Traceback (most recent call last):
ValueError: Not a good numeric IP: 4294967296
"""
# import here to avoid it when ip_addr values are not used
import socket, struct
# no need to intercept here, 4294967295L is fine
if num > long(4294967295) or num < 0:
raise ValueError('Not a good numeric IP: %s' % num)
try:
return socket.inet_ntoa(
struct.pack('!L', long(num)))
except (socket.error, struct.error, OverflowError):
raise ValueError('Not a good numeric IP: %s' % num)
class ValidateError(Exception):
"""
This error indicates that the check failed.
It can be the base class for more specific errors.
    Any check function that fails ought to raise this error
    (or a subclass).
>>> raise ValidateError
Traceback (most recent call last):
ValidateError
"""
class VdtMissingValue(ValidateError):
"""No value was supplied to a check that needed one."""
class VdtUnknownCheckError(ValidateError):
"""An unknown check function was requested"""
def __init__(self, value):
"""
>>> raise VdtUnknownCheckError('yoda')
Traceback (most recent call last):
VdtUnknownCheckError: the check "yoda" is unknown.
"""
ValidateError.__init__(self, 'the check "%s" is unknown.' % (value,))
class VdtParamError(SyntaxError):
"""An incorrect parameter was passed"""
def __init__(self, name, value):
"""
>>> raise VdtParamError('yoda', 'jedi')
Traceback (most recent call last):
VdtParamError: passed an incorrect value "jedi" for parameter "yoda".
"""
SyntaxError.__init__(self, 'passed an incorrect value "%s" for parameter "%s".' % (value, name))
class VdtTypeError(ValidateError):
"""The value supplied was of the wrong type"""
def __init__(self, value):
"""
>>> raise VdtTypeError('jedi')
Traceback (most recent call last):
VdtTypeError: the value "jedi" is of the wrong type.
"""
ValidateError.__init__(self, 'the value "%s" is of the wrong type.' % (value,))
class VdtValueError(ValidateError):
"""The value supplied was of the correct type, but was not an allowed value."""
def __init__(self, value):
"""
>>> raise VdtValueError('jedi')
Traceback (most recent call last):
VdtValueError: the value "jedi" is unacceptable.
"""
ValidateError.__init__(self, 'the value "%s" is unacceptable.' % (value,))
class VdtValueTooSmallError(VdtValueError):
"""The value supplied was of the correct type, but was too small."""
def __init__(self, value):
"""
>>> raise VdtValueTooSmallError('0')
Traceback (most recent call last):
VdtValueTooSmallError: the value "0" is too small.
"""
ValidateError.__init__(self, 'the value "%s" is too small.' % (value,))
class VdtValueTooBigError(VdtValueError):
"""The value supplied was of the correct type, but was too big."""
def __init__(self, value):
"""
>>> raise VdtValueTooBigError('1')
Traceback (most recent call last):
VdtValueTooBigError: the value "1" is too big.
"""
ValidateError.__init__(self, 'the value "%s" is too big.' % (value,))
class VdtValueTooShortError(VdtValueError):
"""The value supplied was of the correct type, but was too short."""
def __init__(self, value):
"""
>>> raise VdtValueTooShortError('jed')
Traceback (most recent call last):
VdtValueTooShortError: the value "jed" is too short.
"""
ValidateError.__init__(
self,
'the value "%s" is too short.' % (value,))
class VdtValueTooLongError(VdtValueError):
"""The value supplied was of the correct type, but was too long."""
def __init__(self, value):
"""
>>> raise VdtValueTooLongError('jedie')
Traceback (most recent call last):
VdtValueTooLongError: the value "jedie" is too long.
"""
ValidateError.__init__(self, 'the value "%s" is too long.' % (value,))
class Validator(object):
"""
Validator is an object that allows you to register a set of 'checks'.
These checks take input and test that it conforms to the check.
This can also involve converting the value from a string into
the correct datatype.
The ``check`` method takes an input string which configures which
check is to be used and applies that check to a supplied value.
An example input string would be:
'int_range(param1, param2)'
You would then provide something like:
>>> def int_range_check(value, min, max):
... # turn min and max from strings to integers
... min = int(min)
... max = int(max)
... # check that value is of the correct type.
... # possible valid inputs are integers or strings
... # that represent integers
... if not isinstance(value, (int, long, string_type)):
... raise VdtTypeError(value)
... elif isinstance(value, string_type):
... # if we are given a string
... # attempt to convert to an integer
... try:
... value = int(value)
... except ValueError:
... raise VdtValueError(value)
... # check the value is between our constraints
... if not min <= value:
... raise VdtValueTooSmallError(value)
... if not value <= max:
... raise VdtValueTooBigError(value)
... return value
>>> fdict = {'int_range': int_range_check}
>>> vtr1 = Validator(fdict)
>>> vtr1.check('int_range(20, 40)', '30')
30
>>> vtr1.check('int_range(20, 40)', '60')
Traceback (most recent call last):
VdtValueTooBigError: the value "60" is too big.
New functions can be added with : ::
>>> vtr2 = Validator()
>>> vtr2.functions['int_range'] = int_range_check
Or by passing in a dictionary of functions when Validator
is instantiated.
Your functions *can* use keyword arguments,
but the first argument should always be 'value'.
If the function doesn't take additional arguments,
the parentheses are optional in the check.
It can be written with either of : ::
keyword = function_name
keyword = function_name()
The first program to utilise Validator() was Michael Foord's
ConfigObj, an alternative to ConfigParser which supports lists and
can validate a config file using a config schema.
For more details on using Validator with ConfigObj see:
https://configobj.readthedocs.org/en/latest/configobj.html
"""
# this regex does the initial parsing of the checks
_func_re = re.compile(r'(.+?)\((.*)\)', re.DOTALL)
# this regex takes apart keyword arguments
_key_arg = re.compile(r'^([a-zA-Z_][a-zA-Z0-9_]*)\s*=\s*(.*)$', re.DOTALL)
# this regex finds keyword=list(....) type values
_list_arg = _list_arg
# this regex takes individual values out of lists - in one pass
_list_members = _list_members
# These regexes check a set of arguments for validity
# and then pull the members out
_paramfinder = re.compile(_paramstring, re.VERBOSE | re.DOTALL)
_matchfinder = re.compile(_matchstring, re.VERBOSE | re.DOTALL)
def __init__(self, functions=None):
"""
>>> vtri = Validator()
"""
self.functions = {
'': self._pass,
'integer': is_integer,
'float': is_float,
'boolean': is_boolean,
'ip_addr': is_ip_addr,
'string': is_string,
'list': is_list,
'tuple': is_tuple,
'int_list': is_int_list,
'float_list': is_float_list,
'bool_list': is_bool_list,
'ip_addr_list': is_ip_addr_list,
'string_list': is_string_list,
'mixed_list': is_mixed_list,
'pass': self._pass,
'option': is_option,
'force_list': force_list,
}
if functions is not None:
self.functions.update(functions)
# tekNico: for use by ConfigObj
self.baseErrorClass = ValidateError
self._cache = {}
def check(self, check, value, missing=False):
"""
Usage: check(check, value)
Arguments:
check: string representing check to apply (including arguments)
value: object to be checked
Returns value, converted to correct type if necessary
If the check fails, raises a ``ValidateError`` subclass.
>>> vtor.check('yoda', '')
Traceback (most recent call last):
VdtUnknownCheckError: the check "yoda" is unknown.
>>> vtor.check('yoda()', '')
Traceback (most recent call last):
VdtUnknownCheckError: the check "yoda" is unknown.
>>> vtor.check('string(default="")', '', missing=True)
''
"""
fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
if missing:
if default is None:
# no information needed here - to be handled by caller
raise VdtMissingValue()
value = self._handle_none(default)
if value is None:
return None
return self._check_value(value, fun_name, fun_args, fun_kwargs)
def _handle_none(self, value):
if value == 'None':
return None
elif value in ("'None'", '"None"'):
# Special case a quoted None
value = self._unquote(value)
return value
def _parse_with_caching(self, check):
if check in self._cache:
fun_name, fun_args, fun_kwargs, default = self._cache[check]
# We call list and dict below to work with *copies* of the data
# rather than the original (which are mutable of course)
fun_args = list(fun_args)
fun_kwargs = dict(fun_kwargs)
else:
fun_name, fun_args, fun_kwargs, default = self._parse_check(check)
fun_kwargs = dict([(str(key), value) for (key, value) in list(fun_kwargs.items())])
self._cache[check] = fun_name, list(fun_args), dict(fun_kwargs), default
return fun_name, fun_args, fun_kwargs, default
def _check_value(self, value, fun_name, fun_args, fun_kwargs):
try:
fun = self.functions[fun_name]
except KeyError:
raise VdtUnknownCheckError(fun_name)
else:
return fun(value, *fun_args, **fun_kwargs)
def _parse_check(self, check):
fun_match = self._func_re.match(check)
if fun_match:
fun_name = fun_match.group(1)
arg_string = fun_match.group(2)
arg_match = self._matchfinder.match(arg_string)
            if arg_match is None:
                # Bad syntax - note that VdtParamError takes (name, value),
                # not a pre-formatted message
                raise VdtParamError('check', check)
fun_args = []
fun_kwargs = {}
# pull out args of group 2
for arg in self._paramfinder.findall(arg_string):
# args may need whitespace removing (before removing quotes)
arg = arg.strip()
listmatch = self._list_arg.match(arg)
if listmatch:
key, val = self._list_handle(listmatch)
fun_kwargs[key] = val
continue
keymatch = self._key_arg.match(arg)
if keymatch:
val = keymatch.group(2)
                    if val not in ("'None'", '"None"'):
# Special case a quoted None
val = self._unquote(val)
fun_kwargs[keymatch.group(1)] = val
continue
fun_args.append(self._unquote(arg))
else:
# allows for function names without (args)
return check, (), {}, None
# Default must be deleted if the value is specified too,
# otherwise the check function will get a spurious "default" keyword arg
default = fun_kwargs.pop('default', None)
return fun_name, fun_args, fun_kwargs, default
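    # Illustrative sketch of what _parse_check returns - e.g. for the check
    # string "integer(0, 9, default=5)" the result is:
    #
    #   ('integer', ['0', '9'], {}, '5')
    #
    # i.e. the default is popped out of the keyword arguments so the check
    # function itself never receives a spurious "default" argument.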
def _unquote(self, val):
"""Unquote a value if necessary."""
if (len(val) >= 2) and (val[0] in ("'", '"')) and (val[0] == val[-1]):
val = val[1:-1]
return val
def _list_handle(self, listmatch):
"""Take apart a ``keyword=list('val, 'val')`` type string."""
out = []
name = listmatch.group(1)
args = listmatch.group(2)
for arg in self._list_members.findall(args):
out.append(self._unquote(arg))
return name, out
def _pass(self, value):
"""
Dummy check that always passes
>>> vtor.check('', 0)
0
>>> vtor.check('', '0')
'0'
"""
return value
def get_default_value(self, check):
"""
Given a check, return the default value for the check
(converted to the right type).
If the check doesn't specify a default value then a
``KeyError`` will be raised.
"""
fun_name, fun_args, fun_kwargs, default = self._parse_with_caching(check)
if default is None:
raise KeyError('Check "%s" has no default value.' % check)
value = self._handle_none(default)
if value is None:
return value
return self._check_value(value, fun_name, fun_args, fun_kwargs)
def _is_num_param(names, values, to_float=False):
"""
Return numbers from inputs or raise VdtParamError.
Lets ``None`` pass through.
Pass in keyword argument ``to_float=True`` to
use float for the conversion rather than int.
>>> _is_num_param(('', ''), (0, 1.0))
[0, 1]
>>> _is_num_param(('', ''), (0, 1.0), to_float=True)
[0.0, 1.0]
>>> _is_num_param(('a'), ('a'))
Traceback (most recent call last):
VdtParamError: passed an incorrect value "a" for parameter "a".
"""
    fun = float if to_float else int
out_params = []
for (name, val) in zip(names, values):
if val is None:
out_params.append(val)
elif isinstance(val, (int, long, float, string_type)):
try:
out_params.append(fun(val))
            except ValueError:
raise VdtParamError(name, val)
else:
raise VdtParamError(name, val)
return out_params
# built in checks
# you can override these by setting the appropriate name
# in Validator.functions
# note: if the params are specified wrongly in your input string,
# you will also raise errors.
def is_integer(value, min=None, max=None):
"""
A check that tests that a given value is an integer (int, or long)
and optionally, between bounds. A negative value is accepted, while
a float will fail.
If the value is a string, then the conversion is done - if possible.
Otherwise a VdtError is raised.
>>> vtor.check('integer', '-1')
-1
>>> vtor.check('integer', '0')
0
>>> vtor.check('integer', 9)
9
>>> vtor.check('integer', 'a')
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
>>> vtor.check('integer', '2.2')
Traceback (most recent call last):
VdtTypeError: the value "2.2" is of the wrong type.
>>> vtor.check('integer(10)', '20')
20
>>> vtor.check('integer(max=20)', '15')
15
>>> vtor.check('integer(10)', '9')
Traceback (most recent call last):
VdtValueTooSmallError: the value "9" is too small.
>>> vtor.check('integer(10)', 9)
Traceback (most recent call last):
VdtValueTooSmallError: the value "9" is too small.
>>> vtor.check('integer(max=20)', '35')
Traceback (most recent call last):
VdtValueTooBigError: the value "35" is too big.
>>> vtor.check('integer(max=20)', 35)
Traceback (most recent call last):
VdtValueTooBigError: the value "35" is too big.
>>> vtor.check('integer(0, 9)', False)
0
"""
(min_val, max_val) = _is_num_param(('min', 'max'), (min, max))
if not isinstance(value, (int, long, string_type)):
raise VdtTypeError(value)
if isinstance(value, string_type):
# if it's a string - does it represent an integer ?
try:
value = int(value)
except ValueError:
raise VdtTypeError(value)
if (min_val is not None) and (value < min_val):
raise VdtValueTooSmallError(value)
if (max_val is not None) and (value > max_val):
raise VdtValueTooBigError(value)
return value
def is_float(value, min=None, max=None):
"""
A check that tests that a given value is a float
(an integer will be accepted), and optionally - that it is between bounds.
If the value is a string, then the conversion is done - if possible.
Otherwise a VdtError is raised.
This can accept negative values.
>>> vtor.check('float', '2')
2.0
From now on we multiply the value to avoid comparing decimals
>>> vtor.check('float', '-6.8') * 10
-68.0
>>> vtor.check('float', '12.2') * 10
122.0
>>> vtor.check('float', 8.4) * 10
84.0
>>> vtor.check('float', 'a')
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
>>> vtor.check('float(10.1)', '10.2') * 10
102.0
>>> vtor.check('float(max=20.2)', '15.1') * 10
151.0
>>> vtor.check('float(10.0)', '9.0')
Traceback (most recent call last):
VdtValueTooSmallError: the value "9.0" is too small.
>>> vtor.check('float(max=20.0)', '35.0')
Traceback (most recent call last):
VdtValueTooBigError: the value "35.0" is too big.
"""
(min_val, max_val) = _is_num_param(
('min', 'max'), (min, max), to_float=True)
if not isinstance(value, (int, long, float, string_type)):
raise VdtTypeError(value)
if not isinstance(value, float):
# if it's a string - does it represent a float ?
try:
value = float(value)
except ValueError:
raise VdtTypeError(value)
if (min_val is not None) and (value < min_val):
raise VdtValueTooSmallError(value)
if (max_val is not None) and (value > max_val):
raise VdtValueTooBigError(value)
return value
bool_dict = {
True: True, 'on': True, '1': True, 'true': True, 'yes': True,
False: False, 'off': False, '0': False, 'false': False, 'no': False,
}
def is_boolean(value):
"""
Check if the value represents a boolean.
>>> vtor.check('boolean', 0)
0
>>> vtor.check('boolean', False)
0
>>> vtor.check('boolean', '0')
0
>>> vtor.check('boolean', 'off')
0
>>> vtor.check('boolean', 'false')
0
>>> vtor.check('boolean', 'no')
0
>>> vtor.check('boolean', 'nO')
0
>>> vtor.check('boolean', 'NO')
0
>>> vtor.check('boolean', 1)
1
>>> vtor.check('boolean', True)
1
>>> vtor.check('boolean', '1')
1
>>> vtor.check('boolean', 'on')
1
>>> vtor.check('boolean', 'true')
1
>>> vtor.check('boolean', 'yes')
1
>>> vtor.check('boolean', 'Yes')
1
>>> vtor.check('boolean', 'YES')
1
>>> vtor.check('boolean', '')
Traceback (most recent call last):
VdtTypeError: the value "" is of the wrong type.
>>> vtor.check('boolean', 'up')
Traceback (most recent call last):
VdtTypeError: the value "up" is of the wrong type.
"""
if isinstance(value, string_type):
try:
return bool_dict[value.lower()]
except KeyError:
raise VdtTypeError(value)
# we do an equality test rather than an identity test
    # this ensures Python 2.2 compatibility
# and allows 0 and 1 to represent True and False
if value == False:
return False
elif value == True:
return True
else:
raise VdtTypeError(value)
def is_ip_addr(value):
"""
Check that the supplied value is an Internet Protocol address, v.4,
represented by a dotted-quad string, i.e. '1.2.3.4'.
>>> vtor.check('ip_addr', '1 ')
'1'
>>> vtor.check('ip_addr', ' 1.2')
'1.2'
>>> vtor.check('ip_addr', ' 1.2.3 ')
'1.2.3'
>>> vtor.check('ip_addr', '1.2.3.4')
'1.2.3.4'
>>> vtor.check('ip_addr', '0.0.0.0')
'0.0.0.0'
>>> vtor.check('ip_addr', '255.255.255.255')
'255.255.255.255'
>>> vtor.check('ip_addr', '255.255.255.256')
Traceback (most recent call last):
VdtValueError: the value "255.255.255.256" is unacceptable.
>>> vtor.check('ip_addr', '1.2.3.4.5')
Traceback (most recent call last):
VdtValueError: the value "1.2.3.4.5" is unacceptable.
>>> vtor.check('ip_addr', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
"""
if not isinstance(value, string_type):
raise VdtTypeError(value)
value = value.strip()
try:
dottedQuadToNum(value)
except ValueError:
raise VdtValueError(value)
return value
def is_list(value, min=None, max=None):
"""
Check that the value is a list of values.
You can optionally specify the minimum and maximum number of members.
It does no check on list members.
>>> vtor.check('list', ())
[]
>>> vtor.check('list', [])
[]
>>> vtor.check('list', (1, 2))
[1, 2]
>>> vtor.check('list', [1, 2])
[1, 2]
>>> vtor.check('list(3)', (1, 2))
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2)" is too short.
>>> vtor.check('list(max=5)', (1, 2, 3, 4, 5, 6))
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
>>> vtor.check('list(min=3, max=5)', (1, 2, 3, 4))
[1, 2, 3, 4]
>>> vtor.check('list', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('list', '12')
Traceback (most recent call last):
VdtTypeError: the value "12" is of the wrong type.
"""
(min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
if isinstance(value, string_type):
raise VdtTypeError(value)
try:
num_members = len(value)
except TypeError:
raise VdtTypeError(value)
if min_len is not None and num_members < min_len:
raise VdtValueTooShortError(value)
if max_len is not None and num_members > max_len:
raise VdtValueTooLongError(value)
return list(value)
def is_tuple(value, min=None, max=None):
"""
Check that the value is a tuple of values.
You can optionally specify the minimum and maximum number of members.
It does no check on members.
>>> vtor.check('tuple', ())
()
>>> vtor.check('tuple', [])
()
>>> vtor.check('tuple', (1, 2))
(1, 2)
>>> vtor.check('tuple', [1, 2])
(1, 2)
>>> vtor.check('tuple(3)', (1, 2))
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2)" is too short.
>>> vtor.check('tuple(max=5)', (1, 2, 3, 4, 5, 6))
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2, 3, 4, 5, 6)" is too long.
>>> vtor.check('tuple(min=3, max=5)', (1, 2, 3, 4))
(1, 2, 3, 4)
>>> vtor.check('tuple', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('tuple', '12')
Traceback (most recent call last):
VdtTypeError: the value "12" is of the wrong type.
"""
return tuple(is_list(value, min, max))
def is_string(value, min=None, max=None):
"""
Check that the supplied value is a string.
You can optionally specify the minimum and maximum number of members.
>>> vtor.check('string', '0')
'0'
>>> vtor.check('string', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('string(2)', '12')
'12'
>>> vtor.check('string(2)', '1')
Traceback (most recent call last):
VdtValueTooShortError: the value "1" is too short.
>>> vtor.check('string(min=2, max=3)', '123')
'123'
>>> vtor.check('string(min=2, max=3)', '1234')
Traceback (most recent call last):
VdtValueTooLongError: the value "1234" is too long.
"""
if not isinstance(value, string_type):
raise VdtTypeError(value)
(min_len, max_len) = _is_num_param(('min', 'max'), (min, max))
try:
num_members = len(value)
except TypeError:
raise VdtTypeError(value)
if min_len is not None and num_members < min_len:
raise VdtValueTooShortError(value)
if max_len is not None and num_members > max_len:
raise VdtValueTooLongError(value)
return value
def is_int_list(value, min=None, max=None):
"""
Check that the value is a list of integers.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is an integer.
>>> vtor.check('int_list', ())
[]
>>> vtor.check('int_list', [])
[]
>>> vtor.check('int_list', (1, 2))
[1, 2]
>>> vtor.check('int_list', [1, 2])
[1, 2]
>>> vtor.check('int_list', [1, 'a'])
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
"""
return [is_integer(mem) for mem in is_list(value, min, max)]
def is_bool_list(value, min=None, max=None):
"""
Check that the value is a list of booleans.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a boolean.
>>> vtor.check('bool_list', ())
[]
>>> vtor.check('bool_list', [])
[]
>>> check_res = vtor.check('bool_list', (True, False))
>>> check_res == [True, False]
1
>>> check_res = vtor.check('bool_list', [True, False])
>>> check_res == [True, False]
1
>>> vtor.check('bool_list', [True, 'a'])
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
"""
return [is_boolean(mem) for mem in is_list(value, min, max)]
def is_float_list(value, min=None, max=None):
"""
Check that the value is a list of floats.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a float.
>>> vtor.check('float_list', ())
[]
>>> vtor.check('float_list', [])
[]
>>> vtor.check('float_list', (1, 2.0))
[1.0, 2.0]
>>> vtor.check('float_list', [1, 2.0])
[1.0, 2.0]
>>> vtor.check('float_list', [1, 'a'])
Traceback (most recent call last):
VdtTypeError: the value "a" is of the wrong type.
"""
return [is_float(mem) for mem in is_list(value, min, max)]
def is_string_list(value, min=None, max=None):
"""
Check that the value is a list of strings.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is a string.
>>> vtor.check('string_list', ())
[]
>>> vtor.check('string_list', [])
[]
>>> vtor.check('string_list', ('a', 'b'))
['a', 'b']
>>> vtor.check('string_list', ['a', 1])
Traceback (most recent call last):
VdtTypeError: the value "1" is of the wrong type.
>>> vtor.check('string_list', 'hello')
Traceback (most recent call last):
VdtTypeError: the value "hello" is of the wrong type.
"""
if isinstance(value, string_type):
raise VdtTypeError(value)
return [is_string(mem) for mem in is_list(value, min, max)]
def is_ip_addr_list(value, min=None, max=None):
"""
Check that the value is a list of IP addresses.
You can optionally specify the minimum and maximum number of members.
Each list member is checked that it is an IP address.
>>> vtor.check('ip_addr_list', ())
[]
>>> vtor.check('ip_addr_list', [])
[]
>>> vtor.check('ip_addr_list', ('1.2.3.4', '5.6.7.8'))
['1.2.3.4', '5.6.7.8']
>>> vtor.check('ip_addr_list', ['a'])
Traceback (most recent call last):
VdtValueError: the value "a" is unacceptable.
"""
return [is_ip_addr(mem) for mem in is_list(value, min, max)]
def force_list(value, min=None, max=None):
"""
Check that a value is a list, coercing strings into
a list with one member. Useful where users forget the
trailing comma that turns a single value into a list.
You can optionally specify the minimum and maximum number of members.
    A minimum of greater than one will fail if the user only supplies a
    string.
>>> vtor.check('force_list', ())
[]
>>> vtor.check('force_list', [])
[]
>>> vtor.check('force_list', 'hello')
['hello']
"""
if not isinstance(value, (list, tuple)):
value = [value]
return is_list(value, min, max)
fun_dict = {
'integer': is_integer,
'float': is_float,
'ip_addr': is_ip_addr,
'string': is_string,
'boolean': is_boolean,
}
def is_mixed_list(value, *args):
"""
Check that the value is a list.
Allow specifying the type of each member.
Work on lists of specific lengths.
You specify each member as a positional argument specifying type
Each type should be one of the following strings :
'integer', 'float', 'ip_addr', 'string', 'boolean'
So you can specify a list of two strings, followed by
two integers as :
mixed_list('string', 'string', 'integer', 'integer')
The length of the list must match the number of positional
arguments you supply.
>>> mix_str = "mixed_list('integer', 'float', 'ip_addr', 'string', 'boolean')"
>>> check_res = vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', True))
>>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
1
>>> check_res = vtor.check(mix_str, ('1', '2.0', '1.2.3.4', 'a', 'True'))
>>> check_res == [1, 2.0, '1.2.3.4', 'a', True]
1
>>> vtor.check(mix_str, ('b', 2.0, '1.2.3.4', 'a', True))
Traceback (most recent call last):
VdtTypeError: the value "b" is of the wrong type.
>>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a'))
Traceback (most recent call last):
VdtValueTooShortError: the value "(1, 2.0, '1.2.3.4', 'a')" is too short.
>>> vtor.check(mix_str, (1, 2.0, '1.2.3.4', 'a', 1, 'b'))
Traceback (most recent call last):
VdtValueTooLongError: the value "(1, 2.0, '1.2.3.4', 'a', 1, 'b')" is too long.
>>> vtor.check(mix_str, 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
>>> vtor.check('mixed_list("yoda")', ('a'))
Traceback (most recent call last):
VdtParamError: passed an incorrect value "KeyError('yoda',)" for parameter "'mixed_list'"
"""
try:
length = len(value)
except TypeError:
raise VdtTypeError(value)
if length < len(args):
raise VdtValueTooShortError(value)
elif length > len(args):
raise VdtValueTooLongError(value)
try:
return [fun_dict[arg](val) for arg, val in zip(args, value)]
except KeyError as e:
raise VdtParamError('mixed_list', e)
def is_option(value, *options):
"""
This check matches the value to any of a set of options.
>>> vtor.check('option("yoda", "jedi")', 'yoda')
'yoda'
>>> vtor.check('option("yoda", "jedi")', 'jed')
Traceback (most recent call last):
VdtValueError: the value "jed" is unacceptable.
>>> vtor.check('option("yoda", "jedi")', 0)
Traceback (most recent call last):
VdtTypeError: the value "0" is of the wrong type.
"""
if not isinstance(value, string_type):
raise VdtTypeError(value)
    if value not in options:
raise VdtValueError(value)
return value
def _test(value, *args, **keywargs):
"""
A function that exists for test purposes.
>>> checks = [
... '3, 6, min=1, max=3, test=list(a, b, c)',
... '3',
... '3, 6',
... '3,',
... 'min=1, test="a b c"',
... 'min=5, test="a, b, c"',
... 'min=1, max=3, test="a, b, c"',
... 'min=-100, test=-99',
... 'min=1, max=3',
... '3, 6, test="36"',
... '3, 6, test="a, b, c"',
... '3, max=3, test=list("a", "b", "c")',
... '''3, max=3, test=list("'a'", 'b', "x=(c)")''',
... "test='x=fish(3)'",
... ]
>>> v = Validator({'test': _test})
>>> for entry in checks:
... pprint(v.check(('test(%s)' % entry), 3))
(3, ('3', '6'), {'max': '3', 'min': '1', 'test': ['a', 'b', 'c']})
(3, ('3',), {})
(3, ('3', '6'), {})
(3, ('3',), {})
(3, (), {'min': '1', 'test': 'a b c'})
(3, (), {'min': '5', 'test': 'a, b, c'})
(3, (), {'max': '3', 'min': '1', 'test': 'a, b, c'})
(3, (), {'min': '-100', 'test': '-99'})
(3, (), {'max': '3', 'min': '1'})
(3, ('3', '6'), {'test': '36'})
(3, ('3', '6'), {'test': 'a, b, c'})
(3, ('3',), {'max': '3', 'test': ['a', 'b', 'c']})
(3, ('3',), {'max': '3', 'test': ["'a'", 'b', 'x=(c)']})
(3, (), {'test': 'x=fish(3)'})
>>> v = Validator()
>>> v.check('integer(default=6)', '3')
3
>>> v.check('integer(default=6)', None, True)
6
>>> v.get_default_value('integer(default=6)')
6
>>> v.get_default_value('float(default=6)')
6.0
>>> v.get_default_value('pass(default=None)')
>>> v.get_default_value("string(default='None')")
'None'
>>> v.get_default_value('pass')
Traceback (most recent call last):
KeyError: 'Check "pass" has no default value.'
>>> v.get_default_value('pass(default=list(1, 2, 3, 4))')
['1', '2', '3', '4']
>>> v = Validator()
>>> v.check("pass(default=None)", None, True)
>>> v.check("pass(default='None')", None, True)
'None'
>>> v.check('pass(default="None")', None, True)
'None'
>>> v.check('pass(default=list(1, 2, 3, 4))', None, True)
['1', '2', '3', '4']
Bug test for unicode arguments
>>> v = Validator()
>>> v.check(unicode('string(min=4)'), unicode('test')) == unicode('test')
True
>>> v = Validator()
>>> v.get_default_value(unicode('string(min=4, default="1234")')) == unicode('1234')
True
>>> v.check(unicode('string(min=4, default="1234")'), unicode('test')) == unicode('test')
True
>>> v = Validator()
>>> default = v.get_default_value('string(default=None)')
>>> default == None
1
"""
return (value, args, keywargs)
def _test2():
"""
>>>
>>> v = Validator()
>>> v.get_default_value('string(default="#ff00dd")')
'#ff00dd'
>>> v.get_default_value('integer(default=3) # comment')
3
"""
def _test3():
r"""
>>> vtor.check('string(default="")', '', missing=True)
''
>>> vtor.check('string(default="\n")', '', missing=True)
'\n'
>>> print(vtor.check('string(default="\n")', '', missing=True))
<BLANKLINE>
<BLANKLINE>
>>> vtor.check('string()', '\n')
'\n'
>>> vtor.check('string(default="\n\n\n")', '', missing=True)
'\n\n\n'
>>> vtor.check('string()', 'random \n text goes here\n\n')
'random \n text goes here\n\n'
>>> vtor.check('string(default=" \nrandom text\ngoes \n here\n\n ")',
... '', missing=True)
' \nrandom text\ngoes \n here\n\n '
>>> vtor.check("string(default='\n\n\n')", '', missing=True)
'\n\n\n'
>>> vtor.check("option('\n','a','b',default='\n')", '', missing=True)
'\n'
>>> vtor.check("string_list()", ['foo', '\n', 'bar'])
['foo', '\n', 'bar']
>>> vtor.check("string_list(default=list('\n'))", '', missing=True)
['\n']
"""
if __name__ == '__main__':
# run the code tests in doctest format
import sys
import doctest
m = sys.modules.get('__main__')
globs = m.__dict__.copy()
globs.update({
'vtor': Validator(),
})
failures, tests = doctest.testmod(
m, globs=globs,
optionflags=doctest.IGNORE_EXCEPTION_DETAIL | doctest.ELLIPSIS)
assert not failures, '{} failures out of {} tests'.format(failures, tests) | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/ext/validate_.py | validate_.py |
import os
import re
import sys
from ast import literal_eval
from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE
from collections import OrderedDict
import six
__version__ = '5.0.6'
# imported lazily to avoid startup performance hit if it isn't used
compiler = None
# A dictionary mapping BOM to
# the encoding to decode with, and what to set the
# encoding attribute to.
BOMS = {
BOM_UTF8: ('utf_8', None),
BOM_UTF16_BE: ('utf16_be', 'utf_16'),
BOM_UTF16_LE: ('utf16_le', 'utf_16'),
BOM_UTF16: ('utf_16', 'utf_16'),
}
# All legal variants of the BOM codecs.
# TODO: the list of aliases is not meant to be exhaustive, is there a
# better way ?
BOM_LIST = {
'utf_16': 'utf_16',
'u16': 'utf_16',
'utf16': 'utf_16',
'utf-16': 'utf_16',
'utf16_be': 'utf16_be',
'utf_16_be': 'utf16_be',
'utf-16be': 'utf16_be',
'utf16_le': 'utf16_le',
'utf_16_le': 'utf16_le',
'utf-16le': 'utf16_le',
'utf_8': 'utf_8',
'u8': 'utf_8',
'utf': 'utf_8',
'utf8': 'utf_8',
'utf-8': 'utf_8',
}
# Map of encodings to the BOM to write.
BOM_SET = {
'utf_8': BOM_UTF8,
'utf_16': BOM_UTF16,
'utf16_be': BOM_UTF16_BE,
'utf16_le': BOM_UTF16_LE,
None: BOM_UTF8
}
def match_utf8(encoding):
return BOM_LIST.get(encoding.lower()) == 'utf_8'
# Quote strings used for writing values
squot = "'%s'"
dquot = '"%s"'
noquot = "%s"
wspace_plus = ' \r\n\v\t\'"'
tsquot = '"""%s"""'
tdquot = "'''%s'''"
# Sentinel for use in getattr calls to replace hasattr
MISSING = object()
__all__ = (
'DEFAULT_INDENT_TYPE',
'DEFAULT_INTERPOLATION',
'ConfigObjError',
'NestingError',
'ParseError',
'DuplicateError',
'ConfigspecError',
'ConfigObj',
'SimpleVal',
'InterpolationError',
'InterpolationLoopError',
'MissingInterpolationOption',
'RepeatSectionError',
'ReloadError',
'UnreprError',
'UnknownType',
'flatten_errors',
'get_extra_values'
)
DEFAULT_INTERPOLATION = 'configparser'
DEFAULT_INDENT_TYPE = ' '
MAX_INTERPOL_DEPTH = 10
OPTION_DEFAULTS = {
'interpolation': True,
'raise_errors': False,
'list_values': True,
'create_empty': False,
'file_error': False,
'configspec': None,
'stringify': True,
# option may be set to one of ('', ' ', '\t')
'indent_type': None,
'encoding': None,
'default_encoding': None,
'unrepr': False,
'write_empty_values': False,
}
# this could be replaced if six is used for compatibility, or there are no
# more assertions about items being a string
def getObj(s):
global compiler
if compiler is None:
import compiler
s = "a=" + s
p = compiler.parse(s)
return p.getChildren()[1].getChildren()[0].getChildren()[1]
class UnknownType(Exception):
pass
class Builder(object):
    def build(self, o):
        # look up the handler for this node type, e.g. build_List for a List
        m = getattr(self, 'build_' + o.__class__.__name__, None)
        if m is None:
            raise UnknownType(o.__class__.__name__)
        return m(o)
def build_List(self, o):
return list(map(self.build, o.getChildren()))
def build_Const(self, o):
return o.value
def build_Dict(self, o):
d = {}
i = iter(map(self.build, o.getChildren()))
for el in i:
d[el] = next(i)
return d
def build_Tuple(self, o):
return tuple(self.build_List(o))
def build_Name(self, o):
if o.name == 'None':
return None
if o.name == 'True':
return True
if o.name == 'False':
return False
# An undefined Name
raise UnknownType('Undefined Name')
def build_Add(self, o):
real, imag = list(map(self.build_Const, o.getChildren()))
try:
real = float(real)
except TypeError:
raise UnknownType('Add')
if not isinstance(imag, complex) or imag.real != 0.0:
raise UnknownType('Add')
return real+imag
def build_Getattr(self, o):
parent = self.build(o.expr)
return getattr(parent, o.attrname)
def build_UnarySub(self, o):
return -self.build_Const(o.getChildren()[0])
def build_UnaryAdd(self, o):
return self.build_Const(o.getChildren()[0])
_builder = Builder()
def unrepr(s):
    if not s:
        return s
    # literal_eval is imported at the top of this module and only accepts
    # Python literals, so this is safe
    return literal_eval(s)
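# Example - unrepr parses Python literals without evaluating arbitrary code:
#
#   unrepr("[1, 2, 'a']")  ->  [1, 2, 'a']
#   unrepr("{'x': 1}")     ->  {'x': 1}
#   unrepr('')             ->  ''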
class ConfigObjError(SyntaxError):
"""
This is the base class for all errors that ConfigObj raises.
It is a subclass of SyntaxError.
"""
def __init__(self, message='', line_number=None, line=''):
self.line = line
self.line_number = line_number
SyntaxError.__init__(self, message)
class NestingError(ConfigObjError):
"""
This error indicates a level of nesting that doesn't match.
"""
class ParseError(ConfigObjError):
"""
This error indicates that a line is badly written.
It is neither a valid ``key = value`` line,
nor a valid section marker line.
"""
class ReloadError(IOError):
"""
A 'reload' operation failed.
This exception is a subclass of ``IOError``.
"""
def __init__(self):
IOError.__init__(self, 'reload failed, filename is not set.')
class DuplicateError(ConfigObjError):
"""
The keyword or section specified already exists.
"""
class ConfigspecError(ConfigObjError):
"""
    An error occurred whilst parsing a configspec.
"""
class InterpolationError(ConfigObjError):
"""Base class for the two interpolation errors."""
class InterpolationLoopError(InterpolationError):
"""Maximum interpolation depth exceeded in string interpolation."""
def __init__(self, option):
InterpolationError.__init__(
self,
'interpolation loop detected in value "%s".' % option)
class RepeatSectionError(ConfigObjError):
"""
This error indicates additional sections in a section with a
``__many__`` (repeated) section.
"""
class MissingInterpolationOption(InterpolationError):
"""A value specified for interpolation was missing."""
def __init__(self, option):
msg = 'missing option "%s" in interpolation.' % option
InterpolationError.__init__(self, msg)
class UnreprError(ConfigObjError):
"""An error parsing in unrepr mode."""
class InterpolationEngine(object):
"""
A helper class to help perform string interpolation.
This class is an abstract base class; its descendants perform
the actual work.
"""
# compiled regexp to use in self.interpolate()
_KEYCRE = re.compile(r"%\(([^)]*)\)s")
_cookie = '%'
def __init__(self, section):
# the Section instance that "owns" this engine
self.section = section
def interpolate(self, key, value):
# short-cut
        if self._cookie not in value:
return value
def recursive_interpolate(key, value, section, backtrail):
"""The function that does the actual work.
``value``: the string we're trying to interpolate.
``section``: the section in which that string was found
``backtrail``: a dict to keep track of where we've been,
to detect and prevent infinite recursion loops
This is similar to a depth-first-search algorithm.
"""
# Have we been here already?
if (key, section.name) in backtrail:
# Yes - infinite loop detected
raise InterpolationLoopError(key)
# Place a marker on our backtrail so we won't come back here again
backtrail[(key, section.name)] = 1
# Now start the actual work
match = self._KEYCRE.search(value)
while match:
# The actual parsing of the match is implementation-dependent,
# so delegate to our helper function
k, v, s = self._parse_match(match)
if k is None:
# That's the signal that no further interpolation is needed
replacement = v
else:
# Further interpolation may be needed to obtain final value
replacement = recursive_interpolate(k, v, s, backtrail)
# Replace the matched string with its final value
start, end = match.span()
value = ''.join((value[:start], replacement, value[end:]))
new_search_start = start + len(replacement)
# Pick up the next interpolation key, if any, for next time
# through the while loop
match = self._KEYCRE.search(value, new_search_start)
# Now safe to come back here again; remove marker from backtrail
del backtrail[(key, section.name)]
return value
# Back in interpolate(), all we have to do is kick off the recursive
# function with appropriate starting values
value = recursive_interpolate(key, value, self.section, {})
return value
def _fetch(self, key):
"""Helper function to fetch values from owning section.
Returns a 2-tuple: the value, and the section where it was found.
"""
# switch off interpolation before we try and fetch anything !
save_interp = self.section.main.interpolation
self.section.main.interpolation = False
# Start at section that "owns" this InterpolationEngine
current_section = self.section
while True:
# try the current section first
val = current_section.get(key)
if val is not None and not isinstance(val, Section):
break
# try "DEFAULT" next
val = current_section.get('DEFAULT', {}).get(key)
if val is not None and not isinstance(val, Section):
break
# move up to parent and try again
# top-level's parent is itself
if current_section.parent is current_section:
# reached top level, time to give up
break
current_section = current_section.parent
# restore interpolation to previous value before returning
self.section.main.interpolation = save_interp
if val is None:
raise MissingInterpolationOption(key)
return val, current_section
def _parse_match(self, match):
"""Implementation-dependent helper function.
Will be passed a match object corresponding to the interpolation
key we just found (e.g., "%(foo)s" or "$foo"). Should look up that
key in the appropriate config file section (using the ``_fetch()``
helper function) and return a 3-tuple: (key, value, section)
``key`` is the name of the key we're looking for
``value`` is the value found for that key
``section`` is a reference to the section where it was found
``key`` and ``section`` should be None if no further
interpolation should be performed on the resulting value
(e.g., if we interpolated "$$" and returned "$").
"""
raise NotImplementedError()
class ConfigParserInterpolation(InterpolationEngine):
"""Behaves like ConfigParser."""
_cookie = '%'
_KEYCRE = re.compile(r"%\(([^)]*)\)s")
def _parse_match(self, match):
key = match.group(1)
value, section = self._fetch(key)
return key, value, section
class TemplateInterpolation(InterpolationEngine):
"""Behaves like string.Template."""
_cookie = '$'
_delimiter = '$'
_KEYCRE = re.compile(r"""
\$(?:
(?P<escaped>\$) | # Two $ signs
(?P<named>[_a-z][_a-z0-9]*) | # $name format
{(?P<braced>[^}]*)} # ${name} format
)
""", re.IGNORECASE | re.VERBOSE)
def _parse_match(self, match):
# Valid name (in or out of braces): fetch value from section
key = match.group('named') or match.group('braced')
if key is not None:
value, section = self._fetch(key)
return key, value, section
# Escaped delimiter (e.g., $$): return single delimiter
if match.group('escaped') is not None:
# Return None for key and section to indicate it's time to stop
return None, self._delimiter, None
# Anything else: ignore completely, just return it unchanged
return None, match.group(), None
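# Illustrative example of template-style interpolation (section and key names
# below are hypothetical). With interpolation='template', a value may refer to
# keys from its own section, a DEFAULT section, or any parent section:
#
#   [DEFAULT]
#   home = /opt/app
#   [paths]
#   logs = $home/logs      # -> '/opt/app/logs'
#   data = ${home}/data    # -> '/opt/app/data'
#   note = cost is $$5     # -> 'cost is $5' (escaped delimiter)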
interpolation_engines = {
'configparser': ConfigParserInterpolation,
'template': TemplateInterpolation,
}
def __newobj__(cls, *args):
# Hack for pickle
return cls.__new__(cls, *args)
class Section(OrderedDict):
"""
A dictionary-like object that represents a section in a config file.
It does string interpolation if the 'interpolation' attribute
of the 'main' object is set to True.
Interpolation is tried first from this object, then from the 'DEFAULT'
section of this object, next from the parent and its 'DEFAULT' section,
and so on until the main object is reached.
A Section will behave like an ordered dictionary - following the
order of the ``scalars`` and ``sections`` attributes.
You can use this to change the order of members.
Iteration follows the order: scalars, then sections.
"""
def __setstate__(self, state):
OrderedDict.update(self, state[0])
self.__dict__.update(state[1])
def __reduce__(self):
state = (OrderedDict(self), self.__dict__)
return (__newobj__, (self.__class__,), state)
def __init__(self, parent, depth, main, indict=None, name=None):
"""
* parent is the section above
* depth is the depth level of this section
* main is the main ConfigObj
* indict is a dictionary to initialise the section with
"""
if indict is None:
indict = {}
OrderedDict.__init__(self)
# used for nesting level *and* interpolation
self.parent = parent
# used for the interpolation attribute
self.main = main
# level of nesting depth of this Section
self.depth = depth
# purely for information
self.name = name
#
self._initialise()
# we do this explicitly so that __setitem__ is used properly
# (rather than just passing to ``OrderedDict.__init__``)
for entry, value in indict.items():
self[entry] = value
def _initialise(self):
# the sequence of scalar values in this Section
self.scalars = []
# the sequence of sections in this Section
self.sections = []
# for comments :-)
self.comments = {}
self.inline_comments = {}
# the configspec
self.configspec = None
# for defaults
self.defaults = []
self.default_values = {}
self.extra_values = []
self._created = False
def _interpolate(self, key, value):
try:
# do we already have an interpolation engine?
engine = self._interpolation_engine
except AttributeError:
# not yet: first time running _interpolate(), so pick the engine
name = self.main.interpolation
if name == True: # note that "if name:" would be incorrect here
# backwards-compatibility: interpolation=True means use default
name = DEFAULT_INTERPOLATION
name = name.lower() # so that "Template", "template", etc. all work
class_ = interpolation_engines.get(name, None)
if class_ is None:
# invalid value for self.main.interpolation
self.main.interpolation = False
return value
else:
# save reference to engine so we don't have to do this again
engine = self._interpolation_engine = class_(self)
# let the engine do the actual work
return engine.interpolate(key, value)
def __getitem__(self, key):
"""Fetch the item and do string interpolation."""
val = OrderedDict.__getitem__(self, key)
if self.main.interpolation:
if isinstance(val, six.string_types):
return self._interpolate(key, val)
if isinstance(val, list):
def _check(entry):
if isinstance(entry, six.string_types):
return self._interpolate(key, entry)
return entry
new = [_check(entry) for entry in val]
if new != val:
return new
return val
def __setitem__(self, key, value, unrepr=False):
"""
Correctly set a value.
Making dictionary values Section instances.
(We have to special case 'Section' instances - which are also dicts)
Keys must be strings.
Values need only be strings (or lists of strings) if
``main.stringify`` is set.
``unrepr`` must be set when setting a value to a dictionary, without
creating a new sub-section.
"""
if not isinstance(key, six.string_types):
raise ValueError('The key "%s" is not a string.' % key)
# add the comment
if key not in self.comments:
self.comments[key] = []
self.inline_comments[key] = ''
# remove the entry from defaults
if key in self.defaults:
self.defaults.remove(key)
#
if isinstance(value, Section):
if key not in self:
self.sections.append(key)
OrderedDict.__setitem__(self, key, value)
elif isinstance(value, dict) and not unrepr:
# First create the new depth level,
# then create the section
if key not in self:
self.sections.append(key)
new_depth = self.depth + 1
OrderedDict.__setitem__(
self,
key,
Section(
self,
new_depth,
self.main,
indict=value,
name=key))
else:
if key not in self:
self.scalars.append(key)
if not self.main.stringify:
if isinstance(value, six.string_types):
pass
elif isinstance(value, (list, tuple)):
for entry in value:
if not isinstance(entry, six.string_types):
raise TypeError('Value is not a string "%s".' % entry)
else:
raise TypeError('Value is not a string "%s".' % value)
OrderedDict.__setitem__(self, key, value)
def __delitem__(self, key):
"""Remove items from the sequence when deleting."""
        OrderedDict.__delitem__(self, key)
if key in self.scalars:
self.scalars.remove(key)
else:
self.sections.remove(key)
del self.comments[key]
del self.inline_comments[key]
def get(self, key, default=None):
"""A version of ``get`` that doesn't bypass string interpolation."""
try:
return self[key]
except KeyError:
return default
def update(self, indict):
"""
A version of update that uses our ``__setitem__``.
"""
for entry in indict:
self[entry] = indict[entry]
def pop(self, key, default=MISSING):
"""
        D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised.
"""
try:
val = self[key]
except KeyError:
if default is MISSING:
raise
val = default
else:
del self[key]
return val
def popitem(self):
"""Pops the first (key,val)"""
sequence = (self.scalars + self.sections)
if not sequence:
            raise KeyError('popitem(): dictionary is empty')
key = sequence[0]
val = self[key]
del self[key]
return key, val
def clear(self):
"""
A version of clear that also affects scalars/sections
Also clears comments and configspec.
Leaves other attributes alone :
depth/main/parent are not affected
"""
OrderedDict.clear(self)
self.scalars = []
self.sections = []
self.comments = {}
self.inline_comments = {}
self.configspec = None
self.defaults = []
self.extra_values = []
def setdefault(self, key, default=None):
"""A version of setdefault that sets sequence if appropriate."""
try:
return self[key]
except KeyError:
self[key] = default
return self[key]
def items(self):
"""D.items() -> list of D's (key, value) pairs, as 2-tuples"""
return list(zip((self.scalars + self.sections), list(self.values())))
def keys(self):
"""D.keys() -> list of D's keys"""
return (self.scalars + self.sections)
def values(self):
"""D.values() -> list of D's values"""
return [self[key] for key in (self.scalars + self.sections)]
def iteritems(self):
"""D.iteritems() -> an iterator over the (key, value) items of D"""
return iter(list(self.items()))
def iterkeys(self):
"""D.iterkeys() -> an iterator over the keys of D"""
return iter((self.scalars + self.sections))
__iter__ = iterkeys
def itervalues(self):
"""D.itervalues() -> an iterator over the values of D"""
return iter(list(self.values()))
def __repr__(self):
"""x.__repr__() <==> repr(x)"""
def _getval(key):
try:
return self[key]
except MissingInterpolationOption:
return OrderedDict.__getitem__(self, key)
return '{%s}' % ', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
for key in (self.scalars + self.sections)])
__str__ = __repr__
__str__.__doc__ = "x.__str__() <==> str(x)"
# Extra methods - not in a normal dictionary
def dict(self):
"""
Return a deepcopy of self as a dictionary.
All members that are ``Section`` instances are recursively turned to
ordinary dictionaries - by calling their ``dict`` method.
>>> n = a.dict()
>>> n == a
1
>>> n is a
0
"""
newdict = {}
for entry in self:
this_entry = self[entry]
if isinstance(this_entry, Section):
this_entry = this_entry.dict()
elif isinstance(this_entry, list):
# create a copy rather than a reference
this_entry = list(this_entry)
elif isinstance(this_entry, tuple):
# create a copy rather than a reference
this_entry = tuple(this_entry)
newdict[entry] = this_entry
return newdict
def merge(self, indict):
"""
A recursive update - useful for merging config files.
>>> a = '''[section1]
... option1 = True
... [[subsection]]
... more_options = False
... # end of file'''.splitlines()
>>> b = '''# File is user.ini
... [section1]
... option1 = False
... # end of file'''.splitlines()
>>> c1 = ConfigObj(b)
>>> c2 = ConfigObj(a)
>>> c2.merge(c1)
>>> c2
ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}})
"""
for key, val in list(indict.items()):
if (key in self and isinstance(self[key], dict) and
isinstance(val, dict)):
self[key].merge(val)
else:
self[key] = val
def rename(self, oldkey, newkey):
"""
Change a keyname to another, without changing position in sequence.
Implemented so that transformations can be made on keys,
as well as on values. (used by encode and decode)
Also renames comments.
"""
if oldkey in self.scalars:
the_list = self.scalars
elif oldkey in self.sections:
the_list = self.sections
else:
raise KeyError('Key "%s" not found.' % oldkey)
pos = the_list.index(oldkey)
#
val = self[oldkey]
OrderedDict.__delitem__(self, oldkey)
OrderedDict.__setitem__(self, newkey, val)
the_list.remove(oldkey)
the_list.insert(pos, newkey)
comm = self.comments[oldkey]
inline_comment = self.inline_comments[oldkey]
del self.comments[oldkey]
del self.inline_comments[oldkey]
self.comments[newkey] = comm
self.inline_comments[newkey] = inline_comment
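# A brief usage sketch (the key names below are hypothetical):
#
#   section.rename('old_name', 'new_name')   # same position in the sequence
#   section.comments['new_name']             # comments follow the new name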
def walk(self, function, raise_errors=True,
call_on_sections=False, **keywargs):
"""
Walk every member and call a function on the keyword and value.
Return a dictionary of the return values
If the function raises an exception, raise the error
unless ``raise_errors=False``, in which case the return value is set to
``False``.
Any unrecognised keyword arguments you pass to walk will be passed on
to the function you pass in.
Note: if ``call_on_sections`` is ``True`` then - on encountering a
subsection, *first* the function is called for the *whole* subsection,
and then it recurses into its members. This means your function must be
able to handle strings, dictionaries and lists. This allows you
to change the key of subsections as well as of ordinary members. The
return value from the call on the whole subsection is discarded.
See the encode and decode methods for examples, including functions.
.. admonition:: caution
You can use ``walk`` to transform the names of members of a section
but you mustn't add or delete members.
>>> config = '''[XXXXsection]
... XXXXkey = XXXXvalue'''.splitlines()
>>> cfg = ConfigObj(config)
>>> cfg
ConfigObj({'XXXXsection': {'XXXXkey': 'XXXXvalue'}})
>>> def transform(section, key):
... val = section[key]
... newkey = key.replace('XXXX', 'CLIENT1')
... section.rename(key, newkey)
... if isinstance(val, (tuple, list, dict)):
... pass
... else:
... val = val.replace('XXXX', 'CLIENT1')
... section[newkey] = val
>>> cfg.walk(transform, call_on_sections=True)
{'CLIENT1section': {'CLIENT1key': None}}
>>> cfg
ConfigObj({'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}})
"""
out = {}
# scalars first
for i in range(len(self.scalars)):
entry = self.scalars[i]
try:
val = function(self, entry, **keywargs)
# bound again in case name has changed
entry = self.scalars[i]
out[entry] = val
except Exception:
if raise_errors:
raise
else:
entry = self.scalars[i]
out[entry] = False
# then sections
for i in range(len(self.sections)):
entry = self.sections[i]
if call_on_sections:
try:
function(self, entry, **keywargs)
except Exception:
if raise_errors:
raise
else:
entry = self.sections[i]
out[entry] = False
# bound again in case name has changed
entry = self.sections[i]
# previous result is discarded
out[entry] = self[entry].walk(
function,
raise_errors=raise_errors,
call_on_sections=call_on_sections,
**keywargs)
return out
def as_bool(self, key):
"""
Accepts a key as input. The corresponding value must be a string or
the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to
retain compatibility with Python 2.2.
If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns
``True``.
If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns
``False``.
``as_bool`` is not case sensitive.
Any other input will raise a ``ValueError``.
>>> a = ConfigObj()
>>> a['a'] = 'fish'
>>> a.as_bool('a')
Traceback (most recent call last):
ValueError: Value "fish" is neither True nor False
>>> a['b'] = 'True'
>>> a.as_bool('b')
1
>>> a['b'] = 'off'
>>> a.as_bool('b')
0
"""
val = self[key]
if val == True:
return True
elif val == False:
return False
else:
try:
if not isinstance(val, six.string_types):
# TODO: Why do we raise a KeyError here?
raise KeyError()
else:
return self.main._bools[val.lower()]
except KeyError:
raise ValueError('Value "%s" is neither True nor False' % val)
def as_int(self, key):
"""
A convenience method which coerces the specified value to an integer.
If the value is an invalid literal for ``int``, a ``ValueError`` will
be raised.
>>> a = ConfigObj()
>>> a['a'] = 'fish'
>>> a.as_int('a')
Traceback (most recent call last):
ValueError: invalid literal for int() with base 10: 'fish'
>>> a['b'] = '1'
>>> a.as_int('b')
1
>>> a['b'] = '3.2'
>>> a.as_int('b')
Traceback (most recent call last):
ValueError: invalid literal for int() with base 10: '3.2'
"""
return int(self[key])
def as_float(self, key):
"""
A convenience method which coerces the specified value to a float.
If the value is an invalid literal for ``float``, a ``ValueError`` will
be raised.
>>> a = ConfigObj()
>>> a['a'] = 'fish'
>>> a.as_float('a') #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
ValueError: invalid literal for float(): fish
>>> a['b'] = '1'
>>> a.as_float('b')
1.0
>>> a['b'] = '3.2'
>>> a.as_float('b') #doctest: +ELLIPSIS
3.2...
"""
return float(self[key])
def as_list(self, key):
"""
A convenience method which fetches the specified value, guaranteeing
that it is a list.
>>> a = ConfigObj()
>>> a['a'] = 1
>>> a.as_list('a')
[1]
>>> a['a'] = (1,)
>>> a.as_list('a')
[1]
>>> a['a'] = [1]
>>> a.as_list('a')
[1]
"""
result = self[key]
if isinstance(result, (tuple, list)):
return list(result)
return [result]
def restore_default(self, key):
"""
Restore (and return) default value for the specified key.
This method will only work for a ConfigObj that was created
with a configspec and has been validated.
If there is no default value for this key, ``KeyError`` is raised.
"""
default = self.default_values[key]
OrderedDict.__setitem__(self, key, default)
if key not in self.defaults:
self.defaults.append(key)
return default
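# Usage sketch (assuming this instance was created with a configspec, has
# been validated, and 'port' has a default there):
#
#   config['port'] = '9999'
#   config.restore_default('port')   # value is reset to the configspec default
#   'port' in config.defaults        # True - the key is marked as defaulted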
def restore_defaults(self):
"""
Recursively restore default values to all members
that have them.
This method will only work for a ConfigObj that was created
with a configspec and has been validated.
It doesn't delete or modify entries without default values.
"""
for key in self.default_values:
self.restore_default(key)
for section in self.sections:
self[section].restore_defaults()
class ConfigObj(Section):
"""An object to read, create, and write config files."""
_keyword = re.compile(r'''^ # line start
(\s*) # indentation
( # keyword
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'"=].*?) # no quotes
)
\s*=\s* # divider
(.*) # value (including list values and comments)
$ # line end
''',
re.VERBOSE)
_sectionmarker = re.compile(r'''^
(\s*) # 1: indentation
((?:\[\s*)+) # 2: section marker open
( # 3: section name open
(?:"\s*\S.*?\s*")| # at least one non-space with double quotes
(?:'\s*\S.*?\s*')| # at least one non-space with single quotes
(?:[^'"\s].*?) # at least one non-space unquoted
) # section name close
((?:\s*\])+) # 4: section marker close
\s*(\#.*)? # 5: optional comment
$''',
re.VERBOSE)
# this regexp pulls list values out as a single string
# or single values and comments
# FIXME: this regex adds a '' to the end of comma terminated lists
# workaround in ``_handle_value``
_valueexp = re.compile(r'''^
(?:
(?:
(
(?:
(?:
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\#][^,\#]*?) # unquoted
)
\s*,\s* # comma
)* # match all list items ending in a comma (if any)
)
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\#\s][^,]*?)| # unquoted
(?:(?<!,)) # Empty value
)? # last item in a list - or string value
)|
(,) # alternatively a single comma - empty list
)
\s*(\#.*)? # optional comment
$''',
re.VERBOSE)
# use findall to get the members of a list value
_listvalueexp = re.compile(r'''
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'",\#]?.*?) # unquoted
)
\s*,\s* # comma
''',
re.VERBOSE)
# this regexp is used for the value
# when lists are switched off
_nolistvalue = re.compile(r'''^
(
(?:".*?")| # double quotes
(?:'.*?')| # single quotes
(?:[^'"\#].*?)| # unquoted
(?:) # Empty value
)
\s*(\#.*)? # optional comment
$''',
re.VERBOSE)
# regexes for finding triple quoted values on one line
_single_line_single = re.compile(r"^'''(.*?)'''\s*(#.*)?$")
_single_line_double = re.compile(r'^"""(.*?)"""\s*(#.*)?$')
_multi_line_single = re.compile(r"^(.*?)'''\s*(#.*)?$")
_multi_line_double = re.compile(r'^(.*?)"""\s*(#.*)?$')
_triple_quote = {
"'''": (_single_line_single, _multi_line_single),
'"""': (_single_line_double, _multi_line_double),
}
# Used by the ``istrue`` Section method
_bools = {
'yes': True, 'no': False,
'on': True, 'off': False,
'1': True, '0': False,
'true': True, 'false': False,
}
def __init__(self, infile=None, options=None, configspec=None, encoding=None,
interpolation=True, raise_errors=False, list_values=True,
create_empty=False, file_error=False, stringify=True,
indent_type=None, default_encoding=None, unrepr=False,
write_empty_values=False, _inspec=False, use_zato=True, zato_crypto_manager=None,
zato_secrets_conf=None, zato_secrets_url_prefix='zato+secret://'):
"""
Parse a config file or create a config file object.
``ConfigObj(infile=None, options=None, configspec=None, encoding=None,
interpolation=True, raise_errors=False, list_values=True,
create_empty=False, file_error=False, stringify=True,
indent_type=None, default_encoding=None, unrepr=False,
write_empty_values=False, _inspec=False, use_zato=True,
zato_crypto_manager=None, zato_secrets_conf=None,
zato_secrets_url_prefix='zato+secret://')``
"""
self._inspec = _inspec
self.use_zato = use_zato
self.zato_crypto_manager = zato_crypto_manager
self.zato_secrets_conf = zato_secrets_conf
self.zato_secrets_url_prefix = zato_secrets_url_prefix
# init the superclass
Section.__init__(self, self, 0, self)
infile = infile or []
_options = {'configspec': configspec,
'encoding': encoding, 'interpolation': interpolation,
'raise_errors': raise_errors, 'list_values': list_values,
'create_empty': create_empty, 'file_error': file_error,
'stringify': stringify, 'indent_type': indent_type,
'default_encoding': default_encoding, 'unrepr': unrepr,
'write_empty_values': write_empty_values}
if options is None:
options = _options
else:
import warnings
warnings.warn('Passing in an options dictionary to ConfigObj() is '
'deprecated. Use **options instead.',
DeprecationWarning, stacklevel=2)
# TODO: check the values too.
for entry in options:
if entry not in OPTION_DEFAULTS:
raise TypeError('Unrecognised option "%s".' % entry)
for entry, value in list(OPTION_DEFAULTS.items()):
if entry not in options:
options[entry] = value
keyword_value = _options[entry]
if value != keyword_value:
options[entry] = keyword_value
# XXXX this ignores an explicit list_values = True in combination
# with _inspec. The user should *never* do that anyway, but still...
if _inspec:
options['list_values'] = False
self._initialise(options)
configspec = options['configspec']
self._original_configspec = configspec
self._load(infile, configspec)
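# A minimal construction sketch ('server.conf' and the keys below are
# hypothetical):
#
#   config = ConfigObj('server.conf', file_error=True)
#   value = config['section1']['option1']
#
# With use_zato=True (the default here), values such as $ENV_NAME or
# zato+secret://group.key are resolved during parsing - see _parse below.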
def _load(self, infile, configspec):
if isinstance(infile, six.string_types):
self.filename = infile
if os.path.isfile(infile):
with open(infile, 'rb') as h:
content = h.readlines() or []
elif self.file_error:
# raise an error if the file doesn't exist
raise IOError('Config file not found: "%s".' % self.filename)
else:
# file doesn't already exist
if self.create_empty:
# this is a good test that the filename specified
# isn't impossible - like on a non-existent device
with open(infile, 'w') as h:
h.write('')
content = []
elif isinstance(infile, (list, tuple)):
content = list(infile)
elif isinstance(infile, dict):
# initialise self
# the Section class handles creating subsections
if isinstance(infile, ConfigObj):
# get a copy of our ConfigObj
def set_section(in_section, this_section):
for entry in in_section.scalars:
this_section[entry] = in_section[entry]
for section in in_section.sections:
this_section[section] = {}
set_section(in_section[section], this_section[section])
set_section(infile, self)
else:
for entry in infile:
self[entry] = infile[entry]
del self._errors
if configspec is not None:
self._handle_configspec(configspec)
else:
self.configspec = None
return
elif getattr(infile, 'read', MISSING) is not MISSING:
# This supports file like objects
content = infile.read() or []
# needs splitting into lines - but needs doing *after* decoding
# in case it's not an 8 bit encoding
else:
raise TypeError('infile must be a filename, file like object, or list of lines.')
if content:
# don't do it for the empty ConfigObj
content = self._handle_bom(content)
# infile is now *always* a list
#
# Set the newlines attribute (first line ending it finds)
# and strip trailing '\n' or '\r' from lines
for line in content:
if (not line) or (line[-1] not in ('\r', '\n')):
continue
for end in ('\r\n', '\n', '\r'):
if line.endswith(end):
self.newlines = end
break
break
assert all(isinstance(line, six.string_types) for line in content), repr(content)
content = [line.rstrip('\r\n') for line in content]
self._parse(content)
# if we had any errors, now is the time to raise them
if self._errors:
info = "at line %s." % self._errors[0].line_number
if len(self._errors) > 1:
msg = "Parsing failed with several errors.\nFirst error %s" % info
error = ConfigObjError(msg)
else:
error = self._errors[0]
# set the errors attribute; it's a list of tuples:
# (error_type, message, line_number)
error.errors = self._errors
# set the config attribute
error.config = self
raise error
# delete private attributes
del self._errors
if configspec is None:
self.configspec = None
else:
self._handle_configspec(configspec)
def _initialise(self, options=None):
if options is None:
options = OPTION_DEFAULTS
# initialise a few variables
self.filename = None
self._errors = []
self.raise_errors = options['raise_errors']
self.interpolation = options['interpolation']
self.list_values = options['list_values']
self.create_empty = options['create_empty']
self.file_error = options['file_error']
self.stringify = options['stringify']
self.indent_type = options['indent_type']
self.encoding = options['encoding']
self.default_encoding = options['default_encoding']
self.BOM = False
self.newlines = None
self.write_empty_values = options['write_empty_values']
self.unrepr = options['unrepr']
self.initial_comment = []
self.final_comment = []
self.configspec = None
if self._inspec:
self.list_values = False
# Clear section attributes as well
Section._initialise(self)
def __repr__(self):
def _getval(key):
try:
return self[key]
except MissingInterpolationOption:
return OrderedDict.__getitem__(self, key)
return ('ConfigObj({%s})' %
', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
for key in (self.scalars + self.sections)]))
def _handle_bom(self, infile):
"""
Handle any BOM, and decode if necessary.
If an encoding is specified, that *must* be used - but the BOM should
still be removed (and the BOM attribute set).
(If the encoding is wrongly specified, then a BOM for an alternative
encoding won't be discovered or removed.)
If an encoding is not specified, UTF8 or UTF16 BOM will be detected and
removed. The BOM attribute will be set. UTF16 will be decoded to
unicode.
NOTE: This method must not be called with an empty ``infile``.
Specifying the *wrong* encoding is likely to cause a
``UnicodeDecodeError``.
``infile`` must always be returned as a list of lines, but may be
passed in as a single string.
"""
if ((self.encoding is not None) and
(self.encoding.lower() not in BOM_LIST)):
# No need to check for a BOM
# the encoding specified doesn't have one
# just decode
return self._decode(infile, self.encoding)
if isinstance(infile, (list, tuple)):
line = infile[0]
else:
line = infile
if isinstance(line, six.text_type):
# it's already decoded and there's no need to do anything
# else, just use the _decode utility method to handle
# listifying appropriately
return self._decode(infile, self.encoding)
if self.encoding is not None:
# encoding explicitly supplied
# And it could have an associated BOM
# TODO: if encoding is just UTF16 - we ought to check for both
# TODO: big endian and little endian versions.
enc = BOM_LIST[self.encoding.lower()]
if enc == 'utf_16':
# For UTF16 we try big endian and little endian
for BOM, (encoding, final_encoding) in list(BOMS.items()):
if not final_encoding:
# skip UTF8
continue
if infile.startswith(BOM):
### BOM discovered
##self.BOM = True
# Don't need to remove BOM
return self._decode(infile, encoding)
# If we get this far, decoding will *probably* raise a UnicodeDecodeError,
# as the input doesn't appear to start with a BOM
return self._decode(infile, self.encoding)
# Must be UTF8
BOM = BOM_SET[enc]
if not line.startswith(BOM):
return self._decode(infile, self.encoding)
newline = line[len(BOM):]
# BOM removed
if isinstance(infile, (list, tuple)):
infile[0] = newline
else:
infile = newline
self.BOM = True
return self._decode(infile, self.encoding)
# No encoding specified - so we need to check for UTF8/UTF16
for BOM, (encoding, final_encoding) in list(BOMS.items()):
if not isinstance(line, six.binary_type) or not line.startswith(BOM):
# didn't specify a BOM, or it's not a bytestring
continue
else:
# BOM discovered
self.encoding = final_encoding
if not final_encoding:
self.BOM = True
# UTF8
# remove BOM
newline = line[len(BOM):]
if isinstance(infile, (list, tuple)):
infile[0] = newline
else:
infile = newline
# UTF-8
if isinstance(infile, six.text_type):
return infile.splitlines(True)
elif isinstance(infile, six.binary_type):
return infile.decode('utf-8').splitlines(True)
else:
return self._decode(infile, 'utf-8')
# UTF16 - have to decode
return self._decode(infile, encoding)
if six.PY2 and isinstance(line, str):
# don't actually do any decoding, since we're on python 2 and
# returning a bytestring is fine
return self._decode(infile, None)
# No BOM discovered and no encoding specified, default to UTF-8
if isinstance(infile, six.binary_type):
return infile.decode('utf-8').splitlines(True)
else:
return self._decode(infile, 'utf-8')
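# Behaviour sketch: a UTF-8 input starting with b'\xef\xbb\xbf[main]\n' has
# the BOM stripped, self.BOM set to True and the lines decoded to text, while
# an explicit self.encoding outside BOM_LIST is used for decoding as-is.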
def _a_to_u(self, aString):
"""Decode ASCII strings to unicode if a self.encoding is specified."""
if isinstance(aString, six.binary_type) and self.encoding:
return aString.decode(self.encoding)
else:
return aString
def _decode(self, infile, encoding):
"""
Decode infile to unicode, using the specified encoding.
If it is a string, it also needs converting to a list.
"""
if isinstance(infile, six.string_types):
return infile.splitlines(True)
if isinstance(infile, six.binary_type):
# NOTE: Could raise a ``UnicodeDecodeError``
if encoding:
return infile.decode(encoding).splitlines(True)
else:
return infile.splitlines(True)
if encoding:
for i, line in enumerate(infile):
if isinstance(line, six.binary_type):
# NOTE: The isinstance test here handles mixed lists of unicode/string
# NOTE: But the decode will break on any non-string values
# NOTE: Or could raise a ``UnicodeDecodeError``
infile[i] = line.decode(encoding)
return infile
def _decode_element(self, line):
"""Decode element to unicode if necessary."""
if isinstance(line, six.binary_type) and self.default_encoding:
return line.decode(self.default_encoding)
else:
return line
# TODO: this may need to be modified
def _str(self, value):
"""
Used by ``stringify`` within validate, to turn non-string values
into strings.
"""
if not isinstance(value, six.string_types):
# intentionally 'str' because it's just whatever the "normal"
# string type is for the python version we're dealing with
return str(value)
else:
return value
def _parse(self, infile):
"""Actually parse the config file."""
temp_list_values = self.list_values
if self.unrepr:
self.list_values = False
comment_list = []
done_start = False
this_section = self
maxline = len(infile) - 1
cur_index = -1
reset_comment = False
while cur_index < maxline:
if reset_comment:
comment_list = []
cur_index += 1
line = infile[cur_index]
sline = line.strip()
# do we have anything on the line ?
if not sline or sline.startswith('#'):
reset_comment = False
comment_list.append(line)
continue
if not done_start:
# preserve initial comment
self.initial_comment = comment_list
comment_list = []
done_start = True
reset_comment = True
# first we check if it's a section marker
mat = self._sectionmarker.match(line)
if mat is not None:
# is a section line
(indent, sect_open, sect_name, sect_close, comment) = mat.groups()
if indent and (self.indent_type is None):
self.indent_type = indent
cur_depth = sect_open.count('[')
if cur_depth != sect_close.count(']'):
self._handle_error("Cannot compute the section depth",
NestingError, infile, cur_index)
continue
if cur_depth < this_section.depth:
# the new section is dropping back to a previous level
try:
parent = self._match_depth(this_section,
cur_depth).parent
except SyntaxError:
self._handle_error("Cannot compute nesting level",
NestingError, infile, cur_index)
continue
elif cur_depth == this_section.depth:
# the new section is a sibling of the current section
parent = this_section.parent
elif cur_depth == this_section.depth + 1:
# the new section is a child of the current section
parent = this_section
else:
self._handle_error("Section too nested",
NestingError, infile, cur_index)
continue
sect_name = self._unquote(sect_name)
if sect_name in parent:
self._handle_error('Duplicate section name',
DuplicateError, infile, cur_index)
continue
# create the new section
this_section = Section(
parent,
cur_depth,
self,
name=sect_name)
parent[sect_name] = this_section
parent.inline_comments[sect_name] = comment
parent.comments[sect_name] = comment_list
continue
#
# it's not a section marker,
# so it should be a valid ``key = value`` line
mat = self._keyword.match(line)
if mat is None:
self._handle_error(
'Invalid line ({0!r}) (matched as neither section nor keyword)'.format(line),
ParseError, infile, cur_index)
else:
# is a keyword value
# value will include any inline comment
(indent, key, value) = mat.groups()
# Handle Zato-specific needs
if self.use_zato:
# This may be an environment variable ..
if value.startswith('$'):
# .. but not if it is just a lone $ sign, or a value that begins with $$ (left as-is).
if not (len(value) == 1 or value.startswith('$$')):
env_key_name = value[1:].upper()
try:
value = os.environ[env_key_name]
except KeyError:
raise KeyError('Environment variable `{}` not found, config key `{}`'.format(env_key_name, key))
# .. this may be a value to decrypt with a secret key (note that it is an if, not elif,
# to make it possible for environment variables to point to secrets.conf).
if value.startswith(self.zato_secrets_url_prefix):
entry = value.replace(self.zato_secrets_url_prefix, '', 1)
if not entry:
raise ValueError('Missing entry in address `{}`, config key `{}`'.format(value, key))
entry = entry.split('.')
group_name = entry[0]
group_key = '.'.join(entry[1:])
if self.zato_secrets_conf:
group = self.zato_secrets_conf.get(group_name)
if not group:
raise ValueError('Group not found `{}`, config key `{}`, value `{}`'.format(
group_name, key, value))
if not group_key in group:
raise ValueError('Group key not found `{}`, config key `{}`, value `{}`'.format(
group_key, key, value))
else:
encrypted = group[group_key]
if encrypted:
try:
value = self.zato_crypto_manager.decrypt(encrypted)
except Exception as e:
raise ValueError('Could not decrypt value `{}`, group:`{}`, group_key:`{}`, e:`{}`'.format(
encrypted, group, group_key, e))
else:
value = encrypted # This will happen if 'encrypted' is actually an empty string
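# For illustration only (hypothetical names) - with use_zato=True a line like
#
#   password = $DB_PASSWORD
#
# is replaced with os.environ['DB_PASSWORD'] (a lone '$' and values starting
# with '$$' are left intact), while
#
#   password = zato+secret://db.password
#
# looks up group 'db', key 'password' in zato_secrets_conf and, if non-empty,
# decrypts it with zato_crypto_manager.decrypt.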
if indent and (self.indent_type is None):
self.indent_type = indent
# check for a multiline value
if value[:3] in ['"""', "'''"]:
try:
value, comment, cur_index = self._multiline(
value, infile, cur_index, maxline)
except SyntaxError:
self._handle_error(
'Parse error in multiline value',
ParseError, infile, cur_index)
continue
else:
if self.unrepr:
comment = ''
try:
value = unrepr(value)
except Exception as e:
if isinstance(e, UnknownType):
msg = 'Unknown name or type in value'
else:
msg = 'Parse error from unrepr-ing multiline value'
self._handle_error(msg, UnreprError, infile,
cur_index)
continue
else:
if self.unrepr:
comment = ''
try:
value = unrepr(value)
except Exception as e:
if isinstance(e, UnknownType):
msg = 'Unknown name or type in value'
else:
msg = 'Parse error from unrepr-ing value'
self._handle_error(msg, UnreprError, infile,
cur_index)
continue
else:
# extract comment and lists
try:
(value, comment) = self._handle_value(value)
except SyntaxError:
self._handle_error(
'Parse error in value',
ParseError, infile, cur_index)
continue
#
key = self._unquote(key)
if key in this_section:
self._handle_error(
'Duplicate keyword name',
DuplicateError, infile, cur_index)
continue
# add the key.
# we set unrepr because if we have got this far we will never
# be creating a new section
# As a last resort, we can attempt to convert strings to their actual
# data types, e.g. integers.
if self.use_zato:
try:
value = literal_eval(value)
except Exception:
# That's OK, we just had to try
pass
this_section.__setitem__(key, value, unrepr=True)
this_section.inline_comments[key] = comment
this_section.comments[key] = comment_list
continue
#
if self.indent_type is None:
# no indentation used, set the type accordingly
self.indent_type = ''
# preserve the final comment
if not self and not self.initial_comment:
self.initial_comment = comment_list
elif not reset_comment:
self.final_comment = comment_list
self.list_values = temp_list_values
def _match_depth(self, sect, depth):
"""
Given a section and a depth level, walk back through the sections
parents to see if the depth level matches a previous section.
Return a reference to the right section,
or raise a SyntaxError.
"""
while depth < sect.depth:
if sect is sect.parent:
# we've reached the top level already
raise SyntaxError()
sect = sect.parent
if sect.depth == depth:
return sect
# shouldn't get here
raise SyntaxError()
def _handle_error(self, text, ErrorClass, infile, cur_index):
"""
Handle an error according to the error settings.
Either raise the error or store it.
The error will have occurred at ``cur_index``.
"""
line = infile[cur_index]
cur_index += 1
message = '{0} at line {1}.'.format(text, cur_index)
error = ErrorClass(message, cur_index, line)
if self.raise_errors:
# raise the error - parsing stops here
raise error
# store the error
# reraise when parsing has finished
self._errors.append(error)
def _unquote(self, value):
"""Return an unquoted version of a value"""
if not value:
# should only happen during parsing of lists
raise SyntaxError
if (value[0] == value[-1]) and (value[0] in ('"', "'")):
value = value[1:-1]
return value
def _quote(self, value, multiline=True):
"""
Return a safely quoted version of a value.
Raise a ConfigObjError if the value cannot be safely quoted.
If multiline is ``True`` (default) then use triple quotes
if necessary.
* Don't quote values that don't need it.
* Recursively quote members of a list and return a comma joined list.
* Multiline is ``False`` for lists.
* Obey list syntax for empty and single member lists.
If ``list_values=False`` then the value is only quoted if it contains
a ``\\n`` (is multiline) or '#'.
If ``write_empty_values`` is set, and the value is an empty string, it
won't be quoted.
"""
if multiline and self.write_empty_values and value == '':
# Only if multiline is set, so that it is used for values not
# keys, and not values that are part of a list
return ''
if multiline and isinstance(value, (list, tuple)):
if not value:
return ','
elif len(value) == 1:
return self._quote(value[0], multiline=False) + ','
return ', '.join([self._quote(val, multiline=False)
for val in value])
if not isinstance(value, six.string_types):
if self.stringify:
# intentionally 'str' because it's just whatever the "normal"
# string type is for the python version we're dealing with
value = str(value)
else:
raise TypeError('Value "%s" is not a string.' % value)
if not value:
return '""'
no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value
need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value ))
hash_triple_quote = multiline and not need_triple and ("'" in value) and ('"' in value) and ('#' in value)
check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote
if check_for_single:
if not self.list_values:
# we don't quote if ``list_values=False``
quot = noquot
# for normal values either single or double quotes will do
elif '\n' in value:
# will only happen if multiline is off - e.g. '\n' in key
raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
elif ((value[0] not in wspace_plus) and
(value[-1] not in wspace_plus) and
(',' not in value)):
quot = noquot
else:
quot = self._get_single_quote(value)
else:
# if value has '\n' or "'" *and* '"', it will need triple quotes
quot = self._get_triple_quote(value)
if quot == noquot and '#' in value and self.list_values:
quot = self._get_single_quote(value)
return quot % value
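# Behaviour sketch (with default options):
#
#   _quote('simple')      -> simple   (no quoting needed)
#   _quote('a, b')        -> "a, b"   (the comma forces quoting)
#   _quote(['x', 'y'])    -> x, y     (lists are comma-joined)
#
# A value containing both quote characters (or a newline, when multiline is
# allowed) is wrapped in triple quotes instead.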
def _get_single_quote(self, value):
if ("'" in value) and ('"' in value):
raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
elif '"' in value:
quot = squot
else:
quot = dquot
return quot
def _get_triple_quote(self, value):
if (value.find('"""') != -1) and (value.find("'''") != -1):
raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
if value.find('"""') == -1:
quot = tdquot
else:
quot = tsquot
return quot
def _handle_value(self, value):
"""
Given a value string, unquote, remove comment,
handle lists. (including empty and single member lists)
"""
if self._inspec:
# Parsing a configspec so don't handle comments
return (value, '')
# do we look for lists in values ?
if not self.list_values:
mat = self._nolistvalue.match(value)
if mat is None:
raise SyntaxError()
# NOTE: we don't unquote here
return mat.groups()
#
mat = self._valueexp.match(value)
if mat is None:
# the value is badly constructed, probably badly quoted,
# or an invalid list
raise SyntaxError()
(list_values, single, empty_list, comment) = mat.groups()
if (list_values == '') and (single is None):
# change this if you want to accept empty values
raise SyntaxError()
# NOTE: there is no error handling from here on if the regex
# is wrong - incorrect values will slip through
if empty_list is not None:
# the single comma - meaning an empty list
return ([], comment)
if single is not None:
# handle empty values
if list_values and not single:
# FIXME: the '' is a workaround because our regex now matches
# '' at the end of a list if it has a trailing comma
single = None
else:
single = single or '""'
single = self._unquote(single)
if list_values == '':
# not a list value
return (single, comment)
the_list = self._listvalueexp.findall(list_values)
the_list = [self._unquote(val) for val in the_list]
if single is not None:
the_list += [single]
return (the_list, comment)
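# Behaviour sketch (default list_values=True; the second tuple member is the
# inline comment, or None when there is none):
#
#   _handle_value('1, 2, 3')    -> (['1', '2', '3'], None)
#   _handle_value(',')          -> ([], None)        (a lone comma - empty list)
#   _handle_value('x # note')   -> ('x', '# note')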
def _multiline(self, value, infile, cur_index, maxline):
"""Extract the value, where we are in a multiline situation."""
quot = value[:3]
newvalue = value[3:]
single_line = self._triple_quote[quot][0]
multi_line = self._triple_quote[quot][1]
mat = single_line.match(value)
if mat is not None:
retval = list(mat.groups())
retval.append(cur_index)
return retval
elif newvalue.find(quot) != -1:
# somehow the triple quote is missing
raise SyntaxError()
#
while cur_index < maxline:
cur_index += 1
newvalue += '\n'
line = infile[cur_index]
if line.find(quot) == -1:
newvalue += line
else:
# end of multiline, process it
break
else:
# we've got to the end of the config, oops...
raise SyntaxError()
mat = multi_line.match(line)
if mat is None:
# a badly formed line
raise SyntaxError()
(value, comment) = mat.groups()
return (newvalue + value, comment, cur_index)
def _handle_configspec(self, configspec):
"""Parse the configspec."""
# FIXME: Should we check that the configspec was created with the
# correct settings ? (i.e. ``list_values=False``)
if not isinstance(configspec, ConfigObj):
try:
configspec = ConfigObj(configspec,
raise_errors=True,
file_error=True,
_inspec=True)
except ConfigObjError as e:
# FIXME: Should these errors have a reference
# to the already parsed ConfigObj ?
raise ConfigspecError('Parsing configspec failed: %s' % e)
except IOError as e:
raise IOError('Reading configspec failed: %s' % e)
self.configspec = configspec
def _set_configspec(self, section, copy):
"""
Called by validate. Handles setting the configspec on subsections
including sections to be validated by __many__
"""
configspec = section.configspec
many = configspec.get('__many__')
if isinstance(many, dict):
for entry in section.sections:
if entry not in configspec:
section[entry].configspec = many
for entry in configspec.sections:
if entry == '__many__':
continue
if entry not in section:
section[entry] = {}
section[entry]._created = True
if copy:
# copy comments
section.comments[entry] = configspec.comments.get(entry, [])
section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
# Could be a scalar when we expect a section
if isinstance(section[entry], Section):
section[entry].configspec = configspec[entry]
def _write_line(self, indent_string, entry, this_entry, comment):
"""Write an individual line, for the write method"""
# NOTE: the calls to self._quote here handle non-StringType values.
if not self.unrepr:
val = self._decode_element(self._quote(this_entry))
else:
val = repr(this_entry)
out = '%s%s%s%s %s' % (indent_string,
self._decode_element(self._quote(entry, multiline=False)),
self._a_to_u(' = '),
val,
self._decode_element(comment))
out = out.strip()
return out
def _write_marker(self, indent_string, depth, entry, comment):
"""Write a section marker line"""
return '%s%s%s%s%s' % (indent_string,
self._a_to_u('[' * depth),
self._quote(self._decode_element(entry), multiline=False),
self._a_to_u(']' * depth),
self._decode_element(comment))
def _handle_comment(self, comment):
"""Deal with a comment."""
if not comment:
return ''
start = self.indent_type
if not comment.startswith('#'):
start += self._a_to_u(' # ')
return (start + comment)
# Public methods
def write(self, outfile=None, section=None):
"""
Write the current ConfigObj as a file
tekNico: FIXME: use StringIO instead of real files
>>> filename = a.filename
>>> a.filename = 'test.ini'
>>> a.write()
>>> a.filename = filename
>>> a == ConfigObj('test.ini', raise_errors=True)
1
>>> import os
>>> os.remove('test.ini')
"""
if self.indent_type is None:
# this can be true if initialised from a dictionary
self.indent_type = DEFAULT_INDENT_TYPE
out = []
cs = self._a_to_u('#')
csp = self._a_to_u('# ')
if section is None:
int_val = self.interpolation
self.interpolation = False
section = self
for line in self.initial_comment:
line = self._decode_element(line)
stripped_line = line.strip()
if stripped_line and not stripped_line.startswith(cs):
line = csp + line
out.append(line)
indent_string = self.indent_type * section.depth
for entry in (section.scalars + section.sections):
if entry in section.defaults:
# don't write out default values
continue
for comment_line in section.comments[entry]:
comment_line = self._decode_element(comment_line.lstrip())
if comment_line and not comment_line.startswith(cs):
comment_line = csp + comment_line
out.append(indent_string + comment_line)
this_entry = section[entry]
comment = self._handle_comment(section.inline_comments[entry])
if isinstance(this_entry, Section):
# a section
out.append(self._write_marker(
indent_string,
this_entry.depth,
entry,
comment))
out.extend(self.write(section=this_entry))
else:
out.append(self._write_line(
indent_string,
entry,
this_entry,
comment))
if section is self:
for line in self.final_comment:
line = self._decode_element(line)
stripped_line = line.strip()
if stripped_line and not stripped_line.startswith(cs):
line = csp + line
out.append(line)
self.interpolation = int_val
if section is not self:
return out
if (self.filename is None) and (outfile is None):
# output a list of lines
# might need to encode
# NOTE: This will *screw* UTF16, each line will start with the BOM
if self.encoding:
out = [l.encode(self.encoding) for l in out]
if (self.BOM and ((self.encoding is None) or
(BOM_LIST.get(self.encoding.lower()) == 'utf_8'))):
# Add the UTF8 BOM
if not out:
out.append('')
out[0] = BOM_UTF8 + out[0]
return out
# Turn the list to a string, joined with correct newlines
newline = self.newlines or os.linesep
if (getattr(outfile, 'mode', None) is not None and outfile.mode == 'w'
and sys.platform == 'win32' and newline == '\r\n'):
# Windows specific hack to avoid writing '\r\r\n'
newline = '\n'
output = self._a_to_u(newline).join(out)
if not output.endswith(newline):
output += newline
if isinstance(output, six.binary_type):
output_bytes = output
else:
output_bytes = output.encode(self.encoding or
self.default_encoding or
'ascii')
if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
# Add the UTF8 BOM
output_bytes = BOM_UTF8 + output_bytes
if outfile is not None:
outfile.write(output_bytes)
else:
with open(self.filename, 'wb') as h:
h.write(output_bytes)
def validate(self, validator, preserve_errors=False, copy=False,
section=None):
"""
Test the ConfigObj against a configspec.
It uses the ``validator`` object from *validate.py*.
To run ``validate`` on the current ConfigObj, call: ::
test = config.validate(validator)
(Normally having previously passed in the configspec when the ConfigObj
was created - you can dynamically assign a dictionary of checks to the
``configspec`` attribute of a section though).
It returns ``True`` if everything passes, or a dictionary of
pass/fails (True/False). If every member of a subsection passes, it
will just have the value ``True``. (It also returns ``False`` if all
members fail).
In addition, it converts the values from strings to their native
types if their checks pass (and ``stringify`` is set).
If ``preserve_errors`` is ``True`` (``False`` is default) then instead
of a marking a fail with a ``False``, it will preserve the actual
exception object. This can contain info about the reason for failure.
For example the ``VdtValueTooSmallError`` indicates that the value
supplied was too small. If a value (or section) is missing it will
still be marked as ``False``.
You must have the validate module to use ``preserve_errors=True``.
You can then use the ``flatten_errors`` function to turn your nested
results dictionary into a flattened list of failures - useful for
displaying meaningful error messages.
"""
if section is None:
if self.configspec is None:
raise ValueError('No configspec supplied.')
if preserve_errors:
# We do this once to remove a top level dependency on the validate module
# Which makes importing configobj faster
from zato.common.ext.validate_ import VdtMissingValue
self._vdtMissingValue = VdtMissingValue
section = self
if copy:
section.initial_comment = section.configspec.initial_comment
section.final_comment = section.configspec.final_comment
section.encoding = section.configspec.encoding
section.BOM = section.configspec.BOM
section.newlines = section.configspec.newlines
section.indent_type = section.configspec.indent_type
#
# section.default_values.clear() #??
configspec = section.configspec
self._set_configspec(section, copy)
def validate_entry(entry, spec, val, missing, ret_true, ret_false):
section.default_values.pop(entry, None)
try:
section.default_values[entry] = validator.get_default_value(configspec[entry])
except (KeyError, AttributeError, validator.baseErrorClass):
# No default, bad default or validator has no 'get_default_value'
# (e.g. SimpleVal)
pass
try:
check = validator.check(spec,
val,
missing=missing
)
except validator.baseErrorClass as e:
if not preserve_errors or isinstance(e, self._vdtMissingValue):
out[entry] = False
else:
# preserve the error
out[entry] = e
ret_false = False
ret_true = False
else:
ret_false = False
out[entry] = True
if self.stringify or missing:
# if we are doing type conversion
# or the value is a supplied default
if not self.stringify:
if isinstance(check, (list, tuple)):
# preserve lists
check = [self._str(item) for item in check]
elif missing and check is None:
# convert the None from a default to a ''
check = ''
else:
check = self._str(check)
if (check != val) or missing:
section[entry] = check
if not copy and missing and entry not in section.defaults:
section.defaults.append(entry)
return ret_true, ret_false
#
out = {}
ret_true = True
ret_false = True
unvalidated = [k for k in section.scalars if k not in configspec]
incorrect_sections = [k for k in configspec.sections if k in section.scalars]
incorrect_scalars = [k for k in configspec.scalars if k in section.sections]
for entry in configspec.scalars:
if entry in ('__many__', '___many___'):
# reserved names
continue
if (not entry in section.scalars) or (entry in section.defaults):
# missing entries
# or entries from defaults
missing = True
val = None
if copy and entry not in section.scalars:
# copy comments
section.comments[entry] = (
configspec.comments.get(entry, []))
section.inline_comments[entry] = (
configspec.inline_comments.get(entry, ''))
#
else:
missing = False
val = section[entry]
ret_true, ret_false = validate_entry(entry, configspec[entry], val,
missing, ret_true, ret_false)
many = None
if '__many__' in configspec.scalars:
many = configspec['__many__']
elif '___many___' in configspec.scalars:
many = configspec['___many___']
if many is not None:
for entry in unvalidated:
val = section[entry]
ret_true, ret_false = validate_entry(entry, many, val, False,
ret_true, ret_false)
unvalidated = []
for entry in incorrect_scalars:
ret_true = False
if not preserve_errors:
out[entry] = False
else:
ret_false = False
msg = 'Value %r was provided as a section' % entry
out[entry] = validator.baseErrorClass(msg)
for entry in incorrect_sections:
ret_true = False
if not preserve_errors:
out[entry] = False
else:
ret_false = False
msg = 'Section %r was provided as a single value' % entry
out[entry] = validator.baseErrorClass(msg)
# Missing sections will have been created as empty ones when the
# configspec was read.
for entry in section.sections:
# FIXME: this means DEFAULT is not copied in copy mode
if section is self and entry == 'DEFAULT':
continue
if section[entry].configspec is None:
unvalidated.append(entry)
continue
if copy:
section.comments[entry] = configspec.comments.get(entry, [])
section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
check = self.validate(validator, preserve_errors=preserve_errors, copy=copy, section=section[entry])
out[entry] = check
if check == False:
ret_true = False
elif check == True:
ret_false = False
else:
ret_true = False
section.extra_values = unvalidated
if preserve_errors and not section._created:
# If the section wasn't created (i.e. it wasn't missing)
# then we can't return False, we need to preserve errors
ret_false = False
#
if ret_false and preserve_errors and out:
# If we are preserving errors, but all
# the failures are from missing sections / values
# then we can return False. Otherwise there is a
# real failure that we need to preserve.
ret_false = not any(out.values())
if ret_true:
return True
elif ret_false:
return False
return out
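# Validation sketch - assuming the vendored validate module (imported above
# as zato.common.ext.validate_) exposes the usual Validator class:
#
#   from zato.common.ext.validate_ import Validator
#   result = config.validate(Validator(), preserve_errors=True)
#   if result is not True:
#       failures = flatten_errors(config, result)   # see flatten_errors below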
def reset(self):
"""Clear ConfigObj instance and restore to 'freshly created' state."""
self.clear()
self._initialise()
# FIXME: Should be done by '_initialise', but ConfigObj constructor (and reload)
# requires an empty dictionary
self.configspec = None
# Just to be sure ;-)
self._original_configspec = None
def reload(self):
"""
Reload a ConfigObj from file.
This method raises a ``ReloadError`` if the ConfigObj doesn't have
a filename attribute pointing to a file.
"""
if not isinstance(self.filename, six.string_types):
raise ReloadError()
filename = self.filename
current_options = {}
for entry in OPTION_DEFAULTS:
if entry == 'configspec':
continue
current_options[entry] = getattr(self, entry)
configspec = self._original_configspec
current_options['configspec'] = configspec
self.clear()
self._initialise(current_options)
self._load(filename, configspec)
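# Usage sketch ('app.conf' is a hypothetical file name):
#
#   config = ConfigObj('app.conf')
#   ...                 # the file changes on disk
#   config.reload()     # re-read the file with the original options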
class SimpleVal(object):
"""
A simple validator.
Can be used to check that all members expected are present.
To use it, provide a configspec with all your members in (the value given
will be ignored). Pass an instance of ``SimpleVal`` to the ``validate``
method of your ``ConfigObj``. ``validate`` will return ``True`` if all
members are present, or a dictionary with True/False meaning
present/missing. (Whole missing sections will be replaced with ``False``)
"""
def __init__(self):
self.baseErrorClass = ConfigObjError
def check(self, check, member, missing=False):
"""A dummy check method, always returns the value unchanged."""
if missing:
raise self.baseErrorClass()
return member
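# Usage sketch - checks the presence of all configspec members, nothing more:
#
#   result = config.validate(SimpleVal())
#   # True if everything is present, else a dict of True/False flags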
def flatten_errors(cfg, res, levels=None, results=None):
"""
An example function that will turn a nested dictionary of results
(as returned by ``ConfigObj.validate``) into a flat list.
``cfg`` is the ConfigObj instance being checked, ``res`` is the results
dictionary returned by ``validate``.
(This is a recursive function, so you shouldn't use the ``levels`` or
``results`` arguments - they are used by the function.)
Returns a list of keys that failed. Each member of the list is a tuple::
([list of sections...], key, result)
If ``validate`` was called with ``preserve_errors=False`` (the default)
then ``result`` will always be ``False``.
*list of sections* is a flattened list of sections that the key was found
in.
If the section was missing (or a section was expected and a scalar provided
- or vice-versa) then key will be ``None``.
If the value (or section) was missing then ``result`` will be ``False``.
If ``validate`` was called with ``preserve_errors=True`` and a value
was present, but failed the check, then ``result`` will be the exception
object returned. You can use this as a string that describes the failure.
For example *The value "3" is of the wrong type*.
"""
if levels is None:
# first time called
levels = []
results = []
if res == True:
return sorted(results)
if res == False or isinstance(res, Exception):
results.append((levels[:], None, res))
if levels:
levels.pop()
return sorted(results)
for (key, val) in list(res.items()):
if val == True:
continue
if isinstance(cfg.get(key), dict):
# Go down one level
levels.append(key)
flatten_errors(cfg[key], val, levels, results)
continue
results.append((levels[:], key, val))
#
# Go up one level
if levels:
levels.pop()
#
return sorted(results)
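# Usage sketch (assuming 'result' came from validate(..., preserve_errors=True)):
#
#   for sections, key, error in flatten_errors(config, result):
#       print('.'.join(sections), key, error)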
def get_extra_values(conf, _prepend=()):
"""
Find all the values and sections not in the configspec from a validated
ConfigObj.
``get_extra_values`` returns a list of tuples where each tuple represents
either an extra section, or an extra value.
The tuples contain two values, a tuple representing the section the value
is in and the name of the extra values. For extra values in the top level
section the first member will be an empty tuple. For values in the 'foo'
section the first member will be ``('foo',)``. For members in the 'bar'
subsection of the 'foo' section the first member will be ``('foo', 'bar')``.
NOTE: If you call ``get_extra_values`` on a ConfigObj instance that hasn't
been validated it will return an empty list.
"""
out = []
out.extend([(_prepend, name) for name in conf.extra_values])
for name in conf.sections:
if name not in conf.extra_values:
out.extend(get_extra_values(conf[name], _prepend + (name,)))
return out
"""*A programming language is a medium of expression.* - Paul Graham""" | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/ext/configobj_.py | configobj_.py |
import abc
import collections
import contextlib
import sys
import typing
import collections.abc as collections_abc
import operator
# These are used by Protocol implementation
# We use internal typing helpers here, but this significantly reduces
# code duplication. (Also this is only until Protocol is in typing.)
from typing import Generic, Callable, TypeVar, Tuple
# After PEP 560, internal typing API was substantially reworked.
# This is especially important for the Protocol class, which uses internal
# APIs quite extensively.
PEP_560 = sys.version_info[:3] >= (3, 7, 0)
if PEP_560:
GenericMeta = TypingMeta = type
else:
from typing import GenericMeta, TypingMeta
OLD_GENERICS = False
try:
from typing import _type_vars, _next_in_mro, _type_check
except ImportError:
OLD_GENERICS = True
try:
from typing import _subs_tree # noqa
SUBS_TREE = True
except ImportError:
SUBS_TREE = False
try:
from typing import _tp_cache
except ImportError:
def _tp_cache(x):
return x
try:
from typing import _TypingEllipsis, _TypingEmpty
except ImportError:
class _TypingEllipsis:
pass
class _TypingEmpty:
pass
# The two functions below are copies of typing internal helpers.
# They are needed by _ProtocolMeta
def _no_slots_copy(dct):
dict_copy = dict(dct)
if '__slots__' in dict_copy:
for slot in dict_copy['__slots__']:
dict_copy.pop(slot, None)
return dict_copy
def _check_generic(cls, parameters):
if not cls.__parameters__:
raise TypeError("%s is not a generic class" % repr(cls))
alen = len(parameters)
elen = len(cls.__parameters__)
if alen != elen:
raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
("many" if alen > elen else "few", repr(cls), alen, elen))
if hasattr(typing, '_generic_new'):
_generic_new = typing._generic_new
else:
# Note: The '_generic_new(...)' function is used as a part of the
# process of creating a generic type and was added to the typing module
# as of Python 3.5.3.
#
# We've defined '_generic_new(...)' below to exactly match the behavior
# implemented in older versions of 'typing' bundled with Python 3.5.0 to
# 3.5.2. This helps eliminate redundancy when defining collection types
# like 'Deque' later.
#
# See https://github.com/python/typing/pull/308 for more details -- in
# particular, compare and contrast the definition of types like
# 'typing.List' before and after the merge.
def _generic_new(base_cls, cls, *args, **kwargs):
return base_cls.__new__(cls, *args, **kwargs)
# See https://github.com/python/typing/pull/439
if hasattr(typing, '_geqv'):
from typing import _geqv
_geqv_defined = True
else:
_geqv = None
_geqv_defined = False
if sys.version_info[:2] >= (3, 6):
import _collections_abc
_check_methods_in_mro = _collections_abc._check_methods
else:
def _check_methods_in_mro(C, *methods):
mro = C.__mro__
for method in methods:
for B in mro:
if method in B.__dict__:
if B.__dict__[method] is None:
return NotImplemented
break
else:
return NotImplemented
return True
# Please keep __all__ alphabetized within each category.
__all__ = [
# Super-special typing primitives.
'ClassVar',
'Final',
'Type',
# ABCs (from collections.abc).
# The following are added depending on presence
# of their non-generic counterparts in stdlib:
# 'Awaitable',
# 'AsyncIterator',
# 'AsyncIterable',
# 'Coroutine',
# 'AsyncGenerator',
# 'AsyncContextManager',
# 'ChainMap',
# Concrete collection types.
'ContextManager',
'Counter',
'Deque',
'DefaultDict',
'TypedDict',
# Structural checks, a.k.a. protocols.
'SupportsIndex',
# One-off things.
'final',
'IntVar',
'Literal',
'NewType',
'overload',
'Text',
'TYPE_CHECKING',
]
# Annotated relies on substitution trees of PEP 560. It will not work for
# versions of typing older than 3.5.3
HAVE_ANNOTATED = PEP_560 or SUBS_TREE
if PEP_560:
__all__.extend(["get_args", "get_origin", "get_type_hints"])
if HAVE_ANNOTATED:
__all__.append("Annotated")
# Protocols are hard to backport to the original version of typing 3.5.0
HAVE_PROTOCOLS = sys.version_info[:3] != (3, 5, 0)
if HAVE_PROTOCOLS:
__all__.extend(['Protocol', 'runtime', 'runtime_checkable'])
# TODO
if hasattr(typing, 'NoReturn'):
NoReturn = typing.NoReturn
elif hasattr(typing, '_FinalTypingBase'):
class _NoReturn(typing._FinalTypingBase, _root=True):
"""Special type indicating functions that never return.
Example::
from typing import NoReturn
def stop() -> NoReturn:
raise Exception('no way')
This type is invalid in other positions, e.g., ``List[NoReturn]``
will fail in static type checkers.
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError("NoReturn cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("NoReturn cannot be used with issubclass().")
NoReturn = _NoReturn(_root=True)
else:
class _NoReturnMeta(typing.TypingMeta):
"""Metaclass for NoReturn"""
def __new__(cls, name, bases, namespace, _root=False):
return super().__new__(cls, name, bases, namespace, _root=_root)
def __instancecheck__(self, obj):
raise TypeError("NoReturn cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("NoReturn cannot be used with issubclass().")
class NoReturn(typing.Final, metaclass=_NoReturnMeta, _root=True):
"""Special type indicating functions that never return.
Example::
from typing import NoReturn
def stop() -> NoReturn:
raise Exception('no way')
This type is invalid in other positions, e.g., ``List[NoReturn]``
will fail in static type checkers.
"""
__slots__ = ()
# Some unconstrained type variables. These are used by the container types.
# (These are not for export.)
T = typing.TypeVar('T') # Any type.
KT = typing.TypeVar('KT') # Key type.
VT = typing.TypeVar('VT') # Value type.
T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers.
V_co = typing.TypeVar('V_co', covariant=True) # Any type covariant containers.
VT_co = typing.TypeVar('VT_co', covariant=True) # Value type covariant containers.
T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant.
if hasattr(typing, 'ClassVar'):
ClassVar = typing.ClassVar
elif hasattr(typing, '_FinalTypingBase'):
class _ClassVar(typing._FinalTypingBase, _root=True):
"""Special type construct to mark class variables.
An annotation wrapped in ClassVar indicates that a given
attribute is intended to be used as a class variable and
should not be set on instances of that class. Usage::
class Starship:
stats: ClassVar[Dict[str, int]] = {} # class variable
damage: int = 10 # instance variable
ClassVar accepts only types and cannot be further subscribed.
Note that ClassVar is not a class itself, and should not
be used with isinstance() or issubclass().
"""
__slots__ = ('__type__',)
def __init__(self, tp=None, **kwds):
self.__type__ = tp
def __getitem__(self, item):
cls = type(self)
if self.__type__ is None:
return cls(typing._type_check(item,
'{} accepts only single type.'.format(cls.__name__[1:])),
_root=True)
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(new_tp, _root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += '[{}]'.format(typing._type_repr(self.__type__))
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, _ClassVar):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
ClassVar = _ClassVar(_root=True)
else:
class _ClassVarMeta(typing.TypingMeta):
"""Metaclass for ClassVar"""
def __new__(cls, name, bases, namespace, tp=None, _root=False):
self = super().__new__(cls, name, bases, namespace, _root=_root)
if tp is not None:
self.__type__ = tp
return self
def __instancecheck__(self, obj):
raise TypeError("ClassVar cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("ClassVar cannot be used with issubclass().")
def __getitem__(self, item):
cls = type(self)
if self.__type__ is not None:
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
param = typing._type_check(
item,
'{} accepts only single type.'.format(cls.__name__[1:]))
return cls(self.__name__, self.__bases__,
dict(self.__dict__), tp=param, _root=True)
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(self.__name__, self.__bases__,
dict(self.__dict__), tp=self.__type__,
_root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += '[{}]'.format(typing._type_repr(self.__type__))
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, ClassVar):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
class ClassVar(typing.Final, metaclass=_ClassVarMeta, _root=True):
"""Special type construct to mark class variables.
An annotation wrapped in ClassVar indicates that a given
attribute is intended to be used as a class variable and
should not be set on instances of that class. Usage::
class Starship:
stats: ClassVar[Dict[str, int]] = {} # class variable
damage: int = 10 # instance variable
ClassVar accepts only types and cannot be further subscribed.
Note that ClassVar is not a class itself, and should not
be used with isinstance() or issubclass().
"""
__type__ = None
# On older versions of typing there is an internal class named "Final".
if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7):
Final = typing.Final
elif sys.version_info[:2] >= (3, 7):
class _FinalForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
def __getitem__(self, parameters):
item = typing._type_check(parameters,
'{} accepts only single type'.format(self._name))
return _GenericAlias(self, (item,))
Final = _FinalForm('Final',
doc="""A special typing construct to indicate that a name
cannot be re-assigned or overridden in a subclass.
For example:
MAX_SIZE: Final = 9000
MAX_SIZE += 1 # Error reported by type checker
class Connection:
TIMEOUT: Final[int] = 10
class FastConnector(Connection):
TIMEOUT = 1 # Error reported by type checker
There is no runtime checking of these properties.""")
elif hasattr(typing, '_FinalTypingBase'):
class _Final(typing._FinalTypingBase, _root=True):
"""A special typing construct to indicate that a name
cannot be re-assigned or overridden in a subclass.
For example:
MAX_SIZE: Final = 9000
MAX_SIZE += 1 # Error reported by type checker
class Connection:
TIMEOUT: Final[int] = 10
class FastConnector(Connection):
TIMEOUT = 1 # Error reported by type checker
There is no runtime checking of these properties.
"""
__slots__ = ('__type__',)
def __init__(self, tp=None, **kwds):
self.__type__ = tp
def __getitem__(self, item):
cls = type(self)
if self.__type__ is None:
return cls(typing._type_check(item,
'{} accepts only a single type.'.format(cls.__name__[1:])),
_root=True)
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(new_tp, _root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += '[{}]'.format(typing._type_repr(self.__type__))
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, _Final):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
Final = _Final(_root=True)
else:
class _FinalMeta(typing.TypingMeta):
"""Metaclass for Final"""
def __new__(cls, name, bases, namespace, tp=None, _root=False):
self = super().__new__(cls, name, bases, namespace, _root=_root)
if tp is not None:
self.__type__ = tp
return self
def __instancecheck__(self, obj):
raise TypeError("Final cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Final cannot be used with issubclass().")
def __getitem__(self, item):
cls = type(self)
if self.__type__ is not None:
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
param = typing._type_check(
item,
'{} accepts only a single type.'.format(cls.__name__[1:]))
return cls(self.__name__, self.__bases__,
dict(self.__dict__), tp=param, _root=True)
def _eval_type(self, globalns, localns):
new_tp = typing._eval_type(self.__type__, globalns, localns)
if new_tp == self.__type__:
return self
return type(self)(self.__name__, self.__bases__,
dict(self.__dict__), tp=self.__type__,
_root=True)
def __repr__(self):
r = super().__repr__()
if self.__type__ is not None:
r += '[{}]'.format(typing._type_repr(self.__type__))
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, Final):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
class Final(typing.Final, metaclass=_FinalMeta, _root=True):
"""A special typing construct to indicate that a name
cannot be re-assigned or overridden in a subclass.
For example:
MAX_SIZE: Final = 9000
MAX_SIZE += 1 # Error reported by type checker
class Connection:
TIMEOUT: Final[int] = 10
class FastConnector(Connection):
TIMEOUT = 1 # Error reported by type checker
There is no runtime checking of these properties.
"""
__type__ = None
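# Usage sketch (illustrative only): Final names may be bound once and, for
# class attributes, may not be overridden in subclasses. As the docstring
# notes, the checks are purely static:
#
#     from typing_extensions import Final
#
#     RETRY_LIMIT: Final[int] = 3
#     RETRY_LIMIT = 4   # flagged by a type checker, but runs fine at runtime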
if hasattr(typing, 'final'):
final = typing.final
else:
def final(f):
"""This decorator can be used to indicate to type checkers that
the decorated method cannot be overridden, and decorated class
cannot be subclassed. For example:
class Base:
@final
def done(self) -> None:
...
class Sub(Base):
def done(self) -> None: # Error reported by type checker
...
@final
class Leaf:
...
class Other(Leaf): # Error reported by type checker
...
There is no runtime checking of these properties.
"""
return f
def IntVar(name):
return TypeVar(name)
if hasattr(typing, 'Literal'):
Literal = typing.Literal
elif sys.version_info[:2] >= (3, 7):
class _LiteralForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
def __getitem__(self, parameters):
return _GenericAlias(self, parameters)
Literal = _LiteralForm('Literal',
doc="""A type that can be used to indicate to type checkers
that the corresponding value has a value literally equivalent
to the provided parameter. For example:
var: Literal[4] = 4
The type checker understands that 'var' is literally equal to
the value 4 and no other value.
Literal[...] cannot be subclassed. There is no runtime
checking verifying that the parameter is actually a value
instead of a type.""")
elif hasattr(typing, '_FinalTypingBase'):
class _Literal(typing._FinalTypingBase, _root=True):
"""A type that can be used to indicate to type checkers that the
corresponding value has a value literally equivalent to the
provided parameter. For example:
var: Literal[4] = 4
The type checker understands that 'var' is literally equal to the
value 4 and no other value.
Literal[...] cannot be subclassed. There is no runtime checking
verifying that the parameter is actually a value instead of a type.
"""
__slots__ = ('__values__',)
def __init__(self, values=None, **kwds):
self.__values__ = values
def __getitem__(self, values):
cls = type(self)
if self.__values__ is None:
if not isinstance(values, tuple):
values = (values,)
return cls(values, _root=True)
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
def _eval_type(self, globalns, localns):
return self
def __repr__(self):
r = super().__repr__()
if self.__values__ is not None:
r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__)))
return r
def __hash__(self):
return hash((type(self).__name__, self.__values__))
def __eq__(self, other):
if not isinstance(other, _Literal):
return NotImplemented
if self.__values__ is not None:
return self.__values__ == other.__values__
return self is other
Literal = _Literal(_root=True)
else:
class _LiteralMeta(typing.TypingMeta):
"""Metaclass for Literal"""
def __new__(cls, name, bases, namespace, values=None, _root=False):
self = super().__new__(cls, name, bases, namespace, _root=_root)
if values is not None:
self.__values__ = values
return self
def __instancecheck__(self, obj):
raise TypeError("Literal cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Literal cannot be used with issubclass().")
def __getitem__(self, item):
cls = type(self)
if self.__values__ is not None:
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
if not isinstance(item, tuple):
item = (item,)
return cls(self.__name__, self.__bases__,
dict(self.__dict__), values=item, _root=True)
def _eval_type(self, globalns, localns):
return self
def __repr__(self):
r = super().__repr__()
if self.__values__ is not None:
r += '[{}]'.format(', '.join(map(typing._type_repr, self.__values__)))
return r
def __hash__(self):
return hash((type(self).__name__, self.__values__))
def __eq__(self, other):
if not isinstance(other, Literal):
return NotImplemented
if self.__values__ is not None:
return self.__values__ == other.__values__
return self is other
class Literal(typing.Final, metaclass=_LiteralMeta, _root=True):
"""A type that can be used to indicate to type checkers that the
corresponding value has a value literally equivalent to the
provided parameter. For example:
var: Literal[4] = 4
The type checker understands that 'var' is literally equal to the
value 4 and no other value.
Literal[...] cannot be subclassed. There is no runtime checking
verifying that the parameter is actually a value instead of a type.
"""
__values__ = None
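# Usage sketch (illustrative only): Literal restricts a parameter to specific
# values, which a checker verifies at every call site:
#
#     from typing_extensions import Literal
#
#     def open_mode(mode: Literal['r', 'rb', 'w']) -> None:
#         ...
#
#     open_mode('r')    # OK
#     open_mode('rw')   # flagged by a type checker; no runtime validation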
def _overload_dummy(*args, **kwds):
"""Helper for @overload to raise when called."""
raise NotImplementedError(
"You should not call an overloaded function. "
"A series of @overload-decorated functions "
"outside a stub module should always be followed "
"by an implementation that is not @overload-ed.")
def overload(func):
"""Decorator for overloaded functions/methods.
In a stub file, place two or more stub definitions for the same
function in a row, each decorated with @overload. For example:
@overload
def utf8(value: None) -> None: ...
@overload
def utf8(value: bytes) -> bytes: ...
@overload
def utf8(value: str) -> bytes: ...
In a non-stub file (i.e. a regular .py file), do the same but
follow it with an implementation. The implementation should *not*
be decorated with @overload. For example:
@overload
def utf8(value: None) -> None: ...
@overload
def utf8(value: bytes) -> bytes: ...
@overload
def utf8(value: str) -> bytes: ...
def utf8(value):
# implementation goes here
"""
return _overload_dummy
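# Usage sketch (illustrative only): at runtime every @overload-decorated stub
# is replaced by _overload_dummy, so only the final, undecorated
# implementation remains callable:
#
#     from typing_extensions import overload
#
#     @overload
#     def double(x: int) -> int: ...
#     @overload
#     def double(x: str) -> str: ...
#     def double(x):
#         return x * 2
#
#     double(2)     # returns 4 via the real implementation
#     double('ab')  # returns 'abab'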
# This is not a real generic class. Don't use outside annotations.
if hasattr(typing, 'Type'):
Type = typing.Type
else:
# Internal type variable used for Type[].
CT_co = typing.TypeVar('CT_co', covariant=True, bound=type)
class Type(typing.Generic[CT_co], extra=type):
"""A special construct usable to annotate class objects.
For example, suppose we have the following classes::
class User: ... # Abstract base for User classes
class BasicUser(User): ...
class ProUser(User): ...
class TeamUser(User): ...
And a function that takes a class argument that's a subclass of
User and returns an instance of the corresponding class::
U = TypeVar('U', bound=User)
def new_user(user_class: Type[U]) -> U:
user = user_class()
# (Here we could write the user object to a database)
return user
joe = new_user(BasicUser)
At this point the type checker knows that joe has type BasicUser.
"""
__slots__ = ()
# Various ABCs mimicking those in collections.abc.
# A few are simply re-exported for completeness.
def _define_guard(type_name):
"""
Returns True if the given type isn't defined in typing but
is defined in collections_abc.
Adds the type to __all__ if the collection is found in either
typing or collections_abc.
"""
if hasattr(typing, type_name):
__all__.append(type_name)
globals()[type_name] = getattr(typing, type_name)
return False
elif hasattr(collections_abc, type_name):
__all__.append(type_name)
return True
else:
return False
class _ExtensionsGenericMeta(GenericMeta):
def __subclasscheck__(self, subclass):
"""This mimics a more modern GenericMeta.__subclasscheck__() logic
(that does not have problems with recursion) to work around interactions
between collections, typing, and typing_extensions on older
versions of Python, see https://github.com/python/typing/issues/501.
"""
if sys.version_info[:3] >= (3, 5, 3) or sys.version_info[:3] < (3, 5, 0):
if self.__origin__ is not None:
if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
raise TypeError("Parameterized generics cannot be used with class "
"or instance checks")
return False
if not self.__extra__:
return super().__subclasscheck__(subclass)
res = self.__extra__.__subclasshook__(subclass)
if res is not NotImplemented:
return res
if self.__extra__ in subclass.__mro__:
return True
for scls in self.__extra__.__subclasses__():
if isinstance(scls, GenericMeta):
continue
if issubclass(subclass, scls):
return True
return False
if _define_guard('Awaitable'):
class Awaitable(typing.Generic[T_co], metaclass=_ExtensionsGenericMeta,
extra=collections_abc.Awaitable):
__slots__ = ()
if _define_guard('Coroutine'):
class Coroutine(Awaitable[V_co], typing.Generic[T_co, T_contra, V_co],
metaclass=_ExtensionsGenericMeta,
extra=collections_abc.Coroutine):
__slots__ = ()
if _define_guard('AsyncIterable'):
class AsyncIterable(typing.Generic[T_co],
metaclass=_ExtensionsGenericMeta,
extra=collections_abc.AsyncIterable):
__slots__ = ()
if _define_guard('AsyncIterator'):
class AsyncIterator(AsyncIterable[T_co],
metaclass=_ExtensionsGenericMeta,
extra=collections_abc.AsyncIterator):
__slots__ = ()
if hasattr(typing, 'Deque'):
Deque = typing.Deque
elif _geqv_defined:
class Deque(collections.deque, typing.MutableSequence[T],
metaclass=_ExtensionsGenericMeta,
extra=collections.deque):
__slots__ = ()
def __new__(cls, *args, **kwds):
if _geqv(cls, Deque):
return collections.deque(*args, **kwds)
return _generic_new(collections.deque, cls, *args, **kwds)
else:
class Deque(collections.deque, typing.MutableSequence[T],
metaclass=_ExtensionsGenericMeta,
extra=collections.deque):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is Deque:
return collections.deque(*args, **kwds)
return _generic_new(collections.deque, cls, *args, **kwds)
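# Usage sketch (illustrative only): whichever branch above defined Deque, it
# is subscriptable like its typing counterpart and instantiating it returns a
# plain collections.deque (DefaultDict, Counter and ChainMap below behave the
# same way):
#
#     from typing_extensions import Deque
#
#     d: Deque[int] = Deque([1, 2, 3])
#     d.appendleft(0)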
if hasattr(typing, 'ContextManager'):
ContextManager = typing.ContextManager
elif hasattr(contextlib, 'AbstractContextManager'):
class ContextManager(typing.Generic[T_co],
metaclass=_ExtensionsGenericMeta,
extra=contextlib.AbstractContextManager):
__slots__ = ()
else:
class ContextManager(typing.Generic[T_co]):
__slots__ = ()
def __enter__(self):
return self
@abc.abstractmethod
def __exit__(self, exc_type, exc_value, traceback):
return None
@classmethod
def __subclasshook__(cls, C):
if cls is ContextManager:
# In Python 3.6+, it is possible to set a method to None to
# explicitly indicate that the class does not implement an ABC
# (https://bugs.python.org/issue25958), but we do not support
# that pattern here because this fallback class is only used
# in Python 3.5 and earlier.
if (any("__enter__" in B.__dict__ for B in C.__mro__) and
any("__exit__" in B.__dict__ for B in C.__mro__)):
return True
return NotImplemented
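# Usage sketch (illustrative only): with the fallback above, any class that
# defines both __enter__ and __exit__ is accepted structurally:
#
#     class Managed:
#         def __enter__(self):
#             return self
#         def __exit__(self, exc_type, exc_value, traceback):
#             return None
#
#     issubclass(Managed, ContextManager)   # True, via __subclasshook__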
if hasattr(typing, 'AsyncContextManager'):
AsyncContextManager = typing.AsyncContextManager
__all__.append('AsyncContextManager')
elif hasattr(contextlib, 'AbstractAsyncContextManager'):
class AsyncContextManager(typing.Generic[T_co],
metaclass=_ExtensionsGenericMeta,
extra=contextlib.AbstractAsyncContextManager):
__slots__ = ()
__all__.append('AsyncContextManager')
elif sys.version_info[:2] >= (3, 5):
exec("""
class AsyncContextManager(typing.Generic[T_co]):
__slots__ = ()
async def __aenter__(self):
return self
@abc.abstractmethod
async def __aexit__(self, exc_type, exc_value, traceback):
return None
@classmethod
def __subclasshook__(cls, C):
if cls is AsyncContextManager:
return _check_methods_in_mro(C, "__aenter__", "__aexit__")
return NotImplemented
__all__.append('AsyncContextManager')
""")
if hasattr(typing, 'DefaultDict'):
DefaultDict = typing.DefaultDict
elif _geqv_defined:
class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT],
metaclass=_ExtensionsGenericMeta,
extra=collections.defaultdict):
__slots__ = ()
def __new__(cls, *args, **kwds):
if _geqv(cls, DefaultDict):
return collections.defaultdict(*args, **kwds)
return _generic_new(collections.defaultdict, cls, *args, **kwds)
else:
class DefaultDict(collections.defaultdict, typing.MutableMapping[KT, VT],
metaclass=_ExtensionsGenericMeta,
extra=collections.defaultdict):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is DefaultDict:
return collections.defaultdict(*args, **kwds)
return _generic_new(collections.defaultdict, cls, *args, **kwds)
if hasattr(typing, 'Counter'):
Counter = typing.Counter
elif (3, 5, 0) <= sys.version_info[:3] <= (3, 5, 1):
assert _geqv_defined
_TInt = typing.TypeVar('_TInt')
class _CounterMeta(typing.GenericMeta):
"""Metaclass for Counter"""
def __getitem__(self, item):
return super().__getitem__((item, int))
class Counter(collections.Counter,
typing.Dict[T, int],
metaclass=_CounterMeta,
extra=collections.Counter):
__slots__ = ()
def __new__(cls, *args, **kwds):
if _geqv(cls, Counter):
return collections.Counter(*args, **kwds)
return _generic_new(collections.Counter, cls, *args, **kwds)
elif _geqv_defined:
class Counter(collections.Counter,
typing.Dict[T, int],
metaclass=_ExtensionsGenericMeta, extra=collections.Counter):
__slots__ = ()
def __new__(cls, *args, **kwds):
if _geqv(cls, Counter):
return collections.Counter(*args, **kwds)
return _generic_new(collections.Counter, cls, *args, **kwds)
else:
class Counter(collections.Counter,
typing.Dict[T, int],
metaclass=_ExtensionsGenericMeta, extra=collections.Counter):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is Counter:
return collections.Counter(*args, **kwds)
return _generic_new(collections.Counter, cls, *args, **kwds)
if hasattr(typing, 'ChainMap'):
ChainMap = typing.ChainMap
__all__.append('ChainMap')
elif hasattr(collections, 'ChainMap'):
# ChainMap only exists in 3.3+
if _geqv_defined:
class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
metaclass=_ExtensionsGenericMeta,
extra=collections.ChainMap):
__slots__ = ()
def __new__(cls, *args, **kwds):
if _geqv(cls, ChainMap):
return collections.ChainMap(*args, **kwds)
return _generic_new(collections.ChainMap, cls, *args, **kwds)
else:
class ChainMap(collections.ChainMap, typing.MutableMapping[KT, VT],
metaclass=_ExtensionsGenericMeta,
extra=collections.ChainMap):
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is ChainMap:
return collections.ChainMap(*args, **kwds)
return _generic_new(collections.ChainMap, cls, *args, **kwds)
__all__.append('ChainMap')
if _define_guard('AsyncGenerator'):
class AsyncGenerator(AsyncIterator[T_co], typing.Generic[T_co, T_contra],
metaclass=_ExtensionsGenericMeta,
extra=collections_abc.AsyncGenerator):
__slots__ = ()
if hasattr(typing, 'NewType'):
NewType = typing.NewType
else:
def NewType(name, tp):
"""NewType creates simple unique types with almost zero
runtime overhead. NewType(name, tp) is considered a subtype of tp
by static type checkers. At runtime, NewType(name, tp) returns
a dummy function that simply returns its argument. Usage::
UserId = NewType('UserId', int)
def name_by_id(user_id: UserId) -> str:
...
UserId('user') # Fails type check
name_by_id(42) # Fails type check
name_by_id(UserId(42)) # OK
num = UserId(5) + 1 # type: int
"""
def new_type(x):
return x
new_type.__name__ = name
new_type.__supertype__ = tp
return new_type
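# Usage sketch (illustrative only): the fallback NewType is an identity
# function at runtime, so the distinct type exists only for checkers:
#
#     UserId = NewType('UserId', int)
#     uid = UserId(42)
#     assert uid == 42 and type(uid) is int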
if hasattr(typing, 'Text'):
Text = typing.Text
else:
Text = str
if hasattr(typing, 'TYPE_CHECKING'):
TYPE_CHECKING = typing.TYPE_CHECKING
else:
# Constant that's True when type checking, but False here.
TYPE_CHECKING = False
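# Usage sketch (illustrative only): TYPE_CHECKING is the conventional guard
# for imports that only type checkers need, e.g. to break import cycles:
#
#     from typing_extensions import TYPE_CHECKING
#
#     if TYPE_CHECKING:
#         from expensive_module import HeavyClass   # hypothetical module
#
#     def process(obj: 'HeavyClass') -> None:
#         ...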
def _gorg(cls):
"""This function exists for compatibility with old typing versions."""
assert isinstance(cls, GenericMeta)
if hasattr(cls, '_gorg'):
return cls._gorg
while cls.__origin__ is not None:
cls = cls.__origin__
return cls
if OLD_GENERICS:
def _next_in_mro(cls): # noqa
"""This function exists for compatibility with old typing versions."""
next_in_mro = object
for i, c in enumerate(cls.__mro__[:-1]):
if isinstance(c, GenericMeta) and _gorg(c) is Generic:
next_in_mro = cls.__mro__[i + 1]
return next_in_mro
_PROTO_WHITELIST = ['Callable', 'Awaitable',
'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator',
'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
'ContextManager', 'AsyncContextManager']
def _get_protocol_attrs(cls):
attrs = set()
for base in cls.__mro__[:-1]: # without object
if base.__name__ in ('Protocol', 'Generic'):
continue
annotations = getattr(base, '__annotations__', {})
for attr in list(base.__dict__.keys()) + list(annotations.keys()):
if (not attr.startswith('_abc_') and attr not in (
'__abstractmethods__', '__annotations__', '__weakref__',
'_is_protocol', '_is_runtime_protocol', '__dict__',
'__args__', '__slots__',
'__next_in_mro__', '__parameters__', '__origin__',
'__orig_bases__', '__extra__', '__tree_hash__',
'__doc__', '__subclasshook__', '__init__', '__new__',
'__module__', '_MutableMapping__marker', '_gorg')):
attrs.add(attr)
return attrs
def _is_callable_members_only(cls):
return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
if hasattr(typing, 'Protocol'):
Protocol = typing.Protocol
elif HAVE_PROTOCOLS and not PEP_560:
class _ProtocolMeta(GenericMeta):
"""Internal metaclass for Protocol.
This exists so Protocol classes can be generic without deriving
from Generic.
"""
if not OLD_GENERICS:
def __new__(cls, name, bases, namespace,
tvars=None, args=None, origin=None, extra=None, orig_bases=None):
# This is just a version copied from GenericMeta.__new__ that
# includes "Protocol" special treatment. (Comments removed for brevity.)
assert extra is None # Protocols should not have extra
if tvars is not None:
assert origin is not None
assert all(isinstance(t, TypeVar) for t in tvars), tvars
else:
tvars = _type_vars(bases)
gvars = None
for base in bases:
if base is Generic:
raise TypeError("Cannot inherit from plain Generic")
if (isinstance(base, GenericMeta) and
base.__origin__ in (Generic, Protocol)):
if gvars is not None:
raise TypeError(
"Cannot inherit from Generic[...] or"
" Protocol[...] multiple times.")
gvars = base.__parameters__
if gvars is None:
gvars = tvars
else:
tvarset = set(tvars)
gvarset = set(gvars)
if not tvarset <= gvarset:
raise TypeError(
"Some type variables (%s) "
"are not listed in %s[%s]" %
(", ".join(str(t) for t in tvars if t not in gvarset),
"Generic" if any(b.__origin__ is Generic
for b in bases) else "Protocol",
", ".join(str(g) for g in gvars)))
tvars = gvars
initial_bases = bases
if (extra is not None and type(extra) is abc.ABCMeta and
extra not in bases):
bases = (extra,) + bases
bases = tuple(_gorg(b) if isinstance(b, GenericMeta) else b
for b in bases)
if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
bases = tuple(b for b in bases if b is not Generic)
namespace.update({'__origin__': origin, '__extra__': extra})
self = super(GenericMeta, cls).__new__(cls, name, bases, namespace,
_root=True)
super(GenericMeta, self).__setattr__('_gorg',
self if not origin else
_gorg(origin))
self.__parameters__ = tvars
self.__args__ = tuple(... if a is _TypingEllipsis else
() if a is _TypingEmpty else
a for a in args) if args else None
self.__next_in_mro__ = _next_in_mro(self)
if orig_bases is None:
self.__orig_bases__ = initial_bases
elif origin is not None:
self._abc_registry = origin._abc_registry
self._abc_cache = origin._abc_cache
if hasattr(self, '_subs_tree'):
self.__tree_hash__ = (hash(self._subs_tree()) if origin else
super(GenericMeta, self).__hash__())
return self
def __init__(cls, *args, **kwargs):
super().__init__(*args, **kwargs)
if not cls.__dict__.get('_is_protocol', None):
cls._is_protocol = any(b is Protocol or
isinstance(b, _ProtocolMeta) and
b.__origin__ is Protocol
for b in cls.__bases__)
if cls._is_protocol:
for base in cls.__mro__[1:]:
if not (base in (object, Generic) or
base.__module__ == 'collections.abc' and
base.__name__ in _PROTO_WHITELIST or
isinstance(base, TypingMeta) and base._is_protocol or
isinstance(base, GenericMeta) and
base.__origin__ is Generic):
raise TypeError('Protocols can only inherit from other'
' protocols, got %r' % base)
def _no_init(self, *args, **kwargs):
if type(self)._is_protocol:
raise TypeError('Protocols cannot be instantiated')
cls.__init__ = _no_init
def _proto_hook(other):
if not cls.__dict__.get('_is_protocol', None):
return NotImplemented
if not isinstance(other, type):
# Same error as for issubclass(1, int)
raise TypeError('issubclass() arg 1 must be a class')
for attr in _get_protocol_attrs(cls):
for base in other.__mro__:
if attr in base.__dict__:
if base.__dict__[attr] is None:
return NotImplemented
break
annotations = getattr(base, '__annotations__', {})
if (isinstance(annotations, typing.Mapping) and
attr in annotations and
isinstance(other, _ProtocolMeta) and
other._is_protocol):
break
else:
return NotImplemented
return True
if '__subclasshook__' not in cls.__dict__:
cls.__subclasshook__ = _proto_hook
def __instancecheck__(self, instance):
# We need this method for situations where attributes are
# assigned in __init__.
if ((not getattr(self, '_is_protocol', False) or
_is_callable_members_only(self)) and
issubclass(instance.__class__, self)):
return True
if self._is_protocol:
if all(hasattr(instance, attr) and
(not callable(getattr(self, attr, None)) or
getattr(instance, attr) is not None)
for attr in _get_protocol_attrs(self)):
return True
return super(GenericMeta, self).__instancecheck__(instance)
def __subclasscheck__(self, cls):
if self.__origin__ is not None:
if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools']:
raise TypeError("Parameterized generics cannot be used with class "
"or instance checks")
return False
if (self.__dict__.get('_is_protocol', None) and
not self.__dict__.get('_is_runtime_protocol', None)):
if sys._getframe(1).f_globals['__name__'] in ['abc',
'functools',
'typing']:
return False
raise TypeError("Instance and class checks can only be used with"
" @runtime protocols")
if (self.__dict__.get('_is_runtime_protocol', None) and
not _is_callable_members_only(self)):
if sys._getframe(1).f_globals['__name__'] in ['abc',
'functools',
'typing']:
return super(GenericMeta, self).__subclasscheck__(cls)
raise TypeError("Protocols with non-method members"
" don't support issubclass()")
return super(GenericMeta, self).__subclasscheck__(cls)
if not OLD_GENERICS:
@_tp_cache
def __getitem__(self, params):
# We also need to copy this from GenericMeta.__getitem__ to get
# special treatment of "Protocol". (Comments removed for brevity.)
if not isinstance(params, tuple):
params = (params,)
if not params and _gorg(self) is not Tuple:
raise TypeError(
"Parameter list to %s[...] cannot be empty" % self.__qualname__)
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
if self in (Generic, Protocol):
if not all(isinstance(p, TypeVar) for p in params):
raise TypeError(
"Parameters to %r[...] must all be type variables" % self)
if len(set(params)) != len(params):
raise TypeError(
"Parameters to %r[...] must all be unique" % self)
tvars = params
args = params
elif self in (Tuple, Callable):
tvars = _type_vars(params)
args = params
elif self.__origin__ in (Generic, Protocol):
raise TypeError("Cannot subscript already-subscripted %s" %
repr(self))
else:
_check_generic(self, params)
tvars = _type_vars(params)
args = params
prepend = (self,) if self.__origin__ is None else ()
return self.__class__(self.__name__,
prepend + self.__bases__,
_no_slots_copy(self.__dict__),
tvars=tvars,
args=args,
origin=self,
extra=self.__extra__,
orig_bases=self.__orig_bases__)
class Protocol(metaclass=_ProtocolMeta):
"""Base class for protocol classes. Protocol classes are defined as::
class Proto(Protocol):
def meth(self) -> int:
...
Such classes are primarily used with static type checkers that recognize
structural subtyping (static duck-typing), for example::
class C:
def meth(self) -> int:
return 0
def func(x: Proto) -> int:
return x.meth()
func(C()) # Passes static type check
See PEP 544 for details. Protocol classes decorated with
@typing_extensions.runtime act as simple-minded runtime protocols that check
only the presence of given attributes, ignoring their type signatures.
Protocol classes can be generic, they are defined as::
class GenProto({bases}):
def meth(self) -> T:
...
"""
__slots__ = ()
_is_protocol = True
def __new__(cls, *args, **kwds):
if _gorg(cls) is Protocol:
raise TypeError("Type Protocol cannot be instantiated; "
"it can be used only as a base class")
if OLD_GENERICS:
return _generic_new(_next_in_mro(cls), cls, *args, **kwds)
return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
if Protocol.__doc__ is not None:
Protocol.__doc__ = Protocol.__doc__.format(bases="Protocol, Generic[T]" if
OLD_GENERICS else "Protocol[T]")
elif PEP_560:
from typing import _type_check, _GenericAlias, _collect_type_vars # noqa
class _ProtocolMeta(abc.ABCMeta):
# This metaclass is a bit unfortunate and exists only because of the lack
# of __instancehook__.
def __instancecheck__(cls, instance):
# We need this method for situations where attributes are
# assigned in __init__.
if ((not getattr(cls, '_is_protocol', False) or
_is_callable_members_only(cls)) and
issubclass(instance.__class__, cls)):
return True
if cls._is_protocol:
if all(hasattr(instance, attr) and
(not callable(getattr(cls, attr, None)) or
getattr(instance, attr) is not None)
for attr in _get_protocol_attrs(cls)):
return True
return super().__instancecheck__(instance)
class Protocol(metaclass=_ProtocolMeta):
# There is quite a lot of overlapping code with typing.Generic.
# Unfortunately it is hard to avoid this while these live in two different
# modules. The duplicated code will be removed when Protocol is moved to typing.
"""Base class for protocol classes. Protocol classes are defined as::
class Proto(Protocol):
def meth(self) -> int:
...
Such classes are primarily used with static type checkers that recognize
structural subtyping (static duck-typing), for example::
class C:
def meth(self) -> int:
return 0
def func(x: Proto) -> int:
return x.meth()
func(C()) # Passes static type check
See PEP 544 for details. Protocol classes decorated with
@typing_extensions.runtime act as simple-minded runtime protocols that check
only the presence of given attributes, ignoring their type signatures.
Protocol classes can be generic, they are defined as::
class GenProto(Protocol[T]):
def meth(self) -> T:
...
"""
__slots__ = ()
_is_protocol = True
def __new__(cls, *args, **kwds):
if cls is Protocol:
raise TypeError("Type Protocol cannot be instantiated; "
"it can only be used as a base class")
return super().__new__(cls)
@_tp_cache
def __class_getitem__(cls, params):
if not isinstance(params, tuple):
params = (params,)
if not params and cls is not Tuple:
raise TypeError(
"Parameter list to {}[...] cannot be empty".format(cls.__qualname__))
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
if cls is Protocol:
# Generic can only be subscripted with unique type variables.
if not all(isinstance(p, TypeVar) for p in params):
i = 0
while isinstance(params[i], TypeVar):
i += 1
raise TypeError(
"Parameters to Protocol[...] must all be type variables."
" Parameter {} is {}".format(i + 1, params[i]))
if len(set(params)) != len(params):
raise TypeError(
"Parameters to Protocol[...] must all be unique")
else:
# Subscripting a regular Generic subclass.
_check_generic(cls, params)
return _GenericAlias(cls, params)
def __init_subclass__(cls, *args, **kwargs):
tvars = []
if '__orig_bases__' in cls.__dict__:
error = Generic in cls.__orig_bases__
else:
error = Generic in cls.__bases__
if error:
raise TypeError("Cannot inherit from plain Generic")
if '__orig_bases__' in cls.__dict__:
tvars = _collect_type_vars(cls.__orig_bases__)
# Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
# If found, tvars must be a subset of it.
# If not found, tvars is it.
# Also check for and reject plain Generic,
# and reject multiple Generic[...] and/or Protocol[...].
gvars = None
for base in cls.__orig_bases__:
if (isinstance(base, _GenericAlias) and
base.__origin__ in (Generic, Protocol)):
# for error messages
the_base = 'Generic' if base.__origin__ is Generic else 'Protocol'
if gvars is not None:
raise TypeError(
"Cannot inherit from Generic[...]"
" and/or Protocol[...] multiple types.")
gvars = base.__parameters__
if gvars is None:
gvars = tvars
else:
tvarset = set(tvars)
gvarset = set(gvars)
if not tvarset <= gvarset:
s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
s_args = ', '.join(str(g) for g in gvars)
raise TypeError("Some type variables ({}) are"
" not listed in {}[{}]".format(s_vars,
the_base, s_args))
tvars = gvars
cls.__parameters__ = tuple(tvars)
# Determine if this is a protocol or a concrete subclass.
if not cls.__dict__.get('_is_protocol', None):
cls._is_protocol = any(b is Protocol for b in cls.__bases__)
# Set (or override) the protocol subclass hook.
def _proto_hook(other):
if not cls.__dict__.get('_is_protocol', None):
return NotImplemented
if not getattr(cls, '_is_runtime_protocol', False):
if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
return NotImplemented
raise TypeError("Instance and class checks can only be used with"
" @runtime protocols")
if not _is_callable_members_only(cls):
if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
return NotImplemented
raise TypeError("Protocols with non-method members"
" don't support issubclass()")
if not isinstance(other, type):
# Same error as for issubclass(1, int)
raise TypeError('issubclass() arg 1 must be a class')
for attr in _get_protocol_attrs(cls):
for base in other.__mro__:
if attr in base.__dict__:
if base.__dict__[attr] is None:
return NotImplemented
break
annotations = getattr(base, '__annotations__', {})
if (isinstance(annotations, typing.Mapping) and
attr in annotations and
isinstance(other, _ProtocolMeta) and
other._is_protocol):
break
else:
return NotImplemented
return True
if '__subclasshook__' not in cls.__dict__:
cls.__subclasshook__ = _proto_hook
# We have nothing more to do for non-protocols.
if not cls._is_protocol:
return
# Check consistency of bases.
for base in cls.__bases__:
if not (base in (object, Generic) or
base.__module__ == 'collections.abc' and
base.__name__ in _PROTO_WHITELIST or
isinstance(base, _ProtocolMeta) and base._is_protocol):
raise TypeError('Protocols can only inherit from other'
' protocols, got %r' % base)
def _no_init(self, *args, **kwargs):
if type(self)._is_protocol:
raise TypeError('Protocols cannot be instantiated')
cls.__init__ = _no_init
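# Usage sketch (illustrative only): a Protocol describes required structure;
# any class with matching members satisfies it for a static checker, with no
# inheritance involved:
#
#     from typing_extensions import Protocol
#
#     class Closable(Protocol):
#         def close(self) -> None: ...
#
#     class Socket:
#         def close(self) -> None:
#             pass
#
#     def shutdown(resource: Closable) -> None:
#         resource.close()
#
#     shutdown(Socket())   # accepted structurally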
if hasattr(typing, 'runtime_checkable'):
runtime_checkable = typing.runtime_checkable
elif HAVE_PROTOCOLS:
def runtime_checkable(cls):
"""Mark a protocol class as a runtime protocol, so that it
can be used with isinstance() and issubclass(). Raise TypeError
if applied to a non-protocol class.
This allows a simple-minded structural check very similar to the
one-offs in collections.abc such as Hashable.
"""
if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol:
raise TypeError('@runtime_checkable can be only applied to protocol classes,'
' got %r' % cls)
cls._is_runtime_protocol = True
return cls
if HAVE_PROTOCOLS:
# Exists for backwards compatibility.
runtime = runtime_checkable
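# Usage sketch (illustrative only): decorating a protocol with
# @runtime_checkable additionally enables isinstance() checks, which test
# only for attribute presence, never signatures:
#
#     from typing_extensions import Protocol, runtime_checkable
#
#     @runtime_checkable
#     class HasRead(Protocol):
#         def read(self) -> bytes: ...
#
#     import io
#     isinstance(io.BytesIO(), HasRead)   # True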
if hasattr(typing, 'SupportsIndex'):
SupportsIndex = typing.SupportsIndex
elif HAVE_PROTOCOLS:
@runtime_checkable
class SupportsIndex(Protocol):
__slots__ = ()
@abc.abstractmethod
def __index__(self) -> int:
pass
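# Usage sketch (illustrative only): SupportsIndex matches any object usable
# where an integer index is required:
#
#     isinstance(3, SupportsIndex)     # True: int defines __index__
#     isinstance('3', SupportsIndex)   # False: str does not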
if sys.version_info[:2] >= (3, 9):
# The standard library TypedDict in Python 3.8 does not store runtime information
# about which (if any) keys are optional. See https://bugs.python.org/issue38834
TypedDict = typing.TypedDict
else:
def _check_fails(cls, other):
try:
if sys._getframe(1).f_globals['__name__'] not in ['abc',
'functools',
'typing']:
# Typed dicts are only for static structural subtyping.
raise TypeError('TypedDict does not support instance and class checks')
except (AttributeError, ValueError):
pass
return False
def _dict_new(*args, **kwargs):
if not args:
raise TypeError('TypedDict.__new__(): not enough arguments')
_, args = args[0], args[1:] # allow the "cls" keyword to be passed
return dict(*args, **kwargs)
_dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)'
def _typeddict_new(*args, total=True, **kwargs):
if not args:
raise TypeError('TypedDict.__new__(): not enough arguments')
_, args = args[0], args[1:] # allow the "cls" keyword to be passed
if args:
typename, args = args[0], args[1:] # allow the "_typename" keyword to be passed
elif '_typename' in kwargs:
typename = kwargs.pop('_typename')
import warnings
warnings.warn("Passing '_typename' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
raise TypeError("TypedDict.__new__() missing 1 required positional "
"argument: '_typename'")
if args:
try:
fields, = args # allow the "_fields" keyword to be passed
except ValueError:
raise TypeError('TypedDict.__new__() takes from 2 to 3 '
'positional arguments but {} '
'were given'.format(len(args) + 2))
elif '_fields' in kwargs and len(kwargs) == 1:
fields = kwargs.pop('_fields')
import warnings
warnings.warn("Passing '_fields' as keyword argument is deprecated",
DeprecationWarning, stacklevel=2)
else:
fields = None
if fields is None:
fields = kwargs
elif kwargs:
raise TypeError("TypedDict takes either a dict or keyword arguments,"
" but not both")
ns = {'__annotations__': dict(fields), '__total__': total}
try:
# Setting correct module is necessary to make typed dict classes pickleable.
ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
pass
return _TypedDictMeta(typename, (), ns)
_typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,'
' /, *, total=True, **kwargs)')
class _TypedDictMeta(type):
def __new__(cls, name, bases, ns, total=True):
# Create new typed dict class object.
# This method is called directly when TypedDict is subclassed,
# or via _typeddict_new when TypedDict is instantiated. This way
# TypedDict supports all three syntaxes described in its docstring.
# Subclasses and instances of TypedDict return actual dictionaries
# via _dict_new.
ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
annotations = {}
own_annotations = ns.get('__annotations__', {})
own_annotation_keys = set(own_annotations.keys())
msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
own_annotations = {
n: typing._type_check(tp, msg) for n, tp in own_annotations.items()
}
required_keys = set()
optional_keys = set()
for base in bases:
annotations.update(base.__dict__.get('__annotations__', {}))
required_keys.update(base.__dict__.get('__required_keys__', ()))
optional_keys.update(base.__dict__.get('__optional_keys__', ()))
annotations.update(own_annotations)
if total:
required_keys.update(own_annotation_keys)
else:
optional_keys.update(own_annotation_keys)
tp_dict.__annotations__ = annotations
tp_dict.__required_keys__ = frozenset(required_keys)
tp_dict.__optional_keys__ = frozenset(optional_keys)
if not hasattr(tp_dict, '__total__'):
tp_dict.__total__ = total
return tp_dict
__instancecheck__ = __subclasscheck__ = _check_fails
TypedDict = _TypedDictMeta('TypedDict', (dict,), {})
TypedDict.__module__ = __name__
TypedDict.__doc__ = \
"""A simple typed name space. At runtime it is equivalent to a plain dict.
TypedDict creates a dictionary type that expects all of its
instances to have a certain set of keys, with each key
associated with a value of a consistent type. This expectation
is not checked at runtime but is only enforced by type checkers.
Usage::
class Point2D(TypedDict):
x: int
y: int
label: str
a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
The type info can be accessed via the Point2D.__annotations__ dict, and
the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
TypedDict supports two additional equivalent forms::
Point2D = TypedDict('Point2D', x=int, y=int, label=str)
Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
The class syntax is only supported in Python 3.6+, while two other
syntax forms work for Python 2.7 and 3.2+
"""
# Python 3.9+ has PEP 593 (Annotated and modified get_type_hints)
if hasattr(typing, 'Annotated'):
Annotated = typing.Annotated
get_type_hints = typing.get_type_hints
# Not exported and not a public API, but needed for get_origin() and get_args()
# to work.
_AnnotatedAlias = typing._AnnotatedAlias
elif PEP_560:
class _AnnotatedAlias(typing._GenericAlias, _root=True):
"""Runtime representation of an annotated type.
At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
with extra annotations. The alias behaves like a normal typing alias,
instantiating is the same as instantiating the underlying type, binding
it to types is also the same.
"""
def __init__(self, origin, metadata):
if isinstance(origin, _AnnotatedAlias):
metadata = origin.__metadata__ + metadata
origin = origin.__origin__
super().__init__(origin, origin)
self.__metadata__ = metadata
def copy_with(self, params):
assert len(params) == 1
new_type = params[0]
return _AnnotatedAlias(new_type, self.__metadata__)
def __repr__(self):
return "typing_extensions.Annotated[{}, {}]".format(
typing._type_repr(self.__origin__),
", ".join(repr(a) for a in self.__metadata__)
)
def __reduce__(self):
return operator.getitem, (
Annotated, (self.__origin__,) + self.__metadata__
)
def __eq__(self, other):
if not isinstance(other, _AnnotatedAlias):
return NotImplemented
if self.__origin__ != other.__origin__:
return False
return self.__metadata__ == other.__metadata__
def __hash__(self):
return hash((self.__origin__, self.__metadata__))
class Annotated:
"""Add context specific metadata to a type.
Example: Annotated[int, runtime_check.Unsigned] indicates to the
hypothetical runtime_check module that this type is an unsigned int.
Every other consumer of this type can ignore this metadata and treat
this type as int.
The first argument to Annotated must be a valid type (and will be in
the __origin__ field), the remaining arguments are kept as a tuple in
the __metadata__ field.
Details:
- It's an error to call `Annotated` with less than two arguments.
- Nested Annotated are flattened::
Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
- Instantiating an annotated type is equivalent to instantiating the
underlying type::
Annotated[C, Ann1](5) == C(5)
- Annotated can be used as a generic type alias::
Optimized = Annotated[T, runtime.Optimize()]
Optimized[int] == Annotated[int, runtime.Optimize()]
OptimizedList = Annotated[List[T], runtime.Optimize()]
OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
"""
__slots__ = ()
def __new__(cls, *args, **kwargs):
raise TypeError("Type Annotated cannot be instantiated.")
@_tp_cache
def __class_getitem__(cls, params):
if not isinstance(params, tuple) or len(params) < 2:
raise TypeError("Annotated[...] should be used "
"with at least two arguments (a type and an "
"annotation).")
msg = "Annotated[t, ...]: t must be a type."
origin = typing._type_check(params[0], msg)
metadata = tuple(params[1:])
return _AnnotatedAlias(origin, metadata)
def __init_subclass__(cls, *args, **kwargs):
raise TypeError(
"Cannot subclass {}.Annotated".format(cls.__module__)
)
def _strip_annotations(t):
"""Strips the annotations from a given type.
"""
if isinstance(t, _AnnotatedAlias):
return _strip_annotations(t.__origin__)
if isinstance(t, typing._GenericAlias):
stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
if stripped_args == t.__args__:
return t
res = t.copy_with(stripped_args)
res._special = t._special
return res
return t
def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
"""Return type hints for an object.
This is often the same as obj.__annotations__, but it handles
forward references encoded as string literals, adds Optional[t] if a
default value equal to None is set and recursively replaces all
'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
The argument may be a module, class, method, or function. The annotations
are returned as a dictionary. For classes, annotations include also
inherited members.
TypeError is raised if the argument is not of a type that can contain
annotations, and an empty dictionary is returned if no annotations are
present.
BEWARE -- the behavior of globalns and localns is counterintuitive
(unless you are familiar with how eval() and exec() work). The
search order is locals first, then globals.
- If no dict arguments are passed, an attempt is made to use the
globals from obj (or the respective module's globals for classes),
and these are also used as the locals. If the object does not appear
to have globals, an empty dictionary is used.
- If one dict argument is passed, it is used for both globals and
locals.
- If two dict arguments are passed, they specify globals and
locals, respectively.
"""
hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
if include_extras:
return hint
return {k: _strip_annotations(t) for k, t in hint.items()}
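# Usage sketch (illustrative only): by default this backported get_type_hints
# strips Annotated metadata; pass include_extras=True to keep it:
#
#     from typing_extensions import Annotated, get_type_hints
#
#     def tag(x: Annotated[int, 'unit:seconds']) -> None: ...
#
#     get_type_hints(tag)                       # {'x': <class 'int'>, ...}
#     get_type_hints(tag, include_extras=True)  # keeps the Annotated wrapper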
elif HAVE_ANNOTATED:
def _is_dunder(name):
"""Returns True if name is a __dunder_variable_name__."""
return len(name) > 4 and name.startswith('__') and name.endswith('__')
# Prior to Python 3.7 types did not have `copy_with`. A lot of the equality
# checks, argument expansion etc. are done on the _subs_tree. As a result we
# can't provide a get_type_hints function that strips out annotations.
class AnnotatedMeta(typing.GenericMeta):
"""Metaclass for Annotated"""
def __new__(cls, name, bases, namespace, **kwargs):
if any(b is not object for b in bases):
raise TypeError("Cannot subclass " + str(Annotated))
return super().__new__(cls, name, bases, namespace, **kwargs)
@property
def __metadata__(self):
return self._subs_tree()[2]
def _tree_repr(self, tree):
cls, origin, metadata = tree
if not isinstance(origin, tuple):
tp_repr = typing._type_repr(origin)
else:
tp_repr = origin[0]._tree_repr(origin)
metadata_reprs = ", ".join(repr(arg) for arg in metadata)
return '%s[%s, %s]' % (cls, tp_repr, metadata_reprs)
def _subs_tree(self, tvars=None, args=None): # noqa
if self is Annotated:
return Annotated
res = super()._subs_tree(tvars=tvars, args=args)
# Flatten nested Annotated
if isinstance(res[1], tuple) and res[1][0] is Annotated:
sub_tp = res[1][1]
sub_annot = res[1][2]
return (Annotated, sub_tp, sub_annot + res[2])
return res
def _get_cons(self):
"""Return the class used to create instance of this type."""
if self.__origin__ is None:
raise TypeError("Cannot get the underlying type of a "
"non-specialized Annotated type.")
tree = self._subs_tree()
while isinstance(tree, tuple) and tree[0] is Annotated:
tree = tree[1]
if isinstance(tree, tuple):
return tree[0]
else:
return tree
@_tp_cache
def __getitem__(self, params):
if not isinstance(params, tuple):
params = (params,)
if self.__origin__ is not None: # specializing an instantiated type
return super().__getitem__(params)
elif not isinstance(params, tuple) or len(params) < 2:
raise TypeError("Annotated[...] should be instantiated "
"with at least two arguments (a type and an "
"annotation).")
else:
msg = "Annotated[t, ...]: t must be a type."
tp = typing._type_check(params[0], msg)
metadata = tuple(params[1:])
return self.__class__(
self.__name__,
self.__bases__,
_no_slots_copy(self.__dict__),
tvars=_type_vars((tp,)),
# Metadata is a tuple so it won't be touched by _replace_args et al.
args=(tp, metadata),
origin=self,
)
def __call__(self, *args, **kwargs):
cons = self._get_cons()
result = cons(*args, **kwargs)
try:
result.__orig_class__ = self
except AttributeError:
pass
return result
def __getattr__(self, attr):
# For simplicity we just don't relay all dunder names
if self.__origin__ is not None and not _is_dunder(attr):
return getattr(self._get_cons(), attr)
raise AttributeError(attr)
def __setattr__(self, attr, value):
if _is_dunder(attr) or attr.startswith('_abc_'):
super().__setattr__(attr, value)
elif self.__origin__ is None:
raise AttributeError(attr)
else:
setattr(self._get_cons(), attr, value)
def __instancecheck__(self, obj):
raise TypeError("Annotated cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Annotated cannot be used with issubclass().")
class Annotated(metaclass=AnnotatedMeta):
"""Add context specific metadata to a type.
Example: Annotated[int, runtime_check.Unsigned] indicates to the
hypothetical runtime_check module that this type is an unsigned int.
Every other consumer of this type can ignore this metadata and treat
this type as int.
The first argument to Annotated must be a valid type, the remaining
arguments are kept as a tuple in the __metadata__ field.
Details:
- It's an error to call `Annotated` with less than two arguments.
- Nested Annotated are flattened::
Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
- Instantiating an annotated type is equivalent to instantiating the
underlying type::
Annotated[C, Ann1](5) == C(5)
- Annotated can be used as a generic type alias::
Optimized = Annotated[T, runtime.Optimize()]
Optimized[int] == Annotated[int, runtime.Optimize()]
OptimizedList = Annotated[List[T], runtime.Optimize()]
OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
"""
# Python 3.8 has get_origin() and get_args() but those implementations aren't
# Annotated-aware, so we can't use those, only Python 3.9 versions will do.
if sys.version_info[:2] >= (3, 9):
get_origin = typing.get_origin
get_args = typing.get_args
elif PEP_560:
from typing import _GenericAlias # noqa
def get_origin(tp):
"""Get the unsubscripted version of a type.
This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
and Annotated. Return None for unsupported types. Examples::
get_origin(Literal[42]) is Literal
get_origin(int) is None
get_origin(ClassVar[int]) is ClassVar
get_origin(Generic) is Generic
get_origin(Generic[T]) is Generic
get_origin(Union[T, int]) is Union
get_origin(List[Tuple[T, T]][int]) == list
"""
if isinstance(tp, _AnnotatedAlias):
return Annotated
if isinstance(tp, _GenericAlias):
return tp.__origin__
if tp is Generic:
return Generic
return None
def get_args(tp):
"""Get type arguments with all substitutions performed.
For unions, basic simplifications used by Union constructor are performed.
Examples::
get_args(Dict[str, int]) == (str, int)
get_args(int) == ()
get_args(Union[int, Union[T, int], str][int]) == (int, str)
get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
get_args(Callable[[], T][int]) == ([], int)
"""
if isinstance(tp, _AnnotatedAlias):
return (tp.__origin__,) + tp.__metadata__
if isinstance(tp, _GenericAlias) and not tp._special:
res = tp.__args__
if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
res = (list(res[:-1]), res[-1])
return res
return ()
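# Usage sketch (illustrative only): the two helpers above take generic
# aliases apart, including the Annotated-aware cases this backport handles
# explicitly:
#
#     from typing import List
#     from typing_extensions import Annotated, get_origin, get_args
#
#     get_origin(List[int])               # <class 'list'>
#     get_args(List[int])                 # (<class 'int'>,)
#     get_args(Annotated[int, 'meta'])    # (<class 'int'>, 'meta')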
if hasattr(typing, 'TypeAlias'):
TypeAlias = typing.TypeAlias
elif sys.version_info[:2] >= (3, 9):
class _TypeAliasForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
@_TypeAliasForm
def TypeAlias(self, parameters):
"""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example above.
"""
raise TypeError("{} is not subscriptable".format(self))
elif sys.version_info[:2] >= (3, 7):
class _TypeAliasForm(typing._SpecialForm, _root=True):
def __repr__(self):
return 'typing_extensions.' + self._name
TypeAlias = _TypeAliasForm('TypeAlias',
doc="""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example
above.""")
elif hasattr(typing, '_FinalTypingBase'):
class _TypeAliasMeta(typing.TypingMeta):
"""Metaclass for TypeAlias"""
def __repr__(self):
return 'typing_extensions.TypeAlias'
class _TypeAliasBase(typing._FinalTypingBase, metaclass=_TypeAliasMeta, _root=True):
"""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example above.
"""
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError("TypeAlias cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("TypeAlias cannot be used with issubclass().")
def __repr__(self):
return 'typing_extensions.TypeAlias'
TypeAlias = _TypeAliasBase(_root=True)
else:
class _TypeAliasMeta(typing.TypingMeta):
"""Metaclass for TypeAlias"""
def __instancecheck__(self, obj):
raise TypeError("TypeAlias cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("TypeAlias cannot be used with issubclass().")
def __call__(self, *args, **kwargs):
raise TypeError("Cannot instantiate TypeAlias")
class TypeAlias(metaclass=_TypeAliasMeta, _root=True):
"""Special marker indicating that an assignment should
be recognized as a proper type alias definition by type
checkers.
For example::
Predicate: TypeAlias = Callable[..., bool]
It's invalid when used anywhere except as in the example above.
"""
__slots__ = () | zato-common | /zato-common-3.2.1.tar.gz/zato-common-3.2.1/src/zato/common/ext/typing_extensions.py | typing_extensions.py |
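# Usage sketch (illustrative only): TypeAlias merely marks an assignment as a
# type alias for checkers; at runtime the right-hand side is an ordinary
# expression:
#
#     from typing import Callable
#     from typing_extensions import TypeAlias
#
#     Predicate: TypeAlias = Callable[..., bool]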
import re
import sys
import copy
import types
import inspect
import keyword
__all__ = ['dataclass',
'field',
'Field',
'FrozenInstanceError',
'InitVar',
'MISSING',
# Helper functions.
'fields',
'asdict',
'astuple',
'make_dataclass',
'replace',
'is_dataclass',
]
# Conditions for adding methods. The boxes indicate what action the
# dataclass decorator takes. For all of these tables, when I talk
# about init=, repr=, eq=, order=, unsafe_hash=, or frozen=, I'm
# referring to the arguments to the @dataclass decorator. When
# checking if a dunder method already exists, I mean check for an
# entry in the class's __dict__. I never check to see if an attribute
# is defined in a base class.
# Key:
# +=========+=========================================+
# + Value | Meaning |
# +=========+=========================================+
# | <blank> | No action: no method is added. |
# +---------+-----------------------------------------+
# | add | Generated method is added. |
# +---------+-----------------------------------------+
# | raise | TypeError is raised. |
# +---------+-----------------------------------------+
# | None | Attribute is set to None. |
# +=========+=========================================+
# __init__
#
# +--- init= parameter
# |
# v | | |
# | no | yes | <--- class has __init__ in __dict__?
# +=======+=======+=======+
# | False | | |
# +-------+-------+-------+
# | True | add | | <- the default
# +=======+=======+=======+
# __repr__
#
# +--- repr= parameter
# |
# v | | |
# | no | yes | <--- class has __repr__ in __dict__?
# +=======+=======+=======+
# | False | | |
# +-------+-------+-------+
# | True | add | | <- the default
# +=======+=======+=======+
# __setattr__
# __delattr__
#
# +--- frozen= parameter
# |
# v | | |
# | no | yes | <--- class has __setattr__ or __delattr__ in __dict__?
# +=======+=======+=======+
# | False | | | <- the default
# +-------+-------+-------+
# | True | add | raise |
# +=======+=======+=======+
# Raise because not adding these methods would break the "frozen-ness"
# of the class.
# __eq__
#
# +--- eq= parameter
# |
# v | | |
# | no | yes | <--- class has __eq__ in __dict__?
# +=======+=======+=======+
# | False | | |
# +-------+-------+-------+
# | True | add | | <- the default
# +=======+=======+=======+
# __lt__
# __le__
# __gt__
# __ge__
#
# +--- order= parameter
# |
# v | | |
# | no | yes | <--- class has any comparison method in __dict__?
# +=======+=======+=======+
# | False | | | <- the default
# +-------+-------+-------+
# | True | add | raise |
# +=======+=======+=======+
# Raise because to allow this case would interfere with using
# functools.total_ordering.
# __hash__
# +------------------- unsafe_hash= parameter
# | +----------- eq= parameter
# | | +--- frozen= parameter
# | | |
# v v v | | |
# | no | yes | <--- class has explicitly defined __hash__
# +=======+=======+=======+========+========+
# | False | False | False | | | No __eq__, use the base class __hash__
# +-------+-------+-------+--------+--------+
# | False | False | True | | | No __eq__, use the base class __hash__
# +-------+-------+-------+--------+--------+
# | False | True | False | None | | <-- the default, not hashable
# +-------+-------+-------+--------+--------+
# | False | True | True | add | | Frozen, so hashable, allows override
# +-------+-------+-------+--------+--------+
# | True | False | False | add | raise | Has no __eq__, but hashable
# +-------+-------+-------+--------+--------+
# | True | False | True | add | raise | Has no __eq__, but hashable
# +-------+-------+-------+--------+--------+
# | True | True | False | add | raise | Not frozen, but hashable
# +-------+-------+-------+--------+--------+
# | True | True | True | add | raise | Frozen, so hashable
# +=======+=======+=======+========+========+
# For boxes that are blank, __hash__ is untouched and therefore
# inherited from the base class. If the base is object, then
# id-based hashing is used.
#
# Note that a class may already have __hash__=None if it specified an
# __eq__ method in the class body (not one that was created by
# @dataclass).
#
# See _hash_action (below) for a coded version of this table.
# Raised when an attempt is made to modify a frozen class.
class FrozenInstanceError(AttributeError): pass
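# Usage sketch (illustrative only; assumes this backport is importable as
# "dataclasses"): FrozenInstanceError is raised when assigning to, or
# deleting, a field of a frozen dataclass:
#
#     @dataclass(frozen=True)
#     class Point:
#         x: int
#
#     p = Point(1)
#     p.x = 2   # raises FrozenInstanceError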
# A sentinel object for default values to signal that a default
# factory will be used. This is given a nice repr() which will appear
# in the function signature of dataclasses' constructors.
class _HAS_DEFAULT_FACTORY_CLASS:
def __repr__(self):
return '<factory>'
_HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS()
# A sentinel object to detect if a parameter is supplied or not. Use
# a class to give it a better repr.
class _MISSING_TYPE:
pass
MISSING = _MISSING_TYPE()
# Since most per-field metadata will be unused, create an empty
# read-only proxy that can be shared among all fields.
_EMPTY_METADATA = types.MappingProxyType({})
# Markers for the various kinds of fields and pseudo-fields.
class _FIELD_BASE:
def __init__(self, name):
self.name = name
def __repr__(self):
return self.name
_FIELD = _FIELD_BASE('_FIELD')
_FIELD_CLASSVAR = _FIELD_BASE('_FIELD_CLASSVAR')
_FIELD_INITVAR = _FIELD_BASE('_FIELD_INITVAR')
# The name of an attribute on the class where we store the Field
# objects. Also used to check if a class is a Data Class.
_FIELDS = '__dataclass_fields__'
# The name of an attribute on the class that stores the parameters to
# @dataclass.
_PARAMS = '__dataclass_params__'
# The name of the function that, if it exists, is called at the end of
# __init__.
_POST_INIT_NAME = '__post_init__'
# String regex that string annotations for ClassVar or InitVar must match.
# Allows "identifier.identifier[" or "identifier[".
# https://bugs.python.org/issue33453 for details.
_MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)')
class _InitVarMeta(type):
def __getitem__(self, params):
return self
class InitVar(metaclass=_InitVarMeta):
pass
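# Illustrative usage of InitVar (not part of the original module): an
# InitVar pseudo-field becomes an __init__ parameter that is forwarded
# to __post_init__, but it is not stored on the instance:
#
#   @dataclass
#   class C:
#       x: int
#       y: InitVar[int] = 0
#
#       def __post_init__(self, y):
#           self.x += y
#
#   C(1, 2).x    # -> 3; y was consumed by __post_init__, not stored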
# Instances of Field are only ever created from within this module,
# and only from the field() function, although Field instances are
# exposed externally as (conceptually) read-only objects.
#
# name and type are filled in after the fact, not in __init__.
# They're not known at the time this class is instantiated, but it's
# convenient if they're available later.
#
# When cls._FIELDS is filled in with a list of Field objects, the name
# and type fields will have been populated.
class Field:
__slots__ = ('name',
'type',
'default',
'default_factory',
'repr',
'hash',
'init',
'compare',
'metadata',
'_field_type', # Private: not to be used by user code.
)
def __init__(self, default, default_factory, init, repr, hash, compare,
metadata):
self.name = None
self.type = None
self.default = default
self.default_factory = default_factory
self.init = init
self.repr = repr
self.hash = hash
self.compare = compare
self.metadata = (_EMPTY_METADATA
if metadata is None or len(metadata) == 0 else
types.MappingProxyType(metadata))
self._field_type = None
def __repr__(self):
return ('Field('
f'name={self.name!r},'
f'type={self.type!r},'
f'default={self.default!r},'
f'default_factory={self.default_factory!r},'
f'init={self.init!r},'
f'repr={self.repr!r},'
f'hash={self.hash!r},'
f'compare={self.compare!r},'
f'metadata={self.metadata!r},'
f'_field_type={self._field_type}'
')')
# This is used to support the PEP 487 __set_name__ protocol in the
# case where we're using a field that contains a descriptor as a
    # default value. For details on __set_name__, see
# https://www.python.org/dev/peps/pep-0487/#implementation-details.
#
# Note that in _process_class, this Field object is overwritten
# with the default value, so the end result is a descriptor that
# had __set_name__ called on it at the right time.
def __set_name__(self, owner, name):
func = getattr(type(self.default), '__set_name__', None)
if func:
# There is a __set_name__ method on the descriptor, call
# it.
func(self.default, owner, name)
class _DataclassParams:
__slots__ = ('init',
'repr',
'eq',
'order',
'unsafe_hash',
'frozen',
)
def __init__(self, init, repr, eq, order, unsafe_hash, frozen):
self.init = init
self.repr = repr
self.eq = eq
self.order = order
self.unsafe_hash = unsafe_hash
self.frozen = frozen
def __repr__(self):
return ('_DataclassParams('
f'init={self.init!r},'
f'repr={self.repr!r},'
f'eq={self.eq!r},'
f'order={self.order!r},'
f'unsafe_hash={self.unsafe_hash!r},'
f'frozen={self.frozen!r}'
')')
# This function is used instead of exposing Field creation directly,
# so that a type checker can be told (via overloads) that this is a
# function whose type depends on its parameters.
def field(*, default=MISSING, default_factory=MISSING, init=True, repr=True,
hash=None, compare=True, metadata=None):
"""Return an object to identify dataclass fields.
default is the default value of the field. default_factory is a
0-argument function called to initialize a field's value. If init
is True, the field will be a parameter to the class's __init__()
function. If repr is True, the field will be included in the
object's repr(). If hash is True, the field will be included in
the object's hash(). If compare is True, the field will be used
in comparison functions. metadata, if specified, must be a
mapping which is stored but not otherwise examined by dataclass.
It is an error to specify both default and default_factory.
"""
if default is not MISSING and default_factory is not MISSING:
raise ValueError('cannot specify both default and default_factory')
return Field(default, default_factory, init, repr, hash, compare,
metadata)
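# Illustrative usage of field() (not part of the original module); note
# how a mutable default has to go through default_factory:
#
#   @dataclass
#   class C:
#       items: list = field(default_factory=list)
#       token: str = field(default='n/a', repr=False)
#
#   C().items    # -> [] (a fresh list per instance)
#   repr(C())    # -> 'C(items=[])' (token is excluded from the repr)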
def _tuple_str(obj_name, fields):
# Return a string representing each field of obj_name as a tuple
# member. So, if fields is ['x', 'y'] and obj_name is "self",
# return "(self.x,self.y)".
# Special case for the 0-tuple.
if not fields:
return '()'
# Note the trailing comma, needed if this turns out to be a 1-tuple.
return f'({",".join([f"{obj_name}.{f.name}" for f in fields])},)'
def _create_fn(name, args, body, *, globals=None, locals=None,
return_type=MISSING):
# Note that we mutate locals when exec() is called. Caller
# beware! The only callers are internal to this module, so no
# worries about external callers.
if locals is None:
locals = {}
return_annotation = ''
if return_type is not MISSING:
locals['_return_type'] = return_type
return_annotation = '->_return_type'
args = ','.join(args)
body = '\n'.join(f' {b}' for b in body)
# Compute the text of the entire function.
txt = f'def {name}({args}){return_annotation}:\n{body}'
exec(txt, globals, locals)
return locals[name]
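# For instance (illustrative, not part of the original module): for a
# class with a single field 'x: int', _init_fn below calls _create_fn
# with arguments that produce roughly this source text:
#
#   def __init__(self,x:_type_x)->_return_type:
#    self.x=x
#
# which is exec'd and the resulting function object is then fetched back
# out of the locals namespace.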
def _field_assign(frozen, name, value, self_name):
# If we're a frozen class, then assign to our fields in __init__
# via object.__setattr__. Otherwise, just use a simple
# assignment.
#
# self_name is what "self" is called in this function: don't
# hard-code "self", since that might be a field name.
if frozen:
return f'object.__setattr__({self_name},{name!r},{value})'
return f'{self_name}.{name}={value}'
def _field_init(f, frozen, globals, self_name):
# Return the text of the line in the body of __init__ that will
# initialize this field.
default_name = f'_dflt_{f.name}'
if f.default_factory is not MISSING:
if f.init:
# This field has a default factory. If a parameter is
# given, use it. If not, call the factory.
globals[default_name] = f.default_factory
value = (f'{default_name}() '
f'if {f.name} is _HAS_DEFAULT_FACTORY '
f'else {f.name}')
else:
# This is a field that's not in the __init__ params, but
# has a default factory function. It needs to be
# initialized here by calling the factory function,
# because there's no other way to initialize it.
# For a field initialized with a default=defaultvalue, the
# class dict just has the default value
# (cls.fieldname=defaultvalue). But that won't work for a
# default factory, the factory must be called in __init__
# and we must assign that to self.fieldname. We can't
# fall back to the class dict's value, both because it's
# not set, and because it might be different per-class
# (which, after all, is why we have a factory function!).
globals[default_name] = f.default_factory
value = f'{default_name}()'
else:
# No default factory.
if f.init:
if f.default is MISSING:
# There's no default, just do an assignment.
value = f.name
elif f.default is not MISSING:
globals[default_name] = f.default
value = f.name
else:
# This field does not need initialization. Signify that
# to the caller by returning None.
return None
# Only test this now, so that we can create variables for the
# default. However, return None to signify that we're not going
# to actually do the assignment statement for InitVars.
if f._field_type is _FIELD_INITVAR:
return None
# Now, actually generate the field assignment.
return _field_assign(frozen, f.name, value, self_name)
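# For example (illustrative, not part of the original module): a field
# 'items' with init=True and default_factory=list yields roughly this
# generated __init__ line:
#
#   self.items=_dflt_items() if items is _HAS_DEFAULT_FACTORY else items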
def _init_param(f):
# Return the __init__ parameter string for this field. For
# example, the equivalent of 'x:int=3' (except instead of 'int',
# reference a variable set to int, and instead of '3', reference a
# variable set to 3).
if f.default is MISSING and f.default_factory is MISSING:
# There's no default, and no default_factory, just output the
# variable name and type.
default = ''
elif f.default is not MISSING:
# There's a default, this will be the name that's used to look
# it up.
default = f'=_dflt_{f.name}'
elif f.default_factory is not MISSING:
# There's a factory function. Set a marker.
default = '=_HAS_DEFAULT_FACTORY'
return f'{f.name}:_type_{f.name}{default}'
def _init_fn(fields, frozen, has_post_init, self_name):
# fields contains both real fields and InitVar pseudo-fields.
# Make sure we don't have fields without defaults following fields
# with defaults. This actually would be caught when exec-ing the
# function source code, but catching it here gives a better error
# message, and future-proofs us in case we build up the function
# using ast.
seen_default = False
for f in fields:
# Only consider fields in the __init__ call.
if f.init:
if not (f.default is MISSING and f.default_factory is MISSING):
seen_default = True
elif seen_default:
raise TypeError(f'non-default argument {f.name!r} '
'follows default argument')
globals = {'MISSING': MISSING,
'_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY}
body_lines = []
for f in fields:
line = _field_init(f, frozen, globals, self_name)
# line is None means that this field doesn't require
# initialization (it's a pseudo-field). Just skip it.
if line:
body_lines.append(line)
# Does this class have a post-init function?
if has_post_init:
params_str = ','.join(f.name for f in fields
if f._field_type is _FIELD_INITVAR)
body_lines.append(f'{self_name}.{_POST_INIT_NAME}({params_str})')
# If no body lines, use 'pass'.
if not body_lines:
body_lines = ['pass']
locals = {f'_type_{f.name}': f.type for f in fields}
return _create_fn('__init__',
[self_name] + [_init_param(f) for f in fields if f.init],
body_lines,
locals=locals,
globals=globals,
return_type=None)
def _repr_fn(fields):
return _create_fn('__repr__',
('self',),
['return self.__class__.__qualname__ + f"(' +
', '.join([f"{f.name}={{self.{f.name}!r}}"
for f in fields]) +
')"'])
def _frozen_get_del_attr(cls, fields):
# XXX: globals is modified on the first call to _create_fn, then
# the modified version is used in the second call. Is this okay?
globals = {'cls': cls,
'FrozenInstanceError': FrozenInstanceError}
if fields:
fields_str = '(' + ','.join(repr(f.name) for f in fields) + ',)'
else:
# Special case for the zero-length tuple.
fields_str = '()'
return (_create_fn('__setattr__',
('self', 'name', 'value'),
(f'if type(self) is cls or name in {fields_str}:',
' raise FrozenInstanceError(f"cannot assign to field {name!r}")',
f'super(cls, self).__setattr__(name, value)'),
globals=globals),
_create_fn('__delattr__',
('self', 'name'),
(f'if type(self) is cls or name in {fields_str}:',
' raise FrozenInstanceError(f"cannot delete field {name!r}")',
f'super(cls, self).__delattr__(name)'),
globals=globals),
)
def _cmp_fn(name, op, self_tuple, other_tuple):
# Create a comparison function. If the fields in the object are
# named 'x' and 'y', then self_tuple is the string
# '(self.x,self.y)' and other_tuple is the string
# '(other.x,other.y)'.
return _create_fn(name,
('self', 'other'),
[ 'if other.__class__ is self.__class__:',
f' return {self_tuple}{op}{other_tuple}',
'return NotImplemented'])
def _hash_fn(fields):
self_tuple = _tuple_str('self', fields)
return _create_fn('__hash__',
('self',),
[f'return hash({self_tuple})'])
def _is_classvar(a_type, typing):
# This test uses a typing internal class, but it's the best way to
# test if this is a ClassVar.
return type(a_type) is typing._ClassVar
def _is_initvar(a_type, dataclasses):
# The module we're checking against is the module we're
# currently in (dataclasses.py).
return a_type is dataclasses.InitVar
def _is_type(annotation, cls, a_module, a_type, is_type_predicate):
# Given a type annotation string, does it refer to a_type in
# a_module? For example, when checking that annotation denotes a
# ClassVar, then a_module is typing, and a_type is
# typing.ClassVar.
# It's possible to look up a_module given a_type, but it involves
# looking in sys.modules (again!), and seems like a waste since
# the caller already knows a_module.
# - annotation is a string type annotation
# - cls is the class that this annotation was found in
# - a_module is the module we want to match
# - a_type is the type in that module we want to match
# - is_type_predicate is a function called with (obj, a_module)
# that determines if obj is of the desired type.
# Since this test does not do a local namespace lookup (and
# instead only a module (global) lookup), there are some things it
# gets wrong.
# With string annotations, cv0 will be detected as a ClassVar:
# CV = ClassVar
# @dataclass
# class C0:
# cv0: CV
# But in this example cv1 will not be detected as a ClassVar:
# @dataclass
# class C1:
# CV = ClassVar
# cv1: CV
# In C1, the code in this function (_is_type) will look up "CV" in
# the module and not find it, so it will not consider cv1 as a
# ClassVar. This is a fairly obscure corner case, and the best
# way to fix it would be to eval() the string "CV" with the
# correct global and local namespaces. However that would involve
# a eval() penalty for every single field of every dataclass
# that's defined. It was judged not worth it.
match = _MODULE_IDENTIFIER_RE.match(annotation)
if match:
ns = None
module_name = match.group(1)
if not module_name:
# No module name, assume the class's module did
# "from dataclasses import InitVar".
ns = sys.modules.get(cls.__module__).__dict__
else:
# Look up module_name in the class's module.
module = sys.modules.get(cls.__module__)
if module and module.__dict__.get(module_name) is a_module:
ns = sys.modules.get(a_type.__module__).__dict__
if ns and is_type_predicate(ns.get(match.group(2)), a_module):
return True
return False
def _get_field(cls, a_name, a_type):
# Return a Field object for this field name and type. ClassVars
# and InitVars are also returned, but marked as such (see
# f._field_type).
# If the default value isn't derived from Field, then it's only a
# normal default value. Convert it to a Field().
default = getattr(cls, a_name, MISSING)
if isinstance(default, Field):
f = default
else:
if isinstance(default, types.MemberDescriptorType):
# This is a field in __slots__, so it has no default value.
default = MISSING
f = field(default=default)
# Only at this point do we know the name and the type. Set them.
f.name = a_name
f.type = a_type
# Assume it's a normal field until proven otherwise. We're next
# going to decide if it's a ClassVar or InitVar, everything else
# is just a normal field.
f._field_type = _FIELD
# In addition to checking for actual types here, also check for
# string annotations. get_type_hints() won't always work for us
# (see https://github.com/python/typing/issues/508 for example),
    # plus it's expensive and would require an eval for every string
# annotation. So, make a best effort to see if this is a ClassVar
# or InitVar using regex's and checking that the thing referenced
# is actually of the correct type.
# For the complete discussion, see https://bugs.python.org/issue33453
# If typing has not been imported, then it's impossible for any
# annotation to be a ClassVar. So, only look for ClassVar if
# typing has been imported by any module (not necessarily cls's
# module).
typing = sys.modules.get('typing')
if typing:
if (_is_classvar(a_type, typing)
or (isinstance(f.type, str)
and _is_type(f.type, cls, typing, typing.ClassVar,
_is_classvar))):
f._field_type = _FIELD_CLASSVAR
# If the type is InitVar, or if it's a matching string annotation,
# then it's an InitVar.
if f._field_type is _FIELD:
# The module we're checking against is the module we're
# currently in (dataclasses.py).
dataclasses = sys.modules[__name__]
if (_is_initvar(a_type, dataclasses)
or (isinstance(f.type, str)
and _is_type(f.type, cls, dataclasses, dataclasses.InitVar,
_is_initvar))):
f._field_type = _FIELD_INITVAR
# Validations for individual fields. This is delayed until now,
# instead of in the Field() constructor, since only here do we
# know the field name, which allows for better error reporting.
# Special restrictions for ClassVar and InitVar.
if f._field_type in (_FIELD_CLASSVAR, _FIELD_INITVAR):
if f.default_factory is not MISSING:
raise TypeError(f'field {f.name} cannot have a '
'default factory')
# Should I check for other field settings? default_factory
# seems the most serious to check for. Maybe add others. For
# example, how about init=False (or really,
# init=<not-the-default-init-value>)? It makes no sense for
# ClassVar and InitVar to specify init=<anything>.
# For real fields, disallow mutable defaults for known types.
if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)):
raise ValueError(f'mutable default {type(f.default)} for field '
f'{f.name} is not allowed: use default_factory')
return f
def _set_new_attribute(cls, name, value):
# Never overwrites an existing attribute. Returns True if the
# attribute already exists.
if name in cls.__dict__:
return True
setattr(cls, name, value)
return False
# Decide if/how we're going to create a hash function. Key is
# (unsafe_hash, eq, frozen, does-hash-exist). Value is the action to
# take. The common case is to do nothing, so instead of providing a
# function that is a no-op, use None to signify that.
def _hash_set_none(cls, fields):
return None
def _hash_add(cls, fields):
flds = [f for f in fields if (f.compare if f.hash is None else f.hash)]
return _hash_fn(flds)
def _hash_exception(cls, fields):
# Raise an exception.
raise TypeError(f'Cannot overwrite attribute __hash__ '
f'in class {cls.__name__}')
#
# +-------------------------------------- unsafe_hash?
# | +------------------------------- eq?
# | | +------------------------ frozen?
# | | | +---------------- has-explicit-hash?
# | | | |
# | | | | +------- action
# | | | | |
# v v v v v
_hash_action = {(False, False, False, False): None,
(False, False, False, True ): None,
(False, False, True, False): None,
(False, False, True, True ): None,
(False, True, False, False): _hash_set_none,
(False, True, False, True ): None,
(False, True, True, False): _hash_add,
(False, True, True, True ): None,
(True, False, False, False): _hash_add,
(True, False, False, True ): _hash_exception,
(True, False, True, False): _hash_add,
(True, False, True, True ): _hash_exception,
(True, True, False, False): _hash_add,
(True, True, False, True ): _hash_exception,
(True, True, True, False): _hash_add,
(True, True, True, True ): _hash_exception,
}
# See https://bugs.python.org/issue32929#msg312829 for an if-statement
# version of this table.
def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
# Now that dicts retain insertion order, there's no reason to use
# an ordered dict. I am leveraging that ordering here, because
# derived class fields overwrite base class fields, but the order
# is defined by the base class, which is found first.
fields = {}
setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order,
unsafe_hash, frozen))
# Find our base classes in reverse MRO order, and exclude
# ourselves. In reversed order so that more derived classes
# override earlier field definitions in base classes. As long as
# we're iterating over them, see if any are frozen.
any_frozen_base = False
has_dataclass_bases = False
for b in cls.__mro__[-1:0:-1]:
# Only process classes that have been processed by our
# decorator. That is, they have a _FIELDS attribute.
base_fields = getattr(b, _FIELDS, None)
if base_fields:
has_dataclass_bases = True
for f in base_fields.values():
fields[f.name] = f
if getattr(b, _PARAMS).frozen:
any_frozen_base = True
# Annotations that are defined in this class (not in base
# classes). If __annotations__ isn't present, then this class
# adds no new annotations. We use this to compute fields that are
# added by this class.
#
# Fields are found from cls_annotations, which is guaranteed to be
# ordered. Default values are from class attributes, if a field
# has a default. If the default value is a Field(), then it
# contains additional info beyond (and possibly including) the
# actual default value. Pseudo-fields ClassVars and InitVars are
# included, despite the fact that they're not real fields. That's
# dealt with later.
cls_annotations = cls.__dict__.get('__annotations__', {})
# Now find fields in our class. While doing so, validate some
# things, and set the default values (as class attributes) where
# we can.
cls_fields = [_get_field(cls, name, type)
for name, type in cls_annotations.items()]
for f in cls_fields:
fields[f.name] = f
# If the class attribute (which is the default value for this
# field) exists and is of type 'Field', replace it with the
# real default. This is so that normal class introspection
# sees a real default value, not a Field.
if isinstance(getattr(cls, f.name, None), Field):
if f.default is MISSING:
# If there's no default, delete the class attribute.
# This happens if we specify field(repr=False), for
# example (that is, we specified a field object, but
# no default value). Also if we're using a default
# factory. The class attribute should not be set at
# all in the post-processed class.
delattr(cls, f.name)
else:
setattr(cls, f.name, f.default)
# Do we have any Field members that don't also have annotations?
for name, value in cls.__dict__.items():
if isinstance(value, Field) and not name in cls_annotations:
raise TypeError(f'{name!r} is a field but has no type annotation')
# Check rules that apply if we are derived from any dataclasses.
if has_dataclass_bases:
# Raise an exception if any of our bases are frozen, but we're not.
if any_frozen_base and not frozen:
raise TypeError('cannot inherit non-frozen dataclass from a '
'frozen one')
# Raise an exception if we're frozen, but none of our bases are.
if not any_frozen_base and frozen:
raise TypeError('cannot inherit frozen dataclass from a '
'non-frozen one')
# Remember all of the fields on our class (including bases). This
# also marks this class as being a dataclass.
setattr(cls, _FIELDS, fields)
# Was this class defined with an explicit __hash__? Note that if
# __eq__ is defined in this class, then python will automatically
# set __hash__ to None. This is a heuristic, as it's possible
    # that such a __hash__ == None was not auto-generated, but it's
    # close enough.
class_hash = cls.__dict__.get('__hash__', MISSING)
has_explicit_hash = not (class_hash is MISSING or
(class_hash is None and '__eq__' in cls.__dict__))
# If we're generating ordering methods, we must be generating the
# eq methods.
if order and not eq:
raise ValueError('eq must be true if order is true')
if init:
# Does this class have a post-init function?
has_post_init = hasattr(cls, _POST_INIT_NAME)
# Include InitVars and regular fields (so, not ClassVars).
flds = [f for f in fields.values()
if f._field_type in (_FIELD, _FIELD_INITVAR)]
_set_new_attribute(cls, '__init__',
_init_fn(flds,
frozen,
has_post_init,
# The name to use for the "self"
# param in __init__. Use "self"
# if possible.
'__dataclass_self__' if 'self' in fields
else 'self',
))
# Get the fields as a list, and include only real fields. This is
# used in all of the following methods.
field_list = [f for f in fields.values() if f._field_type is _FIELD]
if repr:
flds = [f for f in field_list if f.repr]
_set_new_attribute(cls, '__repr__', _repr_fn(flds))
if eq:
        # Create __eq__ method. There's no need for a __ne__ method,
# since python will call __eq__ and negate it.
flds = [f for f in field_list if f.compare]
self_tuple = _tuple_str('self', flds)
other_tuple = _tuple_str('other', flds)
_set_new_attribute(cls, '__eq__',
_cmp_fn('__eq__', '==',
self_tuple, other_tuple))
if order:
# Create and set the ordering methods.
flds = [f for f in field_list if f.compare]
self_tuple = _tuple_str('self', flds)
other_tuple = _tuple_str('other', flds)
for name, op in [('__lt__', '<'),
('__le__', '<='),
('__gt__', '>'),
('__ge__', '>='),
]:
if _set_new_attribute(cls, name,
_cmp_fn(name, op, self_tuple, other_tuple)):
raise TypeError(f'Cannot overwrite attribute {name} '
f'in class {cls.__name__}. Consider using '
'functools.total_ordering')
if frozen:
for fn in _frozen_get_del_attr(cls, field_list):
if _set_new_attribute(cls, fn.__name__, fn):
raise TypeError(f'Cannot overwrite attribute {fn.__name__} '
f'in class {cls.__name__}')
# Decide if/how we're going to create a hash function.
hash_action = _hash_action[bool(unsafe_hash),
bool(eq),
bool(frozen),
has_explicit_hash]
if hash_action:
# No need to call _set_new_attribute here, since by the time
# we're here the overwriting is unconditional.
cls.__hash__ = hash_action(cls, field_list)
if not getattr(cls, '__doc__'):
# Create a class doc-string.
cls.__doc__ = (cls.__name__ +
str(inspect.signature(cls)).replace(' -> None', ''))
return cls
# _cls should never be specified by keyword, so start it with an
# underscore. The presence of _cls is used to detect if this
# decorator is being called with parameters or not.
def dataclass(_cls=None, *, init=True, repr=True, eq=True, order=False,
unsafe_hash=False, frozen=False):
"""Returns the same class as was passed in, with dunder methods
added based on the fields defined in the class.
Examines PEP 526 __annotations__ to determine fields.
If init is true, an __init__() method is added to the class. If
repr is true, a __repr__() method is added. If order is true, rich
comparison dunder methods are added. If unsafe_hash is true, a
__hash__() method function is added. If frozen is true, fields may
not be assigned to after instance creation.
"""
def wrap(cls):
return _process_class(cls, init, repr, eq, order, unsafe_hash, frozen)
# See if we're being called as @dataclass or @dataclass().
if _cls is None:
# We're called with parens.
return wrap
# We're called as @dataclass without parens.
return wrap(_cls)
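# Illustrative usage of the decorator (not part of the original module):
#
#   @dataclass(order=True)
#   class InventoryItem:
#       name: str
#       unit_price: float
#       quantity: int = 0
#
#   a = InventoryItem('widget', 3.0, quantity=10)
#   a == InventoryItem('widget', 3.0, 10)    # True, via the generated __eq__
#   a < InventoryItem('widget', 4.0)         # True, via the generated __lt__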
def fields(class_or_instance):
"""Return a tuple describing the fields of this dataclass.
Accepts a dataclass or an instance of one. Tuple elements are of
type Field.
"""
# Might it be worth caching this, per class?
try:
fields = getattr(class_or_instance, _FIELDS)
except AttributeError:
raise TypeError('must be called with a dataclass type or instance')
# Exclude pseudo-fields. Note that fields is sorted by insertion
# order, so the order of the tuple is as the fields were defined.
return tuple(f for f in fields.values() if f._field_type is _FIELD)
def _is_dataclass_instance(obj):
"""Returns True if obj is an instance of a dataclass."""
return not isinstance(obj, type) and hasattr(obj, _FIELDS)
def is_dataclass(obj):
"""Returns True if obj is a dataclass or an instance of a
dataclass."""
return hasattr(obj, _FIELDS)
def asdict(obj, *, dict_factory=dict):
"""Return the fields of a dataclass instance as a new dictionary mapping
field names to field values.
Example usage:
@dataclass
class C:
x: int
y: int
c = C(1, 2)
assert asdict(c) == {'x': 1, 'y': 2}
If given, 'dict_factory' will be used instead of built-in dict.
The function applies recursively to field values that are
dataclass instances. This will also look into built-in containers:
tuples, lists, and dicts.
"""
if not _is_dataclass_instance(obj):
raise TypeError("asdict() should be called on dataclass instances")
return _asdict_inner(obj, dict_factory)
def _asdict_inner(obj, dict_factory):
if _is_dataclass_instance(obj):
result = []
for f in fields(obj):
value = _asdict_inner(getattr(obj, f.name), dict_factory)
result.append((f.name, value))
return dict_factory(result)
elif isinstance(obj, (list, tuple)):
return type(obj)(_asdict_inner(v, dict_factory) for v in obj)
elif isinstance(obj, dict):
return type(obj)((_asdict_inner(k, dict_factory), _asdict_inner(v, dict_factory))
for k, v in obj.items())
else:
return copy.deepcopy(obj)
def astuple(obj, *, tuple_factory=tuple):
"""Return the fields of a dataclass instance as a new tuple of field values.
Example usage::
@dataclass
class C:
x: int
y: int
c = C(1, 2)
assert astuple(c) == (1, 2)
If given, 'tuple_factory' will be used instead of built-in tuple.
The function applies recursively to field values that are
dataclass instances. This will also look into built-in containers:
tuples, lists, and dicts.
"""
if not _is_dataclass_instance(obj):
raise TypeError("astuple() should be called on dataclass instances")
return _astuple_inner(obj, tuple_factory)
def _astuple_inner(obj, tuple_factory):
if _is_dataclass_instance(obj):
result = []
for f in fields(obj):
value = _astuple_inner(getattr(obj, f.name), tuple_factory)
result.append(value)
return tuple_factory(result)
elif isinstance(obj, (list, tuple)):
return type(obj)(_astuple_inner(v, tuple_factory) for v in obj)
elif isinstance(obj, dict):
return type(obj)((_astuple_inner(k, tuple_factory), _astuple_inner(v, tuple_factory))
for k, v in obj.items())
else:
return copy.deepcopy(obj)
def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True,
repr=True, eq=True, order=False, unsafe_hash=False,
frozen=False):
"""Return a new dynamically created dataclass.
The dataclass name will be 'cls_name'. 'fields' is an iterable
of either (name), (name, type) or (name, type, Field) objects. If type is
omitted, use the string 'typing.Any'. Field objects are created by
the equivalent of calling 'field(name, type [, Field-info])'.
C = make_dataclass('C', ['x', ('y', int), ('z', int, field(init=False))], bases=(Base,))
is equivalent to:
@dataclass
class C(Base):
x: 'typing.Any'
y: int
z: int = field(init=False)
For the bases and namespace parameters, see the builtin type() function.
The parameters init, repr, eq, order, unsafe_hash, and frozen are passed to
dataclass().
"""
if namespace is None:
namespace = {}
else:
# Copy namespace since we're going to mutate it.
namespace = namespace.copy()
# While we're looking through the field names, validate that they
# are identifiers, are not keywords, and not duplicates.
seen = set()
anns = {}
for item in fields:
if isinstance(item, str):
name = item
tp = 'typing.Any'
elif len(item) == 2:
name, tp, = item
elif len(item) == 3:
name, tp, spec = item
namespace[name] = spec
else:
raise TypeError(f'Invalid field: {item!r}')
if not isinstance(name, str) or not name.isidentifier():
            raise TypeError(f'Field names must be valid identifiers: {name!r}')
if keyword.iskeyword(name):
raise TypeError(f'Field names must not be keywords: {name!r}')
if name in seen:
raise TypeError(f'Field name duplicated: {name!r}')
seen.add(name)
anns[name] = tp
namespace['__annotations__'] = anns
# We use `types.new_class()` instead of simply `type()` to allow dynamic creation
    # of generic dataclasses.
cls = types.new_class(cls_name, bases, {}, lambda ns: ns.update(namespace))
return dataclass(cls, init=init, repr=repr, eq=eq, order=order,
unsafe_hash=unsafe_hash, frozen=frozen)
def replace(obj, **changes):
"""Return a new object replacing specified fields with new values.
This is especially useful for frozen classes. Example usage:
@dataclass(frozen=True)
class C:
x: int
y: int
c = C(1, 2)
c1 = replace(c, x=3)
assert c1.x == 3 and c1.y == 2
"""
# We're going to mutate 'changes', but that's okay because it's a
# new dict, even if called with 'replace(obj, **my_changes)'.
if not _is_dataclass_instance(obj):
raise TypeError("replace() should be called on dataclass instances")
# It's an error to have init=False fields in 'changes'.
# If a field is not in 'changes', read its value from the provided obj.
for f in getattr(obj, _FIELDS).values():
# Only consider normal fields or InitVars.
if f._field_type is _FIELD_CLASSVAR:
continue
if not f.init:
# Error if this field is specified in changes.
if f.name in changes:
raise ValueError(f'field {f.name} is declared with '
'init=False, it cannot be specified with '
'replace()')
continue
if f.name not in changes:
if f._field_type is _FIELD_INITVAR:
raise ValueError(f"InitVar {f.name!r} "
'must be specified with replace()')
changes[f.name] = getattr(obj, f.name)
# Create the new object, which calls __init__() and
# __post_init__() (if defined), using all of the init fields we've
# added and/or left in 'changes'. If there are values supplied in
# changes that aren't fields, this will correctly raise a
# TypeError.
    return obj.__class__(**changes)

# ################################################################################################################################
# End of file: src/zato/common/ext/_dataclasses.py (package: zato-common)
# ################################################################################################################################
import imaplib
import io
import re
import email
import chardet
import base64
import quopri
import sys
import time
from datetime import datetime
from email.header import decode_header
from zato.common.ext.imbox.utils import str_encode, str_decode
import logging
logger = logging.getLogger(__name__)
class Struct:
def __init__(self, **entries):
self.__dict__.update(entries)
def keys(self):
return self.__dict__.keys()
def __repr__(self):
return str(self.__dict__)
def decode_mail_header(value, default_charset='us-ascii'):
"""
Decode a header value into a unicode string.
"""
try:
headers = decode_header(value)
except email.errors.HeaderParseError:
return str_decode(str_encode(value, default_charset, 'replace'), default_charset)
else:
for index, (text, charset) in enumerate(headers):
logger.debug("Mail header no. {index}: {data} encoding {charset}".format(
index=index,
data=str_decode(text, charset or 'utf-8', 'replace'),
charset=charset))
try:
headers[index] = str_decode(text, charset or default_charset,
'replace')
except LookupError:
# if the charset is unknown, force default
headers[index] = str_decode(text, default_charset, 'replace')
return ''.join(headers)
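# Illustrative example (not part of the original module), using the
# encoded-word sample from the email.header documentation:
#
#   decode_mail_header('=?iso-8859-1?q?p=F6stal?=')    # -> 'pöstal'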
def get_mail_addresses(message, header_name):
"""
Retrieve all email addresses from one message header.
"""
headers = [h for h in message.get_all(header_name, [])]
addresses = email.utils.getaddresses(headers)
for index, (address_name, address_email) in enumerate(addresses):
addresses[index] = {'name': decode_mail_header(address_name),
'email': address_email}
logger.debug("{} Mail address in message: <{}> {}".format(
header_name.upper(), address_name, address_email))
return addresses
def decode_param(param):
name, v = param.split('=', 1)
values = v.split('\n')
value_results = []
for value in values:
match = re.search(r'=\?((?:\w|-)+)\?([QB])\?(.+)\?=', value)
if match:
encoding, type_, code = match.groups()
if type_ == 'Q':
value = quopri.decodestring(code)
elif type_ == 'B':
value = base64.decodebytes(code.encode())
value = str_encode(value, encoding)
value_results.append(value)
if value_results:
v = ''.join(value_results)
logger.debug("Decoded parameter {} - {}".format(name, v))
return name, v
def parse_attachment(message_part):
# Check again if this is a valid attachment
content_disposition = message_part.get("Content-Disposition", None)
if content_disposition is not None and not message_part.is_multipart():
dispositions = [
disposition.strip()
for disposition in content_disposition.split(";")
if disposition.strip()
]
if dispositions[0].lower() in ["attachment", "inline"]:
file_data = message_part.get_payload(decode=True)
attachment = {
'content-type': message_part.get_content_type(),
'size': len(file_data),
'content': io.BytesIO(file_data),
'content-id': message_part.get("Content-ID", None)
}
filename = message_part.get_param('name')
if filename:
attachment['filename'] = filename
filename_parts = []
for param in dispositions[1:]:
if param:
name, value = decode_param(param)
# Check for split filename
s_name = name.split("*")
if s_name[0] == 'filename':
# If this is a split file name - use the number after the * as an index to insert this part
if len(s_name) > 1:
                            filename_parts.insert(int(s_name[1]), value[1:-1] if value.startswith('"') else value)
else:
                            filename_parts.insert(0, value[1:-1] if value.startswith('"') else value)
if 'create-date' in name:
attachment['create-date'] = value
attachment['filename'] = "".join(filename_parts)
return attachment
return None
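# Illustrative example (not part of the original module) of the split
# filename handling above - a header such as:
#
#   Content-Disposition: attachment; filename*0="repor"; filename*1="t.pdf"
#
# produces filename_parts == ['repor', 't.pdf'] and therefore
# attachment['filename'] == 'report.pdf'.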
def decode_content(message):
content = message.get_payload(decode=True)
charset = message.get_content_charset('utf-8')
try:
return content.decode(charset, 'ignore')
except LookupError:
encoding = chardet.detect(content).get('encoding')
if encoding:
return content.decode(encoding, 'ignore')
return content
except AttributeError:
return content
def fetch_email_by_uid(uid, connection, parser_policy):
message, data = connection.uid('fetch', uid, '(BODY.PEEK[] FLAGS)')
logger.debug("Fetched message for UID {}".format(int(uid)))
raw_headers, raw_email = data[0]
email_object = parse_email(raw_email, policy=parser_policy)
flags = parse_flags(raw_headers.decode())
email_object.__dict__['flags'] = flags
return email_object
def parse_flags(headers):
"""Copied from https://github.com/girishramnani/gmail/blob/master/gmail/message.py"""
if len(headers) == 0:
return []
if sys.version_info[0] == 3:
headers = bytes(headers, "ascii")
return list(imaplib.ParseFlags(headers))
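# Illustrative example (not part of the original module):
#
#   parse_flags('1 (FLAGS (\\Seen \\Answered))')
#   # -> [b'\\Seen', b'\\Answered']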
def parse_email(raw_email, policy=None):
if isinstance(raw_email, bytes):
raw_email = str_encode(raw_email, 'utf-8', errors='ignore')
if policy is not None:
email_parse_kwargs = dict(policy=policy)
else:
email_parse_kwargs = {}
try:
email_message = email.message_from_string(
raw_email, **email_parse_kwargs)
except UnicodeEncodeError:
email_message = email.message_from_string(
raw_email.encode('utf-8'), **email_parse_kwargs)
maintype = email_message.get_content_maintype()
parsed_email = {'raw_email': raw_email}
body = {
"plain": [],
"html": []
}
attachments = []
if maintype in ('multipart', 'image'):
logger.debug("Multipart message. Will process parts.")
for part in email_message.walk():
content_type = part.get_content_type()
part_maintype = part.get_content_maintype()
content_disposition = part.get('Content-Disposition', None)
if content_disposition or not part_maintype == "text":
content = part.get_payload(decode=True)
else:
content = decode_content(part)
is_inline = content_disposition is None \
or content_disposition.startswith("inline")
if content_type == "text/plain" and is_inline:
body['plain'].append(content)
elif content_type == "text/html" and is_inline:
body['html'].append(content)
elif content_disposition:
attachment = parse_attachment(part)
if attachment:
attachments.append(attachment)
elif maintype == 'text':
payload = decode_content(email_message)
body['plain'].append(payload)
parsed_email['attachments'] = attachments
parsed_email['body'] = body
email_dict = dict(email_message.items())
parsed_email['sent_from'] = get_mail_addresses(email_message, 'from')
parsed_email['sent_to'] = get_mail_addresses(email_message, 'to')
parsed_email['cc'] = get_mail_addresses(email_message, 'cc')
parsed_email['bcc'] = get_mail_addresses(email_message, 'bcc')
value_headers_keys = ['subject', 'date', 'message-id']
key_value_header_keys = ['received-spf',
'mime-version',
'x-spam-status',
'x-spam-score',
'content-type']
parsed_email['headers'] = []
for key, value in email_dict.items():
if key.lower() in value_headers_keys:
valid_key_name = key.lower().replace('-', '_')
parsed_email[valid_key_name] = decode_mail_header(value)
if key.lower() in key_value_header_keys:
parsed_email['headers'].append({'Name': key,
'Value': value})
if parsed_email.get('date'):
timetuple = email.utils.parsedate(parsed_email['date'])
parsed_date = datetime.fromtimestamp(time.mktime(timetuple)) \
if timetuple else None
parsed_email['parsed_date'] = parsed_date
logger.info("Downloaded and parsed mail '{}' with {} attachments".format(
parsed_email.get('subject'), len(parsed_email.get('attachments'))))
    return Struct(**parsed_email)

# ################################################################################################################################
# End of file: src/zato/common/ext/imbox/parser.py (package: zato-common)
# ################################################################################################################################
import imaplib
from zato.common.ext.imbox.imap import ImapTransport
from zato.common.ext.imbox.messages import Messages
import logging
from zato.common.ext.imbox.vendors import GmailMessages, hostname_vendorname_dict, name_authentication_string_dict
logger = logging.getLogger(__name__)
class Imbox:
authentication_error_message = None
def __init__(self, hostname, username=None, password=None, ssl=True,
port=None, ssl_context=None, policy=None, starttls=False,
vendor=None):
self.server = ImapTransport(hostname, ssl=ssl, port=port,
ssl_context=ssl_context, starttls=starttls)
self.hostname = hostname
self.username = username
self.password = password
self.parser_policy = policy
self.vendor = vendor or hostname_vendorname_dict.get(self.hostname)
if self.vendor is not None:
self.authentication_error_message = name_authentication_string_dict.get(
self.vendor)
try:
self.connection = self.server.connect(username, password)
except imaplib.IMAP4.error as e:
if self.authentication_error_message is None:
raise
raise imaplib.IMAP4.error(
self.authentication_error_message + '\n' + str(e))
logger.info("Connected to IMAP Server with user {username} on {hostname}{ssl}".format(
hostname=hostname, username=username, ssl=(" over SSL" if ssl or starttls else "")))
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.logout()
def logout(self):
self.connection.close()
self.connection.logout()
logger.info("Disconnected from IMAP Server {username}@{hostname}".format(
hostname=self.hostname, username=self.username))
def mark_seen(self, uid):
logger.info("Mark UID {} with \\Seen FLAG".format(int(uid)))
self.connection.uid('STORE', uid, '+FLAGS', '(\\Seen)')
def mark_flag(self, uid):
logger.info("Mark UID {} with \\Flagged FLAG".format(int(uid)))
self.connection.uid('STORE', uid, '+FLAGS', '(\\Flagged)')
def delete(self, uid):
logger.info(
"Mark UID {} with \\Deleted FLAG and expunge.".format(int(uid)))
self.connection.uid('STORE', uid, '+FLAGS', '(\\Deleted)')
self.connection.expunge()
def copy(self, uid, destination_folder):
logger.info("Copy UID {} to {} folder".format(
int(uid), str(destination_folder)))
return self.connection.uid('COPY', uid, destination_folder)
def move(self, uid, destination_folder):
logger.info("Move UID {} to {} folder".format(
int(uid), str(destination_folder)))
if self.copy(uid, destination_folder):
self.delete(uid)
def messages(self, **kwargs):
folder = kwargs.get('folder', False)
messages_class = Messages
if self.vendor == 'gmail':
messages_class = GmailMessages
if folder:
self.connection.select(
messages_class.FOLDER_LOOKUP.get((folder.lower())) or folder)
msg = " from folder '{}'".format(folder)
del kwargs['folder']
else:
msg = " from inbox"
logger.info("Fetch list of messages{}".format(msg))
return messages_class(connection=self.connection,
parser_policy=self.parser_policy,
**kwargs)
def folders(self):
        return self.connection.list()

# ################################################################################################################################
# End of file: src/zato/common/ext/imbox/imbox.py (package: zato-common)
# ################################################################################################################################
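# Illustrative usage of the Imbox class above (not part of the original
# module; the import path is an assumption):
#
#   from zato.common.ext.imbox import Imbox
#
#   with Imbox('imap.example.com', username='user', password='secret') as imbox:
#       for uid, message in imbox.messages(unread=True):
#           print(message.subject, message.sent_from)
#           imbox.mark_seen(uid)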
import logging
from zato.common.ext.imbox.query import build_search_query
from zato.common.ext.imbox.parser import fetch_email_by_uid
logger = logging.getLogger(__name__)
class Messages:
IMAP_ATTRIBUTE_LOOKUP = {
'unread': '(UNSEEN)',
'flagged': '(FLAGGED)',
'unflagged': '(UNFLAGGED)',
'sent_from': '(FROM "{}")',
'sent_to': '(TO "{}")',
'date__gt': '(SINCE "{}")',
'date__lt': '(BEFORE "{}")',
'date__on': '(ON "{}")',
'subject': '(SUBJECT "{}")',
'uid__range': '(UID {})',
'text': '(TEXT "{}")',
}
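    # Illustrative note (not part of the original module): with the lookup
    # table above, Messages(connection, policy, unread=True,
    # sent_from='[email protected]') builds, via build_search_query, an IMAP
    # search string of roughly '(UNSEEN) (FROM "[email protected]")'.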
FOLDER_LOOKUP = {}
def __init__(self,
connection,
parser_policy,
**kwargs):
self.connection = connection
self.parser_policy = parser_policy
self.kwargs = kwargs
self._uid_list = self._query_uids(**kwargs)
logger.debug("Fetch all messages for UID in {}".format(self._uid_list))
def _fetch_email(self, uid):
return fetch_email_by_uid(uid=uid,
connection=self.connection,
parser_policy=self.parser_policy)
def _query_uids(self, **kwargs):
query_ = build_search_query(self.IMAP_ATTRIBUTE_LOOKUP, **kwargs)
_, data = self.connection.uid('search', None, query_)
if data[0] is None:
return []
return data[0].split()
def _fetch_email_list(self):
for uid in self._uid_list:
yield uid, self._fetch_email(uid)
def __repr__(self):
if len(self.kwargs) > 0:
return 'Messages({})'.format('\n'.join('{}={}'.format(key, value)
for key, value in self.kwargs.items()))
return 'Messages(ALL)'
def __iter__(self):
return self._fetch_email_list()
def __next__(self):
return self
def __len__(self):
return len(self._uid_list)
def __getitem__(self, index):
uids = self._uid_list[index]
if not isinstance(uids, list):
uid = uids
return uid, self._fetch_email(uid)
return [(uid, self._fetch_email(uid))
                for uid in uids]

# ################################################################################################################################
# End of file: src/zato/common/ext/imbox/messages.py (package: zato-common)
# ################################################################################################################################
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
import sys
# Zato
from zato.common.const import SECRETS
# ################################################################################################################################
# ################################################################################################################################
def resolve_secret_key(secret_key, _url_prefix=SECRETS.URL_PREFIX):
""" Finds a secret key among command line options or via environment variables.
"""
# We always require a string
secret_key = secret_key or ''
    # Normalize both values to str - the prefix checks and .replace calls
    # below operate on str, and the final return statement encodes the
    # result back to bytes. (The original encoded _url_prefix to bytes
    # whenever a non-empty key was given, which made str.startswith raise
    # a TypeError for str input.)
    if isinstance(secret_key, bytes):
        secret_key = secret_key.decode('utf8')
    if isinstance(_url_prefix, bytes):
        _url_prefix = _url_prefix.decode('utf8')
# This is a direct value, to be used as-is
if not secret_key.startswith(_url_prefix):
return secret_key
else:
# We need to look it up somewhere
secret_key = secret_key.replace(_url_prefix, '', 1)
# Command line options
if secret_key.startswith('cli'):
# This will be used by check-config
for idx, elem in enumerate(sys.argv):
if elem == '--secret-key':
secret_key = sys.argv[idx+1]
break
# This will be used when components are invoked as subprocesses
else:
# To prevent circular imports
from zato.common.util.api import parse_cmd_line_options
cli_options = parse_cmd_line_options(sys.argv[1])
secret_key = cli_options['secret_key']
# Environment variables
elif secret_key.startswith('env'):
env_key = secret_key.replace('env.', '', 1)
secret_key = os.environ[env_key]
# Unknown scheme, we need to give up
else:
raise ValueError('Unknown secret key type `{}`'.format(secret_key))
# At this point, we have a secret key extracted in one way or another
return secret_key if isinstance(secret_key, bytes) else secret_key.encode('utf8')
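# Illustrative usage (not part of the original module), assuming that
# SECRETS.URL_PREFIX is the string 'zato+secret://':
#
#   os.environ['ZATO_SECRET_KEY'] = 'abc123'
#   resolve_secret_key('zato+secret://env.ZATO_SECRET_KEY')   # -> b'abc123'
#
#   resolve_secret_key('abc123')   # No prefix - returned as-is, encoded to bytes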
# ################################################################################################################################
# ################################################################################################################################
# End of file: src/zato/common/crypto/secret_key.py (package: zato-common)
# ################################################################################################################################