import threading

from pysqlcipher import dbapi2 as sqlite

from zas_rep_tools.src.utils.custom_exceptions import ZASCursorError, ZASConnectionError
from zas_rep_tools.src.utils.debugger import p
from zas_rep_tools.src.classes.sql.basic import BasicConnection, BasicCursor
from zas_rep_tools.src.classes.basecontent import BaseContent

# Re-export the DB-API exception hierarchy so that callers can catch these
# errors without importing pysqlcipher themselves.
OperationalError = sqlite.OperationalError
DatabaseError = sqlite.DatabaseError
DataError = sqlite.DataError
InternalError = sqlite.InternalError
IntegrityError = sqlite.IntegrityError
NotSupportedError = sqlite.NotSupportedError
ProgrammingError = sqlite.ProgrammingError
InterfaceError = sqlite.InterfaceError

auto_clear = True


def connect(*args, **kwargs):
    """Module-level factory mirroring sqlite3.connect()."""
    return MultiThreadMultiCursor(*args, **kwargs).connect(*args, **kwargs)
class MultiThreadMultiCursor(BaseContent, BasicConnection, threading.Thread):
    """Connection wrapper whose cursors serialize access through a shared
    lock, so one connection can be used from several threads."""

    def __init__(self, *args, **kwargs):
        # Name the class explicitly here: super(type(self), self) would
        # recurse infinitely as soon as this class is subclassed.
        super(MultiThreadMultiCursor, self).__init__(**kwargs)
        threading.Thread.__init__(self)
        self.lock_connection = threading.Lock()
        self.daemon = True
        self.start()
    def connect(self, *args, **kwargs):
        # Forward only the sqlite-specific keyword arguments.
        isolation_level = kwargs.get("isolation_level", None)
        check_same_thread = kwargs.get("check_same_thread", None)
        kargs = {}
        if isolation_level is not None:
            kargs["isolation_level"] = isolation_level
        if check_same_thread is not None:
            kargs["check_same_thread"] = check_same_thread
        if not self._connection:
            self._connection = sqlite.connect(*args, **kargs)
            return self
        else:
            raise ZASConnectionError("Connection already exists!")

    def _cursor(self, *args, **kwargs):
        if not self._connection:
            raise ZASConnectionError("No active connection!")
        return MultiCursor(self._connection, self.lock_connection, *args, **kwargs)
class MultiCursor(BasicCursor, threading.Thread):
    """Cursor that guards every execute* call with the connection-wide lock."""

    def __init__(self, connection, conn_lock, *args, **kwargs):
        threading.Thread.__init__(self)
        self.connection = connection
        self.lock_connection = conn_lock
        self.cursor = self.connection.cursor(*args, **kwargs)
        self.daemon = True
        self.start()
    def execute(self, sql, *args, **kwargs):
        # Only one thread at a time may talk to the shared connection.
        with self.lock_connection:
            self._check_cursor_existenz()
            self.cursor.execute(sql, *args, **kwargs)
        self.join()
        return self

    def executemany(self, sql, *args, **kwargs):
        with self.lock_connection:
            self._check_cursor_existenz()
            self.cursor.executemany(sql, *args, **kwargs)
        self.join()
        return self

    def executescript(self, sql, *args, **kwargs):
        with self.lock_connection:
            self._check_cursor_existenz()
            self.cursor.executescript(sql, *args, **kwargs)
        self.join()
        return self

# ---- zas-rep-tools 0.2 :: zas_rep_tools/src/classes/sql/MultiThreadMultiCursor.py ----
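# Usage sketch (assumes an existing SQLite/SQLCipher database file
# "corpus.db" with a table "documents" -- both names are illustrative):
# the module-level connect() returns a MultiThreadMultiCursor whose cursors
# serialize access through a shared lock, so several threads can share one
# connection:
#
#     conn = connect("corpus.db")
#     def worker():
#         cur = conn.cursor()
#         cur.execute("SELECT count(*) FROM documents;")
#         print(cur.fetchone())
#     threads = [threading.Thread(target=worker) for _ in range(4)]
#     for t in threads: t.start()
#     for t in threads: t.join()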
from threading import Thread

from pysqlcipher import dbapi2 as sqlite

from zas_rep_tools.src.utils.custom_exceptions import ZASCursorError, ZASConnectionError
from zas_rep_tools.src.utils.debugger import p

# Re-export the DB-API exception hierarchy for callers.
OperationalError = sqlite.OperationalError
DatabaseError = sqlite.DatabaseError
DataError = sqlite.DataError
InternalError = sqlite.InternalError
IntegrityError = sqlite.IntegrityError
NotSupportedError = sqlite.NotSupportedError
ProgrammingError = sqlite.ProgrammingError
InterfaceError = sqlite.InterfaceError
class BasicConnection(object):
    """Thin wrapper around a pysqlcipher connection object."""

    def __init__(self, *args, **kwargs):
        self._connection = False

    def __del__(self):
        try:
            self._connection.close()
            del self._connection
        except Exception:
            pass

    def connect(self, *args, **kwargs):
        if not self._connection:
            self._connection = sqlite.connect(*args, **kwargs)
            return self
        else:
            raise ZASConnectionError("Connection already exists!")

    def cursor(self, *args, **kwargs):
        return self._cursor(*args, **kwargs)

    def _cursor(self, *args, **kwargs):
        if not self._connection:
            raise ZASConnectionError("No active connection!")
        return BasicCursor(self._connection, *args, **kwargs)
    # Thin delegation to the underlying connection object.
    def enable_load_extension(self, *args, **kwargs):
        self._connection.enable_load_extension(*args, **kwargs)

    def rollback(self, *args, **kwargs):
        self._connection.rollback(*args, **kwargs)

    def create_function(self, *args, **kwargs):
        self._connection.create_function(*args, **kwargs)

    def create_aggregate(self, *args, **kwargs):
        self._connection.create_aggregate(*args, **kwargs)

    def create_collation(self, *args, **kwargs):
        self._connection.create_collation(*args, **kwargs)

    def interrupt(self, *args, **kwargs):
        self._connection.interrupt(*args, **kwargs)

    def set_authorizer(self, *args, **kwargs):
        self._connection.set_authorizer(*args, **kwargs)

    def set_progress_handler(self, *args, **kwargs):
        self._connection.set_progress_handler(*args, **kwargs)

    def load_extension(self, *args, **kwargs):
        self._connection.load_extension(*args, **kwargs)

    def close(self):
        if not self._connection:
            raise ZASConnectionError("No active connection!")
        self._connection.close()
        self._connection = False

    def commit(self):
        if not self._connection:
            raise ZASConnectionError("No active connection!")
        self._connection.commit()
    @property
    def row_factory(self):
        return self._connection.row_factory

    @row_factory.setter
    def row_factory(self, value):
        self._connection.row_factory = value

    @property
    def text_factory(self):
        return self._connection.text_factory

    @text_factory.setter
    def text_factory(self, value):
        self._connection.text_factory = value

    # Note: total_changes and iterdump are read-only on the underlying
    # connection, so no setters are defined for them.
    @property
    def total_changes(self):
        return self._connection.total_changes

    @property
    def iterdump(self):
        return self._connection.iterdump

    @property
    def isolation_level(self):
        return self._connection.isolation_level

    @isolation_level.setter
    def isolation_level(self, value):
        self._connection.isolation_level = value
class BasicCursor(object):
    """Thin wrapper around a pysqlcipher cursor object."""

    def __init__(self, connection, *args, **kwargs):
        self.connection = connection
        self.cursor = self.connection.cursor(*args, **kwargs)

    def __del__(self):
        try:
            self.cursor.close()
        except Exception:
            pass

    def _check_cursor_existenz(self):
        if not self.cursor:
            raise ZASCursorError("Cursor for this connection does not exist or was deleted. (Check the 'auto_clear' option.)")

    def fetchone(self):
        self._check_cursor_existenz()
        return self.cursor.fetchone()

    def fetchmany(self, size_to_fetch):
        # Generator variant of fetchmany(): yields up to size_to_fetch rows
        # and stops early (after yielding None once) on a falsy row.
        self._check_cursor_existenz()
        for row in self.cursor.fetchmany(size_to_fetch):
            if row:
                yield row
            else:
                yield None
                return

    def fetchall(self):
        self._check_cursor_existenz()
        return self.cursor.fetchall()
    def _is_fetchable(self, sql):
        # Heuristic: only SELECT and PRAGMA statements produce result rows.
        self._check_cursor_existenz()
        sql = sql.lower()
        return "select" in sql or "pragma" in sql

    def execute(self, sql, *args, **kwargs):
        self._check_cursor_existenz()
        self.cursor.execute(sql, *args, **kwargs)
        return self

    def executemany(self, sql, *args, **kwargs):
        self._check_cursor_existenz()
        self.cursor.executemany(sql, *args, **kwargs)
        return self

    def executescript(self, sql, *args, **kwargs):
        self._check_cursor_existenz()
        self.cursor.executescript(sql, *args, **kwargs)
        return self
    @property
    def rowcount(self):
        self._check_cursor_existenz()
        return self.cursor.rowcount

    @property
    def lastrowid(self):
        self._check_cursor_existenz()
        return self.cursor.lastrowid

    @property
    def description(self):
        self._check_cursor_existenz()
        return self.cursor.description

# ---- zas-rep-tools 0.2 :: zas_rep_tools/src/classes/sql/basic.py ----
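# Usage sketch (table and column names are illustrative): note that
# fetchmany() above is a generator, so rows are consumed lazily instead of
# being returned as a list like in the plain DB-API:
#
#     conn = BasicConnection().connect("corpus.db")
#     cur = conn.cursor().execute("SELECT id, text FROM documents;")
#     for row in cur.fetchmany(1000):    # yields at most 1000 rows
#         if row is None:                # emitted once if a falsy row is hit
#             break
#         print(row)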
import logging
import datetime
import time
import os
import sys
from collections import defaultdict
import codecs
import traceback
import inspect
import gc
from logutils.colorize import ColorizingStreamHandler
from zas_rep_tools.src.utils.debugger import p
from zas_rep_tools.src.utils.custom_exceptions import StackTraceBack


def clear_logger():
    # Reset all class-level state of ZASLogger (handlers, loggers, folders),
    # so a fresh root logger can be configured afterwards.
    del ZASLogger._root_logger
    del ZASLogger._handlers
    del ZASLogger._loggers
    ZASLogger._root_logger = None
    ZASLogger._creation_day = None
    ZASLogger._creation_time = None
    ZASLogger._loggers = defaultdict(dict)
    ZASLogger._handlers = defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))
    ZASLogger._event_folder = None
    ZASLogger._save_lower_debug = False
    ZASLogger._save_debug = False
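# Usage sketch: ZASLogger keeps its loggers and handlers in class-level
# dictionaries, so state survives across instantiations. Calling
# clear_logger() between independent runs (e.g. between unit tests) resets
# that shared state ("MyTest" is an illustrative name):
#
#     clear_logger()
#     logger = ZASLogger("MyTest", level=logging.DEBUG).getLogger()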
class ZASLogger(object):
_root_logger = None
_creation_day = None
_creation_time = None
_loggers = defaultdict(dict)
_handlers = defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))
_event_folder = None
_save_lower_debug = False
_save_debug = False
    _set_functon_name_as_event_name = False
    def __init__(self, logger_name, level=logging.INFO, folder_for_log="logs", logger_usage=True, save_logs=False):
        folder_for_log = folder_for_log if folder_for_log else "logs"
        # Initialize the root logger once per process.
        if ZASLogger._root_logger is None:
            ZASLogger._root_logger = logging.getLogger()
            self._set_filters(ZASLogger._root_logger)
        self._logger_name = logger_name
        self._level = level
        self._folder_for_log = folder_for_log
        self._logger_usage = logger_usage
        self._save_logs = save_logs
        if self._logger_usage:
            self.update_root_logger()
        self._formatter = logging.Formatter(u'%(asctime)7s [%(name)s] %(threadName)11s:{%(sthread)s} [%(funcName)s()] %(levelname)5s \n %(message)s \n', "%H:%M:%S")
        self._logger = None
        self.day_folder = None
        self.time_folder = None
        self.level_functions = {}
        if self._logger_usage:
            self._set_day()
            self._set_time()
            self._set_dirs()
    def _close_handlers(self):
        # Flush and close every handler registered for this logger name.
        if ZASLogger._handlers[self._logger_name]:
            for handler_name, handler in ZASLogger._handlers[self._logger_name].items():
                if handler_name == 'FileHandler':
                    for file_handler in handler.values():
                        file_handler.flush()
                        file_handler.close()
                        del file_handler
                else:
                    handler.flush()
                    handler.close()
                    del handler
            del ZASLogger._handlers[self._logger_name]
        gc.collect()
    def update_root_logger(self):
        # The root logger must be at least as verbose as any child logger.
        if self._level < ZASLogger._root_logger.level:
            ZASLogger._root_logger.setLevel(self._level)
        if self._save_logs:
            ZASLogger._root_logger.setLevel(1)

    def getLogger(self):
        self._set_usage()
        self._logger = logging.getLogger(self._logger_name)
        if not self._logger_usage:
            self._logger.handlers = []
        ZASLogger._loggers[self._logger_name] = self._logger
        self._set_filters()
        self._add_additional_levels()
        if self._logger_usage:
            self._add_stream_handler()
            if self._save_logs:
                self._makedirs()
                self._add_multihandler()
                self._add_default_file_handlers()
        return self._logger

    def _set_usage(self):
        if self._logger_usage:
            logging.disable(logging.NOTSET)
        else:
            # Silence everything and drop any registered handlers.
            logging.disable(sys.maxint)
            self._root_logger.setLevel(sys.maxint)
            ZASLogger._handlers[self._logger_name]["StreamHandler"] = []
            ZASLogger._handlers[self._logger_name]["FileHandler"] = defaultdict(lambda: [])
            ZASLogger._handlers["MultiHandler"] = []
    def _clear(self):
        clear_logger()

    def _set_dirs(self):
        self.day_folder = os.path.join(self._folder_for_log, ZASLogger._creation_day)
        self.time_folder = os.path.join(self.day_folder, ZASLogger._creation_time)

    def _makedirs(self):
        # Create the per-event log folder. If requested, the folder is named
        # after the test function found on the call stack; otherwise the
        # start time is used. A numeric suffix is appended until a free
        # folder name is found.
        if self._save_logs and self._logger_usage:
            if not ZASLogger._event_folder or not os.path.isdir(ZASLogger._event_folder):
                i = 0
                status = True
                func_name = None
                if ZASLogger._set_functon_name_as_event_name:
                    status2 = True
                    st = inspect.stack()
                    stack_index = -15
                    while status2:
                        try:
                            current_stack_depth = st[stack_index]
                            func_name = current_stack_depth[3]
                            if "test_" in func_name.lower():
                                module_name = os.path.splitext(os.path.basename(current_stack_depth[1]))[0]
                                status2 = False
                                break
                            else:
                                if stack_index < -25:
                                    p("FunctionNameError: The function name of the test case could not be found.", "ERROR", c="r")
                                    func_name = "none"
                                    module_name = "none"
                                    status2 = False
                                    break
                                stack_index -= 1
                        except Exception:
                            p("FunctionNameError: (IndexOutOfList) The function name of the test case could not be found.", "ERROR", c="r")
                            func_name = "none"
                            module_name = "none"
                            status2 = False
                    temp_folder = "{}__{}".format(module_name, func_name)
                    clear_path = os.path.join(self.day_folder, temp_folder)
                else:
                    clear_path = self.time_folder
                temp_path = clear_path
                while status:
                    i += 1
                    if not os.path.isdir(temp_path):
                        os.makedirs(temp_path)
                        ZASLogger._event_folder = temp_path
                        status = False
                    else:
                        temp_path = "{}--{}".format(clear_path, i)
    def _set_day(self, reset=False):
        if (ZASLogger._creation_day is None) or reset:
            ZASLogger._creation_day = datetime.date.today().strftime("%Y_%m_%d")

    def _set_time(self, reset=False):
        if ZASLogger._creation_time is None or reset:
            ZASLogger._creation_time = time.strftime('%H_%M')

    def _set_filters(self, logger=None):
        logger = logger if logger else self._logger
        f = ContextFilter()
        logger.addFilter(f)
    def _check_stream_hander_uniqness(self, current_handler):
        # Drop any already-registered handler of the same type before the
        # new one is added.
        self._logger.handlers = [hndlr for hndlr in self._logger.handlers
                                 if type(current_handler).__name__ != type(hndlr).__name__]

    def handlers(self):
        output = defaultdict(list)
        for hndl in self._logger.handlers:
            output[type(hndl).__name__].append(hndl.level)
        return output
    def _add_stream_handler(self):
        # Attach a colored stream handler, unless one with a level at least
        # as verbose is already registered for this logger.
        if not ZASLogger._handlers[self._logger_name]["StreamHandler"] or ZASLogger._handlers[self._logger_name]["StreamHandler"].level > self._level:
            handler = RainbowLoggingHandler(sys.stdout)
            self._check_stream_hander_uniqness(handler)
            handler.setLevel(self._level)
            ZASLogger._handlers[self._logger_name]["StreamHandler"] = handler
            self._logger.addHandler(handler)
            self._logger.low_debug("StreamHandler for '{}'-level in '{}'-Logger was added.".format(self._level, self._logger_name))
        else:
            self._logger.low_debug("StreamHandler for '{}'-level in '{}'-Logger already exists. No new handler was added.".format(self._level, self._logger_name))

    def _add_default_file_handlers(self):
        self._add_file_handler(logging.CRITICAL, "critical", sealed_off=False)
        self._add_file_handler(logging.ERROR, "error", sealed_off=True)
        self._add_file_handler(logging.WARNING, "warning", sealed_off=True)
        self._add_file_handler(logging.INFO, "info", sealed_off=False)
        if self._level <= 10 or ZASLogger._save_debug:
            self._add_file_handler(logging.DEBUG, "debug", sealed_off=False)
    def _add_file_handler(self, level, fname, sealed_off=True):
        if not ZASLogger._handlers[self._logger_name]["FileHandler"][level]:
            if self._save_logs:
                handler = logging.FileHandler(os.path.join(ZASLogger._event_folder, "{}.log".format(fname)), mode="a", encoding="utf-8", delay=True)
                handler.setLevel(level)
                handler.setFormatter(self._formatter)
                self._check_file_hander_uniqness(handler)
                if sealed_off:
                    # Keep records of higher levels out of this file.
                    handler.addFilter(SealedOffFilter(level))
                ZASLogger._handlers[self._logger_name]["FileHandler"][level] = handler
                self._logger.addHandler(handler)
                self._logger.low_debug("FileHandler for '{}'-level in '{}'-Logger was added.".format(level, self._logger_name))
        else:
            self._logger.low_debug("FileHandler for '{}'-level in '{}'-Logger already exists. No new handler was added.".format(level, self._logger_name))
    def _add_multihandler(self, sealed_off=True):
        multi_handler = MultiHandler(ZASLogger._event_folder)
        multi_handler.setLevel(self._level)
        multi_handler.setFormatter(self._formatter)
        if not ZASLogger._handlers["MultiHandler"]:
            ZASLogger._handlers["MultiHandler"] = multi_handler
            ZASLogger._root_logger.addHandler(multi_handler)
            self._logger.low_debug("MultiHandler for '{}'-level was added.".format(self._level))
        else:
            if ZASLogger._handlers["MultiHandler"].level > self._level:
                old_level = ZASLogger._handlers["MultiHandler"].level
                ZASLogger._handlers["MultiHandler"] = multi_handler
                ZASLogger._root_logger.handlers = []
                ZASLogger._root_logger.addHandler(multi_handler)
                self._logger.low_debug("MultiHandler for '{}'-level already exists and was changed to the '{}'-level.".format(old_level, self._level))
    def _check_file_hander_uniqness(self, current_handler):
        # Drop any already-registered handler of the same type and level
        # before the new one is added.
        self._logger.handlers = [hndlr for hndlr in self._logger.handlers
                                 if not (type(current_handler).__name__ == type(hndlr).__name__
                                         and current_handler.level == hndlr.level)]
    def function_builder(self, level_name, level_num, logger_name, logger, sealed_off=True, **kws):
        # Build the logging method for a custom level. The file handler for
        # that level is created lazily on first use.
        def current_level_logger(message, *args, **kws):
            try:
                l = ZASLogger._handlers[logger_name].get("FileHandler", False).get(level_num, False)
            except Exception:
                l = False
            if l is False:
                if self._save_logs and self._logger_usage:
                    self._makedirs()
                    if level_num == 9:
                        if ZASLogger._save_lower_debug:
                            self._add_file_handler(level_num, level_name.lower(), sealed_off=sealed_off)
                    else:
                        self._add_file_handler(level_num, level_name.lower(), sealed_off=sealed_off)
            logger.log(level_num, message, **kws)
        return current_level_logger

    def _set_level(self, level_name, level_num, sealed_off=True):
        # Register the level name/number pair and attach the generated
        # logging method to this logger instance.
        self._add_level(level_name.upper(), log_num=level_num, custom_log_module=logging)
        self.level_functions[level_name.lower()] = self.function_builder(level_name, level_num, self._logger_name, self._logger, sealed_off=sealed_off)
        setattr(self._logger, level_name.lower(), self.level_functions[level_name.lower()])
def _add_additional_levels(self):
self._set_level("low_debug", 9, sealed_off=False)
self._set_level("outsorted_stats", 8)
self._set_level("outsorted_corpus", 7)
self._set_level("outsorted_reader", 6)
self._set_level("error_insertion", 55)
self._set_level("healed", 4)
self._set_level("status", 3)
self._set_level("test", 2)
self._set_level("settings", 1)
    def _add_level(self, log_name, custom_log_module=None, log_num=None,
                   log_call=None, lower_than=None, higher_than=None,
                   same_as=None, verbose=True):
        '''
        Dynamically add a new log level to a given custom logging module.

        <custom_log_module>: the logging module; if not provided, a copy of
            the stdlib <logging> module is used.
        <log_name>: the logging level name.
        <log_num>: the logging level number. If not provided, the function
            checks <lower_than>, <higher_than> and <same_as>, in that order;
            one of those three parameters must hold the name of an already
            existing logging level.
        '''
        if custom_log_module is None:
            import imp
            custom_log_module = imp.load_module('custom_log_module',
                                                *imp.find_module('logging'))
        log_name = log_name.upper()

        def cust_log(par, message, *args, **kws):
            # Yes, logger takes its '*args' as 'args'.
            if par.isEnabledFor(log_num):
                par._log(log_num, message, args, **kws)

        available_levels = {key: custom_log_module._levelNames[key]
                            for key in custom_log_module._levelNames
                            if isinstance(key, str)}
        if log_num is None:
            try:
                if lower_than is not None:
                    log_num = available_levels[lower_than] - 1
                elif higher_than is not None:
                    log_num = available_levels[higher_than] + 1
                elif same_as is not None:
                    log_num = available_levels[same_as]
                else:
                    raise Exception('Information about the log_num should be provided')
            except KeyError:
                raise Exception('Non-existent logging level name')
        custom_log_module.addLevelName(log_num, log_name)
        if log_call is None:
            log_call = log_name.lower()
        setattr(custom_log_module.Logger, log_call, cust_log)
        return custom_log_module
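# Minimal stand-alone sketch of the same technique using only the stdlib
# (independent of the ZASLogger machinery): register a name/number pair and
# attach a convenience method to logging.Logger:
#
#     import logging
#     logging.addLevelName(9, "LOW_DEBUG")
#     def low_debug(self, message, *args, **kws):
#         if self.isEnabledFor(9):
#             self._log(9, message, args, **kws)
#     logging.Logger.low_debug = low_debug
#     logging.getLogger("demo").low_debug("emitted only if level <= 9")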
# A special handler that logs to per-thread-name files.
# Note: the locking strategy here uses the handler-global lock; a per-thread
# or per-file lock would probably suffice.
class MultiHandler(logging.Handler):
    def __init__(self, dirname):
        super(MultiHandler, self).__init__()
        self.files = {}
        self.dirname = dirname
        if not os.access(dirname, os.W_OK):
            raise Exception("Directory %s not writeable" % dirname)
    def flush(self):
        self.acquire()
        try:
            for fp in self.files.values():
                fp.flush()
        finally:
            self.release()

    def _get_or_open(self, key):
        "Get the file pointer for the given key, or else open the file."
        self.acquire()
        try:
            if key in self.files:
                return self.files[key]
            else:
                fp = codecs.open(os.path.join(self.dirname, "%s.log" % key), "a", encoding="utf-8")
                self.files[key] = fp
                return fp
        finally:
            self.release()

    def emit(self, record):
        # No lock here; follows the code of StreamHandler and FileHandler.
        try:
            if "sthread" not in record.__dict__:
                record.sthread = str(record.thread)[10:] if len(
                    str(record.thread)) > 10 else str(record.thread)
            fp = self._get_or_open(record.threadName)
            msg = self.format(record)
            fp.write('%s\n' % msg.encode("utf-8"))
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            self.handleError(record)
class SealedOffFilter(object):
    '''
    Pass only records at or below the given level, i.e. ignore records
    coming from higher levels.
    '''
    def __init__(self, level):
        self.__level = level

    def filter(self, logRecord):
        return logRecord.levelno <= self.__level
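# Usage sketch: attached to a WARNING file handler, the filter keeps
# ERROR/CRITICAL records out of warning.log (a handler level alone only
# sets a lower bound, so higher levels would otherwise leak in):
#
#     handler = logging.FileHandler("warning.log")
#     handler.setLevel(logging.WARNING)                    # floor
#     handler.addFilter(SealedOffFilter(logging.WARNING))  # ceiling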
class ContextFilter(logging.Filter):
    """
    Filter that injects contextual information into each record: a shortened
    thread id ('sthread') and, for the dynamically generated level methods,
    the name of the real calling function instead of 'current_level_logger'.
    """
    def filter(self, record):
        record.sthread = str(record.thread)[10:] if len(
            str(record.thread)) > 10 else str(record.thread)
        if record.funcName == "current_level_logger":
            try:
                record.funcName = traceback.extract_stack()[-8][2]
            except Exception:
                record.funcName = "UNKNOWN_FUNCTION"
        return True
class DuplicateFilter(logging.Filter):
    def filter(self, record):
        # Suppress a record if it is identical to the previous one; add
        # other fields for a more granular comparison if needed.
        current_log = (record.module, record.levelno, record.msg)
        if current_log != getattr(self, "last_log", None):
            self.last_log = current_log
            return True
        return False
class RainbowLoggingHandler(ColorizingStreamHandler):
    """ A colorful logging handler optimized for terminal debugging aesthetics.
    - Designed for diagnosis and debug-mode output - not for disk logs
    - Highlights the content of the logging message in a more readable manner
    - Shows function and line, so you can trace where your logging messages
      are coming from
    - Keeps timestamps compact
    - Extra module/function output for traceability
    The class provides a few options as member variables that you might
    want to customize after instantiating the handler.
    """
date_format = "%H:%M:%S"
#: How many characters reserve to function name logging
who_padding = 22
#: Show logger name
show_name = True
# Define color for message payload
level_map = {
logging.DEBUG: (None, 'cyan', False),
logging.INFO: (None, 'white', True),
logging.WARNING: ("yellow", 'white', True),
logging.ERROR: ( 'red', "white", True),
logging.CRITICAL: ('magenta', 'white', True),
}
def get_color(self, fg=None, bg=None, bold=False):
"""
Construct a terminal color code
:param fg: Symbolic name of foreground color
:param bg: Symbolic name of background color
:param bold: Brightness bit
"""
params = []
if bg in self.color_map:
params.append(str(self.color_map[bg] + 40))
if fg in self.color_map:
params.append(str(self.color_map[fg] + 30))
if bold:
params.append('1')
color_code = ''.join((self.csi, ';'.join(params), 'm'))
return color_code
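    # Worked example (codes depend on the color_map inherited from logutils'
    # ColorizingStreamHandler, where e.g. red=1, cyan=6, white=7):
    #
    #     handler.get_color(fg="white", bg="red", bold=True)
    #     # -> "\x1b[41;37;1m"  (CSI, background 40+1, foreground 30+7, bold)
    #     handler.get_color(fg="cyan")
    #     # -> "\x1b[36m"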
def colorize(self, record):
"""
Get a special format string with ASCII color codes.
"""
# Dynamic message color based on logging level
if record.levelno in self.level_map:
fg, bg, bold = self.level_map[record.levelno]
else:
# Defaults
bg = None
fg = "white"
bold = False
template = [
"[",
self.get_color("gray", None, False),
"%(asctime)s",
self.reset,
"] ",
#self.get_color("white", None, True) if self.show_name else "",
self.get_color(bg, fg, bold),
"%(name)s" if self.show_name else "",
self.reset,
self.get_color("green", None, False),
" %(threadName)s:",
self.reset,
#"{%(process)d} ",
"{%(sthread)s} ",
#self.get_color("red", None, False),
self.get_color(bg, fg, bold),
"%(levelname)s",
self.reset,
self.get_color("yellow", None, False),
" %(message)s",
self.reset,
self.get_color("blue", None, False),
" [%(padded_who)s]\n",
self.reset,
]
#'%(asctime)7s [%(name)s] %(threadName)11s:{%(sthread)s} %(levelname)5s %(message)s '
format = "".join(template)
who = [self.get_color("blue"),
getattr(record, "funcName", ""),
"()",
#self.get_color("black", None, True),
# ":",
# self.get_color("cyan"),
# str(getattr(record, "lineno", 0))
]
who = "".join(who)
        record.padded_who = who
        formatter = logging.Formatter(format, self.date_format)
        self.colorize_traceback(formatter, record)
        output = formatter.format(record)
        # Clear the cache so the color codes of the traceback don't leak
        # into other formatters.
        record.exc_text = None
        return output
    def colorize_traceback(self, formatter, record):
        """
        Turn the traceback text into different colours.
        """
        if record.exc_info:
            # Cache the traceback text to avoid converting it multiple times
            # (it's constant anyway).
            output_list = []
            for line in formatter.formatException(record.exc_info).split("\n"):
                if "Traceback" in line:
                    output_list.append(self.get_color("red", None, True))
                    output_list.append(line)
                    output_list.append("\n")
                else:
                    if "File" in line:
                        # Highlight the 'File ... line ... in ...' tokens.
                        new_line = []
                        for token in line.split(" "):
                            if "file" in token.lower():
                                new_line.append(" ")
                                new_line.append(self.reset)
                                new_line.append(self.get_color("gray", None, True))
                                new_line.append(token)
                                new_line.append(self.get_color("magenta", None, False))
                            elif "line" == token:
                                new_line.append(self.reset)
                                new_line.append(self.get_color("gray", None, True))
                                new_line.append(token)
                                new_line.append(self.get_color("magenta", None, False))
                            elif "in" == token:
                                new_line.append(self.reset)
                                new_line.append(self.get_color("gray", None, True))
                                new_line.append(token)
                                new_line.append(self.get_color("magenta", None, False))
                            else:
                                new_line.append(token)
                        output_list.append(" ".join(new_line))
                    else:
                        output_list.append("\n ")
                        output_list.append(self.get_color("magenta"))
                        output_list.append(line)
                        output_list.append("\n")
                        output_list.append(self.reset)
            record.exc_text = "".join(output_list) + "\n\n\n\n"
    def format(self, record):
        """
        Formats a record for output.
        Takes a custom formatting path on a terminal.
        """
        if self.is_tty:
            message = self.colorize(record)
        else:
            message = logging.StreamHandler.format(self, record)
        return message

# ---- zas-rep-tools 0.2 :: zas_rep_tools/src/utils/zaslogger.py ----
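# End-to-end usage sketch for this module ("Corpus" is an illustrative
# logger name):
#
#     L = ZASLogger("Corpus", level=logging.INFO,
#                   folder_for_log="logs", logger_usage=True, save_logs=True)
#     logger = L.getLogger()
#     logger.info("colored on the terminal, mirrored into logs/<day>/<time>/")
#     logger.low_debug("custom level 9, added by _add_additional_levels()")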
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# : Error Tracking (Sentry/Raven initialisation)
# Author:
# c(Developer) -> {'Egor Savin'}
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

### Program Info ###

import platform

from raven import Client
from raven.processors import SanitizePasswordsProcessor

from zas_rep_tools.src.utils.debugger import p

# Raven API reference:
# https://github.com/getsentry/raven-python/blob/master/docs/api.rst
def initialisation():
    # Collect platform/python metadata that is attached to every Sentry event.
    platform_info = {"platform": platform.platform(), "uname": platform.uname(), "system": platform.system(), "processor": platform.processor(), "machine": platform.machine(), "version": platform.version(), "architecture": platform.architecture()}
    python_info = {"python_build": platform.python_build(), "python_compiler": platform.python_compiler(), "python_implementation": platform.python_implementation(), "python_version": platform.python_version()}
    user_info = {"platform_info": platform_info, "python_info": python_info}
client = Client(dsn='https://0ec650403a06441aa6075e14322a9b15:[email protected]/1213596',
auto_log_stacks=True,
include_paths=[__name__.split('.', 1)[0]],
release = '0.1',
#user = user_info,
#intern_attribute = self.__dict__,
ignore_exceptions = [
'Http404'
],
processors = (
'raven.processors.SanitizePasswordsProcessor',
),
sanitize_keys = ['_consumer_key', '_consumer_secret', '_access_token', '_access_token_secret'],
# pass along the version of your application
# release='1.0.0'
# release=raven.fetch_package_version('my-app')
#release=raven.fetch_git_sha(os.path.dirname(__file__)),
)
    # Widen the sanitizer's key list so that Twitter credentials are also
    # scrubbed before events are sent.
    client.module_cache['raven.processors.SanitizePasswordsProcessor'].KEYS = frozenset(['sentry_dsn', 'password', 'passwd', 'access_token', 'secret', 'apikey', 'api_key', 'authorization', '_consumer_key', '_consumer_secret', '_access_token', '_access_token_secret'])
    client.context.merge({'user': user_info})
    return client

# ---- zas-rep-tools 0.2 :: zas_rep_tools/src/utils/error_tracking.py ----
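# Usage sketch (risky_operation is a placeholder): the returned raven client
# is typically used to report caught exceptions to Sentry:
#
#     client = initialisation()
#     try:
#         risky_operation()
#     except Exception:
#         client.captureException()   # sends the current traceback to Sentry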
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# : Traceback Handling
# Author:
# c(Developer) -> {'Egor Savin'}
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

### Program Info ###

import sys
import os
import inspect
import traceback

from zas_rep_tools.src.utils.debugger import p
def print_exc_plus():
    """
    Print the usual traceback information, followed by a listing of all the
    local variables in each frame.
    """
    p("START Extended Traceback", "!!!")
    tb = sys.exc_info()[2]
    # Walk down to the innermost traceback frame.
    while 1:
        if not tb.tb_next:
            break
        tb = tb.tb_next
    stack = []
    f = tb.tb_frame
    while f:
        stack.append(f)
        f = f.f_back
    stack.reverse()
    traceback.print_exc()
    print "Locals by frame, innermost last"
    for frame in stack:
        print
        print "Frame %s in %s at line %s" % (frame.f_code.co_name,
                                             frame.f_code.co_filename,
                                             frame.f_lineno)
        for key, value in frame.f_locals.items():
            print "\t%20s = " % key,
            # We have to be VERY careful not to cause a new error in our error
            # printer! Calling str() on an unknown object could cause an
            # error we don't want, so we must use try/except to catch it --
            # we can't stop it from happening, but we can and should
            # stop it from propagating if it does happen!
            try:
                print value
            except:
                print "<ERROR WHILE PRINTING VALUE>"
    p("END Extended Traceback", "!!!")
def raising_code_info():
    # Best-effort lookup of "module.Class.method, line N" for the frame that
    # raised the current exception.
    code_info = ''
    try:
        frames = inspect.trace()
        if len(frames):
            full_method_name = frames[0][4][0].rstrip('\n\r').strip()
            line_number = frames[1][2]
            module_name = frames[0][0].f_globals['__name__']
            if module_name == '__main__':
                module_name = os.path.basename(sys.argv[0]).replace('.py', '')
            class_name = ''
            obj_name_dot_method = full_method_name.split('.', 1)
            if len(obj_name_dot_method) > 1:
                obj_name, full_method_name = obj_name_dot_method
                try:
                    class_name = frames[0][0].f_locals[obj_name].__class__.__name__
                except Exception:
                    pass
            method_name = module_name + '.'
            if len(class_name) > 0:
                method_name += class_name + '.'
            method_name += full_method_name
            code_info = '%s, line %d' % (method_name, line_number)
    finally:
        # Break the reference cycle (frames -> this frame) before returning.
        del frames
        sys.exc_clear()
    return code_info

# ---- zas-rep-tools 0.2 :: zas_rep_tools/src/utils/traceback_helpers.py ----
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# : Corpus Helpers (metadata about supported NLP tools, tagsets and languages)
# Author:
# c(Developer) -> {'Egor Savin'}
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

### Program Info ###
class CorpusData(object):
invalid_symbols_in_colnames = list(u"{}[]:,;-()<>=#@?^°")+[u" "]
info = {
"tagger":{
"tweetnlp":{
"url": "http://www.cs.cmu.edu/~ark/TweetNLP/",
"paper":"http://www.cs.cmu.edu/~ark/TweetNLP/owoputi+etal.naacl13.pdf",
"tagset": {
"en":{
"data":{
'!': 'interjection',
'#': 'hashtag (indicates topic/category for tweet)',
'$': 'numeral',
'&': 'coordinating conjunction',
',': 'punctuation',
'@': 'at-mention (indicates a user as a recipient of a tweet)',
'A': 'adjective',
'D': 'determiner',
'E': 'emoticon',
'G': 'other abbreviations, foreign words, possessive endings, symbols, garbage',
'L': u'nominal + verbal (e.g. i\u2019m), verbal + nominal (let\u2019s)',
'M': 'proper noun + verbal',
'N': 'common noun',
'O': 'pronoun (personal/WH; not possessive)',
'P': 'pre- or postposition, or subordinating conjunction',
'R': 'adverb',
'S': 'nominal + possessive',
'T': 'verb particle',
'U': 'URL or email address',
'V': 'verb including copula, auxiliaries',
'X': 'existential there, predeterminers',
'Y': 'X + verbal',
'Z': 'proper noun + possessive',
'^': 'proper noun',
'~': 'discourse marker, indications of continuation across multiple tweets'
},
},
}
},
"someweta":{
"url": "https://github.com/tsproisl/SoMeWeTa",
"paper":"http://www.lrec-conf.org/proceedings/lrec2018/pdf/49.pdf",
"tagset":{
"de":{
"name": "STTS IBK tagset",
"url":["http://www.ims.uni-stuttgart.de/forschung/ressourcen/lexika/TagSets/stts-table.html", ],
"paper":"https://ids-pub.bsz-bw.de/frontdoor/deliver/index/docId/5065/file/Beisswenger_Bartz_Storrer_Tagset_und_Richtlinie_fuer_das_PoS_Tagging_2015.pdf",
"data":{
'$(': 'sonstige Satzzeichen; satzintern',
'$,': 'Komma',
'$.': 'Satzbeendende Interpunktion',
'ADJA': 'attributives Adjektiv',
'ADJD': u'adverbiales oder pr\xe4dikatives Adjektiv',
'ADR': 'Adressierung',
'ADV': 'Adverb',
'ADVART': 'Kontraktion: Adverb + Artikel',
'AKW': 'Aktionswort',
'APPO': 'Postposition',
'APPR': u'Pr\xe4position, Zirkumposition links',
'APPRART': u'Pr\xe4position mit Artikel',
'APZR': 'Zirkumposition rechts',
'ART': 'bestimmter oder unbestimmter Artikel',
'CARD': 'Kardinalzahl',
'DM': 'Diskursmarker',
'EML': 'E-Mail-Adresse',
'EMOASC': u'Emoticon, als Zeichenfolge dargestellt (Typ \u201eASCII\u201c)',
'EMOIMG': u'Emoticon, als Grafik-Ikon dargestellt (Typ \u201eImage\u201c)',
'FM': 'Fremdsprachliches Material',
'HST': 'Hashtag',
'ITJ': 'Interjektion',
'KOKOM': 'Vergleichspartikel ohne Satz',
'KON': 'nebenordnende Konjunktion',
'KOUI': u'unterordnende Konjunktion mit \u201ezu\u201c und Infinitiv',
'KOUS': 'unterordnende Konjunktion mit Satz (VL-Stellung)',
'KOUSPPER': 'Kontraktion: unterordnende Konjunktion mit Satz (VL-Stellung) + irreflexives Personalpronomen',
'NE': 'Eigennamen',
'NN': 'Appellativa',
'ONO': 'Onomatopoetikon',
'PAV': 'Pronominaladverb',
'PDAT': 'attributierendes Demonstrativpronomen',
'PDS': 'substituierendes Demonstrativpronomen',
'PIAT': 'attributierendes Indefinitpronomen ohne Determiner',
'PIDAT': 'attributierendes Indefinitpronomen mit Determiner',
'PIS': 'substituierendes Indefinitpronomen',
'PPER': 'irreflexives Personalpronomen',
'PPERPPER': 'Kontraktion: irreflexives Personalpronomen + irreflexives Personalpronomen',
'PPOSAT': 'attributierendes Possesivpronomen',
'PPOSS': 'substituierendes Possesivpronomen',
'PRELAT': 'attributierendes Relativpronomen',
'PRELS': 'substituierendes Relativpronomen',
'PRF': 'reflexives Personalpronomen',
'PTKA': 'Partikel bei Adjektiv oder Adverb',
'PTKANT': 'Antwortpartikel',
'PTKIFG': u'Intensit\xe4ts-, Fokus- oder Gradpartikel',
'PTKMA': u'Modal- oder Abt\xf6nungspartikel',
'PTKMWL': 'Partikel als Teil eines Mehrwort-Lexems',
'PTKNEG': 'Negationspartikel',
'PTKVZ': 'abgetrennter Verbzusatz',
'PTKZU': u'\u201ezu\u201c vor Infinitiv',
'PWAT': 'attributierendes Interrogativpronomen',
'PWAV': 'adverbiales Interrogativ- oder Relativpronomen',
'PWS': 'substituierendes Interrogativpronomen',
'TRUNC': 'Kompositions-Erstglied',
'URL': 'Uniform Resource Locator',
'VAFIN': 'finites Verb, aux',
'VAIMP': 'Imperativ, aux',
'VAINF': 'Infinitiv, aux',
'VAPP': 'Partizip Perfekt, aux',
'VAPPER': 'Kontraktion: Auxiliarverb + irreflexives Personalpronomen',
'VMFIN': 'finites Verb, modal',
'VMINF': 'Infinitiv, modal',
'VMPP': 'Partizip Perfekt, modal',
'VMPPER': 'Kontraktion: Modalverb + irreflexives Personalpronomen',
'VVFIN': 'finites Verb, voll',
'VVIMP': 'Imperativ, voll',
'VVINF': 'Infinitiv, voll',
'VVIZU': u'Infinitiv mit \u201ezu\u201c, voll',
'VVPP': 'Partizip Perfekt, voll',
'VVPPER': 'Kontraktion: Vollverb + irreflexives Personalpronomen',
'XY': 'Nichtwort, Sonderzeichen enthaltend'
},
},
"en":{
"name": "Treebank-3",
"url":["https://catalog.ldc.upenn.edu/ldc99t42", ],
"paper":"https://catalog.ldc.upenn.edu/docs/LDC99T42/tagguid1.pdf",
"data":{
'CC': 'Coordinating conjunction',
'CD': 'Cardinal number',
'DT': 'Determiner',
'EX': 'Existential there',
'FW': 'Foreign word',
'IN': 'Preposition or subordinating',
'JJ': 'Adjective',
'JJR': 'Adjective, comparative',
'JJS': 'Adjective, superlative',
'LS': 'List item marker',
'MD': 'Modal',
'NN': 'Noun, singular or mass',
'NNP': 'Proper noun, singular',
'NNPS': 'Proper noun, plural',
'NNS': 'Noun, plural',
'PDT': 'Predeterminer',
'PRP$': 'Possessive pronoun',
'POS': 'Possessive ending',
'PRP': 'Personal pronoun',
'RB': 'Adverb',
'RBR': 'Adverb, comparative',
'RBS': 'Adverb, superlative',
'RP': 'Particle',
'SYM': 'Symbol',
'TO': 'to',
'UH': 'Interjection',
'VB': 'Verb, base form',
'VBD': 'Verb, past tense',
'VBG': 'Verb, gerund or present participle',
'VBN': 'Verb,past participle',
'VBP': 'Verb, non-3rd person singular present',
'VBZ': 'Verb, 3rd person singular present',
'WDT': 'Wh-determiner',
'WP': 'Wh-pronoun',
'WP$': 'Possessive wh-pronoun',
'WRB': 'Wh-adverb',
"''": 'punctuation',
},
},
},
}
},
"splitter":{
"somajo":{
"link":"https://github.com/tsproisl/SoMaJo",
"supported_lang":"de, en, fr",
},
},
"tokenizer":{
"somajo":{
"link":"https://github.com/tsproisl/SoMaJo",
"supported_lang":"de, en,fr",
},
"nltk":{
"link":"http://www.nltk.org",
"supported_lang":"en",
},
},
"stemmer":{
"pystemmer":{
"algorithm":"snowball",
"links":["https://github.com/snowballstem/pystemmer", "http://snowball.tartarus.org"],
}
},
"lang_classification": {
"langid":{
"link":"https://github.com/saffsd/langid.py",
"supported_lang":"af, am, an, ar, as, az, be, bg, bn, br, bs, ca, cs, cy, da, de, dz, el, en, eo, es, et, eu, fa, fi, fo, fr, ga, gl, gu, he, hi, hr, ht, hu, hy, id, is, it, ja, jv, ka, kk, km, kn, ko, ku, ky, la, lb, lo, lt, lv, mg, mk, ml, mn, mr, ms, mt, nb, ne, nl, nn, no, oc, or, pa, pl, ps, pt, qu, ro, ru, rw, se, si, sk, sl, sq, sr, sv, sw, ta, te, th, tl, tr, ug, uk, ur, vi, vo, wa, xh, zh, zu",
},
},
}
#### Tokenizer
tokenizer_for_languages = {
"en":["somajo","nltk"],
"de":["somajo"],
"test":["somajo"],
}
supported_languages_tokenizer = [key for key in tokenizer_for_languages]
supported_tokenizer = set([v for values in tokenizer_for_languages.itervalues() for v in values])
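    # Worked example of the flattening above: with
    #     {"en": ["somajo", "nltk"], "de": ["somajo"], "test": ["somajo"]}
    # the comprehension yields ["somajo", "nltk", "somajo", "somajo"], and
    # set() reduces that to {"somajo", "nltk"} -- i.e. every tool that is
    # named for at least one language.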
### Sent Splitters
sent_splitter_for_languages = {
"en":["somajo"],
"de":["somajo"],
"test":["somajo"],
}
supported_languages_sent_splitter = [key for key in sent_splitter_for_languages]
supported_sent_splitter = set([v for values in sent_splitter_for_languages.itervalues() for v in values])
###POS-Taggers
pos_tagger_for_languages = {
"en":["someweta", "tweetnlp"],
"de":["someweta"],
"fr":["someweta"],
"test":["tweetnlp"],
}
supported_languages_pos_tagger = [key for key in pos_tagger_for_languages]
supported_pos_tagger = set([v for values in pos_tagger_for_languages.itervalues() for v in values])
pos_tagger_models = {
"tweetnlp":{"en":[]},
"someweta":{
"de":["german_web_social_media_2017-12-20.model", "german_newspaper_for_empirist_2017-12-20.model"],
"en":["english_newspaper_2017-09-15.model"],
"fr":["french_newspaper_2018-06-20.model"],
"test":["english_newspaper_2017-09-15.model"],
}
}
### Sentiment Anylysers
sentiment_analyzer_for_languages = {
"en":["textblob"],
"de":["textblob"],
"fr":["textblob"],
"test":["textblob"],
}
supported_languages_sentiment_analyzer = [key for key in sentiment_analyzer_for_languages]
supported_sentiment_analyzer = set([v for values in sentiment_analyzer_for_languages.itervalues() for v in values])
#### Stemmer
stemmer_for_languages = {
u'da': ["pystemmer",],
u'nl': ["pystemmer",],
u'en': ["pystemmer",],
u'fi': ["pystemmer",],
u'fr': ["pystemmer",],
u'de': ["pystemmer",],
u'hu': ["pystemmer",],
u'it': ["pystemmer",],
u'no': ["pystemmer",],
u'pt': ["pystemmer",],
u'ro': ["pystemmer",],
u'ru': ["pystemmer",],
u'es': ["pystemmer",],
u'sv': ["pystemmer",],
u'tr': ["pystemmer",],
u"test": ["pystemmer",],
}
supported_languages_stemmer = [key for key in stemmer_for_languages]
supported_stemmer = set([v for values in stemmer_for_languages.itervalues() for v in values])
# ---- zas-rep-tools 0.2 :: zas_rep_tools/src/utils/corpus_helpers.py ----
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# : Utilities for the Command Line Interface
# Author:
# c(Student) -> {'Egor Savin'}
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

### Program Info ###
import logging
import inspect
import os
import codecs
import ast
import re
import json
import sys
from blessings import Terminal
import enlighten
#from zas_rep_tools.src.classes.corpus import Corpus
from zas_rep_tools.src.classes.dbhandler import DBHandler
from zas_rep_tools.src.utils.debugger import p
from zas_rep_tools.src.utils.helpers import set_class_mode
from zas_rep_tools.src.utils.zaslogger import ZASLogger
# path_to_zas_rep_tools = os.path.dirname(os.path.dirname(os.path.dirname(inspect.getfile(Corpus))))
# path_to_file_with_twitter_creditials = "user-config/twitter_api_credentials.json"
# path_to_file_with_user_agreements = "user-config/user_agreement.json"
# path_to_file_with_db_settings = "user-config/db_settings.json"
def cli_logger(level=logging.INFO, folder_for_log="stats", logger_usage=True,save_logs=False):
L = ZASLogger("CLI", level=level,
folder_for_log=folder_for_log,
logger_usage=logger_usage,
save_logs=save_logs)
logger = L.getLogger()
return logger
def get_settings(mode):
    # set_class_mode() returns a settings tuple; the indices used here follow
    # its return order (0: level, 2: save_logs, 6: logger_usage).
    settings = tuple(set_class_mode(mode))
    level = settings[0]
    logger_usage = settings[6]
    save_logs = settings[2]
    return level, logger_usage, save_logs


def get_cli_logger(mode, folder_for_log):
    # Forward the mode-derived settings and the given log folder to the
    # logger factory.
    level, logger_usage, save_logs = get_settings(mode)
    logger = cli_logger(level=level, folder_for_log=folder_for_log,
                        logger_usage=logger_usage, save_logs=save_logs)
    return logger
def set_main_folders(project_folder):
    # Create the standard project sub-folders (corpora, stats, export).
    main_folders = {
        "corp": os.path.join(project_folder, "corpora") if project_folder else None,
        "stats": os.path.join(project_folder, "stats") if project_folder else None,
        "export": os.path.join(project_folder, "export") if project_folder else None,
    }
    try:
        for t, path in main_folders.items():
            if not path:
                continue
            if not os.path.isdir(path):
                os.makedirs(path)
    except Exception as e:
        print "ERROR: Folders in the project directory weren't created. ({}) Please select another project directory.".format(repr(e))
    return main_folders
def strtobool(obj):
    # Parse a Python literal (bool, number, list, ...) from a string;
    # fall back to returning the object unchanged.
    try:
        return ast.literal_eval(obj)
    except Exception:
        return obj
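# Examples: ast.literal_eval() parses Python literals and the helper falls
# back to the raw value for anything else:
#
#     strtobool("True")     # -> True (bool)
#     strtobool("3")        # -> 3 (int)
#     strtobool("[1, 2]")   # -> [1, 2] (list)
#     strtobool("hello")    # -> "hello" (unchanged string)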
def get_corp_fname(main_folders):
files = os.listdir(main_folders["corp"])
files = [fname for fname in files if ".db" in fname and ".db-journal" not in fname]
return files
def get_stats_fname(main_folders):
files = os.listdir(main_folders["stats"])
files = [fname for fname in files if ".db" in fname and ".db-journal" not in fname]
return files
def _get_status_bars_manager():
config_status_bar = {'stream': sys.stdout,
'useCounter': True,
"set_scroll": True,
"resize_lock": True
}
enableCounter_status_bar = config_status_bar['useCounter'] and config_status_bar['stream'].isatty()
return enlighten.Manager(stream=config_status_bar['stream'], enabled=enableCounter_status_bar, set_scroll=config_status_bar['set_scroll'], resize_lock=config_status_bar['resize_lock'])
def _get_new_status_bar( total, desc, unit, counter_format=False, status_bars_manager=False):
if counter_format:
counter = status_bars_manager.counter(total=total, desc=desc, unit=unit, leave=True, counter_format=counter_format)
else:
counter = status_bars_manager.counter(total=total, desc=desc, unit=unit, leave=True)
return counter
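# Usage sketch for the two helpers above:
#
#     manager = _get_status_bars_manager()
#     bar = _get_new_status_bar(100, "rows", "row", status_bars_manager=manager)
#     for _ in range(100):
#         bar.update()    # advance the enlighten counter by one
#     manager.stop()      # restore normal terminal scrolling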
def validate_corp(main_folders, files=False):
    # Fall back to all corpus DB files in the project if none are given.
    files = files if isinstance(files, (list, tuple)) else get_corp_fname(main_folders)
    validated = []
    possibly_encrypted = []
    wrong = []
    handl = DBHandler(mode="blind")
    opened_db = []
    for fname in files:
        status = handl._validation_DBfile(os.path.join(main_folders["corp"], fname))
        if status["status"]:
            h = DBHandler(mode="blind")
            h.connect(os.path.join(main_folders["corp"], fname))
            if h.typ() == "corpus":
                validated.append(fname)
                opened_db.append(h)
            else:
                wrong.append(fname)
        else:
            possibly_encrypted.append(fname)
    return validated, possibly_encrypted, wrong, opened_db
def validate_stats(main_folders, files=False):
    # Fall back to all stats DB files in the project if none are given.
    files = files if isinstance(files, (list, tuple)) else get_stats_fname(main_folders)
    validated = []
    possibly_encrypted = []
    wrong = []
    handl = DBHandler(mode="blind")
    opened_db = []
    for fname in files:
        status = handl._validation_DBfile(os.path.join(main_folders["stats"], fname))
        if status["status"]:
            h = DBHandler(mode="blind")
            h.connect(os.path.join(main_folders["stats"], fname))
            if h.typ() == "stats":
                validated.append(fname)
                opened_db.append(h)
            else:
                wrong.append(fname)
        else:
            possibly_encrypted.append(fname)
    return validated, possibly_encrypted, wrong, opened_db

# ---- zas-rep-tools 0.2 :: zas_rep_tools/src/utils/cli_helper.py ----
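# Usage sketch ("/path/to/project" is a placeholder): validate every corpus
# DB in a project folder and report which files could not be opened
# (possibly encrypted) or have the wrong internal type:
#
#     main_folders = set_main_folders("/path/to/project")
#     ok, encrypted, wrong, handles = validate_corp(
#         main_folders, get_corp_fname(main_folders))
#     print(ok, encrypted, wrong)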
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# : General Helpers (shared constants and utility data)
# Author:
# c(Developer) -> {'Egor Savin'}
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

### Program Info ###
import os
import time
import smtplib
import codecs
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from zas_rep_tools.src.utils.db_helper import DBErrorCatcher
import io
import json
import inspect
import zipfile
import socket
import sys
import logging
from ZODB import FileStorage, DB
from zc.lockfile import LockError
import transaction
import copy
import urllib2
import emoji
import re
import regex
import psutil
from decimal import Decimal, ROUND_HALF_UP, ROUND_UP, ROUND_HALF_DOWN, ROUND_DOWN
import threading
import ctypes
from collections import OrderedDict, Callable, defaultdict, Counter
from multiprocessing import Process, RawValue
import traceback
import platform
import unicodecsv
from zas_rep_tools.src.utils.zaslogger import ZASLogger
from zas_rep_tools.src.utils.debugger import p
L = ZASLogger("Helpers", level=logging.INFO,
folder_for_log="stats",
logger_usage=True,
save_logs=False)
logger = L.getLogger()
path_to_zas_rep_tools = os.path.dirname(os.path.dirname(os.path.dirname(inspect.getfile(p))))
modi = ["error","test", "dev","dev+","dev-", "prod", "free", "prod+t", "test+s+","test+s-", "silent", "prod+","prod-", "blind"]
punkt_str = set(""".!:;-"#$%&'()*+,/<=>?@[\\]^_`{|}~""")
emoticons = set(['-(', '-)', '=p', ':-p','-p', '8-)', '=)','=(', ':-)', ':)', '<3', '{}', '(-',')-',')-:','(-:','):','(:', 'o:-)', 'x-(', ':-d', ':-#', ':-(', ':(', ')','(', ':p', ':o', ':-|', 'x-p', ':-)*', ':-*', ':*', 'b-)', ':_(', ":'(", '\\:d/', '*-*', ':o3', '#-o', ':*)', '/_^', '>:)', '<><', '(-}{-)', 'xd', '=d', ')-:', '(-:', '=/', ':-)(-:', '<:3)~', '~,~', ':-b', '^_^', '<l:0', ':-/', '=8)', '@~)~', ':s', ':-@', '=o', ':-o', ':-q', ':>', ':-j', ':-&', '=-o', ':-\\', ':-e', ';-)', ';)', '|-o', '(.v.)', '~:0', '(*v*)', '=^.^=', '</3','*<:o)', 'o.o','$_$', ':->', '8-#' ])
# Patch the emoji table: count the variation selector, zero-width joiner,
# gender signs and skin-tone modifiers as emoji characters as well.
emoji.UNICODE_EMOJI[u'\ufe0f'] = "additional"
emoji.UNICODE_EMOJI[u'\u200d'] = "additional"
emoji.UNICODE_EMOJI[u'\u2640'] = "additional"
emoji.UNICODE_EMOJI[u'\u2642'] = "additional"
emoji.UNICODE_EMOJI[u'\U0001f3ff'] = "additional: skin colour"
emoji.UNICODE_EMOJI[u'\U0001f3fe'] = "additional: skin colour"
emoji.UNICODE_EMOJI[u"\U0001f3fd"] = "additional: skin colour"
emoji.UNICODE_EMOJI[u"\U0001f3fc"] = "additional: skin colour"
emoji.UNICODE_EMOJI[u"\U0001f3fb"] = "additional: skin colour"
from_ISO639_2= {
u'test':u'en',
u'aa': u'afar',
u'ab': u'abkhaz',
u'ae': u'avestan',
u'af': u'afrikaans',
u'ak': u'akan',
u'am': u'amharic',
u'an': u'aragonese',
u'ar': u'arabic',
u'as': u'assamese',
u'av': u'avaric',
u'ay': u'aymara',
u'az': u'azerbaijani',
u'ba': u'bashkir',
u'be': u'belarusian',
u'bg': u'bulgarian',
u'bh': u'bihari',
u'bi': u'bislama',
u'bm': u'bambara',
u'bn': u'bengali',
u'bo': u'tibetan',
u'br': u'breton',
u'bs': u'bosnian',
u'ca': u'catalan; valencian',
u'ce': u'chechen',
u'ch': u'chamorro',
u'co': u'corsican',
u'cr': u'cree',
u'cs': u'czech',
u'cu': u'old church slavonic',
u'cv': u'chuvash',
u'cy': u'welsh',
u'da': u'danish',
u'de': u'german',
u'dv': u'divehi; maldivian;',
u'dz': u'dzongkha',
u'ee': u'ewe',
u'el': u'greek, modern',
u'en': u'english',
u'eo': u'esperanto',
u'es': u'spanish; castilian',
u'et': u'estonian',
u'eu': u'basque',
u'fa': u'persian',
u'ff': u'fula',
u'fi': u'finnish',
u'fj': u'fijian',
u'fo': u'faroese',
u'fr': u'french',
u'fy': u'western frisian',
u'ga': u'irish',
u'gd': u'scottish gaelic',
u'gl': u'galician',
u'gn': u'guaran\xed',
u'gu': u'gujarati',
u'gv': u'manx',
u'ha': u'hausa',
u'he': u'hebrew (modern)',
u'hi': u'hindi',
u'ho': u'hiri motu',
u'hr': u'croatian',
u'ht': u'haitian',
u'hu': u'hungarian',
u'hy': u'armenian',
u'hz': u'herero',
u'ia': u'interlingua',
u'id': u'indonesian',
u'ie': u'interlingue',
u'ig': u'igbo',
u'ii': u'nuosu',
u'ik': u'inupiaq',
u'io': u'ido',
u'is': u'icelandic',
u'it': u'italian',
u'iu': u'inuktitut',
u'ja': u'japanese',
u'jv': u'javanese',
u'ka': u'georgian',
u'kg': u'kongo',
u'ki': u'kikuyu, gikuyu',
u'kj': u'kwanyama, kuanyama',
u'kk': u'kazakh',
u'kl': u'kalaallisut',
u'km': u'khmer',
u'kn': u'kannada',
u'ko': u'korean',
u'kr': u'kanuri',
u'ks': u'kashmiri',
u'ku': u'kurdish',
u'kv': u'komi',
u'kw': u'cornish',
u'ky': u'kirghiz, kyrgyz',
u'la': u'latin',
u'lb': u'luxembourgish',
u'lg': u'luganda',
u'li': u'limburgish',
u'ln': u'lingala',
u'lo': u'lao',
u'lt': u'lithuanian',
u'lu': u'luba-katanga',
u'lv': u'latvian',
u'mg': u'malagasy',
u'mh': u'marshallese',
u'mi': u'm\u0101ori',
u'mk': u'macedonian',
u'ml': u'malayalam',
u'mn': u'mongolian',
u'mr': u'marathi (mar\u0101\u1e6dh\u012b)',
u'ms': u'malay',
u'mt': u'maltese',
u'my': u'burmese',
u'na': u'nauru',
u'nb': u'norwegian bokm\xe5l',
u'nd': u'north ndebele',
u'ne': u'nepali',
u'ng': u'ndonga',
u'nl': u'dutch',
u'nn': u'norwegian nynorsk',
u'no': u'norwegian',
u'nr': u'south ndebele',
u'nv': u'navajo, navaho',
u'ny': u'chichewa; chewa; nyanja',
u'oc': u'occitan',
u'oj': u'ojibwe, ojibwa',
u'om': u'oromo',
u'or': u'oriya',
u'os': u'ossetian, ossetic',
u'pa': u'panjabi, punjabi',
u'pi': u'p\u0101li',
u'pl': u'polish',
u'ps': u'pashto, pushto',
u'pt': u'portuguese',
u'qu': u'quechua',
u'rm': u'romansh',
u'rn': u'kirundi',
u'ro': u'romanian, moldavan',
u'ru': u'russian',
u'rw': u'kinyarwanda',
u'sa': u'sanskrit (sa\u1e41sk\u1e5bta)',
u'sc': u'sardinian',
u'sd': u'sindhi',
u'se': u'northern sami',
u'sg': u'sango',
u'si': u'sinhala, sinhalese',
u'sk': u'slovak',
u'sl': u'slovene',
u'sm': u'samoan',
u'sn': u'shona',
u'so': u'somali',
u'sq': u'albanian',
u'sr': u'serbian',
u'ss': u'swati',
u'st': u'southern sotho',
u'su': u'sundanese',
u'sv': u'swedish',
u'sw': u'swahili',
u'ta': u'tamil',
u'te': u'telugu',
u'tg': u'tajik',
u'th': u'thai',
u'ti': u'tigrinya',
u'tk': u'turkmen',
u'tl': u'tagalog',
u'tn': u'tswana',
u'to': u'tonga',
u'tr': u'turkish',
u'ts': u'tsonga',
u'tt': u'tatar',
u'tw': u'twi',
u'ty': u'tahitian',
u'ug': u'uighur, uyghur',
u'uk': u'ukrainian',
u'ur': u'urdu',
u'uz': u'uzbek',
u've': u'venda',
u'vi': u'vietnamese',
u'vo': u'volap\xfck',
u'wa': u'walloon',
u'wo': u'wolof',
u'xh': u'xhosa',
u'yi': u'yiddish',
u'yo': u'yoruba',
u'za': u'zhuang, chuang',
u'zh': u'chinese',
u'zu': u'zulu'}
to_ISO639_2 = {
u'test':u'en',
u'abkhaz': u'ab',
u'afar': u'aa',
u'afrikaans': u'af',
u'akan': u'ak',
u'albanian': u'sq',
u'amharic': u'am',
u'arabic': u'ar',
u'aragonese': u'an',
u'armenian': u'hy',
u'assamese': u'as',
u'avaric': u'av',
u'avestan': u'ae',
u'aymara': u'ay',
u'azerbaijani': u'az',
u'bambara': u'bm',
u'bashkir': u'ba',
u'basque': u'eu',
u'belarusian': u'be',
u'bengali': u'bn',
u'bihari': u'bh',
u'bislama': u'bi',
u'bosnian': u'bs',
u'breton': u'br',
u'bulgarian': u'bg',
u'burmese': u'my',
u'valencian': u'ca',
u'catalan': u'ca',
u'chamorro': u'ch',
u'chechen': u'ce',
u'chichewa; chewa; nyanja': u'ny',
u'chinese': u'zh',
u'chuvash': u'cv',
u'cornish': u'kw',
u'corsican': u'co',
u'cree': u'cr',
u'croatian': u'hr',
u'czech': u'cs',
u'danish': u'da',
u'divehi; maldivian': u'dv',
u'dutch': u'nl',
u'dzongkha': u'dz',
u'english': u'en',
u'esperanto': u'eo',
u'estonian': u'et',
u'ewe': u'ee',
u'faroese': u'fo',
u'fijian': u'fj',
u'finnish': u'fi',
u'french': u'fr',
u'fula': u'ff',
u'galician': u'gl',
u'georgian': u'ka',
u'german': u'de',
u'greek, modern': u'el',
u'guaran\xed': u'gn',
u'gujarati': u'gu',
u'haitian': u'ht',
u'hausa': u'ha',
u'hebrew (modern)': u'he',
u'herero': u'hz',
u'hindi': u'hi',
u'hiri motu': u'ho',
u'hungarian': u'hu',
u'icelandic': u'is',
u'ido': u'io',
u'igbo': u'ig',
u'indonesian': u'id',
u'interlingua': u'ia',
u'interlingue': u'ie',
u'inuktitut': u'iu',
u'inupiaq': u'ik',
u'irish': u'ga',
u'italian': u'it',
u'japanese': u'ja',
u'javanese': u'jv',
u'kalaallisut': u'kl',
u'kannada': u'kn',
u'kanuri': u'kr',
u'kashmiri': u'ks',
u'kazakh': u'kk',
u'khmer': u'km',
u'kikuyu, gikuyu': u'ki',
u'kinyarwanda': u'rw',
u'kirghiz, kyrgyz': u'ky',
u'kirundi': u'rn',
u'komi': u'kv',
u'kongo': u'kg',
u'korean': u'ko',
u'kurdish': u'ku',
u'kwanyama, kuanyama': u'kj',
u'lao': u'lo',
u'latin': u'la',
u'latvian': u'lv',
u'limburgish': u'li',
u'lingala': u'ln',
u'lithuanian': u'lt',
u'luba-katanga': u'lu',
u'luganda': u'lg',
u'luxembourgish': u'lb',
u'macedonian': u'mk',
u'malagasy': u'mg',
u'malay': u'ms',
u'malayalam': u'ml',
u'maltese': u'mt',
u'manx': u'gv',
u'marathi': u'mr',
u'marshallese': u'mh',
u'mongolian': u'mn',
u'm\u0101ori': u'mi',
u'nauru': u'na',
u'navajo, navaho': u'nv',
u'ndonga': u'ng',
u'nepali': u'ne',
u'north ndebele': u'nd',
u'northern sami': u'se',
u'norwegian': u'no',
u'norwegian bokm\xe5l': u'nb',
u'norwegian nynorsk': u'nn',
u'nuosu': u'ii',
u'occitan': u'oc',
u'ojibwe, ojibwa': u'oj',
u'old church slavonic': u'cu',
u'oriya': u'or',
u'oromo': u'om',
u'ossetian, ossetic': u'os',
u'panjabi, punjabi': u'pa',
u'pashto, pushto': u'ps',
u'persian': u'fa',
u'polish': u'pl',
u'portuguese': u'pt',
u'p\u0101li': u'pi',
u'quechua': u'qu',
u'romanian': u'ro',
u'moldovan': u'ro',
u'romansh': u'rm',
u'russian': u'ru',
u'samoan': u'sm',
u'sango': u'sg',
u'sanskrit': u'sa',
u'sardinian': u'sc',
u'scottish': u'gd',
u'gaelic': u'gd',
u'serbian': u'sr',
u'shona': u'sn',
u'sindhi': u'sd',
u'sinhala, sinhalese': u'si',
u'slovak': u'sk',
u'slovene': u'sl',
u'somali': u'so',
u'south ndebele': u'nr',
u'southern sotho': u'st',
u'spanish': u'es',
u'castilian': u'es',
u'sundanese': u'su',
u'swahili': u'sw',
u'swati': u'ss',
u'swedish': u'sv',
u'tagalog': u'tl',
u'tahitian': u'ty',
u'tajik': u'tg',
u'tamil': u'ta',
u'tatar': u'tt',
u'telugu': u'te',
u'thai': u'th',
u'tibetan': u'bo',
u'tigrinya': u'ti',
u'tonga': u'to',
u'tsonga': u'ts',
u'tswana': u'tn',
u'turkish': u'tr',
u'turkmen': u'tk',
u'twi': u'tw',
u'uighur, uyghur': u'ug',
u'ukrainian': u'uk',
u'urdu': u'ur',
u'uzbek': u'uz',
u'venda': u've',
u'vietnamese': u'vi',
u'volap\xfck': u'vo',
u'walloon': u'wa',
u'welsh': u'cy',
u'western frisian': u'fy',
u'wolof': u'wo',
u'xhosa': u'xh',
u'yiddish': u'yi',
u'yoruba': u'yo',
u'zhuang, chuang': u'za',
u'zulu': u'zu'}
class DefaultOrderedDict(OrderedDict):
# Source: http://stackoverflow.com/a/6190500/562769
def __init__(self, default_factory=None, *a, **kw):
if (default_factory is not None and
not isinstance(default_factory, Callable)):
raise TypeError('first argument must be callable')
OrderedDict.__init__(self, *a, **kw)
self.default_factory = default_factory
def __getitem__(self, key):
try:
return OrderedDict.__getitem__(self, key)
except KeyError:
return self.__missing__(key)
def __missing__(self, key):
if self.default_factory is None:
raise KeyError(key)
self[key] = value = self.default_factory()
return value
def __reduce__(self):
if self.default_factory is None:
args = tuple()
else:
args = self.default_factory,
return type(self), args, None, None, self.items()
def copy(self):
return self.__copy__()
def __copy__(self):
return type(self)(self.default_factory, self)
def __deepcopy__(self, memo):
import copy
return type(self)(self.default_factory,
copy.deepcopy(self.items()))
def __repr__(self):
        return 'DefaultOrderedDict(%s, %s)' % (self.default_factory,
                                               OrderedDict.__repr__(self))
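# Illustrative usage sketch (assumes OrderedDict/Callable are imported above, as in this module):
#   d = DefaultOrderedDict(list)
#   d["a"].append(1)
#   d["b"].append(2)
#   d.items()  # -> [('a', [1]), ('b', [2])] -- default values plus preserved insertion order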
def function_name(n=-2):
stack = traceback.extract_stack()
try:
filename, codeline, funcName, text = stack[n]
except:
funcName = None
return funcName
def get_tb(n=-2):
stack = traceback.extract_stack()
filename, codeline, funcName, text = (None,None,None,None)
try:
filename, codeline, funcName, text = stack[n]
except:
filename, codeline, funcName, text = (None,None,None,None)
return {"filename":filename, "codeline":codeline, "funcName":funcName, "text":text}
def get_module_name(n=-2):
    stack = traceback.extract_stack()
    try:
        filename, codeline, funcName, text = stack[n]
    except:
        # without a stack entry there is no filename to derive the module name from
        return None
    return os.path.splitext(os.path.basename(filename))[0]
class Status(object):
# {"status":False, "track_id":track_id, "desc":"Table ('{}') wasn't found or not exist.".format(table_name), "level_name":"error_insertion", "action":"ignored"}
collection_complete_status= defaultdict(lambda: defaultdict(dict))
lock = threading.Lock()
allow_full_tb = False
allow_auto_func_names = False
allow_auto_arginfo = False
light_mode = True
def __init__(self, status=None, track_id=None, desc=None, level=None, action=None, error_name=None, inp_obj = None, func_name=None, out_obj=0, exception=None, tb=None, local_var=None, arginfo=True, outsort=0, answer=0):
#with Status.lock:
#p((status,track_id),"status")
self._status = {}
if Status.light_mode:
#p((status,track_id),"1status")
if status==True:
#p((status,track_id),"2status")
self.module_name = None
self.track_id = None
self._status = {"status":status, "track_id":self.track_id, "out_obj":out_obj, "desc":desc, "outsort":outsort, "answer":answer}
else:
#p((status,track_id),"3status")
self._create_case(status=status, track_id=track_id, desc=desc, level=level, action=action, error_name=error_name, inp_obj = inp_obj , func_name=func_name, out_obj=out_obj, exception=exception, tb=tb, local_var=local_var, arginfo=arginfo)
else:
#p((status,track_id),"4status")
self._create_case(status=status, track_id=track_id, desc=desc, level=level, action=action, error_name=error_name, inp_obj = inp_obj , func_name=func_name, out_obj=out_obj, exception=exception, tb=tb, local_var=local_var, arginfo=arginfo)
def _create_case(self,status=None, track_id=None, desc=None, level=None, action=None, error_name=None, inp_obj = None, func_name=None, out_obj=0, exception=None, tb=None, local_var=None, arginfo=True,outsort=0,answer=0):
self.module_name = get_module_name(-3)
#p(module_name)
self.track_id = track_id if track_id else SharedCounterExtern().incr()
auto_func_name = None
auto_tb = None
arginfo = None
#p((status,self.track_id,self.module_name),"5status")
if Status.allow_auto_func_names:
auto_func_name = {"-1":function_name(-1),"-2":function_name(-2), "-3":function_name(-3), "-4":function_name(-4),"-5":function_name(-5)}
if Status.allow_full_tb:
auto_tb={"-1":get_tb(-1),"-2":get_tb(-2), "-3":get_tb(-3), "-4":get_tb(-4),"-5":get_tb(-5), "-6":get_tb(-6)}
if Status.allow_auto_arginfo:
arginfo = inspect.getargvalues(inspect.currentframe().f_back)
#p(auto_func_name,"auto_func_name")
#p(auto_tb,"auto_tb")
#p(arginfo,"arginfo")
#p(Status.allow_auto_arginfo, "Status.allow_auto_arginfo")
#p(arginfo,"arginfo")
#p((status,self.track_id,self.module_name),"6status")
#p((Status.allow_full_tb, Status.allow_auto_func_names,Status.allow_auto_arginfo, ))
self._status = {
"status":status,
"track_id":self.track_id,
"desc":desc,
"level":level,
"action":action,
"error_name":error_name,
"inp_obj":inp_obj,
"func_name":func_name,
"auto_func_name":auto_func_name,
"out_obj":out_obj,
"exception":exception,
"tb":tb,
"local_var":local_var,
"args":arginfo.args if arginfo else None,
"auto_tb":auto_tb,
"answer":answer,
# "varargs":str(arginfo.varargs),
# "keywords":str(arginfo.keywords),
"locals":{k:str(v) for k,v in arginfo.locals.iteritems()} if arginfo else None,
"outsort":outsort,
#"arginfo":inspect.formatargvalues(arginfo.args,arginfo.varargs, arginfo.keywords ,arginfo.locals),
#"arginfo":arginfo
}
#p((status,self.track_id,self.module_name),"7status")
Status.collection_complete_status[self.module_name][self.track_id] = self
#p((status,self.track_id,self.module_name),"8status")
def __delitem__(self,item_key):
#p(self._status)
del self._status[item_key]
def __getitem__(self,item_key):
with Status.lock:
return self._status[item_key]
#return copy.deepcopy(self._status[item_key])
def __str__(self):
outputstr = ""
outputstr += ">>>>>>> {} <<<<<<<<\n".format(self.track_id)
#p(self.__dict__, "self._status")
for name, values in OrderedDict(self._status).iteritems():
if not values:
outputstr += "{}:{}\n".format(name, values)
else:
if name in ["locals", "auto_func_name"]:
outputstr += "{}: (".format(name)
for var_name, var_values in OrderedDict(values).iteritems():
outputstr += "\n {}:{}".format(var_name, repr(var_values))
outputstr += "\n )\n".format()
elif name in ["auto_tb"]:
outputstr += "{}: (".format(name)
for var_name, var_values in OrderedDict(values).iteritems():
outputstr += "\n {}: {{".format(var_name)
for tag_name, tag_value in OrderedDict(var_values).iteritems():
outputstr += "\n {}:{}".format(tag_name, repr(tag_value))
outputstr += "\n }}\n".format()
#outputstr += "\n {}:{}".format(var_name, var_values)
outputstr += "\n )\n".format()
# elif name == "auto_tb":
# pass
# elif name == "auto_func_name":
# pass
else:
outputstr += "{}:{}\n".format(name, repr(values))
outputstr += "\n".format()
return outputstr
def __setitem__(self,item_key,item):
with Status.lock:
if self.module_name == None:
pass
else:
if item_key == "track_id":
if item not in [track_id for module, module_data in Status.collection_complete_status.iteritems() for track_id in module_data]:
del Status.collection_complete_status[self.module_name][self.track_id]
self.track_id = item
self._status[item_key] = item
Status.collection_complete_status[self.module_name][self.track_id] = self
else:
                        raise KeyError, "Given track_id already exists in the current collection!"
else:
self._status[item_key] = item
def get(self):
with Status.lock:
return copy.deepcopy(self._status)
    @staticmethod
    def statusesTstring(module_name):
outputstr = ""
if module_name in Status.collection_complete_status:
for k,v in OrderedDict(Status.collection_complete_status[module_name]).iteritems():
outputstr += "{}".format(v)
#p(outputstr)
return outputstr
return False
class SharedCounterExtern(object):
val = RawValue('i', 0)
lock = threading.Lock()
    def __init__(self, value=False, new=False):
        # RawValue is sufficient here, since synchronisation is done via the class-level Lock:
        if value:
            SharedCounterExtern.val.value = value
        if new:
            self.new()
def __int__(self):
return int(self.v())
def __str__(self):
return str(self.v())
def incr(self, n=1):
with SharedCounterExtern.lock:
SharedCounterExtern.val.value += n
return SharedCounterExtern.val.value
def v(self):
with SharedCounterExtern.lock:
return SharedCounterExtern.val.value
def clear(self):
with SharedCounterExtern.lock:
SharedCounterExtern.val.value = 0
return SharedCounterExtern.val.value
def new(self):
SharedCounterExtern.val.value = 0
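# Illustrative usage sketch: the counter state lives on the class, so every instance
# increments the same value (this is how Status generates its track_ids above):
#   SharedCounterExtern().incr()  # -> 1
#   SharedCounterExtern().incr()  # -> 2
#   SharedCounterExtern().v()     # -> 2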
class SharedCounterIntern(object):
#val = RawValue('i', 0)
#lock = threading.Lock()
def __init__(self, value=False, new=False):
self.val = RawValue('i', 0)
self.lock = threading.Lock()
        # RawValue is sufficient here, since synchronisation is done via the instance-level Lock:
if value:
self.val.value = value
if new:
self.new()
def __int__(self):
return int(self.v())
def __str__(self):
return str(self.v())
def incr(self, n=1):
with self.lock:
self.val.value += n
return self.val.value
def v(self):
with self.lock:
return self.val.value
def clear(self):
with self.lock:
self.val.value = 0
return self.val.value
def new(self):
self.val.value = 0
class MyZODB(object):
def __init__(self, path):
self.path = path
def open(self):
#p(LockError)
while True:
try:
self.storage = FileStorage.FileStorage(self.path, read_only=False)
self.db = DB(self.storage)
self.my_transaction_manager = transaction.TransactionManager()
self.conn = self.db.open(self.my_transaction_manager)
break
except LockError:
pass
    def __getitem__(self,item_index):
        self.open()
        fetched_root = copy.deepcopy(self.conn.root())
        self.close()
        return fetched_root[item_index]
    def __str__(self):
        self.open()
        root_str = str(self.conn.root())
        self.close()
        return root_str
def __get__(self, instance, owner):
self.open()
root = copy.deepcopy(self.conn.root())
self.close()
return root
def __setitem__(self,index,item):
self.open()
with self.my_transaction_manager as trans:
#trans.note(u"incrementing x")
self.conn.root()[index] = item
self.close()
def get_root(self):
self.open()
root = copy.deepcopy(self.conn.root())
self.close()
return root
def close(self):
self.conn.close()
#self.my_transaction_manager = False
#self.my_transaction_manager.close()
self.db.close()
self.storage.close()
def iteritems(self):
self.open()
root = copy.deepcopy(self.conn.root())
self.close()
for k,v in root.iteritems():
yield k,v
#return iter([(x[0], x) for x in "alpha bravo charlie".split()])
#return root
    def __iter__(self):
        self.open()
        fetched_root = copy.deepcopy(self.conn.root())
        self.close()
        return iter(fetched_root.keys())
#return iter(self.settings.keys())
def clean(self):
self.open()
with self.my_transaction_manager as trans:
keys = [key for key in self.conn.root()]
for key in keys:
del self.conn.root()[key]
self.close()
return True
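# Illustrative usage sketch (the path is hypothetical); every access opens and
# closes the FileStorage, so the ZODB lock is not held between calls:
#   db = MyZODB("/tmp/example.fs")
#   db["answer"] = 42
#   db["answer"]  # -> 42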
def levenstein(s1,s2):
    '''Returns the Levenshtein (edit) distance between two sequences.'''
    len_1=len(s1)
    len_2=len(s2)
    x =[[0]*(len_2+1) for _ in range(len_1+1)] # the matrix whose last element is the edit distance
    for i in range(0,len_1+1): # initialization of base case values
        x[i][0]=i
    for j in range(0,len_2+1):
        x[0][j]=j
    for i in range(1,len_1+1):
        for j in range(1,len_2+1):
            if s1[i-1]==s2[j-1]:
                x[i][j] = x[i-1][j-1]
            else:
                x[i][j] = min(x[i][j-1],x[i-1][j],x[i-1][j-1])+1
    return x[len_1][len_2]
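# Illustrative usage:
#   levenstein("kitten", "sitting")  # -> 3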
def NestedDictValues(d):
    '''
    Recursively yields every leaf value of a nested dict, e.g.:
    >>> a = {4:1,6:2,7:{8:3,9:4,5:{10:5},2:6,6:{2:7,1:8}}}
    >>> sorted(NestedDictValues(a))
    [1, 2, 3, 4, 5, 6, 7, 8]
    '''
for v1 in d.values():
if isinstance(v1, dict):
for v2 in NestedDictValues(v1):
yield v2
else:
yield v1
#Rle().del_rep()
class Rle(object):
def __init__(self, inplogger=False):
#from zas_rep_tools.src.extensions.uniseg import graphemecluster as gc
#self.gc = gc
self.re_pattern_char_recognition= regex.compile(ur'[\ud800-\udbff][\udc00-\udfff]|.',regex.UNICODE)
global logger
if inplogger:
self.logger = inplogger
else:
self.logger = logger
def del_rep_from_sent(self, input_string):
#time.sleep(8)
count = 1
prev = u''
encoded = u""
if not isinstance(input_string, list):
msg = "RLE(del_rep_from_sent): Given Obj is not an list"
if self.logger:
self.logger.error(msg)
else:
print(msg)
return False
for character in input_string:
if not isinstance(character, unicode):
character = character.decode("utf-8")
            if character != prev:
                if prev:
                    encoded += u"{} ".format(prev)
                count = 1
                prev = character
            else:
                count += 1
        else:
            try:
                if prev:
                    encoded += u"{} ".format(prev)
                return encoded.strip()
except Exception as e:
if self.logger:
self.logger.error("RLE: Exception encountered {e}".format(e=e), exc_info=True)
else:
print("Exception encountered {e}".format(e=e))
def get_chars(self, input_string):
#input_string = input_string.decode("utf-8")
try:
input_string = input_string.decode("utf-8")
except (UnicodeEncodeError, AttributeError):
pass
except Exception as e:
if self.logger:
self.logger.error("RLE: Exception encountered: {e}".format(e=e), exc_info=True)
else:
print("Exception encountered: {e}".format(e=e))
try:
input_string = self.re_pattern_char_recognition.findall(input_string)
except TypeError:
return input_string
return input_string
# def get_chars(self, input_string):
# #input_string = input_string.decode("utf-8")
# #input_string = self.re_pattern_char_recognition.findall(input_string)
# if isinstance(input_string, unicode):
# input_string = self.gc.grapheme_clusters(input_string)
# elif isinstance(input_string, str):
# input_string = self.gc.grapheme_clusters(unicode(input_string,"utf-8"))
# # try:
# # input_string = self.re_pattern_char_recognition.findall(input_string)
# # except TypeError:
# # return input_string
# return input_string
def del_rep(self, input_string):
#time.sleep(8)
count = 1
prev = u''
encoded = u""
#lst = []
# input_string = self.re_pattern_char_recognition.findall(input_string)
# # if isinstance(input_string, unicode):
# # input_string = self.gc.grapheme_clusters(input_string)
# # elif isinstance(input_string, str):
# # input_string = self.gc.grapheme_clusters(unicode(input_string,"utf-8"))
for character in self.get_chars(input_string):
if character != prev:
if prev:
#entry = (prev,count)
#lst.append(entry)
encoded += u"{}".format(prev)
#print lst
count = 1
prev = character
else:
count += 1
else:
try:
#entry = (character,count)
#lst.append(entry)
encoded += u"{}".format(character)
return encoded
except Exception as e:
if self.logger:
self.logger.error("RLE: Exception encountered {e}".format(e=e), exc_info=True)
else:
print("Exception encountered {e}".format(e=e))
def get_repetativ_elems(self, encoded_rle_to_tuples, repeat_up=3):
#p(encoded_rle_to_tuples)
extracted_reps = []
char_index = -1
for char_container in encoded_rle_to_tuples:
char_index += 1
#rep_free_token += char_container[0]
if char_container[1] >= repeat_up:
extracted_reps.append((char_container[0], char_container[1],char_index))
return extracted_reps
def rep_extraction_word(self,encoded_rle_to_tuples, repeat_up=3, get_rle_as_str=False):
extracted_reps = []
rep_free_word = ""
char_index = -1
#p(encoded_rle_to_tuples, "encoded_rle_to_tuples")
if not get_rle_as_str:
for char_container in encoded_rle_to_tuples:
char_index += 1
rep_free_word += char_container[0]
if char_container[1] >= repeat_up:
extracted_reps.append((char_container[0], char_container[1],char_index))
return extracted_reps, rep_free_word
else:
#p(encoded_rle_to_tuples,"encoded_rle_to_tuples")
rle_word = ""
for char_container in encoded_rle_to_tuples:
char_index += 1
rep_free_word += char_container[0]
rle_word += char_container[0]+"^"+str(char_container[1]) if char_container[1]> 1 else char_container[0]
if char_container[1] >= repeat_up:
extracted_reps.append((char_container[0], char_container[1],char_index))
#p(rep_free_word, "rep_free_word", c="r")
return extracted_reps, rep_free_word,rle_word
def rep_extraction_sent(self,encoded_rle_to_tuples,mappped, repeat_up=2): #,rle_for_repl_in_text_container=False,repl_free_text_container=False,
extracted_reps = []
rep_free_sent = []
char_index = -1
#p(encoded_rle_to_tuples, "encoded_rle_to_tuples")
#p(mappped,"mappped")
#if not rle_for_repl_in_text_container:
for char_container in encoded_rle_to_tuples:
char_index += 1
#p(repr(char_container[0]), "char_container[0]",c="m")
#index_in_redu_free = 0
rep_free_sent.append(char_container[0])
if char_container[1] >= repeat_up:
extracted_reps.append({"word":char_container[0], "length":char_container[1], "start_index_in_orig":mappped[char_index], "index_in_redu_free":char_index})
#extracted_reps.append((char_container[0], char_container[1],char_container[2],char_index))
return extracted_reps, rep_free_sent
def get_rep_free_word_from_rle_in_tuples(self, encoded_rle_to_tuples, decode_to_unicode=True):
if decode_to_unicode:
return u"".join([char[0] for char in encoded_rle_to_tuples])
else:
return "".join([char[0] for char in encoded_rle_to_tuples])
def get_rep_free_sent_from_rle_in_tuples(self, encoded_rle_to_tuples, decode_to_unicode=True):
if decode_to_unicode:
return u" ".join([char[0].decode("utf-8") for char in encoded_rle_to_tuples])
else:
return " ".join([char[0] for char in encoded_rle_to_tuples])
    def convert_rle_intuples_to_rle_as_str(self, encoded_rle_to_tuples):
        # Assumption: uses the same compact notation as rep_extraction_word above,
        # e.g. [(u'a', 3), (u'b', 1)] -> u"a^3b"
        return u"".join(char + u"^" + unicode(count) if count > 1 else char
                        for char, count in encoded_rle_to_tuples)
def encode_to_tuples(self,input_string, mapping=False):
count = 1
prev = ''
lst = []
if mapping: mapped = []
#p(input_string, "input_string")
i = -1
start_index = None
for character in self.get_chars(input_string):
i+=1
if character != prev:
if prev:
#p((character, prev, i,start_index))
#start_index = start_index if start_index else i
entry = (prev,count)
#p((prev,count,start_index), c="m")
lst.append(entry)
if mapping: mapped.append(start_index)
#print lst
start_index = None
if start_index is None:
start_index = i
count = 1
prev = character
else:
count += 1
else:
try:
entry = (character,count)
if mapping: mapped.append(start_index)
#p((character,count,i), c="r")
lst.append(entry)
#start_index = None
if mapping:
return lst, mapped
else:
return lst
except Exception as e:
if self.logger:
self.logger.error("RLE: Exception encountered {e}".format(e=e), exc_info=True)
else:
print("Exception encountered {e}".format(e=e))
if mapping:
return False, False
else:
return False
def decode_words_to_str(self,lst):
try:
q = ""
#prev = ""
for word, count in lst:
q += (word+" ") * count
return q.strip()
except Exception, e:
if self.logger:
self.logger.error(" RLE:Exception encountered {e}".format(e=e), exc_info=True)
else:
print("Exception encountered {e}".format(e=e))
def decode_words_to_list(self,lst):
try:
q = []
#prev = ""
for word, count in lst:
for c in xrange(count):
q.append(word)
return q
except Exception, e:
if self.logger:
self.logger.error(" RLE:Exception encountered {e}".format(e=e), exc_info=True)
else:
print("Exception encountered {e}".format(e=e))
def decode_letters_to_str(self,lst):
try:
q = ""
for character, count in lst:
q += character * count
return q
except Exception, e:
if self.logger:
self.logger.error("RLE: Exception encountered {e}".format(e=e), exc_info=True)
else:
print("Exception encountered {e}".format(e=e))
def encode_str_to_str(self,input_string):
count = 1
prev = u''
encoded = u""
#lst = []
# if isinstance(input_string, unicode):
# input_string = self.gc.grapheme_clusters(input_string)
# elif isinstance(input_string, str):
# input_string = self.gc.grapheme_clusters(unicode(input_string,"utf-8"))
for character in self.get_chars(input_string):
if character != prev:
if prev:
#entry = (prev,count)
#lst.append(entry)
encoded += u"{}{}".format(prev,count)
#print lst
count = 1
prev = character
else:
count += 1
else:
try:
#entry = (character,count)
#lst.append(entry)
encoded += u"{}{}".format(character,count)
return encoded
except Exception as e:
if self.logger:
self.logger.error("RLE: Exception encountered {e}".format(e=e), exc_info=True)
else:
print("Exception encountered {e}".format(e=e))
def decode_str_from_str(self,inp_str):
pattern = regex.compile(r"(.+?)(\d+)")
matched = regex.findall(pattern, inp_str)
#p(matched)
        if not matched:
            if self.logger:
                self.logger.error("RLE: Given string to decode did not have the correct structure!", exc_info=True)
            else:
                print("RLE: Given string to decode did not have the correct structure!")
        try:
            return self.decode_letters_to_str([(t[0], int(t[1])) for t in matched])
        except:
            if self.logger:
                self.logger.error("RLE: Given string to decode did not have the correct structure!", exc_info=True)
            else:
                print("RLE: Given string to decode did not have the correct structure!")
rle = Rle()
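# Illustrative usage of the module-level instance:
#   rle.del_rep(u"haalllooo")            # -> u"halo"
#   rle.encode_to_tuples(u"haalllooo")   # -> [(u'h', 1), (u'a', 2), (u'l', 3), (u'o', 3)]
#   rle.encode_str_to_str(u"haalllooo")  # -> u"h1a2l3o3"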
class LenGen(object):
def __init__(self,gen,length):
self.gen=gen
self.length=length
def __call__(self):
return itertools.islice(self.gen(),self.length)
def __len__(self):
return self.length
    def __iter__(self):
        # iterate over the length-limited slice, consistent with __call__
        return iter(self())
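# Illustrative usage sketch: wraps a generator factory plus a known length, so len()
# works on an otherwise lazily evaluated sequence:
#   lg = LenGen(lambda: (i*i for i in itertools.count()), 5)
#   len(lg)     # -> 5
#   list(lg())  # -> [0, 1, 4, 9, 16]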
nextHighest = lambda seq,x: min([(i-x,i) for i in seq if x<=i] or [(0,None)])[1]
nextLowest = lambda seq,x: min([(x-i,i) for i in seq if x>=i] or [(0,None)])[1]
def _async_raise(tid, exctype):
"""raises the exception, performs cleanup if needed"""
if not inspect.isclass(exctype):
raise TypeError("Only types can be raised (not instances)")
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, ctypes.py_object(exctype))
if res == 0:
raise ValueError("invalid thread id")
elif res != 1:
# """if it returns a number greater than one, you're in trouble,
# and you should call it again with exc=NULL to revert the effect"""
ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, 0)
raise SystemError("PyThreadState_SetAsyncExc failed")
class MyThread(threading.Thread):
#http://tomerfiliba.com/recipes/Thread2/
def _get_my_tid(self):
"""determines this (self's) thread id"""
if not self.isAlive():
raise threading.ThreadError("the thread is not active")
# do we have it cached?
if hasattr(self, "_thread_id"):
return self._thread_id
# no, look for it in the _active dict
for tid, tobj in threading._active.items():
if tobj is self:
self._thread_id = tid
return tid
raise AssertionError("could not determine the thread's id")
def raise_exc(self, exctype):
"""raises the given exception type in the context of this thread"""
_async_raise(self._get_my_tid(), exctype)
def terminate(self):
"""raises SystemExit in the context of the given thread, which should
cause the thread to exit silently (unless caught)"""
self.raise_exc(SystemExit)
print "'{}'-Thread was terminated.".format(self.name)
def ngrams(token_list, n):
return [tuple(token_list[i:i+n]) for i in xrange(len(token_list)-n+1)]
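# Illustrative usage:
#   ngrams(["a", "b", "c", "d"], 2)  # -> [('a', 'b'), ('b', 'c'), ('c', 'd')]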
# class InternTuple()
# def __init__(self):
# adress = 0
# self.table = {}
def char_is_punkt(character):
return character in punkt_str
def text_is_punkt(text):
    for char in text:
        if char not in punkt_str:
            return False
    return True
#def text_is_punkt(text):
# flags = set([True if character in punkt_str else False for character in text ])
# if len(flags) == 1 and True in flags:
# return True
# return False
def text_is_emoji(text):
flags = set([True if character in emoji.UNICODE_EMOJI else False for character in text ])
if len(flags) == 1 and True in flags:
return True
return False
# def is_emoticon(character):
# #if character[0] in ['#', '$', ')', '(', '*', '-', '/', '8', ';', ':', '=', '<', '>', '@', '\\', '^', 'b', 'o', 'x', '{', '|', '~']:
# return rle.del_rep(character) in emoticons1
def is_emoticon(character):
#if character[0] in ['#', '$', ')', '(', '*', '-', '/', '8', ';', ':', '=', '<', '>', '@', '\\', '^', 'b', 'o', 'x', '{', '|', '~']:
if character[0] in punkt_str:
replfree = rle.del_rep(character)
if replfree.lower() in emoticons:
            #### This is for emoticons like "))))" or "((((": only if the character was replicated many times; if not, then it is just a punctuation symbol.
if len(replfree) == 1:
if len(character) > 1:
return True
else:
return False
else:
return True
else:
return False
else:
return False
url_compiled_pattern = re.compile('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+')
def is_url(inptoken):
urls = url_compiled_pattern.findall( inptoken)
if urls:
return True
return False
def categorize_token(inptoken):
try:
if inptoken.isdigit():
return "number"
elif is_emoji(inptoken):
return "EMOIMG"
elif is_emoticon(inptoken):
#print "-----", inptoken,is_emoticon(inptoken)
return "EMOASC"
elif text_is_punkt(inptoken):
return "symbol"
elif inptoken[0] == "@":
return "mention"
elif inptoken[0] == "#":
return "hashtag"
elif is_url(inptoken):
return "URL"
return "regular"
except:
return None
def categorize_token_list(inpliste):
return [(token, categorize_token(token)) for token in inpliste]
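# Illustrative usage (the URL is hypothetical):
#   categorize_token(u"42")             # -> "number"
#   categorize_token(u"@user")          # -> "mention"
#   categorize_token(u"#nlp")           # -> "hashtag"
#   categorize_token(u"http://foo.de")  # -> "URL"
#   categorize_token(u"hello")          # -> "regular"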
def categorize_emoticon(inptoken):
try:
if is_emoji(inptoken):
#p((inptoken,"EMOIMG"), "inptoken", c="m")
return "EMOIMG"
elif is_emoticon(inptoken):
#p((inptoken,"EMOASC"), "inptoken", c="m")
return "EMOASC"
#elif is_emoji(inptoken[0]):
# return "EMOIMG"
return None
except:
return None
def recognize_emoticons_types(inpliste):
output_list = []
for token in inpliste:
cat = categorize_emoticon(token[0])
if cat:
#print "(token[0], cat)", (token[0], cat)
output_list.append((token[0], cat))
else:
#print "token", token
output_list.append(token)
return output_list
def get_categories(inpliste):
return [ categorize_token(token) for token in inpliste]
def removetags(raw_html):
cleanr = re.compile('<.*?>')
cleantext = re.sub(cleanr, '', raw_html)
return cleantext
def remove_html_codded_chars(raw_html):
# 
cleanr = re.compile('&.*?;')
cleantext = re.sub(cleanr, '', raw_html)
return cleantext
def text_has_punkt(text):
for character in text:
if character in punkt_str:
return True
return False
def is_emoji(character):
return character in emoji.UNICODE_EMOJI
def text_has_emoji(text):
for character in text:
if character in emoji.UNICODE_EMOJI:
return True
return False
def internet_on():
try:
urllib2.urlopen('http://216.58.192.142', timeout=1)
return True
except urllib2.URLError as err:
return False
def set_class_mode(mode):
if mode in modi:
if mode == "dev":
logger_level = logging.DEBUG
logger_traceback = True
logger_save_logs = True
save_status = True
Status.allow_full_tb = True
Status.allow_auto_func_names = True
Status.allow_auto_arginfo = True
Status.light_mode = False
ZASLogger._save_lower_debug = True
ZASLogger._set_functon_name_as_event_name = False
ZASLogger._save_debug = True
log_content = True
save_settings = True
logger_usage = True
ext_tb = False
elif mode == "dev-":
logger_level = logging.INFO
logger_traceback = True
logger_save_logs = True
save_status = True
Status.allow_full_tb = False
Status.allow_auto_func_names = True
Status.allow_auto_arginfo = True
Status.light_mode = False
ZASLogger._save_lower_debug = True
ZASLogger._set_functon_name_as_event_name = False
ZASLogger._save_debug = True
log_content = False
save_settings = True
logger_usage = True
ext_tb = False
elif mode == "dev+":
logger_level = 9
logger_traceback = True
logger_save_logs = True
save_status = True
Status.allow_full_tb = True
Status.allow_auto_func_names = True
Status.allow_auto_arginfo = True
Status.light_mode = False
ZASLogger._save_lower_debug = True
ZASLogger._set_functon_name_as_event_name = False
ZASLogger._save_debug = True
log_content = True
save_settings = True
logger_usage = True
ext_tb = True
elif mode == "test":
logger_level = logging.ERROR
logger_traceback = True
Status.light_mode = True
logger_save_logs = False
ZASLogger._save_lower_debug = False
ZASLogger._save_debug = False
ZASLogger._set_functon_name_as_event_name = False
save_status = False
log_content = False
save_settings = False
logger_usage = True
ext_tb = False
elif mode == "error":
logger_level = logging.ERROR
logger_traceback = False
Status.light_mode = True
logger_save_logs = False
ZASLogger._save_lower_debug = False
ZASLogger._save_debug = False
ZASLogger._set_functon_name_as_event_name = False
save_status = False
log_content = False
save_settings = False
logger_usage = True
ext_tb = False
elif mode == "prod":
logger_level = logging.INFO
logger_traceback = True
logger_save_logs = True
Status.light_mode = True
ZASLogger._save_lower_debug = False
ZASLogger._save_debug = False
ZASLogger._set_functon_name_as_event_name = False
save_status = False
log_content = False
save_settings = True
logger_usage = True
ext_tb = False
elif mode == "prod+":
logger_level = logging.INFO
logger_traceback = True
logger_save_logs = True
Status.light_mode = True
ZASLogger._save_lower_debug = False
ZASLogger._save_debug = False
ZASLogger._set_functon_name_as_event_name = False
save_status = False
log_content = True
save_settings = True
logger_usage = True
ext_tb = False
elif mode == "prod-":
logger_level = logging.INFO
logger_traceback = False
logger_save_logs = False
Status.light_mode = True
ZASLogger._save_lower_debug = False
ZASLogger._save_debug = False
ZASLogger._set_functon_name_as_event_name = False
save_status = False
log_content = False
save_settings = False
logger_usage = True
ext_tb = False
elif mode == "prod+t":
logger_level = logging.INFO
logger_traceback = True
logger_save_logs = False
Status.light_mode = True
ZASLogger._save_lower_debug = False
ZASLogger._save_debug = False
ZASLogger._set_functon_name_as_event_name = False
save_status = False
log_content = False
save_settings = False
logger_usage = False
ext_tb = False
elif mode == "test+s+":
logger_level = logging.INFO
logger_traceback = True
logger_save_logs = True
Status.light_mode = True
ZASLogger._save_lower_debug = True
ZASLogger._save_debug = True
save_status = True
log_content = True
Status.allow_full_tb = True
Status.allow_auto_func_names = True
Status.allow_auto_arginfo = True#
ZASLogger._set_functon_name_as_event_name = True
save_settings = True
logger_usage = True
ext_tb = False
elif mode == "test+s-":
logger_level = logging.ERROR
logger_traceback = True
logger_save_logs = True
Status.light_mode = True
ZASLogger._save_lower_debug = True
ZASLogger._save_debug = True
save_status = True
log_content = False
Status.allow_full_tb = True
Status.allow_auto_func_names = True
Status.allow_auto_arginfo = True#
ZASLogger._set_functon_name_as_event_name = True
save_settings = True
logger_usage = True
ext_tb = False
elif mode == "silent":
logger_level = logging.ERROR
logger_traceback = False
logger_save_logs = False
Status.light_mode = True
ZASLogger._save_lower_debug = False
ZASLogger._save_debug = False
save_status = False
log_content = False
Status.allow_full_tb = False
Status.allow_auto_func_names = False
Status.allow_auto_arginfo = False#
ZASLogger._set_functon_name_as_event_name = False
save_settings = False
logger_usage = False
ext_tb = False
elif mode == "blind":
logger_level = 70
logger_traceback = False
logger_save_logs = False
Status.light_mode = True
ZASLogger._save_lower_debug = False
ZASLogger._save_debug = False
save_status = False
log_content = False
Status.allow_full_tb = False
Status.allow_auto_func_names = False
Status.allow_auto_arginfo = False#
ZASLogger._set_functon_name_as_event_name = False
save_settings = False
logger_usage = True
ext_tb = False
else:
msg = "CorpusError: Given Mode '{}' is not supported. Please use one of the following modi: '{}'. ".format(mode, modi)
if platform.uname()[0].lower() !="windows":
p(msg,"ERROR", c="r")
else:
print msg
return logger_level, logger_traceback, logger_save_logs, save_status, log_content, save_settings, logger_usage, ext_tb
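# Illustrative usage: unpack the returned flags in the same order as the return statement above:
#   (logger_level, logger_traceback, logger_save_logs, save_status,
#    log_content, save_settings, logger_usage, ext_tb) = set_class_mode("dev")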
def print_mode_name(mode, logger):
if mode in modi:
if mode == "dev":
logger.debug("DEVELOPING MODE was started!")
elif mode == "dev-":
logger.debug("DEVELOPING MODE(-) was started!")
elif mode == "dev+":
logger.debug("DEVELOPING MODE(+) was started!")
elif mode == "test":
logger.debug("TEST MODE was started!")
elif mode == "prod":
logger.debug("PRODUCTION Mode was started!")
elif mode == "free":
logger.debug("FREE Mode was started!")
else:
msg = "Given Mode '{}' is not supported. Please use one of the following modi: '{}'. ".format(mode, modi)
if platform.uname()[0].lower() !="windows":
p(msg,"ERROR", c="r")
else:
print msg
def make_zipfile(output_filename, source_dir):
relroot = os.path.abspath(os.path.join(source_dir, os.pardir))
with zipfile.ZipFile(output_filename, "w", zipfile.ZIP_DEFLATED, allowZip64=True) as zip:
for root, dirs, files in os.walk(source_dir):
# add directory (needed for empty dirs)
zip.write(root, os.path.relpath(root, relroot))
for file in files:
filename = os.path.join(root, file)
if os.path.isfile(filename): # regular files only
arcname = os.path.join(os.path.relpath(root, relroot), file)
zip.write(filename, arcname)
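# Illustrative usage sketch (paths are hypothetical); the archive contains
# source_dir itself as its top-level folder, since arcnames are relative to its parent:
#   make_zipfile("/tmp/corpus_backup.zip", "/tmp/my_corpus")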
def get_number_of_streams_adjust_cpu( min_files_pro_stream, row_number, stream_number, cpu_percent_to_get=50):
if row_number <= 0:
return None
#p(( min_files_pro_stream, row_number, stream_number, cpu_percent_to_get))
if min_files_pro_stream <= 0:
min_files_pro_stream = 1
## get possible thread number
processors_number = psutil.cpu_count()
if processors_number > 1:
processors_to_use = int((cpu_percent_to_get * processors_number) / 100.0) # to use 50% of CPUs
else:
processors_to_use = 1
if stream_number > processors_to_use:
stream_number = processors_to_use
temp_stream_number = int(Decimal(float(row_number) / min_files_pro_stream).quantize(Decimal('1.'), rounding=ROUND_DOWN))
#p(temp_stream_number, "temp_stream_number", c="r")
if temp_stream_number <= stream_number:
if temp_stream_number >0:
return temp_stream_number
else:
return 1
else:
return stream_number
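# Illustrative usage (the result also depends on psutil.cpu_count() of the host):
#   get_number_of_streams_adjust_cpu(100, 1000, 4)
#   # caps the 4 requested streams both by the 50%-CPU budget and by
#   # row_number/min_files_pro_stream (here 1000/100 = 10)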
# def get_number_of_threads_adjust_cpu( min_files_pro_thread, whole_file_number, thread_number=False):
# if whole_file_number <= 0:
# return None
# processors_number = psutil.cpu_count()
# if processors_number > 1:
# processors_to_use = int((50 * processors_number) / 100.0) # to use 50% of CPUs
# else:
# processors_to_use = 1
# if thread_number:
# if thread_number < processors_to_use:
# thread_number = thread_number
# else:
# thread_number = processors_to_use
# else:
# thread_number = processors_to_use
# threads_to_create = thread_number
# #p((processors_number, processors_to_use, thread_number,threads_to_create, whole_file_number,min_files_pro_thread ))
# while threads_to_create != 1:
# number_of_files_per_thread = whole_file_number/threads_to_create
# if number_of_files_per_thread >= min_files_pro_thread:
# break
# else:
# threads_to_create -=1
# continue
# if threads_to_create == 1:
# break
# return threads_to_create
def get_file_list(path, extention):
if os.path.isdir(path):
txt_files = [file for file in os.listdir(path) if extention in file]
if txt_files:
return (path,txt_files)
        else:
            return False
else:
return False
def instance_info(instance_dict, attr_to_ignore=False, attr_to_flag=False, attr_to_len=False, as_str=False):
#instance_dict = json.loads(instance_dict)
instance_dict = copy.deepcopy(instance_dict)
#p(instance_dict)
instance_dict = OrderedDict(instance_dict)
if attr_to_ignore:
for attr in attr_to_ignore:
if attr in instance_dict:
del instance_dict[attr]
if attr_to_flag:
for attr in attr_to_flag:
if attr in instance_dict:
instance_dict[attr] = "|FLAG|:|True|" if instance_dict[attr] else "|FLAG|:|False|"
if attr_to_len:
for attr in attr_to_len:
if attr in instance_dict:
instance_dict[attr] = "|LEN|:|{}|".format(len(instance_dict[attr])) if isinstance(instance_dict[attr], (str, unicode, list,tuple, bool, int, dict)) else "|LEN|:|NOT_COUNTABLE|"
if as_str:
attr_as_str = ""
for k,v in instance_dict.iteritems():
attr_as_str += "\n ---> {} : {} ".format(k,v)
return attr_as_str
else:
return instance_dict
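# Illustrative usage sketch (object and attribute names are hypothetical):
#   instance_info(vars(some_obj), attr_to_ignore=["logger"],
#                 attr_to_len=["docs"], as_str=True)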
def paste_new_line():
print "\n"
global last_path
last_path = ""
def write_data_to_json(path, data):
global last_path
if last_path == path:
p("WriteDataToJsonError'{}'-File was opened one more time.".format(last_path))
#json_file = io.open(path, "w", encoding="utf-8")
with codecs.open(path, "w", encoding="utf-8") as f:
f.write(unicode(json.dumps(data,
indent=4, sort_keys=True,
separators=(',', ': '), ensure_ascii=False)))
last_path = path
#send_email("[email protected]", "dfghjkl", "fghjkl")
def _send_email(toaddr,Subject, text):
#logger = main_logger("MailSender")
#num = None
if toaddr:
fromaddr = '[email protected]'
#toaddrs = ['[email protected]']
#Date: {time}<CRLF>
#Message-ID: <[email protected]><CRLF>
#From: "Mr. Error Informer" <{fromaddr}><CRLF>
#To: "Mrs. Someone" <{toaddr}><CRLF>
msg = MIMEMultipart()
msg['From'] = fromaddr
msg['To'] = toaddr
msg['Subject'] = Subject
msg.attach(MIMEText(text, 'html'))
server_answer = ''
try_number= 0
while True:
try:
#server = smtplib.SMTP('smtp.gmail.com:25')
server = smtplib.SMTP_SSL('smtp.gmail.com:465')
#server.starttls()
server.login('[email protected]', 'gxtgjjemskhndfag')
server_answer = server.sendmail(fromaddr,toaddr ,msg.as_string())
server.quit()
logger_msg = "Email was send to: '{}'.\n (If you don't get an Email: 1) check if the given Address is correct; 2) or check also in your Spam-Folder;) ".format(toaddr)
logger.info(logger_msg)
except socket.gaierror, e:
if try_number==0:
logger_msg = "\rEmailSendingError: (Socket.gaierror) Smtplib returned the following Problem: ‘{}‘. (Check your Internet Connection or DNS Server.)".format(e)
logger.error(logger_msg)
#print logger_msg
time.sleep(30)
try_number+=1
except Exception, e:
logger_msg = "\nEmailSendingError: SMTP-Server returned the following Problem: ‘{}‘ ".format(e)
#print logger_msg
logger.error(logger_msg)
#return False
finally:
#p(server_answer)
                if server_answer:
                    logger_msg = "\nEmailSendingError: SMTP server returned the following error: ‘{}‘. (See {} for SMTP reply codes.)".format(server_answer, "http://www.supermailer.de/smtp_reply_codes.htm")
                    logger.error(logger_msg)
                    return False
                break
return True
def send_email(toaddr,Subject, text):
#num = None
if toaddr:
if isinstance(toaddr, (list, tuple)):
for addr in toaddr:
if not _send_email(addr,Subject, text):
return False
elif isinstance(toaddr, (str, unicode)):
if not _send_email(toaddr,Subject, text):
return False
else:
logger.error("Given Email Address has illegal structure.")
return False
return True
#\033[1A
# --- end of file: zas_rep_tools/src/utils/helpers.py ---
import platform
import sys
from kitchen.text.converters import getwriter
import inspect
import re
import traceback
if platform.uname()[0].lower() !="windows":
from blessings import Terminal
from nose.plugins.attrib import attr
from cached_property import cached_property
UTF8Writer = getwriter('utf8')
sys.stdout = UTF8Writer(sys.stdout)
t = Terminal()
colores = {'b':'t.bold_on_bright_blue', 'r':'t.bold_on_bright_red', 'g':'t.bold_on_bright_green', 'w':'t.bold_black_on_bright_white', 'm':'t.bold_white_on_bright_magenta', "c":'t.bold_white_on_bright_cyan', "y":'t.bold_white_on_bright_yellow', "k":'t.bold_white_on_bright_black'}
pattern = re.compile(r'\(\s?\((.*?)\).*\).*$')
def p(context_to_print, name_to_print='DEBUGGING', c='w', r=False):
    '''
    Functionality: print function for debugging
    '''
try:
context_to_print = context_to_print.decode("utf-8")
except:
pass
if platform.uname()[0].lower() !="windows":
#t = Terminal()
#colores = {'b':'t.bold_on_bright_blue', 'r':'t.bold_on_bright_red', 'g':'t.bold_on_bright_green', 'w':'t.bold_black_on_bright_white', 'm':'t.bold_white_on_bright_magenta', "c":'t.bold_white_on_bright_cyan', "y":'t.bold_white_on_bright_yellow', "b":'t.bold_white_on_bright_black'}
#colores = {'b':'t.bold_blue', 'r':'t.bold_red', 'g':'t.bold_green', 'w':'t.bold', 'm':'t.bold_magenta'}
if isinstance(context_to_print, tuple):
#p("tzui")
stack = traceback.extract_stack()
filename, lineno, function_name, code = stack[-2]
#var_names = re.compile(r'\((.*?)\).*').search(code).groups()[0]
var_names = pattern.search(code)
if var_names:
var_names = var_names.groups()[0]
var_names = var_names.strip(" ").strip(",").strip(" ")
#var_names = var_names.split("[")
var_names = var_names.split(",")
var_names = [var.strip(" ") for var in var_names]
#print var_names
#print (context_to_print, var_names)
if len(context_to_print)== len(var_names):
temp_elem_to_print = ""
for var_name,var_value in zip(var_names, context_to_print):
var_value = repr(var_value) if r else var_value
var_name = var_name if (("'" not in var_name) and ('"' not in var_name)) else None
#temp_elem_to_print += "\n {start}{var_name}{stop} = '{var_value}'\n".format(var_name=var_name,var_value=var_value,t=t, start=t.bold_black_on_bright_white, stop=t.normal)
temp_elem_to_print += u"\n {start}{var_name}{stop} = {var_value}\n".format(var_name=var_name,var_value=var_value,t=t, start=t.bold_magenta, stop=t.normal)
if temp_elem_to_print:
r = False
#temp_elem_to_print = "\n" + temp_elem_to_print
context_to_print = temp_elem_to_print
#p(context_to_print)
#print context_to_print
else:
print "ERROR(P): No right Number of extracted val_names"
#else:
#p()
context_to_print =repr(context_to_print) if r else context_to_print
print u"\n\n{start} <{0}>{stop} \n {1} \n {start} </{0}>{stop}\n".format( name_to_print, context_to_print, t=t, start=eval(colores[c]), stop=t.normal )
else:
print "p() is not supported for 'Windows'-OS."
def wipd(f):
'''
decorator for nose attr.
'''
return attr('wipd')(f)
def wipdn(f): # now
'''
decorator for nose attr.
'''
return attr('wipdn')(f)
def wipdl(f): #later
'''
decorator for nose attr.
'''
return attr('wipdl')(f)
def wipdo(f): # open
'''
decorator for nose attr.
'''
return attr('wipdo')(f)
# def cached(timeout=None):
# def decorator(func):
# def wrapper(self, *args, **kwargs):
# value = None
# key = '_'.join([type(self).__name__, str(self.id) if hasattr(self, 'id') else '', func.__name__])
# if settings.CACHING_ENABLED:
# value = cache.get(key)
# if value is None:
# value = func(self, *args, **kwargs)
# if settings.CACHING_ENABLED:
# # if timeout=None Django cache reads a global value from settings
# cache.set(key, value, timeout=timeout)
# return value
# return wrapper
# return decorator
# def cachedproperty(func):
# " Used on methods to convert them to methods that replace themselves\
# with their return value once they are called. "
# def cache(*args):
# self = args[0] # Reference to the class who owns the method
# funcname = func.__name__
# ret_value = func(self)
# setattr(self, funcname, ret_value) # Replace the function with its value
# return ret_value # Return the result of the function
# return property(cache)
# def p(func):
# def func_wrapper(name):
# return "<\n{0}>{1}</{0}\n>".format(func.__name__, func(name))
# return func_wrapper
# def markup(func):
# def func_wrapper(name):
# return "<\n{0}>{1}</{0}\n>".format(func.__name__, func(name))
#     return func_wrapper
# --- end of file: zas_rep_tools/src/utils/debugger.py ---
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# : SQL Helper
# Author:
# c(Student) -> {'Egor Savin'}
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
###Programm Info######
from __future__ import generators
import os
import sys
import regex
#import sqlite3
import inspect
import itertools
import logging
import json
from pysqlcipher import dbapi2 as sqlite
import functools
from pyhashxx import hashxx
from datetime import datetime as dt
from zas_rep_tools.src.utils.debugger import p
from zas_rep_tools.src.utils.zaslogger import ZASLogger
#path_to_zas_rep_tools = os.path.dirname(os.path.dirname(os.path.dirname(inspect.getfile(Logger))))
########################################################################################
#########################Exception- Handling#############################################
########################################################################################
sql_supported_data_type =["NULL", "INTEGER", "REAL", "TEXT", "BLOB"]
sql_supported_data_functions = ["date", "time", "datetime", "julianday","strftime"]
def dict_to_list(inp_dict, order_list):
out_list = []
for col in order_list:
try:
out_list.append(inp_dict[col])
except:
out_list.append(None)
return out_list
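# Illustrative usage: missing columns are filled with None:
#   dict_to_list({"id": 1, "text": u"hi"}, ["id", "lang", "text"])  # -> [1, None, u"hi"]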
def log_and_execute(cursor, sql, *args):
s = sql
if len(args) > 0:
# generates SELECT quote(?), quote(?), ...
cursor.execute("SELECT " + ", ".join(["quote(?)" for i in args]), args)
quoted_values = cursor.fetchone()
for quoted_value in quoted_values:
s = s.replace('?', quoted_value, 1)
print "SQL command: " + s
cursor.execute(sql, args)
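# Illustrative usage sketch (table/columns as defined further below in this module):
#   log_and_execute(cursor, "INSERT INTO documents (id, text) VALUES (?,?)", 1, u"hello")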
#__metaclass__ = ErrorCatcher
class DBErrorCatcher(type):
# @classmethod
# def __prepare__(metacls, name, bases, **kargs):
# #kargs = {"myArg1": 1, "myArg2": 2}
# return super().__prepare__(name, bases, **kargs)
def __new__(cls, name, bases, dct):
#p(dct)
if 'DBErrorCatcher' in dct:
#p(dct['DBErrorCatcher'])
if dct['DBErrorCatcher']:
                L = ZASLogger('DBErrorCatcher', level=logging.ERROR, logger_usage=True)
                logger = L.getLogger()
                #logger = errorLogger("DBErrorCatcher")
                for m in dct:
                    if hasattr(dct[m], '__call__'):
                        dct[m] = catch_exception(dct[m],logger)
return type.__new__(cls, name, bases, dct)
#@catch_exception
def catch_exception(f,logger):
    @functools.wraps(f)
    def func(*args, **kwargs):
        #p(type(args[0]), c="r")
        #p(args[0].__dict__)
        try:
            return f(*args, **kwargs)
        except sqlite.DataError as e:
            msg = "sqlite.DataError: '{}'-Function returned the following error: '{}'. ".format(f.__name__, e)
            logger.error(msg)
            sys.exit()
        except sqlite.InternalError as e:
            msg = "sqlite.InternalError: '{}'-Function returned the following error: '{}'. ".format(f.__name__, e)
            logger.error(msg)
            sys.exit()
        except sqlite.IntegrityError as e:
            msg = "sqlite.IntegrityError: '{}'-Function returned the following error: '{}'. ".format(f.__name__, e)
            logger.error(msg)
            sys.exit()
        except sqlite.OperationalError as e:
            msg = "sqlite.OperationalError: '{}'-Function returned the following error: '{}'. ".format(f.__name__, e)
            logger.error(msg)
            sys.exit()
        except sqlite.NotSupportedError as e:
            msg = "sqlite.NotSupportedError: '{}'-Function returned the following error: '{}'. ".format(f.__name__, e)
            logger.error(msg)
            sys.exit()
        except sqlite.ProgrammingError as e:
            msg = "sqlite.ProgrammingError: '{}'-Function returned the following error: '{}'. ".format(f.__name__, e)
            logger.error(msg)
            sys.exit()
        except KeyboardInterrupt:
            logger.warning("KeyboardInterrupt: Process was stopped by the user. The current DB may now contain inconsistent data.")
            sys.exit()
        except Exception as e:
            msg = "OtherExceptions: '{}'-Function returned the following error: '{}'. ".format(f.__name__, e)
            logger.error(msg)
            sys.exit()
    return func
# class defaultlist(list):
# """List returning default value when accessing uninitialized index.
# Original implementation: http://stackoverflow.com/a/8719940/315168
# """
# def __init__(self, fx):
# self._fx = fx
# def __setitem__(self, index, value):
# while len(self) <= index:
# self.append(self._fx())
# list.__setitem__(self, index, value)
# def __getitem__(self, index):
# """Allows self.dlist[0] style access before value is initialized."""
# while len(self) <= index:
# self.append(self._fx())
# return list.__getitem__(self, index)
########################################################################################
########################################################################################
########################################################################################
########################################################################################
########################################################################
##########################corpus#######################################
########################################################################
attributs_names_corpus = [
("id", "INTEGER NOT NULL"),
("name", "TEXT NOT NULL"),
("platform_name", "TEXT NOT NULL"),
("template_name", "TEXT"),
("version", "TEXT"),
("language", "TEXT NOT NULL"),
("created_at", "TEXT NOT NULL"),
("source", "TEXT"),
("license", "TEXT"),
("visibility", "TEXT NOT NULL"),
("typ", "TEXT NOT NULL"),
("tokenizer", "TEXT"),
("sent_splitter", "TEXT"),
("pos_tagger", "TEXT"),
("sentiment_analyzer", "TEXT"),
("preprocession", "INTEGER"),
("del_url", "INTEGER"),
("del_punkt", "INTEGER"),
("del_num", "INTEGER"),
("del_mention", "INTEGER"),
("del_hashtag", "INTEGER"),
("del_html", "INTEGER"),
("case_sensitiv", "INTEGER"),
("lang_classification", "INTEGER"),
("emojis_normalization", "INTEGER"),
("sent_num", "INTEGER"),
("token_num", "INTEGER"),
("doc_num", "INTEGER"),
("text_field_name", "TEXT"),
("id_field_name", "TEXT"),
("locked", "INTEGER"),
]
### Documnets_Table (default)
doc_id_tag = "id"
default_columns_and_types_for_corpus_documents = [
('rowid','INTEGER PRIMARY KEY'),
(doc_id_tag,'INTEGER'),
('text','JSON NOT NULL')
]
default_constraints_for_corpus_documents = [
'CONSTRAINT "Uniq_ID" UNIQUE ("id")',
]
default_index_for_corpus_documents = [
'CREATE UNIQUE INDEX IF NOT EXISTS ix_id ON documents (id);',
]
### Documnets_Table (special)
extended_columns_and_types_for_corpus_documents_twitter = [
('t_created_at','TEXT'),
('t_language','TEXT'),
('t_used_client','TEXT'),
('u_created_at','TEXT'),
('u_description','TEXT'),
('u_favourites','INTEGER'),
('u_followers','INTEGER'),
('u_friends','TEXT'),
('u_id','INTEGER NOT NULL'),
('u_lang','TEXT'),
('u_given_name','TEXT'),
('u_username','TEXT NOT NULL'),
('u_verified','TEXT'),
('u_location','TEXT'),
('is_extended','INTEGER'),
('is_retweet','INTEGER'),
('is_answer','INTEGER'),
]
extended_columns_and_types_for_corpus_documents_blogger =[
('gender','TEXT NOT NULL'),
('age','INTEGER NOT NULL'),
('working_area','TEXT NOT NULL'),
('star_constellation','TEXT NOT NULL'),
]
########################################################################
##############################Stats#####################################
########################################################################
### Info_Table
attributs_names_stats = [
("id", "INTEGER NOT NULL"),
("corpus_id", "INTEGER"),
("name", "TEXT NOT NULL"),
("version", "TEXT"),
("created_at", "TEXT NOT NULL"),
("visibility", "TEXT NOT NULL"),
("typ", "TEXT NOT NULL"),
("db_frozen", "INTEGER"),
("context_lenght", "INTEGER"),
("language", "TEXT"),
("repl_up", "INTEGER"),
("ignore_hashtag", "INTEGER"),
("ignore_url", "INTEGER"),
("ignore_mention", "INTEGER"),
("ignore_punkt", "INTEGER"),
("ignore_num", "INTEGER"),
("force_cleaning", "INTEGER"),
("case_sensitiv", "INTEGER"),
#("text_field_name", "TEXT"),
#("id_field_name", "TEXT"),
("full_repetativ_syntagma", "INTEGER"),
("min_scope_for_indexes", "INTEGER"),
("locked", "INTEGER"),
("pos_tagger", "TEXT"),
("sentiment_analyzer", "TEXT"),
("baseline_delimiter", "TEXT"),
]
### Baseline_Tables (default)
#default_columns_and_types_for_stats_baseline
default_col_baseline_main = (
('syntagma','TEXT PRIMARY KEY NOT NULL'),
('stemmed','TEXT NOT NULL'),
("scope", "INTEGER NOT NULL"),
('occur_syntagma_all','INTEGER NOT NULL'),
)
default_col_baseline_repls_core = (
('occur_repl_uniq','TEXT'),
('occur_repl_exhausted','TEXT'),
)
default_col_baseline_redus_core = (
('occur_redu_uniq','TEXT'),
('occur_redu_exhausted','TEXT'),
)
default_col_baseline_repls_addit = (
('occur_full_syn_repl','TEXT'),
)
default_col_baseline_redus_addit = (
('occur_full_syn_redu','TEXT'),
)
#repl_baseline
default_columns_and_types_for_stats_baseline = list(default_col_baseline_main + default_col_baseline_repls_core +default_col_baseline_redus_core +default_col_baseline_repls_addit +default_col_baseline_redus_addit)
# #repl_baseline
# default_columns_and_types_for_stats_repl_baseline = [
# ('syntagma','TEXT PRIMARY KEY NOT NULL'),
# ('repl_uniq','INTEGER NOT NULL'),
# ('repl_add','INTEGER NOT NULL'),
# ('non_repl','INTEGER NOT NULL'),
# ('repl_ids','TEXT'),
# ]
# #redu_baseline
# default_columns_and_types_for_redu_baseline = [
# ('syntagma','TEXT PRIMARY KEY NOT NULL'),
# ('scope','INTEGER NOT NULL'),
# ('occur','INTEGER NOT NULL'),
# ('redu_ids','TEXT'),
# ]
default_constraints_for_stats_baseline = [
    'CONSTRAINT "Uniq_Syntagma" UNIQUE ("syntagma")',
    'CONSTRAINT "Uniq_Stemmed" UNIQUE ("stemmed")',
]
default_indexes_for_stats_baseline = [
'CREATE UNIQUE INDEX IF NOT EXISTS "ix_syntagma" ON "baseline" ("syntagma");',
#'CREATE UNIQUE INDEX IF NOT EXISTS "ix_scope" ON "baseline" ("scope");',
]
### Replications_Table (default)
tag_normalized_word = 'normalized_word'
default_col_for_rep_core = (
('id','INTEGER PRIMARY KEY'),
)
default_col_for_rep_doc_info = (
('doc_id','INTEGER NOT NULL'),
('redufree_len','JSON NOT NULL'),
)
default_col_for_rep_indexes = (
('index_in_corpus','JSON NOT NULL'),
('index_in_redufree','JSON NOT NULL'),
)
default_col_for_repl_word_info = (
(tag_normalized_word,' TEXT NOT NULL'),
('rle_word','TEXT NOT NULL'),
('stemmed','TEXT NOT NULL'),
)
default_col_for_rep_repl_data = (
('repl_letter','TEXT NOT NULL'),
('repl_length','INTEGER NOT NULL'),
('index_of_repl','INTEGER NOT NULL'),
("in_redu",'JSON'),
)
default_col_for_rep_addit_info_word = (
('pos',' TEXT NOT NULL'),
('polarity','JSON NOT NULL'),
)
default_columns_and_types_for_stats_replications = list(
default_col_for_rep_core +
default_col_for_rep_doc_info+
default_col_for_rep_indexes +
default_col_for_repl_word_info +
default_col_for_rep_repl_data +
default_col_for_rep_addit_info_word
)
# default_columns_and_types_for_stats_replications = [
# ('id','INTEGER PRIMARY KEY'),
# ('doc_id','INTEGER NOT NULL'),
# ('redufree_len','JSON NOT NULL'),
# ('index_in_corpus','JSON NOT NULL'),
# ('index_in_redufree','JSON NOT NULL'),
# (tag_normalized_word,' TEXT NOT NULL'),
# ('rle_word','TEXT NOT NULL'),
# ('stemmed','TEXT NOT NULL'),
# ('repl_letter','TEXT NOT NULL'),
# ('repl_length','INTEGER NOT NULL'),
# ('index_of_repl','INTEGER NOT NULL'),
# ('pos',' TEXT NOT NULL'),
# ('polarity','JSON NOT NULL'),
# ("in_redu",'JSON'),
# ]
default_constraints_for_stats_replications = [
'CONSTRAINT "Uniq_Repl_ID" UNIQUE ("id")',
]
default_indexes_for_stats_replications = [
#'CREATE UNIQUE INDEX IF NOT EXISTS "ix_replID" ON "replications" ("repl_id");',
#'CREATE UNIQUE INDEX IF NOT EXISTS ix_norm_word_repl ON replications (normalized_word);',
]
### Reduplications_Table (default)
default_col_for_rep_redu_data = (
('orig_words','JSON NOT NULL'),
("redu_length",'INTEGER NOT NULL'),
)
default_col_for_redu_word_info = (
    (tag_normalized_word, 'TEXT NOT NULL'),
('stemmed','TEXT NOT NULL'),
)
default_columns_and_types_for_stats_reduplications = list(
default_col_for_rep_core +
default_col_for_rep_doc_info+
default_col_for_rep_indexes +
default_col_for_redu_word_info +
default_col_for_rep_redu_data +
default_col_for_rep_addit_info_word
)
default_constraints_for_stats_reduplications = [
'CONSTRAINT "Uniq_Redu-ID" UNIQUE ("id")',
]
default_indexes_for_stats_reduplications = [
#'CREATE UNIQUE INDEX IF NOT EXISTS "ix_reduID" ON "reduplications" ("redu-id");',
#'CREATE UNIQUE INDEX IF NOT EXISTS ix_norm_word_redu ON reduplications (normalized_word);',
]
default_indexes = {
"corpus": {
"documents":default_index_for_corpus_documents
},
"stats": {
#"baseline":default_indexes_for_stats_baseline,
"replications":default_indexes_for_stats_replications,
"reduplications":default_indexes_for_stats_reduplications,
"baseline":default_indexes_for_stats_baseline,
}
}
default_tables = {
"corpus":{
"info":attributs_names_corpus,
"documents":{
"basic": default_columns_and_types_for_corpus_documents,
"twitter":extended_columns_and_types_for_corpus_documents_twitter,
"blogger":extended_columns_and_types_for_corpus_documents_blogger,
}
},
"stats":{
"info":attributs_names_stats,
"replications":default_columns_and_types_for_stats_replications,
"reduplications":default_columns_and_types_for_stats_reduplications,
"baseline":default_columns_and_types_for_stats_baseline,
}
}
default_constraints = {
"corpus":{
"documents":default_constraints_for_corpus_documents,
},
"stats":{
"replications":default_constraints_for_stats_replications,
"reduplications":default_constraints_for_stats_reduplications,
"repl_baseline":default_constraints_for_stats_baseline,
"redu_baseline":default_constraints_for_stats_baseline,
}
}
########################################################################
#########################Other Helpers##################################
########################################################################
# def ResultIter(cursor, arraysize=1000):
# 'An iterator that uses fetchmany to keep memory usage down'
# while True:
# results = cursor.fetchmany(arraysize)
# if not results:
# break
# for result in results:
# yield result
def columns_and_values_to_str(columns_names,values):
    '''
    Build a "column=value" assignment string (without datatypes),
    e.g. for an UPDATE ... SET clause.
    '''
if isinstance(columns_names, list) and isinstance(values, list):
str_columns_and_values = ""
i=1
for column, value in zip(columns_names,values):
if value is None:
value = "NULL"
if value == "NULL":
if len(columns_names) > 1:
if i < len(columns_names):
str_columns_and_values += "\n{}={}, ".format(column,value)
else:
str_columns_and_values += "\n{}={} ".format(column,value)
i+=1
elif len(columns_names) == 1:
str_columns_and_values += "{}={}".format(column,value)
else:
return False
else:
if len(columns_names) > 1:
if i < len(columns_names):
str_columns_and_values += "\n{}='{}', ".format(column,value)
else:
str_columns_and_values += "\n{}='{}' ".format(column,value)
i+=1
elif len(columns_names) == 1:
str_columns_and_values += "{}='{}'".format(column,value)
else:
return False
else:
return False
return str_columns_and_values
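# Example (illustrative values):
#   columns_and_values_to_str(["language", "visibility"], ["de", None])
# returns "\nlanguage='de', \nvisibility=NULL " -- usable as an
# UPDATE ... SET clause.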
def values_to_placeholder(number):
output = ""
for i in range(number):
if i < number-1:
output += "?,"
elif i== number-1:
output += "?"
return output
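# Example: values_to_placeholder(3) returns "?,?,?", the placeholder part of a
# parametrized statement such as "INSERT INTO documents VALUES (?,?,?)".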
def columns_and_types_in_tuples_to_str(attributs_names):
    '''
    Used for table initialization (with datatypes).
    '''
if isinstance(attributs_names, list):
str_attributs_names = ""
i=1
for attribut in attributs_names:
if not isinstance(attribut, tuple):
return False
if len(attributs_names) > 1:
if i < len(attributs_names):
str_attributs_names += "\n{} {}, ".format(attribut[0], attribut[1])
else:
str_attributs_names += "\n{} {} ".format(attribut[0], attribut[1])
i+=1
elif len(attributs_names) == 1:
str_attributs_names += "\n{} {}\n".format(attribut[0], attribut[1])
else:
return False
else:
return False
return str_attributs_names
# def attributs_to_str(attributs_names):
# '''
# without Datatypes
# '''
# if isinstance(attributs_names, list):
# str_attributs_names = ""
# i=1
# for attribut in attributs_names:
# if isinstance(attribut, unicode):
# attribut = attribut.encode('utf8')
# if not isinstance(attribut, tuple):
# return False
# if len(attributs_names) > 1:
# if i < len(attributs_names):
# str_attributs_names += "\n{}, ".format(attribut[0])
# else:
# str_attributs_names += "\n{} ".format(attribut[0])
# i+=1
# elif len(attributs_names) == 1:
# str_attributs_names += "{}".format(attribut[0])
# else:
# return False
# else:
# return False
# return str_attributs_names
def list_of_select_objects_to_str(inputobj):
'''
without Datatypes
'''
    try:
        inputobj + "" # raises TypeError unless inputobj is already a string
        outputstr = " {}".format(inputobj)
except TypeError:
outputstr = ""
i=1
for obj in inputobj:
if isinstance(obj, unicode):
obj = obj.encode('utf8')
if len(inputobj) > 1:
if i < len(inputobj):
outputstr += "\n{}, ".format(obj)
else:
outputstr += "\n{} ".format(obj)
i+=1
elif len(inputobj) == 1:
outputstr += "{}".format(obj)
else:
return False
return outputstr
# def columns_list_to_str(inputobj):
# '''
# without Datatypes
# '''
# if isinstance(inputobj, list):
# outputstr = ""
# i=1
# for column in inputobj:
# if isinstance(column, unicode):
# column = column.encode('utf8')
# if len(inputobj) > 1:
# if i < len(inputobj):
# outputstr += "\n{}, ".format(column)
# else:
# outputstr += "\n{} ".format(column)
# i+=1
# elif len(inputobj) == 1:
# outputstr += "{}".format(column)
# else:
# return False
# elif isinstance(inputobj, str):
# outputstr += " {}".format(inputobj)
# else:
# return False
# return outputstr
def constraints_list_to_str(constraints):
    '''
    Serialize a list of constraint definitions (without datatypes) into the
    tail of a CREATE TABLE statement. Every constraint is prefixed with a
    comma, so the result can be appended directly to the column definitions.
    '''
    if isinstance(constraints, list):
        if not constraints:
            return False
        str_constraints = ""
        for constrain in constraints:
            str_constraints += ",\n{}".format(constrain)
        return str_constraints
    else:
        return False
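# A minimal sketch (hypothetical demo helper, not part of the public API) of
# how the schema dicts above compose into a CREATE TABLE statement via the
# serializers in this module; the actual creation code used elsewhere in the
# package may differ:
def _demo_create_table_sql(table="replications"):
    '''Build a CREATE TABLE statement for one of the stats tables.'''
    columns_sql = columns_and_types_in_tuples_to_str(default_tables["stats"][table])
    constraints_sql = constraints_list_to_str(default_constraints["stats"][table])
    return 'CREATE TABLE IF NOT EXISTS "{}" ({}{});'.format(table, columns_sql, constraints_sql)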
def clean_value(value):
    if isinstance(value, (str,unicode)):
        newval = value.replace('\n', '\\n')
        newval = newval.replace('"', '\'')
        newval = newval.replace('\r', '\\r')
        return newval
    else:
        return False
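# Example: clean_value('say "hi"\nagain') returns the string say 'hi'\nagain
# (double quotes become single quotes and a real newline becomes the two
# characters backslash+n); non-string input returns False.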
def values_to_tuple(values, mode):
values_as_list = list()
if mode == "one":
try:
for value in values:
#values_as_list.append(clean_value(value))
if isinstance(value, (list,dict,tuple)):
values_as_list.append(json.dumps(value))
else:
values_as_list.append(value)
except:
return False
else:
try:
for row in values:
temp_row = []
for item in row:
if isinstance(item, (list,dict,tuple)):
temp_row.append(unicode(json.dumps(item)))
else:
temp_row.append(item)
values_as_list.append(tuple(temp_row))
except:
return False
return tuple(values_as_list)
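# Example (mode="one"): values_to_tuple([1, u"text", [1, 2]], "one") returns
# (1, u'text', '[1, 2]') -- nested containers are serialized to JSON strings so
# sqlite can store them in TEXT/JSON columns. values_to_list() below behaves
# the same way but returns lists instead of tuples.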
def values_to_list(values, mode):
values_as_list = list()
if mode == "one":
try:
for value in values:
#values_as_list.append(clean_value(value))
if isinstance(value, (tuple,list,dict)):
values_as_list.append(json.dumps(value))
else:
values_as_list.append(value)
except:
return False
else:
try:
for row in values:
temp_row = []
for item in row:
if isinstance(item, (tuple,list,dict)):
temp_row.append(unicode(json.dumps(item)))
else:
temp_row.append(item)
values_as_list.append(temp_row)
except:
return False
return values_as_list
# def where_condition_to_str(inputobj, connector="AND"):
# outputstr = ""
# i=0
# if isinstance(inputobj, list):
# for item in inputobj:
# i+=1
# if i < len(inputobj):
# outputstr += " {} {}".format(item, connector)
# else:
# outputstr += " {} ".format(item)
# elif isinstance(inputobj, str):
# outputstr += " {}".format(inputobj)
# else:
# return False
# return outputstr
def where_condition_to_str(inputobj, connector="AND"):
#outputstr = ""
try:
inputobj.decode
return u" {}".format(inputobj)
except:
#connector = u" {} ".format(connector)
return " {} ".format(connector).join(inputobj)
def get_file_name(prjFolder,first_id ,DBname, language,visibility, typ, fileName=False, platform_name=False, second_id=False,encrypted=False, rewrite=False, stop_if_db_already_exist=False):
#p("1234", c="m")
status = True
i=0
while status:
        #Created FileName
        if not fileName:
            # Build the name from its parts: id(s), type, optional platform,
            # db name, language, visibility, encryption marker and -- on a
            # name clash -- a numeric suffix.
            parts = [first_id]
            if second_id:
                parts.append(second_id)
            parts.append(typ)
            if platform_name:
                parts.append(platform_name)
            parts += [DBname, language, visibility]
            parts.append("encrypted" if encrypted else "plaintext")
            if i == 1:
                parts.append(i)
            fileName = "_".join(str(part) for part in parts)
else:
if i>0:
fileName_without_extension = os.path.splitext(fileName)[0]
pattern = r"^(.*?)(_[0-9]*)$"
matched = regex.findall(pattern,fileName_without_extension)
#sys.exit()
if matched:
fileName_without_extension = matched[0][0]
fileName = fileName_without_extension+"_"+str(i)
            if i > 10000:
                print "db_helpers.get_file_name(): Aborting to avoid a never-ending loop!"
                return False, False
i+=1
        #Add Extension
        if not fileName.endswith(".db"):
fileName = fileName+".db"
#Create path_to_db
path_to_db = os.path.join(prjFolder,fileName)
if stop_if_db_already_exist:
if os.path.isfile(path_to_db):
status = False
return fileName,None
if rewrite:
if os.path.isfile(path_to_db):
status = False
#p((fileName,path_to_db))
return fileName,path_to_db
else:
if not os.path.isfile(path_to_db):
status = False
return fileName,path_to_db
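# Example (hypothetical call): on the first free name,
#   get_file_name("/prj", 7614, "bloggerCorpus", "en", "extern", "corpus",
#                 platform_name="blogs")
# returns ("7614_corpus_blogs_bloggerCorpus_en_extern_plaintext.db",
#          "/prj/7614_corpus_blogs_bloggerCorpus_en_extern_plaintext.db").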
def get_file_name_for_empty_DB(prjFolder,DBname, fileName=False, encrypted=False, rewrite=False, stop_if_db_already_exist=False):
status = True
i=0
while status:
#Created FileName
if not fileName:
if encrypted:
if i==1:
fileName = "{}_encrypted_{}".format(DBname,i)
else:
fileName = "{}_encrypted".format(DBname)
else:
if i==1:
fileName = "{}_plaintext_{}".format(DBname,i)
else:
fileName = "{}_plaintext".format(DBname)
else:
if i>0:
fileName_without_extension = os.path.splitext(fileName)[0]
pattern = r"^(.*?)(_[0-9]*)$"
matched = regex.findall(pattern,fileName_without_extension)
#sys.exit()
if matched:
fileName_without_extension = matched[0][0]
fileName = fileName_without_extension+"_"+str(i)
if i > 10000:
print "db_helpers.get_file_name_for_empty_DB(): Aborting!!! To avoid never-ending loop"
#return False
sys.exit()
i+=1
        #Add Extension
        if not fileName.endswith(".db"):
fileName = fileName+".db"
#Create path_to_db
path_to_db = os.path.join(prjFolder,fileName)
#Check if this file already exist. and if yes, than change the name
if stop_if_db_already_exist:
if os.path.isfile(path_to_db):
status = False
return fileName, None
if rewrite:
if os.path.isfile(path_to_db):
status = False
#p((fileName,path_to_db))
return fileName,path_to_db
else:
if not os.path.isfile(path_to_db):
status = False
return fileName,path_to_db
#int(str(number)[2:5])
def create_id(name,lang, typ, visibility):
time_now = dt.now().strftime('%H:%M:%S')
return str(hashxx("{}{}{}{}{}".format(name[0], lang[0], typ[0], visibility[0], time_now)))[2:6]
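# Example (hypothetical call; the output varies with the current time):
#   create_id("streamed", "de", "corpus", "intern")
# might return something like "5821" -- four digits cut out of the hashxx value.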
# #int(str(number)[2:5])
# def create_id(name,lang, typ, visibility, corpus_id=False, stats_id = False):
# time_now = datetime.now().strftime('%H:%M:%S')
# if stats_id:
# if corpus_id:
# return "{}_{}".format(corpus_id,stats_id)
# else:
# return False
# else:
# if corpus_id:
# hashfunc_as_str = str(hashxx("{}{}{}{}{}".format(name[0], lang[0], typ[0], visibility[0],time_now)))[2:6]
# return "{}_{}".format(corpus_id,hashfunc_as_str)
# else:
# return str(hashxx("{}{}{}{}{}".format(name[0], lang[0], typ[0], visibility[0], time_now)))[2:6]
# def make_acronyme(full_name):
# '''
# Rules:
# take all first 3 consonant non-repetitive from the word
# Example:
# Full_name = "twitter"
# acronyme = "twt"
# '''
# acronyme = ''
# consonants = set("bcdfghjklmnpqrstvwxyz")
# i=0
# if isinstance(full_name, (str, unicode)):
# for char in full_name:
# if char in consonants:
# if len(acronyme)==0:
# acronyme += char
# else:
# if acronyme[-1]!= char:
# acronyme += char
# i+=1
# if i >=3:
# return acronyme
# else:
# return False
# return acronyme | zas-rep-tools | /zas-rep-tools-0.2.tar.gz/zas-rep-tools-0.2/zas_rep_tools/src/utils/db_helper.py | db_helper.py |
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# : XXX{Information about this code}XXX
# Author:
# c(Developer) -> {'Egor Savin'}
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
###Programm Info######
#
#
#
#
#
import json
from collections import defaultdict
from nltk.tokenize import TweetTokenizer
import zas_rep_tools.src.utils.db_helper as db_helper
class ConfigerData(object):
def __init__(self):
self._suported_user_info = ["error_tracking", "project_folder", "twitter_creditials", "email"]
self._path_to_testdbs = "data/tests_data/testDBs/testFolder"
self._path_to_testsets = {
"blogger":"data/tests_data/Corpora/BloggerCorpus",
"twitter":"data/tests_data/Corpora/TwitterCorpus"
}
self._types_folder_names_of_testsets = {
"txt":{
"highrepetativ":"txt/HighRepetativSubSet",
"fake":"txt/SmallFakeSubset",
"small":"txt/SmallSubset"
},
"csv":{
"highrepetativ":"csv/HighRepetativSubSet",
"fake":"csv/SmallFakeSubset",
"small":"csv/SmallSubset"
},
"xml":{
"highrepetativ":"xml/HighRepetativSubSet",
"fake":"xml/SmallFakeSubset",
"small":"xml/SmallSubset"
},
"json":{
"highrepetativ":"json/HighRepetativSubSet",
"fake":"json/SmallFakeSubset",
"small":"json/SmallSubset"
},
"sqlite":{
"highrepetativ":"sqlite/HighRepetativSubSet",
"fake":"sqlite/SmallFakeSubset",
"small":"sqlite/SmallSubset"
}
}
self._test_dbs = {
"plaintext":{
"blogger":{
"en":{
"corpus":"7614_corpus_blogs_bloggerCorpus_en_extern_plaintext.db",
"stats":"7614_3497_stats_bloggerCorpus_en_extern_plaintext.db"
},
"de":{
"corpus":"7614_corpus_blogs_bloggerCorpus_de_extern_plaintext.db",
"stats":"7614_3497_stats_bloggerCorpus_de_extern_plaintext.db"
},
"ru":{},
"test":{
"corpus":"7614_corpus_blogs_bloggerCorpus_test_extern_plaintext.db",
"stats":"7614_3497_stats_bloggerCorpus_test_extern_plaintext.db"
},
},
"twitter":{
"en":{},
"de":{
#"corpus":"9588_corpus_twitter_streamed_de_intern_plaintext.db",
#"stats": "9588_6361_stats_streamed_de_intern_plaintext.db"
},
"ru":{},
"test":{},
}
},
"encrypted":{
"blogger":{
"en":{
#"corpus":"7614_corpus_blogs_bloggerCorpus_en_extern_encrypted.db",
#"stats":"7614_3497_stats_bloggerCorpus_en_extern_encrypted.db"
},
"de":{},
"ru":{},
"test":{},
},
"twitter":{
"en":{},
"de":{
"corpus":"9588_corpus_twitter_streamed_de_intern_encrypted.db",
"stats": "9588_6361_stats_streamed_de_intern_encrypted.db"
},
"ru":{},
"test":{},
}
}
}
self._init_info_data = {
"blogger":{ "id":{"corpus":7614, "stats":3497},
"name":"bloggerCorpus",
"platform_name":"blogs",
"version":"1",
"language":"en",
"created_at":None,
"source":"LanguageGoldMine",
"license":"CreativCommon",
"visibility":"extern",
"template_name":"blogger",
"encryption_key": {"corpus":"corpus", "stats":"stats"}
},
"twitter":{ "id":{"corpus":9588, "stats":6361},
"name":"streamed",
"platform_name":"twitter",
"version":"1",
"language":"de",
"created_at":None,
"source":"Twitter API",
"license":"Twitter Developer Agreement",
"visibility":"intern",
"template_name":"twitter",
"encryption_key": {"corpus":"corpus", "stats":"stats"}
},
}
self._columns_in_doc_table = {
"blogger": [column[0] for column in db_helper.default_columns_and_types_for_corpus_documents+db_helper.extended_columns_and_types_for_corpus_documents_blogger],
"twitter": [column[0] for column in db_helper.default_columns_and_types_for_corpus_documents+db_helper.extended_columns_and_types_for_corpus_documents_twitter]
}
#p(self._columns_in_doc_table )
self._columns_in_info_tabel = {
"corpus": [column[0] for column in db_helper.attributs_names_corpus],
"stats": [column[0] for column in db_helper.attributs_names_stats]
}
self._columns_in_stats_tables = {
"redu": [column[0] for column in db_helper.default_columns_and_types_for_stats_reduplications],
"repl" : [column[0] for column in db_helper.default_columns_and_types_for_stats_replications],
"baseline": [column[0] for column in db_helper.default_columns_and_types_for_stats_baseline],
#"baseline":{
# "repl":[column[0] for column in db_helper.default_columns_and_types_for_stats_repl_baseline],
# "redu":[column[0] for column in db_helper.default_columns_and_types_for_redu_baseline]
# }
}
self._tokenizer = TweetTokenizer()
self._lang_order = ["en", "de", "ru", "other"]
self._text_elements_collection = {
"en":[
"I loved it. But it was also verrrryyyyy vvveRRRRRRrry very piiiiiiiiity pity pity piiitttyyy for me...... :-((((( @real_trump #sheetlife #readytogo http://www.absurd.com", # [u'It', u'was', u'verrrryyyyy', u'vvveRRRRRRrry', u'very', u'piiiiiiiiity', u'for', u'me', u'-', u'(', u'(', u'(', u'@real_trump', u'#sheetlife', u'#readytogo', u'http://www.absurd.com']
"glaaaaaaad to seeeeeeeee you -))))", #[u'glaaaaaaad', u'to', u'seeeeeeeee', u'you', u'-', u')', u')', u')']
"a baddddd bad bbbbbbbaaaaaad bbbbaaaaddddd baaaaaaad news, which we can not accept. -(((( 😫😫😫😫 😫😫😫😫😫 😫😫😫 :-((((( #sheetlife #sheetlife http://www.noooo.com", #[u'a', u'baddddd', u'bad', u'bbbbbbbaaaaaa', u'bbbbaaaaddddd', u'baaaaaaad', u'news', u'-', u'(', u'(', u'(', u'\ud83d', u'\ude2b', u'\ud83d', u'\ude2b', u'\ud83d', u'\ude2b', u'\ud83d', u'\ude2b', u'\ud83d', u'\ude2b', u'\ud83d', u'\ude2b', u'\ud83d', u'\ude2b', u'\ud83d', u'\ude2b', u'\ud83d', u'\ude2b', u'\ud83d', u'\ude2b', u'\ud83d', u'\ude2b', u'\ud83d', u'\ude2b', u'#sheetlife', u'#sheetlife', u'http://www.noooo.com']
"Tiny tiny tiny tiny tiny tiny mooooooodelllllll, which we can use for explain a biiig biiiiiiiiiiiiiiig things.", #[u'Tiny', u'tiny', u'tiny', u'tiny', u'tiny', u'tiny', u'mooooooodelllllll']
"Tiny model, but a big big big explaaaaanation. Riiiiiight? What do youuuuuu think about it???? 111111 😫😫😫😫 11111111. Bbbbbuuuutttt buuuuutttt yyyyyyou yoooooou bbbbbbut bbbbbutttt bbbbbuuuuut yyyoouuuu",
"tinnnyy tiny tiny surprise. Bbbbbut buuuuut yyyyyyou yoooooou bbbbbbut bbbbbut bbbbbut yyyoouuuu 😀😀😀😀😀🌈🌈🌈🌈🌈🌈🌈😀😀😀😀😀🌈🌈🌈🌈🌈🌈🌈😀😀😀😀😀",
"it was really bad surprise for me 😫😫😫😫, buuuuuuuuuut i really reallly reeeeeallllyyy liked it :P =)))))))))) 😀😀😀😀😀🌈🌈🌈🌈🌈🌈🌈😀",
],
"de":[
"Klitze kliiiitzeeeeeee kleEEEEinnnnne kleinnne Überaschung. Trotzdem hat sie mich glücklich gemacht! :-)))) -))) 😀😀😀😀😀-))) -)))", # [u'Klitze', u'kliiiitze', u'kleEEEEine', u'\xdcberaschung', u'-', u')', u')', u')', u'-', u')', u')', u')']
"einen wunderschönen Taaaaaagggggg wünsche ich euch. Geniesssstt geniiiiiessssssssttttt das Leben. Bleeeeeeeeibt bleeeeibt Huuuuuuuuuuuungrig. baseline baseline baseline in in in in baseline baseline baseline in in in in", #[u'einnennnnnn', u'toooolllleeeeen', u'Taaaaaagggggg', u'\ud83d', u'\ude00', u'\ud83d', u'\ude00', u'\ud83d', u'\ude00', u'\ud83d', u'\ude00', u'\ud83d', u'\ude00', u'\ud83c', u'\udf08', u'\ud83c', u'\udf08', u'\ud83c', u'\udf08', u'\ud83c', u'\udf08', u'\ud83c', u'\udf08', u'\ud83c', u'\udf08', u'\ud83c', u'\udf08'], [u'\ud83d', u'\ude00', u'\ud83d', u'\ude00', u'\ud83d', u'\ude00', u'\ud83d', u'\ude00', u'\ud83d', u'\ude00', u'\ud83d', u'\ude00', u'\ud83d', u'\ude00', u'\ud83d', u'\ude00', u'\ud83d', u'\ude00']
"eine klitzeeee kleeeeeine Überrrraschung @schönesleben #machwasdaraus #bewegedeinArsch https://www.freiesinternet.de Besser kannnnnn kaaaannnnn ess. Kleineeeesssssss kleinnnneeessss kleeeeiiiiinnneesss Mädchennnnn..... Kleinereeeee kleineeerreeeee Auswahhhllll. klitz kliiiitz kliiiitzzz kleeeiiinnn kleinnnnn. klitzessss kliiitzesss kleinnnees kleinessss",
"eine klitzeeee kleine Sache. Die aber trotzdem wichtiiiiiiiig isssssst! Weil es ja eine kleeeeeiinnnneeeee Überrrrraschung ist. 11111 2222 33333 4444 55555 6 . Kleineeeesssssss kleinnnneeessss kleeeeiiiiinnneesss Mädchennnnn.....",
"Eine klitze klitze klitze klitze kleine Überrrraschung, die ich mal gerne hatte. 111111 😫😫😫😫 11111111 Du meintest, es war so eineeee kleeeeiiiiinnnneeeeeeee Übeeeerrrrraaaschunnnnnnggg. ",
],
"ru":[
"Oчень оооооченнннь ооооччччееееннннньььь хорошееего дняяяяяяяяя", #[u'O\u0447\u0435\u043d\u044c', u'\u043e\u043e\u043e\u0447\u0435\u043d\u043d\u043d\u044c', u'\u043e\u043e\u043e\u0447\u0447\u0447\u0435\u0435\u0435\u043d\u043d\u043d\u044c\u044c\u044c', u'\u0445\u043e\u0440\u043e\u0448\u0435\u0435\u0435\u0433\u043e', u'\u0434\u043d\u044f\u044f\u044f']
"самммооово сааамово приятногооооо прииииииииятного ужииииина 🥡🍽🍽🍽🍽🍽🍽🍽🍽🍽🍽", #[u'\u0441\u0430\u043c\u043c\u043c\u043e\u043e\u043e\u0432\u043e', u'\u0441\u0430\u0430\u0430\u043c\u043e\u0432\u043e', u'\u043f\u0440\u0438\u044f\u0442\u043d\u043e\u0433\u043e\u043e\u043e', u'\u043f\u0440\u0438\u0438\u0438\u044f\u0442\u043d\u043e\u0433\u043e', u'\u0443\u0436\u0438\u0438\u0438\u043d\u0430', u'\ud83e', u'\udd61', u'\ud83c', u'\udf7d', u'\ud83c', u'\udf7d', u'\ud83c', u'\udf7d', u'\ud83c', u'\udf7d', u'\ud83c', u'\udf7d', u'\ud83c', u'\udf7d', u'\ud83c', u'\udf7d', u'\ud83c', u'\udf7d', u'\ud83c', u'\udf7d', u'\ud83c', u'\udf7d']
],
"other":[
"اللغة العربية رائعة",
],
}
self._counted_reps = {
"en": {
u'#shetlife': {'baseline': 3, 'redu': [1, 2]},
u'-(': {'baseline': 1, 'repl': (1, 1)},
u'-)': {'baseline': 1, 'repl': (1, 1)},
u'.': {'baseline': 7, 'repl': (1, 1)},
u'1': {'baseline': 2, 'repl': (2, 2)},
u':-(': {'baseline': 2, 'repl': (2, 2)},
u'=)': {'baseline': 1, 'repl': (1, 1)},
u'?': {'repl': (1, 1), 'baseline': 2},
u'bad': {'baseline': 6, 'redu': (1, 5), 'repl': (4, 7)},
u'big': {'baseline': 5, 'redu': (2, 5), 'repl': (2, 2)},
u'but': {'baseline': 13, 'redu': (4, 10), 'repl': (11, 16)},
u'explanation': {'baseline': 1, 'repl': (1, 1)},
u'glad': {'baseline': 1, 'repl': (1, 1)},
u'model': {'baseline': 2, 'repl': (1, 2)},
u'pity': {'baseline': 4, 'redu': (1, 4), 'repl': (2, 4)},
u'realy': {'baseline': 4, 'redu': (1, 3), 'repl': (2, 4)},
u'right': {'baseline': 1, 'repl': (1, 1)},
u'se': {'baseline': 1, 'repl': (1, 1)},
u'tiny': {'baseline': 10, 'redu': (2, 9), 'repl': (1, 1)},
u'very': {'baseline': 3, 'redu': (1, 3), 'repl': (2, 4)},
u'you': {'baseline': 8, 'redu': (2, 4), 'repl': (7, 9)},
u'\U0001f308': {'baseline': 3, 'repl': (3, 3)},
u'\U0001f600': {'baseline': 5, 'repl': (4, 4)},
u'\U0001f62b': {'baseline': 3, 'repl': (3, 3)}
},
}
self.right_rep_num = {
"en": {
"repls":sum([data["repl"][1] for word, data in self._counted_reps["en"].items() if "repl" in data ]),
"redus":sum([data["redu"][0] for word, data in self._counted_reps["en"].items() if "redu" in data ]),
},
}
def _row_text_elements(self, lang="all"):
if lang == "test":
lang ="all"
if lang == "all":
return [text_item for lang in self._lang_order for text_item in self._text_elements_collection[lang]]
else:
if lang in self._text_elements_collection:
return self._text_elements_collection[lang]
else:
self.logger.error("No test-text-elements exist for given language: '{}'.".format(lang))
def _text_elements(self, token=True, unicode_str=True, lang="all"):
if lang == "test":
lang ="all"
if token:
if unicode_str:
#sys.exit()
return [t_elem.split() for t_elem in self._text_elements(token=False, unicode_str=True,lang=lang)]
#return [self._tokenizer.tokenize(t_elem) for t_elem in self._text_elements(token=False, unicode_str=True,lang=lang)]
#return self._text_elements_as_unicode_str_tokenized
else:
return [t_elem.split() for t_elem in self._text_elements(token=False, unicode_str=False,lang=lang)]
#return [self._tokenizer.tokenize(t_elem) for t_elem in self._text_elements(token=False, unicode_str=False,lang=lang)]
#return self._text_elements_as_byte_str_tokenized
elif not token:
if unicode_str:
#for t_elem in self._row_text_elements(lang=lang):
# p((t_elem, type(t_elem)))
#sys.exit()
#json.loads(t_elem)
return [json.loads(r'"{}"'.format(t_elem)) for t_elem in self._row_text_elements(lang=lang) ]
#return [t_elem.decode("utf-8") for t_elem in self._row_text_elements(lang=lang) ]
#return self._text_elements_as_unicode_str_untokenized
else:
return self._row_text_elements(lang=lang)
def _docs_row_values(self,token=True, unicode_str=True, lang="all"):
if lang == "test":
lang ="all"
text_element = self._text_elements(token=token, unicode_str=unicode_str, lang=lang)
#self.logger.critical(text_element)
if lang == "en":
text_element = self._text_elements(token=token, unicode_str=unicode_str, lang=lang)
docs_row_values_en = {
"blogger":[
[1, 1111, text_element[0], u'w', 37, u'IT', u'lion' ],
[2, 2222, text_element[1], u'm', 23, u'Care', u'fish' ],
[3, 3333, text_element[2], u'w', 22, u'Finance', u'aquarius' ],
[4, 4444, text_element[3], u'm', 27, u'IT', u'gemini' ],
[5, 5555, text_element[4], u'w', 35, u'Air Industry', u'lion' ],
[6, 6666, text_element[5], u'm', 21, u'Industry', "crawfish" ],
[7, 7777, text_element[6], u'w', 37, u'IT', u'lion' ],
],
"twitter":[
[1 ,1111, text_element[0], u"20/06/2014", u"en", u"Iphone", u"22/03/2014", u"Die Welt ist schön", 45, 76, 765, 34567890, u"en", u"MotherFucker", u"realBro", u"True", "planet Earth", False, False, False ],
[2 ,2222, text_element[1], u"03/02/2013", u"en", u"Iphone", u"29/06/2012", u"Kein Plan", 45, 76, 765, 34567890, u"en", u"MotherFucker", u"realBro", u"True", "planet Earth", False, False, False ],
[3 ,3333, text_element[2], u"21/06/2014", u"en", u"WebAPI", u"21/07/2017", u"Neiiiiin", 45, 76, 765, 34567890, u"en", u"MotherFucker", u"realBro", u"True", "planet Earth", False, False, False ],
[4 ,4444, text_element[3], u"20/04/2014", u"fr", u"Iphone", u"12/06/2011", u"Nööö", 45, 76, 765, 98765, u"en", u"Lighter", u"LivelyLife", u"True", "planet Earth", False, False, False ],
[5 ,5555, text_element[4], u"20/06/2011", u"ru", u"Android", u"12/06/2012", u"Was willste, alter?", 45, 76, 765, 98765, u"en", u"Lighter", u"LivelyLife", u"True", "planet Earth", False, False, False ],
[6 ,6666, text_element[5], u"30/09/2014", u"ru", u"Iphone", u"20/03/2013", u"Neiiiiin", 45, 76, 765, 98765, u"en", u"Lighter", u"LivelyLife", u"True", "planet Earth", False, False, False ],
[7 ,7777, text_element[6], u"01/06/2014", u"de", u"Android", u"22/06/2011", u"Neiiiiin", 45, 76, 765, 98765, u"en", u"Lighter", u"LivelyLife", u"True", "planet Earth", False, False, False ],
]
}
return docs_row_values_en
elif lang == "de":
text_element = self._text_elements(token=token, unicode_str=unicode_str, lang=lang)
#p((text_element))
docs_row_values_de = {
"blogger":[
[8, 8888, text_element[0], u'm', 23, u'Care', u'fish' ],
[9, 9999, text_element[1], u'w', 22, u'Finance', u'aquarius' ],
[10, 10000, text_element[2], u'w', 35, u'Air Industry', u'lion' ],
[11, 11111, text_element[3], u'm', 21, u'Industry', "crawfish" ],
[12, 12222, text_element[4], u'w', 37, u'IT', u'lion' ],
],
"twitter":[
[8 ,8888, text_element[0], u"20/06/2007", u"de", u"Iphone", u"20/02/2009", u"Jööööö", 45, 76, 765, 98765, u"en", u"Lighter", u"LivelyLife", u"True", "planet Earth", False, False, False ],
[9 ,9999, text_element[1], u"20/04/2014", u"it", u"WebAPI", u"01/06/2011", u"Neiiiiin", 45, 76, 765, 98765, u"en", u"Lighter", u"LivelyLife", u"True", "planet Earth", False, False, False ],
[10 ,10000, text_element[2], u"21/06/2014", u"en", u"WebAPI", u"21/07/2017", u"Neiiiiin", 45, 76, 765, 34567890, u"en", u"MotherFucker", u"realBro", u"True", "planet Earth", False, False, False ],
[11 ,11111, text_element[3], u"20/04/2014", u"fr", u"Iphone", u"12/06/2011", u"Nööö", 45, 76, 765, 98765, u"en", u"Lighter", u"LivelyLife", u"True", "planet Earth", False, False, False ],
[12 ,12222, text_element[4], u"20/06/2011", u"ru", u"Android", u"12/06/2012", u"Was willste, alter?", 45, 76, 765, 98765, u"en", u"Lighter", u"LivelyLife", u"True", "planet Earth", False, False, False ],
]
}
return docs_row_values_de
elif lang == "ru":
text_element = self._text_elements(token=token, unicode_str=unicode_str, lang=lang)
docs_row_values_ru = {
"blogger":[
[13, 13333, text_element[0], u'm', 23, u'Care', u'fish' ],
[14, 14444, text_element[1], u'w', 22, u'Finance', u'aquarius' ],
],
"twitter":[
[13 ,13333, text_element[0], u"30/09/2014", u"ru", u"Iphone", u"20/03/2013", u"Neiiiiin", 45, 76, 765, 98765, u"en", u"Lighter", u"LivelyLife", u"True", "planet Earth", False, False, False ],
[14 ,14444, text_element[1], u"01/06/2014", u"de", u"Android", u"22/06/2011", u"Neiiiiin", 45, 76, 765, 98765, u"en", u"Lighter", u"LivelyLife", u"True", "planet Earth", False, False, False ],
]
}
return docs_row_values_ru
elif lang == "other":
text_element = self._text_elements(token=token, unicode_str=unicode_str, lang=lang)
docs_row_values_other = {
"blogger":[
[15, 15555, text_element[0], u'w', 22, u'Finance', u'aquarius' ],
],
"twitter":[
[15 ,16666, text_element[0], u"20/04/2014", u"it", u"WebAPI", u"01/06/2011", u"Neiiiiin", 45, 76, 765, 98765, u"en", u"Lighter", u"LivelyLife", u"True", "planet Earth", False, False, False ]
]
}
return docs_row_values_other
elif lang == "all":
temp_dict = defaultdict(list)
for language in ["en", "de", "ru", "other"]:
output_for_current_lang = self._docs_row_values(token=token, unicode_str=unicode_str, lang=language)
for k,v in output_for_current_lang.iteritems():
temp_dict[k] += v
return temp_dict
def _docs_row_dict(self, token=True, unicode_str=True, all_values=True , lang="all"):
        '''
        Returns, per template, one dict with the columns as keys and a tuple of
        all values per column (or just the first value per column, if
        all_values=False).
        '''
if lang == "test":
lang ="all"
docs_row_values = self._docs_row_values(token=token, unicode_str=unicode_str, lang=lang)
#p(docs_row_values,"docs_row_values")
if all_values:
return {template_name:{k:v for k,v in zip(columns, zip(*docs_row_values[template_name]))} for template_name, columns in self._columns_in_doc_table.iteritems()}
else:
return {template_name:{col:row[0] for col, row in data.iteritems()} for template_name, data in self._docs_row_dict(token=token, unicode_str=unicode_str,lang=lang,all_values=True).iteritems()}
def _docs_row_dicts(self, token=True, unicode_str=True, lang="all"):
        '''
        Returns, per template, a list of dicts (one column:value dict per row).
        '''
if lang == "test":
lang ="all"
docs_row_values = self._docs_row_values(token=token, unicode_str=unicode_str, lang=lang)
docs_row_dicts = { template_name:[dict(zip(columns, row)) for row in docs_row_values[template_name]] for template_name, columns in self._columns_in_doc_table.iteritems()}
return docs_row_dicts | zas-rep-tools | /zas-rep-tools-0.2.tar.gz/zas-rep-tools-0.2/zas_rep_tools/src/utils/configer_helpers.py | configer_helpers.py |
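    # Shape sketch (values are illustrative): for the "blogger" template,
    # _docs_row_dict() returns {"blogger": {"id": (1, 2, ...), "text": (...), ...}}
    # -- one tuple of values per column -- while _docs_row_dicts() returns
    # {"blogger": [{"id": 1, "text": ..., ...}, ...]} -- one dict per row.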
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# : Command Line Interface
# Author:
# c(Student) -> {'Egor Savin'}
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
###Programm Info######
import click
import shutil
import os
import inspect
import sys
import logging
import ast
import json
from blessings import Terminal
import enlighten
import time
import itertools
#from zas_rep_tools.src.utils.zaslogger import ZASLogger
from zas_rep_tools.src.utils.cli_helper import set_main_folders, get_cli_logger, validate_corp,validate_stats, strtobool, get_corp_fname, _get_status_bars_manager, _get_new_status_bar,get_stats_fname
from zas_rep_tools.src.utils.helpers import set_class_mode
import zas_rep_tools.src.utils.helpers as helpers
from zas_rep_tools.src.classes.reader import Reader
from zas_rep_tools.src.classes.corpus import Corpus
from zas_rep_tools.src.utils.corpus_helpers import CorpusData
from zas_rep_tools.src.classes.stats import Stats
from zas_rep_tools.src.classes.exporter import Exporter
from zas_rep_tools.src.classes.dbhandler import DBHandler
from zas_rep_tools.src.classes.streamer import Streamer
from zas_rep_tools.src.utils.debugger import p
from zas_rep_tools.src.classes.ToolConfiger import ToolConfiger
answer_error_tracking = None
project_folder = None
email = None
twitter_creditials = None
configer_obj = None
#logger = None
main_folders = set_main_folders(project_folder)
end_file_marker = -1
configer_obj = ToolConfiger(mode="error")
supported_commands = {
"configer": {
"prjdir":["clean", "set", "reset", "print"],
"error_track":["set", "reset", "respeak","print"],
"twitter":["set", "reset", "respeak","print"],
"email":["set", "reset", "respeak","print"],
"user_data":["clean", "location","print"],
},
"corpora": ["add", "del", "names", "meta", "basic_stats", "update_attr", "export", "used_tools", "clean_dir","cols", "doc", "ids"],
"stats": ["compute", "del", "names", "meta", "basic_stats", "update_attr", "export", "clean_dir", "recompute","optimize", "recreate_indexes"],
'streamer':[]
}
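# Example invocations (taken from the help and error texts below; run a
# sub-command with '--help' for the full option list):
#   zas-rep-tools configer prjdir print
#   zas-rep-tools corpora add --path_to_read . --file_format_to_read json \
#       --corp_intern_dbname streamed --language de --visibility intern \
#       --platform_name twitter
#   zas-rep-tools corpora names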
@click.group()
def main():
global answer_error_tracking
global project_folder
global email
global twitter_creditials
global configer_obj
global main_folders
configer_obj.get_data_from_user(rewrite=False)
answer_error_tracking = configer_obj._user_data['error_tracking']
project_folder = configer_obj._user_data['project_folder']
if not project_folder:
configer_obj.logger.error("ProjectDirectory wasn't given. Please Give first a new ProjectDirectory, before starting to use this tool.")
sys.exit()
return False
else:
        if not os.path.isdir(project_folder):
            configer_obj.logger.error("ProjectDirectory doesn't exist or the given path is invalid ('{}'). Please give a path to an existing directory.".format(project_folder))
            return False
main_folders = set_main_folders(project_folder)
email = configer_obj._user_data['email']
twitter_creditials = configer_obj._user_data['twitter_creditials']
@main.command('configer')
@click.argument('command1')
@click.argument('command2')
@click.option('--mode', '-m', default="prod" ,help="Set one of the Tool Modus", type=click.Choice(helpers.modi))
@click.option('--logdir', '-ld', default="logs", help="Choose the name of the Directory for log data.")
def configer(command1,command2, mode,logdir ):
# $ zas-vot-tools strat1 sets/train_set sets/eval_set segments voiceless voiced vlwindow vcwindow experiments
logger = get_cli_logger(mode,logdir)
#p(command,"command")
func_name = "configer"
if command1 not in supported_commands[func_name]:
logger.error(" Given Command ('{}') is illegal for '{}'. Please use one of the following commands: '{}' ".format(command1,func_name,supported_commands[func_name] ))
return False
if command2 not in supported_commands[func_name][command1]:
logger.error(" Given Command ('{}') is illegal for '{}'->{}. Please use one of the following commands: '{}' ".format(command2,func_name,command1,supported_commands[func_name][command1] ))
return False
if command1 == "prjdir":
if command2 in ["set", "reset"]:
#p(configer_obj, "configer_obj")
configer_obj._cli_menu_get_from_user_project_folder()
elif command2 == "clean":
            if os.path.isdir(project_folder):
                shutil.rmtree(project_folder)
                os.makedirs(project_folder)
else:
logger.error("Current ProjectDirectory is not exist.")
return False
elif command2 == "print":
print "ProjectDirectory: '{}'. ".format(project_folder)
elif command1 == "error_track":
if command2 in ["set", "reset", "respeak"]:
configer_obj._cli_menu_error_agreement()
elif command2 == "print":
print ">>> Error_tracking: {};".format( configer_obj._user_data["error_tracking"])
elif command1 == "twitter":
if command2 in ["set", "reset", "respeak"]:
configer_obj._cli_menu_get_from_user_twitter_credentials()
elif command2 == "print":
print ">>> Twitter_creditials: {};".format( configer_obj._user_data["twitter_creditials"])
elif command1 == "email":
if command2 in ["set", "reset", "respeak"]:
configer_obj._cli_menu_get_from_user_emails()
elif command2 == "print":
print ">>> Email: {};".format( configer_obj._user_data["email"])
elif command1 == "user_data":
if command2 == "clean":
configer_obj._user_data.clean()
elif command2 == "location":
print "ProjectDirectory: '{}'. ".format(project_folder)
print "ConfigsData: '{}'. ".format(configer_obj._path_to_user_config_data)
elif command2 == "print":
#import json
for cat in configer_obj._user_data:
print ">>> {}: '{}';".format(cat.upper(), configer_obj._user_data[cat])
### all DB
@main.command('corpora')
@click.argument('command1')
### DB ###
@click.option('--status_bar', '-sb', default=True,type=bool, help="Enable/Disable the Status Bar")
#@click.option('--end_file_marker', '-efm', default=-1, help="Setup the end_file_marker for better matching the file end")
@click.option('--use_end_file_marker', '-uefm', default=False,type=bool, help="Enable/Disable usage of endfilemarker to change the couter unit from rows to files in the status bar")
@click.option('--tok_split_camel_case', '-tscc', default=True,type=bool, help="Enable/Disable the option for Tokenizer to convertion and split of the CamelCase (ex. 'CamelCase')")
@click.option('--make_backup', '-backup', default=True,type=bool, help="Enable/Disable making BackUp of the whole Corpus before the new Insetions")
@click.option('--lazyness_border', '-lb', default=50000, help="Set the number of the border, which ensure when exactly data collector should save data on the disk. If you have a big RAM than select the high number, to ensure the hight performance.")
@click.option('--rewrite', '-rw', default=False,type=bool, help="Enable/Disable rewrite option, which ensure the file replacing/rewriting during the export, if the same filename was found in the same directory.")
@click.option('--use_cash', '-uc', default=True,type=bool, help="Enable/Disable during the insertion process write direct on the disk or first into cash. It is a good performance booster, but just in the case of the big RAM.")
@click.option('--optimizer', '-opt', default="ljs", help="Enable/Disable the DB Optimizer, which makes the current DB much faster, but less safe.")
@click.option('--optimizer_page_size', '-optps', default=4096, help="Setting for the DB Optimizer. See the help text for '--optimizer'.")
@click.option('--optimizer_cache_size', '-optcs', default=1024000, help="Setting for the DB Optimizer. See the help text for '--optimizer'.")
@click.option('--optimizer_locking_mode', '-optlm', default="exclusive", type=click.Choice(DBHandler.non_mapped_states["locking_mode"]), help="Setting for the DB Optimizer. See the help text for '--optimizer'.") #, type=click.Choice(['md5', 'sha1'])
@click.option('--optimizer_synchronous', '-optsyn', default="off", type=click.Choice(DBHandler.mapped_states["synchronous"].keys()+DBHandler.mapped_states["synchronous"].values()), help="Setting for the DB Optimizer. See the help text for '--optimizer'.")
@click.option('--optimizer_journal_mode', '-optjm', default="memory", type=click.Choice(DBHandler.non_mapped_states["journal_mode"]), help="Setting for the DB Optimizer. See the help text for '--optimizer'.")
@click.option('--optimizer_temp_store', '-optts', default="memory", type=click.Choice(DBHandler.mapped_states["temp_store"].keys()+DBHandler.mapped_states["temp_store"].values()), help="Setting for the DB Optimizer. See the help text for '--optimizer'.")
@click.option('--gready', '-gr', default=False,type=bool, help="If False -> Stop Process immediately if error was returned. If True -> Try to execute script so long as possible, without stopping the main process.")
## Corp ####
@click.option('--corp_fname', '-cn', default=False, help="File Name of the CorpusDB (with or without extention)")
@click.option('--language', '-lang', default="False", help="Give language acronym according to standard ISO639_2.", type=click.Choice(CorpusData.tokenizer_for_languages.keys()+["False"]))
@click.option('--visibility', '-vis', default='False', help="Is that an intern or extern Corpus?", type=click.Choice(["extern", "intern", "False"]))
@click.option('--platform_name', '-platf', default=False)
@click.option('--encryption_key', '-encrkey', default=False, help="For encryption of the current DB please given an key. If key is not given, than the current DB will be not encrypted.")
@click.option('--corp_intern_dbname', '-cname', default=False, help="Intern Name of the DB, which will be saved as tag inside the DB.")
@click.option('--source', '-src', default=False, help="Source of the text collection.")
@click.option('--license', '-lic', default=False, help="License, under which this corpus will be used.")
@click.option('--template_name', '-templ', default='False', help="Templates pre-initialize the Document Table in the DB. Every column in the Document Table has to be initialized: either use one of the predefined templates or initialize those columns manually with the '--cols_and_types_in_doc' option.", type=click.Choice(list(DBHandler.templates.keys())+["False"]))
@click.option('--version', '-ver', default=1, help="Version Number of the DB")
@click.option('--cols_and_types_in_doc', '-additcols', default=False, help="Additional columns from the input text collections. Every column in the Document Table has to be initialized. Every Document Table already has two default columns (id, text); if you want to insert other columns as well, please define them here together with their type names. The column names should correspond to the column names in the input text data and be given in the following form: 'colname1:coltype1,colname2:coltype2,colname3:coltype3'")
@click.option('--corpus_id_to_init', '-cid', default=False, help="Manually given corpid")
@click.option('--tokenizer', '-tok', default='True', help="Select Tokenizer by name", type=click.Choice(list(CorpusData.supported_tokenizer)+["False", "True"]) )
@click.option('--pos_tagger', '-ptager', default='False', help="Select POS-Tagger by name", type=click.Choice(list(CorpusData.supported_pos_tagger)+["False", "True"]))
@click.option('--sentiment_analyzer', '-sentim', default='False', help="Select Sentiment Analyzer by name", type=click.Choice(list(CorpusData.supported_sentiment_analyzer)+["False", "True"]))
@click.option('--sent_splitter', '-sentspl', default='True', help="Select Sentence Splitter by name", type=click.Choice(list(CorpusData.supported_stemmer)+["False", "True"]))
@click.option('--preprocession', '-preproc', default=True,type=bool, help="Enable/disable Proprocessing of the text elements.")
@click.option('--lang_classification', '-langclas', default=False, help="Enable/disable Language Classification")
@click.option('--del_url', '-durl', default=False,type=bool, help="Enable/disable Hiding of all URLs")
@click.option('--del_punkt', '-dpnkt', default=False,type=bool, help="Enable/disable Hiding of all Punctuation")
@click.option('--del_num', '-dnum', default=False,type=bool, help="Enable/disable Hiding of all Numbers")
@click.option('--del_mention', '-dment', default=False,type=bool, help="Enable/disable Hiding of all Mentions")
@click.option('--del_hashtag', '-dhash', default=False,type=bool, help="Enable/disable Hiding of all Hashtags")
@click.option('--del_html', '-dhtml', default=False,type=bool, help="Enable/disable cleaning of all not needed html tags")
@click.option('--case_sensitiv', '-case', default=False,type=bool, help="Enable/disable the case sensitivity in the Corpus during initialization.")
@click.option('--emojis_normalization', '-emojnorm', default=True,type=bool, help="Enable/disable restructure of all Emojis. (could cost much time)")
@click.option('--text_field_name', '-texname', default="text", help="If the input data uses a different field name for the text information, use this option to ensure the correct use of the data.")
@click.option('--id_field_name', '-idname', default="id", help="If the input data uses a different field name for the id information, use this option to ensure the correct use of the data.")
@click.option('--heal_me_if_possible', '-heal', default=True,type=bool, help="If '--template_name' and '--cols_and_types_in_doc' weren't selected, then with this option ('--heal_me_if_possible') the DB will try to initialize that information automatically. But be careful with this option, because it could also return unexpected errors.")
## Reader
@click.option('--path_to_read', '-ptr', default=False, help="Path to folder with text collection, which should be collected and transformed into CorpusDB.")
@click.option('--file_format_to_read', '-readtyp', default='False', help="File Format which should be read.", type=click.Choice(Reader.supported_file_types[:]+["False"]))
@click.option('--reader_regex_template', '-readregextempl', default='False', type=click.Choice(Reader.regex_templates.keys()+["False"] ), help="Name of the template for Reading of the TXT Files.")
@click.option('--reader_regex_for_fname', '-readregexpattern', default=False, help="Regex Pattern for Extraction of the Columns from the filenames.")
@click.option('--read_from_zip', '-zipread', default=False,type=bool, help="Enable/Disable the possibly also to search and read automatically from *.zip Achieves.")
@click.option('--formatter_name', '-formatter', default='False', type=click.Choice(["twitterstreamapi", "sifter", "False"]), help="Give the name of the predefined Formatters and Preprocessors for different text collections.")
@click.option('--reader_ignore_retweets', '-retweetsignr', default=True,type=bool, help="Ignore Retweets, if original JsonTweet was given.")
@click.option('--min_files_pro_stream', '-minfile', default=1000, help="The limit at which multiprocessing starts to create a new stream.")
@click.option('--csvdelimiter', '-csvd', default=',', help="CSV files often use different dialects and delimiters. With this option it is possible to set a delimiter which ensures the correct processing of the CSV file data.")
@click.option('--encoding', '-enc', default="utf-8", help="All text files are decoded with the help of encoding tables. If your input files are not unicode-compatible, please give the name of the encoding that was used for the input data.", type=click.Choice(Reader.supported_encodings_types))
@click.option('--doc_id', '-docid', default=False, help="Document ID in the Corpus DB.")
@click.option('--attr_name', '-attr', default=False, help="Stats and Corpus DBs have intern attributes. To change or read one of them, give the name of the attribute.")
@click.option('--value', '-val', default=False, help="The new value to set for an attribute.")
@click.option('--type_to_export', '-exptyp', default="False", help="FileType for the export function." , type=click.Choice(Reader.supported_file_types_to_export[:]+["False"]))
@click.option('--export_dir', '-expdir', default=False, help="Directory where Exports will be saved. If False, than they will be saved in the default ProjectDirectory.")
@click.option('--export_name', '-expname', default=False, help="FileName for ExportData." )
@click.option('--rows_limit_in_file', '-rowlim', default=50000, help="Number of the Rows Max in the Files to export." )
@click.option('--stream_number', '-sn', default=1, help="Enable or disable multiprocessing. If the number is > 1, the tool tries to compute everything in parallel. This can bring much better performance on PCs with multiple cores and a big operating memory.")
@click.option('--mode', '-m', default="prod" ,help="Set one of the Tool Modus. Modi ensure the communication behavior of this Tool.", type=click.Choice(helpers.modi))
@click.option('--logdir', '-ld', default="logs", help="Choose the name of the Directory for log data.")
def corpora(command1,
status_bar, use_end_file_marker, tok_split_camel_case, make_backup, lazyness_border,
rewrite, use_cash, optimizer, optimizer_page_size,
optimizer_cache_size, optimizer_locking_mode, optimizer_synchronous, optimizer_journal_mode, optimizer_temp_store,gready,
corp_fname,language, visibility, platform_name,encryption_key, corp_intern_dbname,source, license,
template_name, version,cols_and_types_in_doc,corpus_id_to_init,
tokenizer, pos_tagger, sentiment_analyzer, sent_splitter, preprocession, lang_classification, del_url, del_punkt,
del_num, del_mention, del_hashtag, del_html, case_sensitiv, emojis_normalization, text_field_name, id_field_name,
heal_me_if_possible,
path_to_read, file_format_to_read, reader_regex_template, reader_regex_for_fname, read_from_zip,
formatter_name, reader_ignore_retweets,min_files_pro_stream, csvdelimiter,encoding,
stream_number, value, attr_name,type_to_export, export_dir, export_name,doc_id,rows_limit_in_file,
mode,logdir ):
# $ zas-vot-tools strat1 sets/train_set sets/eval_set segments voiceless voiced vlwindow vcwindow experiments
#p(template_name, "template_name")
#if configer.
formatter_name = strtobool(formatter_name)
path_to_read = strtobool(path_to_read)
file_format_to_read = strtobool(file_format_to_read)
corp_intern_dbname = strtobool(corp_intern_dbname)
visibility = strtobool(visibility)
platform_name = strtobool(platform_name)
optimizer = strtobool(optimizer)
cols_and_types_in_doc = strtobool(cols_and_types_in_doc)
corpus_id_to_init = strtobool(corpus_id_to_init)
tokenizer = strtobool(tokenizer)
pos_tagger = strtobool(pos_tagger)
sentiment_analyzer = strtobool(sentiment_analyzer)
sent_splitter = strtobool(sent_splitter)
lang_classification = strtobool(lang_classification)
template_name = strtobool(template_name)
reader_regex_template = strtobool(reader_regex_template)
type_to_export = strtobool(type_to_export)
language = strtobool(language)
doc_id = strtobool(doc_id)
status_bar = strtobool(status_bar)
    try:
        doc_id = doc_id.strip("'") if doc_id and doc_id[0] == "'" else doc_id.strip('"')
    except:
        pass
    text_field_name = text_field_name.strip("'") if text_field_name[0] == "'" else text_field_name.strip('"')
    id_field_name = id_field_name.strip("'") if id_field_name[0] == "'" else id_field_name.strip('"')
logger = get_cli_logger(mode,logdir)
func_name = "corpora"
#p(command,"command")
if command1 not in supported_commands[func_name]:
logger.error(" Given Command ('{}') is illegal for '{}'. Please use one of the following commands: '{}' ".format(command1,func_name,supported_commands[func_name] ))
return False
if command1 == "add":
if not path_to_read or not file_format_to_read:
logger.error("ObligatoryMetaDataForReaderMissing: If you want to add new Corpus, you need first give location to the raw text collection, from which than a new corpus will be created and also the file_type of those input text elements. For doing that please use following options: ['path_to_read', 'file_format_to_read']\n For example: 'zas-rep-tools corpora add --path_to_read . --file_format_to_read json'. ")
return False
if not corp_intern_dbname or not language or not visibility or not platform_name:
logger.error("ObligatoryMetaDataForCorpusMissing: If you want to add new Corpus, you need to set also following obligatory meta data: ['corp_fname', 'language', 'visibility', 'platform_name']\n For example: 'zas-rep-tools corpora add --corp_intern_dbname streamed --language de --visibility intern --platform_name twitter'. ")
return False
if file_format_to_read == "json":
if not formatter_name:
logger.warning("If you want to create TwitterCorpus from original TwitterJSON, than set following Option: '--formatter_name twitterstreamapi'. Otherwise ignore this warning. ")
if file_format_to_read == "csv":
if not formatter_name:
logger.warning("If you want to create TwitterCorpus from Sifter-CSV-Files, than set following Option: '--formatter_name sifter'. Otherwise ignore this warning. ")
### Option preprocession
stop_process_if_possible = False if gready else True
if cols_and_types_in_doc:
cols_and_types_in_doc = cols_and_types_in_doc.strip("'") if cols_and_types_in_doc[0] == "'" else cols_and_types_in_doc.strip('"')
cols_and_types_in_doc = cols_and_types_in_doc.split(",")
temp_cols_and_types_in_doc = []
for item in cols_and_types_in_doc:
if item:
splitted = item.split(":")
if len(splitted) == 2:
temp_cols_and_types_in_doc.append((splitted[0],splitted[1]))
else:
logger.error("Given '--cols_and_types_in_doc' is incomplete or invalid. Please give this without white spaces in the following form 'colname1:coltype1,colname2:coltype2,colname3:coltype3' ")
return False
else:
logger.error("Given '--cols_and_types_in_doc' is incomplete or invalid. Please give this without white spaces in the following form 'colname1:coltype1,colname2:coltype2,colname3:coltype3' ")
return False
cols_and_types_in_doc = temp_cols_and_types_in_doc
#p(cols_and_types_in_doc, "cols_and_types_in_doc")
#p(reader_regex_for_fname, "reader_regex_for_fname")
try:
reader_regex_for_fname = reader_regex_for_fname.strip("'") if reader_regex_for_fname[0] == "'" else reader_regex_for_fname.strip('"')
except:
pass
### Main Part of the Script
reader = Reader(path_to_read, file_format_to_read, regex_template=reader_regex_template,
regex_for_fname=reader_regex_for_fname, read_from_zip=read_from_zip,
end_file_marker = end_file_marker, send_end_file_marker=use_end_file_marker, stop_process_if_possible=stop_process_if_possible,
formatter_name=formatter_name, text_field_name = text_field_name, id_field_name=id_field_name,
ignore_retweets=reader_ignore_retweets, mode=mode)
if reader._get_number_of_left_over_files() == 0:
logger.error("No one file was found in the given path ('{}'). Please check the correctness of the given path or give other (correct one) path to the text data. If you want to search also in zips, than set following option: '--read_from_zip True'".format(reader._inp_path))
return False
stop_if_db_already_exist = False if rewrite else True
corp = Corpus(mode=mode, error_tracking=answer_error_tracking, status_bar=status_bar,
end_file_marker=end_file_marker, use_end_file_marker=use_end_file_marker, tok_split_camel_case=tok_split_camel_case,
make_backup=make_backup, lazyness_border=lazyness_border, thread_safe=True, rewrite=rewrite, stop_if_db_already_exist=stop_if_db_already_exist,
use_cash=use_cash, optimizer=optimizer,optimizer_page_size=optimizer_page_size,
optimizer_cache_size=optimizer_cache_size, optimizer_locking_mode=optimizer_locking_mode, optimizer_synchronous=optimizer_synchronous,
optimizer_journal_mode=optimizer_journal_mode, optimizer_temp_store=optimizer_temp_store,stop_process_if_possible=stop_process_if_possible,
heal_me_if_possible=heal_me_if_possible)
#p((text_field_name, id_field_name), c="m")
corp.init(main_folders["corp"], corp_intern_dbname, language, visibility, platform_name,
encryption_key=encryption_key, fileName=corp_fname, source=source, license=license,
template_name=template_name, version=version, cols_and_types_in_doc=cols_and_types_in_doc,
corpus_id=corpus_id_to_init, text_field_name=text_field_name,id_field_name=id_field_name,
tokenizer=tokenizer,pos_tagger=pos_tagger,sentiment_analyzer=sentiment_analyzer,
sent_splitter=sent_splitter,preprocession=preprocession, lang_classification=lang_classification,
del_url=del_url,del_punkt=del_punkt,del_num=del_num,del_mention=del_mention,del_hashtag=del_hashtag,del_html=del_html,
case_sensitiv=case_sensitiv, emojis_normalization=emojis_normalization)
#csvdelimiter,encoding,
csvdelimiter = csvdelimiter.strip("'") if csvdelimiter[0] == "'" else csvdelimiter.strip('"')
status = corp.insert(reader.getlazy(stream_number=stream_number,min_files_pro_stream=min_files_pro_stream,csvdelimiter=csvdelimiter,encoding=encoding), create_def_indexes=True)
if not status or corp.corpdb.rownum("documents") == 0:
corp_fname = corp.corpdb.fname()
#corp.corpdb.commit()
#import shutil
corp._close()
os.remove(os.path.join(main_folders["corp"],corp_fname))
logger.info("InsertionProecess into '{}'-CorpDB is failed. Corpus was deleted.".format(corp_fname))
else:
corp.corpdb.commit()
corp.close()
elif command1 == "del":
if not corp_fname:
logger.error("'--corp_fname' is not given. (You can also give the tag 'all' instead of a corp_fname.)")
return False
if corp_fname == "all":
#os.remove()
shutil.rmtree(main_folders["corp"], ignore_errors=True)
logger.info("All CorpDB was removed.")
else:
files = get_corp_fname(main_folders)
if corp_fname in files:
os.remove(os.path.join(main_folders["corp"], corp_fname))
logger.info("'{}'-CorpDB was removed".format(corp_fname))
elif corp_fname in [os.path.splitext(fname)[0] for fname in files]:
os.remove(os.path.join(main_folders["corp"], corp_fname)+".db")
logger.info("'{}'-CorpDB was removed".format(corp_fname))
else:
logger.error("Given fname ('{}') wasn't found and can not be removed.".format(corp_fname))
return False
elif command1 == "clean_dir":
#if not corp_fname:
# logger.error("'--corp_fname' is not given. (you can also give tag 'all' instead of the corp_fname)")
#if corp_fname == "all":
files = get_corp_fname(main_folders)
validated,possibly_encrypted,wrong,opened_db = validate_corp(main_folders,files)
deleted = []
for temp_dbname in validated:
corp = Corpus(mode="blind")
corp.open(os.path.join(main_folders["corp"], temp_dbname))
if corp.corpdb:
if corp.corpdb.get_attr("locked"):
deleted.append(temp_dbname)
os.remove(os.path.join(main_folders["corp"], temp_dbname))
files = os.listdir(main_folders["corp"])
for journal_fname in [fname for fname in files if".db-journal" in fname]:
#deleted.append(temp_dbname)
os.remove(os.path.join(main_folders["stats"], journal_fname))
if deleted:
print " Following not finished and locked CorpDBs was deleted:"
for dn in deleted:
print " |-> '{}';".format(dn)
return True
else:
print " Locked or not finished CorpDBs wasn't found."
return False
elif command1 == "names":
files = get_corp_fname(main_folders)
validated,possibly_encrypted,wrong,opened_db = validate_corp(main_folders,files)
print ">>> {} DBs was found <<< ".format(len(files))
print " '{}'-From them was validated:".format(len(validated))
for i, fname in enumerate(validated):
print " {}. '{}';".format(i, fname)
if possibly_encrypted:
print "\n '{}'-From them are possibly encrypted/damaged/invalid:".format(len(possibly_encrypted))
for i, fname in enumerate(possibly_encrypted):
print " {}. '{}';".format(i, fname)
#p(files)
elif command1 == "meta":
if not corp_fname:
logger.error("'--corp_fname' is not given. (you can also give tag 'all' instead of the corp_fname)")
return False
files = get_corp_fname(main_folders)
validated,possibly_encrypted,wrong,opened_db = validate_corp(main_folders,files)
if corp_fname == "all":
for db in opened_db:
print("\n >>>> {} <<<<".format(db.fname()))
for k,v in db.get_all_attr().items():
print " {} = '{}';".format(k,v)
print "\n\nNotice! with 'all'-Argument could be checked just not-encrypted DBs. If you want to check encrypted DB use additional to corp_fname also '--encryption_key'"
else:
if corp_fname in files:
if corp_fname in validated:
ix = validated.index(corp_fname)
db = opened_db[ix]
print("\n >>>> {} <<<<".format(db.fname()))
for k,v in db.get_all_attr().items():
print " {} = '{}';".format(k,v)
elif corp_fname in possibly_encrypted:
if not encryption_key:
logger.error("Current DBFile is possibly encrypted/damaged. Please use '--encryption_key'-Option to decrypt it. ")
return False
else:
h = DBHandler(mode="blind")
h.connect(os.path.join(main_folders["corp"],corp_fname),encryption_key=encryption_key)
try:
if h.typ() == "corpus":
print("\n >>>> {} <<<<".format(h.fname()))
for k,v in h.get_all_attr().items():
print " {} = '{}';".format(k,v)
else:
logger.error("'{}'-DB is not a CorpusDB or the given encryption key ('{}') is wrong. ".format(corp_fname,encryption_key))
return False
except:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(corp_fname,encryption_key))
return False
elif corp_fname in [os.path.splitext(fname)[0] for fname in files]:
spl1 = [os.path.splitext(fname)[0] for fname in validated]
spl2 = [os.path.splitext(fname)[0] for fname in possibly_encrypted]
if corp_fname in spl1:
ix = spl1.index(corp_fname)
db = opened_db[ix]
print("\n >>>> {} <<<<".format(db.fname()))
for k,v in db.get_all_attr().items():
print " {} = '{}';".format(k,v)
elif corp_fname in spl2:
if not encryption_key:
logger.error("Current DBFile is possibly encrypted/damaged. Please use '--encryption_key'-Option to decrypt it. ")
else:
h = DBHandler(mode="blind")
h.connect(os.path.join(main_folders["corp"],corp_fname)+".db",encryption_key=encryption_key)
try:
if h.typ() == "corpus":
print("\n >>>> {} <<<<".format(h.fname()))
for k,v in h.get_all_attr().items():
print " {} = '{}';".format(k,v)
else:
logger.error("'{}'-DB is not a CorpusDB or the given encryption key ('{}') is wrong. ".format(corp_fname,encryption_key))
return False
except:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(corp_fname,encryption_key))
return False
else:
logger.error("Given fname ('{}') wasn't validated. It means, that possibly it is not a CorpusDB or it is just encrypted!".format(corp_fname))
return False
else:
logger.error("Given fname ('{}') wasn't found!".format(corp_fname))
return False
elif command1 == "basic_stats":
if not corp_fname:
logger.error("'--corp_fname' is not given. (you can also give tag 'all' instead of the corp_fname)")
return False
files = get_corp_fname(main_folders)
validated,possibly_encrypted,wrong,opened_db = validate_corp(main_folders,files)
if corp_fname == "all":
for db in opened_db:
print("\n >>>> {} <<<<".format(db.fname()))
print " doc_num = '{}';".format(db.get_attr("doc_num"))
print " sent_num = '{}';".format(db.get_attr("sent_num"))
print " token_num = '{}';".format(db.get_attr("token_num"))
print "\n\nNotice! with 'all'-Argument could be checked just not-encrypted DBs. If you want to check encrypted DB use additional to corp_fname also '--encryption_key'"
else:
if corp_fname in files:
if corp_fname in validated:
ix = validated.index(corp_fname)
db = opened_db[ix]
print("\n >>>> {} <<<<".format(db.fname()))
print " doc_num = '{}';".format(db.get_attr("doc_num"))
print " sent_num = '{}';".format(db.get_attr("sent_num"))
print " token_num = '{}';".format(db.get_attr("token_num"))
elif corp_fname in possibly_encrypted:
if not encryption_key:
logger.error("Current DBFile is possibly encrypted/damaged. Please use '--encryption_key'-Option to decrypt it. ")
else:
h = DBHandler(mode="blind")
h.connect(os.path.join(main_folders["corp"],corp_fname),encryption_key=encryption_key)
try:
if h.typ() == "corpus":
print("\n >>>> {} <<<<".format(h.fname()))
print " doc_num = '{}';".format(h.get_attr("doc_num"))
print " sent_num = '{}';".format(h.get_attr("sent_num"))
print " token_num = '{}';".format(h.get_attr("token_num"))
else:
logger.error("'{}'-DB is not a CorpusDB or the given encryption key ('{}') is wrong. ".format(corp_fname,encryption_key))
return False
except:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(corp_fname,encryption_key))
return False
elif corp_fname in [os.path.splitext(fname)[0] for fname in files]:
spl1 = [os.path.splitext(fname)[0] for fname in validated]
spl2 = [os.path.splitext(fname)[0] for fname in possibly_encrypted]
if corp_fname in spl1:
ix = spl1.index(corp_fname)
db = opened_db[ix]
print("\n >>>> {} <<<<".format(db.fname()))
print " doc_num = '{}';".format(db.get_attr("doc_num"))
print " sent_num = '{}';".format(db.get_attr("sent_num"))
print " token_num = '{}';".format(db.get_attr("token_num"))
elif corp_fname in spl2:
if not encryption_key:
logger.error("Current DBFile is possibly encrypted/damaged. Please use '--encryption_key'-Option to decrypt it. ")
else:
h = DBHandler(mode="blind")
h.connect(os.path.join(main_folders["corp"],corp_fname)+".db",encryption_key=encryption_key)
try:
if h.typ() == "corpus":
print("\n >>>> {} <<<<".format(h.fname()))
print " doc_num = '{}';".format(h.get_attr("doc_num"))
print " sent_num = '{}';".format(h.get_attr("sent_num"))
print " token_num = '{}';".format(h.get_attr("token_num"))
else:
logger.error("'{}'-DB is not a CorpusDB or the given encryption key ('{}') is wrong. ".format(corp_fname,encryption_key))
return False
except:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(corp_fname,encryption_key))
return False
else:
logger.error("Given fname ('{}') wasn't validated. It means, that possibly it is not a CorpusDB or it is just encrypted!".format(corp_fname))
return False
else:
logger.error("Given fname ('{}') wasn't found!".format(corp_fname))
return False
elif command1 == "update_attr":
if not corp_fname or not attr_name or not value:
logger.error("Command is incomplete: '--corp_fname' or '--attr_name' or '--value' is not given.")
else:
files = get_corp_fname(main_folders)
if corp_fname in files:
pass
elif corp_fname in [os.path.splitext(fname)[0] for fname in files]:
corp_fname = corp_fname+".db"
else:
logger.error("Given fname ('{}') wasn't found and can not be removed.".format(corp_fname))
return False
try:
db = DBHandler(mode="error")
db.connect(os.path.join(main_folders["corp"],corp_fname),encryption_key=encryption_key)
if db._db:
if db.typ() == "corpus":
if attr_name not in db.get_all_attr():
logger.error("Given Attribute ('{}') is not exist in this DataBase.".format(attr_name))
return False
db.update_attr(attr_name,value)
db._commit()
updated_attr = db.get_attr(attr_name)
#p((updated_attr, value))
if str(value) != str(updated_attr):
logger.error("Update of the given Attribute ('{}') failed.".format(attr_name))
return False
else:
logger.info("Given Attribute ('{}') in the '{}'-DB was updated to '{}'.".format(attr_name,corp_fname, value))
return True
else:
logger.error("'{}'-DB is not a CorpusDB or the given encryption key ('{}') is wrong. ".format(corp_fname,encryption_key))
return False
else:
#p(type(strtobool(encryption_key)), "strtobool(encryption_key)")
if strtobool(encryption_key):
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(corp_fname,encryption_key))
else:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged, encrypted or locked. Please use the '--encryption_key' option to set an encryption key. ".format(corp_fname))
return False
except:
if strtobool(encryption_key):
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(corp_fname,encryption_key))
else:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged, encrypted or locked. Please use the '--encryption_key' option to set an encryption key. ".format(corp_fname))
return False
elif command1 == "export":
if not corp_fname or not type_to_export:
logger.error("Command is incomplete: '--corp_fname' or '--type_to_export' is not given.")
return False
else:
files = get_corp_fname(main_folders)
export_dir = export_dir if export_dir else main_folders["export"]
export_name = export_name if export_name else "Export{}".format(int(time.time()))
if corp_fname in files:
pass
elif corp_fname in [os.path.splitext(fname)[0] for fname in files]:
corp_fname = corp_fname+".db"
else:
logger.error("Given fname ('{}') wasn't found and can not be removed.".format(corp_fname))
return False
corp = Corpus(mode="error")
corp.open(os.path.join(main_folders["corp"],corp_fname),encryption_key=encryption_key)
def intern_getter():
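# Generator that lazily yields document rows (as dicts) from the corpus
# while driving the status bar; it is consumed below by the Exporter.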
if status_bar:
status_bars_manager = _get_status_bars_manager()
status_bar_start = _get_new_status_bar(None, status_bars_manager.term.center("Exporter") , "", counter_format=status_bars_manager.term.bold_white_on_green("{fill}{desc}{fill}"),status_bars_manager=status_bars_manager)
status_bar_start.refresh()
#if status_bar:
status_bar_current = _get_new_status_bar(num, "Exporting:", "row",status_bars_manager=status_bars_manager)
for item in corp.docs(output="dict"):
if status_bar:
status_bar_current.update(incr=1)
yield item
if status_bar:
status_bar_total_summary = _get_new_status_bar(None, status_bars_manager.term.center("Exported: Rows: '{}'; ".format(num) ), "", counter_format=status_bars_manager.term.bold_white_on_green('{fill}{desc}{fill}\n'),status_bars_manager=status_bars_manager)
status_bar_total_summary.refresh()
status_bars_manager.stop()
if corp.corpdb._db:
num = corp.corpdb.rownum("documents")
exporter = Exporter(intern_getter(),rewrite=False,silent_ignore=False , mode=mode)
if type_to_export not in Exporter.supported_file_formats:
logger.error("Given Export Type ('{}') is not supported.".format(type_to_export))
return False
if type_to_export == "csv":
#p(cols,"cols")
cols = corp.corpdb.col("documents")
exporter.tocsv(export_dir, export_name, cols, rows_limit_in_file=rows_limit_in_file)
elif type_to_export == "xml":
exporter.toxml(export_dir, export_name, rows_limit_in_file=rows_limit_in_file)
elif type_to_export == "json":
exporter.tojson(export_dir, export_name, rows_limit_in_file=rows_limit_in_file)
else:
logger.error("Given Export Type ('{}') is not supported.".format(type_to_export))
return False
else:
#p(type(strtobool(encryption_key)), "strtobool(encryption_key)")
if strtobool(encryption_key):
logger.error("'{}'-DB wasn't opened. Possibly this DB is locked or damaged, or the given encryption key ('{}') is wrong. ".format(corp_fname,encryption_key))
else:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged, encrypted or locked. Please use the '--encryption_key' option to set an encryption key. ".format(corp_fname))
return False
elif command1 == "used_tools":
term = Terminal()
for tool_name, data in CorpusData.info.items():
print "\n\n\n\n"
print term.bold_white_on_magenta(" >>>>> {} <<<<< ".format(tool_name))
if tool_name == "tagger":
for tagger_name, infos in data.items():
print term.bold_white_on_cyan(" TaggerName: {} ".format(tagger_name))
print "\t\t"+json.dumps(CorpusData.info[tool_name][tagger_name], sort_keys=True, indent=5).replace("\n", "\n\t")
print "\n\n"
else:
print "\t"+json.dumps(CorpusData.info[tool_name], sort_keys=True, indent=5).replace("\n", "\n\t")
elif command1 == "cols":
if not corp_fname :
logger.error("Command is incomplete: '--corp_fname' is not given.")
else:
files = get_corp_fname(main_folders)
if corp_fname in files:
pass
elif corp_fname in [os.path.splitext(fname)[0] for fname in files]:
corp_fname = corp_fname+".db"
else:
logger.error("Given fname ('{}') wasn't found and can not be removed.".format(corp_fname))
return False
try:
db = DBHandler(mode="error")
db.connect(os.path.join(main_folders["corp"],corp_fname),encryption_key=encryption_key)
if db._db:
if db.typ() == "corpus":
print " Columns in the DocumentTable for {} :".format(db.fname())
i = 0
temp = []
for col in db.col("documents"):
i += 1
if i < 4:
temp.append(col)
else:
print " {}".format(temp)
temp = []
i = 0
if temp:
print " {}".format(temp)
else:
logger.error("'{}'-DB is not a CorpusDB or the given encryption key ('{}') is wrong. ".format(corp_fname,encryption_key))
return False
else:
#p(type(strtobool(encryption_key)), "strtobool(encryption_key)")
if strtobool(encryption_key):
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(corp_fname,encryption_key))
else:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged, encrypted or locked. Please use the '--encryption_key' option to set an encryption key. ".format(corp_fname))
return False
except:
if strtobool(encryption_key):
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(corp_fname,encryption_key))
else:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged, encrypted or locked. Please use the '--encryption_key' option to set an encryption key. ".format(corp_fname))
return False
elif command1 == "doc":
if not corp_fname or not doc_id:
logger.error("Command is incomplete: '--corp_fname' , '--doc_id' is not given.")
return False
else:
files = get_corp_fname(main_folders)
if corp_fname in files:
pass
elif corp_fname in [os.path.splitext(fname)[0] for fname in files]:
corp_fname = corp_fname+".db"
else:
logger.error("Given fname ('{}') wasn't found and can not be removed.".format(corp_fname))
return False
try:
corp = Corpus(mode="error")
corp.open(os.path.join(main_folders["corp"],corp_fname),encryption_key=encryption_key)
if corp.corpdb:
#p(corp._id_field_name, "corp._id_field_name")
getted_docs = list(corp.docs( where="{}='{}'".format(corp._id_field_name, doc_id), output="dict"))
#p(getted_docs, "getted_docs")
print "\n >>> {} <<< :".format(corp.corpdb.fname())
print " (Matched DocItems for doc_id: '{}') ".format(doc_id)
if getted_docs:
for i, doc_item in enumerate(getted_docs):
if i > 0:
print "\n"
if doc_item:
for k,v in doc_item.items():
print " {} : {}".format(k,repr(v))
else:
print "\n !!!!NO ONE DOC WAS FOUND FOR GIVEN DOCID!!!"
else:
#p(type(strtobool(encryption_key)), "strtobool(encryption_key)")
if strtobool(encryption_key):
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(corp_fname,encryption_key))
else:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged, encrypted or locked. Please use the '--encryption_key' option to set an encryption key. ".format(corp_fname))
return False
except:
if strtobool(encryption_key):
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(corp_fname,encryption_key))
else:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged, encrypted or locked. Please use the '--encryption_key' option to set an encryption key. ".format(corp_fname))
return False
elif command1 == "ids":
if not corp_fname :
logger.error("Command is incomplete: '--corp_fname' is not given.")
return False
else:
files = get_corp_fname(main_folders)
if corp_fname in files:
pass
elif corp_fname in [os.path.splitext(fname)[0] for fname in files]:
corp_fname = corp_fname+".db"
else:
logger.error("Given fname ('{}') wasn't found and can not be removed.".format(corp_fname))
return False
try:
corp = Corpus(mode="error")
corp.open(os.path.join(main_folders["corp"],corp_fname),encryption_key=encryption_key)
if corp.corpdb:
#p(corp._id_field_name, "corp._id_field_name")
ids = [item[0] for item in corp.docs(columns=corp._id_field_name)]
print " IDs in the DocumentTable for {} :".format(corp.corpdb.fname())
print ids
# i = 0
# temp = []
# for col in ids:
# i += 1
# if i < 5:
# temp.append(col)
# else:
# temp_str = ""
# for id_item in temp:
# temp_str += "{}, ".format(id_item)
# print " {}".format(temp_str)
# temp = []
# i = 0
else:
#p(type(strtobool(encryption_key)), "strtobool(encryption_key)")
if strtobool(encryption_key):
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(corp_fname,encryption_key))
else:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged, encrypted or locked. Please use the '--encryption_key' option to set an encryption key. ".format(corp_fname))
return False
except:
if strtobool(encryption_key):
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(corp_fname,encryption_key))
else:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged, encrypted or locked. Please use the '--encryption_key' option to set an encryption key. ".format(corp_fname))
return False
#"stats": ["compute", "recompute", "optimize", "recreate_indexes", "del", "names", "meta", "basic_stats", "update_attr", "export", ],
@main.command('stats')
@click.argument('command1')
### DB ###
@click.option('--status_bar', '-sb', default=True,type=bool, help="Enable/Disable the status bar")
@click.option('--use_end_file_marker', '-uefm', default=False,type=bool, help="Enable/Disable usage of the end-file marker to change the counter unit from rows to files in the status bar")
@click.option('--make_backup', '-backup', default=True,type=bool, help="Enable/Disable making a backup of the whole corpus before new insertions")
@click.option('--lazyness_border', '-lb', default=50000, help="Set the border number that determines when the data collector should save data to disk. If you have a lot of RAM, select a high number to ensure high performance.")
@click.option('--rewrite', '-rw', default=False,type=bool, help="Enable/Disable the rewrite option, which ensures files are replaced/rewritten during export if the same filename is found in the same directory.")
@click.option('--use_cash', '-uc', default=True,type=bool, help="Enable/Disable caching during the insertion process: write directly to disk or first into a cache. It is a good performance booster, but only with a lot of RAM.")
#@click.option('--optimizer', '-opt', default=False, help="Enable/Disable DB Optimizer, which makes current DB much faster, but less safety. See more: https://www.sqlite.org/pragma.html")
@click.option('--optimizer', '-opt', default="ljs", help="Enable/Disable the DB optimizer, which makes the current DB much faster, but less safe. See more: https://www.sqlite.org/pragma.html")
@click.option('--optimizer_page_size', '-optps', default=4096, help="Setting for the DB optimizer. See the help text for '--optimizer'.")
@click.option('--optimizer_cache_size', '-optcs', default=1024000, help="Setting for the DB optimizer. See the help text for '--optimizer'.")
@click.option('--optimizer_locking_mode', '-optlm', default="exclusive", type=click.Choice(DBHandler.non_mapped_states["locking_mode"]), help="Setting for the DB optimizer. See the help text for '--optimizer'.")
@click.option('--optimizer_synchronous', '-optsyn', default="off", type=click.Choice(DBHandler.mapped_states["synchronous"].keys()+DBHandler.mapped_states["synchronous"].values()), help="Setting for the DB optimizer. See the help text for '--optimizer'.")
@click.option('--optimizer_journal_mode', '-optjm', default="memory", type=click.Choice(DBHandler.non_mapped_states["journal_mode"]), help="Setting for the DB optimizer. See the help text for '--optimizer'.")
@click.option('--optimizer_temp_store', '-optts', default="memory", type=click.Choice(DBHandler.mapped_states["temp_store"].keys()+DBHandler.mapped_states["temp_store"].values()), help="Setting for the DB optimizer. See the help text for '--optimizer'.")
@click.option('--gready', '-gr', default=False,type=bool, help="If False -> stop the process immediately if an error was returned. If True -> try to keep the script running as long as possible, without stopping the main process.")
## Stats ####
@click.option('--corp_fname', '-cn', default=False, help="File Name of the CorpusDB (with or without extention)")
@click.option('--stream_number','-sn', default=1, help="Enable or disable multiprocessing. If the number is > 1, the tool tries to compute everything in parallel. This can bring much better performance on PCs with multiple cores and a lot of memory.")
@click.option('--create_indexes', '-crtix', default=True,type=bool, help="For better performance it is highly recommended to create indexes. But their creation costs time (once) and also space.")
@click.option('--freeze_db', '-freeze', default=True,type=bool, help="Freeze the current DB and close it to any further insertion of new data. This option also triggers the DB optimization process, which can cost a lot of time, but makes the DB much more space- and time-efficient. Once this process is done, it cannot be undone.")
@click.option('--optimized_for_long_syntagmas', '-optlongsyn', default=True,type=bool, help="If you are planning to search in big syntagmas, set this to True. It will optimize the DB to be fast with long syntagmas.")
@click.option('--min_files_pro_stream', '-minfile', default=1000, help="The limit at which multiprocessing starts to create a new stream.")
@click.option('--baseline_delimiter', '-basdelim', default="|+|", help="Delimiter for syntagmas in the intern baseline table. Change this only if you really know that you need it.")
@click.option('--stats_fname', '-sfn', default=False, help="File Name of the StatsDB.")
@click.option('--visibility', '-vis', default="False", help="Is that an intern or extern Corpus?", type=click.Choice(["extern", "intern", "False"]))
@click.option('--encryption_key', '-encrkey', default=False, help="For encryption of the current DB please given an key. If key is not given, than the current DB will be not encrypted.")
@click.option('--version', '-ver', default=1, help="Version Number of the DB")
@click.option('--stats_id', '-stats_id', default=False, help="Possibilty to set StatsId manually. Otherwise it will be setted automatically.")
@click.option('--stats_intern_dbname', '-cname', default=False, help="Intern Name of the DB, which will be saved as tag inside the DB.")
@click.option('--context_lenght', '-conlen', default=5, help="This number means how many tokens left and right of each found re(du)plication will also be captured and saved. This number should be >=3")
@click.option('--full_repetativ_syntagma', '-fullrep', default=True,type=bool, help="Disable/Enable FullRepetativnes. If True, only fully repetativ syntagmas are considered. A fully repetativ syntagma is one where all words were either reduplicated or replicated. (ex.: FullRepRedu: 'klitze klitze kleine kleine' , FullRepRepl: 'kliiitzeee kleeeinee') (See more about it in Readme -> Definitions) ")
@click.option('--repl_up', '-ru', default=3, help="Above this number the tool recognizes repeated letters as a replication.")
@click.option('--ignore_hashtag', '-ignht', default=False,type=bool, help="Enable/disable Hiding of all Hashtags, if it wasn't done during CorpusCreationProcess.")
@click.option('--case_sensitiv','-case', default=False,type=bool, help="Enable/disable the case sensitivity during Stats Computation Process.")
@click.option('--ignore_url', '-ignurl', default=False,type=bool, help="Enable/disable Hiding of all URLS, if it wasn't done during CorpusCreationProcess.")
@click.option('--ignore_mention', '-ignment', default=False,type=bool, help="Enable/disable Hiding of all Mentions, if it wasn't done during CorpusCreationProcess.")
@click.option('--ignore_punkt', '-ignp', default=False,type=bool, help="Enable/disable Hiding of all Punctuation, if it wasn't done during CorpusCreationProcess.")
@click.option('--ignore_num', '-ignnum', default=False,type=bool, help="Enable/disable Hiding of all Numbers, if it wasn't done during CorpusCreationProcess.")
@click.option('--baseline_insertion_border', '-bliti', default=1000000, help="The limit at which syntagmas are deleted from the cache and saved to disk.")
## Export
@click.option('--export_dir', '-expdir', default=False, help="Set Path to export dir. If it is not given, than all export will be saved into ProjectFolder.")
@click.option('--export_name', '-exp_fname', default=False, help="Set fname for export files.")
@click.option('--syntagma_for_export', '-syn', default="*", help="Set syntagmas to search/extract. Default: '*' - match all syntagmas. Example: 'very|huge|highly,pitty|hard|happy,man|woman|boy|person' ('|' - delimiter inside a paradigm; ',' - delimiter between syntagma parts.) Notice: No white space is allowed.")
@click.option('--exp_repl', '-repl', default=False,type=bool, help="Disable/Enable Replications Extraction ")
@click.option('--exp_redu', '-redu', default=False,type=bool, help="Disable/Enable Reduplications Extraction ")
@click.option('--exp_syntagma_typ', '-styp', default="lexem", help="Ensure type of the given components in Syntagma_to_search. It is possible to search in pos-tags or in lexems.", type=click.Choice(["pos", "lexem"]))
@click.option('--exp_sentiment', '-sent', default="False", help="Search in Sentiment tagged data.", type=click.Choice(["neutral", "positive","negative", "False"]))
@click.option('--export_file_type', '-ftyp', default="csv", type=click.Choice(['csv', 'json', "xml"]))
@click.option('--rows_limit_in_file', '-rowlim', default=50000, help="Maximum number of rows per exported file." )
@click.option('--encryption_key_corp', '-exp_encrkey', default=False, help="For export additional columns (--additional_doc_cols) from encrypted CorpDB or for compution of the new StatsDb from the encrypted CorpDB")
@click.option('--output_table_type', '-ott', default="exhausted", type=click.Choice(["exhausted", "sum"]))
@click.option('--additional_doc_cols', '-doccols', default=False, help="For export of stats with additional document columns from the CorpDB. Don't forget to also give the fname of the CorpusDB for which the current StatsDB was computed (--corp_fname). Please give it in the following form: 'gender,age' (NO WHITE SPACE IS ALLOWED) ")
@click.option('--max_scope', '-mscope', default=False, help="Upper limit of the syntagma length to search. Example: if max_scope = 1, the tool will search only in syntagmas that contain just 1 word.")
@click.option('--stemmed_search', '-stemm', default=False,type=bool, help="Search in lemmatized/stemmed syntagmas. Be careful not to give different conjugations of one lemma when this option is True, because you could get duplicates.")
@click.option('--context_len_left', '-conleft', default=True, help="The length of the left context in output tables. Can also be disabled (False).")
@click.option('--context_len_right', '-conright', default=False,help="The length of the right context in output tables. Can also be disabled (False).")
@click.option('--separator_syn', '-sepsyn', default=" || ", help="Separator inside syntagma in baseline.")
@click.option('--word_examples_sum_table', '-wordex', default=True,type=bool, help="Enable/disable Word Examples in Exported Output. (Just For SumOutputTables) ")
@click.option('--ignore_symbol', '-ignsym', default=False, help="Enable/disable Symbols in Exported Outputs. (Just For SumOutputTables)")
@click.option('--recompute_flag', '-recflag', default=None,help="For the 'recompute' command. This command recomputes the FullRepetativnes in the given StatsDB. True - full_repetativnes, False - no_full_repetativnes/all_syntagmas ")
@click.option('--attr_name', '-attr', default=False, help="Stats and Corpus DBs have intern attributes. For changing or getting them you need to give the name of the attribute. ")
@click.option('--value', '-val', default=False, help="For setting a new value for an attribute.")
@click.option('--mode', '-m', default="prod" ,help="Set one of the Tool Modus", type=click.Choice(helpers.modi))
@click.option('--logdir', '-ld', default="logs", help="Choose the name of the Directory for log data.")
def stats(command1,
status_bar, use_end_file_marker, make_backup, lazyness_border,
rewrite, use_cash, optimizer, optimizer_page_size,
optimizer_cache_size, optimizer_locking_mode, optimizer_synchronous, optimizer_journal_mode, optimizer_temp_store,
gready,min_files_pro_stream,baseline_delimiter,
corp_fname, stream_number,create_indexes, freeze_db, optimized_for_long_syntagmas,
stats_fname,stats_intern_dbname,visibility, encryption_key , version, stats_id, context_lenght, full_repetativ_syntagma,
repl_up, ignore_hashtag, case_sensitiv, ignore_url, ignore_mention, ignore_punkt, ignore_num,
recompute_flag,value, attr_name, baseline_insertion_border,
export_dir, syntagma_for_export, exp_repl, exp_redu, exp_sentiment, export_name, export_file_type, rows_limit_in_file,
encryption_key_corp, output_table_type, additional_doc_cols, max_scope, stemmed_search, context_len_left, context_len_right,
separator_syn, word_examples_sum_table, exp_syntagma_typ,ignore_symbol,
mode,logdir ):
# $ zas-vot-tools strat1 sets/train_set sets/eval_set segments voiceless voiced vlwindow vcwindow experiments
logger = get_cli_logger(mode,logdir)
func_name = "stats"
#p(command,"command")
if syntagma_for_export != "*":
temp_syn = []
syntagma_for_export = syntagma_for_export.strip("'") if syntagma_for_export[0] == "'" else syntagma_for_export.strip('"')
exctracted_syn = syntagma_for_export.split(",")
for syntagma_part in exctracted_syn:
temp_syn.append(syntagma_part.split("|"))
### combinatoric
if len(temp_syn) == 0:
logger.error("No one syntagma was exctracted. Probably wrong structure was given. Please give syntagma in the following structure 'very|huge|highly,pitty|hard|happy,man|woman|boy|person'")
return False
else:
#p(temp_syn,"temp_syn")
syntagma_for_export = list(itertools.product(*temp_syn))
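# Illustrative expansion (hypothetical values): 'sehr|ganz,gut' is first parsed
# into [['sehr', 'ganz'], ['gut']]; itertools.product then yields the syntagmas
# [('sehr', 'gut'), ('ganz', 'gut')].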
optimizer = strtobool(optimizer)
stats_intern_dbname = strtobool(stats_intern_dbname)
visibility = strtobool(visibility)
#cols_and_types_in_doc = strtobool(cols_and_types_in_doc)
#type_to_export = strtobool(type_to_export)
exp_sentiment = strtobool(exp_sentiment)
recompute_flag = strtobool(recompute_flag) if recompute_flag is not None else None
status_bar = strtobool(status_bar)
stats_fname = strtobool(stats_fname)
try:
max_scope = int(max_scope)
except:
max_scope = False
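# If '--max_scope' is not convertible to int, fall back to False (no upper limit).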
#p(status_bar,"status_bar")
#p(type(status_bar),"status_bar")
if command1 not in supported_commands[func_name]:
logger.error(" Given Command ('{}') is illegal for '{}'. Please use one of the following commands: '{}' ".format(command1,func_name,supported_commands[func_name] ))
return False
if command1 == "compute":
if not corp_fname or not stats_intern_dbname or not visibility:
logger.error("Command is incomplete: One of the following options is empty '--corp_fname', '--stats_intern_dbname', '--visibility' ")
return False
else:
files = get_corp_fname(main_folders)
if corp_fname in files:
pass
elif corp_fname in [os.path.splitext(fname)[0] for fname in files]:
corp_fname = corp_fname+".db"
else:
logger.error("Given corp_fname ('{}') wasn't found and can not be opened.".format(corp_fname))
return False
corp = Corpus(mode="error")
corp.open(os.path.join(main_folders["corp"],corp_fname), encryption_key=encryption_key_corp)
#p(("+++++",corp.corpdb))
if corp.corpdb:
if not corp.corpdb._db:
#p("----")
logger.error("CorpDB-Opening is failed. (CorpFname: ('{}')) If this corpus is encrypted, please use option '--encryption_key_corp'. ".format(corp))
return False
else:
logger.error("CorpDB-Opening is failed. (CorpFname: ('{}')) If this corpus is encrypted, please use option '--encryption_key_corp'. ".format(corp))
return False
#p("....")
language = corp.corpdb.get_attr("language")
corpus_id = corp.corpdb.get_attr("id")
stop_if_db_already_exist = False if rewrite else True
stop_process_if_possible = False if gready else True
stats = Stats(mode=mode, error_tracking=answer_error_tracking, status_bar=status_bar,
make_backup=strtobool(make_backup), lazyness_border=lazyness_border, thread_safe=True, rewrite=strtobool(rewrite), stop_if_db_already_exist=stop_if_db_already_exist,
use_cash=strtobool(use_cash), optimizer=strtobool(optimizer),optimizer_page_size=optimizer_page_size,
optimizer_cache_size=optimizer_cache_size, optimizer_locking_mode=optimizer_locking_mode, optimizer_synchronous=optimizer_synchronous,
optimizer_journal_mode=optimizer_journal_mode, optimizer_temp_store=optimizer_temp_store,stop_process_if_possible=stop_process_if_possible)
stats.init(main_folders["stats"], stats_intern_dbname, language, visibility, corpus_id=corpus_id,
encryption_key=encryption_key,fileName=stats_fname, version=version, stats_id=stats_id,
context_lenght=context_lenght, full_repetativ_syntagma=strtobool(full_repetativ_syntagma),
min_scope_for_indexes=2, repl_up=repl_up, ignore_hashtag=strtobool(ignore_hashtag), force_cleaning=False,
case_sensitiv=strtobool(case_sensitiv), ignore_url=strtobool(ignore_url), ignore_mention=strtobool(ignore_mention),
ignore_punkt=strtobool(ignore_punkt), ignore_num=strtobool(ignore_num),baseline_delimiter=baseline_delimiter,)
#p(stream_number, "stream_number")
stats.compute(corp, stream_number=stream_number, datatyp="dict",
adjust_to_cpu=True,min_files_pro_stream=min_files_pro_stream,cpu_percent_to_get=50,
create_indexes=create_indexes, freeze_db=freeze_db,baseline_insertion_border=baseline_insertion_border,
drop_indexes=True,optimized_for_long_syntagmas=optimized_for_long_syntagmas)
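# Illustrative invocation (hypothetical file names):
#   zas-rep-tools stats compute --corp_fname my_corpus.db \
#       --stats_intern_dbname streamed --visibility intern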
elif command1 == "recompute":
#p((stats_fname, recompute_flag))
if not stats_fname or recompute_flag is None:
logger.error("Command is incomplete: One of the following options is empty '--stats_fname', '--recompute_flag' ")
return False
else:
files = get_stats_fname(main_folders)
if stats_fname in files:
pass
elif stats_fname in [os.path.splitext(fname)[0] for fname in files]:
stats_fname = stats_fname+".db"
else:
logger.error("Given stats_fname ('{}') wasn't found and can not be opened.".format(stats_fname))
return False
stats = Stats(mode=mode)
stats.open(os.path.join(main_folders["stats"],stats_fname), encryption_key=encryption_key)
if not stats.statsdb:
logger.error("StatsDB-Opening is failed. (StatsFname: ('{}')) If this statsus is encrypted, please use option '--encryption_key'. ".format(stats))
return False
stats.recompute_syntagma_repetativity_scope(recompute_flag, _check_statsdb_consistency=True)
elif command1 == "optimize":
if not stats_fname:
logger.error("Command is incomplete: One of the following options is empty '--stats_fname' ")
return False
else:
files = get_stats_fname(main_folders)
if stats_fname in files:
pass
elif stats_fname in [os.path.splitext(fname)[0] for fname in files]:
stats_fname = stats_fname+".db"
else:
logger.error("Given stats_fname ('{}') wasn't found and can not be opened.".format(stats_fname))
return False
stats = Stats(mode=mode)
stats.open(os.path.join(main_folders["stats"],stats_fname), encryption_key=encryption_key)
if not stats.statsdb:
logger.error("StatsDB-Opening is failed. (StatsFname: ('{}')) If this statsus is encrypted, please use option '--encryption_key'. ".format(stats))
return False
stats.optimize_db( stream_number=stream_number, optimized_for_long_syntagmas=optimized_for_long_syntagmas, )
elif command1 == "recreate_indexes":
if not stats_fname:
logger.error("Command is incomplete: One of the following options is empty '--stats_fname' ")
return False
else:
files = get_stats_fname(main_folders)
if stats_fname in files:
pass
elif stats_fname in [os.path.splitext(fname)[0] for fname in files]:
stats_fname = stats_fname+".db"
else:
logger.error("Given stats_fname ('{}') wasn't found and can not be opened.".format(stats_fname))
return False
stats = Stats(mode=mode)
stats.open(os.path.join(main_folders["stats"],stats_fname), encryption_key=encryption_key)
if not stats.statsdb:
logger.error("StatsDB-Opening is failed. (StatsFname: ('{}')) If this statsus is encrypted, please use option '--encryption_key'. ".format(stats))
return False
#stats.optimize_db( stream_number=stream_number, optimized_for_long_syntagmas=optimized_for_long_syntagmas, )
stats.create_additional_indexes(optimized_for_long_syntagmas=optimized_for_long_syntagmas)
elif command1 == "del":
if not stats_fname:
logger.error("'--stats_fname' is not given. (you can also give tag 'all' instead of the stats_fname)")
return False
if stats_fname == "all":
#os.remove()
shutil.rmtree(main_folders["stats"], ignore_errors=True)
logger.info("All StatsDB was removed.")
else:
files = get_stats_fname(main_folders)
if stats_fname in files:
os.remove(os.path.join(main_folders["stats"], stats_fname))
logger.info("'{}'-StatsDB was removed".format(stats_fname))
elif stats_fname in [os.path.splitext(fname)[0] for fname in files]:
os.remove(os.path.join(main_folders["stats"], stats_fname)+".db")
logger.info("'{}'-StatsDB was removed".format(stats_fname))
else:
logger.error("Given fname ('{}') wasn't found and can not be removed.".format(stats_fname))
return False
elif command1 == "clean_dir":
#if not corp_fname:
# logger.error("'--corp_fname' is not given. (you can also give tag 'all' instead of the corp_fname)")
#if corp_fname == "all":
files = get_stats_fname(main_folders)
validated,possibly_encrypted,wrong,opened_db = validate_stats(main_folders,files)
deleted = []
for temp_dbname in validated:
stats = Sretats(mode="blind")
stats.open(os.path.join(main_folders["stats"], temp_dbname))
if stats.statsdb:
if stats.statsdb.get_attr("locked"):
deleted.append(temp_dbname)
os.remove(os.path.join(main_folders["stats"], temp_dbname))
files = os.listdir(main_folders["stats"])
for journal_fname in [fname for fname in files if ".db-journal" in fname]:
#deleted.append(temp_dbname)
os.remove(os.path.join(main_folders["stats"], journal_fname))
if deleted:
print " Following not finished and locked statsDBs was deleted:"
for dn in deleted:
print " |-> '{}';".format(dn)
return True
else:
print " Locked or not finished statsDBs wasn't found."
return False
elif command1 == "names":
#p("fghjk")
files = get_stats_fname(main_folders)
#p(files, "files")
validated,possibly_encrypted,wrong,opened_db = validate_stats(main_folders,files)
print ">>> {} DBs was found <<< ".format(len(files))
print " '{}'-From them was validated:".format(len(validated))
for i, fname in enumerate(validated):
print " {}. '{}';".format(i, fname)
if possibly_encrypted:
print "\n '{}'-From them are possibly encrypted/damaged/invalid:".format(len(possibly_encrypted))
for i, fname in enumerate(possibly_encrypted):
print " {}. '{}';".format(i, fname)
#p(files)
elif command1 == "meta":
if not stats_fname:
logger.error("'--stats_fname' is not given. (you can also give tag 'all' instead of the stats_fname)")
return False
files = get_stats_fname(main_folders)
validated,possibly_encrypted,wrong,opened_db = validate_stats(main_folders,files)
if stats_fname == "all":
for db in opened_db:
print("\n >>>> {} <<<<".format(db.fname()))
for k,v in db.get_all_attr().items():
print " {} = '{}';".format(k,v)
print "\n\nNotice! with 'all'-Argument could be checked just not-encrypted DBs. If you want to check encrypted DB use additional to stats_fname also '--encryption_key'"
else:
if stats_fname in files:
if stats_fname in validated:
ix = validated.index(stats_fname)
db = opened_db[ix]
print("\n >>>> {} <<<<".format(db.fname()))
for k,v in db.get_all_attr().items():
print " {} = '{}';".format(k,v)
elif stats_fname in possibly_encrypted:
if not encryption_key:
logger.error("Current DBFile is possibly encrypted/damaged. Please use '--encryption_key'-Option to decrypt it. ")
else:
h = DBHandler(mode="blind")
h.connect(os.path.join(main_folders["stats"],stats_fname),encryption_key=encryption_key)
try:
if h.typ() == "stats":
print("\n >>>> {} <<<<".format(h.fname()))
for k,v in h.get_all_attr().items():
print " {} = '{}';".format(k,v)
else:
logger.error("'{}'-DB is not a StatsDB or the given encryption key ('{}') is wrong. ".format(stats_fname,encryption_key))
return False
except:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(stats_fname,encryption_key))
return False
elif stats_fname in [os.path.splitext(fname)[0] for fname in files]:
spl1 = [os.path.splitext(fname)[0] for fname in validated]
spl2 = [os.path.splitext(fname)[0] for fname in possibly_encrypted]
if stats_fname in spl1:
ix = spl1.index(stats_fname)
db = opened_db[ix]
print("\n >>>> {} <<<<".format(db.fname()))
for k,v in db.get_all_attr().items():
print " {} = '{}';".format(k,v)
elif stats_fname in spl2:
if not encryption_key:
logger.error("Current DBFile is possibly encrypted/damaged. Please use '--encryption_key'-Option to decrypt it. ")
else:
h = DBHandler(mode="blind")
h.connect(os.path.join(main_folders["stats"],stats_fname)+".db",encryption_key=encryption_key)
try:
if h.typ() == "stats":
print("\n >>>> {} <<<<".format(h.fname()))
for k,v in h.get_all_attr().items():
print " {} = '{}';".format(k,v)
else:
logger.error("'{}'-DB is not a StatsDB or the given encryption key ('{}') is wrong. ".format(stats_fname,encryption_key))
return False
except:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(stats_fname,encryption_key))
return False
else:
logger.error("Given fname ('{}') wasn't validated. It means, that possibly it is not a StatsDB or it is just encrypted!".format(stats_fname))
return False
else:
logger.error("Given fname ('{}') wasn't found!".format(stats_fname))
return False
elif command1 == "basic_stats":
if not stats_fname:
logger.error("'--stats_fname' is not given. (you can also give tag 'all' instead of the stats_fname)")
return False
files = get_stats_fname(main_folders)
validated,possibly_encrypted,wrong,opened_db = validate_stats(main_folders,files)
if stats_fname == "all":
for db in opened_db:
print("\n >>>> {} <<<<".format(db.fname()))
print " repl_num = '{}';".format(db.rownum("replications"))
print " redu_num = '{}';".format(db.rownum("reduplications"))
print " baseline_syntagma_num = '{}';".format(db.rownum("baseline"))
print "\n\nNotice! with 'all'-Argument could be checked just not-encrypted DBs. If you want to check encrypted DB use additional to stats_fname also '--encryption_key'"
else:
if stats_fname in files:
if stats_fname in validated:
ix = validated.index(stats_fname)
db = opened_db[ix]
print("\n >>>> {} <<<<".format(db.fname()))
print " repl_num = '{}';".format(db.rownum("replications"))
print " redu_num = '{}';".format(db.rownum("reduplications"))
print " baseline_syntagma_num = '{}';".format(db.rownum("baseline"))
elif stats_fname in possibly_encrypted:
if not encryption_key:
logger.error("Current DBFile is possibly encrypted/damaged. Please use '--encryption_key'-Option to decrypt it. ")
else:
h = DBHandler(mode="blind")
h.connect(os.path.join(main_folders["stats"],stats_fname),encryption_key=encryption_key)
try:
if h.typ() == "stats":
print("\n >>>> {} <<<<".format(h.fname()))
print " repl_num = '{}';".format(db.rownum("replications"))
print " redu_num = '{}';".format(db.rownum("reduplications"))
print " baseline_syntagma_num = '{}';".format(db.rownum("baseline"))
else:
logger.error("'{}'-DB is not a StatsDB or the given encryption key ('{}') is wrong. ".format(stats_fname,encryption_key))
return False
except:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(stats_fname,encryption_key))
return False
elif stats_fname in [os.path.splitext(fname)[0] for fname in files]:
spl1 = [os.path.splitext(fname)[0] for fname in validated]
spl2 = [os.path.splitext(fname)[0] for fname in possibly_encrypted]
if stats_fname in spl1:
ix = spl1.index(stats_fname)
db = opened_db[ix]
print("\n >>>> {} <<<<".format(db.fname()))
print " repl_num = '{}';".format(db.rownum("replications"))
print " redu_num = '{}';".format(db.rownum("reduplications"))
print " baseline_syntagma_num = '{}';".format(db.rownum("baseline"))
elif stats_fname in spl2:
stats_fname = stats_fname+".db"
if not encryption_key:
logger.error("Current DBFile is possibly encrypted/damaged. Please use '--encryption_key'-Option to decrypt it. ")
else:
h = DBHandler(mode="blind")
h.connect(os.path.join(main_folders["stats"],stats_fname),encryption_key=encryption_key)
try:
if h.typ() == "stats":
print("\n >>>> {} <<<<".format(h.fname()))
print " repl_num = '{}';".format(db.rownum("replications"))
print " redu_num = '{}';".format(db.rownum("reduplications"))
print " baseline_syntagma_num = '{}';".format(db.rownum("baseline"))
else:
logger.error("'{}'-DB is not a StatsDB or the given encryption key ('{}') is wrong. ".format(stats_fname,encryption_key))
return False
except:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(stats_fname,encryption_key))
return False
else:
logger.error("Given fname ('{}') wasn't validated. It means, that possibly it is not a StatsDB or it is just encrypted!".format(stats_fname))
return False
else:
logger.error("Given fname ('{}') wasn't found!".format(stats_fname))
return False
elif command1 == "update_attr":
if not stats_fname or not attr_name or not value:
logger.error("Command is incomplete: '--stats_fname' or '--attr_name' or '--value' is not given.")
else:
files = get_stats_fname(main_folders)
if stats_fname in files:
pass
elif stats_fname in [os.path.splitext(fname)[0] for fname in files]:
stats_fname = stats_fname+".db"
else:
logger.error("Given fname ('{}') wasn't found.".format(stats_fname))
return False
try:
db = DBHandler(mode="error")
db.connect(os.path.join(main_folders["stats"],stats_fname),encryption_key=encryption_key)
if db._db:
if db.typ() == "stats":
if attr_name not in db.get_all_attr():
logger.error("Given Attribute ('{}') is not exist in this DataBase.".format(attr_name))
return False
db.update_attr(attr_name,value)
db._commit()
updated_attr = db.get_attr(attr_name)
#p((updated_attr, value))
if str(value) != str(updated_attr):
logger.error("Update of the given Attribute ('{}') failed.".format(attr_name))
return False
else:
logger.info("Given Attribute ('{}') in the '{}'-DB was updated to '{}'.".format(attr_name,stats_fname, value))
return True
else:
logger.error("'{}'-DB is not a StatsDB or the given encryption key ('{}') is wrong. ".format(stats_fname,encryption_key))
return False
else:
#p(type(strtobool(encryption_key)), "strtobool(encryption_key)")
if strtobool(encryption_key):
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(stats_fname,encryption_key))
else:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged, encrypted or locked. Please use the '--encryption_key' option to set an encryption key. ".format(stats_fname))
return False
except:
if strtobool(encryption_key):
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged or the encryption key ('{}') was wrong. ".format(stats_fname,encryption_key))
else:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged, encrypted or locked. Please use the '--encryption_key' option to set an encryption key. ".format(stats_fname))
return False
elif command1 == "export":
if not stats_fname or not export_file_type :
logger.error("Command is incomplete: '--stats_fname' or '--export_file_type' is not given.")
return False
else:
if not exp_repl and not exp_redu:
logger.error("No one Repetition Phanomen was selected. Please select minimum one phanomen. Ypu can use for that following options '--exp_redu', '--exp_repl' ")
return False
files = get_stats_fname(main_folders)
if stats_fname in files:
pass
elif stats_fname in [os.path.splitext(fname)[0] for fname in files]:
stats_fname = stats_fname+".db"
else:
logger.error("Given fname ('{}') wasn't found".format(stats_fname))
return False
stats = Stats(mode=mode, status_bar=status_bar)
stats.open(os.path.join(main_folders["stats"],stats_fname),encryption_key=encryption_key)
if stats.statsdb._db:
export_dir = export_dir if export_dir else main_folders["export"]
export_name = export_name if export_name else False
#if syntagma_for_export != "*":
#syntagma_for_export = syntagma_for_export.strip("'")
#syntagma_for_export = syntagma_for_export.split(",")
if additional_doc_cols:
if not corp_fname:
logger.error("'--corp_fname' wasn't given For additional extraction of the columns from CorpDB you need also to give CorpDB-Name.")
return False
additional_doc_cols = additional_doc_cols.strip("'")
additional_doc_cols = additional_doc_cols.split(",")
additional_doc_cols = [col.strip(" ") for col in additional_doc_cols]
if corp_fname:
files = get_corp_fname(main_folders)
if corp_fname in files:
pass
elif corp_fname in [os.path.splitext(fname)[0] for fname in files]:
corp_fname = corp_fname+".db"
else:
logger.error("Given corp_fname ('{}') wasn't found and can not be opened.".format(corp_fname))
return False
path_to_corpdb = os.path.join(main_folders["corp"],corp_fname) if corp_fname else False
#p((export_dir, syntagma_for_export, exp_repl, exp_redu,exp_syntagma_typ, exp_sentiment, export_name, export_file_type, rows_limit_in_file, encryption_key_corp, output_table_type, additional_doc_cols,corp_fname, max_scope, stemmed_search,context_len_left, context_len_right, separator_syn,word_examples_sum_table,ignore_num,ignore_symbol), r=True)
#p( {'path_to_corpdb': False, 'stemmed_search': False, 'redu': True, 'rewrite': False, 'ignore_num': False, 'additional_doc_cols': False, 'syntagma': u'*', 'baseline': True, 'sentiment': False, 'self': <zas_rep_tools.src.classes.stats.Stats object at 0x105f92b10>, 'encryption_key_for_exported_db': False, 'fname': False, 'word_examples_sum_table': True, 'syntagma_type': 'lexem', 'context_len_right': False, 'repl': True, 'max_scope': u'3', 'output_table_type': 'exhausted', 'path_to_export_dir': '/Users/egoruni/Desktop/BA/Code/zas-rep-tools/zas_rep_tools/tests/prjdir/export', 'encryption_key_corp': False, 'separator_syn': u' || ', 'context_len_left': True, 'rows_limit_in_file': 50000, 'ignore_symbol': False, 'export_file_type': 'csv'}(syntagma_for_export, exp_repl, exp_redu, exp_syntagma_typ))
#p(syntagma_for_export,"syntagma")
stats.export( export_dir, syntagma=syntagma_for_export, repl=exp_repl, redu=exp_redu,
baseline=True, syntagma_type=exp_syntagma_typ, sentiment=exp_sentiment,
fname=export_name, export_file_type=export_file_type, rows_limit_in_file=rows_limit_in_file,
encryption_key_corp=encryption_key_corp, output_table_type=output_table_type,
additional_doc_cols=additional_doc_cols,
path_to_corpdb=path_to_corpdb, max_scope=max_scope, stemmed_search=stemmed_search,rewrite=False,
context_len_left=context_len_left, context_len_right=context_len_right, separator_syn=separator_syn,
word_examples_sum_table=word_examples_sum_table,ignore_num=ignore_num,ignore_symbol=ignore_symbol)
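# Illustrative invocation (hypothetical file names):
#   zas-rep-tools stats export --stats_fname my_stats --export_file_type csv \
#       --exp_repl True --exp_redu True --syntagma_for_export 'sehr|ganz,gut'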
else:
#p(type(strtobool(encryption_key)), "strtobool(encryption_key)")
if strtobool(encryption_key):
logger.error("'{}'-DB wasn't opened. Possibly this DB is locked or damaged, or the given encryption key ('{}') is wrong. ".format(stats_fname,encryption_key))
else:
logger.error("'{}'-DB wasn't opened. Possibly this DB is damaged, encrypted or locked. Please use the '--encryption_key' option to set an encryption key. ".format(stats_fname))
return False
@main.command('streamTwitter')
@click.argument('path_to_save',type=click.Path())
@click.option('--language', '-l', default="False", type=click.Choice(list(Streamer.supported_languages)+ ["False"]))
@click.option('--stop_words', '-sw', default=False)
@click.option('--terms', '-t', default=False)
@click.option('--encoding', '-e', default='utf_8', type=click.Choice(list(Streamer.supported_encodings_types)))
@click.option('--ignore_rt', '-irt', default=False, type=bool)
@click.option('--filter_strategie', '-f', default="False", help="Set the filter strategy. 1) 't' - just search for terms/stop_words; 2) 't+l' - search for stop_words and language (recommended) ",type=click.Choice(list(["t", "t+l", "False", ])))
@click.option('--save_used_terms', '-sut', default=True, type=bool)
@click.option('--mode', '-m', default="prod" ,help="Set one of the Tool Modus", type=click.Choice(helpers.modi))
@click.option('--logdir', '-ld', default="logs", help="Choose the name of the Directory for log data.")
def streamTwitter( path_to_save,language,stop_words,terms,encoding,ignore_rt, filter_strategie, save_used_terms, logdir, mode):
logger = get_cli_logger(mode,logdir)
func_name = sys._getframe().f_code.co_name
# self.test_consumer_key = "97qaczWSRfaaGVhKS6PGHSYXh"
# self.test_consumer_secret = "mWUhEL0MiJh7FqNlOkQG8rAbC8AYs4YiEOzdiCwx26or1oxivc"
# self.test_access_token = "1001080557130932224-qi6FxuYwtvpbae17kCjAS9kfL8taNT"
# self.test_access_token_secret = "jCu2tTVwUW77gzOtK9X9svbdKUFvlSzAo4JfIG8tVuSgX"
#p(configer_obj._user_data["twitter_creditials"])
try:
configer_obj._user_data["twitter_creditials"]
if not configer_obj._user_data["twitter_creditials"]:
raise Exception, "STOP"
except:
configer_obj._cli_menu_get_from_user_twitter_credentials()
try:
if not configer_obj._user_data["twitter_creditials"]:
logger.error("TwitterCreditials wasn't found. Please give TwitterCreditials before you can use this Tools.")
return False
except:
logger.error("TwitterCreditials wasn't found. Please give TwitterCreditials before you can use this Tools.")
return False
language = strtobool(language)
#p(configer_obj._user_data["twitter_creditials"])
#p(configer_obj._user_data["twitter_creditials"][0])
configer_obj._user_data["twitter_creditials"][0]
consumer_key = configer_obj._user_data["twitter_creditials"][0]["consumer_key"]
consumer_secret = configer_obj._user_data["twitter_creditials"][0]["consumer_secret"]
access_token = configer_obj._user_data["twitter_creditials"][0]["access_token"]
access_token_secret = configer_obj._user_data["twitter_creditials"][0]["access_token_secret"]
#p((consumer_key, consumer_secret, access_token, access_token_secret, path_to_save))
if stop_words and not os.path.isfile(stop_words):
if stop_words not in Streamer.stop_words_collection:
stop_words = stop_words.split(",")
logger.info("Recognized stop-words: {}".format(stop_words))
if terms and not os.path.isfile(terms):
terms = terms.split(",")
logger.info("Recognized terms: {}".format(terms))
stream = Streamer(consumer_key, consumer_secret, access_token, access_token_secret, path_to_save, platfrom="twitter",
language=language, email_addresse=email, stop_words=stop_words, terms=terms,
encoding=encoding, ignore_rt=ignore_rt, save_used_terms=save_used_terms, filterStrat=filter_strategie,
mode=mode)
stream.stream_twitter()
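# Example invocation (illustrative; assumes the installed console script is
# named `zas-rep-tools`, and all option values are placeholders):
#   $ zas-rep-tools streamTwitter corpus/ -l de -t "berlin,wetter" -f t+l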
@main.command('streamerInfo')
@click.argument('command')
@click.option('--mode', '-m', default="prod", help="Set one of the tool modes", type=click.Choice(helpers.modi))
@click.option('--logdir', '-ld', default="logs", help="Choose the name of the Directory for log data.")
#@click.option('--logs_dir', '-l', default="logs")
def streamerInfo(command, logdir, mode):
# $ zas-vot-tools strat1 sets/train_set sets/eval_set segments voiceless voiced vlwindow vcwindow experiments
logger = get_cli_logger(mode,logdir)
func_name = sys._getframe().f_code.co_name
possible_commands = ["enc", "lang", "nltk_lang", "twitter_lang", "classiefier_lang", "stop_words", "platforms"]
if command not in possible_commands:
logger.error("Given Command {} is not exist. Please use one of the following commands: {}".format(command, possible_commands))
if command == "enc":
print Streamer.supported_encodings_types
if command == "lang":
print Streamer.supported_languages
if command == "nltk_lang":
print [k for k in Streamer.NLTKlanguages]
if command == "twitter_lang":
print Streamer.supported_languages_by_twitter
if command == "classiefier_lang":
print Streamer.supported_languages_by_langid
if command == "stop_words":
print Streamer.supported_stop_words
if command == "platforms":
print Streamer.supported_platforms
@main.command('testspath')
def testspath():
print configer_obj.path_to_tests
if __name__ == "__main__":
    main()
| zas-rep-tools | /zas-rep-tools-0.2.tar.gz/zas_rep_tools-0.2/zas_rep_tools/cli/main.py | main.py
zas
===
.. image:: https://travis-ci.org/appstore-zencore/zas.svg?branch=master
:target: https://travis-ci.org/appstore-zencore/zas
Zencore Application Server
Install
-------
::
pip install zas
Usage
-----
::
E:\zas\src\scripts>python zas.py
Usage: zas.py [OPTIONS] COMMAND [ARGS]...
Options:
-c, --config FILENAME Config file path, use yaml format. Default to
config.yaml.
--help Show this message and exit.
Commands:
reload Reload application server.
start Start application server.
stop Stop application server.
Example Config
--------------
::
application:
daemon: true
pidfile: /tmp/appname.pid
main: app.main
Create a new server type
------------------------
::
from zascore import server
from zascore import set_default_config_path
from zascore import set_default_pidfile
from zascore import set_config_loader
from zascore import default_config_loader
def helloserver_loader(config):
data = default_config_loader(config)
data["server-name"] = "hello server v1.0.0"
return data
if __name__ == "__main__":
set_default_config_path("/etc/helloserver.yaml")
set_default_pidfile("/var/run/helloserver.pid")
set_config_loader(helloserver_loader)
server()
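Run it like the stock server (illustrative; assuming the script above is
saved as ``helloserver.py``)::

    python helloserver.py start
    python helloserver.py stop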
| zas | /zas-0.2.0.tar.gz/zas-0.2.0/README.rst | README.rst |
import os
import yaml
import click
from zencore.utils.magic import import_from_string
from dictop import select
from zdas import daemon_start
from zdas import daemon_stop
DEFAULT_CONFIG_PATH = "config.yaml"
DEFAULT_PIDFILE = "server.pid"
GLOBAL_CONFIG = {}
CONFIG_LOADER = None
def set_default_config_path(path):
global DEFAULT_CONFIG_PATH
DEFAULT_CONFIG_PATH = path
def set_default_pidfile(pidfile):
global DEFAULT_PIDFILE
DEFAULT_PIDFILE = pidfile
def set_config_loader(loader):
global CONFIG_LOADER
CONFIG_LOADER = loader
def default_config_loader(config):
data = {}
if config:
if isinstance(config, dict):
data = config
else:
data = yaml.safe_load(config)  # click.File gets a BufferedReader instance; safe_load avoids arbitrary object construction
return data or {}
def main():
os.sys.path.append(os.getcwd())
real_main = select(GLOBAL_CONFIG, "application.main")
if not real_main:
print("Item application.main required in config file.", file=os.sys.stderr)
os.sys.exit(1)
real_main = import_from_string(real_main)
if not real_main:
print("Load application.main = {} failed.".format(real_main), file=os.sys.stderr)
os.sys.exit(2)
real_main(GLOBAL_CONFIG)
@click.group()
@click.option("-c", "--config", default=DEFAULT_CONFIG_PATH, type=click.File("rb"), help="Config file path, use yaml format. Default to {}.".format(DEFAULT_CONFIG_PATH))
def server(config):
load_config = CONFIG_LOADER or default_config_loader
GLOBAL_CONFIG.update(load_config(config))
@server.command()
def start():
"""Start application server.
"""
daemon = select(GLOBAL_CONFIG, "application.daemon", False)
workspace = select(GLOBAL_CONFIG, "application.workspace", None)
pidfile = select(GLOBAL_CONFIG, "application.pidfile", DEFAULT_PIDFILE)
daemon_start(main, pidfile, daemon, workspace)
@server.command()
def stop():
"""Stop application server.
"""
pidfile = select(GLOBAL_CONFIG, "application.pidfile", DEFAULT_PIDFILE)
daemon_stop(pidfile)
@server.command()
def reload():
"""Reload application server.
"""
stop()
    start()
| zas | /zas-0.2.0.tar.gz/zas-0.2.0/src/zascore/core.py | core.py
# %% auto 0
__all__ = ['NFTCollection', 'Token']
# %% ../nbs/01_collection.ipynb 4
import pandas as pd
from dateparser import parse as parse_date
from functools import cache
from dataclasses import dataclass
from datetime import datetime, timedelta
from .connectors import Connector
from fastcore.utils import patch
from fastcore.basics import basic_repr
from box import Box
from operator import attrgetter
from typing import Callable
from jinja2 import Template
from typing import Union
# %% ../nbs/01_collection.ipynb 5
def _parse_start_end_dates(start_time: str, end_time: str = "now") -> tuple[datetime, datetime]:
# ensure you have end_time:
end_time = end_time or 'now'
shortcuts = {
# put more here, but should be convertible to dateparser lib conventions.
"latest": 'now',
}
start_time = str(datetime.utcnow().date()) if start_time == 'today' else start_time
if start_time in shortcuts: start_time = shortcuts[start_time]
return parse_date(start_time, settings={'TIMEZONE': 'UTC'}), parse_date(end_time, settings={'TIMEZONE': 'UTC'})
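# A quick sketch of the parser above (illustrative; results depend on the
# current time, so the comments describe the shape rather than exact values):
#   >>> _parse_start_end_dates('1 day ago')   # -> (utcnow - 1 day, utcnow)
#   >>> _parse_start_end_dates('today')       # -> (midnight UTC today, utcnow)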
# %% ../nbs/01_collection.ipynb 7
def _sqlize_list(l: list): return ','.join(list(map(lambda x: "'" + x + "'", l)))
# %% ../nbs/01_collection.ipynb 8
def _chain_prefix(chain: str):
return "" if chain == "eth" else chain + "_"
def _build_template(template_str: str, **render_kwargs):
    return (
        Template(template_str, lstrip_blocks=True, trim_blocks=True, autoescape=True)
        .render(**render_kwargs)
        .replace("\n", "")
        .replace("  ", " ")
        .replace("&#39;", "'")  # undo the quote entity that autoescape puts around rendered values
    )
def _build_sol_trade_statement(
start_dt: datetime,
end_dt: datetime,
collection_address: Union[str, list] = "",
mints_only=False,
token_id: str = "",
expand_collection=True,
):
table = "sol_trades" if not mints_only else "sol_mints"
collection_addresses = ""
if type(collection_address) == list:
collection_address, collection_addresses = "", _sqlize_list(collection_address)
if "," in collection_address:
# assume this is sqlized
collection_address, collection_addresses = "", collection_address
template = """
SELECT *
FROM onchain.{{table}}
WHERE date >= '{{start_date}}' AND date <= '{{end_date}}'
AND hour >= {{start_hour}} AND hour <= {{end_hour}}
AND block_time >= {{ start_ts }} AND block_time <= {{ end_ts }}
{%- if collection_address %}
AND collection = '{{ collection_address }}'
{% endif %}
{%- if collection_addresses %}
AND collection_address IN ({{ collection_addresses }})
{% endif %}
{%- if token_id %}
AND mint = '{{ token_id }}'
{% endif %}
"""
return _build_template(
template,
table=table,
start_ts=round(start_dt.timestamp()),
end_ts=round(end_dt.timestamp()),
start_date=start_dt.date(),
end_date=end_dt.date(),
start_hour=start_dt.hour,
end_hour=end_dt.hour,
collection_addresses=collection_addresses,
collection_address=collection_address,
token_id=token_id,
)
def _build_trade_statement(
chain: str,
start_dt: datetime,
end_dt: datetime,
collection_address: Union[str, list] = "",
mints_only=False,
token_id: str = "",
expand_collection=False,
):
if chain == "sol":
return _build_sol_trade_statement(
start_dt=start_dt,
end_dt=end_dt,
collection_address=collection_address,
mints_only=mints_only,
token_id=token_id,
expand_collection=expand_collection,
)
table = f'{_chain_prefix(chain)}{"trades" if not mints_only else "mints"}'
collection_addresses = ""
if type(collection_address) == list:
collection_address, collection_addresses = "", _sqlize_list(collection_address)
if "," in collection_address:
# assume this is sqlized
collection_address, collection_addresses = "", collection_address
template = """
SELECT *
FROM onchain.{{table}}
WHERE timestamp >= {{ start_ts }} AND timestamp <= {{ end_ts }}
{%- if collection_address %}
AND collection_address = '{{ collection_address }}'
{% endif %}
{%- if collection_addresses %}
AND collection_address IN ({{ collection_addresses }})
{% endif %}
{%- if token_id %}
AND token_id = '{{ token_id }}'
{% endif %}
"""
t = _build_template(
template,
table=table,
start_ts=round(start_dt.timestamp()),
end_ts=round(end_dt.timestamp()),
mints_only=mints_only,
collection_addresses=collection_addresses,
collection_address=collection_address,
token_id=token_id,
)
if expand_collection:
expanded_temp = """
WITH x as ({{temp}})
SELECT x.*, y.name, y.slug
FROM x LEFT JOIN offchain.{{metadata_table}} y
ON x.collection_address = y.collection_id
"""
metadata_table = f"{_chain_prefix(chain)}collection_metadata"
        return (
            Template(expanded_temp, lstrip_blocks=True, trim_blocks=True)
            .render(temp=t, metadata_table=metadata_table)
            .replace("\n", "")
            .replace("  ", " ")
        )
else:
return t
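# For orientation, the shape of the SQL this renders (whitespace normalized;
# the timestamps and address below are placeholders, not real values):
#   _build_trade_statement('eth', start_dt, end_dt, collection_address='0xabc')
#   -> "SELECT * FROM onchain.trades WHERE timestamp >= 1672531200
#       AND timestamp <= 1672617600 AND collection_address = '0xabc'"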
def _build_loans_statement(
chain: str,
start_dt: datetime,
end_dt: datetime,
):
chain_prefix = _chain_prefix(chain)
table = chain_prefix + "nft_loans"
template = """
SELECT *
FROM onchain.{{table}}
WHERE timestamp >= {{ start_ts }} AND timestamp <= {{ end_ts }}
"""
return _build_template(
template,
table=table,
start_ts=round(start_dt.timestamp()),
end_ts=round(end_dt.timestamp()),
)
# %% ../nbs/01_collection.ipynb 10
@cache
def __fetch_trades(
conn: Connector,
start_dt: datetime, # start time in UTC. You can use shortcuts like today, yesterday or even clauses such as "1 day/week ago".
end_dt: datetime, # default now. Same type as start_time
chain: str = "eth",
collection_address: str = "", # collection address
token_id: str = "", # token id
mints_only: bool = False, # mints
expand_collection: bool = False,
) -> pd.DataFrame:
return conn.query(
_build_trade_statement(
chain,
start_dt,
end_dt,
collection_address=collection_address,
token_id=token_id,
mints_only=mints_only,
expand_collection=expand_collection,
)
)
def _fetch_trades(
conn: Connector,
start_time: str, # start time in UTC. You can use shortcuts like today, yesterday or even clauses such as "1 day/week ago".
end_time: str = "now", # default now. Same type as start_time
chain: str = "eth",
collection_address: Union[str, list] = "", # collection address
token_id: str = "", # token id
mints_only: bool = False, # filter for mints if True
expand_collection: bool = False, # expand collection columns
) -> pd.DataFrame:
start_dt, end_dt = _parse_start_end_dates(start_time, end_time=end_time)
# we cache the one below because "yesterday" etc. style start_time is relative.
# this is needed for caching:
if type(collection_address) == list:
if len(collection_address) > 0:
collection_address = (
_sqlize_list(collection_address)
if len(collection_address) > 1
else collection_address[0]
)
else:
collection_address = ""
return __fetch_trades(
conn,
start_dt,
end_dt,
chain=chain,
collection_address=(
collection_address.lower() if chain != "sol" else collection_address
),
mints_only=mints_only,
token_id=token_id,
expand_collection=expand_collection,
)
@cache
def __fetch_loans(
conn: Connector,
start_dt: datetime, # start time in UTC. You can use shortcuts like today, yesterday or even clauses such as "1 day/week ago".
end_dt: datetime, # default now. Same type as start_time
chain: str = "eth",
) -> pd.DataFrame:
return conn.query(_build_loans_statement(chain, start_dt, end_dt))
def _fetch_loans(
conn: Connector,
start_time: str, # start time in UTC. You can use shortcuts like today, yesterday or even clauses such as "1 day/week ago".
end_time: str = "now", # default now. Same type as start_time
chain: str = "eth",
collection_address: Union[str, list] = "", # collection address
token_id: str = "", # token id
mints_only: bool = False, # filter for mints if True
expand_collection: bool = False, # expand collection columns
) -> pd.DataFrame:
start_dt, end_dt = _parse_start_end_dates(start_time, end_time=end_time)
if chain != "eth":
raise Exception(f"Chain {chain} is not supported for this dataset")
return __fetch_loans(conn, start_dt, end_dt, chain=chain)
@cache
def _fetch_metadata(conn: Connector, collection_id: str, chain: str = "eth"):
res = conn.query(
f"""
select * from offchain.{_chain_prefix(chain)}collection_metadata
where collection_id='{collection_id.lower()}'"""
).squeeze()
res.name = res["name"]
for i in [
"volume_all",
"volume_daily",
"volume_weekly",
"item_count",
"floor_price",
]:
if i in res:
del res[i]
return res
@cache
def _fetch_metadatas(conn: Connector, collection_ids: tuple[str, ...], chain: str = "eth"):
    # @cache requires hashable arguments, so collection_ids should be passed as a tuple
    cs = ",".join(list(map(lambda i: "'" + i.lower() + "'", collection_ids)))
    df = conn.query(
        f"""
        select * from offchain.{_chain_prefix(chain)}collection_metadata
        where collection_id IN ({cs})"""
    )
    return df.drop(
        columns=["volume_all", "volume_daily", "volume_weekly", "item_count", "floor_price"],
        errors="ignore",
    )
@cache
def _fetch_socials(conn: Connector, collection_slug: str):
return conn.get(f"/socials/{collection_slug}").json()
# %% ../nbs/01_collection.ipynb 11
class NFTCollection:
"""NFT Collection object"""
def __init__(self,
conn: Connector,
collection_id: str, # collection_address e.g. 0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D
chain: str ='eth'): # chain_id - currently only "eth" or "bnb" allowed
self._conn, self.chain, self.collection_id = conn, chain, (collection_id.lower() if chain != 'sol' else collection_id)
# %% ../nbs/01_collection.ipynb 12
@patch
def trades(self: NFTCollection,
start_time: str, # start time in UTC. You can use shortcuts like today, yesterday or even clauses such as "1 day/week ago".
end_time: str = 'now', # default now. Same type as start_time
expand_collection: bool = False, # expand collection details (name, slug etc.)
):
return _fetch_trades(
self._conn,
start_time,
end_time=end_time,
chain=self.chain,
collection_address=self.collection_id,
expand_collection=expand_collection
)
@patch
def token(self: NFTCollection, token_id: str):
"""Token model"""
return Token(self._conn, token_id, self.collection_id, chain=self.chain)
@patch
def metadata(self: NFTCollection):
"""Returns metadata on a collection"""
return _fetch_metadata(self._conn, self.collection_id, chain=self.chain)
@patch
def mints(self: NFTCollection,
start_time: str, # start time in UTC. You can use shortcuts like today, yesterday or even clauses such as "1 day/week ago".
end_time: str = 'now', # default now. Same type as start_time
expand_collection: bool = False # expand collection details (name, slug etc.)
):
return _fetch_trades(
self._conn,
start_time,
end_time=end_time,
chain=self.chain,
collection_address=self.collection_id,
mints_only=True,
expand_collection=expand_collection
)
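# Usage sketch (illustrative; `conn` is an authenticated Connector and the
# address is the BAYC contract used as the example in the constructor docs):
#   >>> bayc = NFTCollection(conn, "0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D")
#   >>> bayc.trades("1 week ago")    # trades as a DataFrame
#   >>> bayc.mints("1 day ago")      # mints as a DataFrame
#   >>> bayc.token("1").metadata()   # offchain metadata for a single token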
# %% ../nbs/01_collection.ipynb 13
def _timeseries_to_df(timeseries: list) -> pd.DataFrame:
"""turn socials timeseries to pd dataframe"""
if not timeseries: return timeseries
_ts = pd.DataFrame([dict(i) for i in timeseries])
_ts['ts'] = _ts['ts'].apply(pd.to_datetime)
    _ts = _ts.set_index('ts')  # set_index returns a copy; bind it
return _ts
@patch
def socials(self: NFTCollection, collection_slug: str):
_socials = Box(
self._conn.get(f'/socials',
params={'id':collection_slug, 'chain': self.chain}).json()['social'])
_socials.discord = _timeseries_to_df(_socials.discord.timeseries)
_socials.twitter = _timeseries_to_df(_socials.twitter.timeseries)
return _socials
# %% ../nbs/01_collection.ipynb 14
class Token:
"""Token model"""
__repr__= basic_repr(['collection_id', 'token_id', 'token_type', 'chain'])
def __init__(self, conn: Connector, token_id: str, collection_id:str, chain:str='eth', token_type='721'):
self._conn, self.token_id, self.chain, self.token_type = conn, token_id, chain, token_type
self.collection_id = collection_id
def trades(self: 'Token',
start_time: str, # start time in UTC. You can use shortcuts like today, yesterday or even clauses such as "1 day/week ago".
end_time: str = 'now' # default now. Same type as start_time
):
return _fetch_trades(
self._conn,
start_time,
end_time=end_time,
chain=self.chain,
collection_address=self.collection_id,
token_id=self.token_id
)
@cache
def metadata(self):
"""Token metadata fetched as is from offchain stores"""
        return self._conn.rpc(self.chain).get_token_metadata(self.token_id, self.collection_id)
| zash-sdk | /zash_sdk-0.0.10-py3-none-any.whl/zash_sdk/collection.py | collection.py
# %% auto 0
__all__ = ['aws_client_config', 'client', 'ci_client', 'BASE_URL', 'VERSION', 'DEFAULT_CHAIN', 'PERIODS', 'URI',
'IPFS_PUB_GATEWAY', 'authenticate', 'RSConnector', 'APIConnector', 'W3Connector', 'Connector']
# %% ../nbs/00_connectors.ipynb 3
import redshift_connector
import boto3
import json
import requests
import uuid
import pandas as pd
from functools import cache
from redshift_connector import Connection
from botocore.exceptions import ClientError
from botocore.config import Config
from fastcore.meta import delegates
from fastcore.basics import basic_repr
from functools import cache
from urllib.parse import urljoin
# needed for web3 client
from web3 import Web3
from web3.middleware import geth_poa_middleware
from box import Box
# %% ../nbs/00_connectors.ipynb 5
aws_client_config = Config(region_name="eu-west-1")
client = boto3.client("cognito-idp", config=aws_client_config)
ci_client = boto3.client("cognito-identity", config=aws_client_config)
def _fetch_dev_creds():
"""fetch api_key and db_user from parameter store"""
ssm = boto3.client('ssm')
return json.loads(ssm.get_parameter(
Name='sdk-admin-creds',
)['Parameter']['Value'])
@cache
def _authenticate(username, api_key):
auth_res = client.initiate_auth(
ClientId="377phvhgdn23vsor2ndafu02ek",
AuthFlow="USER_PASSWORD_AUTH",
AuthParameters={
"USERNAME": username,
"PASSWORD": api_key,
},
)["AuthenticationResult"]
_logins = {
"cognito-idp.eu-west-1.amazonaws.com/eu-west-1_NVKhGLh75":
auth_res["IdToken"]
}
identity_id = ci_client.get_id(
IdentityPoolId="eu-west-1:1b1ee2fe-2476-414c-9b29-993a56efb136",
Logins=_logins,
)["IdentityId"]
return {
**ci_client.get_credentials_for_identity(
IdentityId=identity_id,
Logins=_logins)["Credentials"],
'username': username,
'api_key': api_key
}
def _connect_to_redshift(creds):
return redshift_connector.connect(
iam=True,
database="prod",
db_user=creds['username'],
cluster_identifier="redshift-cluster-1",
access_key_id=creds["AccessKeyId"],
secret_access_key=creds["SecretKey"],
session_token=creds["SessionToken"],
region="eu-west-1",
)
def _connect_to_redshift_dev():
return _connect_to_redshift({
'username': _fetch_dev_creds()['db_user'],
"AccessKeyId": None,
"SecretKey": None,
"SessionToken": None
})
def authenticate(
username: str, # username, usually your email
api_key: str, # generated api_key, you can request one from zash.sh
) -> redshift_connector.Connection:
"""Authenticate and get Zash data warehouse connection"""
try:
if username == "your_username" or api_key == "your_api_key":
return _connect_to_redshift_dev()
return _connect_to_redshift(_authenticate(username, api_key))
    except ClientError as e:
        if not len(e.args):
            raise e
        if "ExpiredToken" in e.args[0]:
            _authenticate.cache_clear()
            return _connect_to_redshift(_authenticate(username, api_key))
        raise e  # don't silently return None on other client errors
# %% ../nbs/00_connectors.ipynb 6
class RSConnector:
"""Redshift connector"""
def __init__(self, username: str, api_key: str):
self._conn = authenticate(username, api_key)
self.username, self.api_key = username, api_key
def refresh(self):
self.close()
self._conn = authenticate(self.username, self.api_key)
    def _fetch(self, statement: str):
        cur = self._conn.cursor()  # create outside try so `cur` is always bound
        try:
            cur.execute(statement)
            return cur.fetch_dataframe()
        except Exception as e:
            # roll back the failed transaction so the connection stays usable
            self._conn.rollback()
            raise e
        finally:
            cur.close()
def query(self, statement: str):
try:
return self._fetch(statement)
except ClientError as e:
if not len(e.args): raise e
if "ExpiredToken" in e.args[0]:
print('Session expired, reauthenticating...')
self.refresh()
return self._fetch(statement)
def close(self):
self._conn.close()
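# Usage sketch (illustrative; credentials are placeholders):
#   >>> rs = RSConnector("user@example.com", "api-key")
#   >>> df = rs.query("SELECT 1")  # reauthenticates transparently on expiry
#   >>> rs.close()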
# %% ../nbs/00_connectors.ipynb 9
BASE_URL = "https://api.zash.sh"
VERSION = "v2"
DEFAULT_CHAIN = "eth"
PERIODS = {"1d", "1w", "1m", "all"}
class APIConnector:
"""Zash API connector. See [API docs](https://zashapi.readme.io) for more details."""
def __init__(self, base_url: str = BASE_URL, api_key: str = "") -> None:
self._session = requests.Session()
api_key = _fetch_dev_creds()['api_key'] if api_key == 'your_api_key' else api_key
self._session.headers.update({"X-API-Key": api_key})
self.base_url = base_url
self.product = "nft"
self.base_path = urljoin(base_url, f"{VERSION}/{self.product}")
@delegates(requests.Session.get)
def get(self,
path: str, # API path that is after the product (i.e nft), i.e. /socials/bayc
*args, **kwargs):
if not path.startswith('/'): path = '/' + path
return self._session.get(self.base_path + path, *args, **kwargs)
def close(self): self._session.close()
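# Usage sketch (illustrative; the slug is hypothetical, but the /socials call
# mirrors how collection.py uses this connector):
#   >>> api = APIConnector(api_key="api-key")
#   >>> api.get("/socials", params={"id": "some-collection-slug", "chain": "eth"})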
# %% ../nbs/00_connectors.ipynb 12
URI = {
'bnb': 'https://bsc-dataseed2.binance.org',
'eth':'https://mainnet.infura.io/v3/c13b727bfe204e51931409863f75ce15',
'matic':'https://polygon-mainnet.infura.io/v3/c13b727bfe204e51931409863f75ce15'
}
IPFS_PUB_GATEWAY = 'https://ipfs.io'
_nft_metadata_abi = [{"inputs":[{"internalType":"uint256","name":"tokenId","type":"uint256"}],"name":"tokenURI","outputs":[{"internalType":"string","name":"","type":"string"}],"stateMutability":"view","type":"function"}]
def _w3_client(uri, chain):
    if chain == "eth":
        return Web3(Web3.HTTPProvider(uri))
    elif chain == "bnb":
        _w3 = Web3(Web3.HTTPProvider(uri))
        _w3.middleware_onion.inject(geth_poa_middleware, layer=0)
        return _w3
    # fall back to a plain client so other chains (e.g. 'matic') don't silently get None
    return Web3(Web3.HTTPProvider(uri))
class W3Connector:
"""Web3 connector to connect to RPC endpoints"""
def __init__(self,
chain:str ='eth', # chain (e.g. eth or bnb)
uri: str = ''): # uri if needs customization
self.uri = uri or URI[chain]
self.chain = chain
self._client = _w3_client(self.uri, self.chain)
def get_token_metadata(self, token_id: int, collection_address: str, abi=_nft_metadata_abi):
if collection_address == collection_address.lower():
collection_address = Web3.toChecksumAddress(collection_address)
token_id = int(token_id)
token_uri = self._client.eth.contract(collection_address, abi=_nft_metadata_abi).functions.tokenURI(token_id).call()
if token_uri.strip().startswith('ipfs'):
token_uri = token_uri.replace('ipfs://', f'{IPFS_PUB_GATEWAY}/ipfs/')
return Box({
'uri': token_uri,
**requests.get(token_uri).json()
})
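# Usage sketch (illustrative; token id and contract address are the usual
# BAYC example from the docs above):
#   >>> w3c = W3Connector(chain="eth")
#   >>> meta = w3c.get_token_metadata(1, "0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D")
#   >>> meta.uri  # resolved tokenURI (IPFS URIs rewritten to the public gateway)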
# %% ../nbs/00_connectors.ipynb 15
class Connector:
def __init__(self, username: str, api_key:str):
self._rs_connector, self._api_connector = RSConnector(username, api_key), APIConnector(api_key=api_key)
@delegates(RSConnector.query)
def query(self, *args, **kwargs): return self._rs_connector.query(*args, **kwargs)
@delegates(APIConnector.get)
def get(self, *args, **kwargs): return self._api_connector.get(*args, **kwargs)
def rpc(self, chain: str): return W3Connector(chain=chain)
def close(self):
self._rs_connector.close()
        self._api_connector.close()
| zash-sdk | /zash_sdk-0.0.10-py3-none-any.whl/zash_sdk/connectors.py | connectors.py
# %% auto 0
__all__ = ['NFT']
# %% ../nbs/02_nft.ipynb 3
import pandas as pd
from fastcore.utils import patch
from fastcore.meta import delegates
from .connectors import Connector
from .collection import NFTCollection, _fetch_trades, _fetch_loans
# %% ../nbs/02_nft.ipynb 4
class NFT:
"""Access NFT intelligence, such as trades and collections"""
def __init__(self,
conn: Connector,
chain: str = "eth" # chain_id - currently only "eth" and "bnb" is allowed
) -> None:
self._conn, self.chain = conn, chain
def mints(self,
start_time: str, # start time in UTC. You can use shortcuts like today, yesterday or even clauses such as "1 day/week ago".
end_time: str = '' # default now. Same type as start_time
)-> pd.DataFrame: # pandas dataframe with each row as a trade
return _fetch_trades(self._conn, start_time, end_time=end_time, chain=self.chain, mints_only=True)
def trades(self,
start_time: str, # start time in UTC. You can use shortcuts like today, yesterday or even clauses such as "1 day/week ago".
end_time: str = '', # default now. Same type as start_time
filter_collections=[], # collections to filter for when fetching trades
expand_collection: bool = False # expand collection column with collection details (i.e. slug, name)
)-> pd.DataFrame: # pandas dataframe with each row as a trade
return _fetch_trades(self._conn, start_time, end_time=end_time, chain=self.chain, expand_collection=expand_collection,collection_address=filter_collections)
def loans(self,
start_time: str, # start time in UTC. You can use shortcuts like today, yesterday or even clauses such as "1 day/week ago".
end_time: str = '', # default now. Same type as start_time
)-> pd.DataFrame: # pandas dataframe with each row as a loan
return _fetch_loans(self._conn, start_time, end_time=end_time, chain=self.chain)
def collection(self,
collection_id: str # collection_address with checksum (e.g. 0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D)
) -> NFTCollection:
return NFTCollection(self._conn, collection_id, chain=self.chain)
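# Usage sketch (illustrative; `conn` is an authenticated Connector):
#   >>> nft = NFT(conn)
#   >>> nft.trades("1 day ago", expand_collection=True)
#   >>> nft.collection("0xBC4CA0EdA7647A8aB7C2061c2E118A18a936f13D").metadata()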
# %% ../nbs/02_nft.ipynb 10
@patch
def search(self: NFT,
query:dict # Query object for example {"id": "boredape"}
):
"""Search collections by a query"""
query.update({'chain': self.chain})
return self._conn.get(f'/collection/search', params=query).json()['collections'] | zash-sdk | /zash_sdk-0.0.10-py3-none-any.whl/zash_sdk/nft.py | nft.py |
d = { 'settings': { 'branch': 'master',
'doc_baseurl': '/',
'doc_host': 'https://zashdev.github.io',
'git_url': 'https://github.com/zashdev/zash-sdk',
'lib_path': 'zash_sdk'},
'syms': { 'zash_sdk.collection': { 'zash_sdk.collection.NFTCollection': ('collection.html#nftcollection', 'zash_sdk/collection.py'),
'zash_sdk.collection.NFTCollection.__init__': ( 'collection.html#nftcollection.__init__',
'zash_sdk/collection.py'),
'zash_sdk.collection.NFTCollection.metadata': ( 'collection.html#nftcollection.metadata',
'zash_sdk/collection.py'),
'zash_sdk.collection.NFTCollection.mints': ( 'collection.html#nftcollection.mints',
'zash_sdk/collection.py'),
'zash_sdk.collection.NFTCollection.socials': ( 'collection.html#nftcollection.socials',
'zash_sdk/collection.py'),
'zash_sdk.collection.NFTCollection.token': ( 'collection.html#nftcollection.token',
'zash_sdk/collection.py'),
'zash_sdk.collection.NFTCollection.trades': ( 'collection.html#nftcollection.trades',
'zash_sdk/collection.py'),
'zash_sdk.collection.Token': ('collection.html#token', 'zash_sdk/collection.py'),
'zash_sdk.collection.Token.__init__': ('collection.html#token.__init__', 'zash_sdk/collection.py'),
'zash_sdk.collection.Token.metadata': ('collection.html#token.metadata', 'zash_sdk/collection.py'),
'zash_sdk.collection.Token.trades': ('collection.html#token.trades', 'zash_sdk/collection.py'),
'zash_sdk.collection.__fetch_loans': ('collection.html#__fetch_loans', 'zash_sdk/collection.py'),
'zash_sdk.collection.__fetch_trades': ('collection.html#__fetch_trades', 'zash_sdk/collection.py'),
'zash_sdk.collection._build_loans_statement': ( 'collection.html#_build_loans_statement',
'zash_sdk/collection.py'),
'zash_sdk.collection._build_sol_trade_statement': ( 'collection.html#_build_sol_trade_statement',
'zash_sdk/collection.py'),
'zash_sdk.collection._build_template': ('collection.html#_build_template', 'zash_sdk/collection.py'),
'zash_sdk.collection._build_trade_statement': ( 'collection.html#_build_trade_statement',
'zash_sdk/collection.py'),
'zash_sdk.collection._chain_prefix': ('collection.html#_chain_prefix', 'zash_sdk/collection.py'),
'zash_sdk.collection._fetch_loans': ('collection.html#_fetch_loans', 'zash_sdk/collection.py'),
'zash_sdk.collection._fetch_metadata': ('collection.html#_fetch_metadata', 'zash_sdk/collection.py'),
'zash_sdk.collection._fetch_metadatas': ('collection.html#_fetch_metadatas', 'zash_sdk/collection.py'),
'zash_sdk.collection._fetch_socials': ('collection.html#_fetch_socials', 'zash_sdk/collection.py'),
'zash_sdk.collection._fetch_trades': ('collection.html#_fetch_trades', 'zash_sdk/collection.py'),
'zash_sdk.collection._parse_start_end_dates': ( 'collection.html#_parse_start_end_dates',
'zash_sdk/collection.py'),
'zash_sdk.collection._sqlize_list': ('collection.html#_sqlize_list', 'zash_sdk/collection.py'),
'zash_sdk.collection._timeseries_to_df': ( 'collection.html#_timeseries_to_df',
'zash_sdk/collection.py')},
'zash_sdk.connectors': { 'zash_sdk.connectors.APIConnector': ('connectors.html#apiconnector', 'zash_sdk/connectors.py'),
'zash_sdk.connectors.APIConnector.__init__': ( 'connectors.html#apiconnector.__init__',
'zash_sdk/connectors.py'),
'zash_sdk.connectors.APIConnector.close': ( 'connectors.html#apiconnector.close',
'zash_sdk/connectors.py'),
'zash_sdk.connectors.APIConnector.get': ('connectors.html#apiconnector.get', 'zash_sdk/connectors.py'),
'zash_sdk.connectors.Connector': ('connectors.html#connector', 'zash_sdk/connectors.py'),
'zash_sdk.connectors.Connector.__init__': ( 'connectors.html#connector.__init__',
'zash_sdk/connectors.py'),
'zash_sdk.connectors.Connector.close': ('connectors.html#connector.close', 'zash_sdk/connectors.py'),
'zash_sdk.connectors.Connector.get': ('connectors.html#connector.get', 'zash_sdk/connectors.py'),
'zash_sdk.connectors.Connector.query': ('connectors.html#connector.query', 'zash_sdk/connectors.py'),
'zash_sdk.connectors.Connector.rpc': ('connectors.html#connector.rpc', 'zash_sdk/connectors.py'),
'zash_sdk.connectors.RSConnector': ('connectors.html#rsconnector', 'zash_sdk/connectors.py'),
'zash_sdk.connectors.RSConnector.__init__': ( 'connectors.html#rsconnector.__init__',
'zash_sdk/connectors.py'),
'zash_sdk.connectors.RSConnector._fetch': ( 'connectors.html#rsconnector._fetch',
'zash_sdk/connectors.py'),
'zash_sdk.connectors.RSConnector.close': ( 'connectors.html#rsconnector.close',
'zash_sdk/connectors.py'),
'zash_sdk.connectors.RSConnector.query': ( 'connectors.html#rsconnector.query',
'zash_sdk/connectors.py'),
'zash_sdk.connectors.RSConnector.refresh': ( 'connectors.html#rsconnector.refresh',
'zash_sdk/connectors.py'),
'zash_sdk.connectors.W3Connector': ('connectors.html#w3connector', 'zash_sdk/connectors.py'),
'zash_sdk.connectors.W3Connector.__init__': ( 'connectors.html#w3connector.__init__',
'zash_sdk/connectors.py'),
'zash_sdk.connectors.W3Connector.get_token_metadata': ( 'connectors.html#w3connector.get_token_metadata',
'zash_sdk/connectors.py'),
'zash_sdk.connectors._authenticate': ('connectors.html#_authenticate', 'zash_sdk/connectors.py'),
'zash_sdk.connectors._connect_to_redshift': ( 'connectors.html#_connect_to_redshift',
'zash_sdk/connectors.py'),
'zash_sdk.connectors._connect_to_redshift_dev': ( 'connectors.html#_connect_to_redshift_dev',
'zash_sdk/connectors.py'),
'zash_sdk.connectors._fetch_dev_creds': ('connectors.html#_fetch_dev_creds', 'zash_sdk/connectors.py'),
'zash_sdk.connectors._w3_client': ('connectors.html#_w3_client', 'zash_sdk/connectors.py'),
'zash_sdk.connectors.authenticate': ('connectors.html#authenticate', 'zash_sdk/connectors.py')},
'zash_sdk.graph': { 'zash_sdk.graph.build_network': ('graph.html#build_network', 'zash_sdk/graph.py'),
'zash_sdk.graph.plot_trade_network': ('graph.html#plot_trade_network', 'zash_sdk/graph.py')},
'zash_sdk.nft': { 'zash_sdk.nft.NFT': ('nft.html#nft', 'zash_sdk/nft.py'),
'zash_sdk.nft.NFT.__init__': ('nft.html#nft.__init__', 'zash_sdk/nft.py'),
'zash_sdk.nft.NFT.collection': ('nft.html#nft.collection', 'zash_sdk/nft.py'),
'zash_sdk.nft.NFT.loans': ('nft.html#nft.loans', 'zash_sdk/nft.py'),
'zash_sdk.nft.NFT.mints': ('nft.html#nft.mints', 'zash_sdk/nft.py'),
'zash_sdk.nft.NFT.search': ('nft.html#nft.search', 'zash_sdk/nft.py'),
'zash_sdk.nft.NFT.trades': ('nft.html#nft.trades', 'zash_sdk/nft.py')},
'zash_sdk.zash': { 'zash_sdk.zash.Zash': ('zash.html#zash', 'zash_sdk/zash.py'),
'zash_sdk.zash.Zash.__init__': ('zash.html#zash.__init__', 'zash_sdk/zash.py'),
'zash_sdk.zash.Zash.__repr__': ('zash.html#zash.__repr__', 'zash_sdk/zash.py'),
'zash_sdk.zash.Zash.describe': ('zash.html#zash.describe', 'zash_sdk/zash.py'),
'zash_sdk.zash.Zash.nft': ('zash.html#zash.nft', 'zash_sdk/zash.py'),
'zash_sdk.zash.Zash.query': ('zash.html#zash.query', 'zash_sdk/zash.py'),
'zash_sdk.zash.Zash.tables': ('zash.html#zash.tables', 'zash_sdk/zash.py'),
'zash_sdk.zash.ddl_to_type_dict': ('zash.html#ddl_to_type_dict', 'zash_sdk/zash.py')}}} | zash-sdk | /zash_sdk-0.0.10-py3-none-any.whl/zash_sdk/_modidx.py | _modidx.py |
import re
import os
import shlex
import time
import argparse
from instructions import get_instructions
from defs import *
defines = {}
labels = {}
registers = {'a': 7, 'b': 0, 'c': 1, 'd': 2, 'e': 3, 'h': 4, 'l': 5}
assembler_pass = 1
class ASMSyntaxError(Exception):
def __init__(self, text='Syntax Error'):
super(ASMSyntaxError, self).__init__(text)
def to_number(text):
"""
Convert number strings to integers
:param text: Number string
:return: integer
"""
if isinstance(text, int):
return text
if re.match(hex_num, text):
return int(text[0:-1], 16)
if re.match(bin_num, text):
return int(text[0:-1], 2)
return int(text)
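# Illustrative examples, assuming defs.hex_num/bin_num match the suffix-style
# literals this function strips (e.g. '1FH' for hex, '1010B' for binary):
#   >>> to_number('1FH')    # -> 31
#   >>> to_number('1010B')  # -> 10
#   >>> to_number('42')     # -> 42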
def parse_number_label(text):
"""
Parse a number either directly, or from a previously defined label
:param text: Parameter text
:return: Integer value
"""
try:
return to_number(text)
except ValueError:
if text in labels:
return labels.get(text)
raise ASMSyntaxError(f'Invalid address or label: {text}')
def parse_number(text, allow_to_fail):
"""
Convert to integer, throw if fails
:param text: Number as text (decimal, hex or binary)
:return: Integer value
"""
try:
if text in defines:
return parse_number(defines.get(text), allow_to_fail)
return to_number(text)
except ValueError:
if allow_to_fail:
return 0
else:
raise ASMSyntaxError(f'Invalid number format: {text}')
def is_in_dict(s, d):
if s in d:
return True
if not isinstance(s, str):
return False
m = re.match(label_name_add, s)
if m:
g = m.groups()
base = g[0]
return base in d
return False
def is_label(s):
return is_in_dict(s, labels)
def is_define(s):
return is_in_dict(s, defines)
def get_from_dict(s, d):
if s in d:
return d.get(s)
m = re.match(label_name_add, s)
g = m.groups()
base = g[0]
diff = g[1]
return parse_number(d.get(base), False) + int(diff)
def get_label_value(s):
return get_from_dict(s, labels)
def get_define_value(s):
return get_from_dict(s, defines)
def remove_comments(line):
"""
Remove all comments from the line
:param line: assembly source line
:return: line without comments (; ....)
"""
comment = line.find(';')
if comment >= 0:
line = line[0:comment]
return line
def define(name, value):
"""
Add a new definition
:param name: Name of variable
:param value: Numeric value
:return: Empty code (no new machine code)
"""
if not re.match(identifier, name):
raise ASMSyntaxError(f'EQU requires a valid identifier: {name}')
defines[name] = value
return [], -1
def compose(template, values):
"""
Create machine code for instruction
:param template: Machine code template
:param values: Values for placeholders
:return: Code as a list of integers (0-255 each)
"""
parts = template.split()
j = 0
for i in range(len(parts)):
if parts[i] == '::':
if j < len(values):
parts[i] = values[j]
j = j + 1
else:
raise ASMSyntaxError()
else:
parts[i] = int(parts[i], 16)
return parts
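# Illustrative example: LD (IX+d),n has the template 'DD 36 :: ::', so
# compose('DD 36 :: ::', [4, 7]) -> [0xDD, 0x36, 4, 7].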
def handle_labels(values, types, pos):
"""
:param values: list of values to place in template placeholders
:param types: type of values (@@ or :: or %%) for 16bit, 8bit, relative
:param pos: Position in program code (bytes)
:return: Updated values with numbers where text used to be
"""
res = []
score = 0
for value, value_type in zip(values, types):
if value is None:
return [], -1
if is_define(value):
value = get_define_value(value)
if is_label(value):
value = get_label_value(value)
if not isinstance(value, int):
if re.match(label_name, value):
if assembler_pass == 1:
# Ignore unknown label values in first pass
value = 0
score = 1
else:
return [], -1
else:
value = parse_number(value, False)
if value_type == '::':
if value < 0 or value > 255:
raise ASMSyntaxError(f'Invalid byte value {value}')
res.append(value)
if value_type == '@@':
if value < 0 or value > 65535:
raise ASMSyntaxError(f'Invalid word value {value}')
res.append(value & 255)
res.append((value >> 8) & 255)
if value_type == '%%':
value = value - pos
if assembler_pass == 1:
value = 0
if -126 <= value <= 129:
res.append(value - 2)
else:
raise ASMSyntaxError(f'Invalid relative address {value}')
return res, score
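# Illustrative example for the '%%' (relative) case: 'JR label' assembled at
# pos 0x100 with the label at 0x105 stores 0x105 - 0x100 - 2 = 3, i.e. the
# displacement is taken relative to the byte after the 2-byte instruction.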
def find_match(candidates, instruction, pos):
"""
:param candidates: A list of potential matches. Each is a (code, template) tuple
:param instruction: Text from assembly line
:param pos: Position in program (bytes)
:return: code as a list of integers (0-255 each)
"""
scored_candidates = [[], [], [], [], [], []]
for candidate in candidates:
code, pattern, types = candidate
match = re.match(pattern, instruction)
if match:
values, score = handle_labels(match.groups(), types, pos)
if score >= 0:
scored_candidates[score].append((code, values))
# return compose(candidate[0], values)
for score in range(6):
if len(scored_candidates[score]) > 0:
s = scored_candidates[score][0]
return compose(s[0], s[1])
raise ASMSyntaxError(f"Could not assemble: {instruction}")
# return []
def parse_bytes(parts):
parts = [p.split(',') for p in parts]
numbers = []
for p in parts:
numbers.extend(p)
numbers = [n.upper() for n in numbers]
return [parse_number(p, False) for p in numbers]
def parse_words(parts):
parts = [p.split(',') for p in parts]
numbers = []
for p in parts:
numbers.extend(p)
numbers = [n.upper() for n in numbers]
res = []
for n in numbers:
if is_label(n):
val = get_label_value(n)
else:
val = parse_number(n, assembler_pass == 1)
res.append(val & 255)
res.append((val >> 8) & 255)
return res
def parse_line(parts, pos):
"""
:param parts: list of words in line
:param pos: Position in program (bytes)
:return: (code,new position) Code is a list of integers (0-255 each)
new position should be positive for an ORG
or negative for a valid code line
"""
if len(parts) < 2 or parts[1].upper() != 'EQU':
replacements = 1
while replacements > 0:
replacements = 0
for i in range(len(parts)):
upart = parts[i].upper()
if upart in defines:
parts = parts[0:i] + defines.get(upart).split() + parts[(i + 1):]
replacements = 1
break
parts[0] = parts[0].upper()
if len(parts) >= 3 and parts[1].upper() == 'EQU':
return define(parts[0], ' '.join(parts[2:]))
if re.match(label_pat, parts[0]):
label = parts[0]
label = label[0:-1]
labels[label] = pos
del parts[0]
if len(parts) == 0:
return [], -1
parts[0] = parts[0].upper()
if parts[0] == 'ORG':
return [], parse_number(parts[1], False)
if parts[0] == 'DB':
return parse_bytes(parts[1:]), -1
if parts[0] == 'DW':
return parse_words(parts[1:]), -1
if parts[0] == 'BUF':
n = parse_number(parts[1], False)
return [0] * n, -1
m = re.match(def_str, ' '.join(parts))
if m:
s = m.groups()[0]
return [ord(x) for x in s], -1
candidates = get_instructions(parts[0])
if candidates is not None:
instruction = " ".join(parts)
code = find_match(candidates, instruction.upper(), pos)
if len(code) == 0:
raise ASMSyntaxError()
return code, -1
raise ASMSyntaxError()
def fix_negatives(code):
"""
Two's complement e.g. -1 -> 255
"""
for i in range(len(code)):
if code[i] < 0:
code[i] = 256 + code[i]
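# Illustrative example: fix_negatives([0x18, -2]) mutates the list into
# [0x18, 0xFE], encoding 'JR $' (a jump to itself) as two's complement.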
def assemble(src):
"""
Given a source file name, assemble it into machine code
:param src:
:return: binary program bytes
"""
full_code = []
pos = 0
last_line = ''
line_number = 0
try:
listing = []
for line_number, line in enumerate(open(src).readlines()):
last_line = line.strip()
line = remove_comments(line)
parts = shlex.split(line, False, False)
if len(parts) == 0:
continue
code, new_pos = parse_line(parts, pos)
if new_pos >= 0:
pos = new_pos
elif code:
fix_negatives(code)
if pos > len(full_code):
full_code.extend([0] * (pos - len(full_code)))
listing.append((pos, line_number, code, last_line))
full_code[pos:(pos + len(code))] = code
pos = pos + len(code)
return bytes(full_code), listing
except ASMSyntaxError as e:
print(last_line)
print(f'{e} in line {line_number}')
return None, None
except IOError as e:
print(e)
return None, None
except Exception as e:
print(f"Syntax error in line {line_number}")
print(last_line)
return None, None
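# Usage sketch (illustrative; 'prog.asm' is a hypothetical source file).
# main() below drives the same two-pass scheme:
#   >>> code, listing = assemble('prog.asm')   # pass 1 collects label addresses
#   >>> assembler_pass = 2
#   >>> code, listing = assemble('prog.asm')   # pass 2 resolves forward references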
def hexbytes(code):
s = [f'{b:#0{4}x}' for b in code]
s = [b[2:] for b in s]
return ' '.join(s)
def print_listing(f, listing):
for pos, line_number, code, line in listing:
f.write(f'{pos:#0{6}x} {str(line_number+1):<4} {hexbytes(code):<16} {line}\n')
def main():
parser = argparse.ArgumentParser(description='Z80 Assembler')
parser.add_argument('source', type=str, help='Asm source file to assemble')
    parser.add_argument('offset', type=int, nargs='?', default=0, help='Program start offset')
    parser.add_argument('fillsize', type=int, nargs='?', default=0, help='Fill image to size')
args = parser.parse_args()
# print("Pass 1")
source_name = args.source
base_name = os.path.splitext(source_name)[0]
bin_name = base_name + '.bin'
lst_name = base_name + '.lst'
code, listing = assemble(args.source)
if code:
global assembler_pass
assembler_pass = 2
# print("Pass 2")
code, listing = assemble(args.source)
if code:
        with open(bin_name, 'wb') as f:
            bin_code = code[args.offset:]
            n = len(bin_code)
            f.write(bin_code)
            fill = args.fillsize
            if fill > n:
                f.write(bytes(fill - n))
with open(lst_name, 'w') as f:
print_listing(f, listing)
if __name__ == '__main__':
# start = time.time()
main()
# total = time.time() - start
# print(f"Total time: {total:0.2f} seconds") | zasm | /zasm-1.0.0-py3-none-any.whl/zasm.py | zasm.py |
import re
from defs import *
# Instructions text listed below is a modified version of
# a file taken from: https://github.com/toptensoftware/yazd
db = '''
CE :: ADC A,::
8E ADC A,(HL)
DD 8E :: ADC A,(IX+::)
FD 8E :: ADC A,(IY+::)
8F ADC A,A
88 ADC A,B
89 ADC A,C
8A ADC A,D
8B ADC A,E
8C ADC A,H
DD 8C ADC A,IXH
DD 8D ADC A,IXL
FD 8C ADC A,IYH
FD 8D ADC A,IYL
8D ADC A,L
ED 4A ADC HL,BC
ED 5A ADC HL,DE
ED 6A ADC HL,HL
ED 7A ADC HL,SP
C6 :: ADD A,::
86 ADD A,(HL)
DD 86 :: ADD A,(IX+::)
FD 86 :: ADD A,(IY+::)
87 ADD A,A
80 ADD A,B
81 ADD A,C
82 ADD A,D
83 ADD A,E
84 ADD A,H
DD 84 ADD A,IXH
DD 85 ADD A,IXL
FD 84 ADD A,IYH
FD 85 ADD A,IYL
85 ADD A,L
09 ADD HL,BC
19 ADD HL,DE
29 ADD HL,HL
39 ADD HL,SP
DD 09 ADD IX,BC
DD 19 ADD IX,DE
DD 29 ADD IX,IX
DD 39 ADD IX,SP
FD 09 ADD IY,BC
FD 19 ADD IY,DE
FD 29 ADD IY,IY
FD 39 ADD IY,SP
E6 :: AND ::
A6 AND (HL)
DD A6 :: AND (IX+::)
FD A6 :: AND (IY+::)
A7 AND A
A6 AND A,(HL)
A7 AND A,A
A0 AND A,B
A1 AND A,C
A2 AND A,D
A3 AND A,E
A4 AND A,H
A5 AND A,L
A0 AND B
A1 AND C
A2 AND D
A3 AND E
A4 AND H
DD A4 AND IXH
DD A5 AND IXL
FD A4 AND IYH
FD A5 AND IYL
A5 AND L
CB 46 BIT 0,(HL)
CB 4E BIT 1,(HL)
CB 56 BIT 2,(HL)
CB 5E BIT 3,(HL)
CB 66 BIT 4,(HL)
CB 6E BIT 5,(HL)
CB 76 BIT 6,(HL)
CB 7E BIT 7,(HL)
DD CB :: 46 BIT 0,(IX+::)
DD CB :: 4E BIT 1,(IX+::)
DD CB :: 56 BIT 2,(IX+::)
DD CB :: 5E BIT 3,(IX+::)
DD CB :: 66 BIT 4,(IX+::)
DD CB :: 6E BIT 5,(IX+::)
DD CB :: 76 BIT 6,(IX+::)
DD CB :: 7E BIT 7,(IX+::)
FD CB :: 40 BIT 0,(IY+::)
FD CB :: 48 BIT 1,(IY+::)
FD CB :: 50 BIT 2,(IY+::)
FD CB :: 58 BIT 3,(IY+::)
FD CB :: 60 BIT 4,(IY+::)
FD CB :: 68 BIT 5,(IY+::)
FD CB :: 70 BIT 6,(IY+::)
FD CB :: 78 BIT 7,(IY+::)
CB 47 BIT 0,A
CB 4F BIT 1,A
CB 57 BIT 2,A
CB 5F BIT 3,A
CB 67 BIT 4,A
CB 6F BIT 5,A
CB 77 BIT 6,A
CB 7F BIT 7,A
CB 40 BIT 0,B
CB 48 BIT 1,B
CB 50 BIT 2,B
CB 58 BIT 3,B
CB 60 BIT 4,B
CB 68 BIT 5,B
CB 70 BIT 6,B
CB 78 BIT 7,B
CB 41 BIT 0,C
CB 49 BIT 1,C
CB 51 BIT 2,C
CB 59 BIT 3,C
CB 61 BIT 4,C
CB 69 BIT 5,C
CB 71 BIT 6,C
CB 79 BIT 7,C
CB 42 BIT 0,D
CB 4A BIT 1,D
CB 52 BIT 2,D
CB 5A BIT 3,D
CB 62 BIT 4,D
CB 6A BIT 5,D
CB 72 BIT 6,D
CB 7A BIT 7,D
CB 43 BIT 0,E
CB 4B BIT 1,E
CB 53 BIT 2,E
CB 5B BIT 3,E
CB 63 BIT 4,E
CB 6B BIT 5,E
CB 73 BIT 6,E
CB 7B BIT 7,E
CB 44 BIT 0,H
CB 4C BIT 1,H
CB 54 BIT 2,H
CB 5C BIT 3,H
CB 64 BIT 4,H
CB 6C BIT 5,H
CB 74 BIT 6,H
CB 7C BIT 7,H
CB 45 BIT 0,L
CB 4D BIT 1,L
CB 55 BIT 2,L
CB 5D BIT 3,L
CB 65 BIT 4,L
CB 6D BIT 5,L
CB 75 BIT 6,L
CB 7D BIT 7,L
CD :: :: CALL @@
DC :: :: CALL C,@@
FC :: :: CALL M,@@
D4 :: :: CALL NC,@@
C4 :: :: CALL NZ,@@
F4 :: :: CALL P,@@
EC :: :: CALL PE,@@
E4 :: :: CALL PO,@@
CC :: :: CALL Z,@@
3F CCF
FE :: CP ::
BE CP (HL)
DD BE :: CP (IX+::)
FD BE :: CP (IY+::)
BF CP A
BE CP A,(HL)
BF CP A,A
B8 CP A,B
B9 CP A,C
BA CP A,D
BB CP A,E
BC CP A,H
BD CP A,L
B8 CP B
B9 CP C
BA CP D
BB CP E
BC CP H
DD BC CP IXH
DD BD CP IXL
FD BC CP IYH
FD BD CP IYL
BD CP L
ED A9 CPD
ED B9 CPDR
ED A1 CPI
ED B1 CPIR
2F CPL
27 DAA
35 DEC (HL)
DD 35 :: DEC (IX+::)
FD 35 :: DEC (IY+::)
3D DEC A
05 DEC B
0B DEC BC
0D DEC C
15 DEC D
1B DEC DE
1D DEC E
25 DEC H
2B DEC HL
DD 2B DEC IX
DD 25 DEC IXH
DD 2D DEC IXL
FD 2B DEC IY
FD 25 DEC IYH
FD 2D DEC IYL
2D DEC L
3B DEC SP
F3 DI
10 :: DJNZ %%
FB EI
E3 EX (SP),HL
DD E3 EX (SP),IX
FD E3 EX (SP),IY
08 EX AF,AF'
EB EX DE,HL
D9 EXX
76 HALT
ED 46 IM 0
ED 56 IM 1
ED 5E IM 2
DB :: IN A,(::)
ED 78 IN A,(C)
ED 40 IN B,(C)
ED 48 IN C,(C)
ED 50 IN D,(C)
ED 58 IN E,(C)
ED 60 IN H,(C)
ED 68 IN L,(C)
ED 70 IN F,(C)
34 INC (HL)
DD 34 :: INC (IX+::)
FD 34 :: INC (IY+::)
3C INC A
04 INC B
03 INC BC
0C INC C
14 INC D
13 INC DE
1C INC E
24 INC H
23 INC HL
DD 23 INC IX
DD 24 INC IXH
DD 2C INC IXL
FD 23 INC IY
FD 24 INC IYH
FD 2C INC IYL
2C INC L
33 INC SP
ED AA IND
ED BA INDR
ED A2 INI
ED B2 INIR
E9 JP (HL)
DD E9 JP (IX)
FD E9 JP (IY)
C3 :: :: JP @@
DA :: :: JP C,@@
FA :: :: JP M,@@
D2 :: :: JP NC,@@
C2 :: :: JP NZ,@@
F2 :: :: JP P,@@
EA :: :: JP PE,@@
E2 :: :: JP PO,@@
CA :: :: JP Z,@@
18 :: JR %%
38 :: JR C,%%
30 :: JR NC,%%
20 :: JR NZ,%%
28 :: JR Z,%%
32 :: :: LD (@@),A
ED 43 :: :: LD (@@),BC
ED 53 :: :: LD (@@),DE
22 :: :: LD (@@),HL
DD 22 :: :: LD (@@),IX
FD 22 :: :: LD (@@),IY
ED 73 :: :: LD (@@),SP
02 LD (BC),A
12 LD (DE),A
36 :: LD (HL),::
77 LD (HL),A
70 LD (HL),B
71 LD (HL),C
72 LD (HL),D
73 LD (HL),E
74 LD (HL),H
75 LD (HL),L
DD 36 :: :: LD (IX+::),::
DD 77 :: LD (IX+::),A
DD 70 :: LD (IX+::),B
DD 71 :: LD (IX+::),C
DD 72 :: LD (IX+::),D
DD 73 :: LD (IX+::),E
DD 74 :: LD (IX+::),H
DD 75 :: LD (IX+::),L
FD 36 :: :: LD (IY+::),::
FD 77 :: LD (IY+::),A
FD 70 :: LD (IY+::),B
FD 71 :: LD (IY+::),C
FD 72 :: LD (IY+::),D
FD 73 :: LD (IY+::),E
FD 74 :: LD (IY+::),H
FD 75 :: LD (IY+::),L
3E :: LD A,::
3A :: :: LD A,(@@)
0A LD A,(BC)
1A LD A,(DE)
7E LD A,(HL)
DD 7E :: LD A,(IX+::)
FD 7E :: LD A,(IY+::)
7F LD A,A
78 LD A,B
79 LD A,C
7A LD A,D
7B LD A,E
7C LD A,H
ED 57 LD A,I
DD 7C LD A,IXH
DD 7D LD A,IXL
FD 7C LD A,IYH
FD 7D LD A,IYL
7D LD A,L
ED 5F LD A,R
DD CB :: 87 LD A,RES 0,(IX+::)
DD CB :: 8F LD A,RES 1,(IX+::)
DD CB :: 97 LD A,RES 2,(IX+::)
DD CB :: 9F LD A,RES 3,(IX+::)
DD CB :: A7 LD A,RES 4,(IX+::)
DD CB :: AF LD A,RES 5,(IX+::)
DD CB :: B7 LD A,RES 6,(IX+::)
DD CB :: BF LD A,RES 7,(IX+::)
FD CB :: 87 LD A,RES 0,(IY+::)
FD CB :: 8F LD A,RES 1,(IY+::)
FD CB :: 97 LD A,RES 2,(IY+::)
FD CB :: 9F LD A,RES 3,(IY+::)
FD CB :: A7 LD A,RES 4,(IY+::)
FD CB :: AF LD A,RES 5,(IY+::)
FD CB :: B7 LD A,RES 6,(IY+::)
FD CB :: BF LD A,RES 7,(IY+::)
DD CB :: 17 LD A,RL (IX+::)
FD CB :: 17 LD A,RL (IY+::)
DD CB :: 07 LD A,RLC (IX+::)
FD CB :: 07 LD A,RLC (IY+::)
DD CB :: 1F LD A,RR (IX+::)
FD CB :: 1F LD A,RR (IY+::)
DD CB :: 0F LD A,RRC (IX+::)
FD CB :: 0F LD A,RRC (IY+::)
DD CB :: C7 LD A,SET 0,(IX+::)
DD CB :: CF LD A,SET 1,(IX+::)
DD CB :: D7 LD A,SET 2,(IX+::)
DD CB :: DF LD A,SET 3,(IX+::)
DD CB :: E7 LD A,SET 4,(IX+::)
DD CB :: EF LD A,SET 5,(IX+::)
DD CB :: F7 LD A,SET 6,(IX+::)
DD CB :: FF LD A,SET 7,(IX+::)
FD CB :: C7 LD A,SET 0,(IY+::)
FD CB :: CF LD A,SET 1,(IY+::)
FD CB :: D7 LD A,SET 2,(IY+::)
FD CB :: DF LD A,SET 3,(IY+::)
FD CB :: E7 LD A,SET 4,(IY+::)
FD CB :: EF LD A,SET 5,(IY+::)
FD CB :: F7 LD A,SET 6,(IY+::)
FD CB :: FF LD A,SET 7,(IY+::)
DD CB :: 27 LD A,SLA (IX+::)
FD CB :: 27 LD A,SLA (IY+::)
DD CB :: 37 LD A,SLL (IX+::)
FD CB :: 37 LD A,SLL (IY+::)
DD CB :: 2F LD A,SRA (IX+::)
FD CB :: 2F LD A,SRA (IY+::)
DD CB :: 3F LD A,SRL (IX+::)
FD CB :: 3F LD A,SRL (IY+::)
06 :: LD B,::
46 LD B,(HL)
DD 46 :: LD B,(IX+::)
FD 46 :: LD B,(IY+::)
47 LD B,A
40 LD B,B
41 LD B,C
42 LD B,D
43 LD B,E
44 LD B,H
DD 44 LD B,IXH
DD 45 LD B,IXL
FD 44 LD B,IYH
FD 45 LD B,IYL
45 LD B,L
DD CB :: 80 LD B,RES 0,(IX+::)
DD CB :: 88 LD B,RES 1,(IX+::)
DD CB :: 90 LD B,RES 2,(IX+::)
DD CB :: 98 LD B,RES 3,(IX+::)
DD CB :: A0 LD B,RES 4,(IX+::)
DD CB :: A8 LD B,RES 5,(IX+::)
DD CB :: B0 LD B,RES 6,(IX+::)
DD CB :: B8 LD B,RES 7,(IX+::)
FD CB :: 80 LD B,RES 0,(IY+::)
FD CB :: 88 LD B,RES 1,(IY+::)
FD CB :: 90 LD B,RES 2,(IY+::)
FD CB :: 98 LD B,RES 3,(IY+::)
FD CB :: A0 LD B,RES 4,(IY+::)
FD CB :: A8 LD B,RES 5,(IY+::)
FD CB :: B0 LD B,RES 6,(IY+::)
FD CB :: B8 LD B,RES 7,(IY+::)
DD CB :: 10 LD B,RL (IX+::)
FD CB :: 10 LD B,RL (IY+::)
DD CB :: 00 LD B,RLC (IX+::)
FD CB :: 00 LD B,RLC (IY+::)
DD CB :: 18 LD B,RR (IX+::)
FD CB :: 18 LD B,RR (IY+::)
DD CB :: 08 LD B,RRC (IX+::)
FD CB :: 08 LD B,RRC (IY+::)
DD CB :: C0 LD B,SET 0,(IX+::)
DD CB :: C8 LD B,SET 1,(IX+::)
DD CB :: D0 LD B,SET 2,(IX+::)
DD CB :: D8 LD B,SET 3,(IX+::)
DD CB :: E0 LD B,SET 4,(IX+::)
DD CB :: E8 LD B,SET 5,(IX+::)
DD CB :: F0 LD B,SET 6,(IX+::)
DD CB :: F8 LD B,SET 7,(IX+::)
FD CB :: C0 LD B,SET 0,(IY+::)
FD CB :: C8 LD B,SET 1,(IY+::)
FD CB :: D0 LD B,SET 2,(IY+::)
FD CB :: D8 LD B,SET 3,(IY+::)
FD CB :: E0 LD B,SET 4,(IY+::)
FD CB :: E8 LD B,SET 5,(IY+::)
FD CB :: F0 LD B,SET 6,(IY+::)
FD CB :: F8 LD B,SET 7,(IY+::)
DD CB :: 20 LD B,SLA (IX+::)
FD CB :: 20 LD B,SLA (IY+::)
DD CB :: 30 LD B,SLL (IX+::)
FD CB :: 30 LD B,SLL (IY+::)
DD CB :: 28 LD B,SRA (IX+::)
FD CB :: 28 LD B,SRA (IY+::)
DD CB :: 38 LD B,SRL (IX+::)
FD CB :: 38 LD B,SRL (IY+::)
ED 4B :: :: LD BC,(@@)
01 :: :: LD BC,@@
0E :: LD C,::
4E LD C,(HL)
DD 4E :: LD C,(IX+::)
FD 4E :: LD C,(IY+::)
4F LD C,A
48 LD C,B
49 LD C,C
4A LD C,D
4B LD C,E
4C LD C,H
DD 4C LD C,IXH
DD 4D LD C,IXL
FD 4C LD C,IYH
FD 4D LD C,IYL
4D LD C,L
DD CB :: 81 LD C,RES 0,(IX+::)
DD CB :: 89 LD C,RES 1,(IX+::)
DD CB :: 91 LD C,RES 2,(IX+::)
DD CB :: 99 LD C,RES 3,(IX+::)
DD CB :: A1 LD C,RES 4,(IX+::)
DD CB :: A9 LD C,RES 5,(IX+::)
DD CB :: B1 LD C,RES 6,(IX+::)
DD CB :: B9 LD C,RES 7,(IX+::)
FD CB :: 81 LD C,RES 0,(IY+::)
FD CB :: 89 LD C,RES 1,(IY+::)
FD CB :: 91 LD C,RES 2,(IY+::)
FD CB :: 99 LD C,RES 3,(IY+::)
FD CB :: A1 LD C,RES 4,(IY+::)
FD CB :: A9 LD C,RES 5,(IY+::)
FD CB :: B1 LD C,RES 6,(IY+::)
FD CB :: B9 LD C,RES 7,(IY+::)
DD CB :: 11 LD C,RL (IX+::)
FD CB :: 11 LD C,RL (IY+::)
DD CB :: 01 LD C,RLC (IX+::)
FD CB :: 01 LD C,RLC (IY+::)
DD CB :: 19 LD C,RR (IX+::)
FD CB :: 19 LD C,RR (IY+::)
DD CB :: 09 LD C,RRC (IX+::)
FD CB :: 09 LD C,RRC (IY+::)
DD CB :: C1 LD C,SET 0,(IX+::)
DD CB :: C9 LD C,SET 1,(IX+::)
DD CB :: D1 LD C,SET 2,(IX+::)
DD CB :: D9 LD C,SET 3,(IX+::)
DD CB :: E1 LD C,SET 4,(IX+::)
DD CB :: E9 LD C,SET 5,(IX+::)
DD CB :: F1 LD C,SET 6,(IX+::)
DD CB :: F9 LD C,SET 7,(IX+::)
FD CB :: C1 LD C,SET 0,(IY+::)
FD CB :: C9 LD C,SET 1,(IY+::)
FD CB :: D1 LD C,SET 2,(IY+::)
FD CB :: D9 LD C,SET 3,(IY+::)
FD CB :: E1 LD C,SET 4,(IY+::)
FD CB :: E9 LD C,SET 5,(IY+::)
FD CB :: F1 LD C,SET 6,(IY+::)
FD CB :: F9 LD C,SET 7,(IY+::)
DD CB :: 21 LD C,SLA (IX+::)
FD CB :: 21 LD C,SLA (IY+::)
DD CB :: 31 LD C,SLL (IX+::)
FD CB :: 31 LD C,SLL (IY+::)
DD CB :: 29 LD C,SRA (IX+::)
FD CB :: 29 LD C,SRA (IY+::)
DD CB :: 39 LD C,SRL (IX+::)
FD CB :: 39 LD C,SRL (IY+::)
16 :: LD D,::
56 LD D,(HL)
DD 56 :: LD D,(IX+::)
FD 56 :: LD D,(IY+::)
57 LD D,A
50 LD D,B
51 LD D,C
52 LD D,D
53 LD D,E
54 LD D,H
DD 54 LD D,IXH
DD 55 LD D,IXL
FD 54 LD D,IYH
FD 55 LD D,IYL
55 LD D,L
DD CB :: 82 LD D,RES 0,(IX+::)
DD CB :: 8A LD D,RES 1,(IX+::)
DD CB :: 92 LD D,RES 2,(IX+::)
DD CB :: 9A LD D,RES 3,(IX+::)
DD CB :: A2 LD D,RES 4,(IX+::)
DD CB :: AA LD D,RES 5,(IX+::)
DD CB :: B2 LD D,RES 6,(IX+::)
DD CB :: BA LD D,RES 7,(IX+::)
FD CB :: 82 LD D,RES 0,(IY+::)
FD CB :: 8A LD D,RES 1,(IY+::)
FD CB :: 92 LD D,RES 2,(IY+::)
FD CB :: 9A LD D,RES 3,(IY+::)
FD CB :: A2 LD D,RES 4,(IY+::)
FD CB :: AA LD D,RES 5,(IY+::)
FD CB :: B2 LD D,RES 6,(IY+::)
FD CB :: BA LD D,RES 7,(IY+::)
DD CB :: 12 LD D,RL (IX+::)
FD CB :: 12 LD D,RL (IY+::)
DD CB :: 02 LD D,RLC (IX+::)
FD CB :: 02 LD D,RLC (IY+::)
DD CB :: 1A LD D,RR (IX+::)
FD CB :: 1A LD D,RR (IY+::)
DD CB :: 0A LD D,RRC (IX+::)
FD CB :: 0A LD D,RRC (IY+::)
DD CB :: C2 LD D,SET 0,(IX+::)
DD CB :: CA LD D,SET 1,(IX+::)
DD CB :: D2 LD D,SET 2,(IX+::)
DD CB :: DA LD D,SET 3,(IX+::)
DD CB :: E2 LD D,SET 4,(IX+::)
DD CB :: EA LD D,SET 5,(IX+::)
DD CB :: F2 LD D,SET 6,(IX+::)
DD CB :: FA LD D,SET 7,(IX+::)
FD CB :: C2 LD D,SET 0,(IY+::)
FD CB :: CA LD D,SET 1,(IY+::)
FD CB :: D2 LD D,SET 2,(IY+::)
FD CB :: DA LD D,SET 3,(IY+::)
FD CB :: E2 LD D,SET 4,(IY+::)
FD CB :: EA LD D,SET 5,(IY+::)
FD CB :: F2 LD D,SET 6,(IY+::)
FD CB :: FA LD D,SET 7,(IY+::)
DD CB :: 22 LD D,SLA (IX+::)
FD CB :: 22 LD D,SLA (IY+::)
DD CB :: 32 LD D,SLL (IX+::)
FD CB :: 32 LD D,SLL (IY+::)
DD CB :: 2A LD D,SRA (IX+::)
FD CB :: 2A LD D,SRA (IY+::)
DD CB :: 3A LD D,SRL (IX+::)
FD CB :: 3A LD D,SRL (IY+::)
ED 5B :: :: LD DE,(@@)
11 :: :: LD DE,@@
1E :: LD E,::
5E LD E,(HL)
DD 5E :: LD E,(IX+::)
FD 5E :: LD E,(IY+::)
5F LD E,A
58 LD E,B
59 LD E,C
5A LD E,D
5B LD E,E
5C LD E,H
DD 5C LD E,IXH
DD 5D LD E,IXL
FD 5C LD E,IYH
FD 5D LD E,IYL
5D LD E,L
DD CB :: 83 LD E,RES 0,(IX+::)
DD CB :: 8B LD E,RES 1,(IX+::)
DD CB :: 93 LD E,RES 2,(IX+::)
DD CB :: 9B LD E,RES 3,(IX+::)
DD CB :: A3 LD E,RES 4,(IX+::)
DD CB :: AB LD E,RES 5,(IX+::)
DD CB :: B3 LD E,RES 6,(IX+::)
DD CB :: BB LD E,RES 7,(IX+::)
FD CB :: 83 LD E,RES 0,(IY+::)
FD CB :: 8B LD E,RES 1,(IY+::)
FD CB :: 93 LD E,RES 2,(IY+::)
FD CB :: 9B LD E,RES 3,(IY+::)
FD CB :: A3 LD E,RES 4,(IY+::)
FD CB :: AB LD E,RES 5,(IY+::)
FD CB :: B3 LD E,RES 6,(IY+::)
FD CB :: BB LD E,RES 7,(IY+::)
DD CB :: 13 LD E,RL (IX+::)
FD CB :: 13 LD E,RL (IY+::)
DD CB :: 03 LD E,RLC (IX+::)
FD CB :: 03 LD E,RLC (IY+::)
DD CB :: 1B LD E,RR (IX+::)
FD CB :: 1B LD E,RR (IY+::)
DD CB :: 0B LD E,RRC (IX+::)
FD CB :: 0B LD E,RRC (IY+::)
DD CB :: C3 LD E,SET 0,(IX+::)
DD CB :: CB LD E,SET 1,(IX+::)
DD CB :: D3 LD E,SET 2,(IX+::)
DD CB :: DB LD E,SET 3,(IX+::)
DD CB :: E3 LD E,SET 4,(IX+::)
DD CB :: EB LD E,SET 5,(IX+::)
DD CB :: F3 LD E,SET 6,(IX+::)
DD CB :: FB LD E,SET 7,(IX+::)
FD CB :: C3 LD E,SET 0,(IY+::)
FD CB :: CB LD E,SET 1,(IY+::)
FD CB :: D3 LD E,SET 2,(IY+::)
FD CB :: DB LD E,SET 3,(IY+::)
FD CB :: E3 LD E,SET 4,(IY+::)
FD CB :: EB LD E,SET 5,(IY+::)
FD CB :: F3 LD E,SET 6,(IY+::)
FD CB :: FB LD E,SET 7,(IY+::)
DD CB :: 23 LD E,SLA (IX+::)
FD CB :: 23 LD E,SLA (IY+::)
DD CB :: 33 LD E,SLL (IX+::)
FD CB :: 33 LD E,SLL (IY+::)
DD CB :: 2B LD E,SRA (IX+::)
FD CB :: 2B LD E,SRA (IY+::)
DD CB :: 3B LD E,SRL (IX+::)
FD CB :: 3B LD E,SRL (IY+::)
26 :: LD H,::
66 LD H,(HL)
DD 66 :: LD H,(IX+::)
FD 66 :: LD H,(IY+::)
67 LD H,A
60 LD H,B
61 LD H,C
62 LD H,D
63 LD H,E
64 LD H,H
65 LD H,L
DD CB :: 84 LD H,RES 0,(IX+::)
DD CB :: 8C LD H,RES 1,(IX+::)
DD CB :: 94 LD H,RES 2,(IX+::)
DD CB :: 9C LD H,RES 3,(IX+::)
DD CB :: A4 LD H,RES 4,(IX+::)
DD CB :: AC LD H,RES 5,(IX+::)
DD CB :: B4 LD H,RES 6,(IX+::)
DD CB :: BC LD H,RES 7,(IX+::)
FD CB :: 84 LD H,RES 0,(IY+::)
FD CB :: 8C LD H,RES 1,(IY+::)
FD CB :: 94 LD H,RES 2,(IY+::)
FD CB :: 9C LD H,RES 3,(IY+::)
FD CB :: A4 LD H,RES 4,(IY+::)
FD CB :: AC LD H,RES 5,(IY+::)
FD CB :: B4 LD H,RES 6,(IY+::)
FD CB :: BC LD H,RES 7,(IY+::)
DD CB :: 14 LD H,RL (IX+::)
FD CB :: 14 LD H,RL (IY+::)
DD CB :: 04 LD H,RLC (IX+::)
FD CB :: 04 LD H,RLC (IY+::)
DD CB :: 1C LD H,RR (IX+::)
FD CB :: 1C LD H,RR (IY+::)
DD CB :: 0C LD H,RRC (IX+::)
FD CB :: 0C LD H,RRC (IY+::)
DD CB :: C4 LD H,SET 0,(IX+::)
DD CB :: CC LD H,SET 1,(IX+::)
DD CB :: D4 LD H,SET 2,(IX+::)
DD CB :: DC LD H,SET 3,(IX+::)
DD CB :: E4 LD H,SET 4,(IX+::)
DD CB :: EC LD H,SET 5,(IX+::)
DD CB :: F4 LD H,SET 6,(IX+::)
DD CB :: FC LD H,SET 7,(IX+::)
FD CB :: C4 LD H,SET 0,(IY+::)
FD CB :: CC LD H,SET 1,(IY+::)
FD CB :: D4 LD H,SET 2,(IY+::)
FD CB :: DC LD H,SET 3,(IY+::)
FD CB :: E4 LD H,SET 4,(IY+::)
FD CB :: EC LD H,SET 5,(IY+::)
FD CB :: F4 LD H,SET 6,(IY+::)
FD CB :: FC LD H,SET 7,(IY+::)
DD CB :: 24 LD H,SLA (IX+::)
FD CB :: 24 LD H,SLA (IY+::)
DD CB :: 34 LD H,SLL (IX+::)
FD CB :: 34 LD H,SLL (IY+::)
DD CB :: 2C LD H,SRA (IX+::)
FD CB :: 2C LD H,SRA (IY+::)
DD CB :: 3C LD H,SRL (IX+::)
FD CB :: 3C LD H,SRL (IY+::)
2A :: :: LD HL,(@@)
21 :: :: LD HL,@@
ED 47 LD I,A
DD 2A :: :: LD IX,(@@)
DD 21 :: :: LD IX,@@
DD 26 :: LD IXH,::
DD 67 LD IXH,A
DD 60 LD IXH,B
DD 61 LD IXH,C
DD 62 LD IXH,D
DD 63 LD IXH,E
DD 64 LD IXH,IXH
DD 65 LD IXH,IXL
DD 2E :: LD IXL,::
DD 6F LD IXL,A
DD 68 LD IXL,B
DD 69 LD IXL,C
DD 6A LD IXL,D
DD 6B LD IXL,E
DD 6C LD IXL,IXH
DD 6D LD IXL,IXL
FD 2A :: :: LD IY,(@@)
FD 21 :: :: LD IY,@@
FD 26 :: LD IYH,::
FD 67 LD IYH,A
FD 60 LD IYH,B
FD 61 LD IYH,C
FD 62 LD IYH,D
FD 63 LD IYH,E
FD 64 LD IYH,IYH
FD 65 LD IYH,IYL
FD 2E :: LD IYL,::
FD 6F LD IYL,A
FD 68 LD IYL,B
FD 69 LD IYL,C
FD 6A LD IYL,D
FD 6B LD IYL,E
FD 6C LD IYL,IYH
FD 6D LD IYL,IYL
2E :: LD L,::
6E LD L,(HL)
DD 6E :: LD L,(IX+::)
FD 6E :: LD L,(IY+::)
6F LD L,A
68 LD L,B
69 LD L,C
6A LD L,D
6B LD L,E
6C LD L,H
6D LD L,L
DD CB :: 85 LD L,RES 0,(IX+::)
DD CB :: 8D LD L,RES 1,(IX+::)
DD CB :: 95 LD L,RES 2,(IX+::)
DD CB :: 9D LD L,RES 3,(IX+::)
DD CB :: A5 LD L,RES 4,(IX+::)
DD CB :: AD LD L,RES 5,(IX+::)
DD CB :: B5 LD L,RES 6,(IX+::)
DD CB :: BD LD L,RES 7,(IX+::)
FD CB :: 85 LD L,RES 0,(IY+::)
FD CB :: 8D LD L,RES 1,(IY+::)
FD CB :: 95 LD L,RES 2,(IY+::)
FD CB :: 9D LD L,RES 3,(IY+::)
FD CB :: A5 LD L,RES 4,(IY+::)
FD CB :: AD LD L,RES 5,(IY+::)
FD CB :: B5 LD L,RES 6,(IY+::)
FD CB :: BD LD L,RES 7,(IY+::)
DD CB :: 15 LD L,RL (IX+::)
FD CB :: 15 LD L,RL (IY+::)
DD CB :: 05 LD L,RLC (IX+::)
FD CB :: 05 LD L,RLC (IY+::)
DD CB :: 1D LD L,RR (IX+::)
FD CB :: 1D LD L,RR (IY+::)
DD CB :: 0D LD L,RRC (IX+::)
FD CB :: 0D LD L,RRC (IY+::)
DD CB :: C5 LD L,SET 0,(IX+::)
DD CB :: CD LD L,SET 1,(IX+::)
DD CB :: D5 LD L,SET 2,(IX+::)
DD CB :: DD LD L,SET 3,(IX+::)
DD CB :: E5 LD L,SET 4,(IX+::)
DD CB :: ED LD L,SET 5,(IX+::)
DD CB :: F5 LD L,SET 6,(IX+::)
DD CB :: FD LD L,SET 7,(IX+::)
FD CB :: C5 LD L,SET 0,(IY+::)
FD CB :: CD LD L,SET 1,(IY+::)
FD CB :: D5 LD L,SET 2,(IY+::)
FD CB :: DD LD L,SET 3,(IY+::)
FD CB :: E5 LD L,SET 4,(IY+::)
FD CB :: ED LD L,SET 5,(IY+::)
FD CB :: F5 LD L,SET 6,(IY+::)
FD CB :: FD LD L,SET 7,(IY+::)
DD CB :: 25 LD L,SLA (IX+::)
FD CB :: 25 LD L,SLA (IY+::)
DD CB :: 35 LD L,SLL (IX+::)
FD CB :: 35 LD L,SLL (IY+::)
DD CB :: 2D LD L,SRA (IX+::)
FD CB :: 2D LD L,SRA (IY+::)
DD CB :: 3D LD L,SRL (IX+::)
FD CB :: 3D LD L,SRL (IY+::)
ED 7B :: :: LD SP,(@@)
31 :: :: LD SP,@@
F9 LD SP,HL
DD F9 LD SP,IX
FD F9 LD SP,IY
ED 4F LD R,A
ED A8 LDD
ED B8 LDDR
ED A0 LDI
ED B0 LDIR
ED 44 NEG
00 NOP
F6 :: OR ::
B6 OR (HL)
DD B6 :: OR (IX+::)
FD B6 :: OR (IY+::)
B7 OR A
B6 OR A,(HL)
B7 OR A,A
B0 OR A,B
B1 OR A,C
B2 OR A,D
B3 OR A,E
B4 OR A,H
B5 OR A,L
B0 OR B
B1 OR C
B2 OR D
B3 OR E
B4 OR H
DD B4 OR IXH
DD B5 OR IXL
FD B4 OR IYH
FD B5 OR IYL
B5 OR L
ED BB OTDR
ED B3 OTIR
D3 :: OUT (::),A
ED 71 OUT (C),0
ED 79 OUT (C),A
ED 41 OUT (C),B
ED 49 OUT (C),C
ED 51 OUT (C),D
ED 59 OUT (C),E
ED 61 OUT (C),H
ED 69 OUT (C),L
ED AB OUTD
ED A3 OUTI
F1 POP AF
C1 POP BC
D1 POP DE
E1 POP HL
DD E1 POP IX
FD E1 POP IY
F5 PUSH AF
C5 PUSH BC
D5 PUSH DE
E5 PUSH HL
DD E5 PUSH IX
FD E5 PUSH IY
CB 86 RES 0,(HL)
CB 8E RES 1,(HL)
CB 96 RES 2,(HL)
CB 9E RES 3,(HL)
CB A6 RES 4,(HL)
CB AE RES 5,(HL)
CB B6 RES 6,(HL)
CB BE RES 7,(HL)
DD CB :: 86 RES 0,(IX+::)
DD CB :: 8E RES 1,(IX+::)
DD CB :: 96 RES 2,(IX+::)
DD CB :: 9E RES 3,(IX+::)
DD CB :: A6 RES 4,(IX+::)
DD CB :: AE RES 5,(IX+::)
DD CB :: B6 RES 6,(IX+::)
DD CB :: BE RES 7,(IX+::)
FD CB :: 86 RES 0,(IY+::)
FD CB :: 8E RES 1,(IY+::)
FD CB :: 96 RES 2,(IY+::)
FD CB :: 9E RES 3,(IY+::)
FD CB :: A6 RES 4,(IY+::)
FD CB :: AE RES 5,(IY+::)
FD CB :: B6 RES 6,(IY+::)
FD CB :: BE RES 7,(IY+::)
CB 87 RES 0,A
CB 8F RES 1,A
CB 97 RES 2,A
CB 9F RES 3,A
CB A7 RES 4,A
CB AF RES 5,A
CB B7 RES 6,A
CB BF RES 7,A
CB 80 RES 0,B
CB 88 RES 1,B
CB 90 RES 2,B
CB 98 RES 3,B
CB A0 RES 4,B
CB A8 RES 5,B
CB B0 RES 6,B
CB B8 RES 7,B
CB 81 RES 0,C
CB 89 RES 1,C
CB 91 RES 2,C
CB 99 RES 3,C
CB A1 RES 4,C
CB A9 RES 5,C
CB B1 RES 6,C
CB B9 RES 7,C
CB 82 RES 0,D
CB 8A RES 1,D
CB 92 RES 2,D
CB 9A RES 3,D
CB A2 RES 4,D
CB AA RES 5,D
CB B2 RES 6,D
CB BA RES 7,D
CB 83 RES 0,E
CB 8B RES 1,E
CB 93 RES 2,E
CB 9B RES 3,E
CB A3 RES 4,E
CB AB RES 5,E
CB B3 RES 6,E
CB BB RES 7,E
CB 84 RES 0,H
CB 8C RES 1,H
CB 94 RES 2,H
CB 9C RES 3,H
CB A4 RES 4,H
CB AC RES 5,H
CB B4 RES 6,H
CB BC RES 7,H
CB 85 RES 0,L
CB 8D RES 1,L
CB 95 RES 2,L
CB 9D RES 3,L
CB A5 RES 4,L
CB AD RES 5,L
CB B5 RES 6,L
CB BD RES 7,L
C9 RET
D8 RET C
F8 RET M
D0 RET NC
C0 RET NZ
F0 RET P
E8 RET PE
E0 RET PO
C8 RET Z
ED 4D RETI
ED 45 RETN
CB 16 RL (HL)
DD CB :: 16 RL (IX+::)
FD CB :: 16 RL (IY+::)
CB 17 RL A
CB 10 RL B
CB 11 RL C
CB 12 RL D
CB 13 RL E
CB 14 RL H
CB 15 RL L
17 RLA
CB 06 RLC (HL)
DD CB :: 06 RLC (IX+::)
FD CB :: 06 RLC (IY+::)
CB 07 RLC A
CB 00 RLC B
CB 01 RLC C
CB 02 RLC D
CB 03 RLC E
CB 04 RLC H
CB 05 RLC L
07 RLCA
ED 6F RLD
CB 1E RR (HL)
DD CB :: 1E RR (IX+::)
FD CB :: 1E RR (IY+::)
CB 1F RR A
CB 18 RR B
CB 19 RR C
CB 1A RR D
CB 1B RR E
CB 1C RR H
CB 1D RR L
1F RRA
CB 0E RRC (HL)
DD CB :: 0E RRC (IX+::)
FD CB :: 0E RRC (IY+::)
CB 0F RRC A
CB 08 RRC B
CB 09 RRC C
CB 0A RRC D
CB 0B RRC E
CB 0C RRC H
CB 0D RRC L
0F RRCA
ED 67 RRD
C7 RST 0X00
CF RST 0X08
D7 RST 0X10
DF RST 0X18
E7 RST 0X20
EF RST 0X28
F7 RST 0X30
FF RST 0X38
DE :: SBC A,::
9E SBC A,(HL)
DD 9E :: SBC A,(IX+::)
FD 9E :: SBC A,(IY+::)
9F SBC A,A
98 SBC A,B
99 SBC A,C
9A SBC A,D
9B SBC A,E
9C SBC A,H
DD 9C SBC A,IXH
DD 9D SBC A,IXL
FD 9C SBC A,IYH
FD 9D SBC A,IYL
9D SBC A,L
ED 42 SBC HL,BC
ED 52 SBC HL,DE
ED 62 SBC HL,HL
ED 72 SBC HL,SP
37 SCF
CB C6 SET 0,(HL)
CB CE SET 1,(HL)
CB D6 SET 2,(HL)
CB DE SET 3,(HL)
CB E6 SET 4,(HL)
CB EE SET 5,(HL)
CB F6 SET 6,(HL)
CB FE SET 7,(HL)
DD CB :: C6 SET 0,(IX+::)
DD CB :: CE SET 1,(IX+::)
DD CB :: D6 SET 2,(IX+::)
DD CB :: DE SET 3,(IX+::)
DD CB :: E6 SET 4,(IX+::)
DD CB :: EE SET 5,(IX+::)
DD CB :: F6 SET 6,(IX+::)
DD CB :: FE SET 7,(IX+::)
FD CB :: C6 SET 0,(IY+::)
FD CB :: CE SET 1,(IY+::)
FD CB :: D6 SET 2,(IY+::)
FD CB :: DE SET 3,(IY+::)
FD CB :: E6 SET 4,(IY+::)
FD CB :: EE SET 5,(IY+::)
FD CB :: F6 SET 6,(IY+::)
FD CB :: FE SET 7,(IY+::)
CB C7 SET 0,A
CB CF SET 1,A
CB D7 SET 2,A
CB DF SET 3,A
CB E7 SET 4,A
CB EF SET 5,A
CB F7 SET 6,A
CB FF SET 7,A
CB C0 SET 0,B
CB C8 SET 1,B
CB D0 SET 2,B
CB D8 SET 3,B
CB E0 SET 4,B
CB E8 SET 5,B
CB F0 SET 6,B
CB F8 SET 7,B
CB C1 SET 0,C
CB C9 SET 1,C
CB D1 SET 2,C
CB D9 SET 3,C
CB E1 SET 4,C
CB E9 SET 5,C
CB F1 SET 6,C
CB F9 SET 7,C
CB C2 SET 0,D
CB CA SET 1,D
CB D2 SET 2,D
CB DA SET 3,D
CB E2 SET 4,D
CB EA SET 5,D
CB F2 SET 6,D
CB FA SET 7,D
CB C3 SET 0,E
CB CB SET 1,E
CB D3 SET 2,E
CB DB SET 3,E
CB E3 SET 4,E
CB EB SET 5,E
CB F3 SET 6,E
CB FB SET 7,E
CB C4 SET 0,H
CB CC SET 1,H
CB D4 SET 2,H
CB DC SET 3,H
CB E4 SET 4,H
CB EC SET 5,H
CB F4 SET 6,H
CB FC SET 7,H
CB C5 SET 0,L
CB CD SET 1,L
CB D5 SET 2,L
CB DD SET 3,L
CB E5 SET 4,L
CB ED SET 5,L
CB F5 SET 6,L
CB FD SET 7,L
CB 26 SLA (HL)
DD CB :: 26 SLA (IX+::)
FD CB :: 26 SLA (IY+::)
CB 27 SLA A
CB 20 SLA B
CB 21 SLA C
CB 22 SLA D
CB 23 SLA E
CB 24 SLA H
CB 25 SLA L
CB 36 SLL (HL)
DD CB :: 36 SLL (IX+::)
FD CB :: 36 SLL (IY+::)
CB 37 SLL A
CB 30 SLL B
CB 31 SLL C
CB 32 SLL D
CB 33 SLL E
CB 34 SLL H
CB 35 SLL L
CB 2E SRA (HL)
DD CB :: 2E SRA (IX+::)
FD CB :: 2E SRA (IY+::)
CB 2F SRA A
CB 28 SRA B
CB 29 SRA C
CB 2A SRA D
CB 2B SRA E
CB 2C SRA H
CB 2D SRA L
CB 3E SRL (HL)
DD CB :: 3E SRL (IX+::)
FD CB :: 3E SRL (IY+::)
CB 3F SRL A
CB 38 SRL B
CB 39 SRL C
CB 3A SRL D
CB 3B SRL E
CB 3C SRL H
CB 3D SRL L
D6 :: SUB ::
96 SUB (HL)
DD 96 :: SUB (IX+::)
FD 96 :: SUB (IY+::)
97 SUB A
96 SUB A,(HL)
97 SUB A,A
90 SUB A,B
91 SUB A,C
92 SUB A,D
93 SUB A,E
94 SUB A,H
95 SUB A,L
90 SUB B
91 SUB C
92 SUB D
93 SUB E
94 SUB H
DD 94 SUB IXH
DD 95 SUB IXL
FD 94 SUB IYH
FD 95 SUB IYL
95 SUB L
EE :: XOR ::
AE XOR (HL)
DD AE :: XOR (IX+::)
FD AE :: XOR (IY+::)
AF XOR A
AE XOR A,(HL)
AF XOR A,A
A8 XOR A,B
A9 XOR A,C
AA XOR A,D
AB XOR A,E
AC XOR A,H
AD XOR A,L
A8 XOR B
A9 XOR C
AA XOR D
AB XOR E
AC XOR H
DD AC XOR IXH
DD AD XOR IXL
FD AC XOR IYH
FD AD XOR IYL
AD XOR L
'''
instructions = {}
def create_pattern(candidate):
"""
Create search pattern for parameter placeholders
:param candidate: Instruction template
    :return: Tuple of (compiled regular expression for matching, list of placeholder types found)
"""
types = []
pat = '@@|::|%%'
while True:
m = re.search(pat, candidate)
if m:
s = m.span()
types.append(candidate[s[0]:s[1]])
            replacement = num_label  # placeholder regex fragment defined earlier in this module
candidate = candidate[0:s[0]] + replacement + candidate[s[1]:]
else:
break
return re.compile(candidate + "$"), types
def preprocess():
    """Parse the fixed-width opcode table in `db` into the `instructions` lookup."""
    lines = db.split('\n')
    lines = [l.strip() for l in lines]
    lines = [l for l in lines if len(l) > 0]
    for line in lines:
        code = line[0:17].strip()  # fixed-width opcode byte column
        inst = line[17:]  # mnemonic and operands
        mnemonic = inst.split()[0]
        if mnemonic not in instructions:
            instructions[mnemonic] = []
        inst, types = create_pattern(re.escape(inst))
        instructions[mnemonic].append((code, inst, types))
def get_instructions(mnemonic):
    """Return the (code, pattern, types) entries for a mnemonic, building the lookup table on first use."""
    if len(instructions) == 0:
        preprocess()
    return instructions.get(mnemonic.upper())
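# A minimal usage sketch (hedged): it assumes `re`, `db`, and `num_label` are
# defined earlier in this module, since the functions above reference them.
if __name__ == '__main__':
    # Look up every encoding of the LD mnemonic and match one concrete form
    for code, pattern, types in get_instructions('LD') or []:
        if pattern.match('LD A,B'):
            print(code, types)  # expected: '78' with no placeholder types
            break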
<img align="right" style="padding:35px" src="notebooks/images/SCP_med.png" width="160">
# Zeek Analysis Tools (ZAT)
 [](http://codecov.io/github/SuperCowPowers/zat?branch=master) [](https://pypi.python.org/pypi/zat) [](https://choosealicense.com/licenses/apache-2.0)
The ZAT Python package supports the processing and analysis of Zeek data
with Pandas, scikit-learn, Kafka, and Spark.
### Install
```
pip install zat
pip install zat[pyspark] (includes pyspark library)
pip install zat[all] (includes pyarrow, yara-python, and tldextract)
```
### Getting Started
- [Examples of Using ZAT](https://supercowpowers.github.io/zat/examples.html)
### Installing on Raspberry Pi!
- [Raspberry Pi Instructions](https://supercowpowers.github.io/zat/raspberry_pi.html)
### Recent Improvements
- Faster/Smaller Pandas Dataframes for large log files: [Large Dataframes](https://supercowpowers.github.io/zat/large_dataframes.html)
- Better Pandas Dataframe to Matrix (ndarray) support: [Dataframe To Matrix](https://supercowpowers.github.io/zat/dataframe_to_matrix.html)
- Scalable conversion from Zeek logs to Parquet: [Zeek to Parquet](https://nbviewer.jupyter.org/github/SuperCowPowers/zat/blob/main/notebooks/Zeek_to_Parquet.ipynb)
- Vastly improved Spark Dataframe Class: [Zeek to Spark](https://nbviewer.jupyter.org/github/SuperCowPowers/zat/blob/main/notebooks/Zeek_to_Spark.ipynb)
- Updated/improved Notebooks: [Analysis Notebooks](#analysis-notebooks)
- Zeek JSON to DataFrame class: [Zeek JSON to DataFrame Example](https://github.com/SuperCowPowers/zat/blob/main/examples/zeek_json_to_pandas.py)
### Video Presentation
- [Data Analysis and Machine Learning with Zeek](https://www.youtube.com/watch?v=pG5lU9CLnIU)
### Why ZAT?
Zeek already has a flexible, powerful scripting language, so why should I use ZAT?
**Offloading:** Running complex tasks like statistics, state machines,
machine learning, etc. should be offloaded from Zeek so that Zeek can
focus on the efficient processing of high volume network traffic.
**Data Analysis:** We have a large set of support classes that help
bridge from raw Zeek data to packages like Pandas, scikit-learn, Kafka, and
Spark. We also have example notebooks that show step-by-step how to get
from here to there.
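For example, here is a minimal sketch of reading a Zeek log into a Pandas DataFrame (the log path below is just illustrative):
```
from zat import log_to_dataframe

log_to_df = log_to_dataframe.LogToDataFrame()
zeek_df = log_to_df.create_dataframe('/path/to/conn.log')
print(zeek_df.head())
```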
### Analysis Notebooks
- [Zeek to Scikit-Learn](https://nbviewer.jupyter.org/github/SuperCowPowers/zat/blob/main/notebooks/Zeek_to_Scikit_Learn.ipynb)
- [Zeek to Parquet](https://nbviewer.jupyter.org/github/SuperCowPowers/zat/blob/main/notebooks/Zeek_to_Parquet.ipynb)
- [Zeek to Spark](https://nbviewer.jupyter.org/github/SuperCowPowers/zat/blob/main/notebooks/Zeek_to_Spark.ipynb)
- [Spark Clustering](https://nbviewer.jupyter.org/github/SuperCowPowers/zat/blob/main/notebooks/Spark_Clustering.ipynb)
- [Zeek to Kafka](https://nbviewer.jupyter.org/github/SuperCowPowers/zat/blob/main/notebooks/Zeek_to_Kafka.ipynb)
- [Zeek to Kafka to Spark](https://nbviewer.jupyter.org/github/SuperCowPowers/zat/blob/main/notebooks/Zeek_to_Kafka_to_Spark.ipynb)
- [Clustering: Picking K (or not)](https://nbviewer.jupyter.org/github/SuperCowPowers/zat/blob/main/notebooks/Clustering_Picking_K.ipynb)
- [Anomaly Detection Exploration](https://nbviewer.jupyter.org/github/SuperCowPowers/zat/blob/main/notebooks/Anomaly_Detection.ipynb)
- [Risky Domains Stats and Deployment](https://nbviewer.jupyter.org/github/SuperCowPowers/zat/blob/main/notebooks/Risky_Domains.ipynb)
- [Zeek to Matplotlib](https://nbviewer.jupyter.org/github/SuperCowPowers/zat/blob/main/notebooks/Zeek_to_Plot.ipynb)
<img align="right" style="padding: 10px" src="notebooks/images/SCP_med.png" width="120">
### Documentation
<https://supercowpowers.github.io/zat/>
#### Running the Tests
```
pip install pytest coverage pytest-cov
pytest zat
```
### About SuperCowPowers
The company was formed so that its developers could follow their passion for Python, streaming data pipelines and having fun with data analysis. We also think cows are cool and should be superheroes or at least carry around rayguns and burner phones. <a href="https://www.supercowpowers.com" target="_blank">Visit SuperCowPowers</a>
import os
import sys
import argparse
import time
import math
from collections import Counter
# Third Party Imports
import pandas as pd
from sklearn.ensemble import IsolationForest
from sklearn.cluster import MiniBatchKMeans
# Local imports
from zat import live_simulator
from zat import dataframe_to_matrix, dataframe_cache
def entropy(string):
"""Compute entropy on the string"""
p, lns = Counter(string), float(len(string))
return -sum(count/lns * math.log(count/lns, 2) for count in p.values())
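# For example (illustrative): entropy('aaaa') is 0.0, while entropy('a1b2'),
# where all four characters are distinct, is 2.0 bits.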
if __name__ == '__main__':
# Example to show the dataframe cache functionality on streaming data
pd.set_option('display.width', 200)
# Collect args from the command line
parser = argparse.ArgumentParser()
parser.add_argument('zeek_log', type=str, help='Specify a zeek log to run ZeekLogReader test on')
args, commands = parser.parse_known_args()
# Check for unknown args
if commands:
print('Unrecognized args: %s' % commands)
sys.exit(1)
# File may have a tilde in it
if args.zeek_log:
args.zeek_log = os.path.expanduser(args.zeek_log)
# Sanity check dns log
if 'dns' in args.zeek_log:
log_type = 'dns'
else:
print('This example only works with Zeek with dns.log files..')
sys.exit(1)
    # Create a Zeek IDS log live simulator
    print('Opening Data File: {:s}'.format(args.zeek_log))
    reader = live_simulator.LiveSimulator(args.zeek_log, eps=10)  # 10 events per second
# Create a Dataframe Cache
df_cache = dataframe_cache.DataFrameCache(max_cache_time=600) # 10 minute cache
# Streaming Clustering Class
batch_kmeans = MiniBatchKMeans(n_clusters=5, verbose=True)
    # Use the zat DataframeToMatrix class
to_matrix = dataframe_to_matrix.DataFrameToMatrix()
# Add each new row into the cache
time_delta = 10
timer = time.time() + time_delta
FIRST_TIME = True
for row in reader.rows():
df_cache.add_row(row)
        # Every 10 seconds grab the dataframe from the cache
if time.time() > timer:
timer = time.time() + time_delta
# Get the windowed dataframe (10 minute window)
zeek_df = df_cache.dataframe()
# Compute some addition data
zeek_df['query_length'] = zeek_df['query'].str.len()
zeek_df['answer_length'] = zeek_df['answers'].str.len()
zeek_df['entropy'] = zeek_df['query'].map(lambda x: entropy(x))
            # Reuse the zat DataframeToMatrix instance created above
            features = ['Z', 'proto', 'qtype_name', 'query_length', 'answer_length', 'entropy', 'id.resp_p']
zeek_matrix = to_matrix.fit_transform(zeek_df[features])
print(zeek_matrix.shape)
# Print out the range of the daterange and some stats
print('DataFrame TimeRange: {:s} --> {:s}'.format(str(zeek_df['ts'].min()), str(zeek_df['ts'].max())))
# Train/fit and Predict anomalous instances using the Isolation Forest model
odd_clf = IsolationForest(contamination=0.2) # Marking 20% as odd
predictions = odd_clf.fit_predict(zeek_matrix)
            odd_df = zeek_df[predictions == -1].copy()
# Now we're going to explore our odd observations with help from KMeans
odd_matrix = to_matrix.transform(odd_df[features])
batch_kmeans.partial_fit(odd_matrix)
clusters = batch_kmeans.predict(odd_matrix).tolist()
odd_df['cluster'] = clusters
# Now group the dataframe by cluster
cluster_groups = odd_df.groupby('cluster')
# Now print out the details for each cluster
show_fields = ['id.orig_h', 'id.resp_h', 'query'] + features
print('<<< Outliers Detected! >>>')
for key, group in cluster_groups:
print('\nCluster {:d}: {:d} observations'.format(key, len(group)))
                print(group[show_fields].head())
import os
import sys
import argparse
import re
from collections import Counter
# Local imports
from zat import zeek_log_reader
if __name__ == '__main__':
# Example to check for potential Tor connections and give a summary of different ports
# used for SSL connections. Please note that your Zeek installation must stamp the
# ssl.log file with the 'issuer' field. More info can be found here:
# https://docs.zeek.org/en/main/script-reference/proto-analyzers.html#zeek-ssl
    # Set up the regex search that is used against the issuer field
    issuer_regex = re.compile(r'CN=www\.\w+\.com')
    # Set up the regex search that is used against the subject field
    subject_regex = re.compile(r'CN=www\.\w+\.net')
# Collect args from the command line
parser = argparse.ArgumentParser()
parser.add_argument('zeek_log', type=str, help='Specify a zeek log to run ZeekLogReader test on')
parser.add_argument('-t', action='store_true', default=False, help='Sets the program to tail a live Zeek log')
args, commands = parser.parse_known_args()
# Check for unknown args
if commands:
print('Unrecognized args: %s' % commands)
sys.exit(1)
# Sanity check that this is a ssl log
if 'ssl' not in args.zeek_log:
print('This example only works with Zeek ssl.log files..')
sys.exit(1)
# File may have a tilde in it
if args.zeek_log:
args.zeek_log = os.path.expanduser(args.zeek_log)
# Run the zeek reader on the ssl.log file looking for potential Tor connections
reader = zeek_log_reader.ZeekLogReader(args.zeek_log, tail=args.t)
# Just a counter to keep an eye on how many possible Tor connections we identify
number = 0
    # An empty list to use for the port statistics
ports = []
for row in reader.readrows():
# Add the destination port to the list of ports
ports.append(row['id.resp_p'])
# Pull out the Certificate Issuer
try:
issuer = row['issuer']
except KeyError:
print('Could not find the issuer field in your ssl.log. Please verify your log file.')
sys.exit(1)
# Check if the issuer matches the known Tor format
if issuer_regex.match(issuer):
# Pull out the Certificate Subject
try:
subject = row['subject']
except KeyError:
print('Could not find the subject field in your ssl.log. Please verify your log file.')
sys.exit(1)
# Check if the subject matches the known Tor format
if subject_regex.match(subject):
print('\nPossible Tor connection found')
print('From: {:s} To: {:s} Port: {:d}'.format(row['id.orig_h'], row['id.resp_h'], row['id.resp_p']))
                number += 1
# If we are not tailing a live log file, let's print some stats.
if not args.t:
# First let's print (if any) the number of possible Tor connections that were found
print('\nTotal number of possible Tor connections found: {:d}'.format(number))
# Now let's do the stats on and printing of the port count
portcount = Counter(ports)
print('\nPort statistics')
for port, count in portcount.most_common():
            print('{:<7} {:d}'.format(port, count))
import sys
import argparse
from time import sleep
try:
from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StringType, BooleanType, IntegerType
from pyspark.sql.functions import from_json, to_json, col, struct, udf
except ImportError:
print('\npip install pyspark')
sys.exit(1)
try:
import tldextract
except ImportError:
print('\nThis example needs tldextract. Please do a $pip install tldextract and rerun this example')
sys.exit(1)
def exit_program():
"""Exit on Signal"""
print('Exiting Program...')
sys.exit()
def compute_domain(query):
# Pull out the domain
if query.endswith('.local'):
return 'local'
return tldextract.extract(query).registered_domain if query else None
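# For example (illustrative): compute_domain('mail.google.com') returns
# 'google.com', while compute_domain('myhost.local') returns 'local'.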
if __name__ == '__main__':
"""Read Kafka Streams into Spark, perform simple filtering/aggregation"""
parser = argparse.ArgumentParser()
parser.add_argument('--server', type=str, default='localhost:9092',
help='Specify the Kafka Server (default: localhost:9092)')
args, commands = parser.parse_known_args()
# Check for unknown args
if commands:
print('Unrecognized args: %s' % commands)
sys.exit(1)
# Grab the Kafka server
kserver = args.server
# Spin up a local Spark Session (with 4 executors)
spark = SparkSession.builder.master('local[4]').appName('my_awesome') \
.config('spark.jars.packages', 'org.apache.spark:spark-sql-kafka-0-10_2.11:2.4.4') \
.getOrCreate()
spark.sparkContext.setLogLevel('ERROR')
    # Optimize Pandas <-> Spark conversion with Arrow
    spark.conf.set("spark.sql.execution.arrow.enabled", "true")
# SUBSCRIBE: Setup connection to Kafka Stream
raw_data = spark.readStream.format('kafka').option('kafka.bootstrap.servers', kserver) \
.option('subscribe', 'dns') \
.option('startingOffsets', 'earliest').load()
# Define the schema for the DNS message (do this better)
dns_schema = StructType().add('ts', StringType()).add('uid', StringType()).add('id.orig_h', StringType()) \
.add('id.orig_p', IntegerType()).add('id.resp_h', StringType()).add('id.resp_p', IntegerType()) \
.add('proto', StringType()).add('trans_id', IntegerType()).add('query', StringType()) \
.add('qclass', IntegerType()).add('qclass_name', StringType()).add('qtype', IntegerType()) \
.add('qtype_name', StringType()).add('rcode', IntegerType()).add('rcode_name', StringType()) \
.add('AA', BooleanType()).add('TC', BooleanType()).add('RD', BooleanType()).add('RA', BooleanType()) \
.add('Z', IntegerType()).add('answers', StringType()).add('TTLs', StringType()).add('rejected', BooleanType())
# ETL: Convert raw data into parsed and proper typed data
parsed_data = raw_data.select(from_json(col('value').cast('string'), dns_schema).alias('data')).select('data.*')
    # FILTER: Only get DNS records that have the 'query' field filled out
    filtered_data = parsed_data.filter(parsed_data.query.isNotNull() & (parsed_data.query != ''))
# FILTER 2: Remove Local/mDNS queries
filtered_data = filtered_data.filter(~filtered_data.query.like('%.local')) # Note: using the '~' negation operator
# COMPUTE: A new column with the 2nd level domain extracted from the query
udf_compute_domain = udf(compute_domain, StringType())
computed_data = filtered_data.withColumn('domain', udf_compute_domain('query'))
# AGGREGATE: In this case a simple groupby operation
group_data = computed_data.groupBy('`id.orig_h`', 'domain', 'qtype_name').count()
# At any point in the pipeline you can see what you're getting out
group_data.printSchema()
# Take the end of our pipeline and pull it into memory
dns_count_memory_table = group_data.writeStream.format('memory').queryName('dns_counts').outputMode('complete').start()
# Let the pipeline pull some data
    print('Pulling pipeline...Please wait...')
# Create a Pandas Dataframe by querying the in memory table and converting
# Loop around every 5 seconds to update output
for _ in range(10):
sleep(5)
dns_counts_df = spark.sql("select * from dns_counts").toPandas()
print('\nDNS Query Total Counts = {:d}'.format(dns_counts_df['count'].sum()))
print(dns_counts_df.sort_values(ascending=False, by='count'))
# Stop the stream
dns_count_memory_table.stop()
    sleep(1)
import os
import sys
import argparse
# Third Party Imports
import pandas as pd
from sklearn.decomposition import PCA
from sklearn.cluster import KMeans
import numpy as np
# Local imports
from zat import log_to_dataframe
from zat import dataframe_to_matrix
# Helper method for scatter/beeswarm plot
def jitter(arr):
stdev = .02*(max(arr)-min(arr))
return arr + np.random.randn(len(arr)) * stdev
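# For example (illustrative): jitter adds Gaussian noise with a standard
# deviation of 2% of the value range, so overlapping scatter points separate.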
if __name__ == '__main__':
# Example that demonstrates going from Zeek data to scikit-learn models
# Collect args from the command line
parser = argparse.ArgumentParser()
parser.add_argument('zeek_log', type=str, help='Specify a zeek log to run ZeekLogReader test on')
args, commands = parser.parse_known_args()
# Check for unknown args
if commands:
print('Unrecognized args: %s' % commands)
sys.exit(1)
# Sanity check that this is a dns log
if 'dns' not in args.zeek_log:
print('This example only works with Zeek dns.log files..')
sys.exit(1)
# File may have a tilde in it
if args.zeek_log:
args.zeek_log = os.path.expanduser(args.zeek_log)
# Create a Pandas dataframe from the Zeek log
log_to_df = log_to_dataframe.LogToDataFrame()
zeek_df = log_to_df.create_dataframe(args.zeek_log)
# Add query length
zeek_df['query_length'] = zeek_df['query'].str.len()
# Normalize this field
#ql = zeek_df['query_length']
#zeek_df['query_length_norm'] = (ql - ql.min()) / (ql.max()-ql.min())
# These are the features we want (note some of these are categorical!)
features = ['AA', 'RA', 'RD', 'TC', 'Z', 'rejected', 'proto', 'qtype_name', 'rcode_name', 'query_length']
feature_df = zeek_df[features]
# Use the super awesome DataframeToMatrix class (handles categorical data!)
to_matrix = dataframe_to_matrix.DataFrameToMatrix()
zeek_matrix = to_matrix.fit_transform(feature_df)
# Now we're ready for scikit-learn!
# Just some simple stuff for this example, KMeans and PCA
kmeans = KMeans(n_clusters=5).fit_predict(zeek_matrix)
pca = PCA(n_components=2).fit_transform(zeek_matrix)
# Now we can put our ML results back onto our dataframe!
zeek_df['x'] = jitter(pca[:, 0]) # PCA X Column
zeek_df['y'] = jitter(pca[:, 1]) # PCA Y Column
zeek_df['cluster'] = kmeans
# Now use dataframe group by cluster
show_fields = ['query', 'Z', 'proto', 'qtype_name', 'x', 'y', 'cluster']
cluster_groups = zeek_df[show_fields].groupby('cluster')
# Now print out the details for each cluster
pd.set_option('display.width', 1000)
for key, group in cluster_groups:
print('Rows in Cluster: {:d}'.format(len(group)))
        print(group.head(), '\n')
import os
import sys
import argparse
# Note: We're going to import pyarrow but it currently has an open issue around supporting time deltas
# - https://issues.apache.org/jira/browse/ARROW-6780 so we have to convert timedelta fields to str
# Also see:
# - https://stackoverflow.com/questions/53893554/transfer-and-write-parquet-with-python-and-pandas-got-timestamp-error
#
from datetime import timedelta
import pandas as pd
try:
import pyarrow
except ImportError:
print('Please > pip install pyarrow')
sys.exit(1)
# Local imports
from zat.log_to_dataframe import LogToDataFrame
# Helper method for temporarily converting timedelta to string
def convert_timedelta_to_str(df):
delta_columns = df.select_dtypes(include=['timedelta'])
for column in delta_columns:
df[column] = df[column].apply(tdelta_value_to_str)
return df
def tdelta_value_to_str(value):
if pd.isnull(value):
return '-' # Standard for Zeek null value
else:
return str(timedelta(seconds=value.total_seconds()))
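# For example (illustrative): a 90-second timedelta becomes '0:01:30',
# and a null (NaT) value becomes '-', Zeek's standard null marker.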
if __name__ == '__main__':
# Example to write Parquet file from a zeek log
# Collect args from the command line
parser = argparse.ArgumentParser()
parser.add_argument('zeek_log', type=str, help='Specify the zeek log input file')
parser.add_argument('parquet_file', type=str, help='Specify the parquet file to write out')
args, commands = parser.parse_known_args()
# Check for unknown args
if commands:
print('Unrecognized args: %s' % commands)
sys.exit(1)
# File may have a tilde in it
if args.zeek_log and args.parquet_file:
args.zeek_log = os.path.expanduser(args.zeek_log)
args.parquet_file = os.path.expanduser(args.parquet_file)
# Convert to dataframe and write out the parquet file
log_to_df = LogToDataFrame()
zeek_df = log_to_df.create_dataframe(args.zeek_log)
print('Dataframe Created: {:d} rows...'.format(len(zeek_df)))
# Check for any timedelta fields (see note above)
    zeek_df = convert_timedelta_to_str(zeek_df)
zeek_df.to_parquet(args.parquet_file, compression='snappy', use_deprecated_int96_timestamps=True)
    print('Complete: {:s} --> {:s}'.format(args.zeek_log, args.parquet_file))
import os
import sys
import time
import argparse
from pprint import pprint
# Third Party Imports
try:
import yara
except ImportError:
print('\nThis example needs yara. Please do a $pip install yara-python')
sys.exit(1)
# Local imports
from zat.utils import dir_watcher, signal_utils
def yara_match(file_path, rules):
"""Callback for a newly extracted file"""
print('New Extracted File: {:s}'.format(file_path))
    print('Matches:')
pprint(rules.match(file_path))
def my_exit():
"""Exit on Signal"""
print('Goodbye...')
sys.exit()
if __name__ == '__main__':
# Run a set of Yara Rule matches on Extracted Files
# Collect args from the command line
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--rule-index', type=str, required=True, help='Specify the yara rule index file (e.g. /full/path/to/yara/rules/index.yar)')
parser.add_argument('-e', '--extract-dir', type=str, required=True, help='Specify the Zeek extract_files directory (e.g. /full/path/to/zeek/extract_files)')
args, commands = parser.parse_known_args()
# Check for unknown args
if commands:
print('Unrecognized args: %s' % commands)
sys.exit(1)
# If no args just call help
if len(sys.argv) == 1:
parser.print_help()
print('\nNote: Download the yara repo and give the index file as an arg')
print('$ git clone https://github.com/Yara-Rules/rules')
print('$ python yara_matches -r /path/to/rules/index.yar -e /path/to/zeek/extract_files')
sys.exit(1)
# Sanity check that the args exist and are what we expect
if not os.path.isfile(args.rule_index):
print('--rule-index file not found.. should be /full/path/to/yara/rules/index.yar')
sys.exit(1)
if not os.path.isdir(args.extract_dir):
print('--extract-dir directory not found.. should be /full/path/to/zeek/extract_files')
sys.exit(1)
# Load/compile the yara rules
my_rules = yara.compile(args.rule_index)
# Create DirWatcher and start watching the Zeek extract_files directory
print('Watching Extract Files Directory: {:s}'.format(args.extract_dir))
dir_watcher.DirWatcher(args.extract_dir, callback=yara_match, rules=my_rules)
# Okay so just wait around for files to be dropped by Zeek or someone hits Ctrl-C
with signal_utils.signal_catcher(my_exit):
while True:
            time.sleep(.5)
import os
import sys
import argparse
import math
from collections import Counter
# Third Party Imports
import pandas as pd
from sklearn.ensemble import IsolationForest
from sklearn.cluster import KMeans
# Local imports
from zat import log_to_dataframe
from zat import dataframe_to_matrix
def entropy(string):
"""Compute entropy on the string"""
p, lns = Counter(string), float(len(string))
return -sum(count/lns * math.log(count/lns, 2) for count in p.values())
if __name__ == '__main__':
# Example to show the dataframe cache functionality on streaming data
pd.set_option('display.width', 1000)
# Collect args from the command line
parser = argparse.ArgumentParser()
parser.add_argument('zeek_log', type=str, help='Specify a zeek log to run ZeekLogReader test on')
args, commands = parser.parse_known_args()
# Check for unknown args
if commands:
print('Unrecognized args: %s' % commands)
sys.exit(1)
# File may have a tilde in it
if args.zeek_log:
args.zeek_log = os.path.expanduser(args.zeek_log)
# Sanity check either http or dns log
if 'http' in args.zeek_log:
log_type = 'http'
features = ['id.resp_p', 'method', 'resp_mime_types', 'request_body_len']
elif 'dns' in args.zeek_log:
log_type = 'dns'
features = ['Z', 'proto', 'qtype_name', 'query_length', 'answer_length', 'entropy']
else:
print('This example only works with Zeek with http.log or dns.log files..')
sys.exit(1)
# Create a Pandas dataframe from a Zeek log
try:
log_to_df = log_to_dataframe.LogToDataFrame()
zeek_df = log_to_df.create_dataframe(args.zeek_log)
print(zeek_df.head())
except IOError:
print('Could not open or parse the specified logfile: %s' % args.zeek_log)
sys.exit(1)
print('Read in {:d} Rows...'.format(len(zeek_df)))
# Using Pandas we can easily and efficiently compute additional data metrics
# Here we use the vectorized operations of Pandas/Numpy to compute query length
# We'll also compute entropy of the query
if log_type == 'dns':
zeek_df['query_length'] = zeek_df['query'].str.len()
zeek_df['answer_length'] = zeek_df['answers'].str.len()
zeek_df['entropy'] = zeek_df['query'].map(lambda x: entropy(x))
# Use the zat DataframeToMatrix class
to_matrix = dataframe_to_matrix.DataFrameToMatrix()
zeek_matrix = to_matrix.fit_transform(zeek_df[features])
print(zeek_matrix.shape)
# Train/fit and Predict anomalous instances using the Isolation Forest model
odd_clf = IsolationForest(contamination=0.2) # Marking 20% as odd
odd_clf.fit(zeek_matrix)
# Now we create a new dataframe using the prediction from our classifier
predictions = odd_clf.predict(zeek_matrix)
odd_df = zeek_df[features][predictions == -1]
display_df = zeek_df[predictions == -1].copy()
# Now we're going to explore our odd observations with help from KMeans
odd_matrix = to_matrix.fit_transform(odd_df)
num_clusters = min(len(odd_df), 4) # 4 clusters unless we have less than 4 observations
display_df['cluster'] = KMeans(n_clusters=num_clusters).fit_predict(odd_matrix)
print(odd_matrix.shape)
# Now group the dataframe by cluster
if log_type == 'dns':
features += ['query']
else:
features += ['host']
cluster_groups = display_df[features+['cluster']].groupby('cluster')
# Now print out the details for each cluster
print('<<< Outliers Detected! >>>')
for key, group in cluster_groups:
print('\nCluster {:d}: {:d} observations'.format(key, len(group)))
        print(group.head())
import os
import sys
import argparse
from pprint import pprint
import pickle
# Third Party Imports
try:
import tldextract
except ImportError:
print('\nThis example needs tldextract. Please do a $pip install tldextract and rerun this example')
sys.exit(1)
# Local imports
from zat import zeek_log_reader
from zat.utils import vt_query, signal_utils
def save_vtq():
"""Exit on Signal"""
global vtq
print('Saving VirusTotal Query Cache...')
pickle.dump(vtq, open('vtq.pkl', 'wb'), protocol=pickle.HIGHEST_PROTOCOL)
sys.exit()
if __name__ == '__main__':
# Risky DNS/VT Query application
global vtq
# Collect args from the command line
parser = argparse.ArgumentParser()
parser.add_argument('zeek_log', type=str, help='Specify a zeek log to run ZeekLogReader test on')
args, commands = parser.parse_known_args()
# Check for unknown args
if commands:
print('Unrecognized args: %s' % commands)
sys.exit(1)
# Sanity check that this is a dns log
if 'dns' not in args.zeek_log:
print('This example only works with Zeek dns.log files..')
sys.exit(1)
# File may have a tilde in it
if args.zeek_log:
args.zeek_log = os.path.expanduser(args.zeek_log)
# See if we have a serialized VirusTotal Query Class.
# If we do not have one we'll create a new one
try:
vtq = pickle.load(open('vtq.pkl', 'rb'))
print('Opening VirusTotal Query Cache (cache_size={:d})...'.format(vtq.size))
except IOError:
vtq = vt_query.VTQuery(max_cache_time=60*24*7) # One week cache
# See our 'Risky Domains' Notebook for the analysis and
# statistical methods used to compute this risky set of TLDs
risky_tlds = set(['info', 'tk', 'xyz', 'online', 'club', 'ru', 'website', 'in', 'ws',
'top', 'site', 'work', 'biz', 'name', 'tech', 'loan', 'win', 'pro'])
# Launch long lived process with signal catcher
with signal_utils.signal_catcher(save_vtq):
# Run the zeek reader on the dns.log file looking for risky TLDs
reader = zeek_log_reader.ZeekLogReader(args.zeek_log)
for row in reader.readrows():
# Pull out the TLD
query = row['query']
tld = tldextract.extract(query).suffix
# Check if the TLD is in the risky group
if tld in risky_tlds:
# Show the risky dns
print('Making VT query for {:s}...'.format(query))
# Make the VT query
results = vtq.query_url(query)
if results.get('positives', 0) >= 1: # At least one hit (change this higher if you want)
print('\nRisky Domain DNS Query Found')
print('From: {:s} To: {:s} QType: {:s} RCode: {:s}'.format(row['id.orig_h'],
row['id.resp_h'],
row['qtype_name'],
row['rcode_name']))
pprint(results)
# Save the Virus Total Query
        save_vtq()
import sys
import argparse
from pprint import pprint
import pickle
import json
from kafka import KafkaConsumer
from kafka.errors import NoBrokersAvailable
# Third Party Imports
try:
import tldextract
except ImportError:
print('\nThis example needs tldextract. Please do a $pip install tldextract and rerun this example')
sys.exit(1)
# Local imports
from zat.utils import vt_query, signal_utils
def save_vtq():
"""Exit on Signal"""
global vtq
print('Saving VirusTotal Query Cache...')
pickle.dump(vtq, open('vtq.pkl', 'wb'), protocol=pickle.HIGHEST_PROTOCOL)
sys.exit()
if __name__ == '__main__':
# Risky DNS/VT Query application
global vtq
# Collect args from the command line
parser = argparse.ArgumentParser()
parser.add_argument('--server', type=str, default='localhost:9092',
help='Specify the Kafka Server (default: localhost:9092)')
args, commands = parser.parse_known_args()
# Check for unknown args
if commands:
print('Unrecognized args: %s' % commands)
sys.exit(1)
# First we create a Kafka Consumer
kserver = args.server
try:
consumer = KafkaConsumer('dns', bootstrap_servers=[kserver],
value_deserializer=lambda x: json.loads(x.decode('utf-8')))
except NoBrokersAvailable:
print('Could not connect to Kafka server: {:s}'.format(args.server))
sys.exit(-1)
# See if we have a serialized VirusTotal Query Class.
# If we do not have one we'll create a new one
try:
vtq = pickle.load(open('vtq.pkl', 'rb'))
print('Opening VirusTotal Query Cache (cache_size={:d})...'.format(vtq.size))
except IOError:
vtq = vt_query.VTQuery(max_cache_time=60*24*7) # One week cache
# See our 'Risky Domains' Notebook for the analysis and
# statistical methods used to compute this risky set of TLDs
risky_tlds = set(['info', 'tk', 'xyz', 'online', 'club', 'ru', 'website', 'in', 'ws',
'top', 'site', 'work', 'biz', 'name', 'tech', 'loan', 'win', 'pro'])
# Launch long lived process with signal catcher
with signal_utils.signal_catcher(save_vtq):
# Now lets process our Kafka 'dns' Messages
for message in consumer:
dns_message = message.value
# Pull out the TLD
query = dns_message.get('query')
tld = tldextract.extract(query).suffix if query else None
# Check if the TLD is in the risky group
if tld in risky_tlds:
print('\n'+query)
# Make the query with the full query
results = vtq.query_url(query)
if results.get('positives', 0) > 3: # At least four hits
print('Risky Domain DNS Query Found')
print('From: {:s} To: {:s} QType: {:s} RCode: {:s}'.format(dns_message['id.orig_h'],
dns_message['id.resp_h'],
dns_message['qtype_name'],
dns_message['rcode_name']))
                    pprint(results)
import os
import sys
import argparse
from pprint import pprint
# Local imports
from zat import zeek_log_reader
from zat.utils import vt_query
if __name__ == '__main__':
# Example to check all the x509 Certs from 'Let's Encrypt' for potential phishing/malicious sites
# Collect args from the command line
parser = argparse.ArgumentParser()
parser.add_argument('zeek_log', type=str, help='Specify a zeek log to run ZeekLogReader test on')
args, commands = parser.parse_known_args()
# Check for unknown args
if commands:
print('Unrecognized args: %s' % commands)
sys.exit(1)
    # Sanity check that this is an x509 log
if 'x509' not in args.zeek_log:
print('This example only works with Zeek x509.log files..')
sys.exit(1)
# File may have a tilde in it
if args.zeek_log:
args.zeek_log = os.path.expanduser(args.zeek_log)
# Create a VirusTotal Query Class
vtq = vt_query.VTQuery()
# These domains may be spoofed with a certificate issued by 'Let's Encrypt'
spoofed_domains = set(['paypal', 'gmail', 'google', 'apple','ebay', 'amazon'])
# Run the zeek reader on the x509.log file looking for spoofed domains
reader = zeek_log_reader.ZeekLogReader(args.zeek_log, tail=True)
for row in reader.readrows():
# Pull out the Certificate Issuer
issuer = row['certificate.issuer']
if "Let's Encrypt" in issuer:
# Check if the certificate subject has any spoofed domains
subject = row['certificate.subject']
if any([domain in subject for domain in spoofed_domains]):
print('\n<<< Suspicious Certificate Found >>>')
pprint(row)
# Make a Virus Total query with the spoofed domain (just for fun)
query_domain = subject[3:] # Just chopping off the 'CN=' part
results = vtq.query_url(query_domain)
if results.get('positives', 0) >= 2: # At least two hits
print('\n<<< Virus Total Query >>>')
                    pprint(results)
import base64

from cryptography import x509
from cryptography.x509.oid import NameOID, ObjectIdentifier
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.hazmat.primitives.asymmetric import ec
class GenerateCSR:
def __init__(self):
"""
Initializes the GenerateCSR class.
"""
self.csr_type = None
self.C = None
self.N = None
self.O = None
self.OU = None
self.SN = None
self.UID = None
self.TITLE = None
self.CATEGORY = None
self.ADDRESS = None
def generate_key(self):
private_key = ec.generate_private_key(ec.SECP256K1(), backend=default_backend())
private_key_pem = private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL,
encryption_algorithm=serialization.NoEncryption()
)
return private_key_pem
def create_custom_extension(self,oid_string, value):
oid = ObjectIdentifier(oid_string)
ext = x509.extensions.UnrecognizedExtension(oid, value)
return ext
    def generate_csr(self, csr_type, C, CN, O, OU, SN, UID, TITLE, CATEGORY, ADDRESS):
        """Generate an EC private key and a base64-encoded CSR for the given ZATCA environment."""
        # Pick the environment-specific code-signing marker
        if csr_type == "sandbox":
            customoid = b"..TESTZATCA-Code-Signing"
        elif csr_type == "simulation":
            customoid = b"..PREZATCA-Code-Signing"
        else:
            customoid = b"..ZATCA-Code-Signing"
private_key_pem = self.generate_key()
private_key = serialization.load_pem_private_key(private_key_pem, password=None, backend=default_backend())
custom_oid_string = "1.3.6.1.4.1.311.20.2"
custom_value = customoid
custom_extension = self.create_custom_extension(custom_oid_string, custom_value)
dn = x509.Name([
x509.NameAttribute(NameOID.COMMON_NAME, CN),
x509.NameAttribute(NameOID.COUNTRY_NAME, C),
x509.NameAttribute(NameOID.ORGANIZATION_NAME, O),
x509.NameAttribute(NameOID.ORGANIZATIONAL_UNIT_NAME, OU),
])
alt_name = x509.SubjectAlternativeName({
x509.DirectoryName(x509.Name([
x509.NameAttribute(NameOID.SURNAME, SN),
x509.NameAttribute(NameOID.USER_ID, UID),
x509.NameAttribute(NameOID.TITLE, TITLE),
x509.NameAttribute(NameOID.BUSINESS_CATEGORY, CATEGORY + "/registeredAddress=" + ADDRESS),
])),
})
csr = (
x509.CertificateSigningRequestBuilder()
.subject_name(dn)
.add_extension(custom_extension, critical=False)
.add_extension(alt_name, critical=False)
.sign(private_key, hashes.SHA256(), backend=default_backend())
)
mycsr = csr.public_bytes(serialization.Encoding.PEM)
base64csr = base64.b64encode(mycsr)
encoded_string = base64csr.decode('utf-8')
        return private_key_pem, encoded_string
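# A minimal usage sketch (hedged): every subject field value below is an
# illustrative placeholder, not real ZATCA registration data.
if __name__ == '__main__':
    generator = GenerateCSR()
    key_pem, csr_b64 = generator.generate_csr(
        csr_type="sandbox", C="SA", CN="example-device", O="Example Co",
        OU="IT", SN="1-example|2-model|3-serial", UID="310000000000003",
        TITLE="1100", CATEGORY="Retail", ADDRESS="Riyadh")
    print(key_pem.decode('utf-8'))
    print(csr_b64)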
[](https://travis-ci.org/ibtehaz-shawon/zathura)
```
______ ___________
___ /______ __ /___ /_____ ______________ _
__ / _ __ `/ __/_ __ \ / / /_ ___/ __ `/
_ /__/ /_/ // /_ _ / / / /_/ /_ / / /_/ /
/____/\__,_/ \__/ /_/ /_/\__,_/ /_/ \__,_/
```
# Zathura
Zathura is a sqlite3-based logger. Currently, you can view your error and debug logs from the terminal only; however, I am working on a webview. Do keep in mind that you will have to handle authentication for your own project.
-------------------------------
## How to install
<ul> <pre> pip install zathura </pre> </ul>
This will install the latest version on your virtualenv
-------------------------------
## Code Preview
<p>
1. You can just import Zathura from ZathuraProject.zathura and call the insert_error_log function to start logging your errors right away.
<pre>
from ZathuraProject.zathura import Zathura
zathura = Zathura()
zathura.insert_error_log(user_id, error_name, long_error_description)
</pre>
</p>
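<p>
For example, a common pattern (a sketch; risky_operation below is illustrative) is to log from an except block:
<pre>
try:
    risky_operation()
except Exception as error:
    zathura.insert_error_log(user_id, type(error).__name__, str(error))
</pre>
</p>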
<p>
2. View your logs from terminal
<pre>
zathura help
</pre>
This will list all the available commands for you. Choose from them to see how you want to sort your logs.
<pre>
usage: Zathura COMMAND [args] ...
For example: { Zathura v } will show the current version of this package.
-----------------------------------------------------
-----------------------------------------------------
All commands:
[[ v ]] : Show the current version of this package
[[ developer ]] : Search based on developer's name. You can filter out the results based on date and descending order
[[ debug_origin ]] : Shows debug messages based on point of origin. Point of origin is the class/function from where you are adding a message in sqlite.
[[ error_user ]] : Shows errors generated under the given username
[[ all_debug ]] : Shows all debug messages
[[ error_name ]] : Shows errors based on an error name.
[[ date ]] : Shows errors that occurred between two specific dates.
[[ all_error ]] : Shows all error messages
[[ origin ]] : Shows errors generated at the given point of origin
[[ mark_resolve ]] : Given an error name and point of origin, all matching errors logged in the database are marked resolved.
[[ delete_debug ]] : Deletes the last seven days of debug messages from the database. It is useful if you don't want to clutter the database with unnecessary debug info.
[[ help ]] : Shows all the commands necessary to run this package from terminal
-----------------------------------------------------
-----------------------------------------------------
</pre>
For example, to see all errors without sorting, you can just type
<pre> zathura all_error </pre>
</p>
-------------------------------
<h3>
There are without doubt some bugs and room for improvement in this project. I wrote zathura to help me with my projects. If you face any bugs or want more modules, please open an issue. If you want to contribute, please clone my project and create a pull request for me.
</h3>
<p> Thank you. Ibtehaz </p>
-------------------------------
## Run Zathura codebase on your computer
<ol>
<li>
Clone the project and change your directory into it.
</li>
<li>
pip install -r requirements.txt
</li> <li>
pip install --upgrade setuptools wheel
</li> <li>
python3 setup.py sdist bdist_wheel
<br>
This will create a build file for you from the latest version.
</li> <li>
pip install . <br>
To install and check the local build on your machine
</li>
</ol>
from datetime import datetime
from peewee import ModelSelect
from ZathuraProject.sqlite_definition import (DebugLog, ErrorLog, close_db,
database_connection,
database_start)
from ZathuraProject.utility import Utility
class Fetcher():
"""
    This class fetches all the data from the sqlite database
"""
empty_result = {'error': True}
def __error_obj_to_dict(self, error_log_object: ErrorLog):
"""
        # generates and returns a dictionary from an ErrorLog object
        error_log_object: ErrorLog an ErrorLog object
"""
return {
Utility.Tag_User: error_log_object.user,
Utility.Tag_Error_Name: error_log_object.error_name,
Utility.Tag_Error_Description: error_log_object.error_description,
Utility.Tag_Origin: error_log_object.point_of_origin,
Utility.Tag_Logged_At: Utility.milli_to_datetime(error_log_object.logged_at),
Utility.Tag_Logged_At_Unix: error_log_object.logged_at,
Utility.Tag_Is_Resolved: Utility.Tag_Text_Resolved if error_log_object.is_resolved else Utility.Tag_Text_Not_Resolved,
Utility.Tag_Resolved_At: error_log_object.resolved_at if error_log_object.resolved_at is None else Utility.milli_to_datetime(error_log_object.resolved_at),
Utility.Tag_Resolved_At_Unix: error_log_object.resolved_at,
Utility.Tag_Warning_Level: self.__get_warning_level_in_text(error_log_object.warning_level),
}
@staticmethod
def __debug_obj_to_dict(debug_log_object: DebugLog):
"""
# generates & returns a dictionary from a DebugLog object.
debug_log_object: DebugLog a DebugLog object
"""
return {
Utility.Tag_User: debug_log_object.user,
Utility.Tag_Message: debug_log_object.message_data,
Utility.Tag_Origin: debug_log_object.point_of_origin,
Utility.Tag_Logged_At: Utility.milli_to_datetime(debug_log_object.logged_at),
Utility.Tag_Logged_At_Unix: debug_log_object.logged_at,
}
def __generate_error_return_payload(self, log_payload: ModelSelect):
"""
# generates error payload for return
"""
all_error_logs = list()
for err in log_payload:
all_error_logs.append(self.__error_obj_to_dict(err))
return {Utility.Tag_Total: len(all_error_logs), Utility.Tag_Log: all_error_logs}
@staticmethod
def __get_warning_level_in_text(warning_level: int):
_ = {
'0': 'NOTSET',
'1': 'DEBUG',
'2': 'INFO',
'3': 'WARNING',
'4': 'ERROR',
'5': 'CRITICAL'
}
return _[str(warning_level)]
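    # For example (illustrative): __get_warning_level_in_text(4) returns 'ERROR'.
    # Note that the lookup raises a KeyError for levels outside 0-5.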
def __generate_verbose_return_payload(self, debug_payload: ModelSelect):
"""
# generates debug payload for return
"""
all_logs = list()
for log in debug_payload:
all_logs.append(self.__debug_obj_to_dict(log))
return {Utility.Tag_Total: len(all_logs), Utility.Tag_Log: all_logs}
def get_all_error_log(self, show_all: bool = False, desc: bool = False):
"""
        # returns all rows of the error_log table as a list; by default only unresolved ones
        show_all: bool includes resolved errors as well when True
        desc: bool sorts the results in descending order of logged_at when True
"""
database_connection() # initiate database connection before doing anything.
if show_all:
if desc:
err_logs = ErrorLog.select().order_by(ErrorLog.logged_at.desc())
else:
err_logs = ErrorLog.select()
else:
if desc:
err_logs = ErrorLog.select().where(ErrorLog.is_resolved !=
True).order_by(ErrorLog.logged_at.desc())
else:
err_logs = ErrorLog.select().where(ErrorLog.is_resolved != True)
close_db()
return self.__generate_error_return_payload(err_logs)
def get_all_debug_log(self):
"""
        # returns all debug_log table data as a list
"""
database_connection() # initiate database connection before doing anything.
debug_logs = DebugLog.select()
close_db()
return self.__generate_verbose_return_payload(debug_logs)
def get_error_by_user(self, user: str, limit: int = 0, desc: bool = False, first_limit: datetime = None, last_limit: datetime = None):
"""
        # returns errors generated for a user, ordered by when the error was logged.
        # username is mandatory in this case.
        # ascending order is the default.
        user: str error reports generated under a particular user
        limit: int limits the number of errors returned
        desc: bool whether to show the result in ascending or descending order
        first_limit: datetime shows results logged at or after this time
        last_limit: datetime shows results logged at or before this time (inclusive)
"""
if len(user) == 0:
            result = dict(self.empty_result)  # copy so the shared class attribute is not mutated
result[Utility.Tag_error_message] = "Username cannot be empty for this function!"
return result
user = user.strip()
# initiate database connection before doing anything.
database_connection()
if first_limit is None and last_limit is None:
if limit != 0:
if desc:
# descending order with limit
errors = ErrorLog.select().where(ErrorLog.user == user).order_by(
ErrorLog.logged_at.desc()).limit(limit)
else:
# ascending order with limit
errors = ErrorLog.select().where(ErrorLog.user == user).limit(limit)
else:
if desc:
# descending order without limit
errors = ErrorLog.select().where(
ErrorLog.user == user).order_by(ErrorLog.logged_at.desc())
else:
# ascending order without limit
errors = ErrorLog.select().where(ErrorLog.user == user)
else:
# filter by datetime. and same limit order
first_limit = Utility.unix_time_millis(first_limit)
if last_limit is None:
last_limit = Utility.current_time_in_milli()
else:
last_limit = Utility.unix_time_millis(last_limit)
param_user = (ErrorLog.user == user)
param_date_filter_one = (ErrorLog.logged_at >= first_limit)
param_date_filter_two = (ErrorLog.logged_at <= last_limit)
if limit != 0:
if desc:
# descending order with limit date filter included
errors = ErrorLog.select().where(param_user & param_date_filter_one &
param_date_filter_two).order_by(ErrorLog.logged_at.desc()).limit(limit)
else:
# ascending order without limit date filter included
errors = ErrorLog.select().where(param_user & param_date_filter_one &
param_date_filter_two).limit(limit)
else:
if desc:
# descending order without limit date filter included
errors = ErrorLog.select().where(param_user & param_date_filter_one &
param_date_filter_two).order_by(ErrorLog.logged_at.desc())
else:
# ascending order without limit date filter included
errors = ErrorLog.select().where(
param_user & param_date_filter_one & param_date_filter_two)
close_db()
return self.__generate_error_return_payload(errors)
def get_error_by_date_limit(self, beginning_limit: datetime, ending_limit: datetime = None, limit: int = 0, desc: bool = False):
"""
# get reports under a date limit from all users
beginning_limit: datetime starting time, inclusive
        ending_limit: datetime ending time, inclusive
        limit: int limits the number of search results.
desc: bool whether to show the result in descending order
"""
if beginning_limit is None:
            result = dict(self.empty_result)  # copy so the shared class attribute is not mutated
            result[Utility.Tag_error_message] = "Please provide the first date; it marks the start of the search window."
return result
first_limit = Utility.unix_time_millis(beginning_limit)
if ending_limit is None:
last_limit = Utility.current_time_in_milli()
else:
last_limit = Utility.unix_time_millis(ending_limit)
# initiate database connection before doing anything.
database_connection()
param_filter_one = (ErrorLog.logged_at >= first_limit)
param_filter_two = (ErrorLog.logged_at <= last_limit)
if limit != 0:
if desc:
# search under a limit in descending order
errors = ErrorLog.select().where(param_filter_one & param_filter_two).order_by(
ErrorLog.logged_at.desc()).limit(limit)
else:
# search under a limit in ascending order
errors = ErrorLog.select().where(param_filter_one & param_filter_two).limit(limit)
else:
if desc:
# search without limit in descending order
errors = ErrorLog.select().where(
param_filter_one & param_filter_two).order_by(ErrorLog.logged_at.desc())
else:
# search without limit in ascending order
errors = ErrorLog.select().where(param_filter_one & param_filter_two)
close_db()
return self.__generate_error_return_payload(errors)
# def search by error_name
def get_error_by_error_name(self, error_name: str, first_limit: datetime = None, last_limit: datetime = None, limit: int = 0, desc: bool = False):
"""
# searches errors by error name. filters will be applied based on parameter
        error_name: the name of the error you want to search for.
        first_limit: first date limit to be applied
        last_limit: last date limit to be applied (inclusive)
        limit: limits the number of rows in the search result.
desc: sort the result in descending order or ascending order. (By default, ascending order)
"""
if error_name is None or len(error_name) == 0:
result = self.empty_result
result[Utility.Tag_error_message] = "Error name cannot be empty on this search"
return result
error_name = error_name.strip()
error_name = error_name.lower()
# initiate database connection before doing anything.
database_connection()
if first_limit is None and last_limit is None:
if limit != 0:
if desc:
# search with limit in descending order under no date limit
errors = ErrorLog.select().where(ErrorLog.error_name == error_name).order_by(
ErrorLog.logged_at.desc()).limit(limit)
else:
# search with limit in ascending order under no date limit
errors = ErrorLog.select().where(ErrorLog.error_name == error_name).limit(limit)
else:
if desc:
# search without limit in descending order under no date limit
errors = ErrorLog.select().where(ErrorLog.error_name ==
error_name).order_by(ErrorLog.logged_at.desc())
else:
# search without limit in ascending order under no date limit
errors = ErrorLog.select().where(ErrorLog.error_name == error_name)
else:
# filter under date limit
if first_limit is not None:
first_limit = Utility.unix_time_millis(first_limit)
else:
first_limit = Utility.current_time_in_milli()
if last_limit is None:
last_limit = Utility.current_time_in_milli()
else:
last_limit = Utility.unix_time_millis(last_limit)
param_filter_one = (ErrorLog.error_name == error_name)
param_filter_two = (ErrorLog.logged_at >= first_limit)
param_filter_three = (ErrorLog.logged_at <= last_limit)
if limit != 0:
if desc:
# search with limit in descending order under date limit
errors = ErrorLog.select().where(param_filter_one & param_filter_two &
param_filter_three).order_by(ErrorLog.logged_at.desc()).limit(limit)
else:
# search with limit in ascending order under date limit
errors = ErrorLog.select().where(param_filter_one & param_filter_two &
param_filter_three).limit(limit)
else:
if desc:
# search without limit in descending order under date limit
errors = ErrorLog.select().where(param_filter_one & param_filter_two &
param_filter_three).order_by(ErrorLog.logged_at.desc())
else:
# search without limit in ascending order under date limit
errors = ErrorLog.select().where(
param_filter_one & param_filter_two & param_filter_three)
close_db()
return self.__generate_error_return_payload(errors)
def get_error_by_origin(self, origin: str, first_limit: datetime = None, last_limit: datetime = None, limit: int = 0, desc: bool = False):
"""
# searches errors by point of origin, i.e. the function or class the error originated from.
# You should still catch the error in an except block and register it manually.
origin: str name of the function or class
first_limit: datetime first date limit for filtering purposes
last_limit: datetime last date limit to filter out, inclusive
limit: int limits the amount of returned results.
desc: bool sort the result in descending order (ascending by default)
"""
if origin is not None and len(origin) != 0:
# Point of origin can be None; `and` (not `or`) prevents len(None) from raising
origin = origin.strip()
origin = origin.lower()
# initiate database connection before doing anything.
database_connection()
if first_limit is None and last_limit is None:
if limit != 0:
# search with limit and no date limit applied
if desc:
# show result in descending order with limit but no date filter
errors = ErrorLog.select().where(ErrorLog.point_of_origin == origin).order_by(
ErrorLog.logged_at.desc()).limit(limit)
else:
# show result in ascending order with limit but no date filter
errors = ErrorLog.select().where(ErrorLog.point_of_origin == origin).limit(limit)
else:
if desc:
# show result in descending order without limit but no date filter
errors = ErrorLog.select().where(ErrorLog.point_of_origin ==
origin).order_by(ErrorLog.logged_at.desc())
else:
# show result in ascending order, without limit and no date filter
errors = ErrorLog.select().where(ErrorLog.point_of_origin == origin)
else:
first_limit = Utility.unix_time_millis(first_limit)
if last_limit is None:
last_limit = Utility.current_time_in_milli()
else:
last_limit = Utility.unix_time_millis(last_limit)
filter_param_one = (ErrorLog.point_of_origin == origin)
filter_param_two = (ErrorLog.logged_at >= first_limit)
filter_param_three = (ErrorLog.logged_at <= last_limit)
if limit != 0:
# search with limit and date filter applied
if desc:
# show result in descending order with limit WITH date filter
errors = ErrorLog.select().where(filter_param_one & filter_param_two &
filter_param_three).order_by(ErrorLog.logged_at.desc()).limit(limit)
else:
# show result in ascending order with limit WITH date filter
errors = ErrorLog.select().where(filter_param_one & filter_param_two &
filter_param_three).limit(limit)
else:
if desc:
# show result in descending order without limit WITH date filter
errors = ErrorLog.select().where(filter_param_one & filter_param_two &
filter_param_three).order_by(ErrorLog.logged_at.desc())
else:
# show result in ascending order without limit, WITH date filter
errors = ErrorLog.select().where(
filter_param_one & filter_param_two & filter_param_three)
close_db()
return self.__generate_error_return_payload(errors)
def get_debug_by_origin(self, origin: str = '', first_limit: datetime = None, last_limit: datetime = None):
"""
# returns all debug data, filtered by origin if needed.
first_limit: datetime filters out data before this limit
last_limit: datetime filters out data after this limit
origin: str point of origin of any debug msg that needs to be on this list.
"""
if origin is not None and len(origin) > 0:
origin = origin.strip()
origin = origin.lower()
else:
return self.get_all_debug_log() # blind send everything.
# initiate database connection before doing anything.
database_connection()
filter_param_one = (DebugLog.point_of_origin == origin)
if first_limit is None and last_limit is None:
debugs = DebugLog.select().where(filter_param_one)
else:
first_limit = Utility.unix_time_millis(first_limit)
if last_limit is None:
last_limit = Utility.current_time_in_milli()
else:
last_limit = Utility.unix_time_millis(last_limit)
filter_param_two = (DebugLog.logged_at >= first_limit)
filter_param_three = (DebugLog.logged_at <= last_limit)
debugs = DebugLog.select().where(
filter_param_one & filter_param_two & filter_param_three)
close_db()
return self.__generate_verbose_return_payload(debugs)
def get_debug_by_developers(self, developers_name: str = '', first_limit: datetime = None, last_limit: datetime = None):
"""
# returns all debug data, filtered by developer if needed.
first_limit: datetime filters out data before this limit
last_limit: datetime filters out data after this limit
developers_name: str who wrote the debug message; useful for tracking down the author. Could be None or an empty string.
"""
if developers_name is None or len(developers_name) == 0:  # check None first so len() cannot raise
return self.get_all_debug_log()
# initiate database connection before doing anything.
database_connection()
if first_limit is None and last_limit is None:
debugs = DebugLog.select().where(DebugLog.user == developers_name)
else:
first_limit = Utility.unix_time_millis(first_limit)
if last_limit is None:
last_limit = Utility.current_time_in_milli()
else:
last_limit = Utility.unix_time_millis(last_limit)
debugs = DebugLog.select().where((DebugLog.user == developers_name) & (
DebugLog.logged_at >= first_limit) & (DebugLog.logged_at <= last_limit))
close_db()
return self.__generate_verbose_return_payload(debugs)
def mark_resolve(self, error_name: str, origin: str):
"""
# Marks matching errors as resolved.
error_name: str the error name you want to mark as resolved.
origin: str point of origin of this particular error.
Both parameters are required.
"""
result = self.empty_result
if error_name is None or len(error_name) == 0:
result[Utility.Tag_error_message] = "missing error name!"
return result
if origin is None or len(origin) == 0:
result[Utility.Tag_error_message] = 'missing error origin!'
return result
# initiate database connection before doing anything.
database_connection()
error_name = error_name.strip().lower()
origin = origin.strip().lower()
filter_one = (ErrorLog.error_name == error_name)
filter_two = (ErrorLog.point_of_origin == origin)
filter_three = (ErrorLog.is_resolved != True)
query = (ErrorLog.update({ErrorLog.is_resolved: True, ErrorLog.resolved_at: Utility.current_time_in_milli()}).where(
filter_one & filter_two & filter_three))
result = query.execute()
close_db()
return result
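# Editor's sketch (hedged, not part of the original module): marking every unresolved
# ZeroDivisionError raised inside a function named `divide` as resolved; both arguments are
# normalized to lowercase internally and the number of updated rows is returned.
#
# fetcher = Fetcher()
# updated = fetcher.mark_resolve('ZeroDivisionError', 'divide')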
def delete_old_debug(self):
from datetime import timedelta
# initiate database connection before doing anything.
database_connection()
limit = (datetime.now() - timedelta(days=7)).replace(hour=0,
minute=0, second=0, microsecond=0)
today = Utility.unix_time_millis(limit)
delete_stuff = DebugLog.delete().where(DebugLog.logged_at < today)
_ = delete_stuff.execute()
close_db() | zathura | /zathura-0.0.6.0b1.tar.gz/zathura-0.0.6.0b1/ZathuraProject/fetcher.py | fetcher.py |
import inspect
import logging
from datetime import datetime
from peewee import ModelSelect
from ZathuraProject.bugtracker import send_data_to_bugtracker, send_verbose_log_to_bugtracker
from ZathuraProject.sqlite_definition import (DebugLog, ErrorLog, close_db,
database_connection,
database_start)
from ZathuraProject.utility import Utility
class Zathura:
def __init__(self, bugtracker_url: str = None, project_token: str = None):
self.empty_result = {'error': True}
self.verbose_url = None
self.error_url = None
self.project_token = project_token
if bugtracker_url is not None:
if bugtracker_url[-1:] != '/':
bugtracker_url += '/'
self.error_url = bugtracker_url + "project/error/log/"
self.verbose_url = bugtracker_url + "project/verbose/log/"
def send_error_log_bugtracker(self, error_name, error_description, user=None):
"""
Sends error log data to the bugtracker website.
:returns: bool
"""
point_of_origin = (inspect.stack()[1].function).lower()
if self.error_url is not None:
return send_data_to_bugtracker(
name=error_name,
description=error_description,
origin=point_of_origin,
token=self.project_token,
url=self.error_url,
user=user
)
return False
def send_verbose_log_bugtracker(self, description=None, user=None):
"""
Sends the verbose log to bugtracker website.
:returns: bool
"""
point_of_origin = (inspect.stack()[1].function).lower()
if self.verbose_url is not None:
return send_verbose_log_to_bugtracker(
origin=point_of_origin,
description=description,
project_token=self.project_token,
bugtracker_url=self.verbose_url,
user=user
)
return False
def insert_error_log(self, user, error_name, error_description, warning: int = 0):
"""
Inserts error log on a sqlite db
"""
if error_name is not None and error_description is not None:
from uuid import uuid4
try:
# initiate database connection before doing anything.
database_connection()
point_of_origin = (inspect.stack()[1].function).lower()
error_log = ErrorLog(_id=str(uuid4()),
user=user,
error_name=error_name.lower(),
error_description=error_description,
point_of_origin=point_of_origin,
warning_level=warning)
return error_log.save()  # the number of modified rows is returned (always 1 here)
except ValueError:
pass
except SyntaxError:
pass
finally:
close_db()
return 0
def insert_debug_log(self, message_data: str, developer: str = 'zathura'):
"""
# Inserts debug and verbose logs. Logs are purged after a week.
# It's not going to print out anything right now.
developer: the person logging this message. It will be easier to find if you name yourself.
message_data: what you want to log
(the point of origin is detected automatically from the calling function)
"""
if message_data is not None:
from uuid import uuid4
# initiate database connection before doing anything.
database_connection()
origin = (inspect.stack()[1].function).lower()
debug_log = DebugLog(_id=str(uuid4()), user=developer,
message_data=message_data, point_of_origin=origin)
# save first, then close - calling save() after close_db() would fail
rows_saved = debug_log.save()
close_db()
return rows_saved
else:
return 0 | zathura | /zathura-0.0.6.0b1.tar.gz/zathura-0.0.6.0b1/ZathuraProject/zathura.py | zathura.py |
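# Editor's sketch (hedged, not part of the original module): typical local usage of the class
# above. The point of origin is derived automatically from the calling function via inspect.
#
# zathura = Zathura()
# zathura.insert_error_log('some_user', 'ValueError', 'invalid input supplied')
# zathura.insert_debug_log('payload accepted', developer='alice')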
import os
import sys
import time
from datetime import datetime
from uuid import uuid4
import pkg_resources
import pyfiglet
from ZathuraProject.utility import Utility
from ZathuraProject.zathura import Zathura
from ZathuraProject.fetcher import Fetcher
CURRENT_VERSION = "v0.0.5 beta"
known_commands = ('v', 'developer', 'debug_origin', 'error_user', 'all_debug',
'error_name', 'date', 'all_error', 'origin', 'mark_resolve', 'delete_debug', 'help',)
def create_app():
if sys.version_info < (3, 0, 0):
print("Zathura needs python3.x to perform normally!")
sys.exit(255)
pyfiglet_ascii() # spits out zathura in speed font
if len(sys.argv) > 1:
for args in sys.argv[1:]:
if args in known_commands:
print("Current argument: {}".format(args))
fetcher = Fetcher()
if args == 'v':
print("*#$" * 20)
print("Current version: {}".format(CURRENT_VERSION))
print("*#$" * 20)
elif args == "all_error":
filter_resolved = input(
"Press 1 to see all errors, including resolved, any key for others: ")
# filters data in descending order based on logged_at time.
desc = ask_filter_and_order(ask_limit=False)
if filter_resolved == '1':
print_stuff_nice_and_good(fetcher.get_all_error_log(
show_all=True, desc=desc), "All Error logs")
else:
print_stuff_nice_and_good(
fetcher.get_all_error_log(desc=desc), "All Error logs")
elif args == "all_debug":
print_stuff_nice_and_good(
fetcher.get_all_debug_log(), "All Debug messages")
elif args == "error_name":
error_name = input("Enter the error_name: ")
generated_after, generated_before = ask_date()
desc, limit = ask_filter_and_order()
result = fetcher.get_error_by_error_name(
error_name, generated_after, generated_before, limit, desc)
print_stuff_nice_and_good(
result, "Errors based on error name", generated_after, generated_before, limit, desc, error_name)
elif args == "user":
user = input("Enter a username: ")
generated_after, generated_before = ask_date()
desc, limit = ask_filter_and_order()
logs = fetcher.get_error_by_user(
user, limit, desc, generated_after, generated_before)
print_stuff_nice_and_good(
logs, "Errors based on user", generated_after, generated_before, limit, desc, user)
elif args == 'origin':
origin = input("Enter point of origin: ")
generated_after, generated_before = ask_date()
desc, limit = ask_filter_and_order()
logs = fetcher.get_error_by_origin(
origin, generated_after, generated_before, limit, desc)
print_stuff_nice_and_good(
logs, "Errors based on origin function/class", generated_after, generated_before, limit, desc, origin)
elif args == "date":
generated_after, generated_before = ask_date()
desc, limit = ask_filter_and_order()
result = fetcher.get_error_by_date_limit(
generated_after, generated_before, limit, desc)
print_stuff_nice_and_good(
result, "Errors between a date frame", generated_after, generated_before, limit, desc)
elif args == 'debug_origin':
origin = input("Enter <DEBUG> point of origin: ")
generated_after, generated_before = ask_date()
verbose = fetcher.get_debug_by_origin(
origin, generated_after, generated_before)
print_stuff_nice_and_good(verbose, "Debug messages based on origin function/class",
generated_after, generated_before, search_criteria=origin)
elif args == 'developer':
dev = input("Enter the developers name: ")
generated_after, generated_before = ask_date()
verbose = fetcher.get_debug_by_developers(
dev, generated_after, generated_before)
print_stuff_nice_and_good(
verbose, "Debug messages based on developers name", generated_after, generated_before, search_criteria=dev)
elif args == 'mark_resolve':
error_name = input("Please provide error name: ")
origin = input("Please provide point of origin: ")
result = fetcher.mark_resolve(error_name, origin)
print("Number of modified rows {}".format(result))
elif args == 'delete_debug':
fetcher.delete_old_debug()
elif args == 'help':
command_man()
else:
print("unknown command - {}".format(args))
command_man()
break
else:
print("*#$" * 20)
print("Current version: {}".format(CURRENT_VERSION))
print("*#$" * 20)
return
def pyfiglet_ascii():
print(pyfiglet.figlet_format("Zathura", font="speed"))
def ask_filter_and_order(ask_limit=True):
desc = input(
"Do you want to filter the result in descending order? Press 1 to confirm, Press any key to continue: ")
if desc == '1':
desc = True
else:
desc = False
if ask_limit:
while True:
limit = input(
"Do you want to limit the results? Enter a non-zero number, or press Enter to skip: ")
try:
if len(limit) == 0:
return (desc, 0)
limit = int(limit)
if limit < 1:
print("Limit must be greater than or equal to 1")
else:
return (desc, limit)
except ValueError:
pass  # not a number, ask again
else:
return desc
def ask_date():
generated_after = input(
"Show logs after this date (inclusive) (limit_1): (dd/mm/yyyy format): ")
if generated_after is None or len(generated_after) == 0:
print("No date filter then")
return (None, None)
else:
day, month, year = map(int, generated_after.split('/'))
generated_after = datetime(year, month, day, 0, 0, 0)
generated_before = input(
"Show logs before this date (inclusive) (limit_2): (dd/mm/yyyy format): ")
if generated_before is None or len(generated_before) == 0:
print("Current date will be using")
generated_before = None
else:
day, month, year = map(int, generated_before.split('/'))
generated_before = datetime(year, month, day, 0, 0, 0)
return (generated_after, generated_before)
def print_stuff_nice_and_good(payload: dict, message: str = None,
date_filter_after: datetime = None,
date_filter_before: datetime = None,
limit: int = 0, desc: bool = False,
search_criteria: str = None):
"""
print stuff in cute and functional way for now.
payload: dict the payload you just received from the sqlite_utility file
message: str any extra message you want to add?
"""
if payload is None:
return
if 'error' in payload:
error_message = payload[Utility.Tag_error_message]
print("[[[[[ Error occurred. ]]]]]\nMessage: ```{}```".format(error_message))
else:
os.system('clear')
total = payload[Utility.Tag_Total] if Utility.Tag_Total in payload else None
if total is None:
return
logs = payload[Utility.Tag_Log] if Utility.Tag_Log in payload else None
if logs is None:
return
print('--------------------------------------------------------')
print('--------------------------------------------------------')
if message is not None:
print(message)
if search_criteria is not None:
print("Search Criteria: {}".format(search_criteria))
if date_filter_after is not None:
_ = "Generated from: {}".format(
date_filter_after.strftime(Utility.get_print_timeformat()))
if date_filter_before is not None:
_ += " to {}".format(date_filter_before.strftime(
Utility.get_print_timeformat()))
print(_)
if limit != 0:
print("Total result is limited to {} data only".format(limit))
if desc:
print("Result is in descending order")
else:
print("Result is in ascending order")
print('--------------------------------------------------------')
print("Logs found = {}".format(total))
print('--------------------------------------------------------\n')
counter = 1
for log in logs:
if 'error_name' in log:
print("[[ {} ]] | User: [[ {} ]] | Error: [[ {} ]] | Warning Level: [[ {} ]] | logged at: {} | Originated at [[ {} ]]".format(
counter, log['user'], log['error_name'], log['warning_level'], log['logged_at'], log['point_of_origin']))
print("Error Description: {}".format(log['error_description']))
if log['is_resolved'] == "Resolved":
print("Status: Resolved. Resolved at {}".format(
log['resolved_at']))
else:
print("Status: Not Resolved yet")
print('--------------------------------------------------------\n')
else:
print("[[ {} ]] | Developer: [[ {} ]] | logged at: {} | Location: [[ {} ]]".format(
counter, log['user'], log['logged_at'], log['point_of_origin']))
print("Message: {}".format(log['message-data']))
print('--------------------------------------------------------\n')
counter += 1
def command_man():
"""
This is command manual. This will print out the helper function of this command.
"""
helper = {
'v': 'Show the current version of this package',
'developer': 'Search based on developers name. You can filter out the result based on date and descending order',
'debug_origin': 'Shows debug messages based on point of origin. Point of origin is the class/function from where you are adding a message in sqlite.',
'all_debug': 'Shows all debug messages',
'delete_debug': 'Deletes debug messages older than seven days from the database. It is useful if you do not want to clutter the database with unnecessary debug info.',
'all_error': 'Shows all error messages',
'error_name': 'Shows error based on a error name.',
'date': 'Shows error occurred in between a specific date.',
'error_user': 'Shows error generated under the given username',
'origin': 'Shows error generated on the given point of origin',
'mark_resolve': 'Given an error name and point of origin all errors logged on database, is marked resolved.',
'help': 'Shows all the commands necessary to run this package from terminal',
}
print('usage: Zathura COMMAND [args] ...')
print(
'For example: { Zathura v } will show the current version of this package.')
print('-----------------------------------------------------')
print('-----------------------------------------------------')
print("All commands: ")
for commands in known_commands:
print('[[ {} ]] : {}'.format(commands, helper[commands]))
print('-----------------------------------------------------')
print('-----------------------------------------------------')
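# Editor's sketch (hedged): sample invocations, assuming a `zathura` entry point matching the
# usage string printed by command_man() above:
#
# $ zathura v # print the current version
# $ zathura all_error # interactively list all error logs
# $ zathura mark_resolve # mark an error resolved (prompts for name and origin)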
if __name__ == '__main__':
if sys.version_info < (3, 0, 0):
print("Zathura needs python3.x to perform normally!")
sys.exit(255)
else:
create_app() | zathura | /zathura-0.0.6.0b1.tar.gz/zathura-0.0.6.0b1/ZathuraProject/__init__.py | __init__.py |
from __future__ import absolute_import, division, print_function
# stdlib
import logging, os
from logging.handlers import RotatingFileHandler
# Bunch
from bunch import Bunch, bunchify
# ConfigObj
from configobj import ConfigObj
class BaseServer(object):
SERVER_TYPE = None
def __init__(self, log_type, config_dir):
self.log_type = log_type
self.config = Bunch()
self.config.dir = None
self.config.mocks = Bunch()
self.config.mocks_config = self.get_mocks_config(config_dir)
self.setup_logging()
def get_mocks_config(self, config_dir):
self.config.dir = os.path.abspath(os.path.join(os.path.expanduser(config_dir), self.SERVER_TYPE))
base_config_path = os.path.join(self.config.dir, 'config.ini')
base_config = ConfigObj(open(base_config_path))
# Start from the base config's own lines; lines of any included files are appended below
config_lines = open(base_config_path).readlines()
include = base_config.get('apimox', {}).get('include')
if include:
base_config_dir = os.path.dirname(base_config_path)
if isinstance(include, list):
for name in include:
config_lines.extend(open(os.path.abspath(os.path.join(base_config_dir, name))).readlines())
else:
config_lines.extend(open(os.path.abspath(os.path.join(base_config_dir, include))).readlines())
return bunchify(ConfigObj(config_lines))
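# ################################################################################################################################
# Editor's note (hedged): the `include` key handled above lets a config.ini pull in extra files,
# e.g. a hypothetical http/config.ini containing:
#
# [apimox]
# include=extra-mocks.ini
#
# The included file's lines are resolved relative to config.ini, appended and parsed together
# with the base file, so included sections behave as if they were defined in config.ini itself.
# ################################################################################################################################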
def setup_logging(self):
config = self.config.mocks_config.apimox
log_level = getattr(logging, config.log_level)
logger = logging.getLogger('zato')
logger.setLevel(log_level)
rfh = RotatingFileHandler(os.path.join(self.config.dir, 'logs', getattr(config, 'log_file_{}'.format(self.log_type))))
sh = logging.StreamHandler()
rfh.setLevel(log_level)
sh.setLevel(log_level)
formatter = logging.Formatter('%(asctime)s %(name)s %(message)s', '%Y-%m-%d %H:%M:%S')
rfh.setFormatter(formatter)
sh.setFormatter(formatter)
logger.addHandler(rfh)
logger.addHandler(sh)
def set_up(self):
raise NotImplementedError('Must be implemented in subclasses') | zato-apimox | /zato-apimox-1.3.tar.gz/zato-apimox-1.3/src/zato/apimox/common.py | common.py |
from __future__ import absolute_import, division, print_function
# stdlib
import os, ssl
from ast import literal_eval
from httplib import INTERNAL_SERVER_ERROR, OK, PRECONDITION_FAILED, responses
from logging import getLogger
from string import digits
from traceback import format_exc
from urlparse import parse_qs
from uuid import uuid4
# gevent
from gevent import pywsgi
# parse
from parse import compile as parse_compile
# Validate
from validate import is_integer, VdtTypeError
# Zato
from zato.apimox.common import BaseServer
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
_EMPTY = uuid4().int
_PRECONDITION_FAILED = '{} {}'.format(PRECONDITION_FAILED, responses[PRECONDITION_FAILED])
DEFAULT_CONTENT_TYPE = 'text/plain'
JSON_CHAR = '{"[' + digits
XML_CHAR = '<'
JSON_XML = JSON_CHAR + XML_CHAR
JSON_CONTENT_TYPE = 'application/json'
XML_CONTENT_TYPE = 'text/xml'
CONTENT_TYPE = {
'json': JSON_CONTENT_TYPE,
'xml': XML_CONTENT_TYPE,
'txt': DEFAULT_CONTENT_TYPE,
'csv': 'text/csv',
}
# ################################################################################################################################
class MatchData(object):
def __init__(self, match, name=None, status=None, content_type=None, response=None):
self.match = match
self.name = name
self.status = status
self.content_type = content_type
self.response = response
# ################################################################################################################################
class RequestMatch(object):
def __init__(self, config, wsgi_environ):
self.config = config
self.wsgi_environ = wsgi_environ
self.wsgi_environ_qs = self.get_qs_from_environ()
self.status = '{} {}'.format(config.status, responses[config.status])
self.content_type = config.content_type
self.response = config.response
self.qs_score = self.get_score()
def __cmp__(self, other):
return cmp(self.qs_score, other.qs_score)  # proper -1/0/1 so max() and sorting both work
# ################################################################################################################################
def parse_qs_value(self, value):
try:
value = is_integer(value)
except VdtTypeError:
# OK, not an integer
pass
# Could be a dict or another simple type then
try:
value = literal_eval(value)
except Exception:
pass
# OK, let's just treat it as string
return value
# ################################################################################################################################
def get_qs_from_environ(self):
out = {}
for key, value in parse_qs(self.wsgi_environ['QUERY_STRING']).items():
out[key] = self.parse_qs_value(value[0])
return out
# ################################################################################################################################
def get_score(self):
""" Assign 200 if a query string's element matched exactly what we've got in config,
and 1 if the config allows for any value as long as keys are the same. It follows then
that we allow for up to 200 query parameters on input which should be well enough.
"""
any_value_add = 1
value_add = 200
score = 0
# Go through the request's parameters and add score for each element matching the config
for wsgi_key, wsgi_value in self.wsgi_environ_qs.items():
if wsgi_key in self.config.qs_values:
config_value = self.config.qs_values.get(wsgi_key, _EMPTY)
# Config requires an exact value
if config_value and config_value != _EMPTY:
if config_value == wsgi_value:
score += value_add
# Config requires any value
else:
score += any_value_add
# Now go through the config and subtract score for each element in config which is not present in the request
for config_key in self.config.qs_values:
config_value = self.config.qs_values.get(config_key, _EMPTY)
if config_key not in self.wsgi_environ_qs:
if config_value != _EMPTY:
score -= value_add
else:
score -= any_value_add
logger.info('Score {} for `{}` ({} {})'.format(
score, self.config.name, self.wsgi_environ['PATH_INFO'], self.wsgi_environ_qs))
return score
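# Editor's worked example of the scoring above (a sketch, not used at runtime). With a mock
# configured as qs_values={'hello': 'world'}:
#
# GET /demo?hello=world -> exact match on `hello` -> score 200
# GET /demo?hello=sky -> key present but value differs -> score 0
# GET /demo -> `hello` missing while an exact value is required -> score -200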
# ################################################################################################################################
class HTTPServer(BaseServer):
SERVER_TYPE = 'http'
def __init__(self, needs_tls=False, require_certs=False, log_type=None, config_dir=None):
super(HTTPServer, self).__init__(log_type, config_dir)
config = self.config.mocks_config.apimox
if needs_tls:
if require_certs:
port = config.http_tls_client_certs_port
else:
port = config.http_tls_port
else:
port = config.http_plain_port
self.port = port
self._require_certs = require_certs
self.needs_tls = needs_tls
self.require_certs = ssl.CERT_REQUIRED if require_certs else ssl.CERT_OPTIONAL
self.full_address = 'http{}://{}:{}'.format('s' if needs_tls else '', config.host, self.port)
self.set_up()
# ################################################################################################################################
def run(self):
tls_args = {}
if self.needs_tls:
pem_dir = os.path.join(self.config.dir, '..', 'pem')
tls_args.update({
'keyfile': os.path.join(pem_dir, 'server.key.pem'),
'certfile': os.path.join(pem_dir, 'server.cert.pem'),
'ca_certs': os.path.join(pem_dir, 'ca.cert.pem'),
'cert_reqs': self.require_certs,
'server_side': True,
})
msg = '{}{} listening on {}'.format('TLS ' if self.needs_tls else '', self.__class__.__name__, self.full_address)
if self.needs_tls:
msg += ' (client certs: {})'.format('required' if self._require_certs else 'optional')
logger.info(msg)
server = pywsgi.WSGIServer((self.config.mocks_config.apimox.host, int(self.port)), self.on_request, **tls_args)
server.serve_forever()
# ################################################################################################################################
def log_req_resp(self, mock_name, status, response, resp_headers, environ):
""" Log both request and response in an easy to read format.
"""
req = [' Body=`{}`'.format(environ['wsgi.input'].read())]
for key, value in sorted(environ.items()):
if key[0] == key[0].upper():
req.append(' {}=`{}`'.format(key, value))
msg = '\n\n=====Request===== \n{}'.format('\n'.join(req))
msg += '\n\n====Response==== \n Mock=`{}`\n Status=`{}`\n Headers=\n{}\n Body=`{}`\n'.format(
mock_name, status, '\n'.join(' `{}`=`{}`'.format(key, value) for key, value in sorted(resp_headers)), response)
msg += '\n'
logger.info(msg)
# ################################################################################################################################
def set_resp_headers(self, config, environ, content_type):
""" Returns headers for the response. Note that Content-Type can be set either
in one of headers or through the content_type explicitly and the latter takes precedence.
"""
out = []
has_content_type = False
for key, value in config.resp_headers.items():
if key.lower() == 'content-type':
has_content_type = True
out.append((key, value))
if not has_content_type:
out.append(('Content-Type', content_type))
return out
# ################################################################################################################################
def on_request(self, environ, start_response):
data = self.match(environ)
# We may have matched nothing at all, or more than one mock
if data.match:
name = data.match.config.name
status = data.match.status
content_type = data.match.content_type
response = data.match.response
else:
name = data.name
status = data.status
content_type = data.content_type
response = data.response
# Set response headers, if any
resp_headers = self.set_resp_headers(data.match.config, environ, content_type) if data.match else []
# Now only logging is left
self.log_req_resp(name, status, response, resp_headers, environ)
start_response(status, resp_headers)
return [response]
# ################################################################################################################################
def match(self, environ):
matches = []
for name, item in self.config.mocks_config.items():
# Ignore our own config
if name == 'apimox':
continue
if not item.url_path_compiled.parse(environ['PATH_INFO']):
continue
method = item.get('method')
if method and method != environ['REQUEST_METHOD']:
continue
matches.append(RequestMatch(item, environ))
if not matches:
return MatchData(None, None, _PRECONDITION_FAILED, DEFAULT_CONTENT_TYPE, 'No matching mock found\n')
# Find the best match and then make sure only one mock has that score.
# If more than one does, it's a 412 Precondition Failed because we don't know which response to serve.
match = max(matches)
found = 0
conflicting = []
for m in matches:
if m.qs_score == match.qs_score:
found += 1
conflicting.append(m)
if found > 1:
return MatchData(None, None, _PRECONDITION_FAILED, DEFAULT_CONTENT_TYPE, 'Multiple mocks matched request: {}\n'.format(
sorted([m.config.name for m in conflicting])))
return MatchData(match)
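# Editor's note (hedged): two mocks that tie for the best score trigger the conflict branch
# above, e.g. a config with two identical sections:
#
# [Mock A]
# url_path=/demo
# qs_hello=world
#
# [Mock B]
# url_path=/demo
# qs_hello=world
#
# A request to /demo?hello=world then gets 412 Precondition Failed listing both mock names.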
# ################################################################################################################################
def get_file(self, config, name, default=''):
ext = name.split('.')[-1]
resp_dir = os.path.join(self.config.dir, 'response', ext)
try:
full_path = os.path.join(resp_dir, name)
data = open(full_path).read()
except IOError as e:
logger.warn('Could not open `{}`, e:`{}`'.format(full_path, format_exc(e)))
return False, ext, default
else:
return True, ext, data
# ################################################################################################################################
def get_qs_values(self, config):
qs_values = {}
for item, value in config.items():
if item.startswith('qs_'):
if value:
# This is needed because there can be a comma in query string
# which ConfigObj turned into a list, so we now need to convert it back to string.
if isinstance(value, list):
value = ','.join(value)
try:
value = literal_eval(value.strip())
except(SyntaxError, ValueError):
pass # Ok, not an int/dict or another simple value
else:
value = ''
qs_values[item.split('qs_')[1]] = value
config.pop(item)
return qs_values
def get_response(self, config):
response = config.get('response')
if response:
has_inline_resp = response[0] in JSON_XML
if has_inline_resp:
ext = 'xml' if response[0] == XML_CHAR else 'json'
else:
is_ok, ext, response = self.get_file(config, response, '(Response not found)\n')
if not is_ok:
config.status = INTERNAL_SERVER_ERROR
if not config.get('content_type'):
config.content_type = CONTENT_TYPE.get(ext, DEFAULT_CONTENT_TYPE)
else:
if not config.get('content_type'):
config.content_type = DEFAULT_CONTENT_TYPE
return response or ''
def get_resp_headers(self, config):
resp_headers = {}
file_text_headers = config.pop('resp_headers', None)
if file_text_headers:
is_ok, _, data = self.get_file(config, file_text_headers)
if is_ok:
for line in data.splitlines():
split_at = line.find('=')
key = line[0:split_at].strip()
value = line[split_at+1:].strip()
resp_headers[key] = value
for orig_key, value in config.items():
if orig_key.startswith('resp_header_'):
key = orig_key.replace('resp_header_', '', 1)
# Perhaps the value actually points to a file it can be found in
if value.endswith('.txt'):
_, _, value = self.get_file(config, value, '(Configuration error)')
resp_headers[key] = value
# No namespace clutter
config.pop(orig_key)
return resp_headers
# ################################################################################################################################
def set_up(self):
for name, config in sorted(self.config.mocks_config.items()):
# Ignore our own config
if name == 'apimox':
continue
config.name = name
config.url_path_compiled = parse_compile(config.url_path)
config.status = int(config.get('status', OK))
config.method = config.get('method', 'GET')
config.qs_values = self.get_qs_values(config)
config.response = self.get_response(config)
config.resp_headers = self.get_resp_headers(config)
qs_info = '(qs: {})'.format(config.qs_values)
logger.info('`{}`: {}{} {}'.format(name, self.full_address, config.url_path, qs_info))
# ################################################################################################################################ | zato-apimox | /zato-apimox-1.3.tar.gz/zato-apimox-1.3/src/zato/apimox/http.py | http.py |
# Originally part of Zato - open-source ESB, SOA, REST, APIs and cloud integrations in Python
# https://zato.io
from __future__ import absolute_import, division, print_function
# stdlib
import os
HTTP_CONFIG_INI = """
[apimox]
host=0.0.0.0
http_plain_port=44333
http_tls_port=44555
http_tls_client_certs_port=44777
log_level=INFO
log_file_plain=plain_http.log
log_file_tls=tls_http.log
log_file_tls_client_certs=client_certs_tls_http.log
[JSON Demo - 01]
url_path=/demo
qs_hello=world
response=demo1.json
[JSON Demo - 02]
url_path=/demo
qs_hello=sky
response=demo2.json
[JSON Demo - 03]
url_path=/something/{anything}
response='{"Responses can be":"provided inline"}'
[XML Demo - 01]
url_path=/demo
qs_format=xml
response=demo1.xml
""".strip()
ZMQ_CONFIG_INI = """
[apimox]
host=0.0.0.0
pull_port=55000
sub_port=55111
sub_prefix=
log_level=INFO
log_file_pull=pull_zmq.log
log_file_sub=sub_zmq.log
""".strip()
CA_CERT="""
-----BEGIN CERTIFICATE-----
MIIFoDCCA4igAwIBAgIJAMpUuR9ijhIRMA0GCSqGSIb3DQEBBQUAMBsxCzAJBgNV
BAYTAkFVMQwwCgYDVQQDEwNDQTIwHhcNMTQwNzIwMTgyMTU2WhcNMjQwNzE3MTgy
MTU2WjAbMQswCQYDVQQGEwJBVTEMMAoGA1UEAxMDQ0EyMIICIjANBgkqhkiG9w0B
AQEFAAOCAg8AMIICCgKCAgEAnMEaU26+UqOtQkiDkCiJdfB/Pv4sL7yef3iE9Taf
bpuTPdheqzkeR9NHxklyjKMjrAlVrIDu1D4ZboIDmgcq1Go4OCWhTwriFrATJYlp
LZhOlzd5/hC0SCJ1HljR4/mOWsVj/KanftMYzSNADjQ0cxVtPguj/H8Y7CDlQxQ4
d6I1+JPGCUIwG3HfSwC5Lxqp/QLUC6OuKqatwDetaE7+t9Ei6LXrFvOg6rPb4cuQ
jymzWnql0Q1NEOGyifbhXaQgO6mM5DaT/q3XtakqviUZDLbIo4IWJAmvlB8tbcbP
wzku+6jEBhkdTAzAb6K6evTK4wUUSrHTE6vF/PHq5+KLrGReX/NrCgdTH/LB/Aux
817IF2St4ohiI8XVtWoC/Ye94c1ju/LBWIFPZAxFoNJJ5zvlLwJN8/o1wuIVNQ3p
4FWTXVArmSOGEmQL48UTUFq/VKJeoDstUoyIsKnBn4uRMcYPIsMh1VF6Heayq1T9
eO2Uwkw75IZVLVA9WaXnCIc07peDREFbyWtyKzpDa2Bh8bLVQ/tyB+sBJkO2lGPb
PMRZl50IhdD7JENNfTG89LCBNioPDNQXN9q3XQYSZgQ9H70Zp+Y3/ipXvIAelPwq
Uyg7YoIjOTqFF25g2c/XSrwSpKCr22lb1vkCLUT7pA0tslMVdULo1FkkkfIDDiHs
FC8CAwEAAaOB5jCB4zAdBgNVHQ4EFgQUmh+yIUO2PG/fMMMjXjestsQPg48wSwYD
VR0jBEQwQoAUmh+yIUO2PG/fMMMjXjestsQPg4+hH6QdMBsxCzAJBgNVBAYTAkFV
MQwwCgYDVQQDEwNDQTKCCQDKVLkfYo4SETAPBgNVHRMBAf8EBTADAQH/MBEGCWCG
SAGG+EIBAQQEAwIBBjAJBgNVHRIEAjAAMCsGCWCGSAGG+EIBDQQeFhxUaW55Q0Eg
R2VuZXJhdGVkIENlcnRpZmljYXRlMAkGA1UdEQQCMAAwDgYDVR0PAQH/BAQDAgEG
MA0GCSqGSIb3DQEBBQUAA4ICAQBJsjzUBFUOpuOGz3n2MwcH8IaDtHyKNTsvhuT8
rS2zVpnexUdRgMzdG0K0mgfKLc+t/oEt8oe08ZtRnpj1FVJLvz68cPbEBxsqkTWi
Kf65vtTtoZidVBnpIC4Tq7Kx0XQXg8h+3iykqFF6ObqxZix/V9hs3QDRnTNiWGE7
thGCAWWVy1r56nkS91uhQhSWt471FevmdxOdf7+4Df8OsQGcPF6sH/TQcOVgDc20
EiapNMpRxQmhyOI7HBZdYGmHM6okGTf/mtUFhBLKDfdLfBHoGhUINiv939O6M6X3
LFserZ9DEd9IIOTsvYQyWhJDijekEtvBfehwp1NjQcity/l/pwUajw/NUok56Dj7
jHBjHJSSgb5xJ9EMrtJ2Qm2a5pUZXwF2cJIxBjQR5bufJpgiYPRjzxbncStuibps
JjSGwiGvoyGbg2xLw7sSI7C2G9KGMtwbS4Di1/e0M1WfFg/ibT3Z1VhqtEL6Yr+m
CG6rI1BBiPfJqqeryLg8q9a4CQFA+vhXSzvly/pT7jZcLyXc/6pCU6GqjFZDaiGI
sBQseOvrJQ1CouAMnwc9Z8vxOOThqtMTZsGGawi+16+5NmpLwW53V/wtHUZuk39F
29ICmBRa3wrCyhNMb+AFhaPjO34jtRGqeOJA98eS29GooycDnh/Uwi3txZu6DNmZ
NVRV1A==
-----END CERTIFICATE-----
""".strip()
CLIENT_KEY_CERT="""
-----BEGIN CERTIFICATE-----
MIIF0DCCA7igAwIBAgIBBzANBgkqhkiG9w0BAQUFADAbMQswCQYDVQQGEwJBVTEM
MAoGA1UEAxMDQ0EyMB4XDTE0MDkxMzEyNDIwOVoXDTIxMTIxNTEyNDIwOVowNzEL
MAkGA1UEBhMCQVUxEDAOBgNVBAMTB0NsaWVudDQxFjAUBgkqhkiG9w0BCQEWB0Ns
aWVudDQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDMGzFZVM8OLyMF
AVWh/AwGhJ0q8T5Vh75LrD5tNJR6Qv6m/N+mKtEYPujAZzJIqDOBngmPzIYSTi2N
XELCrl6kQvTpcs/lcwQrOTRMxi75og1xna9gKtbdKvr8Zn09bDegcram6XuD6jWw
w8W05wzWxF9Ubkt1dcNdyVF3lBM87YVOsM9xOvkugLWO04AmEnRepB38s5X/3mPj
6TTKk4IKQUjmSX1xplKdobvEytKyywuQGuRr32FI3gIH7X8DS9lSw9EhvjLO52VX
Of/DtpYow5BjEusUT80n7h9OIn1sGkvbSMTKKrzuQKBVFvHW1WjZHPKn5tBwO9wp
nphetmqPgj2zkk+LKC92fEHl096BgEVZiQp6ixk7yBOJ/ne9nPvJAiJhTvrwVilt
FbtRVZjZ+T4wYEiPSGUKxyU9SHrpJliJGWY5rFq58wLibmh9LjMk2zkevLKe+fJP
DGp17iI++6Q/G5AoBKz7PbmRJaOq/xH0epZy3qLQZA0o7UVty6Jfy+VGNcHpsxgQ
ltKBxwRJ7XJlNIha+fDSKEUDQr7src2wRGwgAR1tHFETOBbp4NtMxTzCxq51CzIo
fO9kP5uV6OSBdQTIU7nni72bxUg2T8kKZ0A/3lr5e6YWeg2tRD2fZziQd8WBPp3M
HmLmiRcybUYWvHt3ZUt1UEJIBgog7QIDAQABo4IBATCB/jAJBgNVHRMEAjAAMBEG
CWCGSAGG+EIBAQQEAwIEsDArBglghkgBhvhCAQ0EHhYcVGlueUNBIEdlbmVyYXRl
ZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUffrp+KrDJFGTgUARU2M+RvvRlJkwSwYD
VR0jBEQwQoAUmh+yIUO2PG/fMMMjXjestsQPg4+hH6QdMBsxCzAJBgNVBAYTAkFV
MQwwCgYDVQQDEwNDQTKCCQDKVLkfYo4SETAJBgNVHRIEAjAAMBIGA1UdEQQLMAmB
B0NsaWVudDQwDgYDVR0PAQH/BAQDAgWgMBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMC
MA0GCSqGSIb3DQEBBQUAA4ICAQAuspxaskmlZSNIaK4qE2gUWLm37Otr6hwJdP4P
s6B4jkjMW5n2gQ0ZjtWVXEG2xA771pTqL9XNtqBdUGRNBs3tj2lSp5n7KTuxilVX
S79EaoOVr7/vbEGscgrpRcIhYhXxS9JxdL64drWJybjMBuw945lxPmYA8G3bW3LN
R40raEN//gui9Hb0hIW+2uu/WM8Hw60Gmc50q5mQh3A3n8ZUocFxKkUfb3tLqeG3
cgqCYgUctTqISJsHbQkTI594rRhQeYyaGirg0t2OgeVaXBX+7HBnDAomR1VPxahU
hhqxc8cE6l6ufIKusljOYljWydcgcinJnwGyH/gxSdMCItolPj4gAiVvCbJ/Pu38
GNlgCPc1pfJ2vSgzoUeMr5HLTx/jwfNpHDE3on/qtiaYCWWkZqKJOC/0Nq2Jz9lM
jvbWTSnQ+oRq7B/5cH02u+M2dcuZFrrmosQq680Ov8K/f4/jBjwGgFXg46fCXzsR
mNc0s6Dx3nS2ecIocDQfR7cy+oqYVHQOhvBrp6zSbb2H265D8i82jV/i5j6DbZ6P
s/Ab7xtyW6AwGr6O+s9Wix4w6vVKds7uq5lTUIjjl5dw6JcHjpBmlmPsKvQH2izx
1fLOfvz9aFHvvXEKFqpptwd9ZQL2KpmNIrOp7jrnpQ1e18zbL8HnX6W4V0rKUAn4
svkkFA==
-----END CERTIFICATE-----
-----BEGIN RSA PRIVATE KEY-----
MIIJKQIBAAKCAgEAzBsxWVTPDi8jBQFVofwMBoSdKvE+VYe+S6w+bTSUekL+pvzf
pirRGD7owGcySKgzgZ4Jj8yGEk4tjVxCwq5epEL06XLP5XMEKzk0TMYu+aINcZ2v
YCrW3Sr6/GZ9PWw3oHK2pul7g+o1sMPFtOcM1sRfVG5LdXXDXclRd5QTPO2FTrDP
cTr5LoC1jtOAJhJ0XqQd/LOV/95j4+k0ypOCCkFI5kl9caZSnaG7xMrSsssLkBrk
a99hSN4CB+1/A0vZUsPRIb4yzudlVzn/w7aWKMOQYxLrFE/NJ+4fTiJ9bBpL20jE
yiq87kCgVRbx1tVo2Rzyp+bQcDvcKZ6YXrZqj4I9s5JPiygvdnxB5dPegYBFWYkK
eosZO8gTif53vZz7yQIiYU768FYpbRW7UVWY2fk+MGBIj0hlCsclPUh66SZYiRlm
OaxaufMC4m5ofS4zJNs5HryynvnyTwxqde4iPvukPxuQKASs+z25kSWjqv8R9HqW
ct6i0GQNKO1FbcuiX8vlRjXB6bMYEJbSgccESe1yZTSIWvnw0ihFA0K+7K3NsERs
IAEdbRxREzgW6eDbTMU8wsaudQsyKHzvZD+blejkgXUEyFO554u9m8VINk/JCmdA
P95a+XumFnoNrUQ9n2c4kHfFgT6dzB5i5okXMm1GFrx7d2VLdVBCSAYKIO0CAwEA
AQKCAgEAlmu3+9j328E7ctXf8Uha6HbVia9PPCOVSBnBzCPDBTPYjuKwRLsrbE34
pMupEEj95Jm+/D5D1LvO8G49OVLepvo9msqlkrkoKl63P5mTRyB5/fCzLhGEVmz1
mgxCYoEdod7I48wQ3lA+j25Ih6D8Ik+I3iWG8SL//1997b2wS+fUpgDCcPWAbRgo
NgGDYQuavaEABJupgW+5eF8HLAB4BuzEOAuTKq3kFw353veHPoNLm0FmdGWlQdlz
77nFMH22xTtRJigRM9DvK9CvwOIQWix+fbWUkFybmsDwS1o5yvC6VPqVJVVH9eKl
BvCo/KY85j1iTAFcPkqvX/Dk5HBVqOrmx4NQU5o/9eJnSknfcGAdsWr3952wrHxa
kGjjkwsp6fBb/NkVqJuODgzSC7XwJR0D4OwnzTuzcoi2uXwjDohAJEYd6M8rITP1
6RckzXu9upM3bh4cFnv76TF9Dbca0paBb9VPeXSUZYMZazwsXYlETWDLZjhX9RLv
CA2pk1gBSorMyqx8KOLfH2Lx8ZbB9QBdqU6WAUz00cO72TiVw2dbU8Gp34BO78N2
mpahflg98WnRLQhzb6iwcCXHzfVdHUYsHcALq5vBh4RkDK74xzXp4sjE0za3BiqA
MaO+0+Tsfw7loyXMWXimXFazxD3FZ/YLWQPNlEGJMOma/94DBEECggEBAObaShP9
9RzbpiHltH6/JIOI5f61agc7vyCnHQ9TUejOsUbXrgcsWnVcqSdNa0azpGpqtwKO
S2haF+DviKF+zM6znJJ41AyBjqIyBipDAKcF8Tervq2dPP/16SEMO/D1CX0IwFUd
M2Si1eWU49bk/y7fkH5zw/0xJXLXrKyDSBTaiyRj6+KGj6h2uJPmRhStlgvuyufu
PD0TcffBOP9tx5HfkWcGmnPJrZZ+ehe4Kn5q8BR4W11V64/a03ALbx+2f6DcOU48
8m3O9tXucExjOuDUOC9JZXMQucUEtrOMADnIMLXEjYjW/VbV5jP+QYCj+Er028Ip
xoNXjSwyFgduYd0CggEBAOJXCJ0eo9EUSJgSH1ZKPyziRCmhmnEXBVwQOPOYOO73
rPHWdpjG+HUkQSWOsFxa3Yjia9r9z3DA8ynVzPnmbG8Otc4i2IN/S4ksbFFjHtjs
F0hQBFmYI21VqkUqK8iFOOwQacFmyYs8lqg7PnQhS7GoQsnbnHE0HOpe9zjQr6Fl
T5AY6bJ9cdhXPnap/2LLP08wpNcaW0XbKWRT0+hAl5WvZry3ftn7ubNstF/HAUTU
bxLBn0CYMtTg/jAGyYtj5MvNLFGUFGx3Lg51mBS3TZWstOeF/7sAD5w453VjVWKy
Qkj4OkWJRxxbB5fuJVGrqTXc/SNh/+z25iuUX0EAMlECggEAVklhRve1loPDJQhm
3rkzPLb+wKWua+W5Gstb4U6TXyFiwcf8FFJPvW5VC4u0fUjIO76HiT0GkoqaQklG
GJb8loYsD9N57vK+DYIFK+a/Z66g6t4W922+Ty3rZZ7dCMOOOF39BdNUUllK+fUc
9EXD3BFUQO+kYg7soHBc6l5nouPM/l0a3iDNsXouo5l+uFvpqawny2kQuwN5pdFj
LJYr8ipOfuPI9156s7WyjQsZVwdBlWUnQUvMMIjqXwbnEkN0kPu/r664LrMdL/lf
oC225DJujb4xXUDzLuEEKTg7HV3mVwqQnIU/TCXHVcfDVAH13I6JVZmnyZAABHT0
JvLrQQKCAQEAmiRboWU0ezctGSN+Y+28iHyvnwqHe20KIWCK6JpKa7QQ+8HqkrEu
k9hU5Zb/VGYtaQOKIGGp3EgLUfpg1e+u+RMzjWb9vM/8STcPrX2rjF98m6qiy8Fo
nxUwGFpX5v+TfHDRFP1DVKe2kmuGZOAoBJ1qnr4JFK9A4fw6sV6tvWSZgrD0trHn
zkXcLEQpwMZaHzwphrRUZIaU8daFAi67DR2fAfaVVS6xkRf+3xtQKefinQtvwTXl
qERx15NHvr4RGxpnjEckgZnIq+A56iHLnJs5uFLxjhDEkMfQGYnEpKpxqfAi/yg2
XYFA8p8kmzIk0qHlYytid6bNqfApzsKrgQKCAQAqDHO2DSVZEiqpG9ony4WcRTMY
lZ85e3S1gCWDwDHfhGBFLgg7JgmqVsM6De1s6+gcSRK8wXVJzRbF4XWkBLmXU2Nr
FS4ZCFoSPDUFrETtd7X5a6UL14gkpmFxNp3NEfIkGHFemti2U2Iv+v2E/A23sQbR
oAhWdJru5/ToQEGSS2lThIxebj8laedmKoyI2c4muxwvkB3grrSN1FNDs7bmUSTP
CKyAiZSy8T+YHPL4r9Up5M86LRbUvHmIVy7kJaYjQTGeqNJFPX0WMqb6xTm3VA7G
4Zfx4Q3uMFdRgGHQIhwIIYe14sw8ImHbAyRKuXT0Noo/ETmWCaVZzi8pil9M
-----END RSA PRIVATE KEY-----
""".strip()
SERVER_CERT="""
-----BEGIN CERTIFICATE-----
MIIFwTCCA6mgAwIBAgIBBjANBgkqhkiG9w0BAQUFADAbMQswCQYDVQQGEwJBVTEM
MAoGA1UEAxMDQ0EyMB4XDTE0MDkxMzEyNDEzN1oXDTIxMTIxNTEyNDEzN1owOTEL
MAkGA1UEBhMCQVUxEjAQBgNVBAMTCWxvY2FsaG9zdDEWMBQGCSqGSIb3DQEJARYH
U2VydmVyNDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALbwm6LtiHAU
h2zHRVieD8z2LMa7Ka5YtNuzixRD17Omt1+y/c9oDS/Qbiu2Yf1yvVzQD2GLI2Fl
ZD9DuCW8iuKx9pDj17lCGBhc7c9gcFz7wy45a7qjVVgG4pd4OxCFKnESX8ZMemmU
RuipVjnxrCRxneOqVA0Re/VrqZEmrgt7dBIKhwtfFnA7kLQpGFcaBYkX5X5C3Y1Z
dkw9kXFzgbw58Y9TeeuvB8lR7BFN9pdMXzgcqi3i5ec+LpPg64L5sDhVYv+Io9rk
pv/qGtqyhRTy5VlG3lLXYhB0N0sPummNPveYB7FIzF36syBev2KP55ZOiYxoeBS2
icgwsjo/DBnRu5ILw5eR6NXJiYqet2P8/eWkX+ILa2atJw+7qcMmQul5Vku46+le
z3dCq0U/Wy26y3gmRGda1ojOCiLTPBlYqJUUmGkciYEDjIeRdIyFfcPQIvTsbWu9
o5gmkhaM8EMo1i9wqgsQlLLyBYhEBHYhmhhUbNw3oViNXR6QpoK07QoQiLtfmxDK
IJe+XdVT2CgB4UXG5RckpMI24KWrWo8RJCsZm7SnoNpLh/I6LFPt4cQso/uuicM3
4mLdVDYdbh86zr4XqLJd6fuSaaLQblQJuPqHz2pxMnRFxIPTkcWA9PA3aqpetkBj
vEFDC7fksHHVkUEUJIwMb19JRaLTCP3lAgMBAAGjgfEwge4wCQYDVR0TBAIwADAR
BglghkgBhvhCAQEEBAMCBkAwKwYJYIZIAYb4QgENBB4WHFRpbnlDQSBHZW5lcmF0
ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFHewxkloJzmR2Cj0/za/ZHS0yl92MEsG
A1UdIwREMEKAFJofsiFDtjxv3zDDI143rLbED4OPoR+kHTAbMQswCQYDVQQGEwJB
VTEMMAoGA1UEAxMDQ0EyggkAylS5H2KOEhEwCQYDVR0SBAIwADASBgNVHREECzAJ
gQdTZXJ2ZXI0MBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMBMA0GCSqGSIb3DQEBBQUA
A4ICAQA+EAj846j4u/PZvLITPX/kI1+8Y9JIULKwdQ2v8O5mMf9In2Pk9MQ+81RP
rpDZo3ZsfkkdoAR7j5ZeTdMargFAeyErfJpZ5Fv4LryaNotJB0/iG8vcWpOJ7qa7
bae+5hQ0vzAoeIxg7kRXN2noSyHHhd3riddOp3/TxetKoFdWSjjnMXqBvZbYzUcf
asdKMXKcvZlan01f+zV8CkR7+Scd+5uW33lNHnUmCzeGA5G8z1vA05u9TVAkwU5r
XbdJbUjCE3d+X/jkaS5IvhBu6tKSA1YFcD9Brh8CmMjtCWLk8ETv+78WJzqyjiaT
OisFTUI/jC18dKgFyyehEeeYo5SZO7BIsNgplDX2UOumQwZYdUX4M3ObRt2n33Fb
ReVhPf39oCDSOGEckRGeJX6ydVRjWJHC/qT3gDKaMPZd5lN0M1BOqyAFakM0oU/7
VPf9dUQsw/BeUvm+34hE382JIefzBA32SsyfQjNf6L6tV1JYEfeaebSI+cIny9me
lfvTgPmoabqCXVN03hyppf7/0tD8BpitC9ghFrN61oJLEgJOJ9tLuQz0h5gbxeZP
mOAkPcQs5FMuzNmP/amLSfCFfdUT5iIqZ3uIAsqnw0ftp8OOEAdyoC4/vgVx3y6b
BOX+H+pK1aZXjNzcacyPSawHJTvqexNJFWV167okb1BmOFJL9w==
-----END CERTIFICATE-----
""".strip()
SERVER_KEY="""
-----BEGIN RSA PRIVATE KEY-----
MIIJKgIBAAKCAgEAtvCbou2IcBSHbMdFWJ4PzPYsxrsprli027OLFEPXs6a3X7L9
z2gNL9BuK7Zh/XK9XNAPYYsjYWVkP0O4JbyK4rH2kOPXuUIYGFztz2BwXPvDLjlr
uqNVWAbil3g7EIUqcRJfxkx6aZRG6KlWOfGsJHGd46pUDRF79WupkSauC3t0EgqH
C18WcDuQtCkYVxoFiRflfkLdjVl2TD2RcXOBvDnxj1N5668HyVHsEU32l0xfOByq
LeLl5z4uk+DrgvmwOFVi/4ij2uSm/+oa2rKFFPLlWUbeUtdiEHQ3Sw+6aY0+95gH
sUjMXfqzIF6/Yo/nlk6JjGh4FLaJyDCyOj8MGdG7kgvDl5Ho1cmJip63Y/z95aRf
4gtrZq0nD7upwyZC6XlWS7jr6V7Pd0KrRT9bLbrLeCZEZ1rWiM4KItM8GViolRSY
aRyJgQOMh5F0jIV9w9Ai9Oxta72jmCaSFozwQyjWL3CqCxCUsvIFiEQEdiGaGFRs
3DehWI1dHpCmgrTtChCIu1+bEMogl75d1VPYKAHhRcblFySkwjbgpatajxEkKxmb
tKeg2kuH8josU+3hxCyj+66JwzfiYt1UNh1uHzrOvheosl3p+5JpotBuVAm4+ofP
anEydEXEg9ORxYD08Ddqql62QGO8QUMLt+SwcdWRQRQkjAxvX0lFotMI/eUCAwEA
AQKCAgEApgyTWDm+o+0eVzAw05T0xpeUYPY1iRjfYKQBU22Y9moW+/hdxMqvXX0U
4vxyyThChWIc8+71OExtx7bSCP6wGcBrC2yjvHYvpL2E5bylgODMcsKP9CKZLoNh
XRc2lXIp6eRBpp54Zii+jCRYLdQc6h9urt1F2W7LUyJcEXJIfAecfVus5Dd1CH4o
hD7g5v6pk5xrJEXRD6HqbJ1dzNqJIa5+ghfFDJYcvTFs0vAvKXma3DW4ilnvUAvy
/ysi2gmFWDy41TTTdbYhlxyJL4TmovMuFfDrj8oMKt8x6SHnlDMuulH2eYaYaZ1K
xdD6ap4wGRBEbXvNsw9U1K7Ot2vOsH+AUK46bZfkw+Oe28j6i342gL/o29z6BwSe
GP+an+VeCS87WUuYCzGugucVBU7UnbGkXyYXbSpYS1h0FrSxElqCTxXBmteo4KJL
uWo3iQXg7ik8gpPG89Xo5c8tylEVEvA9wLB7lZNPURsY9QNXLyYGffJuW8AYFJyv
ymhdiVtLNV5rBUgXmjl+g8g416u6Oj/sx5NfcCQTCw04q5LbCeiHW/KsvIhV3eHz
mj7kQ/OrAtdwZA7ER3mhm7cXqw0EutA+p+HZ87BWYi5HBV7eOgxrxHTw9SK4OIFt
OhKH6l0nghsI/P7PNBR3b+yySFkrn06ctttYCLm6NRYqRoWFrmkCggEBAOOYJOHw
bT/EgJM3vugXl6DKQs4OnfmRdQ2T08HWHCu6tMzEp7veNtE2oAz39XezpOK+gclJ
VGnGBLiZC2eTAsyb7WxAbmRW2Q17+K9NC1SXpYvFDFFaWI65sQciiZBdDZlDzUJw
NlIXgKfJSuAuqXx78slcQuV69Ii7CYys3AbbPeGgKVEqOHGn74hFhUlmWpoE2lM9
tr2p5pZMdKBIe98dyFnzPbBB81dbIfILzH5wSWJLGPuSWhB28a0cY21OAczd59Eq
FyYMTItdk5X8bZLjj0NZ803WWq1usl+X5z3Kr/2aQvV/FRJH26/UBz8z2Pqdk67D
WhBLavhTrj1k68sCggEBAM3Ftj5fr2BKV7TiGroZnLDi+9UdOE344K6OI/sM85/m
YcUJWGxJFTVgOIpMtIJQ9CxHc9xhTabFSGzJ6VOLYW4r5EbiBFY3WrL4lrUeOIzF
XAxBJQR8vt1d/wQD7h0WKDSimpToM4wOcFzEHEkyB9bVbyw2sWj+bM+sD8O5Q0gv
a5Z1W406Ssn+z1gvVBM3MDbUqrrzTTXqHvWOwdDvkxb1eIY++Kco5FIhy7NecdT1
oV+8GfOUCFMqLXTRrHg7atQgS7vcehsILuQqhXs0y3PSbbemVgLLG9E0CZ+w/zbX
HBu14Hhjj4XogSJi+HC5uyUTafNmq0bYhL29wCax5w8CggEBANAC7CK8VX1koYbr
+kWx2lmQwsIFxgilEvCX3YBZqmGlQT2ttwgTrtJENL/lmKoQvHCoYYKQzN/npcT5
y9ycFoDfOn4n3T1Dyxlx5vaBWgu0lg9Kx1lLU4kO2meE/2m8QoOD3oQMfvlElcfE
R/ThcPJfbqTu+A049WpKWA4Epwx1MPeYJGsURYZLULehopJVRBVkvg46Z1ytfhx8
QFOGLADd/ZGIqScA/+ElX78TXZFqGwgFTw4O0tYdgAER4yWxmB+f6RHYgFO8BfGS
UyNQFO2dogCSo7bOZQ4CEHEiKqzlJTiJ1wz9W0rb9kObbAwt3PAhOSsPTK973oac
JLHkHUUCggEAa3ZfsL9j5ZOtrkeO0bXigPZpsmiqKP5ayI5u+ANRkCZO1QoGZbbd
Hpz7qi5Y7t28Rwuh1Gv0k63gHwBrnDfkUBcYBnSu8x/BfEoa2sfHnKzNX5D99hP3
0b/vGHe8+O/DW4m31SBXG0PHJos8gnVgZq/ceWiuyjhlNyeSrBKqsp4hP9hWUbEp
scgjHNjKvaZKxbfW2f+KSSfVt0QwsB8N4CWeJe3pCdNvOf1wVmJybFdDSa4Al5at
qlESoDmIKtpM9i9PnfKMymVBp+MVBr0Rq5Evv4Nc0+SiyGS2yfEzt74rbcVUT0sf
fz1ngz/Qo3474Cb9ZCIwPLWCzVy1Zv/tvQKCAQEAv8uxjmM/CqtKDW9c/z4Z4y6O
squI4AjCgbml8VzC2aS1zQwbCsq0KmGYVgYALKT4dSH+B+6koy+J5GPpVX9xL0Zq
MZJlo1Hmi2hDW+gi/w+Q62iRdqO+SoqbFZJ5aX4iF3dyX9rvDyOzRFr+kddtuQ6y
tru00ATHMp2hix8LoKDo8dLY9bX6Y9RmgWAVOYbFHm4OB9wE2fya3feo6O3znJY9
EqlYKE0bzcHQQzeT0+Lh9+1KLBg6B6jfyAscVKmSgJyEHLW7gzgF/h10py8XMEVj
syS6C3/DMznzpQSyjdTHqdiGuLfagF9oHxRaRacXaxLP2CzILIUFIaEIvJevYg==
-----END RSA PRIVATE KEY-----
""".strip()
def handle(base_path):
""" Sets up runtime directories and sample mocks.
"""
# Top-level directories
http_dir = os.path.join(base_path, 'http')
zmq_dir = os.path.join(base_path, 'zmq')
pem_dir = os.path.join(base_path, 'pem')
os.mkdir(http_dir)
os.mkdir(zmq_dir)
os.mkdir(pem_dir)
# Logs
http_logs_dir = os.path.join(http_dir, 'logs')
zmq_logs_dir = os.path.join(zmq_dir, 'logs')
os.mkdir(http_logs_dir)
os.mkdir(zmq_logs_dir)
# HTTP responses
response_json_dir = os.path.join(base_path, 'http', 'response', 'json')
response_xml_dir = os.path.join(base_path, 'http', 'response', 'xml')
response_txt_dir = os.path.join(base_path, 'http', 'response', 'txt')
os.makedirs(response_json_dir)
os.makedirs(response_xml_dir)
os.makedirs(response_txt_dir)
open(os.path.join(response_json_dir, 'demo1.json'), 'w').write('{"Welcome to apimox":"How\'s things?"}\n')
open(os.path.join(response_json_dir, 'demo2.json'), 'w').write('{"Isn\'t apimox great?":"Sure it is!"}\n')
open(os.path.join(response_xml_dir, 'demo1.xml'), 'w').write(
"""<?xml version="1.0" encoding="utf-8"?>\n<root>\n <element>Greetings!</element>\n</root>\n""")
# Config files
open(os.path.join(http_dir, 'config.ini'), 'w').write(HTTP_CONFIG_INI)
open(os.path.join(zmq_dir, 'config.ini'), 'w').write(ZMQ_CONFIG_INI)
# Default crypto material
open(os.path.join(pem_dir, 'ca.cert.pem'), 'w').write(CA_CERT)
open(os.path.join(pem_dir, 'client.key-cert.pem'), 'w').write(CLIENT_KEY_CERT)
open(os.path.join(pem_dir, 'server.cert.pem'), 'w').write(SERVER_CERT)
open(os.path.join(pem_dir, 'server.key.pem'), 'w').write(SERVER_KEY) | zato-apimox | /zato-apimox-1.3.tar.gz/zato-apimox-1.3/src/zato/apimox/init.py | init.py |
# Originally part of Zato - open-source ESB, SOA, REST, APIs and cloud integrations in Python
# https://zato.io
from __future__ import absolute_import, division, print_function
# stdlib
import os, sys, tempfile, uuid
# Click
import click
# Distribute
import pkg_resources
# Zato
from zato.apimox import init as _init, run as _run
# ################################################################################################################################
_mock_types = 'http-plain', 'http-tls', 'http-tls-client-certs', 'zmq-pull', 'zmq-sub'
def print_version(ctx, param, value):
if not value or ctx.resilient_parsing:
return
click.echo(pkg_resources.get_distribution('zato-apimox').version)
ctx.exit()
@click.group()
@click.option('-v', '--version', is_flag=True, is_eager=True, expose_value=False, callback=print_version)
def main():
pass
def cli_init(ctx, path, prompt_run=True):
if os.path.exists(path) and os.listdir(path):
click.echo(ctx.get_help())
click.echo('\nError: directory `{}` is not empty, quitting.'.format(path))
sys.exit(1)
if not os.path.exists(path):
click.echo('Creating directory `{}`.'.format(path))
os.makedirs(path)
_init.handle(path)
click.echo('OK, initialized.')
if prompt_run:
click.echo('Run `apimox run {}` for a live demo.'.format(path))
@click.command()
@click.argument('path', type=click.Path(exists=False, file_okay=False, resolve_path=True))
@click.pass_context
def init(ctx, path):
cli_init(ctx, path)
@click.command(context_settings=dict(allow_extra_args=True, ignore_unknown_options=True))
@click.argument('path', type=click.Path(exists=True, file_okay=False, resolve_path=True))
@click.option('-t', '--type', type=click.Choice(_mock_types))
@click.pass_context
def run(ctx, path, *args, **kwargs):
_run.handle(path, kwargs)
@click.command()
@click.argument('path', default=tempfile.gettempdir(), type=click.Path(exists=True, file_okay=False, resolve_path=True))
@click.pass_context
def demo(ctx, path):
# We're not using tempfile.mkdtemp because we may just as well have been given
# a user-provided directory to run in.
path = os.path.join(path, uuid.uuid4().hex)
cli_init(ctx, path, False)
_run.handle(path)
main.add_command(init)
main.add_command(run)
main.add_command(demo)
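# Editor's sketch (hedged): sample invocations, assuming the package installs the `apimox`
# console script referenced by the init messages above:
#
# $ apimox init ~/mocks # set up config, sample mocks and TLS material
# $ apimox run ~/mocks --type http-plain # serve plain-HTTP mocks only
# $ apimox demo # init + run in a throwaway directory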
if __name__ == '__main__':
main() | zato-apimox | /zato-apimox-1.3.tar.gz/zato-apimox-1.3/src/zato/apimox/cli.py | cli.py |
# Part of Zato - Open-Source ESB, SOA, REST, APIs and Cloud Integrations in Python
# https://zato.io
# stdlib
import csv, operator, os, random, uuid, re
from collections import OrderedDict
from datetime import timedelta
from itertools import zip_longest
# Arrow
from arrow import api as arrow_api
# Bunch
from bunch import Bunch, bunchify
# ConfigObj
from configobj import ConfigObj
# Dateutil
from dateutil.parser import parse as parse_dt
# hvac - HashiCorp Vault client; provides the `Client` used in new_context() below
from hvac import Client
# six
from six.moves import cStringIO as StringIO
# YAML
from yaml.representer import SafeRepresenter
# Zato
from zato.apitest import version
random.seed()
# Singleton used for storing Zato's own context across features and steps.
# Not thread/greenlet-safe so this will have to be added if need be.
context = Bunch()
# ################################################################################################################################
def get_value_from_environ(ctx, name):
return os.environ[name]
# ################################################################################################################################
def get_value_from_ctx(ctx, name):
return ctx.zato.user_ctx[name]
# ################################################################################################################################
def get_value_from_config(ctx, name):
return ctx.zato.user_config[name]
# ################################################################################################################################
def get_value_from_vault(ctx, name):
""" Returns a selected value from Vault. Will use default Vault connection unless a specific one was requested.
"""
client = ctx.zato.vault_config['default'].client
path = name.replace('vault:', '', 1)
return client.read(path)['data']['value']
# ################################################################################################################################
config_functions = {
'$': get_value_from_environ,
'#': get_value_from_ctx,
'@': get_value_from_config,
'vault:': get_value_from_vault,
}
# ################################################################################################################################
def obtain_values(func):
""" Functions decorated with this one will be able to obtain values from config sources prefixed with $, # or @.
"""
def inner(ctx, *args, **kwargs):
def replacer(match):
config_key = match.groups()[1]
config_func = config_functions[config_key]
return str(config_func(ctx, match.groups()[2]))
for kwarg, value in kwargs.items():
if value:
for config_key in config_functions:
if value.startswith(config_key):
config_func = config_functions[config_key]
kwargs[kwarg] = config_func(ctx, value[1:] if len(config_key) == 1 else value)
break
else:
kwargs[kwarg] = re.sub(r'((\$|\#|\@)\{(\w+)\})', replacer, value)
return func(ctx, *args, **kwargs)
return inner
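# ################################################################################################################################
# Editor's sketch (hedged; `my_step` is hypothetical): any keyword argument may reference the
# environment ($), the user context (#), the user config (@) or Vault (vault:), either as a bare
# prefix or inline through ${name}/#{name}/@{name} placeholders.
#
# @obtain_values
# def my_step(ctx, value=None):
# return value
#
# os.environ['TOKEN'] = 'abc'
# my_step(ctx, value='$TOKEN') # -> 'abc'
# my_step(ctx, value='prefix-${TOKEN}') # -> 'prefix-abc'
# ################################################################################################################################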
# ################################################################################################################################
def new_context(old_ctx, environment_dir, user_config=None):
_context = Bunch()
_context.auth = {}
_context.user_ctx = {}
_context.date_formats = {'default':'YYYY-MM-DDTHH:mm:ss'}
_context.environment_dir = old_ctx.zato.environment_dir if old_ctx else environment_dir
_context.request = Bunch()
_context.request.headers = {'User-Agent':'zato-apitest/{} (+https://zato.io)'.format(version)}
_context.request.ns_map = {}
_context.cassandra_ctx = {}
config_ini = bunchify(ConfigObj(os.path.join(_context.environment_dir, 'config.ini')))
_context.user_config = user_config if user_config is not None else config_ini['user']
_context.vault_config = config_ini.get('vault', {})
for name, conn_info in _context.vault_config.items():
if conn_info.token == 'invalid':
continue
client = Client(conn_info.address, conn_info.token)
client.ping()
_context.vault_config[name]['client'] = client
context.clear()
context.update(_context)
return context
# ################################################################################################################################
def get_full_path(base_dir, *path_items):
return os.path.normpath(os.path.join(base_dir, *path_items))
def get_file(path):
return open(path).read()
def get_data(ctx, req_or_resp, data_path):
full_path = get_full_path(ctx.zato.environment_dir,
ctx.zato.request.get('response_format', ctx.zato.request.get('format', 'RAW')).lower(),
req_or_resp,
data_path)
data = get_file(full_path) if data_path else ''
if ctx.zato.request.format == 'XML' and not data:
raise ValueError('No {} in `{}`'.format(req_or_resp, data_path))
return data
# ################################################################################################################################
def parse_list(value):
return [elem.strip() for elem in tuple(csv.reader(StringIO(value)))[0]]
def any_from_list(value):
return random.choice(tuple(elem.strip() for elem in parse_list(value) if elem))
# ################################################################################################################################
def rand_string(count=1):
    # First character is 'a' so it never starts with a digit.
# Some parsers will insist a string is an integer if they notice a digit at idx 0.
if count == 1:
return 'a' + uuid.uuid4().hex
else:
return ['a' + uuid.uuid4().hex for x in range(count)]
def rand_int(min=0, max=100, count=1):
if count == 1:
return random.choice(range(min, max))
else:
return [random.choice(range(min, max)) for x in range(count)]
def rand_float(min=0, max=100):
return float(rand_int(min, max)) + random.random()
def rand_date(format, start=None, stop=None):
if not(start and stop):
# Now is as random as any other date
return now(format)
# ################################################################################################################################
def now(format):
return arrow_api.now().format(format)
def utcnow(format):
return arrow_api.utcnow().format(format)
def utcnow_minus_hour(format):
    utc = arrow_api.utcnow()
    # shift(), not replace(), handles relative offsets in current arrow versions
    return utc.shift(hours=-1).format(format)
# ################################################################################################################################
def date_after_before(base_date, format, direction, limit, needs_parse=True):
if needs_parse:
base_date = parse_dt(base_date)
days=rand_int(0, abs(limit))
return arrow_api.get(direction(base_date, timedelta(days=days))).format(format)
def date_after(base_date, format, limit=100000, needs_parse=True):
return date_after_before(base_date, format, operator.add, limit, needs_parse)
def date_before(base_date, format, limit=100000, needs_parse=True):
return date_after_before(base_date, format, operator.sub, limit, needs_parse)
def date_between(start_date, end_date, format):
start_date = parse_dt(start_date)
end_date = parse_dt(end_date)
diff = int((start_date - end_date).days)
func = date_after if end_date > start_date else date_before
return func(start_date, format, diff, False)
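# For instance (illustrative): date_between('2020-01-01', '2020-03-01', 'YYYY-MM-DD')
# returns a random day from the window between the two dates, while
# date_after('2020-01-01', 'YYYY-MM-DD', 30) stays within 30 days after the start.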
# ################################################################################################################################
comparison_operators = {'equal to': '=',
'not equal to': '!=',
'less than': '<',
'greater than': '>',
'less or equal to': '<=',
'greater or equal to': '>='}
def wrap_into_quotes(values):
return '\'{}\''.format('\', \''.join(values.split(', ')))
def make_dict(*args):
components = []
phrases = OrderedDict()
for item in args:
components.append([segment.strip() for segment in item.split(',')])
for items in zip_longest(*components):
phrases[items[0]] = items[1:]
return phrases
def build_filter(*args):
filter_dict = make_dict(*args)
filter_ = ''
for i, key in enumerate(filter_dict.keys()):
operator = comparison_operators[filter_dict[key][0]]
if filter_dict[key][2] is not None:
join_by = filter_dict[key][2]
if i == 0:
filter_ += "WHERE %s%s'%s' " % (key, operator, filter_dict[key][1])
else:
filter_ += "%s %s%s'%s' " % (join_by, key, operator, filter_dict[key][1])
    return filter_
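# Illustrative only - with the inputs below, matching the argument order used by
# i_store_filter_under_name in steps/sql.py, build_filter produces:
#
#   build_filter('age, name', 'greater than, equal to', '30, abc', 'AND')
#   -> "WHERE age>'30' AND name='abc' "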
# Part of Zato - Open-Source ESB, SOA, REST, APIs and Cloud Integrations in Python
# https://zato.io
# stdlib
import os
ENVIRONMENT = '''# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
# Part of Zato - Open-Source ESB, SOA, REST, APIs and Cloud Integrations in Python
# https://zato.io
# stdlib
import os
# Zato
from zato.apitest.util import new_context
def before_feature(context, feature):
environment_dir = os.path.dirname(os.path.realpath(__file__))
context.zato = new_context(None, environment_dir)
'''
STEPS = '''# -*- coding: utf-8 -*-
"""
Copyright (C) Zato Source s.r.o. https://zato.io
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
# Part of Zato - Open-Source ESB, SOA, REST, APIs and Cloud Integrations in Python
# https://zato.io
# Behave
from behave import given, then
# Zato
from zato.apitest import steps as default_steps
from zato.apitest.steps.json import set_pointer
from zato.apitest.util import obtain_values
'''
CONFIG_INI = """
[behave]
options=--format pretty --no-source --no-timings
[user]
sample=Hello
[vault]
[[default]]
address=http://localhost:8200
token=invalid
"""
DEMO_FEATURE = """
Feature: Zato API Testing Demo
Scenario: *** REST API Demo ***
Given address "http://apitest-demo.zato.io:8587"
Given URL path "/demo/rest"
Given query string "?demo=1"
Given format "JSON"
Given HTTP method "POST"
Given header "X-Custom-Header" "MyValue"
Given request is "{}"
Given path "/a" in request is "abc"
Given path "/foo" in request is an integer "7"
Given path "/bar" in request is a list "1,2,3,4,5"
Given path "/baz" in request is a random string
Given path "/hi5" in request is one of "a,b,c,d,e"
When the URL is invoked
Then path "/action/msg" is "How do you do?"
And path "/action/code" is an integer "0"
And path "/action/flow" is a list "Ack,Done"
And status is "200"
And header "Server" is not empty
    # You can also compare responses directly with files on disk
And response is equal to that from "demo.json"
"""
DEMO_JSON_REQ = """{"hello":"world"}"""
DEMO_JSON_RESP = """{"action":{"code":0, "msg":"How do you do?", "flow":["Ack", "Done"]}}"""
def handle(base_path):
""" Sets up runtime directories and sample features.
"""
# Top-level directory for tests
features_dir = os.path.join(base_path, 'features')
os.mkdir(features_dir)
# Requests and responses
request_json_dir = os.path.join(base_path, 'features', 'json', 'request')
response_json_dir = os.path.join(base_path, 'features', 'json', 'response')
os.makedirs(request_json_dir)
os.makedirs(response_json_dir)
# Demo feature
open(os.path.join(features_dir, 'demo.feature'), 'w').write(DEMO_FEATURE)
open(os.path.join(request_json_dir, 'demo.json'), 'w').write(DEMO_JSON_REQ)
open(os.path.join(response_json_dir, 'demo.json'), 'w').write(DEMO_JSON_RESP)
# Add environment.py
open(os.path.join(features_dir, 'environment.py'), 'w').write(ENVIRONMENT)
# Add steps
steps_dir = os.path.join(features_dir, 'steps')
os.mkdir(steps_dir)
open(os.path.join(steps_dir, 'steps.py'), 'w').write(STEPS)
# User-provided CLI parameters, if any, passed to behave as they are.
# Also, user-defined config stanzas.
    open(os.path.join(features_dir, 'config.ini'), 'w').write(CONFIG_INI)
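# For reference (derived from the code above), handle() leaves this layout behind:
#
#   features/
#       config.ini
#       demo.feature
#       environment.py
#       json/
#           request/demo.json
#           response/demo.json
#       steps/
#           steps.py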
# Part of Zato - Open-Source ESB, SOA, REST, APIs and Cloud Integrations in Python
# https://zato.io
# stdlib
import ast
from base64 import b64encode
import json
import time
import os
from datetime import datetime
from logging import getLogger
# Behave
from behave import given, when, then
# Bunch
from bunch import Bunch
# datadiff
from datadiff.tools import assert_equals
# jsonpointer
from jsonpointer import resolve_pointer as get_pointer
# lxml
from lxml import etree
# Request
from requests import api as req_api
from requests.auth import HTTPBasicAuth
# Zato
from .. import util
from .. import AUTH, CHANNEL_TYPE, INVALID, NO_VALUE
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
@when('the URL is invoked')
def when_the_url_is_invoked(ctx, adapters=None):
response_data = None
try:
if ctx.zato.get('zato_channel_type') == CHANNEL_TYPE.WEB_SOCKETS:
invoke_zato_web_sockets_service(ctx)
else:
invoke_http(ctx, adapters)
response_data = ctx.zato.response.data_text
# If no response_format is set, assume it's the same as the request format.
# If the request format hasn't been specified either, assume 'RAW'.
response_format = ctx.zato.request.get('response_format', ctx.zato.request.get('format', 'RAW'))
if response_format == 'JSON':
ctx.zato.response.data_impl = json.loads(ctx.zato.response.data_text)
elif response_format == 'RAW':
ctx.zato.response.data_impl = ctx.zato.response.data_text
elif response_format == 'FORM':
ctx.zato.response.data_impl = ctx.zato.response.data_text
except Exception as e:
        logger.warning('Caught an exception while invoking `%s` with `%s`; req=`%s`; resp=`%s`, (%s)',
            ctx.zato.full_address, ctx.zato.request.method, ctx.zato.request.data, response_data, e)
raise
# ################################################################################################################################
def invoke_http(ctx, adapters):
adapters = adapters or []
method = ctx.zato.request.get('method', 'GET')
address = ctx.zato.request.get('address')
url_path = ctx.zato.request.get('url_path', '/')
qs = ctx.zato.request.get('query_string', '')
files = None
data = ''
if 'data_impl' in ctx.zato.request:
if ctx.zato.request.is_xml:
data = etree.tostring(ctx.zato.request.data_impl)
elif ctx.zato.request.is_json:
data = json.dumps(ctx.zato.request.data_impl, indent=2)
ctx.zato.request.headers['Content-Type'] = 'application/json'
elif ctx.zato.request.is_raw:
data = ctx.zato.request.data_impl
elif ctx.zato.request.is_form:
data = ctx.zato.request.get('form', '')
files = ctx.zato.request.get('files', None)
ctx.zato.request.headers['Content-Type'] = 'application/x-www-form-urlencoded'
if files is not None:
            # multipart/form-data - let requests set the Content-Type header, including the boundary, itself
del ctx.zato.request.headers['Content-Type']
ctx.zato.request.method = method
ctx.zato.request.data = data
ctx.zato.full_address = '{}{}{}'.format(address, url_path, qs)
auth = None
# New in 1.1 hence optional
if ctx.zato.get('auth'):
if ctx.zato.auth['type'] == AUTH.BASIC_AUTH:
auth = HTTPBasicAuth(ctx.zato.auth['username'], ctx.zato.auth['password'])
ctx.zato.response = Bunch()
s = req_api.sessions.Session()
for adapter in adapters:
s.mount('http://', adapter)
s.mount('https://', adapter)
ctx.zato.response.data = s.request(
method, ctx.zato.full_address, data=data, files=files, headers=ctx.zato.request.headers, auth=auth)
ctx.zato.response.data_text = ctx.zato.response.data.text
# ################################################################################################################################
def invoke_zato_web_sockets_service(ctx):
ctx.zato.response = Bunch()
ctx.zato.response.data_text = json.dumps(ctx.zato.wsx_client.invoke(ctx.zato.request.data_impl).data)
ctx.zato.response.data_impl = json.loads(ctx.zato.response.data_text)
# ################################################################################################################################
@given('address "{address}"')
@util.obtain_values
def given_address(ctx, address):
ctx.zato.request.address = address
@given('URL path "{url_path}"')
@util.obtain_values
def given_url_path(ctx, url_path):
ctx.zato.request.url_path = url_path
@given('HTTP method "{method}"')
def given_http_method(ctx, method):
ctx.zato.request.method = method
def set_request_format(ctx, format):
ctx.zato.request.format = format
ctx.zato.request.is_xml = ctx.zato.request.format == 'XML'
ctx.zato.request.is_json = ctx.zato.request.format == 'JSON'
ctx.zato.request.is_raw = ctx.zato.request.format == 'RAW'
ctx.zato.request.is_form = ctx.zato.request.format == 'FORM'
@given('format "{format}"')
@util.obtain_values
def given_format(ctx, format):
set_request_format(ctx, format)
@given('request format "{format}"')
@util.obtain_values
def given_request_format(ctx, format):
set_request_format(ctx, format)
@given('response format "{format}"')
@util.obtain_values
def given_response_format(ctx, format):
ctx.zato.request.response_format = format
@given('user agent is "{value}"')
@util.obtain_values
def given_user_agent_is(ctx, value):
ctx.zato.request.headers['User-Agent'] = value
@given('header "{header}" "{value}"')
@util.obtain_values
def given_header(ctx, header, value):
ctx.zato.request.headers[header] = value
def given_request_impl(ctx, data):
ctx.zato.request.data = data
if ctx.zato.request.get('is_xml'):
ctx.zato.request.data_impl = etree.fromstring(ctx.zato.request.data)
elif ctx.zato.request.get('is_json'):
ctx.zato.request.data_impl = json.loads(ctx.zato.request.data)
elif ctx.zato.request.get('is_raw'):
ctx.zato.request.data_impl = ctx.zato.request.data
    else:
        if not ctx.zato.request.get('format'):
            raise ValueError('Format not set, cannot proceed')
@given('request "{request_path}"')
@util.obtain_values
def given_request(ctx, request_path):
return given_request_impl(ctx, util.get_data(ctx, 'request', request_path))
@given('request is "{data}"')
@util.obtain_values
def given_request_is(ctx, data):
return given_request_impl(ctx, data)
@given('request file "{name}" is "{value}"')
@util.obtain_values
def given_request_file(ctx, name, value):
ctx.zato.request.data_impl = None
files = ctx.zato.request.get('files', {})
full_path = util.get_full_path(ctx.zato.environment_dir, 'form', 'request', value)
if not os.path.isfile(full_path):
raise ValueError('File upload not found: {}'.format(full_path))
files[name] = open(full_path, 'rb')
ctx.zato.request.files = files
@given('request param "{name}" is "{value}"')
@util.obtain_values
def given_request_param(ctx, name, value):
ctx.zato.request.data_impl = None
form = ctx.zato.request.get('form', {})
if name in form:
if isinstance(form[name], list):
form[name].append(value)
else:
form[name] = [form[name], value]
else:
form[name] = value
ctx.zato.request.form = form
@given('query string "{query_string}"')
@util.obtain_values
def given_query_string(ctx, query_string):
ctx.zato.request.query_string = query_string
@given('date format "{name}" "{format}"')
@util.obtain_values
def given_date_format(ctx, name, format):
ctx.zato.date_formats[name] = format
# ################################################################################################################################
@given('Basic Auth "{username}" "{password}"')
@util.obtain_values
def given_basic_auth(ctx, username, password):
ctx.zato.auth['type'] = AUTH.BASIC_AUTH
ctx.zato.auth['username'] = username
ctx.zato.auth['password'] = password
# ################################################################################################################################
@given('I store "{value}" under "{name}"')
@util.obtain_values
def given_i_store_value_under_name(ctx, value, name):
ctx.zato.user_ctx[name] = value
# ################################################################################################################################
@given('I store a random string under "{name}"')
@util.obtain_values
def given_i_store_a_random_string_under_name(ctx, name):
ctx.zato.user_ctx[name] = util.rand_string()
# ################################################################################################################################
@given('I store a random integer under "{name}"')
@util.obtain_values
def given_i_store_a_random_integer_under_name(ctx, name):
ctx.zato.user_ctx[name] = util.rand_int()
# ################################################################################################################################
@given('I store a random float under "{name}"')
@util.obtain_values
def given_i_store_a_random_float_under_name(ctx, name):
ctx.zato.user_ctx[name] = util.rand_float()
# ################################################################################################################################
@given('I store a random date under "{name}", format "{format}"')
@util.obtain_values
def given_i_store_a_random_date_under_name(ctx, name, format):
ctx.zato.user_ctx[name] = util.rand_date(ctx.zato.date_formats[format])
# ################################################################################################################################
@then('context is cleaned up')
@util.obtain_values
def then_context_is_cleaned_up(ctx):
ctx.zato = util.new_context(ctx, None)
@then('form is cleaned up')
@util.obtain_values
def then_form_is_cleaned_up(ctx):
if 'form' in ctx.zato.request:
del ctx.zato.request['form']
if 'files' in ctx.zato.request:
del ctx.zato.request['files']
@then('status is "{expected_status}"')
@util.obtain_values
def then_status_is(ctx, expected_status):
expected_status = int(expected_status)
assert ctx.zato.response.data.status_code == expected_status, 'Status expected `{!r}`, received `{!r}`'.format(
expected_status, ctx.zato.response.data.status_code)
return True
@then('header "{expected_header}" is "{expected_value}"')
@util.obtain_values
def then_header_is(ctx, expected_header, expected_value):
value = ctx.zato.response.data.headers[expected_header]
assert value == expected_value, 'Expected for header `{}` to be `{}` instead of `{}`'.format(
expected_header, expected_value, value)
return True
@then('header "{expected_header}" is not "{expected_value}"')
@util.obtain_values
def then_header_isnt(ctx, expected_header, expected_value):
value = ctx.zato.response.data.headers[expected_header]
assert expected_value != value, 'Expected for header `{}` not to be equal to `{}`'.format(
expected_header, expected_value)
return True
@then('header "{expected_header}" contains "{expected_value}"')
@util.obtain_values
def then_header_contains(ctx, expected_header, expected_value):
value = ctx.zato.response.data.headers[expected_header]
assert expected_value in value, 'Expected for header `{}` to contain `{}` in `{}`'.format(
expected_header, expected_value, value)
return True
@then('header "{expected_header}" does not contain "{expected_value}"')
@util.obtain_values
def then_header_doesnt_contain(ctx, expected_header, expected_value):
value = ctx.zato.response.data.headers[expected_header]
assert expected_value not in value, 'Header `{}` shouldn\'t contain `{}` in `{}`'.format(
expected_header, expected_value, value)
return True
@then('header "{expected_header}" exists')
@util.obtain_values
def then_header_exists(ctx, expected_header):
value = ctx.zato.response.data.headers.get(expected_header, INVALID)
assert value != INVALID, 'Header `{}` should be among `{}`'.format(expected_header, ctx.zato.response.data.headers)
return True
@then('header "{expected_header}" does not exist')
@util.obtain_values
def then_header_doesnt_exist(ctx, expected_header):
value = ctx.zato.response.data.headers.get(expected_header, INVALID)
assert value == INVALID, 'Header `{}` shouldn\'t be among `{}`'.format(expected_header, ctx.zato.response.data.headers)
return True
@then('header "{expected_header}" is empty')
@util.obtain_values
def then_header_is_empty(ctx, expected_header):
value = ctx.zato.response.data.headers[expected_header]
assert value == '', 'Header `{}` should be empty instead of `{}`'.format(expected_header, value)
return True
@then('header "{expected_header}" is not empty')
@util.obtain_values
def then_header_isnt_empty(ctx, expected_header):
value = ctx.zato.response.data.headers[expected_header]
assert value != '', 'Header `{}` shouldn\'t be empty'.format(expected_header)
return True
@then('header "{expected_header}" starts with "{expected_value}"')
@util.obtain_values
def then_header_starts_with(ctx, expected_header, expected_value):
value = ctx.zato.response.data.headers[expected_header]
assert value.startswith(expected_value), 'Expected for header `{}` to start with `{}` but it\'s `{}`'.format(
expected_header, expected_value, value)
return True
@then('header "{expected_header}" does not start with "{expected_value}"')
@util.obtain_values
def then_header_doesnt_starts_with(ctx, expected_header, expected_value):
value = ctx.zato.response.data.headers[expected_header]
assert not value.startswith(expected_value), 'Expected for header `{}` not to start with `{}` yet it\'s `{}`'.format(
expected_header, expected_value, value)
return True
@then('header "{expected_header}" ends with "{expected_value}"')
@util.obtain_values
def then_header_ends_with(ctx, expected_header, expected_value):
value = ctx.zato.response.data.headers[expected_header]
assert value.endswith(expected_value), 'Expected for header `{}` to end with `{}` but it\'s `{}`'.format(
expected_header, expected_value, value)
return True
@then('header "{expected_header}" does not end with "{expected_value}"')
@util.obtain_values
def then_header_doesnt_end_with(ctx, expected_header, expected_value):
value = ctx.zato.response.data.headers[expected_header]
assert not value.endswith(expected_value), 'Expected for header `{}` not to end with `{}` yet it\'s `{}`'.format(
expected_header, expected_value, value)
return True
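# Taken together, the assertions above support scenario lines such as (illustrative):
#
#   Then status is "200"
#   And header "Content-Type" starts with "application/json"
#   And header "X-Powered-By" does not exist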
# ################################################################################################################################
@then('I store "{path}" from response under "{name}", default "{default}"')
@util.obtain_values
def then_store_path_under_name_with_default(ctx, path, name, default):
if ctx.zato.request.is_xml:
value = ctx.zato.response.data_impl.xpath(path)
if value:
if len(value) == 1:
value = value[0].text
else:
value = [elem.text for elem in value]
else:
if default == NO_VALUE:
raise ValueError('No such path `{}`'.format(path))
else:
value = default
else:
value = get_pointer(ctx.zato.response.data_impl, path, default)
if value == NO_VALUE:
raise ValueError('No such path `{}`'.format(path))
ctx.zato.user_ctx[name] = value
@then('I store "{path}" from response under "{name}"')
@util.obtain_values
def then_store_path_under_name(ctx, path, name):
return then_store_path_under_name_with_default(ctx, path, name, NO_VALUE)
# ################################################################################################################################
def needs_json(func):
def inner(ctx, **kwargs):
if ctx.zato.request.get('response_format', ctx.zato.request.get('format', 'RAW')) != 'JSON':
raise TypeError('This step works with JSON replies only.')
return func(ctx, **kwargs)
return inner
def _response_is_equal_to(ctx, expected):
assert_equals(expected, ctx.zato.response.data_impl)
return True
@then('response is equal to that from "{path}"')
@needs_json
@util.obtain_values
def then_response_is_equal_to_that_from(ctx, path):
return _response_is_equal_to(ctx, json.loads(util.get_data(ctx, 'response', path)))
@then('JSON response is equal to "{expected}"')
@needs_json
@util.obtain_values
def then_response_is_equal_to(ctx, expected):
return _response_is_equal_to(ctx, json.loads(expected))
@then('response is equal to "{expected}"')
@needs_json
@util.obtain_values
def then_response_is_equal_to_raw(ctx, expected): # renamed - was shadowing the JSON variant above
    return _response_is_equal_to(ctx, expected)
# ################################################################################################################################
@then('I sleep for "{sleep_time}"')
@util.obtain_values
def then_i_sleep_for(ctx, sleep_time):
time.sleep(float(sleep_time))
# ################################################################################################################################
@given('I encode "{value}" using Base64 under "{name}"')
@util.obtain_values
def given_i_encode_value_using_base64_under_name(ctx, value, name):
    # str.encode('base64') was Python 2 only - use the base64 module instead
    ctx.zato.user_ctx[name] = b64encode(value.encode('utf8')).decode('ascii')
# ################################################################################################################################
def variable_is(variable, value):
expected_value = ast.literal_eval(value)
assert variable == expected_value, 'Value `{}` is not equal to expected `{}`'.format(variable, expected_value)
@then('variable "{variable}" is a list "{value}"')
@util.obtain_values
def and_variable_is_a_list(ctx, variable, value):
variable_is(variable, value)
@then('variable "{variable}" is an empty list')
@util.obtain_values
def and_variable_is_an_empty_list(ctx, variable):
assert variable == [], 'Value `{}` is not an empty list'.format(variable)
@then('variable "{variable}" is an integer "{value}"')
@util.obtain_values
def and_variable_is_an_integer(ctx, variable, value):
variable_is(variable, value)
@then('variable "{variable}" is a float "{value}"')
@util.obtain_values
def and_variable_is_a_float(ctx, variable, value):
variable_is(variable, value)
@then('variable "{variable}" is a string "{value}"')
@util.obtain_values
def and_variable_is_a_string(ctx, variable, value):
assert variable == value, 'Value `{}` is not equal to expected `{}`'.format(variable, value)
@then('variable "{variable}" is True')
@util.obtain_values
def and_variable_is_true(ctx, variable):
variable_is(variable, 'True')
@then('variable "{variable}" is False')
@util.obtain_values
def and_variable_is_false(ctx, variable):
    variable_is(variable, 'False')
# Part of Zato - Open-Source ESB, SOA, REST, APIs and Cloud Integrations in Python
# https://zato.io
# stdlib
from base64 import b64encode
from datetime import datetime
from http.client import OK
import json
from os.path import split
# requests
import requests
# Behave
from behave import given, then, when
# Zato
from .. import util
from .. import CHANNEL_TYPE
# Zato WSX client (assumption - WSXClient and WSXConfig are not defined in this
# module; the import path below reflects Zato's common WebSocket client module
# and may need adjusting to the installed Zato version)
from zato.common.wsx_client import Client as WSXClient, Config as WSXConfig
# ################################################################################################################################
@given('I store "{cluster_id}" "{url_path}" "{username}" "{password}" under Zato "{conn_name}"')
@util.obtain_values
def given_i_store_zato_info_under_conn_name(ctx, cluster_id, url_path, username, password, conn_name):
ctx.zato.user_ctx[conn_name] = {
'cluster_id': cluster_id,
'url_path': url_path,
'username': username,
'password': password
}
# ################################################################################################################################
@when('I upload a Zato service from "{module_path}" to "{conn_name}"')
@util.obtain_values
def when_i_upload_a_zato_service_from_path_to_conn_details(ctx, module_path, conn_name):
    with open(module_path, 'rb') as module:
        service_code = b64encode(module.read()).decode('ascii')
    payload = json.dumps({
        'cluster_id': conn_name['cluster_id'], 'payload': service_code,
        'payload_name': split(module_path)[-1]
    }, ensure_ascii=False)
    # POST rather than GET - the request carries a body with the service to upload
    response = requests.post(conn_name['url_path'], auth=(conn_name['username'], conn_name['password']), data=payload)
assert response.status_code == OK
# #################################################################################################################################
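# Illustrative usage in a feature file (address and credentials are hypothetical):
#
#   Given I store "1" "http://localhost:11223/zato/service/upload" "admin" "secret" under Zato "conn"
#   When I upload a Zato service from "./my_service.py" to "#conn"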
@given('I use Zato WSX')
def given_i_use_zato_wsx(ctx):
ctx.zato.zato_channel_type = CHANNEL_TYPE.WEB_SOCKETS
# #################################################################################################################################
@given('I connect to a Zato WSX channel without credentials')
def given_i_connect_to_zato_wsx_without_credentials(ctx):
ctx.zato.zato_wsx_username = ''
ctx.zato.zato_wsx_secret = ''
def on_request_from_zato(msg):
pass
config = WSXConfig()
config.client_name = 'zato-apitest'
config.client_id = '{}.{}'.format(config.client_name, datetime.utcnow().isoformat())
config.address = '{}{}'.format(ctx.zato.request.address, ctx.zato.request.url_path)
config.username = ctx.zato.zato_wsx_username
config.secret = ctx.zato.zato_wsx_secret
config.on_request_callback = on_request_from_zato
client = WSXClient(config)
client.run()
if not client.is_authenticated:
raise Exception('Client `{}` could not authenticate with {} (Incorrect credentials? Server not running?)'.format(
config.username, config.address))
ctx.zato.wsx_client = client
# #################################################################################################################################
@given('I close WSX connection')
def given_i_close_wsx_connection(ctx):
ctx.zato.wsx_client.stop()
# #################################################################################################################################
@then('WSX client is authenticated')
def then_wsx_client_is_authenticated(ctx):
assert ctx.zato.wsx_client.is_authenticated is True
# #################################################################################################################################
@then('WSX client is disconnected')
def then_wsx_client_is_disconnected(ctx):
assert ctx.zato.wsx_client.conn.client_terminated is True
# ################################################################################################################################
@given('Zato WebSockets service "{service}"')
@util.obtain_values
def given_zato_websockets_service(ctx, service):
ctx.zato.zato_ws_service = service
# ################################################################################################################################
# Part of Zato - Open-Source ESB, SOA, REST, APIs and Cloud Integrations in Python
# https://zato.io
# stdlib
import csv, os, re
from datetime import date, datetime
from time import strptime
# ################################################################################################################################
def str2date(s):
s = s.replace('?', '1')
return date(*strptime(s, "%Y-%m-%d")[0:3])
def str2datetime(s):
return datetime(*strptime(s, "%Y-%m-%dT%H:%M:%S")[0:6])
def str2uni(s):
    # Python 3 reads CSV files as text already - only bytes need decoding
    return s.decode('utf-8') if isinstance(s, bytes) else s
type_convert = {'integer': int,
'float': float,
'numeric': float,
'decimal': float,
'text': str2uni,
'char': str2uni,
'varchar': str2uni,
'date': str2date,
'datetime': str2datetime}
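# For example (illustrative): type_convert['date']('2015-?-?') returns
# datetime.date(2015, 1, 1) - str2date above substitutes '1' for each '?'.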
class CSVFile(object):
    def __init__(self, filename, ctx=None, strip=True):
        if ctx and not os.path.isabs(filename):
            filename = os.path.join(ctx.zato.environment_dir, filename)
        sniffer = csv.Sniffer() # sniff delimiter
        # The csv module needs text mode under Python 3, not 'rb'
        with open(filename, newline='') as sample:
            dialect = sniffer.sniff(sample.readline())
        self.reader = csv.reader(open(filename, newline=''),
            delimiter=dialect.delimiter,
            skipinitialspace=1)
        self.strip = strip
        self.cached_rows = []
        self.index = 0
def __iter__(self):
return self
    def readrow(self):
        row = None
        while not row:
            row = next(self.reader) # reader.next() was Python 2 iterator syntax
        return [x.strip() for x in row]

    def __next__(self):
        if self.cached_rows is None:
            return self.readrow()
        else:
            try:
                newrow = self.cached_rows[self.index]
            except IndexError:
                newrow = self.readrow()
                self.cached_rows.append(newrow)
            self.index += 1
            return newrow

    # Keep the explicit csvf.next() calls elsewhere in this module working
    next = __next__
def rewind(self, index):
self.index = index
def getindex(self):
return self.index
def parse_columns(csvf, flag=None):
if flag:
cols = [re.findall(r"\w+",item)[0] for item in csvf.next()]
else:
colrow = csvf.next()
        cols = ['col%d' % x for x in range(len(colrow))]
return cols
def parse_types(csv, opt=None):
types = []
if opt == 0:
# data types in dedicated line below the header line
for t in csv.next():
items = re.findall(r'\w+', t)
types.append(tuple((element) for element in items[opt:]))
if opt == 1:
csv.rewind(0)
# data types beside column names,
        # values are delimited by a non-alphanumeric character, like:
# id:integer, name-varchar-30, income/float/5
for t in csv.next():
items = re.findall(r'\w+', t)
types.append(tuple((element) for element in items[opt:]))
if opt == 'default':
csv.rewind(0)
for item in csv.next():
types.append(('text',))
csv.rewind(0)
return types
def prepare_table(conn_name, name, coltypes):
declare_columns = []
for col, col_type in coltypes:
size = None
if len(col_type) < 2:
col_type = col_type[0]
else:
col_type, size = col_type
if size:
col_type = '%s(%s)' % (col_type, size)
declare_columns.append('"%s" %s' % (col, col_type))
return 'CREATE TABLE %s (\n%s\n);' % (name, ',\n'.join(declare_columns))
def create_table(conn_name, table_statement):
conn_name.execute(table_statement)
def insert_from_csv(conn_name, csv, table, cols, types=None):
len_cols = len(cols)
insert_stmt = """INSERT INTO %s (%s) VALUES (%s)""" % (table, ','.join('"%s"' % x for x in cols), ','.join(['%s'] * len_cols))
def get_conversion(t):
if isinstance(t, tuple):
t = t[0]
return type_convert[t]
def wrap_into_quotes(values):
return '\'{}\''.format(values)
    if types is not None:
        # list() matters here - a lazy map object would be exhausted after the first row
        converters = list(map(get_conversion, types))
for row in csv:
values = [conv(val) for conv, val in zip(converters, row)]
values.extend([None] * (len_cols - len(values)))
insert = insert_stmt % (tuple((wrap_into_quotes(element)) for element in values))
conn_name.execute(insert)
else:
for row in csv:
values = [val for val in row]
values.extend([None] * (len_cols - len(values)))
insert = insert_stmt % (tuple((wrap_into_quotes(element)) for element in values))
conn_name.execute(insert)
def main(filename, tablename, conn_name, use_header=None, use_types=None):
csvf = CSVFile(filename)
cols = parse_columns(csvf, use_header)
if use_types is not None:
# use_types=default: all columns data type is 'text'
# use_types=0: data types in dedicated line below the header line
# use_types=1: data types beside column names
types = parse_types(csvf, use_types)
if len(cols) != len(types):
raise ValueError("Error: invalid number of column names and types.")
coltypes = zip(cols, types)
table_statement = prepare_table(conn_name, tablename, coltypes)
create_table(conn_name, table_statement)
insert_from_csv(conn_name, csvf, tablename, cols, types)
else:
        insert_from_csv(conn_name, csvf, tablename, cols)
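# Illustrative call (not part of the original module): load people.csv into an
# existing table "people" over a previously opened connection, taking column
# names from the CSV header line and leaving the table definition untouched:
#
#   main('people.csv', 'people', connection, use_header=1)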
# Part of Zato - Open-Source ESB, SOA, REST, APIs and Cloud Integrations in Python
# https://zato.io
# stdlib
import ast
from collections import OrderedDict
# Behave
from behave import given, then
# Cassandra
from cassandra.cluster import Cluster
# Zato
from .. import util
from .insert_csv import CSVFile
# ################################################################################################################################
class TypeConverter(object):
def __init__(self, csv_string):
self.lists_dict = None
self.text = None
self.csv_string = csv_string
self.altered_csv_string = self.parse_lists(self.csv_string)
self.types = self.data_converter(self.altered_csv_string, self.lists_dict)
    def tryeval(self, string):
        try:
            data = ast.literal_eval(string)
        except (ValueError, SyntaxError):
            # Not a Python literal - treat it as a quoted string
            data = '\'{}\''.format(string)
        return data
def find_substring_indices(self, string, sub):
listindex=[]
offset=0
i = string.find(sub, offset)
while i >= 0:
listindex.append(i)
i = string.find(sub, i + 1)
return listindex
    def replace_all(self, text, dic):
        for k, v in dic.items(): # iteritems() was Python 2 only
            text = text.replace(v, k)
        return text
def parse_lists(self, string):
self.lists_dict = OrderedDict()
start = self.find_substring_indices(string, '[')
end = self.find_substring_indices(string, ']')
indices = [(x, y) for x, y in zip(start, end)]
for idx, item in enumerate(indices):
self.lists_dict['~' + str(idx)] = str(string[item[0]:item[1] + 1])
self.text = self.replace_all(string, self.lists_dict)
return self.text
def data_converter(self, csv_string, dic):
types = []
for item in csv_string.split(','):
if not item.strip().startswith('~'):
val = self.tryeval((item.strip()).rstrip())
types.append(val)
else:
s = (item.strip()).rstrip()
val = self.tryeval(dic[s])
types.append(val)
return types
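# Illustrative only - bare strings come back quoted, literals are evaluated and
# bracketed lists survive the temporary ~N masking done by parse_lists:
#
#   TypeConverter('1, hello, [1,2]').types  ->  [1, "'hello'", [1, 2]]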
@given('Cassandra contact points "{contact_points}"')
@util.obtain_values
def given_cassandra_contact_points(ctx, contact_points):
ctx.zato.cassandra_ctx['contact_points'] = [point.strip() for point in contact_points.split(',')]
@given('Cassandra protocol version "{protocol_version}"')
@util.obtain_values
def given_cassandra_protocol_version(ctx, protocol_version):
ctx.zato.cassandra_ctx['protocol_version'] = int(protocol_version)
@given('Cassandra port "{port}"')
@util.obtain_values
def given_cassandra_port(ctx, port):
ctx.zato.cassandra_ctx['port'] = int(port)
@given('I connect to keyspace "{keyspace}" as "{conn_name}"')
@util.obtain_values
def given_i_connect_to_keyspace_as_session(ctx, keyspace, conn_name):
if len(ctx.zato.cassandra_ctx) > 0:
cluster = Cluster(**ctx.zato.cassandra_ctx)
else:
cluster = Cluster()
session = cluster.connect(keyspace)
ctx.zato.user_ctx[conn_name] = session
@given('I store CQL query result "{cql}" under "{name}", using "{conn_name}", idx "{idx}"')
@util.obtain_values
def given_i_store_cql_query_result_under_name(ctx, cql, name, conn_name, idx):
values = []
result = ctx.zato.user_ctx[conn_name].execute(cql)
if result:
result = result[int(idx)]
result = result._asdict()
for k, v in result.items():
            values.append(str(v)) # stringify, so the join below also works for non-text columns
ctx.zato.user_ctx[name] = ';'.join(values)
@given('I insert data from CSV "{filename}" to Cassandra table "{tablename}", using "{conn_name}"')
@util.obtain_values
def i_insert_data_from_csv_file_to_cassandra_table(ctx, filename, tablename, conn_name):
csvf = CSVFile(filename, ctx)
colnames = [item for item in csvf.next()]
statement = "INSERT INTO %s (%s) VALUES (%s)"
for row in csvf:
value_types = TypeConverter(','.join(row)).types
data = (tablename, ','.join('%s' % (s.strip()).rstrip() for s in colnames), ','.join('%s' % v for v in value_types))
insert = statement % data
ctx.zato.user_ctx[conn_name].execute(insert)
# ###############################################################################################################################
@then('I disconnect from Cassandra "{conn_name}"')
@util.obtain_values
def then_i_disconnect_from_cassandra(ctx, conn_name):
ctx.zato.user_ctx[conn_name].shutdown()
@then('I insert "{values}" into "{columns}" of Cassandra table "{tablename}", using "{conn_name}"')
@util.obtain_values
def then_i_insert_values_into_columns_of_cassandra_table(ctx, tablename, values, columns, conn_name):
cols = columns.split(',')
if len(cols) != len(values.split(',')):
raise ValueError("Error: invalid number of column names and values.")
value_types = TypeConverter(values).types
insert = "INSERT INTO %s (%s) VALUES (%s)" % (
tablename, ','.join('%s' % (x.strip()).rstrip() for x in cols), ','.join('%s' % x for x in value_types))
ctx.zato.user_ctx[conn_name].execute(insert)
@then('I update "{columns}" of Cassandra table "{tablename}" set "{values}" filter by "{criterion}", using "{conn_name}"')
@util.obtain_values
def then_i_update_columns_of_cassandra_table_setting_values(ctx, tablename, columns, values, conn_name, criterion):
column_value = util.make_dict(columns, values)
for key in column_value.keys():
insert = "UPDATE %s SET %s='%s' %s" % (tablename, key, column_value[key][0], criterion)
ctx.zato.user_ctx[conn_name].execute(insert)
@then('I delete from Cassandra table "{tablename}" where "{criterion}", using "{conn_name}"')
@util.obtain_values
def then_i_delete_from_cassandra_table(ctx, tablename, conn_name, criterion=None):
if not criterion:
criterion = ''
insert = "DELETE FROM %s %s" % (tablename, criterion)
    ctx.zato.user_ctx[conn_name].execute(insert)
# Part of Zato - Open-Source ESB, SOA, REST, APIs and Cloud Integrations in Python
# https://zato.io
# stdlib
import ast
# Behave
from behave import given, then
# sqlalchemy
from sqlalchemy import create_engine
# Zato
from .. import util
from .insert_csv import main as insert_csv
# ################################################################################################################################
@given('I connect to "{sqlalchemy_url}" as "{conn_name}"')
@util.obtain_values
def given_i_connect_to_sqlalchemy_url_as_conn_name(ctx, sqlalchemy_url, conn_name):
engine = create_engine(sqlalchemy_url)
connection = engine.connect()
ctx.zato.user_ctx[conn_name] = connection
@given('I store "{sql}" query result under "{name}", using "{conn_name}"')
@util.obtain_values
def given_i_store_sql_query_result_under_name(ctx, sql, name, conn_name):
conn = conn_name.execute(sql)
result = conn.fetchall()
if len(result) == 1:
ctx.zato.user_ctx[name] = result[0][0]
else:
ctx.zato.user_ctx[name] = result
@then('SQL "{sql}" is equal to "{value}", using "{conn_name}"')
@util.obtain_values
def then_sql_is_equal_to_value_using_conn_name(ctx, sql, value, conn_name):
conn = conn_name.execute(sql)
actual = conn.fetchall()
expected_value = ast.literal_eval(value)
assert actual == expected_value, 'Value `{}` is not equal to expected `{}`'.format(actual, expected_value)
@then('I disconnect from SQL "{conn_name}"')
@util.obtain_values
def then_i_disconnect_from_sql(ctx, conn_name):
conn_name.close()
# ###############################################################################################################################
@given('I store filter "{colname}" is "{sign}" "{colvalue}" "{operator}" under "{name}"')
def i_store_filter_under_name(ctx, colname, sign, colvalue, name, operator=None):
criterion = util.build_filter(colname, sign, colvalue, operator)
ctx.zato.user_ctx[name] = criterion
@then('I insert "{values}" into "{columns}" of SQL table "{tablename}", using "{conn_name}"')
@util.obtain_values
def then_i_insert_values_into_columns(ctx, tablename, values, columns, conn_name):
if len(columns.split(', ')) != len(values.split(', ')):
raise ValueError("Error: invalid number of column names and values.")
insert = "INSERT INTO %s (%s) VALUES (%s)" % (tablename, columns, util.wrap_into_quotes(values))
conn_name.execute(insert)
return insert
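# For example (illustrative), columns "id, name" with values "1, abc" build:
#
#   INSERT INTO mytable (id, name) VALUES ('1', 'abc')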
@then('I update "{columns}" of SQL table "{tablename}" set "{values}" filter by "{criterion}", using "{conn_name}"')
@util.obtain_values
def then_i_update_columns_setting_values(ctx, tablename, columns, values, conn_name, criterion=None):
if not criterion:
criterion = ''
column_value = util.make_dict(columns, values)
for key in column_value.keys():
insert = "UPDATE %s SET %s='%s' %s" %(tablename, key, column_value[key][0], criterion)
conn_name.execute(insert)
return insert
@then('I delete from SQL table "{tablename}" where "{criterion}", using "{conn_name}"')
@util.obtain_values
def then_i_delete_from_table(ctx, tablename, conn_name, criterion=None):
if not criterion:
criterion = ''
insert = "DELETE FROM %s %s" %(tablename, criterion)
conn_name.execute(insert)
return insert
# This step inserts data from a CSV file into an existing table;
# column names are taken from the header line of the CSV file.
@then('I insert data from csv "{filename}" to SQL table "{tablename}", using "{conn_name}"')
@util.obtain_values
def i_insert_data_from_csv_file(ctx, **kwargs):
insert_csv(use_header=1, **kwargs)
# This step creates a new table and inserts data from a CSV file into it;
# column names are generated automatically as col0, col1, col2 and so on.
@then('I create SQL table "{tablename}" and insert data from csv "{filename}", using "{conn_name}"')
@util.obtain_values
def i_create_table_and_insert_data_from_csv(ctx, **kwargs):
insert_csv(use_types='default', **kwargs)
# This step creates a new table and inserts data from a CSV file into it;
# column names and data types are taken from the header line of the CSV file.
@then('I create SQL table "{tablename}" and insert data from csv "{filename}", \
using "{conn_name}" and names, data types from the header')
@util.obtain_values
def i_create_table_and_insert_data_from_csv_file_using_types_and_header(ctx, **kwargs):
insert_csv(use_header=1, use_types=1, **kwargs)
# This step creates a new table and inserts data from a CSV file into it; column names
# are taken from the header line and data types from the dedicated line below it.
@then('I create SQL table "{tablename}" and insert data from csv "{filename}", using "{conn_name}", \
names from the header and data types from the line below')
@util.obtain_values
def i_create_table_and_insert_data_from_csv_file_using_header(ctx, **kwargs):
    insert_csv(use_header=1, use_types=0, **kwargs)
# Part of Zato - Open-Source ESB, SOA, REST, APIs and Cloud Integrations in Python
# https://zato.io
# stdlib
from ast import literal_eval
# base32_crockford
from base32_crockford import decode as crockford_decode
# Behave
from behave import given, then
# datadiff
from datadiff.tools import assert_equals
# jsonpointer
from jsonpointer import resolve_pointer as get_pointer, set_pointer as _set_pointer
# stdlib
import json
import uuid

# Zato
from .common import needs_json
from .. import util
from .. import INVALID
# Integer types for testing 'path {path} is any integer'
try:
int_types = (int, long)
except:
int_types = (int,) # python 3 doesn't have the long type
# ################################################################################################################################
def set_pointer(ctx, path, value):
if 'data_impl' not in ctx.zato.request:
raise ValueError('path called but no request set')
_set_pointer(ctx.zato.request.data_impl, path, value)
# ################################################################################################################################
@given('path "{path}" in request is "{value}"')
@util.obtain_values
def given_json_pointer_in_request_is(ctx, path, value):
set_pointer(ctx, path, value)
@given('path "{path}" in request is "{value}" (with literal_eval)')
@util.obtain_values
def given_json_pointer_in_request_is_with_literal_eval(ctx, path, value):
set_pointer(ctx, path, literal_eval(value))
@given('path "{path}" in request is a UUID')
@util.obtain_values
def given_json_pointer_in_request_is_a_uuid(ctx, path):
set_pointer(ctx, path, uuid.uuid4().hex)
@given('path "{path}" in request is an integer "{value}"')
@util.obtain_values
def given_json_pointer_in_request_is_an_integer(ctx, path, value):
set_pointer(ctx, path, int(value))
@given('path "{path}" in request is a float "{value}"')
@util.obtain_values
def given_json_pointer_in_request_is_a_float(ctx, path, value):
set_pointer(ctx, path, float(value))
@given('path "{path}" in request is a list "{value}"')
@util.obtain_values
def given_json_pointer_in_request_is_a_list(ctx, path, value):
set_pointer(ctx, path, util.parse_list(value))
@given('path "{path}" in request is a random string')
@util.obtain_values
def given_json_pointer_in_request_is_a_random_string(ctx, path):
set_pointer(ctx, path, util.rand_string())
@given('path "{path}" in request is a random integer')
@util.obtain_values
def given_json_pointer_in_request_is_a_random_integer(ctx, path):
set_pointer(ctx, path, util.rand_int())
@given('path "{path}" in request is a random float')
@util.obtain_values
def given_json_pointer_in_request_is_a_random_float(ctx, path):
set_pointer(ctx, path, util.rand_float())
@given('path "{path}" in request is one of "{value}"')
@util.obtain_values
def given_json_pointer_in_request_is_one_of(ctx, path, value):
set_pointer(ctx, path, util.any_from_list(value))
@given('path "{path}" in request is True')
@util.obtain_values
def given_json_pointer_in_request_is_true(ctx, path):
set_pointer(ctx, path, True)
@given('path "{path}" in request is False')
@util.obtain_values
def given_json_pointer_in_request_is_false(ctx, path): # renamed - was shadowing the True step above
    set_pointer(ctx, path, False)
# ################################################################################################################################
@given('path "{path}" in request is a random date "{format}"')
@util.obtain_values
def given_json_pointer_is_rand_date(ctx, path, format):
set_pointer(ctx, path, util.rand_date(ctx.zato.date_formats[format]))
@given('path "{path}" in request is now "{format}"')
@util.obtain_values
def given_json_pointer_is_now(ctx, path, format):
set_pointer(ctx, path, util.now(format=ctx.zato.date_formats[format]))
@given('path "{path}" in request is UTC now "{format}"')
@util.obtain_values
def given_json_pointer_is_utc_now(ctx, path, format):
set_pointer(ctx, path, util.utcnow(format=ctx.zato.date_formats[format]))
@given('path "{path}" in request is UTC now "{format}" minus one hour')
@util.obtain_values
def given_json_pointer_is_utc_now_minus_one_hour(ctx, path, format):
set_pointer(ctx, path, util.utcnow_minus_hour(format=ctx.zato.date_formats[format]))
@given('path "{path}" in request is a random date after "{date_start}" "{format}"')
@util.obtain_values
def given_json_pointer_is_rand_date_after(ctx, path, date_start, format):
set_pointer(ctx, path, util.date_after(date_start, ctx.zato.date_formats[format]))
@given('path "{path}" in request is a random date before "{date_end}" "{format}"')
@util.obtain_values
def given_json_pointer_is_rand_date_before(ctx, path, date_end, format):
set_pointer(ctx, path, util.date_before(date_end, ctx.zato.date_formats[format]))
@given('path "{path}" in request is a random date between "{date_start}" and "{date_end}" "{format}"')
@util.obtain_values
def given_json_pointer_is_rand_date_between(ctx, path, date_start, date_end, format):
set_pointer(ctx, path, util.date_between(date_start, date_end, ctx.zato.date_formats[format]))
# ################################################################################################################################
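# Illustrative feature lines (not from the original suite) exercising the
# request- and response-side JSON Pointer steps defined in this module:
#
#   Given path "/customer/id" in request is a UUID
#   Given path "/customer/joined" in request is UTC now "default"
#   Then path "/customer/active" is True
#   Then path "/customer/tags" is not an empty list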
def assert_value(ctx, path, value, wrapper=None):
if 'data_impl' not in ctx.zato.response:
raise ValueError('Assertion called but no format set')
value = wrapper(value) if wrapper else value
actual = get_pointer(ctx.zato.response.data_impl, path)
assert_equals(value, actual)
return True
@then('path "{path}" is "{value}"')
@util.obtain_values
def then_json_pointer_is(ctx, path, value):
return assert_value(ctx, path, value)
@then('path "{path}" is "{value}" (with literal_eval)')
@util.obtain_values
def then_json_pointer_is_with_literal_eval(ctx, path, value):
return assert_value(ctx, path, literal_eval(value))
@then('path "{path}" is JSON "{value}"')
@needs_json
@util.obtain_values
def then_json_pointer_is_json(ctx, path, value):
return assert_value(ctx, path, json.loads(value))
@then('path "{path}" is JSON equal to that from "{value}"')
@needs_json
@util.obtain_values
def then_json_pointer_is_json_equal_to_that_from(ctx, path, value):
return assert_value(ctx, path, json.loads(util.get_data(ctx, 'response', value)))
@then('path "{path}" is an integer "{value}"')
@util.obtain_values
def then_json_pointer_is_an_integer(ctx, path, value):
return assert_value(ctx, path, value, int)
@then('path "{path}" is any integer')
@util.obtain_values
def then_json_pointer_is_any_integer(ctx, path):
actual = get_pointer(ctx.zato.response.data_impl, path)
assert isinstance(actual, int_types), \
'Expected an integer in {}, got a `{}`'.format(path, type(actual))
return True
@then('path "{path}" is a float "{value}"')
@util.obtain_values
def then_json_pointer_is_a_float(ctx, path, value):
return assert_value(ctx, path, value, float)
@then('path "{path}" is any float')
@util.obtain_values
def then_json_pointer_is_any_float(ctx, path):
actual = get_pointer(ctx.zato.response.data_impl, path)
assert isinstance(actual, float), \
'Expected a float in {}, got a `{}`'.format(path, type(actual))
return True
@then('path "{path}" is any bool')
@util.obtain_values
def then_json_pointer_is_any_bool(ctx, path):
actual = get_pointer(ctx.zato.response.data_impl, path)
assert isinstance(actual, bool), \
'Expected a bool in {}, got a `{}`'.format(path, type(actual))
return True
@then('path "{path}" is a list "{value}"')
@util.obtain_values
def then_json_pointer_is_a_list(ctx, path, value):
return assert_value(ctx, path, value, util.parse_list)
@then('path "{path}" is empty')
@util.obtain_values
def then_json_pointer_is_empty(ctx, path):
return assert_value(ctx, path, '')
@then('path "{path}" is not empty')
@util.obtain_values
def then_json_pointer_isnt_empty(ctx, path):
actual = get_pointer(ctx.zato.response.data_impl, path, INVALID)
assert actual != INVALID, 'Path `{}` Should not be empty'.format(path)
@then('path "{path}" is not an empty list')
@util.obtain_values
def then_json_pointer_isnt_an_empty_list(ctx, path):
actual = get_pointer(ctx.zato.response.data_impl, path, [])
assert isinstance(actual, list), 'Path `{}` should be a list'.format(path)
assert actual, 'Path `{}` should not be an empty list'.format(path)
@then('path "{path}" is one of "{value}"')
@util.obtain_values
def then_json_pointer_is_one_of(ctx, path, value):
actual = get_pointer(ctx.zato.response.data_impl, path)
value = util.parse_list(value)
assert actual in value, 'Expected for `{}` ({}) to be in `{}`'.format(actual, path, value)
@then('path "{path}" is not one of "{value}"')
@util.obtain_values
def then_json_pointer_isnt_one_of(ctx, path, value):
actual = get_pointer(ctx.zato.response.data_impl, path)
value = util.parse_list(value)
assert actual not in value, 'Expected for `{}` ({}) not to be in `{}`'.format(actual, path, value)
@then('path "{path}" is a BASE32 Crockford, checksum "{checksum}"')
@util.obtain_values
def then_json_pointer_is_a_base32_crockford(ctx, path, checksum):
actual = get_pointer(ctx.zato.response.data_impl, path)
crockford_decode(actual.replace('-', ''), checksum.lower() == 'true')
# ###############################################################################################################################
@then('path "{path}" is True')
@util.obtain_values
def then_json_pointer_is_true(ctx, path):
return assert_value(ctx, path, True)
@then('path "{path}" is False')
@util.obtain_values
def then_json_pointer_is_false(ctx, path):
return assert_value(ctx, path, False)
@then('path "{path}" is null')
@util.obtain_values
def then_json_pointer_is_null(ctx, path):
return assert_value(ctx, path, None)
@then('path "{path}" is an empty list')
@util.obtain_values
def then_json_pointer_is_an_empty_list(ctx, path):
return assert_value(ctx, path, [])
@then('path "{path}" is an empty dict')
@util.obtain_values
def then_json_pointer_is_an_empty_dict(ctx, path):
return assert_value(ctx, path, {})
@then('path "{path}" is not a string "{value}"')
@util.obtain_values
def then_json_pointer_isnt_a_string(ctx, path, value):
actual = get_pointer(ctx.zato.response.data_impl, path)
assert actual != value, 'Expected `{}` != `{}`'.format(actual, value)
@then('JSON response exists')
@util.obtain_values
def then_json_response_exists(ctx):
assert ctx.zato.response.data_impl
@then('JSON response does not exist')
@util.obtain_values
def then_json_response_doesnt_exist(ctx):
assert not ctx.zato.response.data_impl
@then('path "{path}" starts with "{value}"')
@util.obtain_values
def then_json_pointer_starts_with(ctx, path, value):
actual = get_pointer(ctx.zato.response.data_impl, path)
assert actual.startswith(value), 'Expected for `{}` to start with `{}`'.format(actual, value)
@then('path "{path}" starts with any of "{value}"')
@util.obtain_values
def then_json_pointer_starts_with_any_of(ctx, path, value):
actual = get_pointer(ctx.zato.response.data_impl, path)
value = util.parse_list(value)
for elem in value:
if actual.startswith(elem):
break
else:
raise AssertionError('Path `{}` ({}) does not start with any of `{}`'.format(path, actual, value))
@then('path "{path}" ends with "{value}"')
@util.obtain_values
def then_json_pointer_ends_with(ctx, path, value):
actual = get_pointer(ctx.zato.response.data_impl, path)
assert actual.endswith(value), 'Expected for `{}` to end with `{}`'.format(actual, value)
# ###############################################################################################################################
def _then_json_pointer_contains(ctx, path, expected):
actual_list = get_pointer(ctx.zato.response.data_impl, path)
for item in actual_list:
try:
assert_equals(item, expected)
except AssertionError:
pass
else:
return True
else:
raise AssertionError('Expected data `{}` not in `{}`'.format(expected, actual_list))
@then('path "{path}" contains "{value}"')
@util.obtain_values
def then_json_pointer_contains(ctx, path, value):
return _then_json_pointer_contains(ctx, path, json.loads(value))
@then('path "{path}" contains data from "{value}"')
@util.obtain_values
def then_json_pointer_contains_data_from(ctx, path, value):
return _then_json_pointer_contains(ctx, path, json.loads(util.get_data(ctx, 'response', value)))
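# A usage sketch of the steps above in a feature file (hypothetical paths and values):
#
#   Then path "/action/result" is one of "OK, WARN"
#   And path "/action/items" is an empty list
#   And path "/action/ids" contains "123"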
# ###############################################################################################################################
# End of file: src/zato/apitest/steps/json.py (package: zato-apitest)
# stdlib
import logging
from base64 import b64decode, b64encode
from datetime import datetime
from http.client import OK
from json import dumps, loads
from traceback import format_exc
# Bunch
from bunch import bunchify
# lxml
from lxml import objectify
# requests
import requests
# urllib3 - need for requests
from urllib3.util.retry import Retry
# Python 2/3 compatibility
from builtins import str as text
from six import PY3
# Zato
from zato.common.api import BROKER, ZATO_NOT_GIVEN, ZATO_OK
from zato.common.exception import ZatoException
from zato.common.xml_ import soap_data_path, soap_data_xpath, soap_fault_xpath, zato_data_path, \
zato_data_xpath, zato_details_xpath, zato_result_xpath
from zato.common.log_message import CID_LENGTH
from zato.common.odb.model import Server
# Set max_cid_repr to CID_NO_CLIP if it's desired to return the whole of a CID
# in a response's __repr__ method.
CID_NO_CLIP = int(CID_LENGTH / 2)
DEFAULT_MAX_RESPONSE_REPR = 2500
DEFAULT_MAX_CID_REPR = 5
mod_logger = logging.getLogger(__name__)
# ################################################################################################################################
# Version
# ################################################################################################################################
version = '3.2.1'
# ################################################################################################################################
# ################################################################################################################################
def default_json_handler(value):
if isinstance(value, datetime):
return value.isoformat()
elif isinstance(value, bytes):
return value.decode('utf8')
raise TypeError('Cannot serialize `{}`'.format(value))
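# For instance, dumps({'at': datetime(2021, 1, 1)}, default=default_json_handler)
# produces '{"at": "2021-01-01T00:00:00"}' - non-JSON types are converted here
# instead of raising a TypeError inside the json module.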
# ################################################################################################################################
class _APIResponse(object):
""" A class to represent data returned by API services.
"""
def __init__(self, inner, _OK=OK):
self.inner = inner
self.is_ok = self.inner.status_code == _OK
self.cid = self.inner.headers.get('x-zato-cid', '(None)')
if self.is_ok:
self.data = loads(self.inner.text)
self.details = None
else:
self.data = ''
self.details = self.inner.text
# ################################################################################################################################
class APIClient(object):
def __init__(self, address, username, password, path='/zato/api/invoke/{}', tls_verify=None, tls_cert=None):
self.address = address
self.username = username
self.password = password
self.path = path
self.tls_verify = tls_verify
self.tls_cert = tls_cert
self.session = requests.Session()
self.session.auth = (self.username, self.password)
self.session.verify = self.tls_verify
self.session.cert = self.tls_cert
def _invoke(self, verb, service_name, request=None):
func = getattr(self.session, verb)
url_path = self.path.format(service_name)
full_address = '{}{}'.format(self.address, url_path)
response = func(full_address, verify=self.tls_verify, data=dumps(request, default=default_json_handler))
return _APIResponse(response)
def invoke(self, *args, **kwargs):
return self._invoke('post', *args, **kwargs)
def get(self, *args, **kwargs):
return self._invoke('get', *args, **kwargs)
def post(self, *args, **kwargs):
return self._invoke('post', *args, **kwargs)
def patch(self, *args, **kwargs):
return self._invoke('patch', *args, **kwargs)
def put(self, *args, **kwargs):
return self._invoke('put', *args, **kwargs)
def delete(self, *args, **kwargs):
return self._invoke('delete', *args, **kwargs)
def by_verb(self, verb, *args, **kwargs):
return self._invoke(verb, *args, **kwargs)
# ################################################################################################################################
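# A minimal usage sketch of APIClient, assuming a Zato server listening on
# localhost:11223 and hypothetical credentials:
#
#   client = APIClient('http://localhost:11223', 'api-user', 'api-password')
#   response = client.invoke('my.service', {'customer_id': 123})
#   if response.is_ok:
#       print(response.data)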
# Clients below are preserved only for compatibility with pre-3.0 environments and will be removed at some point
# ################################################################################################################################
# ################################################################################################################################
class _Response(object):
""" A base class for all specific response types client may return.
"""
def __init__(self, inner, to_bunch, max_response_repr, max_cid_repr, logger, output_repeated=False):
        self.inner = inner # Actual response from the requests module
self.to_bunch = to_bunch
self.max_response_repr = max_response_repr
self.max_cid_repr = max_cid_repr
self.logger = logger
self.sio_result = None
self.ok = False
self.has_data = False
self.output_repeated = output_repeated
self.data = [] if self.output_repeated else None
self.meta = {}
self.cid = self.inner.headers.get('x-zato-cid', '(None)')
self.details = None
self.init()
def __repr__(self):
if self.max_cid_repr >= CID_NO_CLIP:
cid = '[{}]'.format(self.cid)
else:
cid = '[{}..{}]'.format(self.cid[:self.max_cid_repr], self.cid[-self.max_cid_repr:])
return '<{} at {} ok:[{}] inner.status_code:[{}] cid:{}, inner.text:[{}]>'.format(
self.__class__.__name__, hex(id(self)), self.ok, self.inner.status_code,
cid, self.inner.text[:self.max_response_repr])
def __iter__(self):
return iter(self.data)
def init(self):
raise NotImplementedError('Must be defined by subclasses')
# ################################################################################################################################
class _StructuredResponse(_Response):
""" Any non-raw and non-SIO response.
"""
def init(self):
if self.set_data():
self.set_has_data()
self.set_ok()
def _set_data_details(self):
try:
self.data = self.load_func(self.inner.text.encode('utf-8'))
except Exception:
self.details = format_exc()
else:
return True
def load_func(self):
raise NotImplementedError('Must be defined by subclasses')
def set_data(self):
return self._set_data_details()
def set_has_data(self):
raise NotImplementedError('Must be defined by subclasses')
def set_ok(self):
self.ok = self.inner.ok
class JSONResponse(_StructuredResponse):
""" Stores responses from JSON services.
"""
def load_func(self, data):
return loads(data)
def set_has_data(self):
self.has_data = bool(self.data)
class XMLResponse(_StructuredResponse):
""" Stores responses from XML services.
"""
def load_func(self, data):
return objectify.fromstring(data)
def set_has_data(self):
self.has_data = self.data is not None
class SOAPResponse(XMLResponse):
""" Stores responses from SOAP services.
"""
path, xpath = soap_data_path, soap_data_xpath
def init(self):
if self.set_data():
self.set_has_data()
def set_data(self):
if self._set_data_details():
data = self.xpath(self.data)
if not data:
self.details = 'No {} in SOAP response'.format(self.path)
else:
if soap_fault_xpath(data[0]):
self.details = data[0]
else:
self.data = data[0]
self.ok = True
return True
# ################################################################################################################################
class JSONSIOResponse(_Response):
""" Stores responses from JSON SIO services.
"""
def init(self, _non_data=('zato_env', '_meta')):
try:
json = loads(self.inner.text)
except ValueError:
msg = 'inner.status_code `{}`, JSON parsing error `{}`'.format(self.inner.status_code, self.inner.text)
self.logger.error(msg)
raise ValueError(msg)
if 'zato_env' in json:
has_zato_env = True
self.details = json['zato_env']['details']
self.sio_result = json['zato_env']['result']
self.ok = self.sio_result == ZATO_OK
else:
has_zato_env = False
self.details = self.inner.text
self.ok = self.inner.ok
if self.ok:
value = None
if has_zato_env:
# There will be two keys, zato_env and the actual payload
for key, _value in json.items():
if key not in _non_data:
value = _value
break
else:
value = json
if value:
if self.set_data(value, has_zato_env):
self.has_data = True
if self.to_bunch:
self.data = bunchify(self.data)
def set_data(self, payload, _ignored):
self.data = payload
return True
class SOAPSIOResponse(_Response):
""" Stores responses from SOAP SIO services.
"""
def init(self):
response = objectify.fromstring(self.inner.text)
soap_fault = soap_fault_xpath(response)
if soap_fault:
self.details = soap_fault[0]
else:
zato_data = zato_data_xpath(response)
if not zato_data:
msg = 'Server did not send a business payload ({} element is missing), soap_response:[{}]'.format(
zato_data_path, self.inner.text)
self.details = msg
            # We have a payload, but check whether the server reported any errors
zato_result = zato_result_xpath(response)
if zato_result[0] == ZATO_OK:
self.ok = True
self.data = zato_data[0]
self.has_data = True
else:
self.details = zato_details_xpath(response)[0]
class ServiceInvokeResponse(JSONSIOResponse):
""" Stores responses from SIO services invoked through the zato.service.invoke service.
"""
def __init__(self, *args, **kwargs):
self.inner_service_response = None
super(ServiceInvokeResponse, self).__init__(*args, **kwargs)
def _handle_response_with_meta(self, data):
if isinstance(data, dict):
self.meta = data.get('_meta')
data_keys = list(data.keys())
if len(data_keys) == 1:
data_key = data_keys[0]
if isinstance(data_key, text) and data_key.startswith('zato'):
self.data = data[data_key]
else:
self.data = data
else:
self.data = data
else:
self.data = data
def set_data(self, payload, has_zato_env):
if has_zato_env:
payload = b64decode(payload)
payload = payload.decode('utf8') if isinstance(payload, bytes) else payload
self.inner_service_response = payload
try:
data = loads(self.inner_service_response)
except ValueError:
# Not a JSON response
self.data = self.inner_service_response
else:
self._handle_response_with_meta(data)
else:
try:
data = loads(payload)
except ValueError:
# Not a JSON response
self.data = payload
else:
self._handle_response_with_meta(data)
return True
# ################################################################################################################################
class RawDataResponse(_Response):
""" Stores responses from services that weren't invoked using any particular
data format
"""
def init(self):
self.ok = self.inner.ok
if self.set_data():
self.has_data = True
def set_data(self):
if self.ok:
self.data = self.inner.text
else:
self.details = self.inner.text
return self.data and len(self.data) > 0
# ################################################################################################################################
class _Client(object):
""" A base class of convenience clients for invoking Zato services from other Python applications.
"""
def __init__(self, address, path, auth=None, session=None, to_bunch=False,
max_response_repr=DEFAULT_MAX_RESPONSE_REPR, max_cid_repr=DEFAULT_MAX_CID_REPR, logger=None,
tls_verify=True):
self.address = address
self.service_address = '{}{}'.format(address, path)
self.session = session or requests.session()
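        # Retry failed connection attempts a few times with a small backoff,
        # applying the policy to each transport adapter mounted on the session.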
for adapter in self.session.adapters.values():
retry = Retry(connect=4, backoff_factor=0.1)
adapter.max_retries = retry
self.to_bunch = to_bunch
self.max_response_repr = max_response_repr
self.max_cid_repr = max_cid_repr
self.logger = logger or mod_logger
self.tls_verify = tls_verify
self.has_debug = self.logger.isEnabledFor(logging.DEBUG)
if not self.session.auth:
self.session.auth = auth
def inner_invoke(self, request, response_class, is_async, headers, output_repeated=False):
""" Actually invokes a service through HTTP and returns its response.
"""
raw_response = self.session.post(self.service_address, request, headers=headers, verify=self.tls_verify)
response = response_class(
raw_response, self.to_bunch, self.max_response_repr,
self.max_cid_repr, self.logger, output_repeated)
if isinstance(request, (bytes, bytearray)):
request = request.decode('utf-8')
if self.has_debug:
msg = 'request:[%s]\nresponse_class:[%s]\nis_async:[%s]\nheaders:[%s]\n text:[%s]\ndata:[%s]'
self.logger.debug(msg, request, response_class, is_async, headers, raw_response.text, response.data)
return response
def invoke(self, request, response_class, is_async=False, headers=None, output_repeated=False):
""" Input parameters are like when invoking a service directly.
"""
headers = headers or {}
return self.inner_invoke(request, response_class, is_async, headers)
# ################################################################################################################################
class _JSONClient(_Client):
""" Base class for all JSON clients.
"""
response_class = None
def invoke(self, payload='', headers=None, to_json=True):
if to_json:
payload = dumps(payload, default=default_json_handler)
return super(_JSONClient, self).invoke(payload, self.response_class, headers=headers)
class JSONClient(_JSONClient):
""" Client for services that accept JSON input.
"""
response_class = JSONResponse
# ################################################################################################################################
class JSONSIOClient(_JSONClient):
""" Client for services that accept Simple IO (SIO) in JSON.
"""
response_class = JSONSIOResponse
class SOAPSIOClient(_Client):
""" Client for services that accept Simple IO (SIO) in SOAP.
"""
def invoke(self, soap_action, payload=None, headers=None):
headers = headers or {}
headers['SOAPAction'] = soap_action
return super(SOAPSIOClient, self).invoke(payload, SOAPSIOResponse, headers=headers)
class AnyServiceInvoker(_Client):
""" Uses zato.service.invoke to invoke other services. The services being invoked
don't have to be available through any channels, it suffices for zato.service.invoke
to be exposed over HTTP.
"""
def _invoke(self, name=None, payload='', headers=None, channel='invoke', data_format='json',
transport=None, is_async=False, expiration=BROKER.DEFAULT_EXPIRATION, id=None,
to_json=True, output_repeated=ZATO_NOT_GIVEN, pid=None, all_pids=False, timeout=None,
skip_response_elem=True, **kwargs):
if not(name or id):
raise ZatoException(msg='Either name or id must be provided')
if name and output_repeated == ZATO_NOT_GIVEN:
output_repeated = name.lower().endswith('list')
if to_json:
payload = dumps(payload, default=default_json_handler)
id_, value = ('name', name) if name else ('id', id)
request = {
id_: value,
'payload': b64encode(payload.encode('utf8') if PY3 else payload),
'channel': channel,
'data_format': data_format,
'transport': transport,
'is_async': is_async,
          'expiration': expiration,
          'pid': pid,
'all_pids': all_pids,
'timeout': timeout,
'skip_response_elem': skip_response_elem,
}
return super(AnyServiceInvoker, self).invoke(dumps(request, default=default_json_handler),
ServiceInvokeResponse, is_async, headers, output_repeated)
def invoke(self, *args, **kwargs):
return self._invoke(is_async=False, *args, **kwargs)
def invoke_async(self, *args, **kwargs):
return self._invoke(is_async=True, *args, **kwargs)
# ################################################################################################################################
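# A usage sketch of AnyServiceInvoker, assuming a server that exposes zato.service.invoke
# over /zato/admin/invoke and hypothetical credentials:
#
#   invoker = AnyServiceInvoker('http://localhost:17010', '/zato/admin/invoke', ('admin', 'password'))
#   response = invoker.invoke('zato.ping')
#   if response.ok:
#       print(response.data)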
class XMLClient(_Client):
def invoke(self, payload='', headers=None):
return super(XMLClient, self).invoke(payload, XMLResponse, headers=headers)
class SOAPClient(_Client):
def invoke(self, soap_action, payload='', headers=None):
headers = headers or {}
headers['SOAPAction'] = soap_action
return super(SOAPClient, self).invoke(payload, SOAPResponse, headers=headers)
# ################################################################################################################################
class RawDataClient(_Client):
""" Client which doesn't process requests before passing them into a service.
    Likewise, no parsing of the response is performed.
"""
def invoke(self, payload='', headers=None):
return super(RawDataClient, self).invoke(payload, RawDataResponse, headers=headers)
# ################################################################################################################################
def get_client_from_server_conf(server_dir, client_auth_func, get_config_func, server_url=None, stdin_data=None):
""" Returns a Zato client built out of data found in a given server's config files.
"""
# stdlib
import os
# To avoid circular references
from zato.common.crypto.api import ServerCryptoManager
from zato.common.ext.configobj_ import ConfigObj
from zato.common.util.api import get_odb_session_from_server_config, get_repo_dir_from_component_dir
from zato.common.util.cli import read_stdin_data
class ZatoClient(AnyServiceInvoker):
def __init__(self, *args, **kwargs):
super(ZatoClient, self).__init__(*args, **kwargs)
self.cluster_id = None
self.odb_session = None
repo_location = get_repo_dir_from_component_dir(server_dir)
stdin_data = stdin_data or read_stdin_data()
crypto_manager = ServerCryptoManager.from_repo_dir(None, repo_location, stdin_data=stdin_data)
secrets_config = ConfigObj(os.path.join(repo_location, 'secrets.conf'), use_zato=False)
secrets_conf = get_config_func(
repo_location, 'secrets.conf', needs_user_config=False,
crypto_manager=crypto_manager, secrets_conf=secrets_config)
config = get_config_func(repo_location, 'server.conf', crypto_manager=crypto_manager, secrets_conf=secrets_conf)
server_url = server_url if server_url else config.main.gunicorn_bind
client_auth = client_auth_func(config, repo_location, crypto_manager, False)
client = ZatoClient('http://{}'.format(server_url), '/zato/admin/invoke', client_auth, max_response_repr=15000)
session = get_odb_session_from_server_config(config, None, False)
client.cluster_id = session.query(Server).\
filter(Server.token == config.main.token).\
one().cluster_id
client.odb_session = session
return client
# ################################################################################################################################
# ###############################################################################################################################
# End of file: src/zato/client/__init__.py (package: zato-client)
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato - Cython
from zato.simpleio import BoolConfig, IntConfig, SecretConfig, SIOServerConfig
# Python 2/3 compatibility
from past.builtins import basestring, unicode
# ################################################################################################################################
if 0:
from zato.cy.simpleio import BoolConfig as PyBoolConfig
from zato.cy.simpleio import IntConfig as PyIntConfig
from zato.cy.simpleio import SecretConfig as PySecretConfig
from zato.cy.simpleio import SIOServerConfig as PySIOServerConfig
PyBoolConfig = PyBoolConfig
PyIntConfig = PyIntConfig
PySecretConfig = PySecretConfig
PySIOServerConfig = PySIOServerConfig
# ################################################################################################################################
# ################################################################################################################################
def get_bytes_to_str_encoding():
return 'utf8'
# ################################################################################################################################
default_input_required_name = 'input_required'
default_input_optional_name = 'input_optional'
default_output_required_name = 'output_required'
default_output_optional_name = 'output_optional'
default_value = 'default_value'
default_input_value = 'default_input_value'
default_output_value = 'default_output_value'
default_response_elem = 'response'
default_skip_empty_keys = False
default_skip_empty_request_keys = False
default_skip_empty_response_keys = False
default_prefix_as_is = 'a'
default_prefix_bool = 'b'
default_prefix_csv = 'c'
default_prefix_date = 'date'
default_prefix_date_time = 'dt'
default_prefix_dict = 'd'
default_prefix_dict_list = 'dl'
default_prefix_float = 'f'
default_prefix_int = 'i'
default_prefix_list = 'l'
default_prefix_text = 't'
default_prefix_uuid = 'u'
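# Note that the double braces around bytes_to_str_encoding below are intentional - the f-string
# renders them as a literal {bytes_to_str_encoding} placeholder that is substituted later on.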
simple_io_conf_contents = f"""
[bool]
exact=
prefix=by_, has_, is_, may_, needs_, should_
suffix=
[int]
exact=id
prefix=
suffix=_count, _id, _size, _size_min, _size_max, _timeout
[secret]
exact=auth_data, auth_token, password, password1, password2, secret_key, tls_pem_passphrase, token
prefix=
suffix=
[bytes_to_str]
encoding={{bytes_to_str_encoding}}
[default]
default_value=
default_input_value=
default_output_value=
response_elem=response
skip_empty_keys = False
skip_empty_request_keys = False
skip_empty_response_keys = False
# Configuration below is reserved for future use
input_required_name = "input_required"
input_optional_name = "input_optional"
output_required_name = "output_required"
output_optional_name = "output_optional"
prefix_as_is = {default_prefix_as_is}
prefix_bool = {default_prefix_bool}
prefix_csv = {default_prefix_csv}
prefix_date = {default_prefix_date}
prefix_date_time = {default_prefix_date_time}
prefix_dict = {default_prefix_dict}
prefix_dict_list = {default_prefix_dict_list}
prefix_float = {default_prefix_float}
prefix_int = {default_prefix_int}
prefix_list = {default_prefix_list}
prefix_text = {default_prefix_text}
prefix_uuid = {default_prefix_uuid}
""".lstrip()
# ################################################################################################################################
def c18n_sio_fs_config(sio_fs_config):
for name in 'bool', 'int', 'secret':
config_entry = sio_fs_config[name]
exact = config_entry.get('exact') or []
exact = exact if isinstance(exact, list) else [exact]
prefix = config_entry.get('prefix') or []
prefix = prefix if isinstance(prefix, list) else [prefix]
suffix = config_entry.get('suffix') or []
suffix = suffix if isinstance(suffix, list) else [suffix]
config_entry.exact = set(exact)
config_entry.prefix = set(prefix)
config_entry.suffix = set(suffix)
for key, value in sio_fs_config.get('default', {}).items():
if isinstance(value, basestring):
if not isinstance(value, unicode):
value = value.decode('utf8')
sio_fs_config.default[key] = value
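# For instance, given sio_fs_config.bool.prefix == 'is_', the function above turns it
# into the one-element set {'is_'}, and decodes any bytes found under [default] to unicode.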
# ################################################################################################################################
def get_sio_server_config(sio_fs_config):
c18n_sio_fs_config(sio_fs_config)
sio_server_config = SIOServerConfig() # type: PySIOServerConfig
bool_config = BoolConfig() # type: PyBoolConfig
bool_config.exact = sio_fs_config.bool.exact
bool_config.prefixes = sio_fs_config.bool.prefix
bool_config.suffixes = sio_fs_config.bool.suffix
int_config = IntConfig() # type: PyIntConfig
int_config.exact = sio_fs_config.int.exact
int_config.prefixes = sio_fs_config.int.prefix
int_config.suffixes = sio_fs_config.int.suffix
secret_config = SecretConfig() # type: PySecretConfig
secret_config.exact = sio_fs_config.secret.exact
secret_config.prefixes = sio_fs_config.secret.prefix
secret_config.suffixes = sio_fs_config.secret.suffix
sio_server_config.bool_config = bool_config
sio_server_config.int_config = int_config
sio_server_config.secret_config = secret_config
sio_fs_config_default = sio_fs_config.get('default')
if sio_fs_config_default:
sio_server_config.input_required_name = sio_fs_config.default.get('input_required_name', default_input_required_name)
sio_server_config.input_optional_name = sio_fs_config.default.get('input_optional_name', default_input_optional_name)
sio_server_config.output_required_name = sio_fs_config.default.get('output_required_name', default_output_required_name)
sio_server_config.output_optional_name = sio_fs_config.default.get('output_optional_name', default_output_optional_name)
sio_server_config.default_value = sio_fs_config.default.get('default_value', default_value)
sio_server_config.default_input_value = sio_fs_config.default.get('default_input_value', default_input_value)
sio_server_config.default_output_value = sio_fs_config.default.get('default_output_value', default_output_value)
sio_server_config.response_elem = sio_fs_config.default.get('response_elem', default_response_elem)
sio_server_config.skip_empty_keys = sio_fs_config.default.get('skip_empty_keys', default_skip_empty_keys)
sio_server_config.skip_empty_request_keys = sio_fs_config.default.get(
'skip_empty_request_keys', default_skip_empty_request_keys)
sio_server_config.skip_empty_response_keys = sio_fs_config.default.get(
'skip_empty_response_keys', default_skip_empty_response_keys)
sio_server_config.prefix_as_is = sio_fs_config.default.get('prefix_as_is', default_prefix_as_is)
sio_server_config.prefix_bool = sio_fs_config.default.get('prefix_bool', default_prefix_bool)
sio_server_config.prefix_csv = sio_fs_config.default.get('prefix_csv', default_prefix_csv)
sio_server_config.prefix_date = sio_fs_config.default.get('prefix_date', default_prefix_date)
sio_server_config.prefix_date_time = sio_fs_config.default.get('prefix_date_time', default_prefix_date_time)
sio_server_config.prefix_dict = sio_fs_config.default.get('prefix_dict', default_prefix_dict)
sio_server_config.prefix_dict_list = sio_fs_config.default.get('prefix_dict_list', default_prefix_dict_list)
sio_server_config.prefix_float = sio_fs_config.default.get('prefix_float', default_prefix_float)
sio_server_config.prefix_int = sio_fs_config.default.get('prefix_int', default_prefix_int)
sio_server_config.prefix_list = sio_fs_config.default.get('prefix_list', default_prefix_list)
sio_server_config.prefix_text = sio_fs_config.default.get('prefix_text', default_prefix_text)
sio_server_config.prefix_uuid = sio_fs_config.default.get('prefix_uuid', default_prefix_uuid)
else:
sio_server_config.input_required_name = default_input_required_name
sio_server_config.input_optional_name = default_input_optional_name
sio_server_config.output_required_name = default_output_required_name
sio_server_config.output_optional_name = default_output_optional_name
sio_server_config.default_value = default_value
sio_server_config.default_input_value = default_input_value
sio_server_config.default_output_value = default_output_value
sio_server_config.response_elem = default_response_elem
sio_server_config.skip_empty_keys = default_skip_empty_keys
sio_server_config.skip_empty_request_keys = default_skip_empty_request_keys
sio_server_config.skip_empty_response_keys = default_skip_empty_response_keys
bytes_to_str_encoding = sio_fs_config.bytes_to_str.encoding
if not isinstance(bytes_to_str_encoding, unicode):
bytes_to_str_encoding = bytes_to_str_encoding.decode('utf8')
sio_server_config.bytes_to_str_encoding = bytes_to_str_encoding
sio_server_config.json_encoder.bytes_to_str_encoding = bytes_to_str_encoding
return sio_server_config
# ################################################################################################################################
def drop_sio_elems(elems, *to_drop):
out = list(set(elems))
for elem in to_drop:
out.remove(elem)
return out
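# Example (hypothetical input): drop_sio_elems(['aaa', 'bbb', 'ccc'], 'bbb') returns
# ['aaa', 'ccc'], though in no guaranteed order, because the input is first
# de-duplicated through set().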
# ###############################################################################################################################
# End of file: src/zato/common/simpleio_.py (package: zato-common-holocene)
# Zato
from datetime import datetime, timedelta
from zato.common.odb.model import KVData as KVDataModel
from zato.common.typing_ import dataclass, optional
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.odb.api import SessionWrapper
SessionWrapper = SessionWrapper
# ################################################################################################################################
# ################################################################################################################################
utcnow = datetime.utcnow
default_expiry_time = datetime(year=2345, month=12, day=31)
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class KeyCtx:
key: str
value: optional[str] = None
data_type: str = 'string'
creation_time: datetime = None
expiry_time: optional[datetime] = None
# ################################################################################################################################
# ################################################################################################################################
class KVDataAPI:
def __init__(self, cluster_id, session_wrapper):
# type: (int, SessionWrapper) -> None
self.cluster_id = cluster_id
self.session_wrapper = session_wrapper
# ################################################################################################################################
def _get_session(self):
return self.session_wrapper.session()
# ################################################################################################################################
def get(self, key):
# type: (str) -> optional[KeyCtx]
# We always operate on bytes
key = key.encode('utf8') if isinstance(key, str) else key
# Get a new SQL session ..
session = self._get_session()
# .. prepare the query ..
query = session.query(KVDataModel).\
filter(KVDataModel.cluster_id==self.cluster_id).\
filter(KVDataModel.key==key).\
filter(KVDataModel.expiry_time > utcnow())
# .. run it ..
result = query.first() # type: KVDataModel
# .. convert the result to a business object ..
if result:
ctx = KeyCtx()
ctx.key = result.key.decode('utf8')
ctx.value = result.value
ctx.data_type = result.data_type
ctx.creation_time = result.creation_time
ctx.expiry_time = result.expiry_time
if ctx.value:
ctx.value = ctx.value.decode('utf8')
return ctx
# ################################################################################################################################
def set(self, key, value, expiry_sec=None, expiry_time=None):
        # type: (str, str, int, datetime) -> None
ctx = KeyCtx()
ctx.key = key
ctx.value = value
ctx.expiry_time = expiry_time if expiry_time else utcnow() + timedelta(seconds=expiry_sec)
self.set_with_ctx(ctx)
# ################################################################################################################################
def set_with_ctx(self, ctx, data_type='string'):
# type: (KeyCtx, str) -> None
key = ctx.key.encode('utf8') if isinstance(ctx.key, str) else ctx.key
value = ctx.value.encode('utf8') if isinstance(ctx.value, str) else ctx.value
item = KVDataModel()
item.cluster_id = self.cluster_id
item.key = key
item.value = value
item.creation_time = ctx.creation_time or utcnow()
item.expiry_time = ctx.expiry_time or default_expiry_time
session = self._get_session()
session.add(item)
session.commit()
# ################################################################################################################################
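# A usage sketch, assuming an already configured SessionWrapper and a cluster ID of 1:
#
#   kv_data_api = KVDataAPI(1, session_wrapper)
#   kv_data_api.set('user.profile.123', 'some-value', expiry_sec=3600)
#   ctx = kv_data_api.get('user.profile.123')
#   if ctx:
#       print(ctx.value, ctx.expiry_time)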
# ###############################################################################################################################
# End of file: src/zato/common/kv_data.py (package: zato-common-holocene)
# stdlib
from logging import getLogger
# Python 2/3 compatibility
from past.builtins import unicode
# Zato
from zato.common.api import GENERIC
from zato.common.util.api import new_cid
from zato.common.util.time_ import utcnow_as_ms
# ################################################################################################################################
logger = getLogger('zato_pubsub.msg')
logger_zato = getLogger('zato')
# ################################################################################################################################
sk_lists = ('reply_to_sk', 'deliver_to_sk')
skip_to_external = ('delivery_status', 'topic_id', 'cluster_id', 'pub_pattern_matched', 'sub_pattern_matched',
    'published_by_id', 'data_prefix', 'data_prefix_short', 'pub_time', 'expiration_time', 'recv_time',
    'pub_msg_id', 'pub_correl_id', 'zato_ctx') + sk_lists
_data_keys = ('data', 'data_prefix', 'data_prefix_short')
msg_pub_attrs = ('topic', 'sub_key', 'pub_msg_id', 'pub_correl_id', 'in_reply_to', 'ext_client_id', 'group_id',
'position_in_group', 'pub_time', 'ext_pub_time', 'data', 'data_prefix', 'data_prefix_short', 'mime_type', 'priority',
'expiration', 'expiration_time', 'has_gd', 'delivery_status', 'size', 'published_by_id', 'topic_id',
'is_in_sub_queue', 'topic_name', 'cluster_id', 'pub_time_iso', 'ext_pub_time_iso', 'expiration_time_iso',
'recv_time', 'data_prefix_short', 'server_name', 'server_pid', 'pub_pattern_matched', 'sub_pattern_matched',
'delivery_count', 'user_ctx', 'zato_ctx')
class MSG_PREFIX:
GROUP_ID = 'zpsg'
MSG_ID = 'zpsm'
SUB_KEY = 'zpsk'
SERVICE_SK = 'zpsk.srv'
# ################################################################################################################################
def new_msg_id(_new_cid=new_cid, _prefix=MSG_PREFIX.MSG_ID):
return '%s%s' % (_prefix, _new_cid())
# ################################################################################################################################
def new_sub_key(endpoint_type, ext_client_id='', _new_cid=new_cid, _prefix=MSG_PREFIX.SUB_KEY):
_ext_client_id = '.%s' % (ext_client_id,) if ext_client_id else (ext_client_id or '')
return '%s.%s%s.%s' % (_prefix, endpoint_type, _ext_client_id, _new_cid(3))
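# For example (hypothetical input), new_sub_key('rest', 'crm') returns a key
# such as 'zpsk.rest.crm.8a7', while new_sub_key('rest') returns one such as 'zpsk.rest.8a7'.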
# ################################################################################################################################
def new_group_id(_new_cid=new_cid, _prefix=MSG_PREFIX.GROUP_ID):
return '%s%s' % (_prefix, _new_cid())
# ################################################################################################################################
class PubSubMessage(object):
""" Base container class for pub/sub message wrappers.
"""
# We are not using __slots__ because they can't be inherited by subclasses
# and this class, as well as its subclasses, will be rewritten in Cython anyway.
pub_attrs = msg_pub_attrs + sk_lists
def __init__(self):
self.recv_time = utcnow_as_ms()
self.server_name = None
self.server_pid = None
self.topic = None
self.sub_key = None
self.pub_msg_id = None
self.pub_correl_id = None
self.in_reply_to = None
self.ext_client_id = None
self.group_id = None
self.position_in_group = None
self.pub_time = None
self.ext_pub_time = None
self.data = ''
self.data_prefix = ''
self.data_prefix_short = ''
self.mime_type = None
self.priority = None
self.expiration = None
self.expiration_time = None
self.has_gd = None
self.delivery_status = None
self.pub_pattern_matched = None
self.sub_pattern_matched = {}
self.size = None
self.published_by_id = None
self.topic_id = None
self.is_in_sub_queue = None
self.topic_name = None
self.cluster_id = None
self.delivery_count = 0
self.pub_time_iso = None
self.ext_pub_time_iso = None
self.expiration_time_iso = None
self.reply_to_sk = []
self.deliver_to_sk = []
self.user_ctx = None
self.zato_ctx = None
self.serialized = None # May be set by hooks to provide an explicitly serialized output for this message
setattr(self, GENERIC.ATTR_NAME, None) # To make this class look more like an SQLAlchemy one
def to_dict(self, skip=None, needs_utf8_encode=False, add_id_attrs=False, _data_keys=_data_keys):
""" Returns a dict representation of self.
"""
skip = skip or []
out = {}
for key in sorted(PubSubMessage.pub_attrs):
if key != 'topic' and key not in skip:
value = getattr(self, key)
if value is not None:
if needs_utf8_encode:
if key in _data_keys:
value = value.encode('utf8') if isinstance(value, unicode) else value
out[key] = value
if add_id_attrs:
out['msg_id'] = self.pub_msg_id
if self.pub_correl_id:
out['correl_id'] = self.pub_correl_id
# Append the generic opaque attribute to make the output look as though it was produced from an SQLAlchemy object
# but do it only if there is any value, otherwise skip it.
opaque_value = getattr(self, GENERIC.ATTR_NAME)
if opaque_value:
out[GENERIC.ATTR_NAME] = opaque_value
return out
# For compatibility with code that already expects dictalchemy objects with their .asdict method
def asdict(self):
out = self.to_dict()
out[GENERIC.ATTR_NAME] = getattr(self, GENERIC.ATTR_NAME)
return out
def to_external_dict(self, skip=skip_to_external, needs_utf8_encode=False):
""" Returns a dict representation of self ready to be delivered to external systems,
i.e. without internal attributes on output.
"""
out = self.to_dict(skip, needs_utf8_encode, True)
if self.reply_to_sk:
out['ctx'] = {
'reply_to_sk': self.reply_to_sk
}
return out
# ################################################################################################################################
class SkipDelivery(Exception):
""" Raised to indicate to delivery tasks that a given message should be skipped - but not deleted altogether,
the delivery will be attempted in the next iteration of the task.
"""
# ################################################################################################################################
class HandleNewMessageCtx(object):
""" Encapsulates information on new messages that a pubsub tool is about to process.
"""
__slots__ = ('cid', 'has_gd', 'sub_key_list', 'non_gd_msg_list', 'is_bg_call', 'pub_time_max')
def __init__(self, cid, has_gd, sub_key_list, non_gd_msg_list, is_bg_call, pub_time_max=None):
self.cid = cid
self.has_gd = has_gd
self.sub_key_list = sub_key_list
self.non_gd_msg_list = non_gd_msg_list
self.is_bg_call = is_bg_call
self.pub_time_max = pub_time_max
# ################################################################################################################################
class HookCtx(object):
""" Data and metadata that pub/sub hooks receive on input to their methods.
"""
__slots__ = ('msg', 'response', 'soap_suds_client')
def __init__(self, msg, soap_suds_client=None):
self.msg = msg
        self.soap_suds_client = soap_suds_client
self.response = None
# ################################################################################################################################
# PubSub's attributes listed separately for ease of making them part of SimpleIO definitions
pubsub_main_data = 'cluster_id', 'server_name', 'server_pid', 'server_api_address', 'keep_running', 'subscriptions_by_topic', \
'subscriptions_by_sub_key', 'sub_key_servers', 'endpoints', 'topics', 'sec_id_to_endpoint_id', \
'ws_channel_id_to_endpoint_id', 'service_id_to_endpoint_id', 'topic_name_to_id', 'pub_buffer_gd', 'pub_buffer_non_gd', \
'pubsub_tool_by_sub_key', 'pubsub_tools', 'sync_backlog', 'msg_pub_counter', 'has_meta_endpoint', \
'endpoint_meta_store_frequency', 'endpoint_meta_data_len', 'endpoint_meta_max_history', 'data_prefix_len', \
'data_prefix_short_len'
# ################################################################################################################################
class dict_keys:
endpoint = 'id', 'name', 'endpoint_type', 'role', 'is_active', 'is_internal', 'topic_patterns', \
'pub_topic_patterns', 'sub_topic_patterns'
subscription = 'id', 'creation_time', 'sub_key', 'endpoint_id', 'endpoint_name', 'topic_id', 'topic_name', \
'sub_pattern_matched', 'task_delivery_interval', 'unsub_on_wsx_close', 'ext_client_id'
topic = 'id', 'name', 'is_active', 'is_internal', 'max_depth_gd', 'max_depth_non_gd', 'has_gd', 'depth_check_freq',\
'pub_buffer_size_gd', 'task_delivery_interval', 'meta_store_frequency', 'task_sync_interval', 'msg_pub_counter', \
'msg_pub_counter_gd', 'msg_pub_counter_non_gd', 'last_synced', 'sync_has_gd_msg', 'sync_has_non_gd_msg', \
'gd_pub_time_max'
sks = 'sub_key', 'cluster_id', 'server_name', 'server_pid', 'endpoint_type', 'channel_name', 'pub_client_id', \
'ext_client_id', 'wsx_info', 'creation_time', 'endpoint_id'
all_dict_keys = dict_keys.endpoint + dict_keys.subscription + dict_keys.topic + dict_keys.sks
all_dict_keys = list(set(all_dict_keys))
# ###############################################################################################################################
# End of file: src/zato/common/pubsub.py (package: zato-common-holocene)
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from inspect import isclass
# candv
from candv import Constants as _Constants, ValueConstant as _ValueConstant
# Python 2/3 compatibility
from future.utils import iteritems
from past.builtins import cmp
class Constants(_Constants):
values = _Constants.constants
class ValueConstant(_ValueConstant):
def __cmp__(self, other):
return cmp(self.value, (other.value if isinstance(other, ValueConstant) else other))
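# Concrete numeric values are assigned at the bottom of this module - each constant
# receives a stringified, consecutively increasing code starting at its class's
# code_start, e.g. SCHEDULER.CREATE.value becomes '100000'.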
class MESSAGE:
MESSAGE_TYPE_LENGTH = 4
TOKEN_LENGTH = 32
TOKEN_START = MESSAGE_TYPE_LENGTH
TOKEN_END = MESSAGE_TYPE_LENGTH + TOKEN_LENGTH
PAYLOAD_START = MESSAGE_TYPE_LENGTH + TOKEN_LENGTH
NULL_TOKEN = '0' * TOKEN_LENGTH
class MESSAGE_TYPE:
TO_SCHEDULER = '0000'
TO_PARALLEL_ANY = '0001'
TO_PARALLEL_ALL = '0002'
TO_AMQP_PUBLISHING_CONNECTOR_ALL = '0003'
TO_AMQP_CONSUMING_CONNECTOR_ALL = '0004'
TO_AMQP_CONNECTOR_ALL = '0005'
TO_JMS_WMQ_PUBLISHING_CONNECTOR_ALL = '0006'
TO_JMS_WMQ_CONSUMING_CONNECTOR_ALL = '0007'
TO_JMS_WMQ_CONNECTOR_ALL = '0008'
USER_DEFINED_START = '5000'
TOPICS = {
MESSAGE_TYPE.TO_SCHEDULER: '/zato/to-scheduler',
MESSAGE_TYPE.TO_PARALLEL_ANY: '/zato/to-parallel/any',
MESSAGE_TYPE.TO_PARALLEL_ALL: '/zato/to-parallel/all',
MESSAGE_TYPE.TO_AMQP_PUBLISHING_CONNECTOR_ALL: '/zato/connector/amqp/publishing/all',
MESSAGE_TYPE.TO_AMQP_CONSUMING_CONNECTOR_ALL: '/zato/connector/amqp/consuming/all',
MESSAGE_TYPE.TO_AMQP_CONNECTOR_ALL: '/zato/connector/amqp/all',
MESSAGE_TYPE.TO_JMS_WMQ_PUBLISHING_CONNECTOR_ALL: '/zato/connector/jms-wmq/publishing/all',
MESSAGE_TYPE.TO_JMS_WMQ_CONSUMING_CONNECTOR_ALL: '/zato/connector/jms-wmq/consuming/all',
MESSAGE_TYPE.TO_JMS_WMQ_CONNECTOR_ALL: '/zato/connector/jms-wmq/all',
}
KEYS = {k: v.replace('/zato', '').replace('/', ':') for k, v in TOPICS.items()}
class SCHEDULER(Constants):
code_start = 100000
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
EXECUTE = ValueConstant('')
JOB_EXECUTED = ValueConstant('')
SET_JOB_INACTIVE = ValueConstant('')
class ZMQ_SOCKET(Constants):
code_start = 100200
CLOSE = ValueConstant('')
class SECURITY(Constants):
code_start = 100400
BASIC_AUTH_CREATE = ValueConstant('')
BASIC_AUTH_EDIT = ValueConstant('')
BASIC_AUTH_DELETE = ValueConstant('')
BASIC_AUTH_CHANGE_PASSWORD = ValueConstant('')
JWT_CREATE = ValueConstant('')
JWT_EDIT = ValueConstant('')
JWT_DELETE = ValueConstant('')
JWT_CHANGE_PASSWORD = ValueConstant('')
WSS_CREATE = ValueConstant('')
WSS_EDIT = ValueConstant('')
WSS_DELETE = ValueConstant('')
WSS_CHANGE_PASSWORD = ValueConstant('')
OAUTH_CREATE = ValueConstant('')
OAUTH_EDIT = ValueConstant('')
OAUTH_DELETE = ValueConstant('')
OAUTH_CHANGE_PASSWORD = ValueConstant('')
NTLM_CREATE = ValueConstant('')
NTLM_EDIT = ValueConstant('')
NTLM_DELETE = ValueConstant('')
NTLM_CHANGE_PASSWORD = ValueConstant('')
AWS_CREATE = ValueConstant('')
AWS_EDIT = ValueConstant('')
AWS_DELETE = ValueConstant('')
AWS_CHANGE_PASSWORD = ValueConstant('')
APIKEY_CREATE = ValueConstant('')
APIKEY_EDIT = ValueConstant('')
APIKEY_DELETE = ValueConstant('')
APIKEY_CHANGE_PASSWORD = ValueConstant('')
XPATH_SEC_CREATE = ValueConstant('')
XPATH_SEC_EDIT = ValueConstant('')
XPATH_SEC_DELETE = ValueConstant('')
XPATH_SEC_CHANGE_PASSWORD = ValueConstant('')
TLS_CA_CERT_CREATE = ValueConstant('')
TLS_CA_CERT_EDIT = ValueConstant('')
TLS_CA_CERT_DELETE = ValueConstant('')
TLS_CHANNEL_SEC_CREATE = ValueConstant('')
TLS_CHANNEL_SEC_EDIT = ValueConstant('')
TLS_CHANNEL_SEC_DELETE = ValueConstant('')
TLS_KEY_CERT_CREATE = ValueConstant('')
TLS_KEY_CERT_EDIT = ValueConstant('')
TLS_KEY_CERT_DELETE = ValueConstant('')
class DEFINITION(Constants):
code_start = 100600
AMQP_CREATE = ValueConstant('')
AMQP_EDIT = ValueConstant('')
AMQP_DELETE = ValueConstant('')
AMQP_CHANGE_PASSWORD = ValueConstant('')
WMQ_CREATE = ValueConstant('')
WMQ_EDIT = ValueConstant('')
WMQ_DELETE = ValueConstant('')
WMQ_CHANGE_PASSWORD = ValueConstant('')
WMQ_PING = ValueConstant('')
ZMQ_CREATE = ValueConstant('')
ZMQ_EDIT = ValueConstant('')
ZMQ_DELETE = ValueConstant('')
CASSANDRA_CREATE = ValueConstant('')
CASSANDRA_EDIT = ValueConstant('')
CASSANDRA_DELETE = ValueConstant('')
CASSANDRA_CHANGE_PASSWORD = ValueConstant('')
class OUTGOING(Constants):
code_start = 100800
AMQP_CREATE = ValueConstant('')
AMQP_EDIT = ValueConstant('')
AMQP_DELETE = ValueConstant('')
AMQP_PUBLISH = ValueConstant('')
WMQ_CREATE = ValueConstant('')
WMQ_EDIT = ValueConstant('')
WMQ_DELETE = ValueConstant('')
WMQ_SEND = ValueConstant('')
ZMQ_CREATE = ValueConstant('')
ZMQ_EDIT = ValueConstant('')
ZMQ_DELETE = ValueConstant('')
ZMQ_SEND = ValueConstant('')
SQL_CREATE_EDIT = ValueConstant('') # Same for creating and updating the pools
SQL_CHANGE_PASSWORD = ValueConstant('')
SQL_DELETE = ValueConstant('')
HTTP_SOAP_CREATE_EDIT = ValueConstant('') # Same for creating and updating
HTTP_SOAP_DELETE = ValueConstant('')
FTP_CREATE_EDIT = ValueConstant('') # Same for creating and updating
FTP_DELETE = ValueConstant('')
FTP_CHANGE_PASSWORD = ValueConstant('')
ODOO_CREATE = ValueConstant('')
ODOO_EDIT = ValueConstant('')
ODOO_DELETE = ValueConstant('')
ODOO_CHANGE_PASSWORD = ValueConstant('')
SAP_CREATE = ValueConstant('')
SAP_EDIT = ValueConstant('')
SAP_DELETE = ValueConstant('')
SAP_CHANGE_PASSWORD = ValueConstant('')
SFTP_CREATE = ValueConstant('')
SFTP_EDIT = ValueConstant('')
SFTP_DELETE = ValueConstant('')
SFTP_CHANGE_PASSWORD = ValueConstant('')
SFTP_EXECUTE = ValueConstant('')
SFTP_PING = ValueConstant('')
class CHANNEL(Constants):
code_start = 101000
AMQP_CREATE = ValueConstant('')
AMQP_EDIT = ValueConstant('')
AMQP_DELETE = ValueConstant('')
AMQP_MESSAGE_RECEIVED = ValueConstant('')
WMQ_CREATE = ValueConstant('')
WMQ_EDIT = ValueConstant('')
WMQ_DELETE = ValueConstant('')
WMQ_MESSAGE_RECEIVED = ValueConstant('')
ZMQ_CREATE = ValueConstant('')
ZMQ_EDIT = ValueConstant('')
ZMQ_DELETE = ValueConstant('')
ZMQ_MESSAGE_RECEIVED = ValueConstant('')
HTTP_SOAP_CREATE_EDIT = ValueConstant('') # Same for creating and updating
HTTP_SOAP_DELETE = ValueConstant('')
WEB_SOCKET_CREATE = ValueConstant('')
WEB_SOCKET_EDIT = ValueConstant('')
WEB_SOCKET_DELETE = ValueConstant('')
WEB_SOCKET_BROADCAST = ValueConstant('')
FTP_CREATE = ValueConstant('')
FTP_EDIT = ValueConstant('')
FTP_DELETE = ValueConstant('')
FTP_PING = ValueConstant('')
FTP_USER_CREATE = ValueConstant('')
FTP_USER_EDIT = ValueConstant('')
FTP_USER_DELETE = ValueConstant('')
FTP_USER_CHANGE_PASSWORD = ValueConstant('')
class AMQP_CONNECTOR(Constants):
""" Since 3.0, this is not used anymore.
"""
code_start = 101200
CLOSE = ValueConstant('')
class JMS_WMQ_CONNECTOR(Constants):
""" Since 3.0, this is not used anymore.
"""
code_start = 101400
CLOSE = ValueConstant('')
class ZMQ_CONNECTOR(Constants):
""" Since 3.0, this is not used anymore.
"""
code_start = 101600
CLOSE = ValueConstant('')
class SERVICE(Constants):
code_start = 101800
EDIT = ValueConstant('')
DELETE = ValueConstant('')
PUBLISH = ValueConstant('')
class STATS(Constants):
code_start = 102000
DELETE = ValueConstant('')
DELETE_DAY = ValueConstant('')
class HOT_DEPLOY(Constants):
code_start = 102200
CREATE_SERVICE = ValueConstant('')
CREATE_STATIC = ValueConstant('')
CREATE_USER_CONF = ValueConstant('')
AFTER_DEPLOY = ValueConstant('')
class SINGLETON(Constants):
code_start = 102400
CLOSE = ValueConstant('')
class MSG_NS(Constants):
code_start = 102600
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
class MSG_XPATH(Constants):
code_start = 102800
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
class MSG_JSON_POINTER(Constants):
code_start = 103000
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
class PUB_SUB_TOPIC(Constants):
code_start = 103200
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
ADD_DEFAULT_PRODUCER = ValueConstant('')
DELETE_DEFAULT_PRODUCER = ValueConstant('')
class PUB_SUB_PRODUCER(Constants):
code_start = 103400
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
class PUB_SUB_CONSUMER(Constants):
code_start = 103600
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
class CLOUD(Constants):
code_start = 103800
AWS_S3_CREATE_EDIT = ValueConstant('')
AWS_S3_DELETE = ValueConstant('')
class NOTIF(Constants):
code_start = 104000
RUN_NOTIFIER = ValueConstant('')
SQL_CREATE = ValueConstant('')
SQL_EDIT = ValueConstant('')
SQL_DELETE = ValueConstant('')
class SEARCH(Constants):
code_start = 104200
CREATE = ValueConstant('')
EDIT = ValueConstant('')
DELETE = ValueConstant('')
ES_CREATE = ValueConstant('')
ES_EDIT = ValueConstant('')
ES_DELETE = ValueConstant('')
ES_CHANGE_PASSWORD = ValueConstant('')
SOLR_CREATE = ValueConstant('')
SOLR_EDIT = ValueConstant('')
SOLR_DELETE = ValueConstant('')
SOLR_CHANGE_PASSWORD = ValueConstant('')
class QUERY(Constants):
code_start = 104400
CASSANDRA_CREATE = ValueConstant('')
CASSANDRA_EDIT = ValueConstant('')
CASSANDRA_DELETE = ValueConstant('')
CASSANDRA_CHANGE_PASSWORD = ValueConstant('')
class EMAIL(Constants):
code_start = 104800
SMTP_CREATE = ValueConstant('')
SMTP_EDIT = ValueConstant('')
SMTP_DELETE = ValueConstant('')
SMTP_CHANGE_PASSWORD = ValueConstant('')
IMAP_CREATE = ValueConstant('')
IMAP_EDIT = ValueConstant('')
IMAP_DELETE = ValueConstant('')
IMAP_CHANGE_PASSWORD = ValueConstant('')
class RBAC(Constants):
code_start = 105200
ROLE_CREATE = ValueConstant('')
ROLE_EDIT = ValueConstant('')
ROLE_DELETE = ValueConstant('')
CLIENT_ROLE_CREATE = ValueConstant('')
CLIENT_ROLE_DELETE = ValueConstant('')
PERMISSION_CREATE = ValueConstant('')
PERMISSION_EDIT = ValueConstant('')
PERMISSION_DELETE = ValueConstant('')
ROLE_PERMISSION_CREATE = ValueConstant('')
ROLE_PERMISSION_EDIT = ValueConstant('')
ROLE_PERMISSION_DELETE = ValueConstant('')
class VAULT(Constants):
code_start = 105400
CONNECTION_CREATE = ValueConstant('')
CONNECTION_EDIT = ValueConstant('')
CONNECTION_DELETE = ValueConstant('')
POLICY_CREATE = ValueConstant('')
POLICY_EDIT = ValueConstant('')
POLICY_DELETE = ValueConstant('')
class PUBSUB(Constants):
code_start = 105600
ENDPOINT_CREATE = ValueConstant('')
ENDPOINT_EDIT = ValueConstant('')
ENDPOINT_DELETE = ValueConstant('')
SUBSCRIPTION_CREATE = ValueConstant('')
SUBSCRIPTION_EDIT = ValueConstant('')
SUBSCRIPTION_DELETE = ValueConstant('')
TOPIC_CREATE = ValueConstant('')
TOPIC_EDIT = ValueConstant('')
TOPIC_DELETE = ValueConstant('')
SUB_KEY_SERVER_SET = ValueConstant('') # This is shared by WSX and other endpoint types
WSX_CLIENT_SUB_KEY_SERVER_REMOVE = ValueConstant('')
DELIVERY_SERVER_CHANGE = ValueConstant('')
class SMS(Constants):
code_start = 106000
TWILIO_CREATE = ValueConstant('')
TWILIO_EDIT = ValueConstant('')
TWILIO_DELETE = ValueConstant('')
class CACHE(Constants):
code_start = 106400
BUILTIN_CREATE = ValueConstant('')
BUILTIN_EDIT = ValueConstant('')
BUILTIN_DELETE = ValueConstant('')
BUILTIN_STATE_CHANGED_CLEAR = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_BY_PREFIX = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_BY_SUFFIX = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_BY_REGEX = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_CONTAINS = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_NOT_CONTAINS = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_CONTAINS_ALL = ValueConstant('')
BUILTIN_STATE_CHANGED_DELETE_CONTAINS_ANY = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_BY_PREFIX = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_BY_SUFFIX = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_BY_REGEX = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_CONTAINS = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_NOT_CONTAINS = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_CONTAINS_ALL = ValueConstant('')
BUILTIN_STATE_CHANGED_EXPIRE_CONTAINS_ANY = ValueConstant('')
BUILTIN_STATE_CHANGED_SET = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_BY_PREFIX = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_BY_SUFFIX = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_BY_REGEX = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_CONTAINS = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_NOT_CONTAINS = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_CONTAINS_ALL = ValueConstant('')
BUILTIN_STATE_CHANGED_SET_CONTAINS_ANY = ValueConstant('')
MEMCACHED_CREATE = ValueConstant('')
MEMCACHED_EDIT = ValueConstant('')
MEMCACHED_DELETE = ValueConstant('')
class SERVER_STATUS(Constants):
code_start = 106800
STATUS_CHANGED = ValueConstant('')
class GENERIC(Constants):
code_start = 107000
CONNECTION_CREATE = ValueConstant('')
CONNECTION_EDIT = ValueConstant('')
CONNECTION_DELETE = ValueConstant('')
CONNECTION_CHANGE_PASSWORD = ValueConstant('')
class SSO(Constants):
code_start = 107200
USER_CREATE = ValueConstant('')
USER_EDIT = ValueConstant('')
LINK_AUTH_CREATE = ValueConstant('')
LINK_AUTH_DELETE = ValueConstant('')
class EVENT(Constants):
code_start = 107400
PUSH = ValueConstant('')
code_to_name = {}
# To prevent 'RuntimeError: dictionary changed size during iteration'
item_name, item = None, None
_globals = list(iteritems(globals()))
for item_name, item in _globals:
if isclass(item) and issubclass(item, Constants) and item is not Constants:
for idx, (attr, const) in enumerate(item.items()):
const.value = str(item.code_start + idx)
        code_to_name[const.value] = '{}_{}'.format(item_name, attr)

# End of file: src/zato/common/broker_message.py (package: zato-common-holocene)
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from string import Template
from traceback import format_exc
# lxml
from lxml import etree
from lxml.objectify import ObjectPath as _ObjectPath
# Zato
from zato.common.exception import ParsingException
# ################################################################################################################################
# ################################################################################################################################
# XML namespace for use in all Zato's own services.
zato_namespace = 'https://zato.io/ns/v1'
zato_ns_map = {None: zato_namespace}
soapenv11_namespace = 'http://schemas.xmlsoap.org/soap/envelope/'
soapenv12_namespace = 'http://www.w3.org/2003/05/soap-envelope'
wsse_namespace = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd'
wsu_namespace = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd'
common_namespaces = {
'soapenv':soapenv11_namespace,
'wsse':wsse_namespace,
'wsu':wsu_namespace,
'zato':zato_namespace
}
soap_doc = Template("""<soap:Envelope xmlns:soap='%s'><soap:Body>$body</soap:Body></soap:Envelope>""" % soapenv11_namespace)
soap_body_path = '/soapenv:Envelope/soapenv:Body'
soap_body_xpath = etree.XPath(soap_body_path, namespaces=common_namespaces)
soap_fault_path = '/soapenv:Envelope/soapenv:Body/soapenv:Fault'
soap_fault_xpath = etree.XPath(soap_fault_path, namespaces=common_namespaces)
wsse_password_type_text = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText'
supported_wsse_password_types = (wsse_password_type_text,)
wsse_username_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Username'
wsse_username_xpath = etree.XPath(wsse_username_path, namespaces=common_namespaces)
wsse_password_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Password'
wsse_password_xpath = etree.XPath(wsse_password_path, namespaces=common_namespaces)
wsse_password_type_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Password/@Type'
wsse_password_type_xpath = etree.XPath(wsse_password_type_path, namespaces=common_namespaces)
wsse_nonce_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Nonce'
wsse_nonce_xpath = etree.XPath(wsse_nonce_path, namespaces=common_namespaces)
wsu_username_created_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsu:Created'
wsu_username_created_xpath = etree.XPath(wsu_username_created_path, namespaces=common_namespaces)
wsu_expires_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsu:Timestamp/wsu:Expires'
wsu_expires_xpath = etree.XPath(wsu_expires_path, namespaces=common_namespaces)
wsse_username_objectify = '{{{}}}Security'.format(wsse_namespace)
wsse_username_token_objectify = '{{{}}}UsernameToken'.format(wsse_namespace)
zato_data_path = soap_data_path = '/soapenv:Envelope/soapenv:Body/*[1]'
zato_data_xpath = soap_data_xpath = etree.XPath(zato_data_path, namespaces=common_namespaces)
zato_result_path = '//zato:zato_env/zato:result'
zato_result_xpath = etree.XPath(zato_result_path, namespaces=common_namespaces)
zato_cid_path = '//zato:zato_env/zato:cid'
zato_cid_xpath = etree.XPath(zato_cid_path, namespaces=common_namespaces)
zato_details_path = '//zato:zato_env/zato:details'
zato_details_xpath = etree.XPath(zato_details_path, namespaces=common_namespaces)
# ################################################################################################################################
# ################################################################################################################################
class path(object):
def __init__(self, path, raise_on_not_found=False, ns='', text_only=False):
self.path = path
self.ns = ns
self.raise_on_not_found = raise_on_not_found
self.text_only = text_only
self.children_only = False
self.children_only_idx = None
def get_from(self, elem):
if self.ns:
_path = '{{{}}}{}'.format(self.ns, self.path)
else:
_path = self.path
try:
if self.children_only:
elem = elem.getchildren()[self.children_only_idx]
value = _ObjectPath(_path)(elem)
if self.text_only:
return value.text
return value
        except (ValueError, AttributeError):
if self.raise_on_not_found:
raise ParsingException(None, format_exc())
else:
return None
# ################################################################################################################################
# ################################################################################################################################
class zato_path(path):
def __init__(self, path, raise_on_not_found=False, text_only=False):
super(zato_path, self).__init__(path, raise_on_not_found, zato_namespace, text_only)
self.children_only = True
self.children_only_idx = 1 # 0 is zato_env
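
# Illustrative usage only, not part of the original module - a minimal sketch of how zato_path
# can pull a value out of an objectified response document. The XML below is made-up sample data.
def _demo_zato_path():
    from lxml import objectify
    xml = (
        '<response xmlns="https://zato.io/ns/v1">'
        '<zato_env><cid>abc</cid><result>ZATO_OK</result></zato_env>'
        '<item><name>my.name</name></item>'
        '</response>'
    )
    elem = objectify.fromstring(xml)
    # children_only_idx is 1, so the lookup starts at <item>, skipping <zato_env>
    return zato_path('item.name', text_only=True).get_from(elem) # 'my.name'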
# ################################################################################################################################
# ################################################################################################################################

# File: src/zato/common/xml_.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
from tempfile import NamedTemporaryFile
from traceback import format_exc
# Zato
from zato.common.util.api import make_repr, timeouting_popen
logger = getLogger(__name__)
# We'll wait up to that many seconds for HAProxy to validate the config file.
HAPROXY_VALIDATE_TIMEOUT = 0.6
# Statistics commands understood by HAProxy 1.3.x and newer. Note that the
# command numbers must be consecutively increasing across HAProxy versions.
haproxy_stats = {
("1", "3"): {
# A special command interpreted by the agent as a request for
# describing the commands available
0: ("ZATO_DESCRIBE_COMMANDS", "Describe commands"),
1: ("show info", "Show info"),
2: ("show stat", "Show stats"),
3: ("show errors", "Show errors"),
4: ("show sess", "Show sessions"),
},
("1", "4"): {
}
}
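
# Illustrative lookup only, not part of the original module - resolves the command string
# and its description for a given HAProxy version and command number.
def _demo_stats_command(version=('1', '3'), command_no=2):
    command, description = haproxy_stats[version][command_no]
    return command, description # ('show stat', 'Show stats')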
# timeout_id -> (value in milliseconds, name)
timeouts = {
1: (250, "250ms"),
2: (500, "500ms"),
3: (1000, "1s"),
4: (3000, "3s"),
    5: (5000, "5s"),
6: (30000, "30s")
}
http_log = {
1: ("nolog", "No log"),
2: ("httplog", "HTTP log"),
}
tcp_log = {
1: ("nolog", "No log"),
2: ("tcplog", "TCP log"),
}
reversed_http_log = dict((v[0],k) for k,v in http_log.items())
reversed_tcp_log = dict((v[0],k) for k,v in tcp_log.items())
class Config(object):
""" An object for representing a HAProxy configuration file.
"""
def __init__(self):
self.global_ = {}
self.defaults = {}
self.backend = {'bck_http_plain': {}}
self.frontend = {"front_http_plain": {}}
def __repr__(self):
return make_repr(self)
def set_value(self, name, data):
if name == 'global:log':
host, port, facility, level = data
self.global_['log'] = {}
self.global_['log']['host'] = host
self.global_['log']['port'] = port
self.global_['log']['facility'] = facility
self.global_['log']['level'] = level
elif name == 'global:stats_socket':
stats_socket = data[0]
self.global_['stats_socket'] = stats_socket
elif name == 'defaults:timeout connect':
timeout = data[0]
self.defaults['timeout_connect'] = timeout
elif name == 'defaults:timeout client':
timeout = data[0]
self.defaults['timeout_client'] = timeout
elif name == 'defaults:timeout server':
timeout = data[0]
self.defaults['timeout_server'] = timeout
elif name == 'defaults:stats uri':
stats_uri = data[0]
self.defaults['stats_uri'] = stats_uri
elif name.startswith('backend bck_http_plain:server'):
backend_name, address, port, extra = data
extra = extra.strip()
backend_name = backend_name.split('http_plain--')[1]
self.backend['bck_http_plain'][backend_name] = {}
self.backend['bck_http_plain'][backend_name]['address'] = address
self.backend['bck_http_plain'][backend_name]['port'] = port
self.backend['bck_http_plain'][backend_name]['extra'] = extra
elif name == 'backend bck_http_plain:option httpchk':
method, path = data
self.backend['bck_http_plain']['option_httpchk'] = {}
self.backend['bck_http_plain']['option_httpchk']['method'] = method
self.backend['bck_http_plain']['option_httpchk']['path'] = path
elif name == 'frontend front_http_plain:monitor-uri':
path = data[0]
self.frontend['front_http_plain']['monitor_uri'] = path
elif name == 'frontend front_http_plain:option log-http-requests':
option = reversed_http_log[data[0]]
self.frontend['front_http_plain']['log_http_requests'] = option
elif name == 'frontend front_http_plain:bind':
address, port = data
self.frontend['front_http_plain']['bind'] = {}
self.frontend['front_http_plain']['bind']['address'] = address
self.frontend['front_http_plain']['bind']['port'] = port
elif name == 'frontend front_http_plain:maxconn':
maxconn = data[0]
self.frontend['front_http_plain']['maxconn'] = maxconn
else:
msg = 'Could not parse config, name:[{name}], data:[{data}]'.format(name=name, data=data)
logger.error(msg)
raise Exception(msg)
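
# Illustrative usage only, not part of the original module - a minimal sketch of how parsed
# name/data pairs populate a Config instance. All sample values below are assumptions.
def _demo_haproxy_config():
    config = Config()
    config.set_value('global:log', ['127.0.0.1', '514', 'local0', 'info'])
    config.set_value('defaults:timeout connect', ['5000'])
    config.set_value('frontend front_http_plain:bind', ['0.0.0.0', '11223'])
    return config.frontend['front_http_plain']['bind'] # {'address': '0.0.0.0', 'port': '11223'}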
def validate_haproxy_config(config_data, haproxy_command):
""" Writes the config into a temporary file and validates it using the HAProxy's
-c check mode.
"""
try:
with NamedTemporaryFile(prefix='zato-tmp') as tf:
tf.write(config_data.encode('utf8'))
tf.flush()
common_msg = 'config_file:`{}`'
common_msg = common_msg.format(open(tf.name).read())
timeout_msg = 'HAProxy didn\'t respond in `{}` seconds. '
rc_non_zero_msg = 'Failed to validate the config file using HAProxy. '
command = [haproxy_command, '-c', '-f', tf.name]
timeouting_popen(command, HAPROXY_VALIDATE_TIMEOUT, timeout_msg, rc_non_zero_msg, common_msg)
except Exception:
msg = 'Caught an exception, e:`{}`'.format(format_exc())
logger.error(msg)
        raise Exception(msg)

# File: src/zato/common/haproxy.py
# stdlib
from datetime import datetime
# gevent
from gevent.lock import RLock
# ################################################################################################################################
# ################################################################################################################################
utcnow = datetime.utcnow
# ################################################################################################################################
# ################################################################################################################################
class InRAMStore:
""" Base class for stores keeping data in RAM, optionally synchronising it to persistent storage.
"""
def __init__(self, sync_threshold, sync_interval):
# type: (int, int) -> None
# Sync to storage once in that many events ..
self.sync_threshold = sync_threshold
# .. or once in that many seconds.
self.sync_interval = sync_interval
# Total events received since startup
self.total_events = 0
# How many events we have received since the last synchronisation with persistent storage
self.num_events_since_sync = 0
# Reset each time we synchronise in-RAM state with the persistent storage
self.last_sync_time = utcnow()
# Maps action opcodes to actual methods so that the latter do not have to be looked up in runtime
self.opcode_to_func = {}
# A coarse-grained update lock used while modifying the in-RAM database or DB key locks
self.update_lock = RLock()
# Maps DB keys to fine-grained locks
self.key_lock = {}
        # Internal usage counters and telemetry
self.telemetry = {}
# ################################################################################################################################
def get_lock(self, key):
# type: (str) -> RLock
with self.update_lock:
key_lock = self.key_lock.get(key)
if not key_lock:
key_lock = RLock()
self.key_lock[key] = key_lock
return key_lock
# ################################################################################################################################
def should_sync(self):
# type: () -> bool
sync_by_threshold = self.num_events_since_sync % self.sync_threshold == 0
sync_by_time = (utcnow() - self.last_sync_time).total_seconds() >= self.sync_interval
return sync_by_threshold or sync_by_time
# ################################################################################################################################
def sync_state(self):
raise NotImplementedError('InRAMStore.sync_state')
# ################################################################################################################################
def post_modify_state(self):
# .. update counters ..
self.num_events_since_sync += 1
self.total_events += 1
# .. check if sync is needed only if our class implements the method ..
if self.sync_state:
# .. check if we should sync RAM with persistent storage ..
if self.should_sync():
# .. save in persistent storage ..
self.sync_state()
# .. update metadata.
self.num_events_since_sync = 0
self.last_sync_time = utcnow()
# ################################################################################################################################
def access_state(self, opcode, data):
# type: (str, object) -> None
with self.update_lock:
            # Maps the incoming opcode to an actual function to handle data ..
func = self.opcode_to_func[opcode]
# .. store in RAM ..
func(data)
# .. update metadata and, possibly, sync state (storage).
self.post_modify_state()
# ################################################################################################################################
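# Illustrative subclass only, not part of the original module - a minimal in-RAM counter store.
# The 'incr' opcode and the thresholds below are assumptions for demonstration.
class _DemoCounterStore(InRAMStore):

    def __init__(self, sync_threshold=100, sync_interval=30):
        super().__init__(sync_threshold, sync_interval)
        self.counters = {}
        self.opcode_to_func['incr'] = self.on_incr

    def on_incr(self, data):
        # type: (dict) -> None
        key = data['key']
        self.counters[key] = self.counters.get(key, 0) + data.get('by', 1)

    def sync_state(self):
        # A real subclass would write self.counters to persistent storage here
        pass

# Usage: _DemoCounterStore().access_state('incr', {'key': 'requests.count', 'by': 1})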
# ################################################################################################################################

# File: src/zato/common/in_ram.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
import os
import socket
# Zato
from zato.common.util.api import get_current_user
# Python 2/3 compatibility
from six import PY2
# ################################################################################################################################
logger = logging.getLogger(__name__)
logger_bzr = logging.getLogger('bzr')
logger_bzr.setLevel(logging.WARN)
logger_sh = logging.getLogger('sh.command')
logger_sh.setLevel(logging.WARN)
# ################################################################################################################################
# We use Bazaar under Zato 3.0 with Python 2.7. Any newer version of Zato, or Zato 3.0 with Python 3.x, uses git.
# ################################################################################################################################
# ################################################################################################################################
class _BaseRepoManager(object):
def __init__(self, repo_location='.'):
self.repo_location = os.path.abspath(os.path.expanduser(repo_location))
# ################################################################################################################################
# ################################################################################################################################
class NoneRepoManager(_BaseRepoManager):
def ensure_repo_consistency(self):
pass
# ################################################################################################################################
# ################################################################################################################################
class GitRepoManager(_BaseRepoManager):
def ensure_repo_consistency(self):
# Use sh for git commands
import sh
# Always work in the same directory as the repository is in
sh.cd(self.repo_location)
# (Re-)init the repository
sh.git.init(self.repo_location)
# Set user info
current_user = get_current_user()
sh.git.config('user.name', current_user)
sh.git.config('user.email', '{}@{}'.format(current_user, socket.getfqdn()))
# Default branch is called 'main'
sh.git.checkout('-B', 'main')
# Add all files
sh.git.add('-A', self.repo_location)
output = sh.git.status('--porcelain') # type: str
output = output.strip()
# And commit changes if there are any
if output:
sh.git.commit('-m', 'Committing latest changes')
# ################################################################################################################################
# ################################################################################################################################
if PY2:
RepoManager = NoneRepoManager
else:
RepoManager = GitRepoManager
# ################################################################################################################################
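# Illustrative usage only, not part of the original module - the directory below is an
# assumption for demonstration.
def _demo_repo_manager():
    manager = RepoManager('~/env/qs-1/config/repo')
    manager.ensure_repo_consistency()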
# ################################################################################################################################

# File: src/zato/common/repo.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
from traceback import format_exc
# PyTDS
import pytds
# SQLAlchemy
from sqlalchemy.pool import QueuePool as SAQueuePool
from sqlalchemy.pool.dbapi_proxy import _DBProxy
# Zato
from zato.common.api import MS_SQL
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
def get_queue_pool(pool_kwargs):
class _QueuePool(SAQueuePool):
def __init__(self, creator, *args, **kwargs):
super(_QueuePool, self).__init__(creator, **pool_kwargs)
return _QueuePool
# ################################################################################################################################
class SimpleSession(object):
""" A simple object simulating SQLAlchemy sessions.
"""
def __init__(self, api):
# type: (MSSQLDirectAPI)
self.api = api
def __call__(self):
return self
def execute(self, *args, **kwargs):
return self.api.execute(*args, **kwargs)
def callproc(self, *args, **kwargs):
return self.api.callproc(*args, **kwargs)
def ping(self, *args, **kwargs):
return self.api.ping(*args, **kwargs)
# ################################################################################################################################
class MSSQLDirectAPI(object):
""" An object through which MS SQL connections can be obtained and stored procedures invoked.
"""
name = MS_SQL.ZATO_DIRECT
ping_query = 'SELECT 1'
def __init__(self, name, pool_size, connect_kwargs):
# type: (str, int, dict) -> None
self._name = name
self._connect_kwargs = connect_kwargs
self._pool_kwargs = {
'pool_size': pool_size,
'max_overflow': 0,
# This is a pool-level checkout timeout, not an SQL query-level one
# so we do not need to make it configurable
'timeout': 3
}
self._pool = _DBProxy(pytds, get_queue_pool(self._pool_kwargs))
# ################################################################################################################################
def connect(self):
return self._pool.connect(**self._connect_kwargs)
# ################################################################################################################################
def dispose(self):
self._pool.dispose()
# ################################################################################################################################
def execute(self, *args, **kwargs):
conn = None
try:
conn = self.connect()
with conn.cursor() as cursor:
cursor.execute(*args, **kwargs)
return cursor.fetchall()
finally:
if conn:
conn.close()
# ################################################################################################################################
def ping(self):
return self.execute(self.ping_query)
# ################################################################################################################################
def _return_proc_rows(self, conn, proc_name, params=None):
""" Calls a procedure and returns all the rows it produced as a single list.
"""
# Result to return
result = []
# This is optional in case getting a new cursor will fail
cursor = None
# Will be set to True in the exception block
has_exception = False
try:
# Get a new cursor
cursor = conn.cursor()
            # Call the procedure
cursor.callproc(proc_name, params or [])
while True:
result.append(cursor.fetchall())
if not cursor.nextset():
break
except Exception:
has_exception = True
logger.warn(format_exc())
raise
finally:
if cursor:
cursor.close()
conn.commit()
conn.close()
# Return the result only if there was no exception along the way
if not has_exception:
return result
# ################################################################################################################################
def _yield_proc_rows(self, conn, proc_name, params=None):
""" Calls a procedure and yields all the rows it produced, one by one.
"""
# This is optional in case getting a new cursor will fail
cursor = None
try:
# Get a new cursor
cursor = conn.cursor()
            # Call the procedure
cursor.callproc(proc_name, params or [])
while True:
yield cursor.fetchall()
if not cursor.nextset():
break
except Exception:
logger.warn(format_exc())
raise
finally:
if cursor:
cursor.close()
conn.commit()
conn.close()
# ################################################################################################################################
def callproc(self, name, params=None, use_yield=False):
params = params or []
# Obtain a connection from pool
conn = self.connect()
return self._yield_proc_rows(conn, name, params) if use_yield else self._return_proc_rows(conn, name, params)
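
# ################################################################################################################################

# Illustrative usage only, not part of the original module - connection details below are
# assumptions and the exact pytds keyword arguments depend on one's environment.
def _demo_mssql():
    connect_kwargs = {
        'dsn': '127.0.0.1',
        'port': 1433,
        'database': 'mydb',
        'user': 'myuser',
        'password': 'mypassword',
    }
    api = MSSQLDirectAPI('my.conn.def', 5, connect_kwargs)
    # A plain query ..
    api.ping()
    # .. a procedure returning all row sets at once ..
    result = api.callproc('my_proc', ['my-param'])
    # .. or one yielding them one by one.
    for row_set in api.callproc('my_proc', ['my-param'], use_yield=True):
        pass
    return result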
# ################################################################################################################################

# File: src/zato/common/mssql_direct.py
"""
Copyright (C) 2019 Zato Source s.r.o. https://zato.io

Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""

from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
import subprocess
from datetime import datetime, timedelta
from traceback import format_exc
from uuid import uuid4
# gevent
from gevent import sleep, spawn
# six
from six import binary_type
from six.moves.http_client import OK
# ws4py
from ws4py.client.geventclient import WebSocketClient
# Zato
from zato.common.json_ import dumps
from zato.common.json_internal import loads
# ################################################################################################################################
logger = logging.getLogger('zato.wsx_client')
# ################################################################################################################################
class MSG_PREFIX:
_COMMON = 'zato.ws.client.{}'
INVOKE_SERVICE = _COMMON.format('invs.{}')
SEND_AUTH = _COMMON.format('auth.{}')
SEND_RESP = _COMMON.format('resp.{}')
# ################################################################################################################################
zato_keep_alive_ping = 'zato-keep-alive-ping'
_invalid = '_invalid.' + uuid4().hex
# ################################################################################################################################
class Config(object):
    def __init__(self, client_name=None, client_id=None, address=None, username=None, secret=None, on_request_callback=None,
            on_closed_callback=None, wait_time=5):
        self.client_name = client_name
        self.client_id = client_id
        self.address = address
        self.username = username
        self.secret = secret
        self.on_request_callback = on_request_callback
        self.on_closed_callback = on_closed_callback
        self.wait_time = wait_time
        self.needs_auth = bool(self.username)
# ################################################################################################################################
class MessageToZato(object):
""" An individual message from a WebSocket client to Zato, either request or response to a previous request from Zato.
"""
action = _invalid
def __init__(self, msg_id, config, token=None):
self.config = config
self.msg_id = msg_id
self.token = token
def serialize(self, _now=datetime.utcnow):
return dumps(self.enrich({
'data': {},
'meta': {
'action': self.action,
'id': self.msg_id,
'timestamp': _now().isoformat(),
'token': self.token,
'client_id': self.config.client_id,
'client_name': self.config.client_name,
}
}))
def enrich(self, msg):
""" Implemented by subclasses that need to add extra information.
"""
return msg
# ################################################################################################################################
class AuthRequest(MessageToZato):
""" Logs a client into a WebSocket connection.
"""
action = 'create-session'
def enrich(self, msg):
msg['meta']['username'] = self.config.username
msg['meta']['secret'] = self.config.secret
return msg
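
# Illustrative usage only, not part of the original module - serializes a sample
# authentication request to show the wire format. All values below are made up.
def _demo_auth_request():
    config = Config(client_name='My Client', client_id='32351b3f5d16',
        address='ws://127.0.0.1:47043/demo', username='user1', secret='secret1')
    return AuthRequest('zato.ws.client.auth.abc', config).serialize()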
# ################################################################################################################################
class ServiceInvokeRequest(MessageToZato):
""" Encapsulates information about an invocation of a Zato service.
"""
action = 'invoke-service'
def __init__(self, request_id, data, *args, **kwargs):
self.data = data
super(ServiceInvokeRequest, self).__init__(request_id, *args, **kwargs)
def enrich(self, msg):
msg['data'].update(self.data)
return msg
# ################################################################################################################################
class ResponseFromZato(object):
""" A response from Zato to a previous request by this client.
"""
__slots__ = ('id', 'timestamp', 'in_reply_to', 'status', 'is_ok', 'data', 'msg_impl')
def __init__(self):
self.id = None
self.timestamp = None
self.in_reply_to = None
self.status = None
self.is_ok = None
self.data = None
self.msg_impl = None
@staticmethod
def from_json(msg):
response = ResponseFromZato()
response.msg_impl = msg
meta = msg['meta']
response.id = meta['id']
response.timestamp = meta['timestamp']
response.in_reply_to = meta['in_reply_to']
response.status = meta['status']
response.is_ok = response.status == OK
response.data = msg.get('data')
return response
# ################################################################################################################################
class RequestFromZato(object):
""" A request from Zato to this client.
"""
__slots__ = ('id', 'timestamp', 'data', 'msg_impl')
def __init__(self):
self.id = None
self.timestamp = None
self.data = None
self.msg_impl = None
@staticmethod
def from_json(msg):
request = RequestFromZato()
request.msg_impl = msg
request.id = msg['meta']['id']
request.timestamp = msg['meta']['timestamp']
request.data = msg['data']
return request
# ################################################################################################################################
class ResponseToZato(MessageToZato):
""" A response from this client to a previous request from Zato.
"""
action = 'client-response'
def __init__(self, in_reply_to, data, *args, **kwargs):
self.in_reply_to = in_reply_to
self.data = data
super(ResponseToZato, self).__init__(*args, **kwargs)
def enrich(self, msg):
msg['meta']['in_reply_to'] = self.in_reply_to
msg['data']['response'] = self.data
return msg
# ################################################################################################################################
class _WSClient(WebSocketClient):
""" A low-level subclass of around ws4py's WebSocket client functionality.
"""
def __init__(self, on_connected_callback, on_message_callback, on_error_callback, on_closed_callback, *args, **kwargs):
self.on_connected_callback = on_connected_callback
self.on_message_callback = on_message_callback
self.on_error_callback = on_error_callback
self.on_closed_callback = on_closed_callback
super(_WSClient, self).__init__(*args, **kwargs)
def opened(self):
spawn(self.on_connected_callback)
def received_message(self, msg):
self.on_message_callback(msg)
def unhandled_error(self, error):
spawn(self.on_error_callback, error)
def closed(self, code, reason=None):
super(_WSClient, self).closed(code, reason)
self.on_closed_callback(code, reason)
# ################################################################################################################################
class Client(object):
""" A WebSocket client that knows how to invoke Zato services.
"""
def __init__(self, config):
# type: (Config)
self.config = config
self.conn = _WSClient(self.on_connected, self.on_message, self.on_error, self.on_closed, self.config.address)
self.keep_running = True
self.is_authenticated = False
self.is_connected = False
self.is_auth_needed = bool(self.config.username)
self.auth_token = None
self.on_request_callback = self.config.on_request_callback
self.on_closed_callback = self.config.on_closed_callback
self.needs_auth = self.config.needs_auth
# Keyed by IDs of requests sent from this client to Zato
self.requests_sent = {}
# Same key as self.requests_sent but the dictionary contains responses to previously sent requests
self.responses_received = {}
# Requests initiated by Zato, keyed by their IDs
self.requests_received = {}
# ################################################################################################################################
def send(self, msg_id, msg, wait_time=2):
""" Spawns a greenlet to send a message to Zato.
"""
spawn(self._send, msg_id, msg, msg.serialize(), wait_time)
# ################################################################################################################################
def _send(self, msg_id, msg, serialized, wait_time):
""" Sends a request to Zato and waits up to wait_time or self.config.wait_time seconds for a reply.
"""
logger.info('Sending msg `%s`', serialized)
# So that it can be correlated with a future response
self.requests_sent[msg_id] = msg
        # Actually send the message as a string now
self.conn.send(serialized)
# ################################################################################################################################
def _wait_for_response(self, request_id, wait_time=None, _now=datetime.utcnow, _delta=timedelta, _sleep=sleep):
""" Wait until a response arrives and return it
or return None if there is no response up to wait_time or self.config.wait_time.
"""
now = _now()
until = now + _delta(seconds=wait_time or self.config.wait_time)
while now < until:
response = self.responses_received.get(request_id)
if response:
return response
else:
_sleep(0.01)
now = _now()
# ################################################################################################################################
def authenticate(self, request_id):
""" Authenticates the client with Zato.
"""
logger.info('Authenticating as `%s` (%s %s)', self.config.username, self.config.client_name, self.config.client_id)
spawn(self.send, request_id, AuthRequest(request_id, self.config, self.auth_token))
# ################################################################################################################################
def on_connected(self):
""" Invoked upon establishing an initial connection - logs the client in with self.config's credentials
"""
logger.info('Connected to `%s` %s (%s %s)',
self.config.address,
'as `{}`'.format(self.config.username) if self.config.username else 'without credentials',
self.config.client_name, self.config.client_id)
request_id = MSG_PREFIX.SEND_AUTH.format(uuid4().hex)
self.authenticate(request_id)
response = self._wait_for_response(request_id)
if not response:
logger.warn('No response to authentication request `%s`', request_id)
else:
self.auth_token = response.data['token']
self.is_authenticated = True
del self.responses_received[request_id]
logger.info('Authenticated successfully as `%s` (%s %s)',
self.config.username, self.config.client_name, self.config.client_id)
# ################################################################################################################################
def on_message(self, msg, _uuid4=uuid4):
""" Invoked for each message received from Zato, both for responses to previous requests and for incoming requests.
"""
_msg = loads(msg.data.decode('utf-8') if isinstance(msg.data, binary_type) else msg.data)
logger.info('Received message `%s`', _msg)
in_reply_to = _msg['meta'].get('in_reply_to')
# Reply from Zato to one of our requests
if in_reply_to:
self.responses_received[in_reply_to] = ResponseFromZato.from_json(_msg)
# Request from Zato
else:
data = self.on_request_callback(RequestFromZato.from_json(_msg))
response_id = MSG_PREFIX.SEND_RESP.format(_uuid4().hex)
self.send(response_id, ResponseToZato(_msg['meta']['id'], data, response_id, self.config, self.auth_token))
# ################################################################################################################################
def on_closed(self, code, reason=None):
logger.info('Closed WSX client connection to `%s` (remote code:%s reason:%s)', self.config.address, code, reason)
if self.on_closed_callback:
self.on_closed_callback(code, reason)
# ################################################################################################################################
def on_error(self, error):
""" Invoked for each unhandled error in the lower-level ws4py library.
"""
logger.warn('Caught error %s', error)
# ################################################################################################################################
def _run(self, max_wait=10, _sleep_time=2):
needs_connect = True
start = now = datetime.utcnow()
# In the first few seconds, do not warn about socket errors in case
# the other end is intrinsically slow to connect to.
warn_from = start + timedelta(seconds=3)
use_warn = False
# Wait for max_wait seconds until we have the connection
until = now + timedelta(seconds=max_wait)
while self.keep_running and needs_connect and now < until:
try:
if self.conn.sock:
self.conn.connect()
else:
raise ValueError('No WSX connection to {} after {}'.format(self.config.address, now - start))
except Exception as e:
if use_warn:
log_func = logger.warn
else:
if now >= warn_from:
log_func = logger.warn
use_warn = True
else:
log_func = logger.debug
log_func('Exception caught `%s` while connecting to WSX `%s (%s)`', e, self.config.address, format_exc())
sleep(_sleep_time)
now = datetime.utcnow()
else:
needs_connect = False
self.is_connected = True
# ################################################################################################################################
def run(self, max_wait=20):
        self._run(max_wait)
now = datetime.utcnow()
until = now + timedelta(seconds=max_wait)
while not self.is_connected:
sleep(0.01)
now = datetime.utcnow()
if now >= until:
return
# ################################################################################################################################
def stop(self, reason=''):
self.keep_running = False
self.conn.close(reason=reason)
self.is_connected = False
# ################################################################################################################################
def invoke(self, request, timeout=5):
if self.needs_auth and (not self.is_authenticated):
raise Exception('Client is not authenticated')
request_id = MSG_PREFIX.INVOKE_SERVICE.format(uuid4().hex)
spawn(self.send, request_id, ServiceInvokeRequest(request_id, request, self.config, self.auth_token))
response = self._wait_for_response(request_id, wait_time=timeout)
if not response:
logger.warn('No response to invocation request `%s`', request_id)
else:
return response
# ################################################################################################################################
if __name__ == '__main__':
def on_request_from_zato(msg):
try:
return subprocess.check_output(msg.data['cmd'])
except Exception as e:
            return format_exc()
config = Config()
config.client_name = 'My Client'
config.client_id = '32351b3f5d16'
address = 'ws://127.0.0.1:47043/zato.ws.apitests'
config.address = address
config.username = 'user1'
config.secret = 'secret1'
config.on_request_callback = on_request_from_zato
client = Client(config)
client.run()
client.invoke({'service':'zato.ping'})
logger.info('Press Ctrl-C to quit')
try:
        x = 0
        while x < 1000 and client.keep_running:
            x += 1
            sleep(0.2)
except KeyboardInterrupt:
client.stop()
# ################################################################################################################################

# File: src/zato/common/wsx_client.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
from traceback import format_exc
# Zato
from zato.common.api import NotGiven
from zato.common.exception import BadRequest, InternalServerError
from zato.common.rate_limiting.common import RateLimitReached as RateLimitReachedError
# ################################################################################################################################
# Type checking
import typing
if typing.TYPE_CHECKING:
# stdlib
from typing import Callable
# Zato
from zato.common.json_schema import ValidationException as JSONSchemaValidationException
from zato.server.service import ChannelInfo
from zato.server.service.store import ServiceStore
# For pyflakes
Callable = Callable
ChannelInfo = ChannelInfo
JSONSchemaValidationException = JSONSchemaValidationException
ServiceStore = ServiceStore
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
json_rpc_version_supported = '2.0'
# ################################################################################################################################
# ################################################################################################################################
class RequestContext(object):
__slots__ = ('cid', 'orig_message', 'message')
def __init__(self):
self.cid = None # type: str
self.orig_message = None # type: object
self.message = None # type: str
# ################################################################################################################################
# ################################################################################################################################
class ErrorCtx(object):
__slots__ = ('cid', 'code', 'message')
def __init__(self):
self.cid = None # type: str
self.code = None # type: int
self.message = None # type: str
def to_dict(self):
# type: () -> dict
return {
'code': self.code,
'message': self.message,
'data': {
'ctx': {
'cid': self.cid
}
}
}
# ################################################################################################################################
# ################################################################################################################################
class ItemResponse(object):
__slots__ = ('id', 'cid', 'error', 'result')
def __init__(self):
self.id = None # type: int
self.cid = None # type: str
self.error = None # type: ErrorCtx
self.result = NotGiven # type: object
def to_dict(self, _json_rpc_version=json_rpc_version_supported):
# type: (str) -> dict
out = {
'jsonrpc': _json_rpc_version,
'id': self.id,
}
if self.result is not NotGiven:
out['result'] = self.result
else:
out['error'] = self.error.to_dict()
return out
# ################################################################################################################################
# ################################################################################################################################
class JSONRPCException(object):
code = -32000
# ################################################################################################################################
# ################################################################################################################################
class JSONRPCBadRequest(JSONRPCException, BadRequest):
def __init__(self, cid, message):
# type: (str, str)
BadRequest.__init__(self, cid, msg=message)
# ################################################################################################################################
# ################################################################################################################################
class InvalidRequest(JSONRPCBadRequest):
code = -32600
# ################################################################################################################################
# ################################################################################################################################
class MethodNotFound(JSONRPCBadRequest):
code = -32601
# ################################################################################################################################
# ################################################################################################################################
class InternalError(JSONRPCException, InternalServerError):
code = -32603
# ################################################################################################################################
# ################################################################################################################################
class ParseError(JSONRPCBadRequest):
code = -32700
# ################################################################################################################################
# ################################################################################################################################
class Forbidden(JSONRPCBadRequest):
code = -32403
# ################################################################################################################################
# ################################################################################################################################
class RateLimitReached(JSONRPCBadRequest):
code = -32429
# ################################################################################################################################
# ################################################################################################################################
class JSONRPCItem(object):
""" An object describing an individual JSON-RPC request.
"""
__slots__ = 'jsonrpc', 'method', 'params', 'id', 'needs_response'
# ################################################################################################################################
def __init__(self):
self.jsonrpc = None # type: str
self.method = None # type: str
self.params = None # type: object
self.id = None # type: str
self.needs_response = None # type: bool
# ################################################################################################################################
def to_dict(self):
# type: () -> dict
return {
'jsonrpc': self.jsonrpc,
'method': self.method,
'params': self.params,
'id': self.id
}
# ################################################################################################################################
@staticmethod
def from_dict(item):
# type: (dict) -> JSONRPCItem
# Our object to return
out = JSONRPCItem()
# At this stage we only create a Python-level object and input
# validation is performed by our caller.
out.jsonrpc = item.get('jsonrpc')
        out.id = item.get('id', NotGiven)
out.method = item.get('method')
out.params = item.get('params')
out.needs_response = out.id is not NotGiven
return out
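
# Illustrative round-trip only, not part of the original module - builds an item out of
# a sample JSON-RPC 2.0 request dict such as what the handler below receives.
def _demo_json_rpc_item():
    request = {
        'jsonrpc': '2.0',
        'method': 'zato.ping',
        'params': {},
        'id': 'json-rpc-1'
    }
    item = JSONRPCItem.from_dict(request)
    assert item.needs_response is True # An 'id' was given, so a response is expected
    return item.to_dict()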
# ################################################################################################################################
# ################################################################################################################################
class JSONRPCHandler(object):
def __init__(self, service_store, wsgi_environ, config, invoke_func, channel_info, JSONSchemaValidationException):
# type: (ServiceStore, dict, dict, Callable, ChannelInfo, JSONSchemaValidationException)
self.service_store = service_store
self.wsgi_environ = wsgi_environ
self.config = config
self.invoke_func = invoke_func
self.channel_info = channel_info
# Kept here and provided by the caller to remove circular imports between common/json_rpc.py and common/json_schema.py
self.JSONSchemaValidationException = JSONSchemaValidationException
# ################################################################################################################################
def handle(self, ctx):
# type: (RequestContext) -> object
if isinstance(ctx.message, list):
return self.handle_list(ctx)
else:
return self.handle_one_item(ctx)
# ################################################################################################################################
def can_handle(self, method):
# type: (str) -> bool
return method in self.config['service_whitelist']
# ################################################################################################################################
def _handle_one_item(self, cid, message, orig_message, _json_rpc_version=json_rpc_version_supported):
        # type: (str, dict, object, str) -> dict
try:
# Response to return
out = ItemResponse()
# Construct a Python object out of incoming data
item = JSONRPCItem.from_dict(message)
# We should have the ID at this point
out.id = item.id
# Confirm that we can handle the JSON-RPC version requested
            if item.jsonrpc != _json_rpc_version:
raise InvalidRequest(cid, 'Unsupported JSON-RPC version `{}` in `{}`'.format(
item.jsonrpc, orig_message.decode('utf8')))
# Confirm that method requested is one that we can handle
if not self.can_handle(item.method):
raise MethodNotFound(cid, 'Method not supported `{}` in `{}`'.format(item.method, orig_message.decode('utf8')))
# Try to invoke the service ..
skip_response_elem = self.service_store.has_sio(item.method)
service_response = self.invoke_func(item.method, item.params, channel_info=self.channel_info,
skip_response_elem=skip_response_elem, wsgi_environ=self.wsgi_environ)
# .. no exception here = invocation was successful
out.result = service_response
return out.to_dict() if item.needs_response else None
except Exception as e:
is_schema_error = isinstance(e, self.JSONSchemaValidationException)
is_rate_limit_error = isinstance(e, RateLimitReachedError)
error_ctx = ErrorCtx()
error_ctx.cid = cid
# JSON Schema validator error
if is_schema_error:
err_code = InvalidRequest.code
err_message = e.error_msg_details if e.needs_err_details else e.error_msg
elif is_rate_limit_error:
err_code = RateLimitReached.code
err_message = 'Too Many Requests'
else:
# Any JSON-RPC error
if isinstance(e, JSONRPCException):
err_code = e.code
err_message = e.args[0]
# Any other error
else:
err_code = -32000
err_message = 'Message could not be handled'
if is_schema_error:
logger.warn('JSON Schema validation error in JSON-RPC channel `%s` (%s); msg:`%s`, e:`%s`, details:`%s`',
self.config.name, cid, orig_message, format_exc(), e.error_msg_details)
else:
logger.warn('JSON-RPC exception in `%s` (%s); msg:`%s`, e:`%s`',
self.config.name, cid, orig_message, format_exc())
error_ctx.code = err_code
error_ctx.message = err_message
out.error = error_ctx
return out.to_dict()
# ################################################################################################################################
def handle_one_item(self, ctx, _json_rpc_version=json_rpc_version_supported):
# type: (RequestContext) -> dict
return self._handle_one_item(ctx.cid, ctx.message, ctx.orig_message)
# ################################################################################################################################
def handle_list(self, ctx):
# type: (RequestContext) -> list
out = []
for item in ctx.message: # type: dict
out.append(self._handle_one_item(ctx.cid, item, ctx.orig_message))
return out
# ################################################################################################################################
# ################################################################################################################################

# File: src/zato/common/json_rpc.py
from __future__ import absolute_import, division, print_function, unicode_literals
"""
A set of settings kept in an SQLite database.
"""
# stdlib
import os
from logging import getLogger
# SQLAlchemy
from sqlalchemy import Column, create_engine, Integer, Sequence, String, Text, UniqueConstraint
from sqlalchemy.ext.declarative import declarative_base
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
Base = declarative_base()
# ################################################################################################################################
class Setting(Base):
__tablename__ = 'settings'
__table_args__ = (UniqueConstraint('name'), {})
id = Column(Integer, Sequence('settings_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(Text, nullable=True)
data_type = Column(String(20), nullable=False)
# ################################################################################################################################
class DATA_TYPE:
INTEGER = 'integer'
STRING = 'string'
data_type_handler = {
DATA_TYPE.INTEGER: int,
DATA_TYPE.STRING: lambda value: value,
}
# ################################################################################################################################
class SettingsDB(object):
""" Keeps simple settings in an SQLite database. It's new in 3.0 so to ease in migration from pre-3.0 releases
the class takes care itself of making sure that its underlying database actually exists - a future Zato version
will simply assume that it does.
"""
def __init__(self, db_path, session):
self.db_path = db_path
self.session = session
# Older environments did not have this database
if not os.path.exists(self.db_path):
self.create_db()
def get_engine(self):
return create_engine('sqlite:///{}'.format(self.db_path))
def create_db(self):
Base.metadata.create_all(self.get_engine())
def get(self, name, default=None, needs_object=False):
data = self.session.query(Setting).\
filter(Setting.name==name).\
first() or None
if needs_object:
return data
return data_type_handler[data.data_type](data.value) if data else default
def set(self, name, value, data_type=DATA_TYPE.INTEGER):
s = self.get(name, needs_object=True) or Setting()
s.name = name
s.value = value
s.data_type = data_type
self.session.add(s)
self.session.commit()
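
# Illustrative usage only, not part of the original module - the database path below is
# an assumption for demonstration.
def _demo_settings_db():
    from sqlalchemy.orm import sessionmaker
    db_path = '/tmp/zato-settings.db'
    engine = create_engine('sqlite:///{}'.format(db_path))
    session = sessionmaker(bind=engine)()
    settings_db = SettingsDB(db_path, session)
    settings_db.set('cluster.id', 1)
    return settings_db.get('cluster.id', default=0) # 1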
# ################################################################################################################################

# File: src/zato/common/settings_db.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
from logging import getLogger
# JSON Schema
from jsonschema import validate as js_validate
from jsonschema.exceptions import ValidationError as JSValidationError
from jsonschema.validators import validator_for
# Zato
from zato.common.api import CHANNEL, NotGiven
from zato.common.json_internal import dumps, loads
from zato.common.json_rpc import ErrorCtx, JSONRPCBadRequest, ItemResponse
# ################################################################################################################################
# Type checking
import typing
if typing.TYPE_CHECKING:
# stdlib
from typing import Callable
# Bunch
from bunch import Bunch
# Zato
from zato.server.base.parallel import ParallelServer
# For pyflakes
Bunch = Bunch
Callable = Callable
ParallelServer = ParallelServer
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
def get_service_config(item, server):
# type: (Bunch, ParallelServer) -> dict
# By default services are allowed to validate input using JSON Schema
is_json_schema_enabled = item.get('is_json_schema_enabled', True)
# Unless configured per each service separately, we use server defaults here
needs_json_schema_err_details = item.get('needs_json_schema_err_details', NotGiven)
if needs_json_schema_err_details is NotGiven:
needs_json_schema_err_details = server.fs_server_config.misc.return_json_schema_errors
return {
'is_json_schema_enabled': is_json_schema_enabled,
'needs_json_schema_err_details': needs_json_schema_err_details
}
# ################################################################################################################################
# ################################################################################################################################
class ValidationException(Exception):
def __init__(self, cid, object_type, object_name, needs_err_details, error_msg, error_msg_details):
# type: (str, str, str, bool, str, str)
self.cid = cid
self.object_type = object_type
self.object_name = object_name
self.needs_err_details = needs_err_details
self.error_msg = error_msg
self.error_msg_details = error_msg_details
super(ValidationException, self).__init__('JSON Schema validation error in `{}` ({}), e:`{}`'.format(
self.object_name, cid, self.error_msg))
# ################################################################################################################################
# ################################################################################################################################
class ValidationError(object):
""" Base class for validation error-related classes.
"""
__slots__ = 'cid', 'needs_err_details', 'error_msg', 'error_extra', 'needs_prefix'
def __init__(self, cid, needs_err_details, error_msg, error_extra=None, needs_prefix=True):
# type: (str, bool, str, dict, bool)
self.cid = cid
self.needs_err_details = needs_err_details
self.error_msg = error_msg
self.error_extra = error_extra
self.needs_prefix = needs_prefix
def get_error_message(self, needs_error_msg=False):
# type: (bool) -> str
out = 'Invalid request' if self.needs_prefix else ''
if needs_error_msg or self.needs_err_details:
if out:
out += '; '
out += self.error_msg
return out
def serialize(self):
raise NotImplementedError('Must be overridden in subclasses')
# ################################################################################################################################
# ################################################################################################################################
class DictError(ValidationError):
""" An error reporter that serializes JSON Schema validation errors into Python dict responses.
"""
def serialize(self, to_string=False):
# type: (bool) -> object
out = {
'is_ok': False,
'cid': self.cid,
'message': self.get_error_message()
}
return dumps(out) if to_string else out
# ################################################################################################################################
# ################################################################################################################################
class JSONRPCError(ValidationError):
""" An error reporter that serializes JSON Schema validation errors into JSON-RPC responses.
"""
def serialize(self):
# type: () -> dict
error_ctx = ErrorCtx()
error_ctx.cid = self.cid
error_ctx.code = JSONRPCBadRequest.code
error_ctx.message = 'Invalid request'
# This may be optionally turned off
error_ctx.message = self.get_error_message()
out = ItemResponse()
out.id = self.error_extra['json_rpc_id']
out.error = error_ctx
return out.to_dict()
# ################################################################################################################################
channel_type_to_error_class = {
CHANNEL.HTTP_SOAP: DictError,
CHANNEL.JSON_RPC: JSONRPCError,
CHANNEL.SERVICE: DictError,
}
# ################################################################################################################################
# ################################################################################################################################
class ValidationConfig(object):
""" An individual set of configuration options - each object requiring validation (e.g. each channel)
will have its own instance of this class assigned to its validator.
"""
__slots__ = 'is_enabled', 'object_type', 'object_name', 'schema_path', 'schema', 'validator', 'needs_err_details'
def __init__(self):
self.is_enabled = None # type: bool
# Object type is channel type or, in the future, one of outgoing connections
# whose requests to external resources we may also want to validate.
self.object_type = None # type: str
self.object_name = None # type: str
self.schema_path = None # type: str
self.schema = None # type: dict
self.validator = None # type: object
self.needs_err_details = None # type: bool
# ################################################################################################################################
# ################################################################################################################################
class Result(object):
__slots__ = 'is_ok', 'cid', 'needs_err_details', 'error_msg', 'error_extra', 'object_type'
def __init__(self):
self.is_ok = None # type: bool
self.cid = None # type: str
self.needs_err_details = None # type: bool
self.error_msg = None # type: str
self.error_extra = None # type: dict
self.object_type = None # type: str
def __bool__(self):
return bool(self.is_ok)
__nonzero__ = __bool__
def get_error(self):
# type: () -> ValidationError
ErrorClass = channel_type_to_error_class[self.object_type]
error = ErrorClass(self.cid, self.needs_err_details, self.error_msg, self.error_extra) # type: ValidationError
return error
# ################################################################################################################################
# ################################################################################################################################
class Validator(object):
""" Validates JSON requests against a previously assigned schema and serializes errors according to the caller's type,
e.g. using REST or JSON-RPC.
"""
__slots__ = 'is_initialized', 'config'
def __init__(self):
self.is_initialized = False # type: bool
self.config = None # type: ValidationConfig
def init(self):
if not self.config.is_enabled:
logger.info('Skipped initialization of JSON Schema validation for `%s` (%s)',
self.config.object_name, self.config.object_type)
return
        if not os.path.exists(self.config.schema_path):
            msg = 'JSON schema not found `{}` ({})'.format(self.config.schema_path, self.config.object_name)
            raise ValidationException(None, self.config.object_type, self.config.object_name, True, msg, msg)
# The file is sure to exist
with open(self.config.schema_path) as f:
schema = f.read()
# Parse the contents as JSON
schema = loads(schema)
# Assign the schema and validator for the schema for later use
self.config.schema = schema
self.config.validator = validator_for(schema)
# Everything is set up = we are initialized
self.is_initialized = True
def validate(self, cid, data, object_type=None, object_name=None, needs_err_details=False, _validate=js_validate):
        # type: (str, object, str, str, bool, Callable) -> Result
# Result we will return
result = Result()
result.cid = cid
object_type = object_type or self.config.object_type
        object_name = object_name or self.config.object_name
needs_err_details = needs_err_details or self.config.needs_err_details
try:
            _validate(data, self.config.schema, self.config.validator)
except JSValidationError as e:
# These will be always used, no matter the object/channel type
result.is_ok = False
result.object_type = object_type
result.needs_err_details = needs_err_details
result.error_msg = str(e)
# This is applicable only to JSON-RPC
if object_type == CHANNEL.JSON_RPC:
result.error_extra = {'json_rpc_id': data.get('id')}
else:
result.is_ok = True
return result
# ################################################################################################################################
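# A minimal usage sketch of the machinery above - the schema path and channel name
# are illustrative only and the schema file is assumed to already exist on disk.
#
#   config = ValidationConfig()
#   config.is_enabled = True
#   config.object_type = CHANNEL.HTTP_SOAP
#   config.object_name = 'my.channel'             # Hypothetical channel name
#   config.schema_path = '/path/to/schema.json'   # Hypothetical path
#   config.needs_err_details = True
#
#   validator = Validator()
#   validator.config = config
#   validator.init()
#
#   result = validator.validate('cid-1', {'hello': 'world'})
#   if not result:
#       error = result.get_error() # A channel-specific error object, e.g. DictError or JSONRPCError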
# ################################################################################################################################
# ################################################################################################################################
# ################################################################################################################################
# stdlib
from typing import Optional as optional
# dacite
from dacite import from_dict
# Be explicit about which import error we want to catch
try:
    import dataclasses # noqa: F401

# Python 3.6 - the stdlib module is missing so we use a vendored backport
except ImportError:
    from zato.common.ext.dataclasses import * # noqa: F401

# Python 3.7+ - dataclasses are part of the standard library
else:
    from dataclasses import * # noqa: F401
# ################################################################################################################################
# ################################################################################################################################
#
# TypedDict
#
try:
from typing import TypedDict
except ImportError:
from zato.common.ext.typing_extensions import TypedDict
# ################################################################################################################################
# ################################################################################################################################
# For flake8
from_dict = from_dict
optional = optional
TypedDict = TypedDict
# ################################################################################################################################
# ################################################################################################################################
def instance_from_dict(class_, data):
# type: (object, dict) -> object
instance = class_()
for key, value in data.items():
setattr(instance, key, value)
return instance
# ################################################################################################################################
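# A quick sketch of instance_from_dict in action - Config here is a stand-in class,
# not one that exists in this module.
#
#   class Config:
#       host = None
#       port = None
#
#   instance = instance_from_dict(Config, {'host': '127.0.0.1', 'port': 8080})
#   instance.host # '127.0.0.1'
#   instance.port # 8080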
# ################################################################################################################################
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
from datetime import datetime
from itertools import groupby
from io import StringIO
from operator import attrgetter
from time import time
# psutil
from psutil import Process
# PyYAML
import yaml
# pytz
from pytz import UTC
# Texttable
from texttable import Texttable
# Zato
from zato.common.api import INFO_FORMAT, MISC, ZATO_INFO_FILE
from zato.common.json_internal import dumps as json_dumps, loads as json_loads
from zato.common.util.api import current_host
def format_connections(conns, format):
""" Formats a list of connections according to the output format.
"""
    groups = groupby(conns, key=attrgetter('status'))
out = {}
for status, items in groups:
items = list(items)
items.sort(key=attrgetter('raddr'))
out_items = out.setdefault(status, [])
for item in items:
laddr_str = ':'.join(str(elem) for elem in item.laddr).ljust(21)
raddr_str = ':'.join(str(elem) for elem in item.raddr).rjust(21)
out_item = {
'from': '{}:{}'.format(*item.laddr),
'to': None,
'formatted': None,
}
if item.raddr:
out_item['to'] = '{}:{}'.format(*item.raddr)
out_item['formatted'] = '{} -> {}'.format(laddr_str, raddr_str)
else:
out_item['formatted'] = '{}:{}'.format(*item.laddr)
out_items.append(out_item)
return out
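# For reference, each element of `conns` is expected to be shaped like psutil's
# connection named tuples - the sample below is illustrative only:
#
#   conn.status # 'ESTABLISHED'
#   conn.laddr  # ('127.0.0.1', 17010)
#   conn.raddr  # ('127.0.0.1', 5432)
#
# .. and the result groups formatted entries by status, roughly:
#
#   {'ESTABLISHED': [
#       {'from': '127.0.0.1:17010', 'to': '127.0.0.1:5432',
#        'formatted': '127.0.0.1:17010       ->        127.0.0.1:5432'}
#   ]}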
def get_worker_pids(component_path):
""" Returns PIDs of all workers of a given server, which must be already started.
"""
master_proc_pid = int(open(os.path.join(component_path, MISC.PIDFILE)).read())
return sorted(elem.pid for elem in Process(master_proc_pid).children())
def get_info(component_path, format, _now=datetime.utcnow):
component_details = open(os.path.join(component_path, ZATO_INFO_FILE)).read()
out = {
'component_details': component_details,
'component_full_path': component_path,
'component_host': current_host(),
'component_running': False,
'current_time': datetime.now().isoformat(),
'current_time_utc': datetime.utcnow().isoformat(),
'master_proc_connections': None,
'master_proc_pid': None,
'master_proc_name': None,
'master_proc_create_time': None,
'master_proc_create_time_utc': None,
'master_proc_username': None,
'master_proc_workers_no': None,
'master_proc_workers_pids': None,
}
master_proc_pid = None
try:
master_proc_pid = int(open(os.path.join(component_path, MISC.PIDFILE)).read())
    except (IOError, ValueError):
# Ok, no such file or it's empty
pass
if master_proc_pid:
out['component_running'] = True
master_proc = Process(master_proc_pid)
workers_pids = sorted(elem.pid for elem in master_proc.children())
now = datetime.fromtimestamp(time(), UTC)
        master_proc_create_time = master_proc.create_time()
        master_proc_create_time_utc = datetime.fromtimestamp(master_proc_create_time, UTC)
        out['master_proc_uptime'] = now - master_proc_create_time_utc
        out['master_proc_uptime_seconds'] = int(out['master_proc_uptime'].total_seconds())
        out['master_proc_connections'] = format_connections(master_proc.connections(), format)
        out['master_proc_pid'] = master_proc.pid
        out['master_proc_create_time'] = datetime.fromtimestamp(master_proc_create_time).isoformat()
        out['master_proc_create_time_utc'] = master_proc_create_time_utc.isoformat()
out['master_proc_username'] = master_proc.username()
out['master_proc_name'] = master_proc.name()
out['master_proc_workers_no'] = len(workers_pids)
out['master_proc_workers_pids'] = workers_pids
for pid in workers_pids:
worker = Process(pid)
worker_create_time = worker.create_time()
worker_create_time_utc = datetime.fromtimestamp(worker_create_time, UTC)
out['worker_{}_uptime'.format(pid)] = now - worker_create_time_utc
out['worker_{}_uptime_seconds'.format(pid)] = int(out['worker_{}_uptime'.format(pid)].total_seconds())
out['worker_{}_create_time'.format(pid)] = datetime.fromtimestamp(worker_create_time).isoformat()
out['worker_{}_create_time_utc'.format(pid)] = worker_create_time_utc.isoformat()
out['worker_{}_connections'.format(pid)] = format_connections(worker.connections(), format)
return out
def format_info(value, format, cols_width=None, dumper=None):
    if format in (INFO_FORMAT.DICT, INFO_FORMAT.JSON, INFO_FORMAT.YAML):
value['component_details'] = json_loads(value['component_details'])
if format == INFO_FORMAT.JSON:
return json_dumps(value)
elif format == INFO_FORMAT.YAML:
buff = StringIO()
yaml.dump_all([value], default_flow_style=False, indent=4, Dumper=dumper, stream=buff)
value = buff.getvalue()
buff.close()
return value
elif format == INFO_FORMAT.TEXT:
cols_width = (elem.strip() for elem in cols_width.split(','))
cols_width = [int(elem) for elem in cols_width]
table = Texttable()
table.set_cols_width(cols_width)
# Use text ('t') instead of auto so that boolean values don't get converted into ints
table.set_cols_dtype(['t', 't'])
rows = [['Key', 'Value']]
rows.extend(sorted(value.items()))
table.add_rows(rows)
return table.draw()
else:
        return value
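# A usage sketch tying get_info and format_info together - the path below is
# illustrative and a started server component is assumed.
#
#   info = get_info('/path/to/server1', INFO_FORMAT.TEXT)
#
#   # Text output needs explicit column widths, here 30 and 90 characters
#   print(format_info(info, INFO_FORMAT.TEXT, cols_width='30,90'))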
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import itertools
# ipaddress
from ipaddress import ip_address, ip_network
# netifaces
from netifaces import AF_INET, ifaddresses as net_ifaddresses, interfaces as net_ifaces
# Python 2/3 compatibility
from builtins import bytes
from future.moves.urllib.parse import urlparse
from six import PY2
# ################################################################################################################################
def to_ip_network(address):
    """ Converts address to a network object assuming it is feasible at all, otherwise returns None.
    """
    try:
        return ip_network(address)
    except ValueError:
        return None
# ################################################################################################################################
def ip_list_from_interface(interface, allow_loopback=False):
""" Return the list of IP address for the given interface, possibly including loopback addresses
"""
addresses = []
af_inet = net_ifaddresses(interface).get(AF_INET)
if af_inet:
_addresses = [elem.get('addr') for elem in af_inet]
if PY2:
_addresses = [elem.decode('utf8') for elem in _addresses]
for address in _addresses:
address = ip_address(address)
if address.is_loopback and not allow_loopback:
continue
addresses.append(address)
return addresses
# ################################################################################################################################
def get_preferred_ip(base_bind, user_prefs):
""" Given user preferences, iterate over all address in all interfaces and check if any matches what users prefer.
Note that preferences can include actual names of interfaces, not only IP or IP ranges.
"""
# First check out if the base address to bind does not already specify a concrete IP.
# If it does, then this will be the preferred one.
parsed = urlparse('https://{}'.format(base_bind))
if parsed.hostname != '0.0.0.0':
return parsed.hostname
# What is preferred
preferred = user_prefs.ip
# What actually exists in the system
current_ifaces = net_ifaces()
# Would be very weird not to have anything, even loopback, but oh well
if not current_ifaces:
return None
current_ifaces.sort()
current_addresses = [net_ifaddresses(elem).get(AF_INET) for elem in current_ifaces]
current_addresses = [[elem.get('addr') for elem in x] for x in current_addresses if x]
current_addresses = list(itertools.chain.from_iterable(current_addresses))
    # Preferences broken out into interfaces and network ranges/IP addresses
pref_interfaces = [elem for elem in preferred if elem in net_ifaces()]
pref_networks = [to_ip_network(elem) for elem in preferred]
pref_networks = [elem for elem in pref_networks if elem]
# If users prefer a named interface and we have it then we need to return its IP
for elem in pref_interfaces:
# If any named interface is found, returns its first IP, if there is any
ip_list = ip_list_from_interface(elem, user_prefs.allow_loopback)
if ip_list:
return str(ip_list[0])
# No address has been found by its interface but perhaps one has been specified explicitly
# or through a network range.
for current in current_addresses:
        for pref_network in pref_networks:
            if ip_address(current.decode('utf8') if isinstance(current, bytes) else current) in pref_network:
                return current
# Ok, still nothing, so we need to find something ourselves
loopback_ip = None
# First let's try the first non-loopback interface.
    for elem in current_ifaces:
        for ip in ip_list_from_interface(elem, True):
            if ip.is_loopback:
                loopback_ip = ip
                continue
            return str(ip)
# If there is only loopback and we are allowed to use it then so be it
if user_prefs.allow_loopback:
return loopback_ip
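# A usage sketch - user_prefs is any object with .ip (a list of preferred interfaces,
# IP addresses or network ranges) and .allow_loopback attributes; Bunch is used here
# for brevity and the values themselves are illustrative.
#
#   prefs = Bunch(ip=['eth0', '10.0.0.0/8'], allow_loopback=False)
#   get_preferred_ip('0.0.0.0:17010', prefs) # E.g. '10.152.11.4'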
# ################################################################################################################################
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
# ipaddress
from ipaddress import IPv4Address, IPv6Address
# Zato
from zato.common.json_internal import dumps
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
class AuditPII(object):
""" Audit log for personally identifiable information (PII).
"""
def __init__(self):
self._logger = logging.getLogger('zato_audit_pii')
# ################################################################################################################################
    def _log(self, func, cid, op, current_user='', target_user='', result='', extra='', _dumps=dumps):

        # Normalize the remote address, but only if we were given a dict of extra data at all
        if isinstance(extra, dict):
            remote_addr = extra.get('remote_addr')
            if not remote_addr:
                extra['remote_addr'] = ''
            elif isinstance(remote_addr, list) and remote_addr:
                if isinstance(remote_addr[0], (IPv4Address, IPv6Address)):
                    extra['remote_addr'] = ';'.join(elem.exploded for elem in remote_addr)

        entry = {
            'cid': cid,
            'op': op,
        }

        if current_user:
            entry['current_user'] = current_user
        if target_user:
            entry['target_user'] = target_user
        if result:
            entry['result'] = result
        if extra:
            entry['extra'] = extra

        entry = _dumps(entry)

        # Use the logger method we were given, e.g. info, warning or error
        func('%s' % entry)
# ################################################################################################################################
def info(self, *args, **kwargs):
self._log(self._logger.info, *args, **kwargs)
# ################################################################################################################################
def warn(self, *args, **kwargs):
        self._log(self._logger.warning, *args, **kwargs)
# ################################################################################################################################
def error(self, *args, **kwargs):
self._log(self._logger.error, *args, **kwargs)
# ################################################################################################################################
# A singleton available everywhere
audit_pii = AuditPII()
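# Usage sketch - the CID and user name below are illustrative. Each call produces
# a single JSON line in the zato_audit_pii log.
#
#   audit_pii.info('cid-1', 'login', current_user='user1', result='ok',
#       extra={'remote_addr': '127.0.0.1'})
#
#   # -> {"cid": "cid-1", "op": "login", "current_user": "user1", "result": "ok",
#   #     "extra": {"remote_addr": "127.0.0.1"}}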
# ################################################################################################################################
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
# ################################################################################################################################
class SFTPOutput(object):
""" Represents output resulting from execution of SFTP command(s).
"""
__slots__ = 'is_ok', 'cid', 'command', 'command_no', 'stdout', 'stderr', 'details', 'response_time'
def __init__(self, cid, command_no, command=None, is_ok=None, stdout=None, stderr=None, details=None, response_time=None):
        # type: (str, int, str, bool, str, str, str, float) -> None
self.cid = cid
self.command_no = command_no
self.command = command
self.is_ok = is_ok
self.stdout = stdout
self.stderr = stderr
self.details = details
self.response_time = response_time
# ################################################################################################################################
def __str__(self):
return '<{} at {}, cid:{}, command_no:{}, is_ok:{}, rt:{}>'.format(self.__class__.__name__, hex(id(self)), self.cid,
self.command_no, self.is_ok, self.response_time)
# ################################################################################################################################
def strip_stdout_prefix(self):
if self.stdout:
out = []
for line in self.stdout.splitlines():
if not line.startswith('sftp>'):
out.append(line)
self.stdout = '\n'.join(out)
# ################################################################################################################################
def to_dict(self):
# type: () -> dict
return {
'is_ok': self.is_ok,
'cid': self.cid,
'command': self.command,
'command_no': self.command_no,
'stdout': self.stdout,
'stderr': self.stderr,
'details': self.details,
'response_time': self.response_time,
}
# ################################################################################################################################
@staticmethod
def from_dict(data):
# type: (dict) -> SFTPOutput
return SFTPOutput(**data)
# ################################################################################################################################
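# A round-trip sketch - the CID and command below are illustrative.
#
#   out = SFTPOutput('cid-1', 1, command='ls /tmp', is_ok=True, stdout='sftp> ls /tmp\nfile1')
#   out.strip_stdout_prefix()
#   out.stdout # 'file1'
#
#   data = out.to_dict()
#   SFTPOutput.from_dict(data) # A new, equivalent SFTPOutput instance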
# ################################################################################################################################
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from collections import OrderedDict
from io import StringIO
from numbers import Number
from sys import maxsize
# Bunch
from bunch import Bunch
# ################################################################################################################################
# SQL ODB
engine_def = '{engine}://{username}:{password}@{host}:{port}/{db_name}'
engine_def_sqlite = 'sqlite:///{sqlite_path}'
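# For instance, a hypothetical PostgreSQL connection string built from the template above:
#
#   engine_def.format(engine='postgresql', username='zato1', password='secret',
#       host='localhost', port=5432, db_name='zatodb')
#   # -> 'postgresql://zato1:secret@localhost:5432/zatodb'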
# Convenience access functions and constants.
megabyte = 10 ** 6
# Hook methods whose default arguments (func.__defaults__) contain this marker will be assumed to have not been overridden by users
# and ServiceStore will be allowed to override them with None so that they will not be called in Service.update_handle
# which significantly improves performance (~30%).
zato_no_op_marker = 'zato_no_op_marker'
SECRET_SHADOW = '******'
# TRACE1 logging level, even more details than DEBUG
TRACE1 = 6
SECONDS_IN_DAY = 86400 # 60 seconds * 60 minutes * 24 hours (and we ignore leap seconds)
scheduler_date_time_format = '%Y-%m-%d %H:%M:%S'
soap_date_time_format = '%Y-%m-%dT%H:%M:%S.%fZ'
# TODO: Classes that have this attribute defined (no matter the value) will not be deployed
# onto servers.
DONT_DEPLOY_ATTR_NAME = 'zato_dont_import'
# A convenient constant used in several places, simplifies passing around
# arguments which are, well, not given (as opposed to being None, an empty string etc.)
ZATO_NOT_GIVEN = b'ZATO_NOT_GIVEN'
# Separates command line arguments in shell commands.
CLI_ARG_SEP = 'ZATO_ZATO_ZATO'
# Also used in a couple of places.
ZATO_OK = 'ZATO_OK'
ZATO_ERROR = 'ZATO_ERROR'
ZATO_WARNING = 'ZATO_WARNING'
ZATO_NONE = 'ZATO_NONE'
ZATO_DEFAULT = 'ZATO_DEFAULT'
ZATO_SEC_USE_RBAC = 'ZATO_SEC_USE_RBAC'
DELEGATED_TO_RBAC = 'Delegated to RBAC'
# Default HTTP method outgoing connections use to ping resources
# TODO: Move it to MISC
DEFAULT_HTTP_PING_METHOD = 'HEAD'
# Default size of an outgoing HTTP connection's pool (plain, SOAP, any).
# This is a per-outconn setting
# TODO: Move it to MISC
DEFAULT_HTTP_POOL_SIZE = 20
# Used when there's a need for encrypting/decrypting a well-known data.
# TODO: Move it to MISC
ZATO_CRYPTO_WELL_KNOWN_DATA = 'ZATO'
# Used if it could not be established what remote address a request came from
NO_REMOTE_ADDRESS = '(None)'
# Pattern matching order
TRUE_FALSE = 'true_false'
FALSE_TRUE = 'false_true'
simple_types = (bytes, str, dict, list, tuple, bool, Number)
# ################################################################################################################################
# ################################################################################################################################
generic_attrs = ('is_rate_limit_active', 'rate_limit_type', 'rate_limit_def', 'rate_limit_check_parent_def',
'is_audit_log_sent_active', 'is_audit_log_received_active', 'max_len_messages_sent', 'max_len_messages_received',
'max_bytes_per_message_sent', 'max_bytes_per_message_received', 'hl7_version', 'json_path', 'data_encoding',
'max_msg_size', 'read_buffer_size', 'recv_timeout', 'logging_level', 'should_log_messages', 'start_seq', 'end_seq',
'max_wait_time')
# ################################################################################################################################
# ################################################################################################################################
# These are used by web-admin only because servers and scheduler use sql.conf
ping_queries = {
'db2': 'SELECT current_date FROM sysibm.sysdummy1',
'mssql': 'SELECT 1',
'mysql+pymysql': 'SELECT 1+1',
'oracle': 'SELECT 1 FROM dual',
'postgresql': 'SELECT 1',
'postgresql+pg8000': 'SELECT 1',
'sqlite': 'SELECT 1',
}
engine_display_name = {
'db2': 'DB2',
'mssql': 'MS SQL',
'zato+mssql1': 'MS SQL (Direct)',
'mysql+pymysql': 'MySQL',
'oracle': 'Oracle',
'postgresql': 'PostgreSQL',
'postgresql+pg8000': 'PostgreSQL',
'sqlite': 'SQLite',
}
# ################################################################################################################################
# ################################################################################################################################
# All URL types Zato understands.
class URL_TYPE:
SOAP = 'soap'
PLAIN_HTTP = 'plain_http'
def __iter__(self):
return iter((self.SOAP, self.PLAIN_HTTP))
# ################################################################################################################################
# ################################################################################################################################
# Whether WS-Security passwords are transmitted in clear-text or not.
ZATO_WSS_PASSWORD_CLEAR_TEXT = Bunch(name='clear_text', label='Clear text')
ZATO_WSS_PASSWORD_TYPES = {
ZATO_WSS_PASSWORD_CLEAR_TEXT.name:ZATO_WSS_PASSWORD_CLEAR_TEXT.label,
}
# ################################################################################################################################
# ################################################################################################################################
ZATO_FIELD_OPERATORS = {
'is-equal-to': '==',
'is-not-equal-to': '!=',
}
# ################################################################################################################################
# ################################################################################################################################
ZMQ_OUTGOING_TYPES = ('PUSH', 'PUB')
# ################################################################################################################################
# ################################################################################################################################
class ZMQ:
PULL = 'PULL'
PUSH = 'PUSH'
PUB = 'PUB'
SUB = 'SUB'
MDP = 'MDP'
MDP01 = MDP + '01'
MDP01_HUMAN = 'Majordomo 0.1 (MDP)'
class POOL_STRATEGY_NAME:
SINGLE = 'single'
UNLIMITED = 'unlimited'
class SERVICE_SOURCE_NAME:
ZATO = 'zato'
MDP01 = 'mdp01'
CHANNEL = OrderedDict({
PULL: 'Pull',
SUB: 'Sub',
MDP01: MDP01_HUMAN,
})
OUTGOING = OrderedDict({
PUSH: 'Push',
PUB: 'Pub',
})
class METHOD_NAME:
BIND = 'bind'
CONNECT = 'connect'
METHOD = {
METHOD_NAME.BIND: 'Bind',
METHOD_NAME.CONNECT: 'Connect',
}
POOL_STRATEGY = OrderedDict({
POOL_STRATEGY_NAME.SINGLE: 'Single',
POOL_STRATEGY_NAME.UNLIMITED: 'Unlimited',
})
SERVICE_SOURCE = OrderedDict({
SERVICE_SOURCE_NAME.ZATO: 'Zato',
SERVICE_SOURCE_NAME.MDP01: MDP01_HUMAN,
})
# ################################################################################################################################
# ################################################################################################################################
ZATO_ODB_POOL_NAME = 'ZATO_ODB'
# ################################################################################################################################
# ################################################################################################################################
SOAP_VERSIONS = ('1.1', '1.2')
SOAP_CHANNEL_VERSIONS = ('1.1',)
# ################################################################################################################################
# ################################################################################################################################
class SEARCH:
class ES:
class DEFAULTS:
BODY_AS = 'POST'
HOSTS = '127.0.0.1:9200\n'
class SOLR:
class DEFAULTS:
ADDRESS = 'http://127.0.0.1:8983/solr'
PING_PATH = '/solr/admin/ping'
TIMEOUT = '10'
POOL_SIZE = '5'
class ZATO:
class DEFAULTS:
PAGE_SIZE = 50
PAGINATE_THRESHOLD = PAGE_SIZE + 1
# ################################################################################################################################
# ################################################################################################################################
class SEC_DEF_TYPE:
APIKEY = 'apikey'
AWS = 'aws'
BASIC_AUTH = 'basic_auth'
JWT = 'jwt'
NTLM = 'ntlm'
OAUTH = 'oauth'
TLS_CHANNEL_SEC = 'tls_channel_sec'
TLS_KEY_CERT = 'tls_key_cert'
WSS = 'wss'
VAULT = 'vault_conn_sec'
XPATH_SEC = 'xpath_sec'
# ################################################################################################################################
# ################################################################################################################################
SEC_DEF_TYPE_NAME = {
SEC_DEF_TYPE.APIKEY: 'API key',
SEC_DEF_TYPE.AWS: 'AWS',
SEC_DEF_TYPE.BASIC_AUTH: 'HTTP Basic Auth',
SEC_DEF_TYPE.JWT: 'JWT',
SEC_DEF_TYPE.NTLM: 'NTLM',
SEC_DEF_TYPE.OAUTH: 'OAuth 1.0',
SEC_DEF_TYPE.TLS_CHANNEL_SEC: 'TLS channel',
SEC_DEF_TYPE.TLS_KEY_CERT: 'TLS key/cert',
SEC_DEF_TYPE.WSS: 'WS-Security',
SEC_DEF_TYPE.VAULT: 'Vault',
SEC_DEF_TYPE.XPATH_SEC: 'XPath',
}
# ################################################################################################################################
# ################################################################################################################################
class AUTH_RESULT:
class BASIC_AUTH:
INVALID_PREFIX = 'invalid-prefix'
NO_AUTH = 'no-auth'
# ################################################################################################################################
# ################################################################################################################################
DEFAULT_STATS_SETTINGS = {
'scheduler_per_minute_aggr_interval':60,
'scheduler_raw_times_interval':90,
'scheduler_raw_times_batch':99999,
'atttention_slow_threshold':2000,
'atttention_top_threshold':10,
}
# ################################################################################################################################
# ################################################################################################################################
class BATCH_DEFAULTS:
PAGE_NO = 1
SIZE = 25
MAX_SIZE = 1000
# ################################################################################################################################
# ################################################################################################################################
class MSG_SOURCE:
DUPLEX = 'duplex'
# ################################################################################################################################
# ################################################################################################################################
class NameId:
""" Wraps both an attribute's name and its ID.
"""
def __init__(self, name, id=None):
self.name = name
self.id = id or name
def __repr__(self):
return '<{} at {}; name={}; id={}>'.format(self.__class__.__name__, hex(id(self)), self.name, self.id)
# ################################################################################################################################
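# For instance (illustrative values):
#
#   env = NameId('Production', 'prod')
#   env.name # 'Production'
#   env.id   # 'prod'
#
#   # If no ID is given, it defaults to the name itself
#   NameId('dev').id # 'dev'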
# ################################################################################################################################
class NotGiven:
pass # A marker for lazily-initialized attributes
# ################################################################################################################################
# ################################################################################################################################
class Attrs(type):
""" A container for class attributes that can be queried for an existence
of an attribute using the .has class-method.
"""
attrs = NotGiven
@classmethod
def has(cls, attr):
if cls.attrs is NotGiven:
cls.attrs = []
for cls_attr in dir(cls):
if cls_attr == cls_attr.upper():
cls.attrs.append(getattr(cls, cls_attr))
return attr in cls.attrs
# ################################################################################################################################
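# For instance, with the DATA_FORMAT class defined further below:
#
#   DATA_FORMAT.has('json') # True, matches DATA_FORMAT.JSON
#   DATA_FORMAT.has('yaml') # False, there is no such attribute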
# ################################################################################################################################
class DATA_FORMAT(Attrs):
CSV = 'csv'
DICT = 'dict'
HL7 = 'hl7'
JSON = 'json'
POST = 'post'
SOAP = 'soap'
XML = 'xml'
def __iter__(self):
        # Note that DICT and other attributes aren't included because they're never exposed to the external world as-is,
        # they may at most be used internally, e.g. when services invoke each other directly
return iter((self.XML, self.JSON, self.CSV, self.POST, self.HL7))
# ################################################################################################################################
# ################################################################################################################################
class DEPLOYMENT_STATUS(Attrs):
DEPLOYED = 'deployed'
AWAITING_DEPLOYMENT = 'awaiting-deployment'
IGNORED = 'ignored'
# ################################################################################################################################
# ################################################################################################################################
class SERVER_JOIN_STATUS(Attrs):
ACCEPTED = 'accepted'
# ################################################################################################################################
# ################################################################################################################################
class SERVER_UP_STATUS(Attrs):
RUNNING = 'running'
CLEAN_DOWN = 'clean-down'
# ################################################################################################################################
# ################################################################################################################################
class CACHE:
API_USERNAME = 'pub.zato.cache'
class TYPE:
BUILTIN = 'builtin'
MEMCACHED = 'memcached'
class BUILTIN_KV_DATA_TYPE:
STR = NameId('String/unicode', 'str')
INT = NameId('Integer', 'int')
def __iter__(self):
return iter((self.STR, self.INT))
class STATE_CHANGED:
CLEAR = 'CLEAR'
DELETE = 'DELETE'
DELETE_BY_PREFIX = 'DELETE_BY_PREFIX'
        DELETE_BY_SUFFIX = 'DELETE_BY_SUFFIX'
DELETE_BY_REGEX = 'DELETE_BY_REGEX'
DELETE_CONTAINS = 'DELETE_CONTAINS'
DELETE_NOT_CONTAINS = 'DELETE_NOT_CONTAINS'
DELETE_CONTAINS_ALL = 'DELETE_CONTAINS_ALL'
DELETE_CONTAINS_ANY = 'DELETE_CONTAINS_ANY'
EXPIRE = 'EXPIRE'
EXPIRE_BY_PREFIX = 'EXPIRE_BY_PREFIX'
EXPIRE_BY_SUFFIX = 'EXPIRE_BY_SUFFIX'
EXPIRE_BY_REGEX = 'EXPIRE_BY_REGEX'
EXPIRE_CONTAINS = 'EXPIRE_CONTAINS'
EXPIRE_NOT_CONTAINS = 'EXPIRE_NOT_CONTAINS'
EXPIRE_CONTAINS_ALL = 'EXPIRE_CONTAINS_ALL'
EXPIRE_CONTAINS_ANY = 'EXPIRE_CONTAINS_ANY'
GET = 'GET'
SET = 'SET'
SET_BY_PREFIX = 'SET_BY_PREFIX'
SET_BY_SUFFIX = 'SET_BY_SUFFIX'
SET_BY_REGEX = 'SET_BY_REGEX'
SET_CONTAINS = 'SET_CONTAINS'
SET_NOT_CONTAINS = 'SET_NOT_CONTAINS'
SET_CONTAINS_ALL = 'SET_CONTAINS_ALL'
SET_CONTAINS_ANY = 'SET_CONTAINS_ANY'
class DEFAULT:
MAX_SIZE = 10000
MAX_ITEM_SIZE = 10000 # In characters for string/unicode, bytes otherwise
class PERSISTENT_STORAGE:
NO_PERSISTENT_STORAGE = NameId('No persistent storage', 'no-persistent-storage')
SQL = NameId('SQL', 'sql')
def __iter__(self):
return iter((self.NO_PERSISTENT_STORAGE, self.SQL))
class SYNC_METHOD:
NO_SYNC = NameId('No synchronization', 'no-sync')
IN_BACKGROUND = NameId('In background', 'in-background')
def __iter__(self):
return iter((self.NO_SYNC, self.IN_BACKGROUND))
# ################################################################################################################################
# ################################################################################################################################
class KVDB(Attrs):
SEPARATOR = ':::'
DICTIONARY_ITEM = 'zato:kvdb:data-dict:item'
    DICTIONARY_ITEM_ID = DICTIONARY_ITEM + ':id' # ID of the last created dictionary item, always increasing.
LOCK_PREFIX = 'zato:lock:'
LOCK_SERVER_PREFIX = '{}server:'.format(LOCK_PREFIX)
LOCK_SERVER_ALREADY_DEPLOYED = '{}already-deployed:'.format(LOCK_SERVER_PREFIX)
LOCK_SERVER_STARTING = '{}starting:'.format(LOCK_SERVER_PREFIX)
LOCK_PACKAGE_PREFIX = '{}package:'.format(LOCK_PREFIX)
LOCK_PACKAGE_UPLOADING = '{}uploading:'.format(LOCK_PACKAGE_PREFIX)
LOCK_PACKAGE_ALREADY_UPLOADED = '{}already-uploaded:'.format(LOCK_PACKAGE_PREFIX)
LOCK_DELIVERY = '{}delivery:'.format(LOCK_PREFIX)
LOCK_DELIVERY_AUTO_RESUBMIT = '{}auto-resubmit:'.format(LOCK_DELIVERY)
LOCK_SERVICE_PREFIX = '{}service:'.format(LOCK_PREFIX)
LOCK_CONFIG_PREFIX = '{}config:'.format(LOCK_PREFIX)
LOCK_FANOUT_PATTERN = '{}fanout:{{}}'.format(LOCK_PREFIX)
LOCK_PARALLEL_EXEC_PATTERN = '{}parallel-exec:{{}}'.format(LOCK_PREFIX)
LOCK_ASYNC_INVOKE_WITH_TARGET_PATTERN = '{}async-invoke-with-pattern:{{}}:{{}}'.format(LOCK_PREFIX)
TRANSLATION = 'zato:kvdb:data-dict:translation'
TRANSLATION_ID = TRANSLATION + ':id'
SERVICE_USAGE = 'zato:stats:service:usage:'
SERVICE_TIME_BASIC = 'zato:stats:service:time:basic:'
SERVICE_TIME_RAW = 'zato:stats:service:time:raw:'
SERVICE_TIME_RAW_BY_MINUTE = 'zato:stats:service:time:raw-by-minute:'
SERVICE_TIME_AGGREGATED_BY_MINUTE = 'zato:stats:service:time:aggr-by-minute:'
SERVICE_TIME_AGGREGATED_BY_HOUR = 'zato:stats:service:time:aggr-by-hour:'
SERVICE_TIME_AGGREGATED_BY_DAY = 'zato:stats:service:time:aggr-by-day:'
SERVICE_TIME_AGGREGATED_BY_MONTH = 'zato:stats:service:time:aggr-by-month:'
SERVICE_TIME_SLOW = 'zato:stats:service:time:slow:'
SERVICE_SUMMARY_PREFIX_PATTERN = 'zato:stats:service:summary:{}:'
SERVICE_SUMMARY_BY_DAY = 'zato:stats:service:summary:by-day:'
SERVICE_SUMMARY_BY_WEEK = 'zato:stats:service:summary:by-week:'
SERVICE_SUMMARY_BY_MONTH = 'zato:stats:service:summary:by-month:'
SERVICE_SUMMARY_BY_YEAR = 'zato:stats:service:summary:by-year:'
ZMQ_CONFIG_READY_PREFIX = 'zato:zmq.config.ready.{}'
REQ_RESP_SAMPLE = 'zato:req-resp:sample:'
RESP_SLOW = 'zato:resp:slow:'
DELIVERY_PREFIX = 'zato:delivery:'
DELIVERY_BY_TARGET_PREFIX = '{}by-target:'.format(DELIVERY_PREFIX)
FANOUT_COUNTER_PATTERN = 'zato:fanout:counter:{}'
FANOUT_DATA_PATTERN = 'zato:fanout:data:{}'
PARALLEL_EXEC_COUNTER_PATTERN = 'zato:parallel-exec:counter:{}'
PARALLEL_EXEC_DATA_PATTERN = 'zato:parallel-exec:data:{}'
ASYNC_INVOKE_PROCESSED_FLAG_PATTERN = 'zato:async-invoke-with-pattern:processed:{}:{}'
ASYNC_INVOKE_PROCESSED_FLAG = '1'
# ################################################################################################################################
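# For instance, a few keys produced from the templates above (the CIDs and service name are illustrative):
#
#   KVDB.LOCK_FANOUT_PATTERN.format('cid-1')    # 'zato:lock:fanout:cid-1'
#   KVDB.FANOUT_COUNTER_PATTERN.format('cid-1') # 'zato:fanout:counter:cid-1'
#   KVDB.LOCK_SERVICE_PREFIX + 'my.service'     # 'zato:lock:service:my.service'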
# ################################################################################################################################
class SCHEDULER:
InitialSleepTime = 5
DefaultHost = '127.0.0.1'
DefaultPort = 31530
class JOB_TYPE(Attrs):
ONE_TIME = 'one_time'
INTERVAL_BASED = 'interval_based'
CRON_STYLE = 'cron_style'
class ON_MAX_RUNS_REACHED:
DELETE = 'delete'
INACTIVATE = 'inactivate'
# ################################################################################################################################
# ################################################################################################################################
class CHANNEL(Attrs):
AMQP = 'amqp'
DELIVERY = 'delivery'
FANOUT_CALL = 'fanout-call'
FANOUT_ON_FINAL = 'fanout-on-final'
FANOUT_ON_TARGET = 'fanout-on-target'
HTTP_SOAP = 'http-soap'
INTERNAL_CHECK = 'internal-check'
INVOKE = 'invoke'
INVOKE_ASYNC = 'invoke-async'
INVOKE_ASYNC_CALLBACK = 'invoke-async-callback'
IPC = 'ipc'
JSON_RPC = 'json-rpc'
NEW_INSTANCE = 'new-instance'
NOTIFIER_RUN = 'notifier-run'
NOTIFIER_TARGET = 'notifier-target'
PARALLEL_EXEC_CALL = 'parallel-exec-call'
PARALLEL_EXEC_ON_TARGET = 'parallel-exec-on-target'
PUBLISH = 'publish'
SCHEDULER = 'scheduler'
SCHEDULER_AFTER_ONE_TIME = 'scheduler-after-one-time'
SERVICE = 'service'
SSO_USER = 'sso-user'
STARTUP_SERVICE = 'startup-service'
URL_DATA = 'url-data'
WEB_SOCKET = 'web-socket'
IBM_MQ = 'websphere-mq'
WORKER = 'worker'
ZMQ = 'zmq'
# ################################################################################################################################
# ################################################################################################################################
class CONNECTION:
CHANNEL = 'channel'
OUTGOING = 'outgoing'
# ################################################################################################################################
# ################################################################################################################################
class INVOCATION_TARGET(Attrs):
CHANNEL_AMQP = 'channel-amqp'
CHANNEL_WMQ = 'channel-wmq'
CHANNEL_ZMQ = 'channel-zmq'
OUTCONN_AMQP = 'outconn-amqp'
OUTCONN_WMQ = 'outconn-wmq'
OUTCONN_ZMQ = 'outconn-zmq'
SERVICE = 'service'
# ################################################################################################################################
# ################################################################################################################################
class DELIVERY_STATE(Attrs):
IN_DOUBT = 'in-doubt'
IN_PROGRESS_ANY = 'in-progress-any' # A wrapper for all in-progress-* states
IN_PROGRESS_RESUBMITTED = 'in-progress-resubmitted'
IN_PROGRESS_RESUBMITTED_AUTO = 'in-progress-resubmitted-auto'
IN_PROGRESS_STARTED = 'in-progress'
IN_PROGRESS_TARGET_OK = 'in-progress-target-ok'
IN_PROGRESS_TARGET_FAILURE = 'in-progress-target-failure'
CONFIRMED = 'confirmed'
FAILED = 'failed'
UNKNOWN = 'unknown'
# ################################################################################################################################
# ################################################################################################################################
class DELIVERY_CALLBACK_INVOKER(Attrs):
SOURCE = 'source'
TARGET = 'target'
# ################################################################################################################################
# ################################################################################################################################
class BROKER:
DEFAULT_EXPIRATION = 15 # In seconds
# ################################################################################################################################
# ################################################################################################################################
class MISC:
DEFAULT_HTTP_TIMEOUT=10
OAUTH_SIG_METHODS = ['HMAC-SHA1', 'PLAINTEXT']
PIDFILE = 'pidfile'
SEPARATOR = ':::'
# ################################################################################################################################
# ################################################################################################################################
class HTTP_SOAP:
UNUSED_MARKER = 'unused'
class ACCEPT:
ANY = '*/*'
ANY_INTERNAL = 'haany'
class METHOD:
ANY_INTERNAL = 'hmany'
# ################################################################################################################################
# ################################################################################################################################
class ADAPTER_PARAMS:
APPLY_AFTER_REQUEST = 'apply-after-request'
APPLY_BEFORE_REQUEST = 'apply-before-request'
# ################################################################################################################################
# ################################################################################################################################
class INFO_FORMAT:
DICT = 'dict'
TEXT = 'text'
JSON = 'json'
YAML = 'yaml'
# ################################################################################################################################
# ################################################################################################################################
class MSG_MAPPER:
DICT_TO_DICT = 'dict-to-dict'
DICT_TO_XML = 'dict-to-xml'
XML_TO_DICT = 'xml-to-dict'
XML_TO_XML = 'xml-to-xml'
# ################################################################################################################################
# ################################################################################################################################
class CLOUD:
class AWS:
class S3:
class STORAGE_CLASS:
STANDARD = 'STANDARD'
REDUCED_REDUNDANCY = 'REDUCED_REDUNDANCY'
GLACIER = 'GLACIER'
DEFAULT = STANDARD
def __iter__(self):
return iter((self.STANDARD, self.REDUCED_REDUNDANCY, self.GLACIER))
class DEFAULTS:
ADDRESS = 'https://s3.amazonaws.com/'
CONTENT_TYPE = 'application/octet-stream' # Taken from boto.s3.key.Key.DefaultContentType
DEBUG_LEVEL = 0
POOL_SIZE = 5
PROVIDER = 'aws'
# ################################################################################################################################
# ################################################################################################################################
class URL_PARAMS_PRIORITY:
PATH_OVER_QS = 'path-over-qs'
QS_OVER_PATH = 'qs-over-path'
DEFAULT = QS_OVER_PATH
    def __iter__(self):
        return iter((self.PATH_OVER_QS, self.QS_OVER_PATH, self.DEFAULT))
# ################################################################################################################################
# ################################################################################################################################
class PARAMS_PRIORITY:
CHANNEL_PARAMS_OVER_MSG = 'channel-params-over-msg'
MSG_OVER_CHANNEL_PARAMS = 'msg-over-channel-params'
DEFAULT = CHANNEL_PARAMS_OVER_MSG
def __iter__(self):
return iter((self.CHANNEL_PARAMS_OVER_MSG, self.MSG_OVER_CHANNEL_PARAMS, self.DEFAULT))
# ################################################################################################################################
# ################################################################################################################################
class NONCE_STORE:
KEY_PATTERN = 'zato:nonce-store:{}:{}' # E.g. zato:nonce-store:oauth:27
DEFAULT_MAX_LOG = 25000
# ################################################################################################################################
# ################################################################################################################################
class MSG_PATTERN_TYPE:
JSON_POINTER = NameId('JSONPointer', 'json-pointer')
XPATH = NameId('XPath', 'xpath')
def __iter__(self):
return iter((self.JSON_POINTER, self.XPATH))
# ################################################################################################################################
# ################################################################################################################################
class HTTP_SOAP_SERIALIZATION_TYPE:
STRING_VALUE = NameId('String', 'string')
SUDS = NameId('Suds', 'suds')
DEFAULT = STRING_VALUE
def __iter__(self):
return iter((self.STRING_VALUE, self.SUDS))
# ################################################################################################################################
# ################################################################################################################################
class PUBSUB:
SKIPPED_PATTERN_MATCHING = '<skipped>'
# All float values are converted to strings of that precision
# to make sure pg8000 does not round up the floats with loss of precision.
FLOAT_STRING_CONVERT = '{:.7f}'
class DATA_FORMAT:
CSV = NameId('CSV', DATA_FORMAT.CSV)
DICT = NameId('Dict', DATA_FORMAT.DICT)
JSON = NameId('JSON', DATA_FORMAT.JSON)
POST = NameId('POST', DATA_FORMAT.POST)
SOAP = NameId('SOAP', DATA_FORMAT.SOAP)
XML = NameId('XML', DATA_FORMAT.XML)
def __iter__(self):
return iter((self.CSV, self.DICT, self.JSON, self.POST, self.SOAP, self.XML))
class HOOK_TYPE:
BEFORE_PUBLISH = 'pubsub_before_publish'
BEFORE_DELIVERY = 'pubsub_before_delivery'
ON_OUTGOING_SOAP_INVOKE = 'pubsub_on_topic_outgoing_soap_invoke'
ON_SUBSCRIBED = 'pubsub_on_subscribed'
ON_UNSUBSCRIBED = 'pubsub_on_unsubscribed'
class HOOK_ACTION:
SKIP = 'skip'
DELETE = 'delete'
DELIVER = 'deliver'
def __iter__(self):
return iter((self.SKIP, self.DELETE, self.DELIVER))
class DELIVER_BY:
PRIORITY = 'priority'
EXT_PUB_TIME = 'ext_pub_time'
PUB_TIME = 'pub_time'
def __iter__(self):
return iter((self.PRIORITY, self.EXT_PUB_TIME, self.PUB_TIME))
class ON_NO_SUBS_PUB:
ACCEPT = NameId('Accept', 'accept')
DROP = NameId('Drop', 'drop')
class DEFAULT:
DATA_FORMAT = 'text'
MIME_TYPE = 'text/plain'
TOPIC_MAX_DEPTH_GD = 10000
TOPIC_MAX_DEPTH_NON_GD = 1000
DEPTH_CHECK_FREQ = 100
EXPIRATION = 2147483647 * 1000 # (2 ** 31 - 1) * 1000 milliseconds = around 70 years
GET_BATCH_SIZE = 50
DELIVERY_BATCH_SIZE = 500
DELIVERY_MAX_RETRY = 123456789
DELIVERY_MAX_SIZE = 500000 # 500 kB
PUB_BUFFER_SIZE_GD = 0
TASK_SYNC_INTERVAL = 500
TASK_DELIVERY_INTERVAL = 2000
WAIT_TIME_SOCKET_ERROR = 10
WAIT_TIME_NON_SOCKET_ERROR = 3
INTERNAL_ENDPOINT_NAME = 'zato.pubsub.default.internal.endpoint'
ON_NO_SUBS_PUB = 'accept'
SK_OPAQUE = ('deliver_to_sk', 'reply_to_sk')
class SERVICE_SUBSCRIBER:
NAME = 'zato.pubsub.service.endpoint'
TOPICS_ALLOWED = 'sub=/zato/s/to/*'
class TOPIC_PATTERN:
TO_SERVICE = '/zato/s/to/{}'
class QUEUE_TYPE:
STAGING = 'staging'
CURRENT = 'current'
def __iter__(self):
return iter((self.STAGING, self.CURRENT))
class GD_CHOICE:
DEFAULT_PER_TOPIC = NameId('----------', 'default-per-topic')
YES = NameId('Yes', 'true')
NO = NameId('No', 'false')
def __iter__(self):
return iter((self.DEFAULT_PER_TOPIC, self.YES, self.NO))
class QUEUE_ACTIVE_STATUS:
FULLY_ENABLED = NameId('Pub and sub', 'pub-sub')
PUB_ONLY = NameId('Pub only', 'pub-only')
SUB_ONLY = NameId('Sub only', 'sub-only')
DISABLED = NameId('Disabled', 'disabled')
def __iter__(self):
return iter((self.FULLY_ENABLED, self.PUB_ONLY, self.SUB_ONLY, self.DISABLED))
class DELIVERY_METHOD:
NOTIFY = NameId('Notify', 'notify')
PULL = NameId('Pull', 'pull')
WEB_SOCKET = NameId('WebSocket', 'web-socket')
def __iter__(self):
# Note that WEB_SOCKET is not included because it's not shown in GUI for subscriptions
return iter((self.NOTIFY, self.PULL))
class DELIVERY_STATUS:
DELIVERED = 1
INITIALIZED = 2
TO_DELETE = 3
WAITING_FOR_CONFIRMATION = 4
class PRIORITY:
DEFAULT = 5
MIN = 1
MAX = 9
class ROLE:
PUBLISHER = NameId('Publisher', 'pub-only')
SUBSCRIBER = NameId('Subscriber', 'sub-only')
PUBLISHER_SUBSCRIBER = NameId('Publisher/subscriber', 'pub-sub')
def __iter__(self):
return iter((self.PUBLISHER, self.SUBSCRIBER, self.PUBLISHER_SUBSCRIBER))
class RunDeliveryStatus:
class StatusCode:
OK = 1
Warning = 2
Error = 3
class ReasonCode:
Error_IO = 1
Error_Other = 2
No_Msg = 3
class ENDPOINT_TYPE:
AMQP = NameId('AMQP', 'amqp')
FILES = NameId('Files', 'files')
FTP = NameId('FTP', 'ftp')
IMAP = NameId('IMAP', 'imap')
INTERNAL = NameId('Internal', 'internal')
REST = NameId('REST', 'rest')
SERVICE = NameId('Service', 'srv')
SMS_TWILIO = NameId('SMS - Twilio', 'smstw')
SMTP = NameId('SMTP', 'smtp')
SOAP = NameId('SOAP', 'soap')
SQL = NameId('SQL', 'sql')
WEB_SOCKETS = NameId('WebSockets', 'wsx')
def __iter__(self):
            return iter((self.AMQP.id, self.INTERNAL.id, self.REST.id, self.SERVICE.id, self.SOAP.id,
                self.WEB_SOCKETS.id))
class REDIS:
META_TOPIC_LAST_KEY = 'zato.ps.meta.topic.last.%s.%s'
META_ENDPOINT_PUB_KEY = 'zato.ps.meta.endpoint.pub.%s.%s'
META_ENDPOINT_SUB_KEY = 'zato.ps.meta.endpoint.sub.%s.%s'
class MIMEType:
Zato = 'application/vnd.zato.ps.msg'
# ################################################################################################################################
# ################################################################################################################################
class _PUBSUB_SUBSCRIBE_CLASS:
classes = {
PUBSUB.ENDPOINT_TYPE.AMQP.id: 'zato.pubsub.subscription.subscribe-amqp',
PUBSUB.ENDPOINT_TYPE.REST.id: 'zato.pubsub.subscription.subscribe-rest',
PUBSUB.ENDPOINT_TYPE.SERVICE.id: 'zato.pubsub.subscription.subscribe-service',
PUBSUB.ENDPOINT_TYPE.SOAP.id: 'zato.pubsub.subscription.subscribe-soap',
PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id: 'zato.pubsub.subscription.create-wsx-subscription',
}
@staticmethod
def get(name):
return _PUBSUB_SUBSCRIBE_CLASS.classes[name]
# ################################################################################################################################
# ################################################################################################################################
PUBSUB.SUBSCRIBE_CLASS = _PUBSUB_SUBSCRIBE_CLASS
# ################################################################################################################################
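# For instance - FLOAT_STRING_CONVERT keeps pg8000 from rounding floats with loss
# of precision and SUBSCRIBE_CLASS maps endpoint types to their subscription services:
#
#   PUBSUB.FLOAT_STRING_CONVERT.format(1.5)                   # '1.5000000'
#   PUBSUB.SUBSCRIBE_CLASS.get(PUBSUB.ENDPOINT_TYPE.REST.id)  # 'zato.pubsub.subscription.subscribe-rest'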
# ################################################################################################################################
# Not to be made available externally yet.
skip_endpoint_types = (
PUBSUB.ENDPOINT_TYPE.FTP.id,
PUBSUB.ENDPOINT_TYPE.INTERNAL.id,
PUBSUB.ENDPOINT_TYPE.IMAP.id,
PUBSUB.ENDPOINT_TYPE.SERVICE.id,
PUBSUB.ENDPOINT_TYPE.SMS_TWILIO.id,
PUBSUB.ENDPOINT_TYPE.SMTP.id,
PUBSUB.ENDPOINT_TYPE.SQL.id,
    PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id, # This one will never be made available because WSX clients need to use dedicated APIs to subscribe
)
# ################################################################################################################################
# ################################################################################################################################
class EMAIL:
class DEFAULT:
TIMEOUT = 10
PING_ADDRESS = 'invalid@invalid'
GET_CRITERIA = 'UNSEEN'
IMAP_DEBUG_LEVEL = 0
class IMAP:
class MODE:
PLAIN = 'plain'
SSL = 'ssl'
def __iter__(self):
return iter((self.PLAIN, self.SSL))
class SMTP:
class MODE:
PLAIN = 'plain'
SSL = 'ssl'
STARTTLS = 'starttls'
def __iter__(self):
return iter((self.PLAIN, self.SSL, self.STARTTLS))
# ################################################################################################################################
# ################################################################################################################################
class NOTIF:
class DEFAULT:
CHECK_INTERVAL = 5 # In seconds
CHECK_INTERVAL_SQL = 600 # In seconds
NAME_PATTERN = '**'
GET_DATA_PATTERN = '**'
class TYPE:
SQL = 'sql'
# ################################################################################################################################
# ################################################################################################################################
class CASSANDRA:
class DEFAULT:
CONTACT_POINTS = '127.0.0.1\n'
EXEC_SIZE = 2
PORT = 9042
PROTOCOL_VERSION = 4
KEYSPACE = 'not-set'
class COMPRESSION:
DISABLED = 'disabled'
ENABLED_NEGOTIATED = 'enabled-negotiated'
ENABLED_LZ4 = 'enabled-lz4'
ENABLED_SNAPPY = 'enabled-snappy'
# ################################################################################################################################
# ################################################################################################################################
class TLS:
# All the BEGIN/END blocks we don't want to store in logs.
# Taken from https://github.com/openssl/openssl/blob/master/crypto/pem/pem.h
# Note that the last one really is empty to denote 'BEGIN PRIVATE KEY' alone.
BEGIN_END = ('ANY ', 'RSA ', 'DSA ', 'EC ', 'ENCRYPTED ', '')
# Directories in a server's config/tls directory keeping the material
DIR_CA_CERTS = 'ca-certs'
DIR_KEYS_CERTS = 'keys-certs'
class DEFAULT:
VERSION = 'SSLv23'
CIPHERS = 'ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:' \
'ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:' \
'ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256'
class VERSION:
SSLv23 = NameId('SSLv23')
TLSv1 = NameId('TLSv1')
TLSv1_1 = NameId('TLSv1_1')
TLSv1_2 = NameId('TLSv1_2')
def __iter__(self):
return iter((self.SSLv23, self.TLSv1, self.TLSv1_1, self.TLSv1_2))
class CERT_VALIDATE:
CERT_NONE = NameId('Disabled', 'CERT_NONE')
CERT_OPTIONAL = NameId('Optional', 'CERT_OPTIONAL')
CERT_REQUIRED = NameId('Required', 'CERT_REQUIRED')
def __iter__(self):
return iter((self.CERT_NONE, self.CERT_OPTIONAL, self.CERT_REQUIRED))
class RATE_LIMIT:
class TYPE:
APPROXIMATE = NameId('Approximate', 'APPROXIMATE')
EXACT = NameId('Exact', 'EXACT')
def __iter__(self):
return iter((self.APPROXIMATE, self.EXACT))
class OBJECT_TYPE:
HTTP_SOAP = 'http_soap'
SERVICE = 'service'
SEC_DEF = 'sec_def'
SSO_USER = 'sso_user'
# ################################################################################################################################
# ################################################################################################################################
class ODOO:
class CLIENT_TYPE:
OPENERP_CLIENT_LIB = 'openerp-client-lib'
class DEFAULT:
PORT = 8069
POOL_SIZE = 3
class PROTOCOL:
XML_RPC = NameId('XML-RPC', 'xmlrpc')
XML_RPCS = NameId('XML-RPCS', 'xmlrpcs')
JSON_RPC = NameId('JSON-RPC', 'jsonrpc')
JSON_RPCS = NameId('JSON-RPCS', 'jsonrpcs')
def __iter__(self):
return iter((self.XML_RPC, self.XML_RPCS, self.JSON_RPC, self.JSON_RPCS))
# ################################################################################################################################
# ################################################################################################################################
class SAP:
class DEFAULT:
INSTANCE = '00'
POOL_SIZE = 1
# ################################################################################################################################
# ################################################################################################################################
class STOMP:
class DEFAULT:
ADDRESS = '127.0.0.1:61613'
PROTOCOL = '1.0'
TIMEOUT = 10 # In seconds
USERNAME = 'guest'
ACK_MODE = 'client-individual'
# ################################################################################################################################
# ################################################################################################################################
CONTENT_TYPE = Bunch(
JSON = 'application/json',
PLAIN_XML = 'application/xml',
SOAP11 = 'text/xml',
SOAP12 = 'application/soap+xml; charset=utf-8',
)
# ################################################################################################################################
# ################################################################################################################################
class IPC:
class ACTION:
INVOKE_SERVICE = 'invoke-service'
INVOKE_WORKER_STORE = 'invoke-worker-store'
class STATUS:
SUCCESS = 'zs'
FAILURE = 'zf'
LENGTH = 2 # Length of either success or failure messages
class CONNECTOR:
class USERNAME:
FTP = 'zato.connector.ftp'
IBM_MQ = 'zato.connector.wmq'
SFTP = 'zato.connector.sftp'
# ################################################################################################################################
# ################################################################################################################################
class WEB_SOCKET:
AUDIT_KEY = 'wsx-connection'
class DEFAULT:
NEW_TOKEN_TIMEOUT = 5
TOKEN_TTL = 3600
FQDN_UNKNOWN = '(Unknown)'
INTERACT_UPDATE_INTERVAL = 60 # 60 minutes = 1 hour
PINGS_MISSED_THRESHOLD = 2
PING_INTERVAL = 30
class PATTERN:
BY_EXT_ID = 'zato.by-ext-id.{}'
BY_CHANNEL = 'zato.by-channel.{}'
MSG_BROWSER_PREFIX = 'zato.msg-browser.' # This is used as a prefix in SQL queries
MSG_BROWSER = MSG_BROWSER_PREFIX + '{}'
class ACTION:
CLIENT_RESPONSE = 'client-response'
CREATE_SESSION = 'create-session'
INVOKE_SERVICE = 'invoke-service'
class OUT_MSG_TYPE:
CONNECT = 'connect'
MESSAGE = 'message'
CLOSE = 'close'
class HOOK_TYPE:
ON_CONNECTED = 'wsx_on_connected'
ON_DISCONNECTED = 'wsx_on_disconnected'
ON_PUBSUB_RESPONSE = 'wsx_on_pubsub_response'
ON_VAULT_MOUNT_POINT_NEEDED = 'wsx_on_vault_mount_point_needed'
# ################################################################################################################################
# ################################################################################################################################
class APISPEC:
OPEN_API_V3 = 'openapi_v3'
SOAP_12 = 'soap_12'
NAMESPACE_NULL = ''
DEFAULT_TAG = 'public'
GENERIC_INVOKE_PATH = '/zato/api/invoke/{service_name}' # OpenAPI
SOAP_INVOKE_PATH = '/zato/api/soap/invoke' # SOAP
# ################################################################################################################################
# ################################################################################################################################
class PADDING:
LEFT = 'left'
RIGHT = 'right'
# ################################################################################################################################
# ################################################################################################################################
class AMQP:
class DEFAULT:
POOL_SIZE = 10
PRIORITY = 5
PREFETCH_COUNT = 0
class ACK_MODE:
ACK = NameId('Ack', 'ack')
REJECT = NameId('Reject', 'reject')
def __iter__(self):
return iter((self.ACK, self.REJECT))
# ################################################################################################################################
# ################################################################################################################################
class REDIS:
class DEFAULT:
PORT = 6379
DB = 0
# ################################################################################################################################
# ################################################################################################################################
class SERVER_STARTUP:
class PHASE:
FS_CONFIG_ONLY = 'fs-config-only'
IMPL_BEFORE_RUN = 'impl-before-run'
ON_STARTING = 'on-starting'
BEFORE_POST_FORK = 'before-post-fork'
AFTER_POST_FORK = 'after-post-fork'
IN_PROCESS_FIRST = 'in-process-first'
IN_PROCESS_OTHER = 'in-process-other'
AFTER_STARTED = 'after-started'
# ################################################################################################################################
# ################################################################################################################################
class GENERIC:
ATTR_NAME = 'opaque1'
class CONNECTION:
class TYPE:
CHANNEL_FILE_TRANSFER = 'channel-file-transfer'
CHANNEL_HL7_MLLP = 'channel-hl7-mllp'
CLOUD_DROPBOX = 'cloud-dropbox'
DEF_KAFKA = 'def-kafka'
OUTCONN_HL7_MLLP = 'outconn-hl7-mllp'
OUTCONN_IM_SLACK = 'outconn-im-slack'
OUTCONN_IM_TELEGRAM = 'outconn-im-telegram'
OUTCONN_LDAP = 'outconn-ldap'
OUTCONN_MONGODB = 'outconn-mongodb'
OUTCONN_SFTP = 'outconn-sftp'
OUTCONN_WSX = 'outconn-wsx'
# ################################################################################################################################
# ################################################################################################################################
class AuditLog:
class Direction:
received = 'received'
sent = 'sent'
class Default:
max_len_messages = 50
max_data_stored_per_message = 500 # In kilobytes
# ################################################################################################################################
# ################################################################################################################################
class TOTP:
default_label = '<default-label>'
# ################################################################################################################################
# ################################################################################################################################
class LDAP:
class DEFAULT:
CONNECT_TIMEOUT = 10
POOL_EXHAUST_TIMEOUT = 5
POOL_KEEP_ALIVE = 30
POOL_LIFETIME = 3600
POOL_MAX_CYCLES = 1
POOL_SIZE = 10
class AUTH_TYPE:
NTLM = NameId('NTLM', 'NTLM')
SIMPLE = NameId('Simple', 'SIMPLE')
def __iter__(self):
return iter((self.SIMPLE, self.NTLM))
class AUTO_BIND:
DEFAULT = NameId('Default', 'DEFAULT')
NO_TLS = NameId('No TLS', 'NO_TLS')
NONE = NameId('None', 'NONE')
TLS_AFTER_BIND = NameId('Bind -> TLS', 'TLS_AFTER_BIND')
TLS_BEFORE_BIND = NameId('TLS -> Bind', 'TLS_BEFORE_BIND')
def __iter__(self):
return iter((self.DEFAULT, self.NONE, self.NO_TLS, self.TLS_AFTER_BIND, self.TLS_BEFORE_BIND))
class GET_INFO:
ALL = NameId('All', 'ALL')
DSA = NameId('DSA', 'DSA')
NONE = NameId('None', 'NONE')
SCHEMA = NameId('Schema', 'SCHEMA')
OFFLINE_EDIR_8_8_8 = NameId('EDIR 8.8.8', 'OFFLINE_EDIR_8_8_8')
OFFLINE_AD_2012_R2 = NameId('AD 2012.R2', 'OFFLINE_AD_2012_R2')
OFFLINE_SLAPD_2_4 = NameId('SLAPD 2.4', 'OFFLINE_SLAPD_2_4')
OFFLINE_DS389_1_3_3 = NameId('DS 389.1.3.3', 'OFFLINE_DS389_1_3_3')
def __iter__(self):
return iter((self.NONE, self.ALL, self.SCHEMA, self.DSA,
self.OFFLINE_EDIR_8_8_8, self.OFFLINE_AD_2012_R2, self.OFFLINE_SLAPD_2_4, self.OFFLINE_DS389_1_3_3))
class IP_MODE:
IP_V4_ONLY = NameId('Only IPv4', 'IP_V4_ONLY')
IP_V6_ONLY = NameId('Only IPv6', 'IP_V6_ONLY')
IP_V4_PREFERRED = NameId('Prefer IPv4', 'IP_V4_PREFERRED')
IP_V6_PREFERRED = NameId('Prefer IPv6', 'IP_V6_PREFERRED')
IP_SYSTEM_DEFAULT = NameId('System default', 'IP_SYSTEM_DEFAULT')
def __iter__(self):
return iter((self.IP_V4_ONLY, self.IP_V6_ONLY, self.IP_V4_PREFERRED, self.IP_V6_PREFERRED, self.IP_SYSTEM_DEFAULT))
class POOL_HA_STRATEGY:
FIRST = NameId('First', 'FIRST')
RANDOM = NameId('Random', 'RANDOM')
ROUND_ROBIN = NameId('Round robin', 'ROUND_ROBIN')
def __iter__(self):
return iter((self.FIRST, self.RANDOM, self.ROUND_ROBIN))
class SASL_MECHANISM:
GSSAPI = NameId('GSS-API', 'GSSAPI')
EXTERNAL = NameId('External', 'EXTERNAL')
def __iter__(self):
return iter((self.EXTERNAL, self.GSSAPI))
# ################################################################################################################################
# ################################################################################################################################
class MONGODB:
class DEFAULT:
AUTH_SOURCE = 'admin'
HB_FREQUENCY = 10
MAX_IDLE_TIME = 600
MAX_STALENESS = -1
POOL_SIZE_MIN = 0
POOL_SIZE_MAX = 5
SERVER_LIST = '127.0.0.1:27017'
WRITE_TO_REPLICA = ''
WRITE_TIMEOUT = 5
ZLIB_LEVEL = -1
class TIMEOUT:
CONNECT = 10
SERVER_SELECT = 5
SOCKET = 30
WAIT_QUEUE = 10
class READ_PREF:
PRIMARY = NameId('Primary', 'primary')
PRIMARY_PREFERRED = NameId('Primary pref.', 'primaryPreferred')
SECONDARY = NameId('Secondary', 'secondary')
SECONDARY_PREFERRED = NameId('Secondary pref.', 'secondaryPreferred')
NEAREST = NameId('Nearest', 'nearest')
def __iter__(self):
return iter((self.PRIMARY, self.PRIMARY_PREFERRED, self.SECONDARY, self.SECONDARY_PREFERRED, self.NEAREST))
class AUTH_MECHANISM:
SCRAM_SHA_1 = NameId('SCRAM-SHA-1')
SCRAM_SHA_256 = NameId('SCRAM-SHA-256')
def __iter__(self):
return iter((self.SCRAM_SHA_1, self.SCRAM_SHA_256))
# ################################################################################################################################
# ################################################################################################################################
class KAFKA:
class DEFAULT:
BROKER_VERSION = '0.9.0'
SERVER_LIST = '127.0.0.1:2181'
class TIMEOUT:
SOCKET = 1
OFFSETS = 10
# ################################################################################################################################
# ################################################################################################################################
class TELEGRAM:
class DEFAULT:
ADDRESS = 'https://api.telegram.org/bot{token}/{method}'
class TIMEOUT:
CONNECT = 5
INVOKE = 10
# ################################################################################################################################
# ################################################################################################################################
class SFTP:
class DEFAULT:
BANDWIDTH_LIMIT = 10
BUFFER_SIZE = 32768
COMMAND_SFTP = 'sftp'
COMMAND_PING = 'ls .'
PORT = 22
class LOG_LEVEL:
LEVEL0 = NameId('0', '0')
LEVEL1 = NameId('1', '1')
LEVEL2 = NameId('2', '2')
LEVEL3 = NameId('3', '3')
LEVEL4 = NameId('4', '4')
def __iter__(self):
return iter((self.LEVEL0, self.LEVEL1, self.LEVEL2, self.LEVEL3, self.LEVEL4))
def is_valid(self, value):
return value in (elem.id for elem in self)
class IP_TYPE:
IPV4 = NameId('IPv4', 'ipv4')
IPV6 = NameId('IPv6', 'ipv6')
def __iter__(self):
return iter((self.IPV4, self.IPV6))
def is_valid(self, value):
return value in (elem.id for elem in self)
# ################################################################################################################################
# ################################################################################################################################
class DROPBOX:
class DEFAULT:
MAX_RETRIES_ON_ERROR = 5
MAX_RETRIES_ON_RATE_LIMIT = None
OAUTH2_ACCESS_TOKEN_EXPIRATION = None
POOL_SIZE = 10
TIMEOUT = 60
USER_AGENT = None
# ################################################################################################################################
# ################################################################################################################################
class JSON_RPC:
class PREFIX:
CHANNEL = 'json.rpc.channel'
OUTGOING = 'json.rpc.outconn'
# ################################################################################################################################
# ################################################################################################################################
class CONFIG_FILE:
USER_DEFINED = 'user-defined'
# We need to use such a constant because we can sometimes be interested in setting
# default values which evaluate to boolean False.
NO_DEFAULT_VALUE = 'NO_DEFAULT_VALUE'
PLACEHOLDER = 'zato_placeholder'
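
# A sentinel-usage sketch (illustration only; the function below is hypothetical,
# not part of this module). NO_DEFAULT_VALUE lets callers distinguish 'no default
# given' from defaults that evaluate to boolean False, such as 0 or '':
#
#   def get_value(config, key, default=NO_DEFAULT_VALUE):
#       value = config.get(key, default)
#       if value == NO_DEFAULT_VALUE:
#           raise KeyError('Key `{}` not found and no default given'.format(key))
#       return value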
# ################################################################################################################################
# ################################################################################################################################
class MS_SQL:
ZATO_DIRECT = 'zato+mssql1'
EXTRA_KWARGS = 'login_timeout', 'appname', 'blocksize', 'use_mars', 'readonly', 'use_tz', 'bytes_to_unicode', \
'cafile', 'validate_host'
# ################################################################################################################################
# ################################################################################################################################
class FILE_TRANSFER:
SCHEDULER_SERVICE = 'pub.zato.channel.file-transfer.handler'
class DEFAULT:
FILE_PATTERNS = '*'
ENCODING = 'utf-8'
class SOURCE_TYPE:
LOCAL = NameId('Local', 'local')
FTP = NameId('FTP', 'ftp')
SFTP = NameId('SFTP', 'sftp')
def __iter__(self):
return iter((self.LOCAL, self.FTP, self.SFTP))
class SOURCE_TYPE_IMPL:
LOCAL_INOTIFY = 'local-inotify'
LOCAL_SNAPSHOT = 'local-snapshot'
# ################################################################################################################################
# ################################################################################################################################
class HL7:
class Default:
""" Default values for HL7 objects.
"""
# Default TCP port for MLLP connections
address = '0.0.0.0:30901'
# Assume that UTF-8 is sent in by default
data_encoding = 'utf-8'
# Each message may be of at most that many bytes
max_msg_size = '1_000_000'
# How many seconds to wait for HL7 MLLP responses when invoking a remote end
max_wait_time = 60
# At most that many bytes will be read from a socket at a time
read_buffer_size = 2048
# We wait at most that many milliseconds for data from a socket in each iteration of the main loop
recv_timeout = 250
# At what level to log messages (Python logging)
logging_level = 'INFO'
# Should we store the contents of messages in logs (Python logging)
should_log_messages = False
# How many concurrent outgoing connections we allow
pool_size = 10
# An MLLP message may begin with these bytes ..
start_seq = '0b'
# .. and end with these below.
end_seq = '1c 0d'
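
        # A framing sketch (illustration only, variable names assumed) - the hex
        # strings above translate to the raw MLLP control bytes like so:
        #
        #   start = bytes.fromhex('0b')    # -> b'\x0b'
        #   end   = bytes.fromhex('1c 0d') # -> b'\x1c\r'
        #   framed_message = start + hl7_payload + end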
class Const:
""" Various HL7-related constants.
"""
class Version:
# A generic v2 message, without an indication of a specific release.
v2 = NameId('HL7 v2', 'hl7-v2')
def __iter__(self):
return iter((self.v2,))
class LoggingLevel:
Info = NameId('INFO', 'INFO')
Debug = NameId('DEBUG', 'DEBUG')
def __iter__(self):
return iter((self.Info, self.Debug))
class ImplClass:
hl7apy = 'hl7apy'
zato = 'Zato'
# ################################################################################################################################
# ################################################################################################################################
# TODO: SIMPLE_IO.FORMAT should be removed in favour of plain DATA_FORMAT
class SIMPLE_IO:
class FORMAT(Attrs):
JSON = DATA_FORMAT.JSON
XML = DATA_FORMAT.XML
COMMON_FORMAT = OrderedDict()
COMMON_FORMAT[DATA_FORMAT.JSON] = 'JSON'
COMMON_FORMAT[DATA_FORMAT.XML] = 'XML'
HTTP_SOAP_FORMAT = OrderedDict()
HTTP_SOAP_FORMAT[DATA_FORMAT.JSON] = 'JSON'
HTTP_SOAP_FORMAT[DATA_FORMAT.XML] = 'XML'
HTTP_SOAP_FORMAT[HL7.Const.Version.v2.id] = HL7.Const.Version.v2.name
# ################################################################################################################################
# ################################################################################################################################
class UNITTEST:
SQL_ENGINE = 'zato+unittest'
HTTP = 'zato+unittest'
VAULT_URL = 'https://zato+unittest'
class HotDeploy:
UserPrefix = 'hot-deploy.user'
# ################################################################################################################################
# ################################################################################################################################
class ZatoKVDB:
SlowResponsesName = 'zato.service.slow_responses'
UsageSamplesName = 'zato.service.usage_samples'
CurrentUsageName = 'zato.service.current_usage'
PubSubMetadataName = 'zato.pubsub.metadata'
SlowResponsesPath = SlowResponsesName + '.json'
UsageSamplesPath = UsageSamplesName + '.json'
CurrentUsagePath = CurrentUsageName + '.json'
PubSubMetadataPath = PubSubMetadataName + '.json'
DefaultSyncThreshold = 3_000
DefaultSyncInterval = 3
# ################################################################################################################################
# ################################################################################################################################
class Stats:
    # This is for how long we keep old statistics in persistent storage, expressed in milliseconds. Defaults to two years:
    # 1000 ms * 60 s * 60 min * 24 hours * 365 days * 2 years = 63_072_000_000 milliseconds.
# We use milliseconds because that makes it easier to construct tests.
MaxRetention = 1000 * 60 * 60 * 24 * 365 * 2
# By default, statistics will be aggregated into time buckets of that duration
DefaultAggrTimeFreq = '5min' # Five minutes
# We always tabulate by object_id (e.g. service name)
TabulateAggr = 'object_id'
# ################################################################################################################################
# ################################################################################################################################
class StatsKey:
CurrentValue = 'current_value'
PerKeyMin = 'min'
PerKeyMax = 'max'
PerKeyMean = 'mean'
PerKeyValue = 'value'
PerKeyLastTimestamp = 'last_timestamp'
PerKeyLastDuration = 'last_duration'
# ################################################################################################################################
# ################################################################################################################################
class SSO:
class EmailTemplate:
SignupConfirm = 'signup-confirm.txt'
SignupWelcome = 'signup-welcome.txt'
PasswordResetLink = 'password-reset-link.txt'
# ################################################################################################################################
# ################################################################################################################################
ZATO_INFO_FILE = '.zato-info'
# ################################################################################################################################
# ################################################################################################################################
class SourceCodeInfo:
""" A bunch of attributes dealing the service's source code.
"""
__slots__ = 'source', 'source_html', 'len_source', 'path', 'hash', 'hash_method', 'server_name'
def __init__(self):
self.source = '' # type: str
self.source_html = '' # type: str
self.len_source = 0 # type: int
self.path = None # type: str
self.hash = None # type: str
self.hash_method = None # type: str
self.server_name = None # type: str
# ################################################################################################################################
# ################################################################################################################################
class StatsElem:
""" A single element of a statistics query result concerning a particular service.
All values make sense only within the time interval of the original query, e.g. a 'min_resp_time'
    may be 18 ms in this element because it represents statistics regarding, say,
    the last hour, yet in a different period the 'min_resp_time' may be a completely
different value. Likewise, 'all' in the description of parameters below means
'all that matched given query criteria' rather than 'all that ever existed'.
service_name - name of the service this element describes
usage - how many times the service has been invoked
mean - an arithmetical average of all the mean response times (in ms)
rate - usage rate in requests/s (up to 1 decimal point)
time - time spent by this service on processing the messages (in ms)
usage_trend - a CSV list of values representing the service usage
usage_trend_int - a list of integers representing the service usage
mean_trend - a CSV list of values representing mean response times (in ms)
mean_trend_int - a list of integers representing mean response times (in ms)
min_resp_time - minimum service response time (in ms)
max_resp_time - maximum service response time (in ms)
all_services_usage - how many times all the services have been invoked
all_services_time - how much time all the services spent on processing the messages (in ms)
mean_all_services - an arithmetical average of all the mean response times of all services (in ms)
usage_perc_all_services - this service's usage as a percentage of all_services_usage (up to 2 decimal points)
time_perc_all_services - this service's share as a percentage of all_services_time (up to 2 decimal points)
expected_time_elems - an OrderedDict of all the time slots mapped to a mean time and rate
temp_rate - a temporary place for keeping request rates, needed to get a weighted mean of uneven execution periods
temp_mean - just like temp_rate but for mean response times
temp_mean_count - how many periods containing a mean rate there were
"""
def __init__(self, service_name=None, mean=None):
self.service_name = service_name
self.usage = 0
self.mean = mean
self.rate = 0.0
self.time = 0
self.usage_trend_int = []
self.mean_trend_int = []
        self.min_resp_time = maxsize # Assume there will be at least one response time lower than this
self.max_resp_time = 0
self.all_services_usage = 0
self.all_services_time = 0
self.mean_all_services = 0
self.usage_perc_all_services = 0
self.time_perc_all_services = 0
self.expected_time_elems = OrderedDict()
self.temp_rate = 0
self.temp_mean = 0
self.temp_mean_count = 0
    def get_attrs(self, ignore=None):
        ignore = ignore or [] # Avoid sharing a mutable default list across calls
for attr in dir(self):
if attr.startswith('__') or attr.startswith('temp_') or callable(getattr(self, attr)) or attr in ignore:
continue
yield attr
def to_dict(self, ignore=None):
if not ignore:
ignore = ['expected_time_elems', 'mean_trend_int', 'usage_trend_int']
return {attr: getattr(self, attr) for attr in self.get_attrs(ignore)}
@staticmethod
def from_json(item):
stats_elem = StatsElem()
for k, v in item.items():
setattr(stats_elem, k, v)
return stats_elem
@staticmethod
def from_xml(item):
stats_elem = StatsElem()
for child in item.getchildren():
setattr(stats_elem, child.xpath('local-name()'), child.pyval)
return stats_elem
def __repr__(self):
buff = StringIO()
buff.write('<{} at {} '.format(self.__class__.__name__, hex(id(self))))
attrs = ('{}=[{}]'.format(attr, getattr(self, attr)) for attr in self.get_attrs())
buff.write(', '.join(attrs))
buff.write('>')
value = buff.getvalue()
buff.close()
return value
def __iadd__(self, other):
self.max_resp_time = max(self.max_resp_time, other.max_resp_time)
self.min_resp_time = min(self.min_resp_time, other.min_resp_time)
self.usage += other.usage
return self
def __bool__(self):
return bool(self.service_name) # Empty stats_elems won't have a service name set
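
# A merging sketch (illustration only, values assumed) - __iadd__ above combines
# the usage counter and the response-time extremes of two elements describing
# the same service:
#
#   a = StatsElem('my.service', mean=15)
#   a.usage, a.min_resp_time, a.max_resp_time = 10, 5, 40
#
#   b = StatsElem('my.service', mean=25)
#   b.usage, b.min_resp_time, b.max_resp_time = 3, 2, 90
#
#   a += b # Now a.usage == 13, a.min_resp_time == 2, a.max_resp_time == 90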
# ################################################################################################################################
# ################################################################################################################################
class SMTPMessage:
def __init__(self, from_=None, to=None, subject='', body='', attachments=None, cc=None, bcc=None, is_html=False, headers=None,
charset='utf8', is_rfc2231=True):
self.from_ = from_
self.to = to
self.subject = subject
self.body = body
self.attachments = attachments or []
self.cc = cc
self.bcc = bcc
self.is_html = is_html
self.headers = headers or {}
self.charset = charset
self.is_rfc2231 = is_rfc2231
def attach(self, name, contents):
self.attachments.append({'name':name, 'contents':contents})
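
# A usage sketch (illustration only, values assumed):
#
#   msg = SMTPMessage(from_='sender@example.com', to='recipient@example.com',
#       subject='Hello', body='How are you?')
#   msg.attach('report.txt', 'Contents of the attachment')
#
#   # msg.attachments == [{'name':'report.txt', 'contents':'Contents of the attachment'}]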
# ################################################################################################################################
# ################################################################################################################################
class IDEDeploy:
Username = 'ide_publisher'
# ################################################################################################################################
# ################################################################################################################################
class IMAPMessage:
def __init__(self, uid, conn, data):
self.uid = uid
self.conn = conn
self.data = data
def __repr__(self):
return '<{} at {}, uid:`{}`, conn.config:`{}`>'.format(
self.__class__.__name__, hex(id(self)), self.uid, self.conn.config_no_sensitive)
def delete(self):
self.conn.delete(self.uid)
def mark_seen(self):
self.conn.mark_seen(self.uid)
# ################################################################################################################################
# ################################################################################################################################
class IBMMQCallData:
""" Metadata for information returned by IBM MQ in response to underlying MQPUT calls.
"""
__slots__ = ('msg_id', 'correlation_id')
def __init__(self, msg_id, correlation_id):
self.msg_id = msg_id
self.correlation_id = correlation_id
# For compatibility with Zato < 3.2
WebSphereMQCallData = IBMMQCallData
# ################################################################################################################################
# ################################################################################################################################
default_internal_modules = {
'zato.server.service.internal': True,
'zato.server.service.internal.apispec': True,
'zato.server.service.internal.audit_log': True,
'zato.server.service.internal.cache.builtin': True,
'zato.server.service.internal.cache.builtin.entry': True,
'zato.server.service.internal.cache.builtin.pubapi': True,
'zato.server.service.internal.cache.memcached': True,
'zato.server.service.internal.channel.amqp_': True,
'zato.server.service.internal.channel.file_transfer': True,
'zato.server.service.internal.channel.jms_wmq': True,
'zato.server.service.internal.channel.json_rpc': True,
'zato.server.service.internal.channel.web_socket': True,
'zato.server.service.internal.channel.web_socket.cleanup': True,
'zato.server.service.internal.channel.web_socket.client': True,
'zato.server.service.internal.channel.web_socket.subscription': True,
'zato.server.service.internal.channel.zmq': True,
'zato.server.service.internal.cloud.aws.s3': True,
'zato.server.service.internal.connector.amqp_': True,
'zato.server.service.internal.crypto': True,
'zato.server.service.internal.definition.amqp_': True,
'zato.server.service.internal.definition.cassandra': True,
'zato.server.service.internal.definition.jms_wmq': True,
'zato.server.service.internal.email.imap': True,
'zato.server.service.internal.email.smtp': True,
'zato.server.service.internal.generic.connection': True,
'zato.server.service.internal.helpers': True,
'zato.server.service.internal.hot_deploy': True,
'zato.server.service.internal.ide_deploy': True,
'zato.server.service.internal.info': True,
'zato.server.service.internal.http_soap': True,
'zato.server.service.internal.kv_data': True,
'zato.server.service.internal.kvdb': True,
'zato.server.service.internal.kvdb.data_dict.dictionary': True,
'zato.server.service.internal.kvdb.data_dict.impexp': True,
'zato.server.service.internal.kvdb.data_dict.translation': True,
'zato.server.service.internal.message.namespace': True,
'zato.server.service.internal.message.xpath': True,
'zato.server.service.internal.message.json_pointer': True,
'zato.server.service.internal.notif': True,
'zato.server.service.internal.notif.sql': True,
'zato.server.service.internal.outgoing.amqp_': True,
'zato.server.service.internal.outgoing.ftp': True,
'zato.server.service.internal.outgoing.jms_wmq': True,
'zato.server.service.internal.outgoing.odoo': True,
'zato.server.service.internal.outgoing.redis': True,
'zato.server.service.internal.outgoing.sql': True,
'zato.server.service.internal.outgoing.sap': True,
'zato.server.service.internal.outgoing.sftp': True,
'zato.server.service.internal.outgoing.zmq': True,
'zato.server.service.internal.pattern': True,
'zato.server.service.internal.pickup': True,
'zato.server.service.internal.pattern.invoke_retry': True,
'zato.server.service.internal.pubsub': True,
'zato.server.service.internal.pubsub.delivery': True,
'zato.server.service.internal.pubsub.endpoint': True,
'zato.server.service.internal.pubsub.hook': True,
'zato.server.service.internal.pubsub.message': True,
'zato.server.service.internal.pubsub.migrate': True,
'zato.server.service.internal.pubsub.pubapi': True,
'zato.server.service.internal.pubsub.publish': True,
'zato.server.service.internal.pubsub.subscription': True,
'zato.server.service.internal.pubsub.queue': True,
'zato.server.service.internal.pubsub.task': True,
'zato.server.service.internal.pubsub.task.delivery': True,
'zato.server.service.internal.pubsub.task.delivery.message': True,
'zato.server.service.internal.pubsub.task.delivery.server': True,
'zato.server.service.internal.pubsub.task.sync': True,
'zato.server.service.internal.pubsub.topic': True,
'zato.server.service.internal.query.cassandra': True,
'zato.server.service.internal.scheduler': True,
'zato.server.service.internal.search.es': True,
'zato.server.service.internal.search.solr': True,
'zato.server.service.internal.security': True,
'zato.server.service.internal.security.apikey': True,
'zato.server.service.internal.security.aws': True,
'zato.server.service.internal.security.basic_auth': True,
'zato.server.service.internal.security.jwt': True,
'zato.server.service.internal.security.ntlm': True,
'zato.server.service.internal.security.oauth': True,
'zato.server.service.internal.security.rbac': True,
'zato.server.service.internal.security.rbac.client_role': True,
'zato.server.service.internal.security.rbac.permission': True,
'zato.server.service.internal.security.rbac.role': True,
'zato.server.service.internal.security.rbac.role_permission': True,
'zato.server.service.internal.security.tls.ca_cert': True,
'zato.server.service.internal.security.tls.channel': True,
'zato.server.service.internal.security.tls.key_cert': True,
'zato.server.service.internal.security.wss': True,
'zato.server.service.internal.security.vault.connection': True,
'zato.server.service.internal.security.vault.policy': True,
'zato.server.service.internal.security.xpath': True,
'zato.server.service.internal.server': True,
'zato.server.service.internal.service': True,
'zato.server.service.internal.sms': True,
'zato.server.service.internal.sms.twilio': True,
'zato.server.service.internal.sso': True,
'zato.server.service.internal.sso.cleanup': True,
'zato.server.service.internal.sso.password_reset': True,
'zato.server.service.internal.sso.session': True,
'zato.server.service.internal.sso.session_attr': True,
'zato.server.service.internal.sso.signup': True,
'zato.server.service.internal.sso.user': True,
'zato.server.service.internal.sso.user_attr': True,
'zato.server.service.internal.stats': True,
'zato.server.service.internal.stats.summary': True,
'zato.server.service.internal.stats.trends': True,
'zato.server.service.internal.updates': True,
}
# ################################################################################################################################
# ################################################################################################################################
# (end of file: src/zato/common/api.py)
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
# globre
from globre import match as globre_match
# Paste
from paste.util.converters import asbool
# Zato
from zato.common.api import FALSE_TRUE, TRUE_FALSE
logger = logging.getLogger(__name__)
class Matcher(object):
def __init__(self):
self.config = None
self.items = {True:[], False:[]}
self.order1 = None
self.order2 = None
self.is_allowed_cache = {}
self.special_case = None
def read_config(self, config):
self.config = config
order = config.get('order', FALSE_TRUE)
self.order1, self.order2 = (True, False) if order == TRUE_FALSE else (False, True)
for key, value in config.items():
# Ignore meta key(s)
if key == 'order':
continue
value = asbool(value)
# Add new items
self.items[value].append(key)
        # Now sort everything lexicographically, the way it will be used at run-time
for key in self.items:
self.items[key] = list(reversed(sorted(self.items[key])))
for empty, non_empty in ((True, False), (False, True)):
if not self.items[empty] and '*' in self.items[non_empty]:
self.special_case = non_empty
break
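
    # A configuration sketch (illustration only, patterns assumed) for read_config
    # above and is_allowed below. With the default FALSE_TRUE order, False patterns
    # are evaluated first and True patterns last, so True wins whenever a value
    # matches patterns of both kinds:
    #
    #   matcher = Matcher()
    #   matcher.read_config({'order': FALSE_TRUE, 'zato.internal.*': False, 'zato.*': True})
    #
    #   matcher.is_allowed('zato.internal.service') # True  - 'zato.*' (True) is applied last
    #   matcher.is_allowed('abc')                   # False - no pattern matches at all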
def is_allowed(self, value):
logger.debug('Cache:`%s`, value:`%s`', self.is_allowed_cache, value)
if self.special_case is not None:
return self.special_case
try:
return self.is_allowed_cache[value]
except KeyError:
_match = globre_match
is_allowed = None
for order in self.order1, self.order2:
for pattern in self.items[order]:
if _match(pattern, value):
is_allowed = order
# No match at all - we don't allow it in that case
is_allowed = is_allowed if (is_allowed is not None) else False
self.is_allowed_cache[value] = is_allowed
            return is_allowed

# (end of file: src/zato/common/match.py)
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from http.client import BAD_REQUEST, CONFLICT, FORBIDDEN, INTERNAL_SERVER_ERROR, METHOD_NOT_ALLOWED, NOT_FOUND, \
SERVICE_UNAVAILABLE, UNAUTHORIZED
# Zato
from zato.common.http_ import HTTP_RESPONSES
# ################################################################################################################################
# ################################################################################################################################
# https://tools.ietf.org/html/rfc6585
TOO_MANY_REQUESTS = 429
# ################################################################################################################################
# ################################################################################################################################
class ZatoException(Exception):
""" Base class for all Zato custom exceptions.
"""
def __init__(self, cid=None, msg=None):
super(ZatoException, self).__init__(msg)
self.cid = cid
self.msg = msg
def __repr__(self):
return '<{} at {} cid:`{}`, msg:`{}`>'.format(
self.__class__.__name__, hex(id(self)), self.cid, self.msg)
__str__ = __repr__
# ################################################################################################################################
class ClientSecurityException(ZatoException):
""" An exception for signalling errors stemming from security problems
on the client side, such as invalid username or password.
"""
# ################################################################################################################################
class ConnectionException(ZatoException):
""" Encountered a problem with an external connections, such as to AMQP brokers.
"""
# ################################################################################################################################
class TimeoutException(ConnectionException):
pass
# ################################################################################################################################
class StatusAwareException(ZatoException):
""" Raised when the underlying error condition can be easily expressed
as one of the HTTP status codes.
"""
def __init__(self, cid, msg, status):
super(StatusAwareException, self).__init__(cid, msg)
self.status = status
self.reason = HTTP_RESPONSES[status]
def __repr__(self):
return '<{} at {} cid:`{}`, status:`{}`, msg:`{}`>'.format(
self.__class__.__name__, hex(id(self)), self.cid, self.status, self.msg)
# ################################################################################################################################
class HTTPException(StatusAwareException):
pass
# ################################################################################################################################
class ParsingException(ZatoException):
""" Raised when the error is to do with parsing of documents, such as an input
XML document.
"""
# ################################################################################################################################
class NoDistributionFound(ZatoException):
""" Raised when an attempt is made to import services from a Distutils2 archive
or directory but they don't contain a proper Distutils2 distribution.
"""
def __init__(self, path):
        super(NoDistributionFound, self).__init__(None, 'No Distutils distribution in path:[{}]'.format(path))
# ################################################################################################################################
class Inactive(ZatoException):
""" Raised when an attempt was made to use an inactive resource, such
as an outgoing connection or a channel.
"""
def __init__(self, name):
super(Inactive, self).__init__(None, '`{}` is inactive'.format(name))
# ################################################################################################################################
# ################################################################################################################################
# Below are HTTP exceptions
class Reportable(HTTPException):
def __init__(self, cid, msg, status):
        super(Reportable, self).__init__(cid, msg, status)
# Backward compatibility with pre 3.0
ClientHTTPError = Reportable
# ################################################################################################################################
class BadRequest(Reportable):
def __init__(self, cid, msg='Received a bad request'):
super(BadRequest, self).__init__(cid, msg, BAD_REQUEST)
# ################################################################################################################################
class Conflict(Reportable):
def __init__(self, cid, msg):
super(Conflict, self).__init__(cid, msg, CONFLICT)
# ################################################################################################################################
class Forbidden(Reportable):
def __init__(self, cid, msg='You are not allowed to access this resource', *ignored_args, **ignored_kwargs):
super(Forbidden, self).__init__(cid, msg, FORBIDDEN)
# ################################################################################################################################
class MethodNotAllowed(Reportable):
def __init__(self, cid, msg):
super(MethodNotAllowed, self).__init__(cid, msg, METHOD_NOT_ALLOWED)
# ################################################################################################################################
class NotFound(Reportable):
def __init__(self, cid, msg):
super(NotFound, self).__init__(cid, msg, NOT_FOUND)
# ################################################################################################################################
class Unauthorized(Reportable):
def __init__(self, cid, msg, challenge):
super(Unauthorized, self).__init__(cid, msg, UNAUTHORIZED)
self.challenge = challenge
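
# A usage sketch (illustration only, values assumed) - the challenge is what
# a channel can use to build a WWW-Authenticate header for the caller:
#
#   raise Unauthorized(cid, 'Invalid credentials', 'Basic realm="My API"')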
# ################################################################################################################################
class TooManyRequests(Reportable):
def __init__(self, cid, msg):
super(TooManyRequests, self).__init__(cid, msg, TOO_MANY_REQUESTS)
# ################################################################################################################################
class InternalServerError(Reportable):
def __init__(self, cid, msg='Internal server error'):
super(InternalServerError, self).__init__(cid, msg, INTERNAL_SERVER_ERROR)
# ################################################################################################################################
class ServiceUnavailable(Reportable):
def __init__(self, cid, msg):
super(ServiceUnavailable, self).__init__(cid, msg, SERVICE_UNAVAILABLE)
# ################################################################################################################################
class PubSubSubscriptionExists(BadRequest):
pass
# ################################################################################################################################
class ConnectorClosedException(Exception):
def __init__(self, exc, message):
self.inner_exc = exc
super().__init__(message)
# ################################################################################################################################
class IBMMQException(Exception):
pass
# ################################################################################################################################
# (end of file: src/zato/common/exception.py)
# ################################################################################################################################
# ################################################################################################################################
def get_sys_info():
import platform
system = platform.system()
is_linux = 'linux' in system.lower()
is_windows = 'windows' in system.lower()
is_mac = 'darwin' in system.lower()
if is_linux:
try:
import distro
info = distro.info()
codename = info['codename'].lower()
codename = codename.replace('/', '')
out = '{}.{}'.format(info['id'], info['version'])
if codename:
out += '-{}'.format(codename)
except ImportError:
out = 'linux'
elif is_windows:
_platform = platform.platform().lower()
_edition = platform.win32_edition()
out = '{}-{}'.format(_platform, _edition)
elif is_mac:
out = 'mac'
else:
out = 'os.unrecognised'
return out
# ################################################################################################################################
# ################################################################################################################################
def get_version():
# stdlib
import os
import sys
from sys import version_info as py_version_info
# Python 2/3 compatibility
from past.builtins import execfile
try:
# Make sure the underlying git command runs in our git repository ..
code_dir = os.path.dirname(sys.executable)
os.chdir(code_dir)
curdir = os.path.dirname(os.path.abspath(__file__))
_version_py = os.path.normpath(os.path.join(curdir, '..', '..', '..', '..', '.version.py'))
_locals = {}
execfile(_version_py, _locals)
version = 'Zato {}'.format(_locals['version'])
except IOError:
version = '3.2'
finally:
sys_info = get_sys_info()
version = '{}-py{}.{}.{}-{}'.format(
version,
py_version_info.major,
py_version_info.minor,
py_version_info.micro,
sys_info)
return version
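
# An example of the resulting shape (illustrative values only):
#
#   'Zato 3.2-py3.8.6-ubuntu.20.04-focal'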
# ################################################################################################################################
# ################################################################################################################################
# (end of file: src/zato/common/version.py)
# stdlib
from collections import deque
from datetime import datetime
from logging import getLogger
# gevent
from gevent.lock import RLock
# Zato
from zato.common.api import AuditLog as CommonAuditLog, CHANNEL, GENERIC, WEB_SOCKET
from zato.common.util.api import new_cid
# ################################################################################################################################
# ################################################################################################################################
_sent = CommonAuditLog.Direction.sent
_received = CommonAuditLog.Direction.received
event_attrs = 'direction', 'data', 'event_id', 'timestamp', 'msg_id', 'in_reply_to', 'type_', 'object_id', 'conn_id'
transfer_attrs = 'total_bytes_received', 'total_messages_received', 'avg_msg_size_received', 'first_received', 'last_received', \
'total_bytes_sent', 'total_messages_sent', 'avg_msg_size_sent', 'first_sent', 'last_sent', \
'data', 'messages'
config_attrs = 'type_', 'object_id', 'max_len_messages_received', 'max_len_messages_sent', \
'max_bytes_per_message_received', 'max_bytes_per_message_sent', \
'max_bytes_per_message'
# ################################################################################################################################
# ################################################################################################################################
def new_event_id(prefix='zae', _new_cid=new_cid):
return '{}{}'.format(prefix, _new_cid())
# ################################################################################################################################
# ################################################################################################################################
class DataEvent:
def __init__(self, direction, _utcnow=datetime.utcnow, _new_event_id=new_event_id):
self.direction = direction
self.event_id = _new_event_id()
self.data = ''
self.timestamp = _utcnow()
self.msg_id = ''
self.in_reply_to = ''
self.type_ = ''
self.object_id = ''
self.conn_id = ''
# This will be the other half of a request or response,
        # e.g. it will link DataSent to DataReceived or the other way around.
self.counterpart = None # type: DataEvent
# ################################################################################################################################
def to_dict(self):
out = {}
for name in event_attrs:
out[name] = getattr(self, name)
return out
# ################################################################################################################################
# ################################################################################################################################
class DataSent(DataEvent):
""" An individual piece of data sent by Zato to a remote end.
This can be a request or a reply to a previous one sent by an API client.
"""
__slots__ = event_attrs
def __init__(self, _direction=_sent):
super().__init__(_direction)
# ################################################################################################################################
# ################################################################################################################################
class DataReceived(DataEvent):
""" An individual piece of data received by Zato from a remote end.
This can be a request or a reply to a previous one sent by an API client.
"""
__slots__ = event_attrs
def __init__(self, _direction=_received):
super().__init__(_direction)
# ################################################################################################################################
# ################################################################################################################################
class LogContainerConfig:
""" Data retention configuration for a specific object.
"""
__slots__ = config_attrs
def __init__(self):
self.type_ = '<log-container-config-type_-not-set>'
self.object_id = '<log-container-config-object_id-not-set>'
self.max_len_messages_received = 0
self.max_len_messages_sent = 0
self.max_bytes_per_message_received = 0
self.max_bytes_per_message_sent = 0
# ################################################################################################################################
# ################################################################################################################################
class LogContainer:
""" Stores messages for a specific object, e.g. an individual REST or HL7 channel.
"""
__slots__ = config_attrs + transfer_attrs + ('lock',)
def __init__(self, config, _sent=_sent, _received=_received):
# type: (LogContainerConfig)
# To serialise access to the underlying storage
self.lock = {
_sent: RLock(),
_received: RLock(),
}
self.type_ = config.type_
self.object_id = config.object_id
self.max_len_messages_sent = config.max_len_messages_sent
self.max_len_messages_received = config.max_len_messages_received
self.max_bytes_per_message = {
_sent: config.max_bytes_per_message_sent,
_received: config.max_bytes_per_message_received,
}
self.total_bytes_sent = 0
self.total_messages_sent = 0
self.avg_msg_size_sent = 0
self.first_sent = None # type: datetime
self.last_sent = None # type: datetime
self.total_bytes_received = 0
self.total_messages_received = 0
self.avg_msg_size_received = 0
self.first_received = None # type: datetime
self.last_received = None # type: datetime
# These two deques are where the actual data is kept
self.messages = {}
self.messages[_sent] = deque(maxlen=self.max_len_messages_sent)
self.messages[_received] = deque(maxlen=self.max_len_messages_received)
# ################################################################################################################################
def store(self, data_event):
with self.lock[data_event.direction]:
# Make sure we do not exceed our limit of bytes stored
max_len = self.max_bytes_per_message[data_event.direction]
data_event.data = data_event.data[:max_len]
storage = self.messages[data_event.direction] # type: deque
storage.append(data_event)
# ################################################################################################################################
def to_dict(self, _sent=_sent, _received=_received):
out = {
_sent: [],
_received: []
}
for name in (_sent, _received):
messages = out[name]
with self.lock[name]:
for message in self.messages[name]: # type: DataEvent
messages.append(message.to_dict())
return out
# ################################################################################################################################
# ################################################################################################################################
class AuditLog:
""" Stores a log of messages for channels, outgoing connections or other objects.
"""
def __init__(self):
# Update lock
self.lock = RLock()
# The main log - keys are object types, values are dicts mapping object IDs to LogContainer objects
self._log = {
CHANNEL.HTTP_SOAP: {},
CHANNEL.WEB_SOCKET: {},
GENERIC.CONNECTION.TYPE.CHANNEL_HL7_MLLP: {},
WEB_SOCKET.AUDIT_KEY: {},
}
# Python logging
self.logger = getLogger('zato')
# ################################################################################################################################
def get_container(self, type_, object_id):
# type: (str, str) -> LogContainer
# Note that below we ignore any key errors, effectively silently dropping invalid requests.
return self._log.get(type_, {}).get(object_id)
# ################################################################################################################################
def _create_container(self, config):
# type: (LogContainerConfig)
# Make sure the object ID is a string (it can be an int)
config.object_id = str(config.object_id)
# Get the mapping of object types to object IDs ..
container_dict = self._log.setdefault(config.type_, {})
# .. make sure we do not have such an object already ..
if config.object_id in container_dict:
raise ValueError('Container already found `{}` ({})'.format(config.object_id, config.type_))
# .. if we are here, it means that we are really adding a new container ..
container = LogContainer(config)
# .. finally, we can attach it to the log by the object's ID.
container_dict[config.object_id] = container
# ################################################################################################################################
def create_container(self, config):
# type: (LogContainerConfig)
with self.lock:
self._create_container(config)
# ################################################################################################################################
def _delete_container(self, type_, object_id):
# type: (str, str)
# Make sure the object ID is a string (it can be an int)
object_id = str(object_id)
# Get the mapping of object types to object IDs ..
try:
container_dict = self._log[type_] # type: dict
except KeyError:
raise ValueError('Container type not found `{}` among `{}` ({})'.format(type_, sorted(self._log), object_id))
# No KeyError = we recognised that type ..
# .. so we can now try to delete that container by its object's ID.
# Note that we use .pop on purpose - e.g. when a server has just started,
# it may not have any such an object yet but the user may already try to edit
# the object this log is attached to. Using .pop ignores non-existing keys.
container_dict.pop(object_id, None)
# ################################################################################################################################
def delete_container(self, type_, object_id):
# type: (str, str)
with self.lock:
self._delete_container(type_, object_id)
# ################################################################################################################################
def edit_container(self, config):
# type: (LogContainerConfig)
with self.lock:
self._delete_container(config.type_, config.object_id)
self._create_container(config)
# ################################################################################################################################
def store_data(self, data_event):
# type: (DataEvent) -> None
# We always store IDs as string objects
data_event.object_id = str(data_event.object_id)
# At this point we assume that all the dicts and containers already exist
container_dict = self._log[data_event.type_]
container = container_dict[data_event.object_id] # type: LogContainer
container.store(data_event)
# ################################################################################################################################
def store_data_received(self, data_event):
# type: (DataReceived) -> None
self.store_data(data_event)
# ################################################################################################################################
def store_data_sent(self, data_event):
# type: (DataSent) -> None
self.store_data(data_event)
# ################################################################################################################################
# ################################################################################################################################
if __name__ == '__main__':
    pass

# (end of file: src/zato/common/audit_log.py)
# ################################################################################################################################
# stdlib
from calendar import timegm
from importlib import import_module
from logging import getLogger
from time import gmtime
# Cryptography
from cryptography.fernet import InvalidToken
# Python 2/3 compatibility
from past.builtins import basestring
# Zato
from zato.common.api import KVDB as _KVDB, NONCE_STORE
from zato.common.util import spawn_greenlet
from zato.common.util.kvdb import has_redis_sentinels
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
class KVDB(object):
""" A wrapper around the Zato's key-value database.
"""
def __init__(self, config=None, decrypt_func=None):
self.conn = None
self.config = config
self.decrypt_func = decrypt_func
self.conn_class = None # Introduced so it's easier to test the class
self.has_sentinel = False
# ################################################################################################################################
def _get_connection_class(self):
""" Returns a concrete class to create Redis connections off basing on whether we use Redis sentinels or not.
Abstracted out to a separate method so it's easier to test the whole class in separation.
"""
if self.has_sentinel:
from redis.sentinel import Sentinel
return Sentinel
else:
from redis import StrictRedis
return StrictRedis
# ################################################################################################################################
def _parse_sentinels(self, item):
if item:
if isinstance(item, basestring):
item = [item]
out = []
for elem in item:
elem = elem.split(':')
# This will always exist ..
host = elem[0]
# .. which is why we can always use it ..
to_append = [host]
# .. but port can be optional ..
if len(elem) > 1:
port = elem[1]
port = int(port)
to_append.append(port)
out.append(tuple(to_append))
return out
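
    # A parsing sketch (illustration only, addresses assumed):
    #
    #   self._parse_sentinels('10.0.0.1:26379')         -> [('10.0.0.1', 26379)]
    #   self._parse_sentinels(['host1', 'host2:26380']) -> [('host1',), ('host2', 26380)]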
# ################################################################################################################################
def init(self):
config = {}
self.has_sentinel = has_redis_sentinels(self.config)
if self.has_sentinel:
sentinels = self._parse_sentinels(self.config.get('redis_sentinels'))
if not sentinels:
raise ValueError('kvdb.redis_sentinels must be provided')
sentinel_master = self.config.get('redis_sentinels_master', None)
if not sentinel_master:
raise ValueError('kvdb.redis_sentinels_master must be provided')
config['sentinels'] = sentinels
config['sentinel_master'] = sentinel_master
else:
if self.config.get('host'):
config['host'] = self.config.host
if self.config.get('port'):
config['port'] = int(self.config.port)
if self.config.get('unix_socket_path'):
config['unix_socket_path'] = self.config.unix_socket_path
if self.config.get('db'):
config['db'] = int(self.config.db)
if self.config.get('password'):
                # Heuristics - gA is a prefix of encrypted secrets so there is a chance
                # we need to decrypt it. If the decryption fails, that is fine - we can
                # assume in such a case that it was an actual password starting with this prefix.
if self.config.password.startswith('gA'):
try:
config['password'] = self.decrypt_func(self.config.password)
except InvalidToken:
config['password'] = self.config.password
else:
config['password'] = self.config.password
if self.config.get('socket_timeout'):
config['socket_timeout'] = float(self.config.socket_timeout)
if self.config.get('connection_pool'):
                split = self.config.connection_pool.split('.')
                module, class_name = '.'.join(split[:-1]), split[-1]
                mod = import_module(module)
config['connection_pool'] = getattr(mod, class_name)
if self.config.get('charset'):
config['charset'] = self.config.charset
if self.config.get('errors'):
config['errors'] = self.config.errors
self.conn_class = self._get_connection_class()
if self.has_sentinel:
instance = self.conn_class(config['sentinels'], min_other_sentinels=0, password=config.get('password'),
socket_timeout=config.get('socket_timeout'), decode_responses=True)
self.conn = instance.master_for(config['sentinel_master'])
else:
self.conn = self.conn_class(charset='utf-8', decode_responses=True, **config)
# Confirm whether we can connect
self.ping()
# ################################################################################################################################
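    # A minimal usage sketch (hypothetical, Bunch-like configuration and decryption function):
    #
    #   kvdb = KVDB(config=Bunch(host='localhost', port=6379), decrypt_func=my_decrypt_func)
    #   kvdb.init()
    #   kvdb.conn.set('my.key', 'my.value')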
def pubsub(self):
return self.conn.pubsub()
# ################################################################################################################################
def publish(self, *args, **kwargs):
return self.conn.publish(*args, **kwargs)
# ################################################################################################################################
def subscribe(self, *args, **kwargs):
return self.conn.subscribe(*args, **kwargs)
# ################################################################################################################################
def translate(self, system1, key1, value1, system2, key2, default=''):
return self.conn.hget(
_KVDB.SEPARATOR.join(
(_KVDB.TRANSLATION, system1, key1, value1, system2, key2)), 'value2') or default
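
    # The hash key consulted above is built by joining the translation elements with
    # _KVDB.SEPARATOR - e.g. assuming the separator were ':::', a call such as
    # translate('crm', 'cust-id', '123', 'erp', 'account-id') would read the 'value2'
    # field of the hash stored under 'translation:::crm:::cust-id:::123:::erp:::account-id',
    # with 'translation' standing in for whatever _KVDB.TRANSLATION actually is.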
# ################################################################################################################################
def reconfigure(self, config):
# type: (dict) -> None
self.config = config
self.init()
# ################################################################################################################################
def set_password(self, password):
        # type: (str) -> None
self.config['password'] = password
self.init()
# ################################################################################################################################
def copy(self):
""" Returns an KVDB with the configuration copied over from self. Note that
the object returned isn't initialized, in particular, the connection to the
database won't have been initialized.
"""
kvdb = KVDB()
kvdb.config = self.config
kvdb.decrypt_func = self.decrypt_func
return kvdb
# ################################################################################################################################
def close(self):
self.conn.connection_pool.disconnect()
# ################################################################################################################################
def ping(self):
try:
spawn_greenlet(self.conn.ping)
except Exception as e:
logger.warn('Could not ping %s due to `%s`', self.conn, e.args[0])
else:
logger.info('Redis ping OK -> %s', self.conn)
# ################################################################################################################################
@staticmethod
def is_config_enabled(config):
""" Returns True if the configuration indicates that Redis is enabled.
"""
# type: (dict) -> bool
        return bool(config.get('host') and config.get('port'))
# ################################################################################################################################
# OAuth
def add_oauth_nonce(self, username, nonce, max_nonce_log):
""" Adds an OAuth to the set containing last N used ones for a given username.
"""
key = NONCE_STORE.KEY_PATTERN.format('oauth', username)
# This lets us trim the set to top (last) N nonces
score = timegm(gmtime())
self.conn.zadd(key, score, nonce)
self.conn.zremrangebyrank(key, 0, -max_nonce_log)
def has_oauth_nonce(self, username, nonce):
""" Returns a boolean flag indicating if there's an OAuth nonce for a given
username stored in KVDB.
"""
return self.conn.zscore(NONCE_STORE.KEY_PATTERN.format('oauth', username), nonce)
# ################################################################################################################################
# ################################################################################################################################
'''
# -*- coding: utf-8 -*-
# Zato
from zato.common.util import get_config
from zato.server.service import AsIs, Bool, Int, Service, SIOElem
from zato.server.service.internal import AdminService
# ################################################################################################################################
# ################################################################################################################################
if 0:
from typing import Union as union
from zato.server.base.parallel import ParallelServer
ParallelServer = ParallelServer
# ################################################################################################################################
# ################################################################################################################################
class MyService(AdminService):
name = 'kvdb1.get-list'
class SimpleIO:
input_optional = 'id', 'name'
output_optional = AsIs('id'), 'is_active', 'name', 'host', Int('port'), 'db', Bool('use_redis_sentinels'), \
'redis_sentinels', 'redis_sentinels_master'
default_value = None
# ################################################################################################################################
def get_data(self):
# Response to produce
out = []
# For now, we only return one item containing data read from server.conf
item = {
'id': 'default',
'name': 'default',
'is_active': True,
}
repo_location = self.server.repo_location
config_name = 'server.conf'
config = get_config(repo_location, config_name, bunchified=False)
config = config['kvdb']
for elem in self.SimpleIO.output_optional:
# Extract the embedded name or use it as is
name = elem.name if isinstance(elem, SIOElem) else elem
# These will not exist in server.conf
if name in ('id', 'is_active', 'name'):
continue
# Add it to output
item[name] = config[name]
# Add our only item to response
out.append(item)
return out
# ################################################################################################################################
def handle(self):
self.response.payload[:] = self.get_data()
# ################################################################################################################################
# ################################################################################################################################
class Edit(AdminService):
name = 'kvdb1.edit'
class SimpleIO:
input_optional = AsIs('id'), 'name', Bool('use_redis_sentinels')
input_required = 'host', 'port', 'db', 'redis_sentinels', 'redis_sentinels_master'
output_optional = 'name'
def handle(self):
# Local alias
input = self.request.input
# If provided, turn sentinels configuration into a format expected by the underlying KVDB object
redis_sentinels = input.redis_sentinels or '' # type: str
if redis_sentinels:
redis_sentinels = redis_sentinels.splitlines()
redis_sentinels = ', '.join(redis_sentinels)
# Assign new server-wide configuration ..
self.server.fs_server_config.kvdb.host = input.host
self.server.fs_server_config.kvdb.port = int(input.port)
self.server.fs_server_config.kvdb.redis_sentinels = redis_sentinels
self.server.fs_server_config.kvdb.redis_sentinels_master = input.redis_sentinels_master or ''
# .. and rebuild the Redis connection object.
self.server.kvdb.reconfigure(self.server.fs_server_config.kvdb)
self.response.payload.name = self.request.input.name
# ################################################################################################################################
# ################################################################################################################################
'''
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
# stdlib
from logging import getLogger
from string import punctuation
# PyParsing
from pyparsing import alphanums, oneOf, OneOrMore, Optional, White, Word
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
# Redis PyParsing grammar
quot = Optional(oneOf(('"', "'")))
command = oneOf((
'CONFIG', 'DBSIZE', 'DECR', 'DECRBY', 'DEL', 'DUMP', 'ECHO',
'EXISTS', 'EXPIRE', 'EXPIREAT', 'FLUSHDB', 'GET',
'HDEL', 'HEXISTS', 'HGET', 'HGETALL', 'HINCRBY', 'HKEYS', 'HLEN', 'HSET', 'HSETNX',
'HVALS', 'INCR', 'INCRBY', 'INFO', 'KEYS', 'LLEN', 'LPOP', 'LPUSH', 'LPUSHX',
'LRANGE', 'LREM', 'LSET', 'LTRIM', 'MGET', 'MSET', 'MSETNX', 'OBJECT', 'PERSIST',
'PEXPIRE', 'PEXPIREAT', 'PING', 'PSETEX', 'PTTL', 'RANDOMKEY', 'RENAME', 'RENAMENX',
'RESTORE', 'RPOP', 'SADD', 'SET', 'SISMEMBER', 'SMEMBERS', 'SREM', 'TIME', 'TTL', 'TYPE',
'ZADD', 'ZRANGE', 'ZREM'), caseless=True).setResultsName('command')
parameters = (OneOrMore(Word(alphanums + '-' + punctuation))).setResultsName('parameters')
redis_grammar = command + Optional(White().suppress() + parameters)
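
# A minimal usage sketch of the grammar above (hypothetical input):
#
#   result = redis_grammar.parseString('set my-key my-value')
#   result.command          # -> 'SET' (commands match case-insensitively)
#   list(result.parameters) # -> ['my-key', 'my-value']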
# ################################################################################################################################
# ################################################################################################################################
# Python 2/3 compatibility
from six import PY2
# ################################################################################################################################
# ################################################################################################################################
"""
Copyright 2006-2011 SpringSource (http://springsource.com), All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# ################################################################################################################################
# ################################################################################################################################
# stdlib
import http.client as http_client
import socket
import ssl
class CAValidatingHTTPSConnection(http_client.HTTPConnection):
""" This class allows communication via SSL/TLS and takes Certificate Authorities
into account.
"""
def __init__(self, host, port=None, ca_certs=None, keyfile=None, certfile=None,
cert_reqs=None, strict=None, ssl_version=None,
timeout=None):
        if PY2:
            http_client.HTTPConnection.__init__(self, host, port, strict, timeout)
        else:
            http_client.HTTPConnection.__init__(self, host, port, timeout=timeout) # Python 3 has no 'strict'
self.ca_certs = ca_certs
self.keyfile = keyfile
self.certfile = certfile
self.cert_reqs = cert_reqs
self.ssl_version = ssl_version
def connect(self):
""" Connect to a host on a given (SSL/TLS) port.
"""
sock = socket.create_connection((self.host, self.port), self.timeout)
if self._tunnel_host:
self.sock = sock
self._tunnel()
self.sock = self.wrap_socket(sock)
def wrap_socket(self, sock):
""" Gets a socket object and wraps it into an SSL/TLS-aware one. May be
overridden in subclasses if the wrapping process needs to be customized.
"""
return ssl.wrap_socket(sock, self.keyfile, self.certfile,
ca_certs=self.ca_certs, cert_reqs=self.cert_reqs,
ssl_version=self.ssl_version)
class CAValidatingHTTPS(http_client.HTTPConnection):
""" A subclass of http.client.HTTPConnection which is aware of Certificate Authorities
used in SSL/TLS transactions.
"""
_connection_class = CAValidatingHTTPSConnection
def __init__(self, host=None, port=None, strict=None, ca_certs=None, keyfile=None, certfile=None,
cert_reqs=None, ssl_version=None, timeout=None):
self._setup(self._connection_class(host, port, ca_certs, keyfile, certfile,
cert_reqs, strict, ssl_version, timeout))
# ################################################################################################################################
# ################################################################################################################################
"""
Copyright 2006-2011 SpringSource (http://springsource.com), All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# stdlib
import logging
import sys
import traceback
if PY2:
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
from xmlrpclib import ServerProxy, Transport
else:
from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
from xmlrpc.client import ServerProxy, Transport
class VerificationException(Exception):
""" Raised when the verification of a certificate's fields fails.
"""
# ##############################################################################
# Server
# ##############################################################################
class RequestHandler(SimpleXMLRPCRequestHandler):
rpc_paths = ("/", "/RPC2",)
    def setup(self):
        self.connection = self.request # for doPOST
        if PY2:
            self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
            self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)
        else: # socket._fileobject is Python 2-only
            self.rfile = self.request.makefile("rb", self.rbufsize)
            self.wfile = self.request.makefile("wb", self.wbufsize)
class SSLServer(SimpleXMLRPCServer):
def __init__(self, host=None, port=None, keyfile=None, certfile=None,
ca_certs=None, cert_reqs=ssl.CERT_NONE, ssl_version=ssl.PROTOCOL_TLSv1,
do_handshake_on_connect=True, suppress_ragged_eofs=True, ciphers=None,
log_requests=True, **kwargs):
if PY2:
SimpleXMLRPCServer.__init__(self, (host, port), requestHandler=RequestHandler)
else:
SimpleXMLRPCServer.__init__(self, (host, port))
self.keyfile = keyfile
self.certfile = certfile
self.ca_certs = ca_certs
self.cert_reqs = cert_reqs
self.ssl_version = ssl_version
self.do_handshake_on_connect = do_handshake_on_connect
self.suppress_ragged_eofs = suppress_ragged_eofs
self.ciphers = ciphers
# Looks awkward to use camelCase here but that's what SimpleXMLRPCRequestHandler
# expects.
self.logRequests = log_requests
# 'verify_fields' is taken from kwargs to allow for adding more keywords
# in future versions.
        self.verify_fields = kwargs.get("verify_fields")

        # Used in get_request and verify_request below
        self.logger = logging.getLogger(self.__class__.__name__)
def get_request(self):
""" Overridden from SocketServer.TCPServer.get_request, wraps the socket in
an SSL context.
"""
sock, from_addr = self.socket.accept()
# 'ciphers' argument is new in 2.7 and we must support 2.6 so add it
# to kwargs conditionally, depending on the Python version.
kwargs = {"keyfile":self.keyfile, "certfile":self.certfile,
"server_side":True, "cert_reqs":self.cert_reqs, "ssl_version":self.ssl_version,
"ca_certs":self.ca_certs, "do_handshake_on_connect":self.do_handshake_on_connect,
"suppress_ragged_eofs":self.suppress_ragged_eofs}
if sys.version_info >= (2, 7):
kwargs["ciphers"] = self.ciphers
sock = ssl.wrap_socket(sock, **kwargs)
if self.logger.isEnabledFor(logging.DEBUG):
self.logger.debug("get_request cert='%s', from_addr='%s'" % (sock.getpeercert(), from_addr))
return sock, from_addr
def verify_request(self, sock, from_addr):
""" Overridden from SocketServer.TCPServer.verify_request, adds validation of the
other side's certificate fields.
"""
try:
if self.verify_fields:
cert = sock.getpeercert()
if not cert:
msg = "Couldn't verify fields, peer didn't send the certificate, from_addr='%s'" % (from_addr,)
raise VerificationException(msg)
allow_peer, reason = self.verify_peer(cert, from_addr)
if not allow_peer:
self.logger.error(reason)
sock.close()
return False
except Exception:
# It was either an error on our side or the client didn't send the
# certificate even though self.cert_reqs was CERT_OPTIONAL (it couldn't
# have been CERT_REQUIRED because we wouldn't have got so far, the
# session would've been terminated much earlier in ssl.wrap_socket call).
# Regardless of the reason we cannot accept the client in that case.
msg = "Verification error='%s', cert='%s', from_addr='%s'" % (
traceback.format_exc(), sock.getpeercert(), from_addr)
self.logger.error(msg)
sock.close()
return False
return True
def verify_peer(self, cert, from_addr):
""" Verifies the other side's certificate. May be overridden in subclasses
if the verification process needs to be customized.
"""
subject = cert.get("subject")
if not subject:
msg = "Peer certificate doesn't have the 'subject' field, cert='%s'" % cert
raise VerificationException(msg)
subject = dict(elem[0] for elem in subject)
for verify_field in self.verify_fields:
expected_value = self.verify_fields[verify_field]
cert_value = subject.get(verify_field, None)
if not cert_value:
reason = "Peer didn't send the '%s' field, subject fields received '%s'" % (
verify_field, subject)
return False, reason
if expected_value != cert_value:
reason = "Expected the subject field '%s' to have value '%s' instead of '%s', subject='%s'" % (
verify_field, expected_value, cert_value, subject)
return False, reason
return True, None
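
    # An example of what 'verify_fields' may look like (hypothetical values) - each key
    # is a certificate subject field and each value is what the peer must present:
    #
    #   {'commonName': 'client1.example.com', 'organizationName': 'My Organization'}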
def register_functions(self):
raise NotImplementedError("Must be overridden by subclasses")
# ##############################################################################
# Client
# ##############################################################################
class SSLClientTransport(Transport):
""" Handles an HTTPS transaction to an XML-RPC server.
"""
user_agent = "SSL XML-RPC Client (by http://springpython.webfactional.com)"
def __init__(self, ca_certs=None, keyfile=None, certfile=None, cert_reqs=None,
ssl_version=None, timeout=None, strict=None):
self.ca_certs = ca_certs
self.keyfile = keyfile
self.certfile = certfile
self.cert_reqs = cert_reqs
self.ssl_version = ssl_version
self.timeout = timeout
self.strict = strict
Transport.__init__(self)
def make_connection(self, host):
return CAValidatingHTTPS(host, strict=self.strict, ca_certs=self.ca_certs,
keyfile=self.keyfile, certfile=self.certfile, cert_reqs=self.cert_reqs,
ssl_version=self.ssl_version, timeout=self.timeout)
class SSLClient(ServerProxy):
def __init__(self, uri=None, ca_certs=None, keyfile=None, certfile=None,
cert_reqs=ssl.CERT_REQUIRED, ssl_version=ssl.PROTOCOL_TLSv1,
transport=None, encoding=None, verbose=0, allow_none=0, use_datetime=0,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT, strict=None):
if not transport:
_transport=SSLClientTransport(ca_certs, keyfile, certfile, cert_reqs,
ssl_version, timeout, strict)
else:
_transport=transport(ca_certs, keyfile, certfile, cert_reqs, ssl_version, timeout, strict)
ServerProxy.__init__(self, uri, _transport, encoding, verbose, allow_none, use_datetime)
self.logger = logging.getLogger(self.__class__.__name__)
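
# A minimal client-side sketch (hypothetical URL and file paths):
#
#   client = SSLClient('https://localhost:8000/RPC2', ca_certs='/path/to/ca-cert.pem',
#       keyfile='/path/to/client-key.pem', certfile='/path/to/client-cert.pem')
#   client.my_remote_method() # Invokes 'my_remote_method' on the XML-RPC server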
# ################################################################################################################################
# ################################################################################################################################
from __future__ import absolute_import, division, print_function, unicode_literals
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpoint
# ################################################################################################################################
# Type checking
if 0:
from zato.common.odb.model import Cluster
Cluster = Cluster
# ################################################################################################################################
# ################################################################################################################################
class ODBPostProcess(object):
""" SQL post-processing functionality, e.g. creation of objects only after aserver has started.
"""
def __init__(self, session, cluster, cluster_id):
# type: (object, Cluster, int)
        if not (cluster or cluster_id):
            raise ValueError('At least one of cluster or cluster_id is required, received `{}` and `{}`'.format(
                cluster, cluster_id))
self.session = session
self.cluster = cluster
self.cluster_id = cluster_id
# ################################################################################################################################
def run(self):
self.add_pubsub_service_endpoint()
self.session.commit()
# ################################################################################################################################
def add_pubsub_service_endpoint(self, _name=PUBSUB.SERVICE_SUBSCRIBER.NAME):
existing = self.session.query(PubSubEndpoint.id).\
filter(PubSubEndpoint.name==_name).\
first()
if not existing:
endpoint = PubSubEndpoint()
endpoint.name = _name
endpoint.is_internal = True
endpoint.role = PUBSUB.ROLE.SUBSCRIBER.id
endpoint.topic_patterns = PUBSUB.SERVICE_SUBSCRIBER.TOPICS_ALLOWED
endpoint.endpoint_type = PUBSUB.ENDPOINT_TYPE.SERVICE.id
if self.cluster:
endpoint.cluster = self.cluster
else:
endpoint.cluster_id = self.cluster_id
self.session.add(endpoint)
# ################################################################################################################################
# ################################################################################################################################
# stdlib
import logging
from contextlib import closing
from copy import deepcopy
from datetime import datetime
from io import StringIO
from logging import DEBUG, getLogger
from threading import RLock
from time import time
from traceback import format_exc
# SQLAlchemy
from sqlalchemy import and_, create_engine, event, select
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import sessionmaker, scoped_session
from sqlalchemy.orm.query import Query
from sqlalchemy.pool import NullPool
from sqlalchemy.sql.expression import true
from sqlalchemy.sql.type_api import TypeEngine
# Bunch
from bunch import Bunch, bunchify
# Zato
from zato.common.api import DEPLOYMENT_STATUS, GENERIC, HTTP_SOAP, MS_SQL, NotGiven, PUBSUB, SEC_DEF_TYPE, SECRET_SHADOW, \
SERVER_UP_STATUS, UNITTEST, ZATO_NONE, ZATO_ODB_POOL_NAME
from zato.common.exception import Inactive
from zato.common.mssql_direct import MSSQLDirectAPI, SimpleSession
from zato.common.odb import query
from zato.common.odb.ping import get_ping_query
from zato.common.odb.model import APIKeySecurity, Cluster, DeployedService, DeploymentPackage, DeploymentStatus, HTTPBasicAuth, \
JWT, OAuth, PubSubEndpoint, SecurityBase, Server, Service, TLSChannelSecurity, XPathSecurity, \
WSSDefinition, VaultConnection
from zato.common.odb.testing import UnittestEngine
from zato.common.odb.query.pubsub import subscription as query_ps_subscription
from zato.common.odb.query import generic as query_generic
from zato.common.util.api import current_host, get_component_name, get_engine_url, new_cid, parse_extra_into_dict, \
parse_tls_channel_security_definition, spawn_greenlet
from zato.common.util.sql import ElemsWithOpaqueMaker, elems_with_opaque
from zato.common.util.url_dispatcher import get_match_target
from zato.sso.odb.query import get_rate_limiting_info as get_sso_user_rate_limiting_info
# ################################################################################################################################
# Type checking
import typing
if typing.TYPE_CHECKING:
from zato.server.base.parallel import ParallelServer
# For pyflakes
ParallelServer = ParallelServer
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
rate_limit_keys = 'is_rate_limit_active', 'rate_limit_def', 'rate_limit_type', 'rate_limit_check_parent_def'
unittest_fs_sql_config = {
UNITTEST.SQL_ENGINE: {
'ping_query': 'SELECT 1+1'
}
}
# ################################################################################################################################
ServiceTable = Service.__table__
ServiceTableInsert = ServiceTable.insert
DeployedServiceTable = DeployedService.__table__
DeployedServiceInsert = DeployedServiceTable.insert
DeployedServiceDelete = DeployedServiceTable.delete
# ################################################################################################################################
# ################################################################################################################################
# Based on https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/WriteableTuple
class WritableKeyedTuple(object):
def __init__(self, elem):
object.__setattr__(self, '_elem', elem)
# ################################################################################################################################
def __getattr__(self, key):
return getattr(self._elem, key)
# ################################################################################################################################
def __getitem__(self, idx):
return self._elem.__getitem__(idx)
# ################################################################################################################################
def __setitem__(self, idx, value):
return self._elem.__setitem__(idx, value)
# ################################################################################################################################
    def __nonzero__(self):
        return bool(self._elem)

    __bool__ = __nonzero__ # Python 3 uses __bool__ rather than __nonzero__
# ################################################################################################################################
def __repr__(self):
return '<WritableKeyedTuple at {}>'.format(hex(id(self)))
# ################################################################################################################################
def get_value(self):
return self._elem._asdict()
# ################################################################################################################################
# ################################################################################################################################
class SessionWrapper(object):
""" Wraps an SQLAlchemy session.
"""
def __init__(self):
self.session_initialized = False
self.pool = None # type: SQLConnectionPool
self.config = None # type: dict
self.is_sqlite = None # type: bool
self.logger = logging.getLogger(self.__class__.__name__)
def init_session(self, *args, **kwargs):
spawn_greenlet(self._init_session, *args, **kwargs)
def _init_session(self, name, config, pool, use_scoped_session=True):
# type: (str, dict, SQLConnectionPool, bool)
self.config = config
self.fs_sql_config = config['fs_sql_config']
self.pool = pool
try:
self.pool.ping(self.fs_sql_config)
except Exception as e:
msg = 'Could not ping:`%s`, session will be left uninitialised, e:`%s`'
if self.config['is_active']:
err_details = format_exc()
else:
err_details = e.args[0]
self.logger.warn(msg, name, err_details)
else:
if config['engine'] == MS_SQL.ZATO_DIRECT:
self._Session = SimpleSession(self.pool.engine)
else:
if use_scoped_session:
self._Session = scoped_session(sessionmaker(bind=self.pool.engine, query_cls=WritableTupleQuery))
else:
self._Session = sessionmaker(bind=self.pool.engine, query_cls=WritableTupleQuery)
self._session = self._Session()
self.session_initialized = True
self.is_sqlite = self.pool.engine and self.pool.engine.name == 'sqlite'
def session(self):
return self._Session()
def close(self):
self._session.close()
# ################################################################################################################################
# ################################################################################################################################
class WritableTupleQuery(Query):
def __iter__(self):
out = super(WritableTupleQuery, self).__iter__()
columns_desc = self.column_descriptions
first_type = columns_desc[0]['type']
len_columns_desc = len(columns_desc)
# This is a simple result of a query such as session.query(ObjectName).count()
if len_columns_desc == 1 and isinstance(first_type, TypeEngine):
return out
# A list of objects, e.g. from .all()
elif len_columns_desc > 1:
return (WritableKeyedTuple(elem) for elem in out)
# Anything else
else:
return out
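
    # Usage sketch - with a session created using query_cls=WritableTupleQuery
    # (hypothetical model and query), multi-column results may have attributes
    # assigned to them, unlike plain SQLAlchemy keyed tuples:
    #
    #   row = session.query(Service.id, Service.name).first() # -> WritableKeyedTuple
    #   row.name = 'overridden' # Stored on the wrapper, shadowing the tuple's value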
# ################################################################################################################################
# ################################################################################################################################
class SQLConnectionPool(object):
""" A pool of SQL connections wrapping an SQLAlchemy engine.
"""
def __init__(self, name, config, config_no_sensitive, should_init=True):
# type: (str, dict, dict) -> None
self.name = name
self.config = config
self.config_no_sensitive = config_no_sensitive
self.logger = getLogger(self.__class__.__name__)
self.has_debug = self.logger.isEnabledFor(DEBUG)
self.engine = None
self.engine_name = config['engine'] # self.engine.name is 'mysql' while 'self.engine_name' is mysql+pymysql
if should_init:
self.init()
def init(self):
_extra = {
'pool_pre_ping': True, # Make sure SQLAlchemy 1.2+ can refresh connections on transient errors
}
# MySQL only
if self.engine_name.startswith('mysql'):
_extra['pool_recycle'] = 600
# Postgres-only
elif self.engine_name.startswith('postgres'):
_extra['connect_args'] = {'application_name': get_component_name()}
extra = self.config.get('extra') # Optional, hence .get
_extra.update(parse_extra_into_dict(extra))
# SQLite has no pools
if self.engine_name != 'sqlite':
_extra['pool_size'] = int(self.config.get('pool_size', 1))
if _extra['pool_size'] == 0:
_extra['poolclass'] = NullPool
engine_url = get_engine_url(self.config)
try:
self.engine = self._create_engine(engine_url, self.config, _extra)
except Exception as e:
self.logger.warn('Could not create SQL connection `%s`, e:`%s`', self.config['name'], e.args[0])
if self.engine and (not self._is_unittest_engine(engine_url)) and self._is_sa_engine(engine_url):
event.listen(self.engine, 'checkin', self.on_checkin)
event.listen(self.engine, 'checkout', self.on_checkout)
event.listen(self.engine, 'connect', self.on_connect)
event.listen(self.engine, 'first_connect', self.on_first_connect)
        self.checkins = 0
        self.checkouts = 0
# ################################################################################################################################
def __str__(self):
return '<{} at {}, config:[{}]>'.format(self.__class__.__name__, hex(id(self)), self.config_no_sensitive)
# ################################################################################################################################
__repr__ = __str__
# ################################################################################################################################
def _is_sa_engine(self, engine_url):
# type: (str)
return 'zato+mssql1' not in engine_url
# ################################################################################################################################
def _is_unittest_engine(self, engine_url):
# type: (str)
return 'zato+unittest' in engine_url
# ################################################################################################################################
def _create_unittest_engine(self, engine_url, config):
# type: (str, dict)
return UnittestEngine(engine_url, config)
# ################################################################################################################################
def _create_engine(self, engine_url, config, extra):
if self._is_unittest_engine(engine_url):
return self._create_unittest_engine(engine_url, config)
elif self._is_sa_engine(engine_url):
return create_engine(engine_url, **extra)
else:
# This is a direct MS SQL connection
connect_kwargs = {
'dsn': config['host'],
'port': config['port'],
'database': config['db_name'],
'user': config['username'],
'password': config['password'],
'login_timeout': 3,
'as_dict': True,
}
for name in MS_SQL.EXTRA_KWARGS:
value = extra.get(name, NotGiven)
if value is not NotGiven:
connect_kwargs[name] = value
return MSSQLDirectAPI(config['name'], config['pool_size'], connect_kwargs)
# ################################################################################################################################
def on_checkin(self, dbapi_conn, conn_record):
if self.has_debug:
self.logger.debug('Checked in dbapi_conn:%s, conn_record:%s', dbapi_conn, conn_record)
self.checkins += 1
# ################################################################################################################################
def on_checkout(self, dbapi_conn, conn_record, conn_proxy):
if self.has_debug:
self.logger.debug('Checked out dbapi_conn:%s, conn_record:%s, conn_proxy:%s',
dbapi_conn, conn_record, conn_proxy)
self.checkouts += 1
self.logger.debug('co-cin-diff %d-%d-%d', self.checkouts, self.checkins, self.checkouts - self.checkins)
# ################################################################################################################################
def on_connect(self, dbapi_conn, conn_record):
if self.has_debug:
self.logger.debug('Connect dbapi_conn:%s, conn_record:%s', dbapi_conn, conn_record)
# ################################################################################################################################
def on_first_connect(self, dbapi_conn, conn_record):
if self.has_debug:
self.logger.debug('First connect dbapi_conn:%s, conn_record:%s', dbapi_conn, conn_record)
# ################################################################################################################################
def ping(self, fs_sql_config):
""" Pings the SQL database and returns the response time, in milliseconds.
"""
if not self.engine:
return
if hasattr(self.engine, 'ping'):
func = self.engine.ping
query = self.engine.ping_query
args = []
else:
func = self.engine.connect().execute
query = get_ping_query(fs_sql_config, self.config)
args = [query]
self.logger.debug('About to ping the SQL connection pool:`%s`, query:`%s`', self.config_no_sensitive, query)
start_time = time()
func(*args)
response_time = time() - start_time
self.logger.debug('Ping OK, pool:`%s`, response_time:`%s` s', self.config_no_sensitive, response_time)
return response_time
# ################################################################################################################################
def _conn(self):
""" Returns an SQLAlchemy connection object.
"""
return self.engine.connect()
# ################################################################################################################################
conn = property(fget=_conn, doc=_conn.__doc__)
# ################################################################################################################################
def _impl(self):
""" Returns the underlying connection's implementation, the SQLAlchemy engine.
"""
return self.engine
# ################################################################################################################################
impl = property(fget=_impl, doc=_impl.__doc__)
# ################################################################################################################################
class PoolStore(object):
""" A main class for accessing all of the SQL connection pools. Each server
thread has its own store.
"""
def __init__(self, sql_conn_class=SQLConnectionPool):
self.sql_conn_class = sql_conn_class
self._lock = RLock()
self.wrappers = {}
self.logger = getLogger(self.__class__.__name__)
# ################################################################################################################################
def __getitem__(self, name, enforce_is_active=True):
""" Checks out the connection pool. If enforce_is_active is False,
the pool's is_active flag will be ignored.
"""
with self._lock:
if enforce_is_active:
wrapper = self.wrappers[name]
if wrapper.config['is_active']:
return wrapper
raise Inactive(name)
else:
return self.wrappers[name]
# ################################################################################################################################
get = __getitem__
# ################################################################################################################################
def __setitem__(self, name, config):
""" Stops a connection pool if it exists and replaces it with a new one
using updated settings.
"""
with self._lock:
if name in self.wrappers:
del self[name]
config_no_sensitive = {}
for key in config:
if key != 'callback_func':
config_no_sensitive[key] = config[key]
config_no_sensitive['password'] = SECRET_SHADOW
pool = self.sql_conn_class(name, config, config_no_sensitive)
wrapper = SessionWrapper()
wrapper.init_session(name, config, pool)
self.wrappers[name] = wrapper
set_item = __setitem__
# ################################################################################################################################
def add_unittest_item(self, name, fs_sql_config=unittest_fs_sql_config):
self.set_item(name, {
            'password': 'password.{}'.format(new_cid()),
'engine': UNITTEST.SQL_ENGINE,
'fs_sql_config': fs_sql_config,
'is_active': True,
})
# ################################################################################################################################
def __delitem__(self, name):
""" Stops a pool and deletes it from the store.
"""
with self._lock:
engine = self.wrappers[name].pool.engine
if engine:
engine.dispose()
del self.wrappers[name]
# ################################################################################################################################
def __str__(self):
out = StringIO()
out.write('<{} at {} wrappers:['.format(self.__class__.__name__, hex(id(self))))
out.write(', '.join(sorted(self.wrappers.keys())))
out.write(']>')
return out.getvalue()
# ################################################################################################################################
__repr__ = __str__
# ################################################################################################################################
def change_password(self, name, password):
""" Updates the password which means recreating the pool using the new
password.
"""
with self._lock:
# Do not check if the connection is active when changing the password,
# sometimes it is desirable to change it even if it is Inactive.
item = self.get(name, enforce_is_active=False)
item.pool.engine.dispose()
config = deepcopy(self.wrappers[name].pool.config)
config['password'] = password
self[name] = config
# ################################################################################################################################
def cleanup_on_stop(self):
""" Invoked when the server is stopping.
"""
with self._lock:
for name, wrapper in self.wrappers.items():
wrapper.pool.engine.dispose()
# ################################################################################################################################
class _Server(object):
""" A plain Python object which is used instead of an SQLAlchemy model so the latter is not tied to a session
    for as long as a server is up.
"""
def __init__(self, odb_server, odb_cluster):
self.id = odb_server.id
self.name = odb_server.name
self.last_join_status = odb_server.last_join_status
self.token = odb_server.token
self.cluster_id = odb_cluster.id
self.cluster = odb_cluster
# ################################################################################################################################
class ODBManager(SessionWrapper):
""" Manages connections to a given component's Operational Database.
"""
def __init__(self, parallel_server=None, well_known_data=None, token=None, crypto_manager=None, server_id=None,
server_name=None, cluster_id=None, pool=None, decrypt_func=None):
# type: (ParallelServer, str, str, object, int, str, int, object, object)
super(ODBManager, self).__init__()
self.parallel_server = parallel_server
self.well_known_data = well_known_data
self.token = token
self.crypto_manager = crypto_manager
self.server_id = server_id
self.server_name = server_name
self.cluster_id = cluster_id
self.pool = pool
self.decrypt_func = decrypt_func
# ################################################################################################################################
def on_deployment_finished(self):
""" Commits all the implicit BEGIN blocks opened by SELECTs.
"""
self._session.commit()
# ################################################################################################################################
def fetch_server(self, odb_config):
""" Fetches the server from the ODB. Also sets the 'cluster' attribute
to the value pointed to by the server's .cluster attribute.
"""
if not self.session_initialized:
self.init_session(ZATO_ODB_POOL_NAME, odb_config, self.pool, False)
with closing(self.session()) as session:
try:
server = session.query(Server).\
filter(Server.token == self.token).\
one()
self.server = _Server(server, server.cluster)
self.server_id = server.id
self.cluster = server.cluster
self.cluster_id = server.cluster.id
return self.server
except Exception:
msg = 'Could not find server in ODB, token:`{}`'.format(
self.token)
logger.error(msg)
raise
# ################################################################################################################################
def get_servers(self, up_status=SERVER_UP_STATUS.RUNNING, filter_out_self=True):
""" Returns all servers matching criteria provided on input.
"""
with closing(self.session()) as session:
query = session.query(Server).\
filter(Server.cluster_id == self.cluster_id)
if up_status:
query = query.filter(Server.up_status == up_status)
if filter_out_self:
query = query.filter(Server.id != self.server_id)
return query.all()
# ################################################################################################################################
def get_default_internal_pubsub_endpoint(self):
with closing(self.session()) as session:
return session.query(PubSubEndpoint).\
filter(PubSubEndpoint.name==PUBSUB.DEFAULT.INTERNAL_ENDPOINT_NAME).\
filter(PubSubEndpoint.endpoint_type==PUBSUB.ENDPOINT_TYPE.INTERNAL.id).\
filter(PubSubEndpoint.cluster_id==self.cluster_id).\
one()
# ################################################################################################################################
def get_missing_services(self, server, locally_deployed):
""" Returns services deployed on the server given on input that are not among locally_deployed.
"""
missing = set()
with closing(self.session()) as session:
server_services = session.query(
Service.id, Service.name,
DeployedService.source_path, DeployedService.source).\
join(DeployedService, Service.id==DeployedService.service_id).\
join(Server, DeployedService.server_id==Server.id).\
filter(Service.is_internal!=true()).\
all()
for item in server_services:
if item.name not in locally_deployed:
missing.add(item)
return missing
# ################################################################################################################################
def server_up_down(self, token, status, update_host=False, bind_host=None, bind_port=None, preferred_address=None,
crypto_use_tls=None):
""" Updates the information regarding the server is RUNNING or CLEAN_DOWN etc.
and what host it's running on.
"""
with closing(self.session()) as session:
server = session.query(Server).\
filter(Server.token==token).\
first()
# It may be the case that the server has been deleted from web-admin before it shut down,
# in which case during the shut down it will not be able to find itself in ODB anymore.
if not server:
logger.info('No server found for token `%s`, status:`%s`', token, status)
return
server.up_status = status
server.up_mod_date = datetime.utcnow()
if update_host:
server.host = current_host()
server.bind_host = bind_host
server.bind_port = bind_port
server.preferred_address = preferred_address
server.crypto_use_tls = crypto_use_tls
session.add(server)
session.commit()
# ################################################################################################################################
def _copy_rate_limiting_config(self, copy_from, copy_to, _keys=rate_limit_keys):
for key in _keys:
copy_to[key] = copy_from.get(key)
# ################################################################################################################################
def get_url_security(self, cluster_id, connection=None, any_internal=HTTP_SOAP.ACCEPT.ANY_INTERNAL):
""" Returns the security configuration of HTTP URLs.
"""
        # Temporary cache of security definitions already visited, so the same ones
        # are not looked up repeatedly for each HTTP object that uses them.
sec_def_cache = {}
with closing(self.session()) as session:
# What DB class to fetch depending on the string value of the security type.
sec_type_db_class = {
SEC_DEF_TYPE.APIKEY: APIKeySecurity,
SEC_DEF_TYPE.BASIC_AUTH: HTTPBasicAuth,
SEC_DEF_TYPE.JWT: JWT,
SEC_DEF_TYPE.OAUTH: OAuth,
SEC_DEF_TYPE.TLS_CHANNEL_SEC: TLSChannelSecurity,
SEC_DEF_TYPE.WSS: WSSDefinition,
SEC_DEF_TYPE.VAULT: VaultConnection,
SEC_DEF_TYPE.XPATH_SEC: XPathSecurity,
}
result = {}
q = query.http_soap_security_list(session, cluster_id, connection)
columns = Bunch()
# So ConfigDict has its data in the format it expects
for c in q.statement.columns:
columns[c.name] = None
for item in elems_with_opaque(q):
target = get_match_target({
'http_accept': item.get('http_accept'),
'http_method': item.get('method'),
'soap_action': item.soap_action,
'url_path': item.url_path,
}, http_methods_allowed_re=self.parallel_server.http_methods_allowed_re)
result[target] = Bunch()
result[target].is_active = item.is_active
result[target].transport = item.transport
result[target].data_format = item.data_format
result[target].sec_use_rbac = item.sec_use_rbac
if item.security_id:
# For later use
result[target].sec_def = Bunch()
# We either have already seen this security definition ..
if item.security_id in sec_def_cache:
sec_def = sec_def_cache[item.security_id]
# .. or we have not, in which case we need to look it up
# and then cache it for later use.
else:
# Will raise KeyError if the DB gets somehow misconfigured.
db_class = sec_type_db_class[item.sec_type]
sec_def_item = session.query(db_class).\
filter(db_class.id==item.security_id).\
one()
sec_def = bunchify(sec_def_item.asdict())
ElemsWithOpaqueMaker.process_config_dict(sec_def)
sec_def_cache[item.security_id] = sec_def
# Common things first
result[target].sec_def.id = sec_def.id
result[target].sec_def.name = sec_def.name
result[target].sec_def.password = self.decrypt_func(sec_def.password or '')
result[target].sec_def.sec_type = item.sec_type
if item.sec_type == SEC_DEF_TYPE.BASIC_AUTH:
result[target].sec_def.username = sec_def.username
result[target].sec_def.realm = sec_def.realm
self._copy_rate_limiting_config(sec_def, result[target].sec_def)
elif item.sec_type == SEC_DEF_TYPE.JWT:
result[target].sec_def.username = sec_def.username
self._copy_rate_limiting_config(sec_def, result[target].sec_def)
elif item.sec_type == SEC_DEF_TYPE.APIKEY:
result[target].sec_def.username = 'HTTP_{}'.format(sec_def.username.upper().replace('-', '_'))
self._copy_rate_limiting_config(sec_def, result[target].sec_def)
elif item.sec_type == SEC_DEF_TYPE.WSS:
result[target].sec_def.username = sec_def.username
result[target].sec_def.password_type = sec_def.password_type
result[target].sec_def.reject_empty_nonce_creat = sec_def.reject_empty_nonce_creat
result[target].sec_def.reject_stale_tokens = sec_def.reject_stale_tokens
result[target].sec_def.reject_expiry_limit = sec_def.reject_expiry_limit
result[target].sec_def.nonce_freshness_time = sec_def.nonce_freshness_time
elif item.sec_type == SEC_DEF_TYPE.TLS_CHANNEL_SEC:
result[target].sec_def.value = dict(parse_tls_channel_security_definition(sec_def.value))
elif item.sec_type == SEC_DEF_TYPE.XPATH_SEC:
result[target].sec_def.username = sec_def.username
result[target].sec_def.username_expr = sec_def.username_expr
result[target].sec_def.password_expr = sec_def.password_expr
else:
result[target].sec_def = ZATO_NONE
return result, columns
# ################################################################################################################################
def get_sql_internal_service_list(self, cluster_id):
""" Returns a list of service name and IDs for input cluster ID. It represents what is currently found in the ODB
and is used during server startup to decide if any new services should be added from what is found in the filesystem.
"""
with closing(self.session()) as session:
return session.query(
Service.id,
Service.impl_name,
Service.is_active,
Service.slow_threshold,
).\
filter(Service.cluster_id==cluster_id).\
all()
# ################################################################################################################################
def get_basic_data_service_list(self, session):
""" Returns basic information about all the services in ODB.
"""
query = select([
ServiceTable.c.id,
ServiceTable.c.name,
ServiceTable.c.impl_name,
]).where(
ServiceTable.c.cluster_id==self.cluster_id
)
return session.execute(query).\
fetchall()
# ################################################################################################################################
def get_basic_data_deployed_service_list(self):
""" Returns basic information about all the deployed services in ODB.
"""
with closing(self.session()) as session:
query = select([
ServiceTable.c.name,
DeployedServiceTable.c.source,
]).where(and_(
DeployedServiceTable.c.service_id==ServiceTable.c.id,
DeployedServiceTable.c.server_id==self.server_id
))
return session.execute(query).\
fetchall()
# ################################################################################################################################
def add_services(self, session, data):
        # type: (object, list) -> None
try:
session.execute(ServiceTableInsert().values(data))
except IntegrityError:
# This can be ignored because it is possible that there will be
# more than one server trying to insert rows related to services
# that are hot-deployed from web-admin or another source.
logger.debug('Ignoring IntegrityError with `%s`', data)
# ################################################################################################################################
def add_deployed_services(self, session, data):
        # type: (object, list) -> None
session.execute(DeployedServiceInsert().values(data))
# ################################################################################################################################
def drop_deployed_services_by_name(self, session, service_id_list):
session.execute(
DeployedServiceDelete().\
where(DeployedService.service_id.in_(service_id_list))
)
# ################################################################################################################################
def drop_deployed_services(self, server_id):
""" Removes all the deployed services from a server.
"""
with closing(self.session()) as session:
session.execute(
DeployedServiceDelete().\
where(DeployedService.server_id==server_id)
)
session.commit()
# ################################################################################################################################
def is_service_active(self, service_id):
""" Returns whether the given service is active or not.
"""
with closing(self.session()) as session:
return session.query(Service.is_active).\
filter(Service.id==service_id).\
one()[0]
# ################################################################################################################################
def hot_deploy(self, deployment_time, details, payload_name, payload, server_id):
""" Inserts hot-deployed data into the DB along with setting the preliminary
AWAITING_DEPLOYMENT status for each of the servers this server's cluster
is aware of.
"""
with closing(self.session()) as session:
# Create the deployment package info ..
dp = DeploymentPackage()
dp.deployment_time = deployment_time
dp.details = details
dp.payload_name = payload_name
dp.payload = payload
dp.server_id = server_id
# .. add it to the session ..
session.add(dp)
# .. for each of the servers in this cluster set the initial status ..
servers = session.query(Cluster).\
filter(Cluster.id == self.server.cluster_id).\
one().servers
for server in servers:
ds = DeploymentStatus()
ds.package_id = dp.id
ds.server_id = server.id
ds.status = DEPLOYMENT_STATUS.AWAITING_DEPLOYMENT
ds.status_change_time = datetime.utcnow()
session.add(ds)
session.commit()
return dp.id
# ################################################################################################################################
def add_delivery(self, deployment_time, details, service, source_info):
""" Adds information about the server's deployed service into the ODB.
"""
raise NotImplementedError()
# ################################################################################################################################
def get_internal_channel_list(self, cluster_id, needs_columns=False):
""" Returns the list of internal HTTP/SOAP channels, that is,
channels pointing to internal services.
"""
with closing(self.session()) as session:
return query.internal_channel_list(session, cluster_id, needs_columns)
def get_http_soap_list(self, cluster_id, connection=None, transport=None, needs_columns=False):
""" Returns the list of all HTTP/SOAP connections.
"""
with closing(self.session()) as session:
return query.http_soap_list(session, cluster_id, connection, transport, True, None, needs_columns)
# ################################################################################################################################
def get_job_list(self, cluster_id, needs_columns=False):
""" Returns a list of jobs defined on the given cluster.
"""
with closing(self.session()) as session:
return query.job_list(session, cluster_id, None, needs_columns)
# ################################################################################################################################
def get_service_list(self, cluster_id, needs_columns=False):
""" Returns a list of services defined on the given cluster.
"""
with closing(self.session()) as session:
return elems_with_opaque(query.service_list(session, cluster_id, needs_columns=needs_columns))
# ################################################################################################################################
def get_service_id_list(self, session, cluster_id, name_list):
""" Returns a list of IDs matching input service names.
"""
# type: (object, int, list)
return query.service_id_list(session, cluster_id, name_list)
# ################################################################################################################################
def get_service_list_with_include(self, session, cluster_id, include_list, needs_columns=False):
""" Returns a list of all services from the input include_list.
"""
# type: (object, int, list)
return query.service_list_with_include(session, cluster_id, include_list, needs_columns)
# ################################################################################################################################
def get_apikey_security_list(self, cluster_id, needs_columns=False):
""" Returns a list of API keys existing on the given cluster.
"""
with closing(self.session()) as session:
return elems_with_opaque(query.apikey_security_list(session, cluster_id, needs_columns))
# ################################################################################################################################
def get_aws_security_list(self, cluster_id, needs_columns=False):
""" Returns a list of AWS definitions existing on the given cluster.
"""
with closing(self.session()) as session:
return query.aws_security_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_basic_auth_list(self, cluster_id, cluster_name, needs_columns=False):
""" Returns a list of HTTP Basic Auth definitions existing on the given cluster.
"""
with closing(self.session()) as session:
return elems_with_opaque(query.basic_auth_list(session, cluster_id, cluster_name, needs_columns))
# ################################################################################################################################
def get_jwt_list(self, cluster_id, cluster_name, needs_columns=False):
""" Returns a list of JWT definitions existing on the given cluster.
"""
with closing(self.session()) as session:
return elems_with_opaque(query.jwt_list(session, cluster_id, cluster_name, needs_columns))
# ################################################################################################################################
def get_ntlm_list(self, cluster_id, needs_columns=False):
""" Returns a list of NTLM definitions existing on the given cluster.
"""
with closing(self.session()) as session:
return query.ntlm_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_oauth_list(self, cluster_id, needs_columns=False):
""" Returns a list of OAuth accounts existing on the given cluster.
"""
with closing(self.session()) as session:
return query.oauth_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_tls_ca_cert_list(self, cluster_id, needs_columns=False):
""" Returns a list of TLS CA certs on the given cluster.
"""
with closing(self.session()) as session:
return query.tls_ca_cert_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_tls_channel_sec_list(self, cluster_id, needs_columns=False):
""" Returns a list of definitions for securing TLS channels.
"""
with closing(self.session()) as session:
return query.tls_channel_sec_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_tls_key_cert_list(self, cluster_id, needs_columns=False):
""" Returns a list of TLS key/cert pairs on the given cluster.
"""
with closing(self.session()) as session:
return query.tls_key_cert_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_wss_list(self, cluster_id, needs_columns=False):
""" Returns a list of WS-Security definitions on the given cluster.
"""
with closing(self.session()) as session:
return query.wss_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_vault_connection_list(self, cluster_id, needs_columns=False):
""" Returns a list of Vault connections on the given cluster.
"""
with closing(self.session()) as session:
return query.vault_connection_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_xpath_sec_list(self, cluster_id, needs_columns=False):
""" Returns a list of XPath-based security definitions on the given cluster.
"""
with closing(self.session()) as session:
return query.xpath_sec_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_definition_amqp(self, cluster_id, def_id):
""" Returns an AMQP definition's details.
"""
with closing(self.session()) as session:
return query.definition_amqp(session, cluster_id, def_id)
# ################################################################################################################################
def get_definition_amqp_list(self, cluster_id, needs_columns=False):
""" Returns a list of AMQP definitions on the given cluster.
"""
with closing(self.session()) as session:
return query.definition_amqp_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_amqp(self, cluster_id, out_id):
""" Returns an outgoing AMQP connection's details.
"""
with closing(self.session()) as session:
return query.out_amqp(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_amqp_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing AMQP connections.
"""
with closing(self.session()) as session:
return query.out_amqp_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_channel_amqp(self, cluster_id, channel_id):
""" Returns a particular AMQP channel.
"""
with closing(self.session()) as session:
return query.channel_amqp(session, cluster_id, channel_id)
# ################################################################################################################################
def get_channel_amqp_list(self, cluster_id, needs_columns=False):
""" Returns a list of AMQP channels.
"""
with closing(self.session()) as session:
return query.channel_amqp_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_def_wmq(self, cluster_id, def_id):
""" Returns an IBM MQ definition's details.
"""
with closing(self.session()) as session:
return query.definition_wmq(session, cluster_id, def_id)
# ################################################################################################################################
def get_definition_wmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of IBM MQ definitions on the given cluster.
"""
with closing(self.session()) as session:
return query.definition_wmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_wmq(self, cluster_id, out_id):
""" Returns an outgoing IBM MQ connection's details.
"""
with closing(self.session()) as session:
return query.out_wmq(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_wmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing IBM MQ connections.
"""
with closing(self.session()) as session:
return query.out_wmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_channel_wmq(self, cluster_id, channel_id):
""" Returns a particular IBM MQ channel.
"""
with closing(self.session()) as session:
return query.channel_wmq(session, cluster_id, channel_id)
# ################################################################################################################################
def get_channel_wmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of IBM MQ channels.
"""
with closing(self.session()) as session:
return query.channel_wmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_zmq(self, cluster_id, out_id):
""" Returns an outgoing ZMQ connection's details.
"""
with closing(self.session()) as session:
return query.out_zmq(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_zmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing ZMQ connections.
"""
with closing(self.session()) as session:
return query.out_zmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_channel_zmq(self, cluster_id, channel_id):
""" Returns a particular ZMQ channel.
"""
with closing(self.session()) as session:
return query.channel_zmq(session, cluster_id, channel_id)
# ################################################################################################################################
def get_channel_zmq_list(self, cluster_id, needs_columns=False):
""" Returns a list of ZMQ channels.
"""
with closing(self.session()) as session:
return query.channel_zmq_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_channel_file_transfer_list(self, cluster_id, needs_columns=False):
""" Returns a list of file transfer channels.
"""
with closing(self.session()) as session:
return query_generic.connection_list(
session, cluster_id, GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER, needs_columns)
# ################################################################################################################################
def get_channel_web_socket(self, cluster_id, channel_id):
""" Returns a particular WebSocket channel.
"""
with closing(self.session()) as session:
return query.channel_web_socket(session, cluster_id, channel_id)
# ################################################################################################################################
def get_channel_web_socket_list(self, cluster_id, needs_columns=False):
""" Returns a list of WebSocket channels.
"""
with closing(self.session()) as session:
return query.channel_web_socket_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_sql(self, cluster_id, out_id):
""" Returns an outgoing SQL connection's details.
"""
with closing(self.session()) as session:
return query.out_sql(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_sql_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing SQL connections.
"""
with closing(self.session()) as session:
return query.out_sql_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_odoo(self, cluster_id, out_id):
""" Returns an outgoing Odoo connection's details.
"""
with closing(self.session()) as session:
return query.out_odoo(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_odoo_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing Odoo connections.
"""
with closing(self.session()) as session:
return query.out_odoo_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_sap(self, cluster_id, out_id):
""" Returns an outgoing SAP RFC connection's details.
"""
with closing(self.session()) as session:
return query.out_sap(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_sap_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing SAP RFC connections.
"""
with closing(self.session()) as session:
return query.out_sap_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_out_sftp_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing SFTP connections.
"""
with closing(self.session()) as session:
return query_generic.connection_list(session, cluster_id, GENERIC.CONNECTION.TYPE.OUTCONN_SFTP, needs_columns)
# ################################################################################################################################
def get_out_ftp(self, cluster_id, out_id):
""" Returns an outgoing FTP connection's details.
"""
with closing(self.session()) as session:
return query.out_ftp(session, cluster_id, out_id)
# ################################################################################################################################
def get_out_ftp_list(self, cluster_id, needs_columns=False):
""" Returns a list of outgoing FTP connections.
"""
with closing(self.session()) as session:
return query.out_ftp_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cache_builtin(self, cluster_id, id):
""" Returns a built-in cache definition's details.
"""
with closing(self.session()) as session:
return query.cache_builtin(session, cluster_id, id)
# ################################################################################################################################
def get_cache_builtin_list(self, cluster_id, needs_columns=False):
""" Returns a list of built-in cache definitions.
"""
with closing(self.session()) as session:
return query.cache_builtin_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cache_memcached(self, cluster_id, id):
""" Returns a Memcached-based definition's details.
"""
with closing(self.session()) as session:
return query.cache_memcached(session, cluster_id, id)
# ################################################################################################################################
def get_cache_memcached_list(self, cluster_id, needs_columns=False):
""" Returns a list of Memcached-based cache definitions.
"""
with closing(self.session()) as session:
return query.cache_memcached_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_namespace_list(self, cluster_id, needs_columns=False):
""" Returns a list of XML namespaces.
"""
with closing(self.session()) as session:
return query.namespace_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_xpath_list(self, cluster_id, needs_columns=False):
""" Returns a list of XPath expressions.
"""
with closing(self.session()) as session:
return query.xpath_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_json_pointer_list(self, cluster_id, needs_columns=False):
""" Returns a list of JSON Pointer expressions.
"""
with closing(self.session()) as session:
return query.json_pointer_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cloud_aws_s3_list(self, cluster_id, needs_columns=False):
""" Returns a list of AWS S3 connections.
"""
with closing(self.session()) as session:
return query.cloud_aws_s3_list(session, cluster_id, needs_columns)
# ################################################################################################################################
def get_pubsub_topic_list(self, cluster_id, needs_columns=False):
""" Returns a list of pub/sub topics defined in a cluster.
"""
return elems_with_opaque(query.pubsub_topic_list(self._session, cluster_id, needs_columns))
# ################################################################################################################################
def get_pubsub_subscription_list(self, cluster_id, needs_columns=False):
""" Returns a list of pub/sub subscriptions defined in a cluster.
"""
return query_ps_subscription.pubsub_subscription_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_notif_sql_list(self, cluster_id, needs_columns=False):
""" Returns a list of SQL notification definitions.
"""
return query.notif_sql_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cassandra_conn_list(self, cluster_id, needs_columns=False):
""" Returns a list of Cassandra connections.
"""
return query.cassandra_conn_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_cassandra_query_list(self, cluster_id, needs_columns=False):
""" Returns a list of Cassandra queries.
"""
return query.cassandra_query_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_search_es_list(self, cluster_id, needs_columns=False):
""" Returns a list of ElasticSearch connections.
"""
return query.search_es_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_search_solr_list(self, cluster_id, needs_columns=False):
""" Returns a list of Solr connections.
"""
return query.search_solr_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_sms_twilio_list(self, cluster_id, needs_columns=False):
""" Returns a list of Twilio connections.
"""
return query.sms_twilio_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_email_smtp_list(self, cluster_id, needs_columns=False):
""" Returns a list of SMTP connections.
"""
return query.email_smtp_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_email_imap_list(self, cluster_id, needs_columns=False):
""" Returns a list of IMAP connections.
"""
return query.email_imap_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_rbac_permission_list(self, cluster_id, needs_columns=False):
""" Returns a list of RBAC permissions.
"""
return query.rbac_permission_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_rbac_role_list(self, cluster_id, needs_columns=False):
""" Returns a list of RBAC roles.
"""
return query.rbac_role_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_rbac_client_role_list(self, cluster_id, needs_columns=False):
""" Returns a list of RBAC roles assigned to clients.
"""
return query.rbac_client_role_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_rbac_role_permission_list(self, cluster_id, needs_columns=False):
""" Returns a list of RBAC permissions for roles.
"""
return query.rbac_role_permission_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_pubsub_endpoint_list(self, cluster_id, needs_columns=False):
""" Returns a list of pub/sub endpoints.
"""
return query.pubsub_endpoint_list(self._session, cluster_id, needs_columns)
# ################################################################################################################################
def get_generic_connection_list(self, cluster_id, needs_columns=False):
""" Returns a list of generic connections.
"""
return query_generic.connection_list(self._session, cluster_id, needs_columns=needs_columns)
# ################################################################################################################################
def get_sso_user_rate_limiting_info(self):
""" Returns a list of SSO users that have rate limiting enabled.
"""
with closing(self.session()) as session:
return get_sso_user_rate_limiting_info(session)
# ################################################################################################################################
def _migrate_30_encrypt_sec_base(self, session, id, attr_name, encrypted_value):
""" Sets an encrypted value of a named attribute in a security definition.
"""
item = session.query(SecurityBase).\
filter(SecurityBase.id==id).\
one()
setattr(item, attr_name, encrypted_value)
session.add(item)
_migrate_30_encrypt_sec_apikey = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_aws = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_basic_auth = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_jwt = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_ntlm = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_oauth = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_vault_conn_sec = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_wss = _migrate_30_encrypt_sec_base
_migrate_30_encrypt_sec_xpath_sec = _migrate_30_encrypt_sec_base
# ################################################################################################################################
# (end of file: src/zato/common/odb/api.py)
from __future__ import absolute_import, division, print_function, unicode_literals
# ################################################################################################################################
def ping_database(params, ping_query):
    """ Confirms that a database is reachable - connects to it and runs the input ping query.
    """
    connection = None
try:
#
# MySQL
#
if params['engine'].startswith('mysql'):
import pymysql
connection = pymysql.connect(
host = params['host'],
port = int(params['port']),
user = params['username'],
password = params['password'],
db = params['db_name'],
)
#
# PostgreSQL
#
elif params['engine'].startswith('postgres'):
import pg8000
connection = pg8000.connect(
host = params['host'],
port = int(params['port']),
user = params['username'],
password = params['password'],
database = params['db_name'],
)
#
# SQLite
#
elif params['engine'].startswith('sqlite'):
pass
#
# Unrecognised
#
else:
raise ValueError('Unrecognised database `{}`'.format(params['engine']))
        #
        # Now that we are connected, run the ping query to confirm
        # that the database actually responds - SQLite needs no ping.
        #
        if connection:
            cursor = connection.cursor()
            cursor.execute(ping_query)
            cursor.close()
    finally:
if connection:
connection.close()
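# A minimal usage sketch - the values below are illustrative only, the actual
# params dict comes from a server's SQL connection configuration:
#
#   params = {
#       'engine': 'postgresql+pg8000',
#       'host': 'localhost',
#       'port': '5432',
#       'username': 'zato',
#       'password': 'secret',
#       'db_name': 'zato',
#   }
#   ping_database(params, 'SELECT 1')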
# ################################################################################################################################
def create_pool(engine_params, ping_query, query_class=None):
# stdlib
import copy
# SQLAlchemy
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
# Zato
from zato.common.util.api import get_engine_url
engine_params = copy.deepcopy(engine_params)
if engine_params['engine'] != 'sqlite':
engine_params['password'] = str(engine_params['password'])
engine_params['extra']['pool_size'] = engine_params.pop('pool_size')
engine = create_engine(get_engine_url(engine_params), **engine_params.get('extra', {}))
engine.execute(ping_query)
Session = sessionmaker()
Session.configure(bind=engine, query_cls=query_class)
session = Session()
return session
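# A minimal usage sketch - values are illustrative; 'extra' holds keyword
# arguments passed through to SQLAlchemy's create_engine:
#
#   engine_params = {
#       'engine': 'postgresql+pg8000',
#       'host': 'localhost',
#       'port': '5432',
#       'username': 'zato',
#       'password': 'secret',
#       'db_name': 'zato',
#       'pool_size': 10,
#       'extra': {},
#   }
#   session = create_pool(engine_params, 'SELECT 1')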
# ################################################################################################################################
# Taken from http://www.siafoo.net/snippet/85
# Licensed under BSD2 - http://opensource.org/licenses/bsd-license.php
def drop_all(engine):
""" Drops all tables and sequences (but not VIEWS) from a Postgres database
"""
# stdlib
import logging
from traceback import format_exc
# SQLAlchemy
from sqlalchemy.sql import text
logger = logging.getLogger('zato')
    sequence_sql = """SELECT sequence_name FROM information_schema.sequences
WHERE sequence_schema='public'
"""
    table_sql = """SELECT table_name FROM information_schema.tables
WHERE table_schema='public' AND table_type != 'VIEW' AND table_name NOT LIKE 'pg_ts_%%'
"""
for table in [name for (name,) in engine.execute(text(table_sql))]:
try:
engine.execute(text('DROP TABLE %s CASCADE' % table))
except Exception:
            logger.warning(format_exc())
for seq in [name for (name,) in engine.execute(text(sequence_sql))]:
try:
engine.execute(text('DROP SEQUENCE %s CASCADE' % seq))
except Exception:
            logger.warning(format_exc())
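# Usage sketch - assumes a plain SQLAlchemy engine connected to PostgreSQL:
#
#   from sqlalchemy import create_engine
#   drop_all(create_engine('postgresql+pg8000://zato:secret@localhost/zato'))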
# ################################################################################################################################
# (end of file: src/zato/common/odb/__init__.py)
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# SQLAlchemy
from sqlalchemy import and_, exists, insert, update
# Zato
from zato.common.api import GENERIC, FILE_TRANSFER
from zato.common.odb.model import GenericConn as ModelGenericConn, GenericObject as ModelGenericObject
from zato.common.odb.query import query_wrapper
from zato.common.util.sql import get_dict_with_opaque
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger('zato')
# ################################################################################################################################
# ################################################################################################################################
_generic_attr_name = GENERIC.ATTR_NAME
ModelGenericObjectTable = ModelGenericObject.__table__
# ################################################################################################################################
# ################################################################################################################################
class GenericObjectWrapper:
""" Wraps access to generic objects.
"""
type_ = None
subtype = None
model_class = ModelGenericObject
def __init__(self, session, cluster_id):
# type: (object, int)
self.session = session
self.cluster_id = cluster_id
# ################################################################################################################################
def _build_get_where_query(self, name):
# type: (str) -> object
return and_(
self.model_class.name==name,
self.model_class.type_==self.type_,
self.model_class.cluster_id==self.cluster_id,
)
# ################################################################################################################################
def get(self, name):
# type: (str) -> object
item = self.session.query(self.model_class).\
filter(self.model_class.name==name).\
filter(self.model_class.type_==self.type_).\
filter(self.model_class.cluster_id==self.cluster_id).\
first()
return get_dict_with_opaque(item) if item else None
# ################################################################################################################################
def exists(self, name):
""" Returns a boolean flag indicating whether the input name is already stored in the ODB. False otherwise.
"""
where_query = self._build_get_where_query(name)
exists_query = exists().where(where_query)
return self.session.query(exists_query).\
scalar()
# ################################################################################################################################
def create(self, name, opaque):
""" Creates a new row for input data.
"""
return insert(self.model_class).values(**{
'name': name,
'type_': self.type_,
'subtype': self.subtype,
'cluster_id': self.cluster_id,
_generic_attr_name: opaque,
        })
# ################################################################################################################################
    def update(self, name, opaque):
""" Updates an already existing object.
"""
# type: (str, str) -> object
return update(ModelGenericObjectTable).\
values({
_generic_attr_name: opaque,
}).\
where(and_(
ModelGenericObjectTable.c.name==name,
ModelGenericObjectTable.c.type_==self.type_,
ModelGenericObjectTable.c.cluster_id==self.cluster_id,
))
# ################################################################################################################################
def store(self, name, opaque):
""" Inserts new data or updates an already existing row matching the input.
"""
# type: (str, str)
already_exists = self.exists(name)
query = self.update(name, opaque) if already_exists else self.create(name, opaque)
self.session.execute(query)
self.session.commit()
# ################################################################################################################################
# ################################################################################################################################
class FileTransferWrapper(GenericObjectWrapper):
type_ = GENERIC.CONNECTION.TYPE.CHANNEL_FILE_TRANSFER
class FTPFileTransferWrapper(FileTransferWrapper):
subtype = FILE_TRANSFER.SOURCE_TYPE.FTP.id
class SFTPFileTransferWrapper(FileTransferWrapper):
subtype = FILE_TRANSFER.SOURCE_TYPE.SFTP.id
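# A minimal usage sketch - subclasses only differ in their type_/subtype
# constants; the name and opaque JSON value below are illustrative:
#
#   wrapper = FTPFileTransferWrapper(session, cluster_id)
#   wrapper.store('my.transfer.channel', '{"key":"value"}') # Insert or update
#   item = wrapper.get('my.transfer.channel') # A dict with opaque attributes inlined, or None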
# ################################################################################################################################
# ################################################################################################################################
@query_wrapper
def connection_list(session, cluster_id, type_=None, needs_columns=False):
""" A list of generic connections by their type.
"""
q = session.query(ModelGenericConn).\
filter(ModelGenericConn.cluster_id==cluster_id)
if type_:
q = q.filter(ModelGenericConn.type_==type_)
q = q.order_by(ModelGenericConn.name)
return q
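# An illustrative call - list all outgoing SFTP connections in a cluster,
# without the accompanying column list:
#
#   result = connection_list(session, cluster_id,
#       GENERIC.CONNECTION.TYPE.OUTCONN_SFTP, False)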
# ################################################################################################################################
# ################################################################################################################################
# (end of file: src/zato/common/odb/query/generic.py)
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
from functools import wraps
# Bunch
from bunch import bunchify
# SQLAlchemy
from sqlalchemy import and_, func, not_, or_
from sqlalchemy.orm import aliased
from sqlalchemy.sql.expression import case
# Zato
from zato.common.api import CACHE, DEFAULT_HTTP_PING_METHOD, DEFAULT_HTTP_POOL_SIZE, GENERIC, HTTP_SOAP_SERIALIZATION_TYPE, \
PARAMS_PRIORITY, PUBSUB, URL_PARAMS_PRIORITY
from zato.common.json_internal import loads
from zato.common.odb.model import AWSS3, APIKeySecurity, AWSSecurity, Cache, CacheBuiltin, CacheMemcached, CassandraConn, \
CassandraQuery, ChannelAMQP, ChannelWebSocket, ChannelWMQ, ChannelZMQ, Cluster, ConnDefAMQP, ConnDefWMQ, \
CronStyleJob, ElasticSearch, HTTPBasicAuth, HTTPSOAP, IMAP, IntervalBasedJob, Job, JSONPointer, JWT, \
MsgNamespace, NotificationSQL as NotifSQL, NTLM, OAuth, OutgoingOdoo, \
OutgoingAMQP, OutgoingFTP, OutgoingWMQ, OutgoingZMQ, PubSubEndpoint, \
PubSubEndpointTopic, PubSubEndpointEnqueuedMessage, PubSubMessage, PubSubSubscription, PubSubTopic, RBACClientRole, \
RBACPermission, RBACRole, RBACRolePermission, SecurityBase, Server, Service, SMSTwilio, SMTP, Solr, SQLConnectionPool, \
TLSCACert, TLSChannelSecurity, TLSKeyCertSecurity, WebSocketClient, WebSocketClientPubSubKeys, WebSocketSubscription, \
WSSDefinition, VaultConnection, XPath, XPathSecurity, OutgoingSAP
from zato.common.util.search import SearchResults as _SearchResults
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
_not_given = object()
_no_page_limit = 2 ** 24 # ~16.7 million results, tops
_gen_attr = GENERIC.ATTR_NAME
# ################################################################################################################################
def count(session, q):
_q = q.statement.with_only_columns([func.count()]).order_by(None)
return session.execute(_q).scalar()
# ################################################################################################################################
class _QueryConfig:
@staticmethod
def supports_kwargs(query_func):
""" Returns True if the given query func supports kwargs, False otherwise.
"""
return query_func in (
http_soap_list,
)
# ################################################################################################################################
class _SearchWrapper(object):
""" Wraps results in pagination and/or filters out objects by their name or other attributes.
"""
def __init__(self, q, default_page_size=_no_page_limit, **config):
# Apply WHERE conditions
where = config.get('where') or _not_given
if where is not _not_given:
q = q.filter(where)
else:
            # If multiple filters are given, they are OR-joined by default
            # to make look-ups over more than one column easier.
filter_op = and_ if config.get('filter_op') == 'and' else or_
filters = []
for filter_by in config.get('filter_by', []):
for criterion in config.get('query', []):
filters.append(filter_by.contains(criterion))
q = q.filter(filter_op(*filters))
# Total number of results
total_q = q.statement.with_only_columns([func.count()]).order_by(None)
self.total = q.session.execute(total_q).scalar()
# Pagination
page_size = config.get('page_size', default_page_size)
cur_page = config.get('cur_page', 0)
slice_from = cur_page * page_size
slice_to = slice_from + page_size
self.q = q.slice(slice_from, slice_to)
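# An illustrative configuration - keep Service rows whose name contains
# either of the two criteria (OR-joined by default) and return the second
# page of 50 results:
#
#   tool = _SearchWrapper(q, filter_by=[Service.name], query=['http', 'soap'],
#       page_size=50, cur_page=1)
#   results = tool.q.all()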
# ################################################################################################################################
def query_wrapper(func):
""" A decorator for queries which works out whether a given query function should return the result only
or a column list retrieved in addition to the result. This is useful because some callers prefer the former
and some need the latter. Also, paginates the results if requested to by the caller.
"""
@wraps(func)
def inner(*args, **kwargs):
# Each query function will have the last argument either False or True
# depending on whether columns are needed or not.
needs_columns = args[-1]
if _QueryConfig.supports_kwargs(func):
result = func(*args, **kwargs)
else:
result = func(*args)
tool = _SearchWrapper(result, **kwargs)
result = _SearchResults(tool.q, tool.q.all(), tool.q.statement.columns, tool.total)
if needs_columns:
return result, result.columns
return result
return inner
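# How a wrapped query is typically invoked - needs_columns is detected
# as the last positional argument, while paging kwargs such as cur_page
# and page_size are handed over to _SearchWrapper:
#
#   result = job_list(session, cluster_id, None, False, cur_page=1, page_size=50)
#   result, columns = job_list(session, cluster_id, None, True)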
# ################################################################################################################################
def bunch_maker(func):
""" Turns SQLAlchemy rows into bunch instances, taking opaque elements into account.
"""
@wraps(func)
def inner(*args, **kwargs):
result = func(*args, **kwargs)
out = bunchify(result._asdict())
opaque = out.pop(_gen_attr, None)
if opaque:
opaque = loads(opaque)
out.update(opaque)
return out
return inner
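# Illustrative input/output - given a row whose opaque column (opaque1)
# holds '{"timeout": 30}', the decorated function returns a Bunch with
# that attribute inlined:
#
#   {'id': 1, 'name': 'abc', 'opaque1': '{"timeout": 30}'} -> Bunch(id=1, name='abc', timeout=30)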
# ################################################################################################################################
def internal_channel_list(session, cluster_id):
""" All the HTTP/SOAP channels that point to internal services.
"""
return session.query(
HTTPSOAP.soap_action, Service.name).\
filter(HTTPSOAP.cluster_id==Cluster.id).\
filter(HTTPSOAP.service_id==Service.id).\
filter(Service.is_internal==True).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==HTTPSOAP.cluster_id) # noqa: E712
# ################################################################################################################################
def _job(session, cluster_id):
return session.query(
Job.id,
Job.name,
Job.is_active,
Job.job_type,
Job.start_date,
Job.extra,
Service.name.label('service_name'),
Service.impl_name.label('service_impl_name'),
Service.id.label('service_id'),
IntervalBasedJob.weeks,
IntervalBasedJob.days,
IntervalBasedJob.hours,
IntervalBasedJob.minutes,
IntervalBasedJob.seconds,
IntervalBasedJob.repeats,
CronStyleJob.cron_definition
).\
outerjoin(IntervalBasedJob, Job.id==IntervalBasedJob.job_id).\
outerjoin(CronStyleJob, Job.id==CronStyleJob.job_id).\
filter(Job.cluster_id==Cluster.id).\
filter(Job.service_id==Service.id).\
filter(Cluster.id==cluster_id)
@query_wrapper
def job_list(session, cluster_id, service_name=None, needs_columns=False):
""" All the scheduler's jobs defined in the ODB.
"""
q = _job(session, cluster_id)
if service_name:
q = q.filter(Service.name==service_name)
return q.\
order_by(Job.name)
def job_by_id(session, cluster_id, job_id):
""" A scheduler's job fetched by its ID.
"""
return _job(session, cluster_id).\
filter(Job.id==job_id).\
one()
def job_by_name(session, cluster_id, name):
""" A scheduler's job fetched by its name.
"""
return _job(session, cluster_id).\
filter(Job.name==name).\
one()
# ################################################################################################################################
def _sec_base(session, cluster_id):
return session.query(
SecurityBase.id,
SecurityBase.is_active,
SecurityBase.sec_type,
SecurityBase.name,
SecurityBase.username).\
filter(SecurityBase.cluster_id==Cluster.id).\
filter(Cluster.id==cluster_id)
def sec_base(session, cluster_id, sec_base_id):
return _sec_base(session, cluster_id).\
filter(SecurityBase.id==sec_base_id).\
one()
@query_wrapper
def apikey_security_list(session, cluster_id, needs_columns=False):
""" All the API keys.
"""
return session.query(
APIKeySecurity.id,
APIKeySecurity.name,
APIKeySecurity.is_active,
APIKeySecurity.username,
APIKeySecurity.password,
APIKeySecurity.sec_type,
APIKeySecurity.opaque1,
).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==APIKeySecurity.cluster_id).\
filter(SecurityBase.id==APIKeySecurity.id).\
order_by(SecurityBase.name)
@query_wrapper
def aws_security_list(session, cluster_id, needs_columns=False):
""" All the Amazon security definitions.
"""
return session.query(
AWSSecurity.id, AWSSecurity.name,
AWSSecurity.is_active,
AWSSecurity.username,
AWSSecurity.password, AWSSecurity.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==AWSSecurity.cluster_id).\
filter(SecurityBase.id==AWSSecurity.id).\
order_by(SecurityBase.name)
@query_wrapper
def basic_auth_list(session, cluster_id, cluster_name, needs_columns=False):
""" All the HTTP Basic Auth definitions.
"""
q = session.query(
HTTPBasicAuth.id,
HTTPBasicAuth.name,
HTTPBasicAuth.is_active,
HTTPBasicAuth.username,
HTTPBasicAuth.realm,
HTTPBasicAuth.password,
HTTPBasicAuth.sec_type,
HTTPBasicAuth.password_type,
HTTPBasicAuth.opaque1,
Cluster.id.label('cluster_id'), Cluster.name.label('cluster_name')).\
filter(Cluster.id==HTTPBasicAuth.cluster_id)
if cluster_id:
q = q.filter(Cluster.id==cluster_id)
else:
q = q.filter(Cluster.name==cluster_name)
q = q.filter(SecurityBase.id==HTTPBasicAuth.id).\
order_by(SecurityBase.name)
return q
def _jwt(session, cluster_id, cluster_name, needs_columns=False):
""" All the JWT definitions.
"""
q = session.query(
JWT.id,
JWT.name,
JWT.is_active,
JWT.username,
JWT.password,
JWT.ttl,
JWT.sec_type,
JWT.password_type,
JWT.opaque1,
Cluster.id.label('cluster_id'),
Cluster.name.label('cluster_name')).\
filter(Cluster.id==JWT.cluster_id)
if cluster_id:
q = q.filter(Cluster.id==cluster_id)
else:
q = q.filter(Cluster.name==cluster_name)
q = q.filter(SecurityBase.id==JWT.id).\
order_by(SecurityBase.name)
return q
@query_wrapper
def jwt_list(*args, **kwargs):
return _jwt(*args, **kwargs)
def jwt_by_username(session, cluster_id, username, needs_columns=False):
""" An individual JWT definition by its username.
"""
return _jwt(session, cluster_id, None, needs_columns).\
filter(JWT.username==username).\
one()
@query_wrapper
def ntlm_list(session, cluster_id, needs_columns=False):
""" All the NTLM definitions.
"""
return session.query(
NTLM.id, NTLM.name,
NTLM.is_active,
NTLM.username,
NTLM.password, NTLM.sec_type,
NTLM.password_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==NTLM.cluster_id).\
filter(SecurityBase.id==NTLM.id).\
order_by(SecurityBase.name)
@query_wrapper
def oauth_list(session, cluster_id, needs_columns=False):
""" All the OAuth definitions.
"""
return session.query(
OAuth.id, OAuth.name,
OAuth.is_active,
OAuth.username, OAuth.password,
OAuth.proto_version, OAuth.sig_method,
OAuth.max_nonce_log, OAuth.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==OAuth.cluster_id).\
filter(SecurityBase.id==OAuth.id).\
order_by(SecurityBase.name)
@query_wrapper
def tls_ca_cert_list(session, cluster_id, needs_columns=False):
""" TLS CA certs.
"""
return session.query(TLSCACert).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==TLSCACert.cluster_id).\
order_by(TLSCACert.name)
@query_wrapper
def tls_channel_sec_list(session, cluster_id, needs_columns=False):
""" TLS-based channel security.
"""
return session.query(
TLSChannelSecurity.id, TLSChannelSecurity.name,
TLSChannelSecurity.is_active, TLSChannelSecurity.value,
TLSChannelSecurity.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==TLSChannelSecurity.cluster_id).\
filter(SecurityBase.id==TLSChannelSecurity.id).\
order_by(SecurityBase.name)
@query_wrapper
def tls_key_cert_list(session, cluster_id, needs_columns=False):
""" TLS key/cert pairs.
"""
return session.query(
TLSKeyCertSecurity.id, TLSKeyCertSecurity.name,
TLSKeyCertSecurity.is_active, TLSKeyCertSecurity.info,
TLSKeyCertSecurity.auth_data, TLSKeyCertSecurity.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==TLSKeyCertSecurity.cluster_id).\
filter(SecurityBase.id==TLSKeyCertSecurity.id).\
order_by(SecurityBase.name)
@query_wrapper
def wss_list(session, cluster_id, needs_columns=False):
""" All the WS-Security definitions.
"""
return session.query(
WSSDefinition.id, WSSDefinition.name, WSSDefinition.is_active,
WSSDefinition.username, WSSDefinition.password, WSSDefinition.password_type,
WSSDefinition.reject_empty_nonce_creat, WSSDefinition.reject_stale_tokens,
WSSDefinition.reject_expiry_limit, WSSDefinition.nonce_freshness_time,
WSSDefinition.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==WSSDefinition.cluster_id).\
filter(SecurityBase.id==WSSDefinition.id).\
order_by(SecurityBase.name)
@query_wrapper
def xpath_sec_list(session, cluster_id, needs_columns=False):
""" All the XPath security definitions.
"""
return session.query(
XPathSecurity.id, XPathSecurity.name, XPathSecurity.is_active, XPathSecurity.username, XPathSecurity.username_expr,
XPathSecurity.password_expr, XPathSecurity.password, XPathSecurity.sec_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==XPathSecurity.cluster_id).\
filter(SecurityBase.id==XPathSecurity.id).\
order_by(SecurityBase.name)
# ################################################################################################################################
def _definition_amqp(session, cluster_id):
return session.query(
ConnDefAMQP.name, ConnDefAMQP.id, ConnDefAMQP.host,
ConnDefAMQP.port, ConnDefAMQP.vhost, ConnDefAMQP.username,
ConnDefAMQP.frame_max, ConnDefAMQP.heartbeat, ConnDefAMQP.password).\
filter(Cluster.id==ConnDefAMQP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ConnDefAMQP.name)
def definition_amqp(session, cluster_id, id):
""" A particular AMQP definition
"""
return _definition_amqp(session, cluster_id).\
filter(ConnDefAMQP.id==id).\
one()
@query_wrapper
def definition_amqp_list(session, cluster_id, needs_columns=False):
""" AMQP connection definitions.
"""
return _definition_amqp(session, cluster_id)
# ################################################################################################################################
def _def_wmq(session, cluster_id):
return session.query(
ConnDefWMQ.id, ConnDefWMQ.name, ConnDefWMQ.host,
ConnDefWMQ.port, ConnDefWMQ.queue_manager, ConnDefWMQ.channel,
ConnDefWMQ.cache_open_send_queues, ConnDefWMQ.cache_open_receive_queues,
ConnDefWMQ.use_shared_connections, ConnDefWMQ.ssl, ConnDefWMQ.ssl_cipher_spec,
ConnDefWMQ.ssl_key_repository, ConnDefWMQ.needs_mcd, ConnDefWMQ.max_chars_printed,
ConnDefWMQ.username, ConnDefWMQ.password, ConnDefWMQ.use_jms).\
filter(Cluster.id==ConnDefWMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ConnDefWMQ.name)
def definition_wmq(session, cluster_id, id):
""" A particular IBM MQ definition
"""
return _def_wmq(session, cluster_id).\
filter(ConnDefWMQ.id==id).\
one()
@query_wrapper
def definition_wmq_list(session, cluster_id, needs_columns=False):
""" IBM MQ connection definitions.
"""
return _def_wmq(session, cluster_id)
# ################################################################################################################################
def _out_amqp(session, cluster_id):
return session.query(
OutgoingAMQP.id, OutgoingAMQP.name, OutgoingAMQP.is_active,
OutgoingAMQP.delivery_mode, OutgoingAMQP.priority, OutgoingAMQP.content_type,
OutgoingAMQP.content_encoding, OutgoingAMQP.expiration, OutgoingAMQP.pool_size, OutgoingAMQP.user_id,
OutgoingAMQP.app_id, ConnDefAMQP.name.label('def_name'), OutgoingAMQP.def_id).\
filter(OutgoingAMQP.def_id==ConnDefAMQP.id).\
filter(ConnDefAMQP.id==OutgoingAMQP.def_id).\
filter(Cluster.id==ConnDefAMQP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(OutgoingAMQP.name)
def out_amqp(session, cluster_id, id):
""" An outgoing AMQP connection.
"""
return _out_amqp(session, cluster_id).\
filter(OutgoingAMQP.id==id).\
one()
@query_wrapper
def out_amqp_list(session, cluster_id, needs_columns=False):
""" Outgoing AMQP connections.
"""
return _out_amqp(session, cluster_id)
# ################################################################################################################################
def _out_wmq(session, cluster_id):
return session.query(
OutgoingWMQ.id, OutgoingWMQ.name, OutgoingWMQ.is_active,
OutgoingWMQ.delivery_mode, OutgoingWMQ.priority, OutgoingWMQ.expiration,
ConnDefWMQ.name.label('def_name'), OutgoingWMQ.def_id).\
filter(OutgoingWMQ.def_id==ConnDefWMQ.id).\
filter(ConnDefWMQ.id==OutgoingWMQ.def_id).\
filter(Cluster.id==ConnDefWMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(OutgoingWMQ.name)
def out_wmq(session, cluster_id, id):
""" An outgoing IBM MQ connection (by ID).
"""
return _out_wmq(session, cluster_id).\
filter(OutgoingWMQ.id==id).\
one()
def out_wmq_by_name(session, cluster_id, name):
""" An outgoing IBM MQ connection (by name).
"""
return _out_wmq(session, cluster_id).\
filter(OutgoingWMQ.name==name).\
first()
@query_wrapper
def out_wmq_list(session, cluster_id, needs_columns=False):
""" Outgoing IBM MQ connections.
"""
return _out_wmq(session, cluster_id)
# ################################################################################################################################
def _channel_amqp(session, cluster_id):
return session.query(
ChannelAMQP.id, ChannelAMQP.name, ChannelAMQP.is_active,
ChannelAMQP.queue, ChannelAMQP.consumer_tag_prefix,
ConnDefAMQP.name.label('def_name'), ChannelAMQP.def_id,
ChannelAMQP.pool_size, ChannelAMQP.ack_mode, ChannelAMQP.prefetch_count,
ChannelAMQP.data_format,
Service.name.label('service_name'),
Service.impl_name.label('service_impl_name')).\
filter(ChannelAMQP.def_id==ConnDefAMQP.id).\
filter(ChannelAMQP.service_id==Service.id).\
filter(Cluster.id==ConnDefAMQP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ChannelAMQP.name)
def channel_amqp(session, cluster_id, id):
""" A particular AMQP channel.
"""
return _channel_amqp(session, cluster_id).\
filter(ChannelAMQP.id==id).\
one()
@query_wrapper
def channel_amqp_list(session, cluster_id, needs_columns=False):
""" AMQP channels.
"""
return _channel_amqp(session, cluster_id)
# ################################################################################################################################
def _channel_wmq(session, cluster_id):
return session.query(
ChannelWMQ.id, ChannelWMQ.name, ChannelWMQ.is_active,
ChannelWMQ.queue, ConnDefWMQ.name.label('def_name'), ChannelWMQ.def_id,
ChannelWMQ.data_format, Service.name.label('service_name'),
Service.impl_name.label('service_impl_name')).\
filter(ChannelWMQ.def_id==ConnDefWMQ.id).\
filter(ChannelWMQ.service_id==Service.id).\
filter(Cluster.id==ConnDefWMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ChannelWMQ.name)
def channel_wmq(session, cluster_id, id):
""" A particular IBM MQ channel.
"""
return _channel_wmq(session, cluster_id).\
filter(ChannelWMQ.id==id).\
one()
@query_wrapper
def channel_wmq_list(session, cluster_id, needs_columns=False):
""" IBM MQ channels.
"""
return _channel_wmq(session, cluster_id)
# ################################################################################################################################
def _out_zmq(session, cluster_id):
return session.query(
OutgoingZMQ.id, OutgoingZMQ.name, OutgoingZMQ.is_active,
OutgoingZMQ.address, OutgoingZMQ.socket_type, OutgoingZMQ.socket_method).\
filter(Cluster.id==OutgoingZMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(OutgoingZMQ.name)
def out_zmq(session, cluster_id, id):
""" An outgoing ZeroMQ connection.
"""
return _out_zmq(session, cluster_id).\
filter(OutgoingZMQ.id==id).\
one()
@query_wrapper
def out_zmq_list(session, cluster_id, needs_columns=False):
""" Outgoing ZeroMQ connections.
"""
return _out_zmq(session, cluster_id)
# ################################################################################################################################
def _channel_zmq(session, cluster_id):
return session.query(
ChannelZMQ.id, ChannelZMQ.name, ChannelZMQ.is_active,
ChannelZMQ.address, ChannelZMQ.socket_type, ChannelZMQ.socket_method, ChannelZMQ.sub_key,
ChannelZMQ.pool_strategy, ChannelZMQ.service_source, ChannelZMQ.data_format,
Service.name.label('service_name'), Service.impl_name.label('service_impl_name')).\
filter(Service.id==ChannelZMQ.service_id).\
filter(Cluster.id==ChannelZMQ.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ChannelZMQ.name)
def channel_zmq(session, cluster_id, id):
""" An incoming ZeroMQ connection.
"""
return _channel_zmq(session, cluster_id).\
filter(ChannelZMQ.id==id).\
one()
@query_wrapper
def channel_zmq_list(session, cluster_id, needs_columns=False):
""" Incoming ZeroMQ connections.
"""
return _channel_zmq(session, cluster_id)
# ################################################################################################################################
def _http_soap(session, cluster_id):
return session.query(
HTTPSOAP.id,
HTTPSOAP.name,
HTTPSOAP.is_active,
HTTPSOAP.is_internal,
HTTPSOAP.transport,
HTTPSOAP.host,
HTTPSOAP.url_path,
HTTPSOAP.method,
HTTPSOAP.soap_action,
HTTPSOAP.soap_version,
HTTPSOAP.data_format,
HTTPSOAP.security_id,
HTTPSOAP.has_rbac,
HTTPSOAP.connection,
HTTPSOAP.content_type,
case([(HTTPSOAP.ping_method != None, HTTPSOAP.ping_method)], else_=DEFAULT_HTTP_PING_METHOD).label('ping_method'), # noqa
case([(HTTPSOAP.pool_size != None, HTTPSOAP.pool_size)], else_=DEFAULT_HTTP_POOL_SIZE).label('pool_size'),
case([(HTTPSOAP.merge_url_params_req != None, HTTPSOAP.merge_url_params_req)], else_=True).label('merge_url_params_req'),
case([(HTTPSOAP.url_params_pri != None, HTTPSOAP.url_params_pri)], else_=URL_PARAMS_PRIORITY.DEFAULT).label('url_params_pri'),
case([(HTTPSOAP.params_pri != None, HTTPSOAP.params_pri)], else_=PARAMS_PRIORITY.DEFAULT).label('params_pri'),
case([(
HTTPSOAP.serialization_type != None, HTTPSOAP.serialization_type)],
else_=HTTP_SOAP_SERIALIZATION_TYPE.DEFAULT.id).label('serialization_type'),
HTTPSOAP.timeout,
HTTPSOAP.sec_tls_ca_cert_id,
HTTPSOAP.sec_use_rbac,
HTTPSOAP.cache_id,
HTTPSOAP.cache_expiry,
HTTPSOAP.content_encoding,
HTTPSOAP.opaque1,
Cache.name.label('cache_name'),
Cache.cache_type,
TLSCACert.name.label('sec_tls_ca_cert_name'),
SecurityBase.sec_type,
Service.name.label('service_name'),
Service.id.label('service_id'),
Service.impl_name.label('service_impl_name'),
SecurityBase.name.label('security_name'),
SecurityBase.username.label('username'),
SecurityBase.password.label('password'),
SecurityBase.password_type.label('password_type'),).\
outerjoin(Service, Service.id==HTTPSOAP.service_id).\
outerjoin(Cache, Cache.id==HTTPSOAP.cache_id).\
outerjoin(TLSCACert, TLSCACert.id==HTTPSOAP.sec_tls_ca_cert_id).\
outerjoin(SecurityBase, HTTPSOAP.security_id==SecurityBase.id).\
filter(Cluster.id==HTTPSOAP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(HTTPSOAP.name)
def http_soap_security_list(session, cluster_id, connection=None):
""" HTTP/SOAP security definitions.
"""
q = _http_soap(session, cluster_id)
if connection:
q = q.filter(HTTPSOAP.connection==connection)
return q
def http_soap(session, cluster_id, item_id=None, name=None):
""" An HTTP/SOAP connection.
"""
q = _http_soap(session, cluster_id)
if item_id:
q = q.filter(HTTPSOAP.id==item_id)
elif name:
q = q.filter(HTTPSOAP.name==name)
else:
raise Exception('Exactly one of \'id\' or \'name\' is required')
return q.one()
@query_wrapper
def http_soap_list(session, cluster_id, connection=None, transport=None, return_internal=True, data_format=None,
needs_columns=False, *args, **kwargs):
""" HTTP/SOAP connections, both channels and outgoing ones.
"""
q = _http_soap(session, cluster_id)
if connection:
q = q.filter(HTTPSOAP.connection==connection)
if transport:
q = q.filter(HTTPSOAP.transport==transport)
if not return_internal:
q = q.filter(not_(HTTPSOAP.name.startswith('zato')))
if data_format:
q = q.filter(HTTPSOAP.data_format.startswith(data_format))
return q
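# Illustrative calls - needs_columns is read from the last positional
# argument, so it is safest to pass all arguments positionally here:
#
#   channels = http_soap_list(session, cluster_id, 'channel', None, True, None, False)
#   result, columns = http_soap_list(session, cluster_id, 'outgoing', None, True, 'json', True)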
# ################################################################################################################################
def _out_sql(session, cluster_id):
return session.query(SQLConnectionPool).\
filter(Cluster.id==SQLConnectionPool.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(SQLConnectionPool.name)
def out_sql(session, cluster_id, id):
""" An outgoing SQL connection.
"""
return _out_sql(session, cluster_id).\
filter(SQLConnectionPool.id==id).\
one()
@query_wrapper
def out_sql_list(session, cluster_id, needs_columns=False):
""" Outgoing SQL connections.
"""
return _out_sql(session, cluster_id)
# ################################################################################################################################
def _out_ftp(session, cluster_id):
return session.query(
OutgoingFTP.id,
OutgoingFTP.name,
OutgoingFTP.is_active,
OutgoingFTP.host,
OutgoingFTP.port,
OutgoingFTP.user,
OutgoingFTP.password,
OutgoingFTP.acct,
OutgoingFTP.timeout,
OutgoingFTP.dircache,
OutgoingFTP.opaque1,
).\
filter(Cluster.id==OutgoingFTP.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(OutgoingFTP.name)
def out_ftp(session, cluster_id, id):
""" An outgoing FTP connection.
"""
return _out_ftp(session, cluster_id).\
filter(OutgoingFTP.id==id).\
one()
@query_wrapper
def out_ftp_list(session, cluster_id, needs_columns=False):
""" Outgoing FTP connections.
"""
return _out_ftp(session, cluster_id)
# ################################################################################################################################
def _service(session, cluster_id):
return session.query(
Service.id,
Service.name,
Service.is_active,
Service.impl_name,
Service.is_internal,
Service.slow_threshold,
Service.opaque1,
).\
filter(Cluster.id==Service.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(Service.name)
def service(session, cluster_id, id=None, name=None):
""" A service.
"""
q = _service(session, cluster_id)
if name:
q = q.filter(Service.name==name)
elif id:
q = q.filter(Service.id==id)
return q.one()
@query_wrapper
def service_list(session, cluster_id, return_internal=True, include_list=None, needs_columns=False):
""" All services.
"""
q = _service(session, cluster_id)
if include_list:
        q = q.filter(Service.name.in_(include_list))
else:
if not return_internal:
q = q.filter(not_(Service.name.startswith('zato')))
return q
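# A minimal usage sketch, not part of the original module: in service_list above, include_list takes precedence over
# return_internal, so when a list of names is given, internal services are not filtered out.

def example_selected_services(session, cluster_id, name_list):
    """ Illustrative only: returns just the named services, internal or not. """
    return service_list(session, cluster_id, include_list=name_list)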
@query_wrapper
def service_list_with_include(session, cluster_id, include_list, needs_columns=False):
q = _service(session, cluster_id)
return q.filter(Service.name.in_(include_list))
def service_id_list(session, cluster_id, name_list=None):
return session.query(
Service.id,
Service.impl_name).\
filter(Cluster.id==Service.cluster_id).\
filter(Cluster.id==cluster_id).\
filter(Service.name.in_(name_list))
# ################################################################################################################################
def _msg_list(class_, order_by, session, cluster_id, needs_columns=False):
""" All the namespaces.
"""
return session.query(
class_.id, class_.name,
class_.value).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==class_.cluster_id).\
order_by(order_by)
@query_wrapper
def namespace_list(session, cluster_id, needs_columns=False):
""" All the namespaces.
"""
    return _msg_list(MsgNamespace, MsgNamespace.name, session, cluster_id, needs_columns)
@query_wrapper
def xpath_list(session, cluster_id, needs_columns=False):
""" All the XPaths.
"""
    return _msg_list(XPath, XPath.name, session, cluster_id, needs_columns)
@query_wrapper
def json_pointer_list(session, cluster_id, needs_columns=False):
""" All the JSON Pointers.
"""
    return _msg_list(JSONPointer, JSONPointer.name, session, cluster_id, needs_columns)
# ################################################################################################################################
def _cloud_aws_s3(session, cluster_id):
return session.query(
AWSS3.id, AWSS3.name, AWSS3.is_active, AWSS3.pool_size, AWSS3.address, AWSS3.debug_level, AWSS3.suppr_cons_slashes,
AWSS3.content_type, AWSS3.metadata_, AWSS3.security_id, AWSS3.bucket, AWSS3.encrypt_at_rest, AWSS3.storage_class,
SecurityBase.username, SecurityBase.password).\
filter(Cluster.id==cluster_id).\
filter(AWSS3.security_id==SecurityBase.id).\
order_by(AWSS3.name)
def cloud_aws_s3(session, cluster_id, id):
""" An AWS S3 connection.
"""
return _cloud_aws_s3(session, cluster_id).\
filter(AWSS3.id==id).\
one()
@query_wrapper
def cloud_aws_s3_list(session, cluster_id, needs_columns=False):
""" AWS S3 connections.
"""
return _cloud_aws_s3(session, cluster_id)
# ################################################################################################################################
def _pubsub_endpoint(session, cluster_id):
return session.query(
PubSubEndpoint.id,
PubSubEndpoint.name,
PubSubEndpoint.endpoint_type,
PubSubEndpoint.is_active,
PubSubEndpoint.is_internal,
PubSubEndpoint.role,
PubSubEndpoint.tags,
PubSubEndpoint.topic_patterns,
PubSubEndpoint.pub_tag_patterns,
PubSubEndpoint.message_tag_patterns,
PubSubEndpoint.security_id,
PubSubEndpoint.ws_channel_id,
SecurityBase.sec_type,
SecurityBase.name.label('sec_name'),
Service.id.label('service_id'),
Service.name.label('service_name'),
ChannelWebSocket.name.label('ws_channel_name'),
).\
outerjoin(SecurityBase, SecurityBase.id==PubSubEndpoint.security_id).\
        outerjoin(Service, Service.id==PubSubEndpoint.service_id).\
outerjoin(ChannelWebSocket, ChannelWebSocket.id==PubSubEndpoint.ws_channel_id).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==PubSubEndpoint.cluster_id).\
order_by(PubSubEndpoint.id)
def pubsub_endpoint(session, cluster_id, id):
""" An individual pub/sub endpoint.
"""
return _pubsub_endpoint(session, cluster_id).\
filter(PubSubEndpoint.id==id).\
one()
@query_wrapper
def pubsub_endpoint_list(session, cluster_id, needs_columns=False):
""" A list of pub/sub endpoints.
"""
return _pubsub_endpoint(session, cluster_id)
# ################################################################################################################################
def _pubsub_topic(session, cluster_id):
return session.query(
PubSubTopic.id,
PubSubTopic.name,
PubSubTopic.is_active,
PubSubTopic.is_internal,
PubSubTopic.max_depth_gd,
PubSubTopic.max_depth_non_gd,
PubSubTopic.has_gd,
PubSubTopic.is_api_sub_allowed,
PubSubTopic.depth_check_freq,
PubSubTopic.hook_service_id,
PubSubTopic.pub_buffer_size_gd,
PubSubTopic.task_sync_interval,
PubSubTopic.task_delivery_interval,
PubSubTopic.opaque1,
Service.name.label('hook_service_name'),
).\
outerjoin(Service, Service.id==PubSubTopic.hook_service_id).\
filter(Cluster.id==PubSubTopic.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(PubSubTopic.name)
@bunch_maker
def pubsub_topic(session, cluster_id, id):
""" A pub/sub topic.
"""
return _pubsub_topic(session, cluster_id).\
filter(PubSubTopic.id==id).\
one()
@query_wrapper
def pubsub_topic_list(session, cluster_id, needs_columns=False):
""" All pub/sub topics.
"""
return _pubsub_topic(session, cluster_id)
# ################################################################################################################################
def pubsub_publishers_for_topic(session, cluster_id, topic_id):
return session.query(
PubSubEndpoint.service_id, PubSubEndpoint.security_id,
PubSubEndpoint.ws_channel_id, PubSubEndpoint.name,
PubSubEndpoint.is_active, PubSubEndpoint.is_internal,
PubSubEndpoint.last_seen, PubSubEndpoint.last_pub_time,
        PubSubEndpointTopic.last_pub_time.label('last_topic_pub_time'),
PubSubEndpointTopic.pub_msg_id.label('last_msg_id'),
PubSubEndpointTopic.pub_correl_id.label('last_correl_id'),
PubSubEndpointTopic.in_reply_to.label('last_in_reply_to'),
PubSubEndpointTopic.ext_client_id,
Service.name.label('service_name'),
SecurityBase.name.label('sec_name'),
ChannelWebSocket.name.label('ws_channel_name'),
).\
outerjoin(Service, Service.id==PubSubEndpoint.service_id).\
outerjoin(SecurityBase, SecurityBase.id==PubSubEndpoint.security_id).\
outerjoin(ChannelWebSocket, ChannelWebSocket.id==PubSubEndpoint.ws_channel_id).\
filter(PubSubEndpointTopic.topic_id==PubSubTopic.id).\
filter(PubSubEndpointTopic.topic_id==topic_id).\
filter(PubSubEndpointTopic.endpoint_id==PubSubEndpoint.id).\
filter(PubSubEndpointTopic.cluster_id==cluster_id)
# ################################################################################################################################
def _pubsub_topic_message(session, cluster_id, needs_sub_queue_check):
q = session.query(
PubSubMessage.pub_msg_id.label('msg_id'),
PubSubMessage.pub_correl_id.label('correl_id'),
PubSubMessage.in_reply_to,
PubSubMessage.pub_time, PubSubMessage.data_prefix_short,
PubSubMessage.pub_pattern_matched, PubSubMessage.priority,
PubSubMessage.ext_pub_time, PubSubMessage.size,
PubSubMessage.data_format, PubSubMessage.mime_type,
PubSubMessage.data, PubSubMessage.expiration,
PubSubMessage.expiration_time, PubSubMessage.has_gd,
PubSubMessage.ext_client_id,
PubSubEndpoint.id.label('endpoint_id'),
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.service_id,
PubSubEndpoint.security_id,
PubSubEndpoint.ws_channel_id,
PubSubTopic.id.label('topic_id'),
PubSubTopic.name.label('topic_name'),
).\
filter(PubSubMessage.published_by_id==PubSubEndpoint.id).\
filter(PubSubMessage.cluster_id==cluster_id).\
filter(PubSubMessage.topic_id==PubSubTopic.id)
if needs_sub_queue_check:
q = q.\
filter(~PubSubMessage.is_in_sub_queue)
return q
# ################################################################################################################################
def pubsub_message(session, cluster_id, pub_msg_id, needs_sub_queue_check=True):
return _pubsub_topic_message(session, cluster_id, needs_sub_queue_check).\
filter(PubSubMessage.pub_msg_id==pub_msg_id)
# ################################################################################################################################
def _pubsub_endpoint_queue(session, cluster_id):
return session.query(
PubSubSubscription.id.label('sub_id'),
PubSubSubscription.active_status,
PubSubSubscription.is_internal,
PubSubSubscription.creation_time,
PubSubSubscription.sub_key,
PubSubSubscription.has_gd,
PubSubSubscription.delivery_method,
PubSubSubscription.delivery_data_format,
PubSubSubscription.delivery_endpoint,
PubSubSubscription.is_staging_enabled,
PubSubSubscription.ext_client_id,
PubSubTopic.id.label('topic_id'),
PubSubTopic.name.label('topic_name'),
PubSubTopic.name.label('name'), # Currently queue names are the same as their originating topics
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.id.label('endpoint_id'),
WebSocketSubscription.ext_client_id.label('ws_ext_client_id'),
).\
outerjoin(WebSocketSubscription, WebSocketSubscription.sub_key==PubSubSubscription.sub_key).\
filter(PubSubSubscription.topic_id==PubSubTopic.id).\
filter(PubSubSubscription.cluster_id==cluster_id).\
filter(PubSubSubscription.endpoint_id==PubSubEndpoint.id)
# ################################################################################################################################
@query_wrapper
def pubsub_endpoint_queue_list(session, cluster_id, endpoint_id, needs_columns=False):
return _pubsub_endpoint_queue(session, cluster_id).\
filter(PubSubSubscription.endpoint_id==endpoint_id).\
order_by(PubSubSubscription.creation_time.desc())
# ################################################################################################################################
def pubsub_endpoint_queue_list_by_sub_keys(session, cluster_id, sub_key_list):
return _pubsub_endpoint_queue(session, cluster_id).\
filter(PubSubSubscription.sub_key.in_(sub_key_list)).\
all()
# ################################################################################################################################
def pubsub_endpoint_queue(session, cluster_id, sub_id):
return _pubsub_endpoint_queue(session, cluster_id).\
filter(PubSubSubscription.id==sub_id).\
one()
# ################################################################################################################################
@query_wrapper
def pubsub_messages_for_topic(session, cluster_id, topic_id, needs_columns=False):
return _pubsub_topic_message(session, cluster_id, True).\
filter(PubSubMessage.topic_id==topic_id).\
order_by(PubSubMessage.pub_time.desc())
# ################################################################################################################################
def _pubsub_queue_message(session, cluster_id):
return session.query(
PubSubMessage.pub_msg_id.label('msg_id'),
PubSubMessage.pub_correl_id.label('correl_id'),
PubSubMessage.in_reply_to,
PubSubMessage.data_prefix_short,
PubSubMessage.priority,
PubSubMessage.ext_pub_time,
PubSubMessage.size,
PubSubMessage.data_format,
PubSubMessage.mime_type,
PubSubMessage.data,
PubSubMessage.expiration,
PubSubMessage.expiration_time,
PubSubMessage.ext_client_id,
PubSubMessage.published_by_id,
PubSubMessage.pub_pattern_matched,
PubSubTopic.id.label('topic_id'),
PubSubTopic.name.label('topic_name'),
PubSubTopic.name.label('queue_name'), # Currently, queue name = name of its underlying topic
PubSubEndpointEnqueuedMessage.creation_time.label('recv_time'),
PubSubEndpointEnqueuedMessage.delivery_count,
PubSubEndpointEnqueuedMessage.last_delivery_time,
PubSubEndpointEnqueuedMessage.is_in_staging,
PubSubEndpointEnqueuedMessage.endpoint_id.label('subscriber_id'),
PubSubEndpointEnqueuedMessage.sub_key,
PubSubEndpoint.name.label('subscriber_name'),
PubSubSubscription.sub_pattern_matched,
).\
filter(PubSubEndpointEnqueuedMessage.pub_msg_id==PubSubMessage.pub_msg_id).\
filter(PubSubEndpointEnqueuedMessage.topic_id==PubSubTopic.id).\
filter(PubSubEndpointEnqueuedMessage.endpoint_id==PubSubEndpoint.id).\
filter(PubSubEndpointEnqueuedMessage.sub_key==PubSubSubscription.sub_key).\
filter(PubSubEndpointEnqueuedMessage.cluster_id==cluster_id)
# ################################################################################################################################
def pubsub_queue_message(session, cluster_id, msg_id):
return _pubsub_queue_message(session, cluster_id).\
filter(PubSubMessage.pub_msg_id==msg_id)
# ################################################################################################################################
@query_wrapper
def pubsub_messages_for_queue(session, cluster_id, sub_key, skip_delivered=False, needs_columns=False):
q = _pubsub_queue_message(session, cluster_id).\
filter(PubSubEndpointEnqueuedMessage.sub_key==sub_key)
if skip_delivered:
q = q.filter(PubSubEndpointEnqueuedMessage.delivery_status != PUBSUB.DELIVERY_STATUS.DELIVERED)
return q.order_by(PubSubEndpointEnqueuedMessage.creation_time.desc())
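# ################################################################################################################################

# A minimal usage sketch, not part of the original module: given an active session and a known sub_key, this is how
# a caller might fetch only the messages that still await delivery, relying on the skip_delivered flag above.

def example_undelivered_queue_messages(session, cluster_id, sub_key):
    """ Illustrative only: undelivered messages for one subscription queue, newest first. """
    return pubsub_messages_for_queue(session, cluster_id, sub_key, skip_delivered=True)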
# ################################################################################################################################
def pubsub_hook_service(session, cluster_id, endpoint_id, model_class):
return session.query(
Service.id,
Service.name,
).\
filter(Cluster.id==Service.cluster_id).\
filter(Service.id==model_class.hook_service_id).\
first()
# ################################################################################################################################
def _notif_sql(session, cluster_id, needs_password):
""" SQL notifications.
"""
columns = [NotifSQL.id, NotifSQL.is_active, NotifSQL.name, NotifSQL.query, NotifSQL.notif_type, NotifSQL.interval,
NotifSQL.def_id, SQLConnectionPool.name.label('def_name'), Service.name.label('service_name')]
if needs_password:
columns.append(SQLConnectionPool.password)
return session.query(*columns).\
filter(Cluster.id==NotifSQL.cluster_id).\
filter(SQLConnectionPool.id==NotifSQL.def_id).\
filter(Service.id==NotifSQL.service_id).\
filter(Cluster.id==cluster_id)
@query_wrapper
def notif_sql_list(session, cluster_id, needs_password=False, needs_columns=False):
""" All the SQL notifications.
"""
return _notif_sql(session, cluster_id, needs_password)
# ################################################################################################################################
def _search_es(session, cluster_id):
""" ElasticSearch connections.
"""
return session.query(ElasticSearch).\
filter(Cluster.id==ElasticSearch.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ElasticSearch.name)
@query_wrapper
def search_es_list(session, cluster_id, needs_columns=False):
""" All the ElasticSearch connections.
"""
return _search_es(session, cluster_id)
# ################################################################################################################################
def _search_solr(session, cluster_id):
""" Solr sonnections.
"""
return session.query(Solr).\
filter(Cluster.id==Solr.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(Solr.name)
@query_wrapper
def search_solr_list(session, cluster_id, needs_columns=False):
""" All the Solr connections.
"""
return _search_solr(session, cluster_id)
# ################################################################################################################################
def _server(session, cluster_id, cluster_name):
q = session.query(
Server.id, Server.name, Server.bind_host, Server.bind_port, Server.last_join_status, Server.last_join_mod_date,
Server.last_join_mod_by, Server.up_status, Server.up_mod_date, Server.preferred_address,
Server.crypto_use_tls,
Cluster.id.label('cluster_id'), Cluster.name.label('cluster_name')).\
filter(Cluster.id==Server.cluster_id)
if cluster_id:
q = q.filter(Cluster.id==cluster_id)
else:
q = q.filter(Cluster.name==cluster_name)
q = q.order_by(Server.name)
return q
@query_wrapper
def server_list(session, cluster_id, cluster_name, up_status=None, needs_columns=False):
""" All the servers defined on a cluster.
"""
q = _server(session, cluster_id, cluster_name)
if up_status:
q = q.filter(Server.up_status==up_status)
return q
def server_by_name(session, cluster_id, cluster_name, server_name):
return _server(session, cluster_id, cluster_name).\
filter(Server.name==server_name).\
all()
def server_by_id(session, cluster_id, server_id):
return _server(session, cluster_id, None).\
filter(Server.id==server_id).\
one()
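# ################################################################################################################################

# A minimal usage sketch, not part of the original module: it illustrates the cluster_id-or-cluster_name fallback
# implemented by _server above. The 'running' status value is a hypothetical literal chosen for this example only.

def example_running_servers(session, cluster_name):
    """ Illustrative only: all running servers in a cluster looked up by name rather than by ID. """
    return server_list(session, None, cluster_name, up_status='running')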
# ################################################################################################################################
def _cassandra_conn(session, cluster_id):
return session.query(CassandraConn).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==CassandraConn.cluster_id).\
order_by(CassandraConn.name)
def cassandra_conn(session, cluster_id, id):
""" A Cassandra connection definition.
"""
return _cassandra_conn(session, cluster_id).\
filter(CassandraConn.id==id).\
one()
@query_wrapper
def cassandra_conn_list(session, cluster_id, needs_columns=False):
""" A list of Cassandra connection definitions.
"""
return _cassandra_conn(session, cluster_id)
# ################################################################################################################################
def _cassandra_query(session, cluster_id):
return session.query(
CassandraQuery.id, CassandraQuery.name, CassandraQuery.value,
CassandraQuery.is_active, CassandraQuery.cluster_id,
CassandraConn.name.label('def_name'),
CassandraConn.id.label('def_id'),
).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==CassandraQuery.cluster_id).\
filter(CassandraConn.id==CassandraQuery.def_id).\
order_by(CassandraQuery.name)
def cassandra_query(session, cluster_id, id):
""" A Cassandra prepared statement.
"""
return _cassandra_query(session, cluster_id).\
filter(CassandraQuery.id==id).\
one()
@query_wrapper
def cassandra_query_list(session, cluster_id, needs_columns=False):
""" A list of Cassandra prepared statements.
"""
return _cassandra_query(session, cluster_id)
# ################################################################################################################################
def _email_smtp(session, cluster_id):
return session.query(SMTP).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==SMTP.cluster_id).\
order_by(SMTP.name)
def email_smtp(session, cluster_id, id):
""" An SMTP connection.
"""
return _email_smtp(session, cluster_id).\
filter(SMTP.id==id).\
one()
@query_wrapper
def email_smtp_list(session, cluster_id, needs_columns=False):
""" A list of SMTP connections.
"""
return _email_smtp(session, cluster_id)
# ################################################################################################################################
def _email_imap(session, cluster_id):
return session.query(IMAP).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==IMAP.cluster_id).\
order_by(IMAP.name)
def email_imap(session, cluster_id, id):
""" An IMAP connection.
"""
return _email_imap(session, cluster_id).\
filter(IMAP.id==id).\
one()
@query_wrapper
def email_imap_list(session, cluster_id, needs_columns=False):
""" A list of IMAP connections.
"""
return _email_imap(session, cluster_id)
# ################################################################################################################################
def _rbac_permission(session, cluster_id):
return session.query(RBACPermission).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==RBACPermission.cluster_id).\
order_by(RBACPermission.name)
def rbac_permission(session, cluster_id, id=None, name=None):
""" An RBAC permission.
"""
q = _rbac_permission(session, cluster_id)
if name:
q = q.filter(RBACPermission.name==name)
elif id:
q = q.filter(RBACPermission.id==id)
return q.one()
@query_wrapper
def rbac_permission_list(session, cluster_id, needs_columns=False):
""" A list of RBAC permissions.
"""
return _rbac_permission(session, cluster_id)
# ################################################################################################################################
def _rbac_role(session, cluster_id):
rbac_parent = aliased(RBACRole)
return session.query(RBACRole.id, RBACRole.name, RBACRole.parent_id, rbac_parent.name.label('parent_name')).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==RBACRole.cluster_id).\
outerjoin(rbac_parent, rbac_parent.id==RBACRole.parent_id).\
order_by(RBACRole.name)
def rbac_role(session, cluster_id, id=None, name=None):
""" An RBAC role.
"""
q = _rbac_role(session, cluster_id)
if name:
q = q.filter(RBACRole.name==name)
elif id:
q = q.filter(RBACRole.id==id)
return q.one()
@query_wrapper
def rbac_role_list(session, cluster_id, needs_columns=False):
""" A list of RBAC roles.
"""
return _rbac_role(session, cluster_id)
# ################################################################################################################################
def _rbac_client_role(session, cluster_id):
return session.query(RBACClientRole).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==RBACClientRole.cluster_id).\
order_by(RBACClientRole.client_def)
def rbac_client_role(session, cluster_id, id):
""" An individual mapping between a client and role.
"""
return _rbac_client_role(session, cluster_id).\
filter(RBACClientRole.id==id).\
one()
@query_wrapper
def rbac_client_role_list(session, cluster_id, needs_columns=False):
""" A list of mappings between clients and roles.
"""
return _rbac_client_role(session, cluster_id)
# ################################################################################################################################
def _rbac_role_permission(session, cluster_id):
return session.query(RBACRolePermission).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==RBACRolePermission.cluster_id).\
order_by(RBACRolePermission.role_id)
def rbac_role_permission(session, cluster_id, id):
""" An individual permission for a given role against a service.
"""
return _rbac_role_permission(session, cluster_id).\
filter(RBACRolePermission.id==id).\
one()
@query_wrapper
def rbac_role_permission_list(session, cluster_id, needs_columns=False):
""" A list of permissions for roles against services.
"""
return _rbac_role_permission(session, cluster_id)
# ################################################################################################################################
def cache_by_id(session, cluster_id, cache_id):
return session.query(Cache).\
        filter(Cluster.id==cluster_id).\
filter(Cluster.id==Cache.cluster_id).\
filter(Cache.id==cache_id).\
one()
# ################################################################################################################################
def _cache_builtin(session, cluster_id):
return session.query(CacheBuiltin).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==CacheBuiltin.cluster_id).\
filter(Cache.id==CacheBuiltin.cache_id).\
filter(Cache.cache_type==CACHE.TYPE.BUILTIN).\
order_by(CacheBuiltin.name)
def cache_builtin(session, cluster_id, id):
""" An individual built-in cache definition.
"""
return _cache_builtin(session, cluster_id).\
filter(CacheBuiltin.id==id).\
one()
@query_wrapper
def cache_builtin_list(session, cluster_id, needs_columns=False):
""" A list of built-in cache definitions.
"""
return _cache_builtin(session, cluster_id)
# ################################################################################################################################
def _cache_memcached(session, cluster_id):
return session.query(
CacheMemcached.cache_id, CacheMemcached.name, CacheMemcached.is_active,
CacheMemcached.is_default, CacheMemcached.is_debug,
CacheMemcached.servers, CacheMemcached.extra,
CacheMemcached.cache_type).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==CacheMemcached.cluster_id).\
filter(Cache.id==CacheMemcached.cache_id).\
filter(Cache.cache_type==CACHE.TYPE.MEMCACHED).\
order_by(CacheMemcached.name)
def cache_memcached(session, cluster_id, id):
""" An individual Memcached cache definition.
"""
    return _cache_memcached(session, cluster_id).\
filter(CacheMemcached.id==id).\
one()
@query_wrapper
def cache_memcached_list(session, cluster_id, needs_columns=False):
""" A list of Memcached cache definitions.
"""
return _cache_memcached(session, cluster_id)
# ################################################################################################################################
def _out_odoo(session, cluster_id):
return session.query(OutgoingOdoo).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==OutgoingOdoo.cluster_id).\
order_by(OutgoingOdoo.name)
def out_odoo(session, cluster_id, id):
""" An individual Odoo connection.
"""
return _out_odoo(session, cluster_id).\
filter(OutgoingOdoo.id==id).\
one()
@query_wrapper
def out_odoo_list(session, cluster_id, needs_columns=False):
""" A list of Odoo connections.
"""
return _out_odoo(session, cluster_id)
# ################################################################################################################################
def _out_sap(session, cluster_id):
return session.query(OutgoingSAP).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==OutgoingSAP.cluster_id).\
order_by(OutgoingSAP.name)
def out_sap(session, cluster_id, id):
""" An individual SAP RFC connection.
"""
return _out_sap(session, cluster_id).\
filter(OutgoingSAP.id==id).\
one()
@query_wrapper
def out_sap_list(session, cluster_id, needs_columns=False):
""" A list of SAP RFC connections.
"""
return _out_sap(session, cluster_id)
# ################################################################################################################################
def _channel_web_socket(session, cluster_id):
""" WebSocket channels
"""
return session.query(
ChannelWebSocket.id,
ChannelWebSocket.name,
ChannelWebSocket.is_active,
ChannelWebSocket.is_internal,
ChannelWebSocket.address,
ChannelWebSocket.data_format,
ChannelWebSocket.service_id,
ChannelWebSocket.security_id,
ChannelWebSocket.new_token_wait_time,
ChannelWebSocket.token_ttl,
ChannelWebSocket.is_out,
ChannelWebSocket.opaque1,
SecurityBase.sec_type,
VaultConnection.default_auth_method.label('vault_conn_default_auth_method'),
SecurityBase.name.label('sec_name'),
Service.name.label('service_name'),
).\
outerjoin(Service, Service.id==ChannelWebSocket.service_id).\
outerjoin(SecurityBase, SecurityBase.id==ChannelWebSocket.security_id).\
outerjoin(VaultConnection, SecurityBase.id==VaultConnection.id).\
filter(Cluster.id==ChannelWebSocket.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(ChannelWebSocket.name)
def channel_web_socket(session, cluster_id, id):
""" An incoming WebSocket connection.
"""
return _channel_web_socket(session, cluster_id).\
filter(ChannelWebSocket.id==id).\
one()
@query_wrapper
def channel_web_socket_list(session, cluster_id, needs_columns=False):
""" All the WebSocket channel connections.
"""
return _channel_web_socket(session, cluster_id)
# ################################################################################################################################
def web_socket_client_by_pub_id(session, pub_client_id):
""" An individual WebSocket connection by its public ID.
"""
return session.query(
WebSocketClient.id,
ChannelWebSocket.id.label('channel_id'),
ChannelWebSocket.name.label('channel_name')
).\
filter(WebSocketClient.pub_client_id==pub_client_id).\
outerjoin(ChannelWebSocket, ChannelWebSocket.id==WebSocketClient.channel_id).\
one()
# ################################################################################################################################
def web_socket_client_by_ext_id(session, ext_client_id, needs_one_or_none=False):
""" An individual WebSocket connection by its external client ID.
"""
query = session.query(
WebSocketClient,
ChannelWebSocket.id.label('channel_id'),
ChannelWebSocket.name.label('channel_name')
).\
filter(WebSocketClient.ext_client_id==ext_client_id).\
outerjoin(ChannelWebSocket, ChannelWebSocket.id==WebSocketClient.channel_id)
return query.one_or_none() if needs_one_or_none else query.all()
# ################################################################################################################################
def web_socket_clients_by_server_id(session, server_id, server_pid):
""" A list of WebSocket clients attached to a particular server by the latter's ID.
"""
query = session.query(WebSocketClient).\
filter(WebSocketClient.server_id==server_id)
if server_pid:
query = query.\
filter(WebSocketClient.server_proc_pid==server_pid)
return query
# ################################################################################################################################
def _web_socket_client(session, cluster_id, channel_id):
return session.query(WebSocketClient).\
filter(WebSocketClient.cluster_id==cluster_id).\
filter(WebSocketClient.channel_id==channel_id).\
order_by(WebSocketClient.connection_time.desc())
# ################################################################################################################################
def web_socket_client(session, cluster_id, channel_id, pub_client_id=None, ext_client_id=None, use_first=True):
query = _web_socket_client(session, cluster_id, channel_id)
if pub_client_id:
query = query.filter(WebSocketClient.pub_client_id==pub_client_id)
elif ext_client_id:
query = query.filter(WebSocketClient.ext_client_id==ext_client_id)
else:
raise ValueError('Either pub_client_id or ext_client_id is required on input')
return query.first() if use_first else query.all()
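# ################################################################################################################################

# A minimal usage sketch, not part of the original module: web_socket_client above requires exactly one of the two
# client identifiers, so a thin wrapper can make that choice explicit at the call site.

def example_client_by_pub_id(session, cluster_id, channel_id, pub_client_id):
    """ Illustrative only: the most recently connected matching client, looked up by its public ID. """
    return web_socket_client(session, cluster_id, channel_id, pub_client_id=pub_client_id)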
# ################################################################################################################################
@query_wrapper
def web_socket_client_list(session, cluster_id, channel_id, needs_columns=False):
""" A list of subscriptions to a particular pattern.
"""
return _web_socket_client(session, cluster_id, channel_id)
# ################################################################################################################################
def _web_socket_sub_key_data(session, cluster_id, pub_client_id):
return session.query(
WebSocketClientPubSubKeys.sub_key,
PubSubSubscription.topic_id,
PubSubSubscription.id.label('sub_id'),
PubSubSubscription.creation_time,
PubSubSubscription.endpoint_id,
PubSubSubscription.sub_pattern_matched,
PubSubSubscription.ext_client_id,
PubSubEndpoint.name.label('endpoint_name'),
PubSubTopic.name.label('topic_name')
).\
filter(WebSocketClient.pub_client_id==pub_client_id).\
filter(WebSocketClient.id==WebSocketClientPubSubKeys.client_id).\
filter(WebSocketClientPubSubKeys.sub_key==WebSocketSubscription.sub_key).\
filter(WebSocketClientPubSubKeys.sub_key==PubSubSubscription.sub_key).\
filter(PubSubSubscription.topic_id==PubSubTopic.id).\
filter(PubSubSubscription.endpoint_id==PubSubEndpoint.id)
@query_wrapper
def web_socket_sub_key_data_list(session, cluster_id, pub_client_id, needs_columns=False):
return _web_socket_sub_key_data(session, cluster_id, pub_client_id)
# ################################################################################################################################
def _vault_connection(session, cluster_id):
return session.query(VaultConnection.id, VaultConnection.is_active, VaultConnection.name,
VaultConnection.url, VaultConnection.token, VaultConnection.default_auth_method,
VaultConnection.timeout, VaultConnection.allow_redirects, VaultConnection.tls_verify,
VaultConnection.tls_ca_cert_id, VaultConnection.tls_key_cert_id, VaultConnection.sec_type,
Service.name.label('service_name'), Service.id.label('service_id')).\
filter(Cluster.id==cluster_id).\
filter(Cluster.id==VaultConnection.cluster_id).\
outerjoin(Service, Service.id==VaultConnection.service_id).\
order_by(VaultConnection.name)
def vault_connection(session, cluster_id, id):
""" An individual Vault connection.
"""
return _vault_connection(session, cluster_id).\
filter(VaultConnection.id==id).\
one()
@query_wrapper
def vault_connection_list(session, cluster_id, needs_columns=False):
""" A list of Vault connections.
"""
return _vault_connection(session, cluster_id)
# ################################################################################################################################
def _sms_twilio(session, cluster_id):
""" SMS Twilio connections.
"""
return session.query(SMSTwilio).\
filter(Cluster.id==SMSTwilio.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(SMSTwilio.name)
def sms_twilio(session, cluster_id, id):
""" An individual SMS Twilio connection.
"""
return _sms_twilio(session, cluster_id).\
filter(SMSTwilio.id==id).\
one()
@query_wrapper
def sms_twilio_list(session, cluster_id, needs_columns=False):
""" All the SMS Twilio connections.
"""
return _sms_twilio(session, cluster_id)
# ################################################################################################################################

# End of file: zato-common-holocene-3.2.1.tar.gz/src/zato/common/odb/query/__init__.py
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import and_, exists, insert, update
from sqlalchemy.sql import expression as expr
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpointEnqueuedMessage, PubSubMessage, PubSubSubscription, WebSocketSubscription
from zato.common.util.time_ import utcnow_as_ms
# ################################################################################################################################
MsgTable = PubSubMessage.__table__
# ################################################################################################################################
_initialized = PUBSUB.DELIVERY_STATUS.INITIALIZED
# ################################################################################################################################
def has_subscription(session, cluster_id, topic_id, endpoint_id):
""" Returns a boolean flag indicating whether input endpoint has subscription to a given topic.
"""
return session.query(exists().where(and_(
PubSubSubscription.endpoint_id==endpoint_id,
PubSubSubscription.topic_id==topic_id,
PubSubSubscription.cluster_id==cluster_id,
))).\
scalar()
# ################################################################################################################################
def add_wsx_subscription(session, cluster_id, is_internal, sub_key, ext_client_id, ws_channel_id, sub_id):
""" Adds an object representing a subscription of a WebSockets client.
"""
wsx_sub = WebSocketSubscription()
wsx_sub.is_internal = is_internal or False
wsx_sub.sub_key = sub_key
wsx_sub.ext_client_id = ext_client_id
wsx_sub.channel_id = ws_channel_id
wsx_sub.cluster_id = cluster_id
wsx_sub.subscription_id = sub_id
session.add(wsx_sub)
return wsx_sub
# ################################################################################################################################
def add_subscription(session, cluster_id, sub_key, ctx):
""" Adds an object representing a subscription regardless of the underlying protocol.
"""
# Common
ps_sub = PubSubSubscription()
ps_sub.cluster_id = ctx.cluster_id
ps_sub.server_id = ctx.server_id
ps_sub.topic_id = ctx.topic.id
ps_sub.is_internal = ctx.is_internal
ps_sub.is_staging_enabled = ctx.is_staging_enabled
ps_sub.creation_time = ctx.creation_time
ps_sub.sub_key = sub_key
ps_sub.sub_pattern_matched = ctx.sub_pattern_matched
ps_sub.has_gd = ctx.has_gd
ps_sub.active_status = ctx.active_status
ps_sub.endpoint_type = ctx.endpoint_type
ps_sub.endpoint_id = ctx.endpoint_id
ps_sub.delivery_method = ctx.delivery_method
ps_sub.delivery_data_format = ctx.delivery_data_format
ps_sub.delivery_batch_size = ctx.delivery_batch_size
ps_sub.wrap_one_msg_in_list = ctx.wrap_one_msg_in_list if ctx.wrap_one_msg_in_list is not None else True
ps_sub.delivery_max_retry = ctx.delivery_max_retry
ps_sub.delivery_err_should_block = ctx.delivery_err_should_block if ctx.delivery_err_should_block is not None else True
ps_sub.wait_sock_err = ctx.wait_sock_err
ps_sub.wait_non_sock_err = ctx.wait_non_sock_err
ps_sub.ext_client_id = ctx.ext_client_id
# AMQP
ps_sub.amqp_exchange = ctx.amqp_exchange
ps_sub.amqp_routing_key = ctx.amqp_routing_key
ps_sub.out_amqp_id = ctx.out_amqp_id
# Local files
ps_sub.files_directory_list = ctx.files_directory_list
# FTP
ps_sub.ftp_directory_list = ctx.ftp_directory_list
# REST/SOAP
ps_sub.security_id = ctx.security_id
ps_sub.out_http_soap_id = ctx.out_http_soap_id
ps_sub.out_http_method = ctx.out_http_method
# Services
ps_sub.service_id = ctx.service_id
# SMS - Twilio
ps_sub.sms_twilio_from = ctx.sms_twilio_from
ps_sub.sms_twilio_to_list = ctx.sms_twilio_to_list
ps_sub.smtp_is_html = ctx.smtp_is_html
ps_sub.smtp_subject = ctx.smtp_subject
ps_sub.smtp_from = ctx.smtp_from
ps_sub.smtp_to_list = ctx.smtp_to_list
ps_sub.smtp_body = ctx.smtp_body
# WebSockets
ps_sub.ws_channel_id = ctx.ws_channel_id
session.add(ps_sub)
return ps_sub
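# ################################################################################################################################

# A minimal usage sketch, not part of the original module: it ties has_subscription and add_subscription together,
# assuming 'ctx' is the same subscription context object that add_subscription expects, that the caller owns
# transaction handling, and that sub_key was generated elsewhere.

def example_subscribe_once(session, cluster_id, sub_key, ctx):
    """ Illustrative only: creates a subscription unless the endpoint already has one for this topic. """
    if not has_subscription(session, cluster_id, ctx.topic.id, ctx.endpoint_id):
        ps_sub = add_subscription(session, cluster_id, sub_key, ctx)
        session.commit()
        return ps_sub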
# ################################################################################################################################
def move_messages_to_sub_queue(session, cluster_id, topic_id, endpoint_id, sub_pattern_matched, sub_key, pub_time_max,
_initialized=_initialized):
""" Move all unexpired messages from topic to a given subscriber's queue. This method must be called with a global lock
held for topic because it carries out its job through a couple of non-atomic queries.
"""
enqueued_id_subquery = session.query(
PubSubEndpointEnqueuedMessage.pub_msg_id
).\
filter(PubSubEndpointEnqueuedMessage.sub_key==sub_key)
now = utcnow_as_ms()
    # The SELECT statement used by the INSERT below finds all messages for that topic
    # that have not expired yet.
select_messages = session.query(
PubSubMessage.pub_msg_id,
PubSubMessage.topic_id,
expr.bindparam('creation_time', now),
expr.bindparam('endpoint_id', endpoint_id),
expr.bindparam('sub_pattern_matched', sub_pattern_matched),
expr.bindparam('sub_key', sub_key),
expr.bindparam('is_in_staging', False),
expr.bindparam('cluster_id', cluster_id),
).\
filter(PubSubMessage.topic_id==topic_id).\
filter(PubSubMessage.cluster_id==cluster_id).\
filter(PubSubMessage.expiration_time > pub_time_max).\
filter(~PubSubMessage.is_in_sub_queue).\
filter(PubSubMessage.pub_msg_id.notin_(enqueued_id_subquery))
    # All message IDs that are available in the topic for that subscriber, if there are any at all.
    # In theory, it is not required to pull all the messages just to build this list in Python, but it is a relatively
    # efficient operation because there will not be much data to return, and it lets us make sure
    # the INSERT and UPDATE below are issued only if truly needed.
msg_ids = [elem.pub_msg_id for elem in select_messages.all()]
if msg_ids:
# INSERT references to topic's messages in the subscriber's queue.
insert_messages = insert(PubSubEndpointEnqueuedMessage).\
from_select((
PubSubEndpointEnqueuedMessage.pub_msg_id,
PubSubEndpointEnqueuedMessage.topic_id,
expr.column('creation_time'),
expr.column('endpoint_id'),
expr.column('sub_pattern_matched'),
expr.column('sub_key'),
expr.column('is_in_staging'),
expr.column('cluster_id'),
), select_messages)
# Move messages to subscriber's queue
session.execute(insert_messages)
# Indicate that all the messages are being delivered to the subscriber which means that no other
# subscriber will ever receive them. Note that we are changing the status only for the messages pertaining
    # to the current subscriber without ever touching messages received by any other one.
session.execute(
update(MsgTable).\
values({
'is_in_sub_queue': True,
}).\
where(and_(
MsgTable.c.pub_msg_id.in_(msg_ids),
~MsgTable.c.is_in_sub_queue
))
)
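# ################################################################################################################################

# A minimal sketch, not part of the original module: the docstring above requires a topic-wide lock because the
# SELECT, INSERT .. FROM SELECT and UPDATE trio is not atomic. Here, lock_factory stands for any distributed-lock
# implementation returning a context manager - both it and the lock-name format are hypothetical.

def example_move_under_lock(session, lock_factory, cluster_id, topic_id, endpoint_id, sub_pattern_matched, sub_key,
    pub_time_max):
    """ Illustrative only: runs the non-atomic queue move while holding a per-topic lock. """
    with lock_factory('zato.pubsub.move.%s' % topic_id):
        move_messages_to_sub_queue(session, cluster_id, topic_id, endpoint_id, sub_pattern_matched, sub_key, pub_time_max)
        session.commit()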
# ################################################################################################################################

# End of file: zato-common-holocene-3.2.1.tar.gz/src/zato/common/odb/query/pubsub/subscribe.py
# SQLAlchemy
from sqlalchemy import and_, func, select
from sqlalchemy.sql.expression import false as sa_false
# Zato
from zato.common.odb.model import PubSubMessage, PubSubTopic, PubSubSubscription
from zato.common.odb.query import count
# ################################################################################################################################
MsgTable = PubSubMessage.__table__
# ################################################################################################################################
def get_topics_by_sub_keys(session, cluster_id, sub_keys):
""" Returns (topic_id, sub_key) for each input sub_key.
"""
return session.query(
PubSubTopic.id,
PubSubSubscription.sub_key).\
filter(PubSubSubscription.topic_id==PubSubTopic.id).\
filter(PubSubSubscription.sub_key.in_(sub_keys)).\
all()
# ################################################################################################################################
def get_gd_depth_topic(session, cluster_id, topic_id):
""" Returns current depth of input topic by its ID.
"""
q = session.query(MsgTable.c.id).\
filter(MsgTable.c.topic_id==topic_id).\
filter(MsgTable.c.cluster_id==cluster_id).\
filter(~MsgTable.c.is_in_sub_queue)
return count(session, q)
# ################################################################################################################################
def get_gd_depth_topic_list(session, cluster_id, topic_id_list):
""" Returns topics matching the input list as long as they have any messages undelivered to their queues.
"""
q = select([
MsgTable.c.topic_id,
func.count(MsgTable.c.topic_id).label('depth')]).\
where(and_(
MsgTable.c.cluster_id == cluster_id,
MsgTable.c.is_in_sub_queue == sa_false(),
MsgTable.c.topic_id.in_(topic_id_list),
)).\
        group_by(MsgTable.c.topic_id)
return session.execute(q).fetchall()
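# ################################################################################################################################

# A minimal usage sketch, not part of the original module: get_gd_depth_topic_list only returns rows for topics that
# actually have undelivered messages, so a caller that needs a figure for every topic has to fill in zeroes itself.

def example_depth_by_topic(session, cluster_id, topic_id_list):
    """ Illustrative only: maps each input topic ID to its GD depth, defaulting to zero. """
    found = dict(get_gd_depth_topic_list(session, cluster_id, topic_id_list))
    return dict((topic_id, found.get(topic_id, 0)) for topic_id in topic_id_list)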
# ################################################################################################################################

# End of file: zato-common-holocene-3.2.1.tar.gz/src/zato/common/odb/query/pubsub/topic.py
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import func
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import Cluster, PubSubTopic, PubSubEndpoint, PubSubSubscription
from zato.common.odb.query import query_wrapper
# ################################################################################################################################
_subscriber_role = (PUBSUB.ROLE.PUBLISHER_SUBSCRIBER.id, PUBSUB.ROLE.SUBSCRIBER.id)
# ################################################################################################################################
def _pubsub_subscription(session, cluster_id):
return session.query(
PubSubSubscription.id,
PubSubSubscription.id.label('sub_id'),
PubSubSubscription.id.label('name'), # A unique 'name' attribute is needed by ConfigDict
PubSubSubscription.active_status,
PubSubSubscription.server_id,
PubSubSubscription.is_internal,
PubSubSubscription.is_staging_enabled,
PubSubSubscription.creation_time,
PubSubSubscription.last_interaction_time,
PubSubSubscription.last_interaction_type,
PubSubSubscription.last_interaction_details,
PubSubSubscription.sub_key,
PubSubSubscription.is_durable,
PubSubSubscription.has_gd,
PubSubSubscription.topic_id,
PubSubSubscription.endpoint_id,
PubSubSubscription.delivery_method,
PubSubSubscription.delivery_data_format,
PubSubSubscription.delivery_batch_size,
PubSubSubscription.wrap_one_msg_in_list,
PubSubSubscription.delivery_max_retry,
PubSubSubscription.ext_client_id,
PubSubSubscription.delivery_err_should_block,
PubSubSubscription.wait_sock_err,
PubSubSubscription.wait_non_sock_err,
PubSubSubscription.sub_pattern_matched,
PubSubSubscription.out_amqp_id,
PubSubSubscription.amqp_exchange,
PubSubSubscription.amqp_routing_key,
PubSubSubscription.files_directory_list,
PubSubSubscription.ftp_directory_list,
PubSubSubscription.sms_twilio_from,
PubSubSubscription.sms_twilio_to_list,
PubSubSubscription.smtp_is_html,
PubSubSubscription.smtp_subject,
PubSubSubscription.smtp_from,
PubSubSubscription.smtp_to_list,
PubSubSubscription.smtp_body,
PubSubSubscription.out_http_soap_id,
PubSubSubscription.out_http_soap_id.label('out_rest_http_soap_id'),
PubSubSubscription.out_http_soap_id.label('out_soap_http_soap_id'),
PubSubSubscription.out_http_method,
PubSubSubscription.delivery_endpoint,
PubSubSubscription.ws_channel_id,
PubSubSubscription.cluster_id,
PubSubTopic.name.label('topic_name'),
PubSubTopic.task_delivery_interval,
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.endpoint_type,
PubSubEndpoint.service_id,
).\
outerjoin(PubSubTopic, PubSubTopic.id==PubSubSubscription.topic_id).\
filter(PubSubEndpoint.id==PubSubSubscription.endpoint_id).\
filter(Cluster.id==PubSubSubscription.cluster_id).\
filter(Cluster.id==cluster_id).\
order_by(PubSubSubscription.id.desc())
# ################################################################################################################################
def pubsub_subscription(session, cluster_id, id):
""" A pub/sub subscription.
"""
return _pubsub_subscription(session, cluster_id).\
filter(PubSubSubscription.id==id).\
one()
# ################################################################################################################################
@query_wrapper
def pubsub_subscription_list(session, cluster_id, needs_columns=False):
""" All pub/sub subscriptions.
"""
return _pubsub_subscription(session, cluster_id)
# ################################################################################################################################
@query_wrapper
def pubsub_subscription_list_by_endpoint_id(session, cluster_id, endpoint_id, needs_columns=False):
""" A list of all pub/sub subscriptions for a given endpoint with a search results wrapper.
"""
return _pubsub_subscription(session, cluster_id).\
filter(PubSubSubscription.endpoint_id==endpoint_id)
# ################################################################################################################################
def pubsub_subscription_list_by_endpoint_id_no_search(session, cluster_id, endpoint_id):
""" A list of all pub/sub subscriptions for a given endpoint without a search results wrapper.
"""
return _pubsub_subscription(session, cluster_id).\
filter(PubSubSubscription.endpoint_id==endpoint_id)
# ################################################################################################################################
@query_wrapper
def pubsub_endpoint_summary_list(session, cluster_id, needs_columns=False):
return session.query(
PubSubEndpoint.id,
PubSubEndpoint.is_active,
PubSubEndpoint.is_internal,
PubSubEndpoint.role,
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.endpoint_type,
PubSubEndpoint.last_seen,
PubSubEndpoint.last_deliv_time,
func.count(PubSubSubscription.id).label('subscription_count'),
).\
group_by(PubSubEndpoint.id).\
outerjoin(PubSubSubscription, PubSubEndpoint.id==PubSubSubscription.endpoint_id).\
filter(Cluster.id==PubSubEndpoint.cluster_id).\
filter(Cluster.id==cluster_id).\
filter(PubSubEndpoint.role.in_(_subscriber_role)).\
order_by(PubSubEndpoint.id)
# ################################################################################################################################

# End of file: zato-common-holocene-3.2.1.tar.gz/src/zato/common/odb/query/pubsub/subscription.py
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import func
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import Cluster, PubSubEndpoint, PubSubSubscription
from zato.common.odb.query import query_wrapper
# ################################################################################################################################
_subscriber_role = (PUBSUB.ROLE.PUBLISHER_SUBSCRIBER.id, PUBSUB.ROLE.SUBSCRIBER.id)
# ################################################################################################################################
def _pubsub_endpoint_summary(session, cluster_id, topic_id):
q = session.query(
PubSubEndpoint.id,
PubSubEndpoint.is_active,
PubSubEndpoint.is_internal,
PubSubEndpoint.role,
PubSubEndpoint.name.label('endpoint_name'),
PubSubEndpoint.endpoint_type,
PubSubEndpoint.last_seen,
PubSubEndpoint.last_deliv_time,
func.count(PubSubSubscription.id).label('subscription_count'),
).\
group_by(PubSubEndpoint.id).\
outerjoin(PubSubSubscription, PubSubEndpoint.id==PubSubSubscription.endpoint_id).\
filter(Cluster.id==PubSubEndpoint.cluster_id).\
filter(Cluster.id==cluster_id).\
filter(PubSubEndpoint.role.in_(_subscriber_role))
if topic_id:
q = q.\
filter(PubSubSubscription.topic_id==topic_id)
return q
@query_wrapper
def pubsub_endpoint_summary_list(session, cluster_id, topic_id=None, needs_columns=False):
return _pubsub_endpoint_summary(session, cluster_id, topic_id).\
order_by(PubSubEndpoint.id)
def pubsub_endpoint_summary(session, cluster_id, endpoint_id, topic_id=None):
return _pubsub_endpoint_summary(session, cluster_id, topic_id).\
filter(PubSubEndpoint.id==endpoint_id).\
one()
# ################################################################################################################################

# End of file: zato-common-holocene-3.2.1.tar.gz/src/zato/common/odb/query/pubsub/endpoint.py
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import func, update
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpointEnqueuedMessage, PubSubMessage, PubSubSubscription
from zato.common.odb.query import count, _pubsub_queue_message
from zato.common.util.time_ import utcnow_as_ms
# ################################################################################################################################
PubSubEnqMsg = PubSubEndpointEnqueuedMessage
# ################################################################################################################################
_delivered = PUBSUB.DELIVERY_STATUS.DELIVERED
_initialized = PUBSUB.DELIVERY_STATUS.INITIALIZED
_to_delete = PUBSUB.DELIVERY_STATUS.TO_DELETE
_waiting = PUBSUB.DELIVERY_STATUS.WAITING_FOR_CONFIRMATION
# ################################################################################################################################
def get_messages(session, cluster_id, sub_key, batch_size, now, _initialized=_initialized, _waiting=_waiting):
""" Returns up to batch_size messages for input sub_key and mark them as being delivered.
"""
# First, get all messages but note it is SELECT FOR UPDATE
messages = _pubsub_queue_message(session, cluster_id).\
filter(PubSubSubscription.sub_key==sub_key).\
filter(PubSubEnqMsg.delivery_status==_initialized).\
filter(PubSubMessage.expiration_time>=now).\
with_for_update().\
order_by(PubSubMessage.ext_pub_time.desc()).\
limit(batch_size).\
all()
# Now, within the same transaction, update their delivery status to indicate they are being delivered
msg_id_list = [elem.msg_id for elem in messages]
if msg_id_list:
session.execute(
update(PubSubEnqMsg).\
values({
'delivery_status': _waiting,
'delivery_time': now,
'delivery_count': PubSubEnqMsg.__table__.c.delivery_count + 1,
}).\
            where(PubSubEnqMsg.cluster_id==cluster_id).\
where(PubSubEnqMsg.pub_msg_id.in_(msg_id_list))
)
# Return all messages fetched - our caller will commit the transaction thus releasing the FOR UPDATE lock
return messages
# ################################################################################################################################
def _set_delivery_status(session, cluster_id, sub_key, msg_id_list, now, status):
session.execute(
update(PubSubEnqMsg).\
values({
'delivery_status': status,
'delivery_time': now,
}).\
where(PubSubSubscription.sub_key==sub_key).\
        where(PubSubEnqMsg.cluster_id==cluster_id).\
where(PubSubEnqMsg.sub_key==PubSubSubscription.sub_key).\
where(PubSubEnqMsg.pub_msg_id.in_(msg_id_list))
)
# ################################################################################################################################
def set_to_delete(session, cluster_id, sub_key, msg_id_list, now, status=_to_delete):
""" Marks all input messages as to be deleted.
"""
_set_delivery_status(session, cluster_id, sub_key, msg_id_list, now, status)
# ################################################################################################################################
def acknowledge_delivery(session, cluster_id, sub_key, msg_id_list, now, status=_delivered):
""" Confirms delivery of all messages from msg_id_list.
"""
_set_delivery_status(session, cluster_id, sub_key, msg_id_list, now, status)
# ################################################################################################################################
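# A minimal sketch, not part of the original module: a simplified delivery pass built from the helpers above.
# It assumes the caller supplies a deliver_func callback and owns the session; the final commit releases the
# FOR UPDATE row locks taken by get_messages.

def example_delivery_pass(session, cluster_id, sub_key, deliver_func, batch_size=100):
    """ Illustrative only: fetches a batch, delivers it, then confirms delivery. """
    now = utcnow_as_ms()
    messages = get_messages(session, cluster_id, sub_key, batch_size, now)
    msg_id_list = [msg.msg_id for msg in messages]
    if msg_id_list:
        deliver_func(messages)
        acknowledge_delivery(session, cluster_id, sub_key, msg_id_list, utcnow_as_ms())
    session.commit()
    return msg_id_list

# ################################################################################################################################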
def get_queue_depth_by_sub_key(session, cluster_id, sub_key, now):
""" Returns queue depth for a given sub_key - does not include messages expired, in staging, or already delivered.
"""
current_q = session.query(PubSubEnqMsg.id).\
filter(PubSubSubscription.sub_key==PubSubEnqMsg.sub_key).\
filter(PubSubEnqMsg.is_in_staging != True).\
filter(PubSubEnqMsg.pub_msg_id==PubSubMessage.pub_msg_id).\
filter(PubSubMessage.expiration_time>=now).\
filter(PubSubSubscription.sub_key==sub_key).\
filter(PubSubEnqMsg.cluster_id==cluster_id) # noqa: E712
return count(session, current_q)
# ################################################################################################################################
def get_queue_depth_by_topic_id_list(session, cluster_id, topic_id_list):
""" Returns queue depth for a given sub_key - does not include messages expired, in staging, or already delivered.
"""
return session.query(PubSubEnqMsg.topic_id, func.count(PubSubEnqMsg.topic_id)).\
filter(PubSubEnqMsg.topic_id.in_(topic_id_list)).\
filter(PubSubEnqMsg.cluster_id==cluster_id).\
filter(PubSubEnqMsg.delivery_status==_initialized).\
filter(PubSubEnqMsg.pub_msg_id==PubSubMessage.pub_msg_id).\
filter(PubSubMessage.expiration_time>=utcnow_as_ms()).\
group_by(PubSubMessage.topic_id).\
all()
# ################################################################################################################################

# End of file: zato-common-holocene-3.2.1.tar.gz/src/zato/common/odb/query/pubsub/queue.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from logging import getLogger
# SQLAlchemy
from sqlalchemy import update
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpoint, PubSubMessage, PubSubEndpointEnqueuedMessage, PubSubSubscription, Server, \
WebSocketClient, WebSocketClientPubSubKeys
logger = getLogger('zato_pubsub.sql')
# ################################################################################################################################
_initialized = PUBSUB.DELIVERY_STATUS.INITIALIZED
_delivered = PUBSUB.DELIVERY_STATUS.DELIVERED
_wsx = PUBSUB.ENDPOINT_TYPE.WEB_SOCKETS.id
# ################################################################################################################################
sql_messages_columns = (
PubSubMessage.pub_msg_id,
PubSubMessage.pub_correl_id,
PubSubMessage.in_reply_to,
PubSubMessage.published_by_id,
PubSubMessage.ext_client_id,
PubSubMessage.group_id,
PubSubMessage.position_in_group,
PubSubMessage.pub_time,
PubSubMessage.ext_pub_time,
PubSubMessage.data,
PubSubMessage.mime_type,
PubSubMessage.priority,
PubSubMessage.expiration,
PubSubMessage.expiration_time,
PubSubMessage.size,
PubSubMessage.user_ctx,
PubSubMessage.zato_ctx,
PubSubMessage.opaque1,
PubSubEndpointEnqueuedMessage.id.label('endp_msg_queue_id'),
PubSubEndpointEnqueuedMessage.sub_key,
PubSubEndpointEnqueuedMessage.sub_pattern_matched,
)
sql_msg_id_columns = (
PubSubMessage.pub_msg_id,
)
# ################################################################################################################################
def _get_base_sql_msg_query(session, columns, sub_key_list, pub_time_max, cluster_id, _float_str=PUBSUB.FLOAT_STRING_CONVERT):
return session.query(*columns).\
filter(PubSubEndpointEnqueuedMessage.pub_msg_id==PubSubMessage.pub_msg_id).\
filter(PubSubEndpointEnqueuedMessage.sub_key.in_(sub_key_list)).\
filter(PubSubEndpointEnqueuedMessage.delivery_status==_initialized).\
filter(PubSubMessage.expiration_time > _float_str.format(pub_time_max)).\
filter(PubSubMessage.cluster_id==cluster_id)
# ################################################################################################################################
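# A minimal sketch, not part of the original API - the base query above can be
# narrowed down further, the same way the public functions below do it; here,
# down to a single message given by an example pub_msg_id.
def _sketch_get_one_msg(session, cluster_id, sub_key, pub_time_max, pub_msg_id):
    query = _get_base_sql_msg_query(session, sql_messages_columns, [sub_key], pub_time_max, cluster_id)
    return query.filter(PubSubEndpointEnqueuedMessage.pub_msg_id==pub_msg_id).first()
# ################################################################################################################################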
def _get_sql_msg_data_by_sub_key(session, cluster_id, sub_key_list, last_sql_run, pub_time_max, columns, ignore_list=None,
needs_result=True, _initialized=_initialized, _float_str=PUBSUB.FLOAT_STRING_CONVERT):
""" Returns all SQL messages queued up for a given sub_key that are not being delivered
or have not been delivered already.
"""
logger.info('Getting GD messages for `%s` last_run:%r pub_time_max:%r needs_result:%d', sub_key_list, last_sql_run,
pub_time_max, int(needs_result))
query = _get_base_sql_msg_query(session, columns, sub_key_list, pub_time_max, cluster_id)
# If there is the last SQL run time given, it means that we have to fetch all messages
# enqueued for that subscriber since that time ..
if last_sql_run:
query = query.\
filter(PubSubEndpointEnqueuedMessage.creation_time > _float_str.format(last_sql_run))
query = query.\
filter(PubSubEndpointEnqueuedMessage.creation_time <= _float_str.format(pub_time_max))
if ignore_list:
query = query.\
filter(PubSubEndpointEnqueuedMessage.id.notin_(ignore_list))
query = query.\
order_by(PubSubMessage.priority.desc()).\
order_by(PubSubMessage.ext_pub_time).\
order_by(PubSubMessage.pub_time)
return query.all() if needs_result else query
# ################################################################################################################################
def get_sql_messages_by_sub_key(session, cluster_id, sub_key_list, last_sql_run, pub_time_max, ignore_list):
return _get_sql_msg_data_by_sub_key(session, cluster_id, sub_key_list, last_sql_run, pub_time_max,
sql_messages_columns, ignore_list)
# ################################################################################################################################
def get_sql_messages_by_msg_id_list(session, cluster_id, sub_key, pub_time_max, msg_id_list):
query = _get_base_sql_msg_query(session, sql_messages_columns, [sub_key], pub_time_max, cluster_id)
return query.\
filter(PubSubEndpointEnqueuedMessage.pub_msg_id.in_(msg_id_list))
# ################################################################################################################################
def get_sql_msg_ids_by_sub_key(session, cluster_id, sub_key, last_sql_run, pub_time_max):
return _get_sql_msg_data_by_sub_key(session, cluster_id, [sub_key], last_sql_run, pub_time_max, sql_msg_id_columns,
needs_result=False)
# ################################################################################################################################
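# A minimal sketch, not part of the original API - because get_sql_msg_ids_by_sub_key
# passes needs_result=False, it returns the query object itself, so the IDs can be
# consumed lazily; the batch size of 100 is an example value.
def _sketch_iter_msg_ids(session, cluster_id, sub_key, last_sql_run, pub_time_max):
    query = get_sql_msg_ids_by_sub_key(session, cluster_id, sub_key, last_sql_run, pub_time_max)
    for row in query.yield_per(100):
        yield row.pub_msg_id
# ################################################################################################################################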
def confirm_pubsub_msg_delivered(session, cluster_id, sub_key, delivered_pub_msg_id_list, now, _delivered=_delivered):
""" Returns all SQL messages queued up for a given sub_key.
"""
session.execute(
update(PubSubEndpointEnqueuedMessage).\
values({
'delivery_status': _delivered,
'delivery_time': now
}).\
where(PubSubEndpointEnqueuedMessage.pub_msg_id.in_(delivered_pub_msg_id_list)).\
where(PubSubEndpointEnqueuedMessage.sub_key==sub_key)
)
# ################################################################################################################################
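# A minimal usage sketch, not part of the original API - confirming delivery and
# committing in one step; whether to commit here or in the caller is an assumption.
def _sketch_confirm_delivered(session, cluster_id, sub_key, delivered_ids, now):
    confirm_pubsub_msg_delivered(session, cluster_id, sub_key, delivered_ids, now)
    session.commit()
# ################################################################################################################################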
def get_delivery_server_for_sub_key(session, cluster_id, sub_key, is_wsx):
""" Returns information about which server handles delivery tasks for input sub_key, the latter must exist in DB.
Assumes that sub_key belongs to a non-WSX endpoint and then checks WebSockets in case the former query founds
no matching server.
"""
# Sub key belongs to a WebSockets client ..
if is_wsx:
return session.query(
Server.id.label('server_id'),
Server.name.label('server_name'),
Server.cluster_id,
).\
filter(WebSocketClient.server_id==Server.id).\
filter(WebSocketClient.cluster_id==cluster_id).\
filter(WebSocketClient.id==WebSocketClientPubSubKeys.client_id).\
filter(WebSocketClientPubSubKeys.sub_key==sub_key).\
first()
# .. otherwise, it is a REST, SOAP or another kind of client, but for sure it's not WebSockets.
else:
return session.query(
Server.id.label('server_id'),
Server.name.label('server_name'),
Server.cluster_id,
PubSubEndpoint.endpoint_type,
).\
filter(Server.id==PubSubSubscription.server_id).\
filter(PubSubSubscription.sub_key==sub_key).\
filter(PubSubSubscription.endpoint_id==PubSubEndpoint.id).\
filter(PubSubSubscription.cluster_id==cluster_id).\
first()
# ################################################################################################################################
# End of file: src/zato/common/odb/query/pubsub/delivery.py
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import true as sa_true
# Zato
from zato.common.api import PUBSUB
from zato.common.odb.model import PubSubEndpointEnqueuedMessage, PubSubMessage
# ################################################################################################################################
_delivered = PUBSUB.DELIVERY_STATUS.DELIVERED
_to_delete = PUBSUB.DELIVERY_STATUS.TO_DELETE
# ################################################################################################################################
def delete_msg_delivered(session, cluster_id, topic_id):
""" Deletes from topics all messages that have been delivered from their queues.
"""
# When a message is published and there are subscribers for it, its PubSubMessage.is_in_sub_queue attribute
# is set to True and a reference to that message is stored in PubSubEndpointEnqueuedMessage. Then, once the message
# is delivered to all subscribers, a background process calling delete_enq_delivered deletes all the references.
# Therefore, we can delete all PubSubMessage that have is_in_sub_queue = True because it means that there must have
# been subscribers to it and, seeing as there are no references to it anymore, it means that they must have been
# already deleted, so we can safely delete the PubSubMessage itself.
enqueued_subquery = session.query(PubSubMessage.pub_msg_id).\
filter(PubSubMessage.cluster_id==cluster_id).\
filter(PubSubMessage.topic_id==topic_id).\
filter(PubSubMessage.is_in_sub_queue==sa_true()).\
filter(PubSubMessage.pub_msg_id==PubSubEndpointEnqueuedMessage.pub_msg_id)
return session.query(PubSubMessage).\
filter(PubSubMessage.pub_msg_id.notin_(enqueued_subquery)).\
delete(synchronize_session=False)
# ################################################################################################################################
def delete_msg_expired(session, cluster_id, topic_id, now):
""" Deletes all expired messages from all topics.
"""
q = session.query(PubSubMessage).\
filter(PubSubMessage.cluster_id==cluster_id).\
filter(PubSubMessage.expiration_time<=now)
if topic_id:
q = q.filter(PubSubMessage.topic_id==topic_id)
return q.delete()
# ################################################################################################################################
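# A minimal sketch, not part of the original API - one possible cleanup pass that
# chains the deletion helpers from this module; note that delete_enq_delivered and
# delete_enq_marked_deleted are only defined further below, which is fine because
# they are resolved at call time. The commit policy is an assumption of this sketch.
def _sketch_cleanup_topic(session, cluster_id, topic_id, now):
    total = 0
    total += delete_msg_expired(session, cluster_id, topic_id, now)
    total += delete_enq_delivered(session, cluster_id, topic_id)
    total += delete_enq_marked_deleted(session, cluster_id, topic_id)
    total += delete_msg_delivered(session, cluster_id, topic_id)
    session.commit()
    return total
# ################################################################################################################################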
def _delete_enq_msg_by_status(session, cluster_id, topic_id, status):
""" Deletes all messages already delivered or the ones that have been explicitly marked for deletion from delivery queues.
"""
q = session.query(PubSubEndpointEnqueuedMessage).\
filter(PubSubEndpointEnqueuedMessage.cluster_id==cluster_id).\
filter(PubSubEndpointEnqueuedMessage.delivery_status==status)
if topic_id:
q = q.filter(PubSubEndpointEnqueuedMessage.topic_id==topic_id)
return q.delete()
# ################################################################################################################################
def delete_enq_delivered(session, cluster_id, topic_id, status=_delivered):
""" Deletes all messages already delivered or the ones that have been explicitly marked for deletion from delivery queues.
"""
return _delete_enq_msg_by_status(session, cluster_id, topic_id, status)
# ################################################################################################################################
def delete_enq_marked_deleted(session, cluster_id, topic_id, status=_to_delete):
""" Deletes all messages that have been explicitly marked for deletion from delivery queues.
"""
return _delete_enq_msg_by_status(session, cluster_id, topic_id, status)
# ################################################################################################################################
# End of file: src/zato/common/odb/query/pubsub/cleanup.py
# stdlib
from logging import DEBUG, getLogger
from traceback import format_exc
# SQLAlchemy
from sqlalchemy.exc import IntegrityError
# Zato
from zato.common.api import PUBSUB
from zato.common.exception import BadRequest
from zato.common.odb.model import PubSubEndpoint, PubSubEndpointEnqueuedMessage, PubSubEndpointTopic, PubSubMessage, PubSubTopic
from zato.common.util.sql import sql_op_with_deadlock_retry
# ################################################################################################################################
logger_zato = getLogger('zato')
logger_pubsub = getLogger('zato_pubsub')
has_debug = logger_zato.isEnabledFor(DEBUG) or logger_pubsub.isEnabledFor(DEBUG)
# ################################################################################################################################
MsgInsert = PubSubMessage.__table__.insert
EndpointTopicInsert = PubSubEndpointTopic.__table__.insert
EnqueuedMsgInsert = PubSubEndpointEnqueuedMessage.__table__.insert
MsgTable = PubSubMessage.__table__
TopicTable = PubSubTopic.__table__
EndpointTable = PubSubEndpoint.__table__
EndpointTopicTable = PubSubEndpointTopic.__table__
# ################################################################################################################################
_initialized=PUBSUB.DELIVERY_STATUS.INITIALIZED
# ################################################################################################################################
def _sql_publish_with_retry(session, cid, cluster_id, topic_id, subscriptions_by_topic, gd_msg_list, now):
""" A low-level implementation of sql_publish_with_retry.
"""
# Publish messages - INSERT rows, each representing an individual message
topic_messages_inserted = insert_topic_messages(session, cid, gd_msg_list)
if has_debug:
sub_keys_by_topic = sorted(elem.sub_key for elem in subscriptions_by_topic)
logger_zato.info('With topic_messages_inserted `%s` `%s` `%s` `%s` `%s` `%s` `%s`',
cid, topic_messages_inserted, cluster_id, topic_id, sub_keys_by_topic, gd_msg_list, now)
if topic_messages_inserted:
# Move messages to each subscriber's queue
if subscriptions_by_topic:
try:
insert_queue_messages(session, cluster_id, subscriptions_by_topic, gd_msg_list, topic_id, now, cid)
if has_debug:
logger_zato.info('Inserted queue messages `%s` `%s` `%s` `%s` `%s` `%s`', cid, cluster_id,
sub_keys_by_topic, gd_msg_list, topic_id, now)
# No integrity error / no deadlock = all good
return True
except IntegrityError:
if has_debug:
logger_zato.info('Caught IntegrityError (_sql_publish_with_retry) `%s` `%s`', cid, format_exc())
                # An integrity error here means that our transaction, the whole of it,
                # was rolled back - this will happen on MySQL in case of deadlocks, which may
                # occur because delivery tasks update the table that insert_queue_messages wants to insert to.
# We need to return False for our caller to understand that the whole transaction needs
# to be repeated.
return False
else:
if has_debug:
logger_zato.info('No subscribers in `%s`', cid)
# No subscribers, also good
return True
# ################################################################################################################################
def sql_publish_with_retry(*args):
""" Populates SQL structures with new messages for topics and their counterparts in subscriber queues.
In case of a deadlock will retry the whole transaction, per MySQL's requirements, which rolls back
the whole of it rather than a deadlocking statement only.
"""
is_ok = False
while not is_ok:
if has_debug:
logger_zato.info('sql_publish_with_retry -> is_ok.1:`%s`', is_ok)
is_ok = _sql_publish_with_retry(*args)
if has_debug:
logger_zato.info('sql_publish_with_retry -> is_ok.2:`%s`', is_ok)
# ################################################################################################################################
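# A minimal usage sketch, not part of the original API - how a caller could drive
# the retry loop above; the argument order mirrors _sql_publish_with_retry and the
# commit policy is an assumption of this sketch.
def _sketch_publish(session, cid, cluster_id, topic_id, subscriptions_by_topic, gd_msg_list, now):
    sql_publish_with_retry(session, cid, cluster_id, topic_id, subscriptions_by_topic, gd_msg_list, now)
    session.commit()
# ################################################################################################################################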
def _insert_topic_messages(session, msg_list):
""" A low-level implementation for insert_topic_messages.
"""
session.execute(MsgInsert().values(msg_list))
# ################################################################################################################################
def insert_topic_messages(session, cid, msg_list):
""" Publishes messages to a topic, i.e. runs an INSERT that inserts rows, one for each message.
"""
try:
return sql_op_with_deadlock_retry(cid, 'insert_topic_messages', _insert_topic_messages, session, msg_list)
# Catch duplicate MsgId values sent by clients
except IntegrityError as e:
if has_debug:
logger_zato.info('Caught IntegrityError (insert_topic_messages) `%s` `%s`', cid, format_exc())
str_e = str(e)
if 'pubsb_msg_pubmsg_id_idx' in str_e:
raise BadRequest(cid, 'Duplicate msg_id:`{}`'.format(str_e))
else:
raise
# ################################################################################################################################
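# A minimal sketch, not part of the original API - any other statement can be guarded
# against deadlocks the same way insert_topic_messages is above; the DELETE below is
# an example only and does not exist in the original module.
def _sketch_delete_msgs_with_retry(session, cid, msg_id_list):
    def _delete(session, msg_id_list):
        session.execute(MsgTable.delete().where(MsgTable.c.pub_msg_id.in_(msg_id_list)))
    return sql_op_with_deadlock_retry(cid, '_sketch_delete_msgs_with_retry', _delete, session, msg_id_list)
# ################################################################################################################################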
def _insert_queue_messages(session, queue_msgs):
""" A low-level call to enqueue messages.
"""
session.execute(EnqueuedMsgInsert().values(queue_msgs))
# ################################################################################################################################
def insert_queue_messages(session, cluster_id, subscriptions_by_topic, msg_list, topic_id, now, cid, _initialized=_initialized,
_float_str=PUBSUB.FLOAT_STRING_CONVERT):
""" Moves messages to each subscriber's queue, i.e. runs an INSERT that adds relevant references to the topic message.
    Note that each message's is_in_sub_queue flag is set at publication time rather than here.
"""
queue_msgs = []
for sub in subscriptions_by_topic:
for msg in msg_list:
# Enqueues the message for each subscriber
queue_msgs.append({
'creation_time': _float_str.format(now),
'pub_msg_id': msg['pub_msg_id'],
'endpoint_id': sub.endpoint_id,
'topic_id': topic_id,
'sub_key': sub.sub_key,
'cluster_id': cluster_id,
'sub_pattern_matched': msg['sub_pattern_matched'][sub.sub_key],
})
# Move the message to endpoint queues
return sql_op_with_deadlock_retry(cid, 'insert_queue_messages', _insert_queue_messages, session, queue_msgs)
# ################################################################################################################################
# End of file: src/zato/common/odb/query/pubsub/publish.py
from __future__ import absolute_import, division, print_function, unicode_literals
# SQLAlchemy
from sqlalchemy import Boolean, Column, DateTime, false as sa_false, ForeignKey, Index, Integer, Sequence, String, Text, \
UniqueConstraint
from sqlalchemy.ext.declarative import declared_attr
# Zato
from zato.common.odb.model.base import Base, _JSON
# ################################################################################################################################
class _SSOGroup(Base):
__tablename__ = 'zato_sso_group'
__table_args__ = (
UniqueConstraint('name', 'source', name='zato_g_name_uq'),
UniqueConstraint('group_id', name='zato_g_gid_uq'),
{})
# Not exposed publicly, used only for SQL joins
id = Column(Integer, Sequence('zato_sso_group_id_seq'), primary_key=True)
is_active = Column(Boolean(), nullable=False) # Currently unused and always set to True
is_internal = Column(Boolean(), nullable=False, default=False)
# Publicly visible
group_id = Column(String(191), nullable=False)
name = Column(String(191), nullable=False)
source = Column(String(191), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# Groups may be optionally nested
parent_id = Column(Integer, ForeignKey('zato_sso_group.id', ondelete='CASCADE'), nullable=True)
# ################################################################################################################################
class _SSOUser(Base):
__tablename__ = 'zato_sso_user'
__table_args__ = (
UniqueConstraint('username', name='zato_u_usrn_uq'),
UniqueConstraint('user_id', name='zato_user_id_uq'),
Index('zato_u_email_idx', 'email', unique=False, mysql_length={'email':767}),
Index('zato_u_appr_stat_idx', 'approval_status', unique=False),
Index('zato_u_dspn_idx', 'display_name_upper', unique=False),
Index('zato_u_alln_idx', 'first_name_upper', 'middle_name_upper', 'last_name_upper', unique=False),
Index('zato_u_lastn_idx', 'last_name_upper', unique=False),
Index('zato_u_sigst_idx', 'sign_up_status', unique=False),
Index('zato_u_sigctok_idx', 'sign_up_confirm_token', unique=True),
{})
# Not exposed publicly, used only for SQL joins
id = Column(Integer, Sequence('zato_sso_user_id_seq'), primary_key=True)
# Publicly visible
user_id = Column(String(191), nullable=False)
is_active = Column(Boolean(), nullable=False) # Currently unused and always set to True
is_internal = Column(Boolean(), nullable=False, default=False)
is_super_user = Column(Boolean(), nullable=False, default=False)
is_locked = Column(Boolean(), nullable=False, default=False)
locked_time = Column(DateTime(), nullable=True)
# Creation metadata, e.g. what this user's remote IP was
creation_ctx = Column(Text(), nullable=False)
# Note that this is not an FK - this is on purpose to keep this information around
# even if parent row is deleted.
locked_by = Column(String(191), nullable=True)
approval_status = Column(String(191), nullable=False)
approval_status_mod_time = Column(DateTime(), nullable=False) # When user was approved or rejected
approval_status_mod_by = Column(String(191), nullable=False) # Same comment as in locked_by
# Basic information, always required
username = Column(String(191), nullable=False)
password = Column(Text(), nullable=False)
password_is_set = Column(Boolean(), nullable=False)
password_must_change = Column(Boolean(), nullable=False)
password_last_set = Column(DateTime(), nullable=False)
password_expiry = Column(DateTime(), nullable=False)
# Sign-up information, possibly used in API workflows
sign_up_status = Column(String(191), nullable=False)
sign_up_time = Column(DateTime(), nullable=False)
sign_up_confirm_time = Column(DateTime(), nullable=True)
sign_up_confirm_token = Column(String(191), nullable=False)
# Won't be always needed
email = Column(Text(), nullable=True)
# Various cultures don't have a notion of first or last name and display_name is the one that can be used in that case.
display_name = Column(String(191), nullable=True)
first_name = Column(String(191), nullable=True)
middle_name = Column(String(191), nullable=True)
last_name = Column(String(191), nullable=True)
# Same as above but upper-cased for look-up / indexing purposes
display_name_upper = Column(String(191), nullable=True)
first_name_upper = Column(String(191), nullable=True)
middle_name_upper = Column(String(191), nullable=True)
last_name_upper = Column(String(191), nullable=True)
# Rate limiting
is_rate_limit_active = Column(Boolean(), nullable=True)
rate_limit_type = Column(String(40), nullable=True)
rate_limit_def = Column(Text(), nullable=True)
rate_limit_check_parent_def = Column(Boolean(), nullable=True)
# TOTP
is_totp_enabled = Column(Boolean(), nullable=False, server_default=sa_false())
totp_key = Column(Text(), nullable=True)
totp_label = Column(Text(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# ################################################################################################################################
class _SSOUserGroup(Base):
""" An N:N mapping of users to their groups.
"""
__tablename__ = 'zato_sso_user_group'
__table_args__ = (
UniqueConstraint('user_id', 'group_id', name='zato_ug_id_uq'),
{})
# Not exposed publicly, used only to have a natural FK
id = Column(Integer, Sequence('zato_sso_ug_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
user_id = Column(Integer, ForeignKey('zato_sso_user.id', ondelete='CASCADE'), nullable=False)
group_id = Column(Integer, ForeignKey('zato_sso_group.id', ondelete='CASCADE'), nullable=False)
# ################################################################################################################################
class _SSOSession(Base):
__tablename__ = 'zato_sso_session'
__table_args__ = (
Index('zato_sso_sust_idx', 'ust', unique=True),
Index('zato_sso_extsi_idx', 'ext_session_id', unique=False, mysql_length={'ext_session_id':767}),
{})
# Not exposed publicly, used only for SQL joins
id = Column(Integer, Sequence('zato_sso_sid_seq'), primary_key=True)
# Publicly visible session identifier (user session token)
ust = Column(String(191), nullable=False)
creation_time = Column(DateTime(), nullable=False)
expiration_time = Column(DateTime(), nullable=False)
remote_addr = Column(Text(), nullable=False)
user_agent = Column(Text(), nullable=False)
auth_type = Column(Text(), nullable=False)
auth_principal = Column(Text(), nullable=False)
# ID of a session external to SSO that is linked to this one,
# where external may still mean JWT or Basic Auth,
# but it is not a built-in SSO one.
ext_session_id = Column(Text(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
@declared_attr
def user_id(cls):
return Column(Integer, ForeignKey('zato_sso_user.id', ondelete='CASCADE'), nullable=False)
# ################################################################################################################################
class _SSOAttr(Base):
__tablename__ = 'zato_sso_attr'
__table_args__ = (
UniqueConstraint('name', 'is_session_attr', 'user_id', '_ust_string', name='zato_attr_name_uq'),
Index('zato_attr_usr', 'user_id', unique=False),
Index('zato_attr_usr_ust', 'user_id', 'ust', unique=False),
Index('zato_attr_usr_name', 'user_id', 'name', unique=False),
Index('zato_attr_usr_ust_name', 'user_id', 'ust', 'name', unique=True),
{})
# Not exposed publicly, used only because SQLAlchemy requires an FK
id = Column(Integer, Sequence('zato_sso_attr_seq'), primary_key=True)
creation_time = Column(DateTime(), nullable=False)
last_modified = Column(DateTime(), nullable=True)
expiration_time = Column(DateTime(), nullable=True)
is_session_attr = Column(Boolean(), nullable=False)
is_encrypted = Column(Boolean(), nullable=False, default=False)
serial_method = Column(String(20), nullable=False, default='json')
name = Column(String(191), nullable=False)
value = Column(Text(), nullable=True)
# Unlike ust, this cannot be NULL so it may be used for practical purposes in the unique constraint 'zato_attr_name_uq',
# otherwise all NULL values are considered different (or at least uncomparable) and API-wise, it is not possible
# to construct a sensible unique constraint.
_ust_string = Column(String(191), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
user_id = Column(String(191), ForeignKey('zato_sso_user.user_id', ondelete='CASCADE'), nullable=False)
ust = Column(String(191), ForeignKey('zato_sso_session.ust', ondelete='CASCADE'), nullable=True)
# ################################################################################################################################
class _SSOLinkedAuth(Base):
__tablename__ = 'zato_sso_linked_auth'
__table_args__ = (
Index('auth_idx', 'auth_type', 'user_id', 'auth_id', 'auth_principal', unique=True,
mysql_length={'auth_type':191, 'user_id':191, 'auth_principal':191}),
{})
# Not exposed publicly, used only because SQLAlchemy requires an FK
id = Column(Integer, Sequence('zato_sso_linked_auth_seq'), primary_key=True)
is_active = Column(Boolean(), nullable=False) # Currently unused and always set to True
is_internal = Column(Boolean(), nullable=False, default=False)
creation_time = Column(DateTime(), nullable=False)
last_modified = Column(DateTime(), nullable=True)
# If True, auth_principal will point to an account/user defined externally to Zato,
# e.g. in a system that Zato has no direct authentication support for.
# Otherwise, if False, auth_id will be filled in.
has_ext_principal = Column(Boolean(), nullable=False)
# A label describing authentication type
auth_type = Column(Text(191), nullable=False)
#
# Will be provided if has_ext_principal is False, in which case it will point to one of sec_base.id definitions.
#
# Note that if the SSO ODB is installed in a standalone database, this column will not be an FK
# because there will be no parent sec_base.id column to point to. The Alembic logic to add
# the FK after the table is created is implemented in cli/create_odb.py:Create.
auth_id = Column(Integer, nullable=True)
# Will be given if auth_id is not provided.
auth_principal = Column(Text(191), nullable=True)
# E.g. name of an environment this link is valid in - useful in cases when the same user
# has multiple linked accounts, different in different auth sources (environments).
auth_source = Column(Text(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# SSO user this entry links to
user_id = Column(String(191), ForeignKey('zato_sso_user.user_id', ondelete='CASCADE'), nullable=False)
# ################################################################################################################################
class _SSOPasswordReset(Base):
__tablename__ = 'zato_sso_password_reset'
__table_args__ = (
Index('zato_prt_value_type', 'token', 'type_', unique=True),
{})
# Not exposed publicly, used only because SQLAlchemy requires an FK
id = Column(Integer, Sequence('zato_sso_flow_prt_seq'), primary_key=True)
creation_time = Column(DateTime(), nullable=False)
expiration_time = Column(DateTime(), nullable=False)
# Creation metadata in JSON
creation_ctx = Column(_JSON(), nullable=False)
# The actual PRT (password reset token)
token = Column(String(191), nullable=False)
# PRT type - what kind is it of, e.g. a Zato built-in one or an external one?
type_ = Column(String(191), nullable=False)
# This key is used to reset the password after the PRT has been accessed
reset_key = Column(String(191), nullable=False)
# This is set when the PRT is accessed in order to set a time limit
# for the password reset procedure (using prt.password_change_session_duration from sso.conf)
reset_key_exp_time = Column(DateTime(), nullable=False)
# Will be set to True when the PRT has been accessed in any way,
# e.g. a user clicks on a link.
has_been_accessed = Column(Boolean(), nullable=False, default=False)
# When was the PRT accessed
access_time = Column(DateTime(), nullable=True)
# Access metadata in JSON
access_ctx = Column(_JSON(), nullable=True)
# Will be set to True when a password is reset using this PRT and reset_key
is_password_reset = Column(Boolean(), nullable=False, default=False)
# When was the password reset
password_reset_time = Column(DateTime(), nullable=True)
# Password reset metadata in JSON
password_reset_ctx = Column(_JSON(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# SSO user this entry links to
user_id = Column(String(191), ForeignKey('zato_sso_user.user_id', ondelete='CASCADE'), nullable=False)
# ################################################################################################################################
# End of file: src/zato/common/odb/model/sso.py
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from ftplib import FTP_PORT
# SQLAlchemy
from sqlalchemy import BigInteger, Boolean, Column, DateTime, Enum, false as sa_false, ForeignKey, Index, Integer, LargeBinary, \
Numeric, Sequence, SmallInteger, String, Text, true as sa_true, UniqueConstraint
from sqlalchemy.orm import backref, relationship
# Zato
from zato.common.api import AMQP, CASSANDRA, CLOUD, DATA_FORMAT, HTTP_SOAP_SERIALIZATION_TYPE, MISC, NOTIF, ODOO, SAP, PUBSUB, \
SCHEDULER, STOMP, PARAMS_PRIORITY, URL_PARAMS_PRIORITY
from zato.common.json_internal import json_dumps
from zato.common.odb.const import WMQ_DEFAULT_PRIORITY
from zato.common.odb.model.base import Base, _JSON
from zato.common.odb.model.sso import _SSOAttr, _SSOPasswordReset, _SSOGroup, _SSOLinkedAuth, _SSOSession, _SSOUser
# ################################################################################################################################
def to_json(model, return_as_dict=False):
""" Returns a JSON representation of an SQLAlchemy-backed object.
"""
out = {}
out['fields'] = {}
out['pk'] = getattr(model, 'id')
for col in model._sa_class_manager.mapper.mapped_table.columns:
out['fields'][col.name] = getattr(model, col.name)
if return_as_dict:
return out
else:
return json_dumps([out])
# ################################################################################################################################
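# A minimal usage sketch, not part of the original API - to_json above can serialise
# any mapped object from this module, e.g. a Cluster looked up by an example name;
# Cluster is defined further below but is resolved at call time.
def _sketch_cluster_to_json(session, name='cluster1'):
    cluster = session.query(Cluster).filter(Cluster.name==name).one()
    return to_json(cluster)
# ################################################################################################################################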
class SSOGroup(_SSOGroup):
pass
# ################################################################################################################################
class SSOUser(_SSOUser):
pass
# ################################################################################################################################
class SSOSession(_SSOSession):
pass
# ################################################################################################################################
class SSOAttr(_SSOAttr):
pass
# ################################################################################################################################
class SSOLinkedAuth(_SSOLinkedAuth):
pass
# ################################################################################################################################
class SSOPasswordReset(_SSOPasswordReset):
pass
# ################################################################################################################################
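# A minimal usage sketch, not part of the original API - creating an SSO group with
# the concrete subclass above; only columns declared in the parent _SSOGroup are set
# and all the values are examples.
def _sketch_create_sso_group(session):
    group = SSOGroup()
    group.is_active = True
    group.is_internal = False
    group.group_id = 'zgrp-example-1'
    group.name = 'example.group'
    group.source = 'example.source'
    session.add(group)
    session.commit()
    return group
# ################################################################################################################################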
class AlembicRevision(Base):
""" A table for Alembic to store its revision IDs for SQL migrations.
    Note that Alembic, as of version 0.6.0, the latest one at the time of this writing (Sun, Jun 8 2014),
    does not declare 'version_num' to be a primary key, but we need to because SQLAlchemy always requires one.
"""
__tablename__ = 'alembic_version'
version_num = Column(String(32), primary_key=True)
def __init__(self, version_num=None):
self.version_num = version_num
# ################################################################################################################################
class ZatoInstallState(Base):
""" Contains a row for each Zato installation belonging to that particular
    ODB. For instance, installing Zato 1.0 will add a new row and installing 1.1
    will add another one.
    """
__tablename__ = 'install_state'
id = Column(Integer, Sequence('install_state_seq'), primary_key=True)
version = Column(Integer, unique=True, nullable=False)
install_time = Column(DateTime(), nullable=False)
source_host = Column(String(200), nullable=False)
source_user = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
def __init__(self, id=None, version=None, install_time=None, source_host=None, source_user=None):
self.id = id
self.version = version
self.install_time = install_time
self.source_host = source_host
self.source_user = source_user
# ################################################################################################################################
class Cluster(Base):
""" Represents a Zato cluster.
"""
__tablename__ = 'cluster'
id = Column(Integer, Sequence('cluster_id_seq'), primary_key=True)
name = Column(String(200), unique=True, nullable=False)
description = Column(String(1000), nullable=True)
odb_type = Column(String(30), nullable=False)
odb_host = Column(String(200), nullable=True)
odb_port = Column(Integer(), nullable=True)
odb_user = Column(String(200), nullable=True)
odb_db_name = Column(String(200), nullable=True)
odb_schema = Column(String(200), nullable=True)
broker_host = Column(String(200), nullable=False)
broker_port = Column(Integer(), nullable=False)
lb_host = Column(String(200), nullable=False)
lb_port = Column(Integer(), nullable=False)
lb_agent_port = Column(Integer(), nullable=False)
cw_srv_id = Column(Integer(), nullable=True)
cw_srv_keep_alive_dt = Column(DateTime(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
def __init__(self, id=None, name=None, description=None, odb_type=None, odb_host=None, odb_port=None, odb_user=None,
odb_db_name=None, odb_schema=None, broker_host=None, broker_port=None, lb_host=None, lb_port=None,
lb_agent_port=None, cw_srv_id=None, cw_srv_keep_alive_dt=None):
self.id = id
self.name = name
self.description = description
self.odb_type = odb_type
self.odb_host = odb_host
self.odb_port = odb_port
self.odb_user = odb_user
self.odb_db_name = odb_db_name
self.odb_schema = odb_schema
self.broker_host = broker_host
self.broker_port = broker_port
self.lb_host = lb_host
self.lb_agent_port = lb_agent_port
self.lb_port = lb_port
self.cw_srv_id = cw_srv_id
self.cw_srv_keep_alive_dt = cw_srv_keep_alive_dt
def to_json(self):
return to_json(self)
# ################################################################################################################################
class Server(Base):
""" Represents a Zato server.
"""
__tablename__ = 'server'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('server_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
host = Column(String(400), nullable=True)
bind_host = Column(String(400), nullable=True)
bind_port = Column(Integer(), nullable=True)
preferred_address = Column(String(400), nullable=True)
crypto_use_tls = Column(Boolean(), nullable=True)
# If the server's request to join a cluster has been accepted, and for now
# it will always be.
last_join_status = Column(String(40), nullable=True)
last_join_mod_date = Column(DateTime(), nullable=True)
last_join_mod_by = Column(String(200), nullable=True)
# Whether the server's up or not
up_status = Column(String(40), nullable=True)
up_mod_date = Column(DateTime(), nullable=True)
token = Column(String(32), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('servers', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, cluster=None, token=None, last_join_status=None, last_join_mod_date=None,
last_join_mod_by=None):
self.id = id
self.name = name
self.cluster = cluster
self.token = token
self.last_join_status = last_join_status
self.last_join_mod_date = last_join_mod_date
self.last_join_mod_by = last_join_mod_by
self.has_lb_config = False # Not used by the database
self.in_lb = False # Not used by the database
self.lb_state = None # Not used by the database
self.lb_address = None # Not used by the database
self.may_be_deleted = None # Not used by the database
self.up_mod_date_user = None # Not used by the database
# ################################################################################################################################
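# A minimal usage sketch, not part of the original API - registering a new server
# in an existing cluster; the name, token and join status are example values.
def _sketch_add_server(session, cluster):
    server = Server(name='server1', cluster=cluster, token='d8d513db3bbd41a5a0f0e6a2c5c9f7aa')
    server.last_join_status = 'accepted'
    session.add(server)
    session.commit()
    return server
# ################################################################################################################################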
class SecurityBase(Base):
""" A base class for all the security definitions.
"""
__tablename__ = 'sec_base'
__table_args__ = (UniqueConstraint('cluster_id', 'name'),
UniqueConstraint('cluster_id', 'username', 'sec_type'), {})
__mapper_args__ = {'polymorphic_on': 'sec_type'}
id = Column(Integer, Sequence('sec_base_seq'), primary_key=True)
name = Column(String(200), nullable=False)
# It's nullable because some children classes do not use usernames
username = Column(String(200), nullable=True)
password = Column(String(1000), nullable=True)
password_type = Column(String(45), nullable=True)
is_active = Column(Boolean(), nullable=False)
sec_type = Column(String(45), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('security_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class MultiSecurity(Base):
""" An N:N mapping between security definitions and objects making use of them.
"""
__tablename__ = 'sec_multi'
__table_args__ = (UniqueConstraint('cluster_id', 'conn_id', 'conn_type', 'security_id', 'is_channel', 'is_outconn'), {})
id = Column(Integer, Sequence('sec_multi_seq'), primary_key=True)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False)
priority = Column(Integer(), nullable=False)
conn_id = Column(String(100), nullable=False)
conn_type = Column(String(100), nullable=False)
is_channel = Column(Boolean(), nullable=False)
is_outconn = Column(Boolean(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=False)
security = relationship(SecurityBase, backref=backref('sec_multi_list', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('sec_multi_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class HTTPBasicAuth(SecurityBase):
""" An HTTP Basic Auth definition.
"""
__tablename__ = 'sec_basic_auth'
__mapper_args__ = {'polymorphic_identity': 'basic_auth'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
realm = Column(String(200), nullable=False)
def __init__(self, id=None, name=None, is_active=None, username=None, realm=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.realm = realm
self.password = password
self.cluster = cluster
# ################################################################################################################################
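# A minimal usage sketch, not part of the original API - defining a new HTTP Basic Auth
# credential with the class above; every value is an example.
def _sketch_basic_auth(session, cluster):
    sec = HTTPBasicAuth(None, 'example.basic.auth', True, 'example.user', 'Example Realm', 'example.password', cluster)
    session.add(sec)
    session.commit()
    return sec
# ################################################################################################################################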
class JWT(SecurityBase):
""" A set of JavaScript Web Token (JWT) credentials.
"""
__tablename__ = 'sec_jwt'
__mapper_args__ = {'polymorphic_identity': 'jwt'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
ttl = Column(Integer, nullable=False)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, ttl=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.ttl = ttl
self.cluster = cluster
# ################################################################################################################################
class WSSDefinition(SecurityBase):
""" A WS-Security definition.
"""
__tablename__ = 'sec_wss_def'
__mapper_args__ = {'polymorphic_identity':'wss'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
reject_empty_nonce_creat = Column(Boolean(), nullable=False)
reject_stale_tokens = Column(Boolean(), nullable=True)
reject_expiry_limit = Column(Integer(), nullable=False)
nonce_freshness_time = Column(Integer(), nullable=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, password_type=None,
reject_empty_nonce_creat=None, reject_stale_tokens=None, reject_expiry_limit=None, nonce_freshness_time=None,
cluster=None, password_type_raw=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.password_type = password_type
self.reject_empty_nonce_creat = reject_empty_nonce_creat
self.reject_stale_tokens = reject_stale_tokens
self.reject_expiry_limit = reject_expiry_limit
self.nonce_freshness_time = nonce_freshness_time
self.cluster = cluster
self.password_type_raw = password_type_raw
# ################################################################################################################################
class OAuth(SecurityBase):
""" Stores OAuth credentials.
"""
__tablename__ = 'sec_oauth'
__mapper_args__ = {'polymorphic_identity':'oauth'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
proto_version = Column(String(32), nullable=False)
sig_method = Column(String(32), nullable=False) # HMAC-SHA1 or PLAINTEXT
max_nonce_log = Column(Integer(), nullable=False)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, proto_version=None, sig_method=None,
max_nonce_log=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.proto_version = proto_version
self.sig_method = sig_method
self.max_nonce_log = max_nonce_log
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class NTLM(SecurityBase):
""" Stores NTLM definitions.
"""
__tablename__ = 'sec_ntlm'
__mapper_args__ = {'polymorphic_identity': 'ntlm'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
    def __init__(self, id=None, name=None, is_active=None, username=None, password=None, cluster=None):
        self.id = id
        self.name = name
        self.is_active = is_active
        self.username = username
        self.password = password
        self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class AWSSecurity(SecurityBase):
""" Stores Amazon credentials.
"""
__tablename__ = 'sec_aws'
__mapper_args__ = {'polymorphic_identity': 'aws'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class OpenStackSecurity(SecurityBase):
""" Stores OpenStack credentials (no longer used, to be removed).
"""
__tablename__ = 'sec_openstack'
__mapper_args__ = {'polymorphic_identity': 'openstack'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
    def __init__(self, id=None, name=None, is_active=None, username=None, password=None, cluster=None):
        self.id = id
        self.name = name
        self.is_active = is_active
        self.username = username
        self.password = password
        self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class APIKeySecurity(SecurityBase):
""" Stores API keys.
"""
__tablename__ = 'sec_apikey'
__mapper_args__ = {'polymorphic_identity': 'apikey'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class XPathSecurity(SecurityBase):
""" Stores XPath-based credentials.
"""
__tablename__ = 'sec_xpath'
__mapper_args__ = {'polymorphic_identity':'xpath_sec'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
username_expr = Column(String(200), nullable=False)
password_expr = Column(String(200), nullable=True)
def __init__(self, id=None, name=None, is_active=None, username=None, password=None, username_expr=None, password_expr=None,
cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.username = username
self.password = password
self.username_expr = username_expr
self.password_expr = password_expr
self.cluster = cluster
def to_json(self):
return to_json(self)
# ################################################################################################################################
class TLSKeyCertSecurity(SecurityBase):
""" Stores information regarding TLS key/cert pairs used in outgoing connections.
"""
__tablename__ = 'sec_tls_key_cert'
__mapper_args__ = {'polymorphic_identity':'tls_key_cert'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
info = Column(LargeBinary(200000), nullable=False)
auth_data = Column(LargeBinary(200000), nullable=False)
# ################################################################################################################################
class TLSChannelSecurity(SecurityBase):
""" Stores information regarding TLS client certificate-based security definitions.
"""
__tablename__ = 'sec_tls_channel'
__mapper_args__ = {'polymorphic_identity':'tls_channel_sec'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
value = Column(LargeBinary(200000), nullable=False)
# ################################################################################################################################
class VaultConnection(SecurityBase):
""" Stores information on how to connect to Vault and how to authenticate against it by default.
"""
__tablename__ = 'sec_vault_conn'
__mapper_args__ = {'polymorphic_identity':'vault_conn_sec'}
id = Column(Integer, ForeignKey('sec_base.id'), primary_key=True)
url = Column(String(200), nullable=False)
token = Column(String(200), nullable=True)
default_auth_method = Column(String(200), nullable=True)
timeout = Column(Integer, nullable=False)
allow_redirects = Column(Boolean(), nullable=False)
tls_verify = Column(Boolean(), nullable=False)
tls_key_cert_id = Column(Integer, ForeignKey('sec_tls_key_cert.id', ondelete='CASCADE'), nullable=True)
tls_ca_cert_id = Column(Integer, ForeignKey('sec_tls_ca_cert.id', ondelete='CASCADE'), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
service = relationship('Service', backref=backref('vault_conn_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class TLSCACert(Base):
""" Stores information regarding CA certs.
"""
__tablename__ = 'sec_tls_ca_cert'
id = Column(Integer, Sequence('sec_tls_ca_cert_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(LargeBinary(200000), nullable=False)
info = Column(LargeBinary(200000), nullable=False)
is_active = Column(Boolean(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('ca_cert_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class HTTPSOAP(Base):
""" An incoming or outgoing HTTP/SOAP connection.
"""
__tablename__ = 'http_soap'
__table_args__ = (
UniqueConstraint('name', 'connection', 'transport', 'cluster_id'),
Index('path_host_conn_act_clus_idx', 'url_path', 'host', 'connection', 'soap_action', 'cluster_id', unique=False), {})
id = Column(Integer, Sequence('http_soap_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False)
connection = Column(String(20), nullable=False)
transport = Column(String(200), nullable=False)
host = Column(String(200), nullable=True)
url_path = Column(String(200), nullable=False)
method = Column(String(200), nullable=True)
content_encoding = Column(String(200), nullable=True)
soap_action = Column(String(200), nullable=False)
soap_version = Column(String(20), nullable=True)
data_format = Column(String(20), nullable=True)
content_type = Column(String(200), nullable=True)
ping_method = Column(String(60), nullable=True)
pool_size = Column(Integer, nullable=True)
serialization_type = Column(String(200), nullable=False, default=HTTP_SOAP_SERIALIZATION_TYPE.SUDS.id)
timeout = Column(Integer(), nullable=False, default=MISC.DEFAULT_HTTP_TIMEOUT)
merge_url_params_req = Column(Boolean, nullable=True, default=True)
url_params_pri = Column(String(200), nullable=True, default=URL_PARAMS_PRIORITY.DEFAULT)
params_pri = Column(String(200), nullable=True, default=PARAMS_PRIORITY.DEFAULT)
has_rbac = Column(Boolean, nullable=False, default=False)
sec_use_rbac = Column(Boolean(), nullable=False, default=False)
cache_expiry = Column(Integer, nullable=True, default=0)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=True)
security = relationship(SecurityBase, backref=backref('http_soap_list', order_by=name, cascade='all, delete, delete-orphan'))
sec_tls_ca_cert_id = Column(Integer, ForeignKey('sec_tls_ca_cert.id', ondelete='CASCADE'), nullable=True)
sec_tls_ca_cert = relationship('TLSCACert', backref=backref('http_soap', order_by=name, cascade='all, delete, delete-orphan'))
cache_id = Column(Integer, ForeignKey('cache.id', ondelete='CASCADE'), nullable=True)
cache = relationship('Cache', backref=backref('http_soap_list', order_by=name, cascade='all, delete, delete-orphan'))
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
service = relationship('Service', backref=backref('http_soap', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('http_soap_list', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, is_internal=None, connection=None, transport=None, host=None,
url_path=None, method=None, soap_action=None, soap_version=None, data_format=None, ping_method=None,
pool_size=None, merge_url_params_req=None, url_params_pri=None, params_pri=None, serialization_type=None,
timeout=None, sec_tls_ca_cert_id=None, service_id=None, service=None, security=None, cluster_id=None,
cluster=None, service_name=None, security_id=None, has_rbac=None, security_name=None, content_type=None,
cache_id=None, cache_type=None, cache_expiry=None, cache_name=None, content_encoding=None, match_slash=None,
http_accept=None, opaque=None, **kwargs):
super(HTTPSOAP, self).__init__(**kwargs)
self.id = id
self.name = name
self.is_active = is_active
self.is_internal = is_internal
self.connection = connection
self.transport = transport
self.host = host
self.url_path = url_path
self.method = method
self.soap_action = soap_action
self.soap_version = soap_version
self.data_format = data_format
self.ping_method = ping_method
self.pool_size = pool_size
self.merge_url_params_req = merge_url_params_req
self.url_params_pri = url_params_pri
self.params_pri = params_pri
self.serialization_type = serialization_type
self.timeout = timeout
self.sec_tls_ca_cert_id = sec_tls_ca_cert_id
self.service_id = service_id
self.service = service
self.security = security
self.cluster_id = cluster_id
self.cluster = cluster
self.service_name = service_name # Not used by the DB
self.security_id = security_id
self.has_rbac = has_rbac
self.security_name = security_name
self.content_type = content_type
self.cache_id = cache_id
self.cache_type = cache_type
self.cache_expiry = cache_expiry
self.cache_name = cache_name # Not used by the DB
self.content_encoding = content_encoding
self.match_slash = match_slash # Not used by the DB
self.http_accept = http_accept # Not used by the DB
self.opaque1 = opaque
self.is_rate_limit_active = None
self.rate_limit_type = None
self.rate_limit_def = None
self.rate_limit_check_parent_def = None
# ################################################################################################################################
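# A minimal usage sketch, not part of the original API - a bare-bones incoming channel
# built with the HTTPSOAP class above; the connection and transport literals as well as
# all the other values are examples, and a real channel would also point to a service.
def _sketch_rest_channel(session, cluster, service):
    channel = HTTPSOAP(name='example.channel', is_active=True, is_internal=False, connection='channel',
        transport='plain_http', url_path='/example', soap_action='', service=service, cluster=cluster)
    session.add(channel)
    session.commit()
    return channel
# ################################################################################################################################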
class SQLConnectionPool(Base):
""" An SQL connection pool.
"""
__tablename__ = 'sql_pool'
__table_args__ = (UniqueConstraint('cluster_id', 'name'), {})
id = Column(Integer, Sequence('sql_pool_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
username = Column(String(200), nullable=False)
password = Column(String(200), nullable=False)
db_name = Column(String(200), nullable=False)
engine = Column(String(200), nullable=False)
extra = Column(LargeBinary(20000), nullable=True)
host = Column(String(200), nullable=False)
port = Column(Integer(), nullable=False)
pool_size = Column(Integer(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('sql_pools', order_by=name, cascade='all, delete, delete-orphan'))
engine_display_name = None # For auto-completion, not used by DB
def __init__(self, id=None, name=None, is_active=None, db_name=None, username=None, engine=None, extra=None, host=None,
port=None, pool_size=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.db_name = db_name
self.username = username
self.engine = engine
self.extra = extra
self.host = host
self.port = port
self.pool_size = pool_size
self.cluster = cluster
# ################################################################################################################################
class Service(Base):
""" A set of basic informations about a service available in a given cluster.
"""
__tablename__ = 'service'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('service_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
impl_name = Column(String(2000), nullable=False)
is_internal = Column(Boolean(), nullable=False)
wsdl = Column(LargeBinary(5000000), nullable=True)
wsdl_name = Column(String(200), nullable=True)
slow_threshold = Column(Integer, nullable=False, default=99999)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('services', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, impl_name=None, is_internal=None, cluster=None, wsdl=None,
wsdl_name=None):
self.id = id
self.name = name
self.is_active = is_active
self.impl_name = impl_name
self.is_internal = is_internal
self.cluster = cluster
self.wsdl = wsdl
self.wsdl_name = wsdl_name
self.plain_http_channels = [] # Not used by the database
self.soap_channels = [] # Not used by the database
self.amqp_channels = [] # Not used by the database
self.wmq_channels = [] # Not used by the database
self.zmq_channels = [] # Not used by the database
self.scheduler_jobs = [] # Not used by the database
self.deployment_info = [] # Not used by the database
self.source_info = None # Not used by the database
self.may_be_deleted = False # Not used by the database
self.sample_cid = None # Not used by the database
self.sample_req_timestamp = None # Not used by the database
self.sample_resp_timestamp = None # Not used by the database
self.sample_req = None # Not used by the database
self.sample_resp = None # Not used by the database
self.sample_req_resp_freq = None # Not used by the database
self.sample_req_html = None # Not used by the database
self.sample_resp_html = None # Not used by the database
self.usage = None # Not used by the database
self.time_last = None # Not used by the database
self.time_min_all_time = None # Not used by the database
self.time_max_all_time = None # Not used by the database
self.time_mean_all_time = None # Not used by the database
self.time_usage_1h = None # Not used by the database
self.time_min_1h = None # Not used by the database
self.time_max_1h = None # Not used by the database
self.time_trend_mean_1h = None # Not used by the database
self.time_trend_rate_1h = None # Not used by the database
self.docs_summary = None # Not used by the database
self.docs_description = None # Not used by the database
self.invokes = None # Not used by the database
self.invoked_by = None # Not used by the database
self.last_timestamp = None # Not used by the database
self.last_timestamp_utc = None # Not used by the database
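# An illustrative query sketch (comment only) - assuming an active `session`
# and a `cluster` row, all active, non-internal services of that cluster
# could be listed with:
#
#   q = session.query(Service).\
#       filter(Service.cluster_id==cluster.id).\
#       filter(Service.is_active==True).\
#       filter(Service.is_internal==False).\
#       order_by(Service.name)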
# ################################################################################################################################
class DeployedService(Base):
""" A service living on a given server.
"""
__tablename__ = 'deployed_service'
__table_args__ = (UniqueConstraint('server_id', 'service_id'), {})
deployment_time = Column(DateTime(), nullable=False)
details = Column(String(2000), nullable=False)
source = Column(LargeBinary(500000), nullable=True)
source_path = Column(String(2000), nullable=True)
source_hash = Column(String(512), nullable=True)
source_hash_method = Column(String(20), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=False, primary_key=True)
server = relationship(Server, backref=backref('deployed_services', order_by=deployment_time, cascade='all, delete, delete-orphan'))
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False, primary_key=True)
service = relationship(Service, backref=backref('deployment_data', order_by=deployment_time, cascade='all, delete, delete-orphan'))
def __init__(self, deployment_time, details, server_id, service_id, source, source_path, source_hash, source_hash_method):
self.deployment_time = deployment_time
self.details = details
self.server_id = server_id
self.service_id = service_id
self.source = source
self.source_path = source_path
self.source_hash = source_hash
self.source_hash_method = source_hash_method
# ################################################################################################################################
class Job(Base):
""" A scheduler's job. Stores all the information needed to execute a job
if it's a one-time job, otherwise the information is kept in related tables.
"""
__tablename__ = 'job'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('job_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
job_type = Column(Enum(SCHEDULER.JOB_TYPE.ONE_TIME, SCHEDULER.JOB_TYPE.INTERVAL_BASED,
SCHEDULER.JOB_TYPE.CRON_STYLE, name='job_type'), nullable=False)
start_date = Column(DateTime(), nullable=False)
extra = Column(LargeBinary(500000), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('jobs', order_by=name, cascade='all, delete, delete-orphan'))
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('jobs', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, job_type=None, start_date=None, extra=None, cluster=None,
cluster_id=None, service=None, service_id=None, service_name=None, interval_based=None, cron_style=None,
definition_text=None, job_type_friendly=None):
self.id = id
self.name = name
self.is_active = is_active
self.job_type = job_type
self.start_date = start_date
self.extra = extra
self.cluster = cluster
self.cluster_id = cluster_id
self.service = service
self.service_id = service_id
self.service_name = service_name # Not used by the database
self.interval_based = interval_based
self.cron_style = cron_style
self.definition_text = definition_text # Not used by the database
self.job_type_friendly = job_type_friendly # Not used by the database
# ################################################################################################################################
class IntervalBasedJob(Base):
""" A Cron-style scheduler's job.
"""
__tablename__ = 'job_interval_based'
__table_args__ = (UniqueConstraint('job_id'), {})
id = Column(Integer, Sequence('job_intrvl_seq'), primary_key=True)
weeks = Column(Integer, nullable=True)
days = Column(Integer, nullable=True)
hours = Column(Integer, nullable=True)
minutes = Column(Integer, nullable=True)
seconds = Column(Integer, nullable=True)
repeats = Column(Integer, nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
job_id = Column(Integer, ForeignKey('job.id', ondelete='CASCADE'), nullable=False)
job = relationship(Job, backref=backref('interval_based', uselist=False, cascade='all, delete, delete-orphan', single_parent=True))
def __init__(self, id=None, job=None, weeks=None, days=None, hours=None, minutes=None, seconds=None, repeats=None,
definition_text=None):
self.id = id
self.job = job
self.weeks = weeks
self.days = days
self.hours = hours
self.minutes = minutes
self.seconds = seconds
self.repeats = repeats
self.definition_text = definition_text # Not used by the database
# ################################################################################################################################
class CronStyleJob(Base):
""" A Cron-style scheduler's job.
"""
__tablename__ = 'job_cron_style'
__table_args__ = (UniqueConstraint('job_id'), {})
id = Column(Integer, Sequence('job_cron_seq'), primary_key=True)
cron_definition = Column(String(4000), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
job_id = Column(Integer, ForeignKey('job.id', ondelete='CASCADE'), nullable=False)
job = relationship(
Job, backref=backref('cron_style', uselist=False, cascade='all, delete, delete-orphan', single_parent=True))
def __init__(self, id=None, job=None, cron_definition=None):
self.id = id
self.job = job
self.cron_definition = cron_definition
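# An illustrative sketch (comment only) - assuming `session`, `cluster` and
# `service` already exist and datetime is imported, an interval-based job is
# created together with its parent Job row; the IntervalBasedJob row is
# persisted through the relationship cascade:
#
#   job = Job(name='example.every-15-min', is_active=True,
#       job_type=SCHEDULER.JOB_TYPE.INTERVAL_BASED,
#       start_date=datetime.utcnow(), cluster=cluster, service=service,
#       interval_based=IntervalBasedJob(minutes=15))
#   session.add(job)
#   session.commit()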
# ################################################################################################################################
class Cache(Base):
""" Base class for all cache definitions.
"""
__tablename__ = 'cache'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
__mapper_args__ = {'polymorphic_on': 'cache_type'}
id = Column(Integer, Sequence('cache_builtin_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_default = Column(Boolean(), nullable=False)
cache_type = Column(String(45), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('cache_list', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self):
self.current_size = 0 # Not used by the DB
# ################################################################################################################################
class CacheBuiltin(Cache):
""" Cache definitions using mechanisms built into Zato.
"""
__tablename__ = 'cache_builtin'
__mapper_args__ = {'polymorphic_identity':'builtin'}
cache_id = Column(Integer, ForeignKey('cache.id'), primary_key=True)
max_size = Column(Integer(), nullable=False)
max_item_size = Column(Integer(), nullable=False)
extend_expiry_on_get = Column(Boolean(), nullable=False)
extend_expiry_on_set = Column(Boolean(), nullable=False)
sync_method = Column(String(20), nullable=False)
persistent_storage = Column(String(40), nullable=False)
    def __init__(self, cluster=None):
        super(CacheBuiltin, self).__init__() # Initializes current_size from the base Cache class
        self.cluster = cluster
# ################################################################################################################################
class CacheMemcached(Cache):
""" Cache definitions using Memcached.
"""
__tablename__ = 'cache_memcached'
__mapper_args__ = {'polymorphic_identity':'memcached'}
cache_id = Column(Integer, ForeignKey('cache.id'), primary_key=True)
servers = Column(Text, nullable=False)
is_debug = Column(Boolean(), nullable=False)
extra = Column(LargeBinary(20000), nullable=True)
    def __init__(self, cluster=None):
        super(CacheMemcached, self).__init__() # Initializes current_size from the base Cache class
        self.cluster = cluster
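# Cache is polymorphic on cache_type, so querying the base class yields
# instances of the matching subclass - a sketch, assuming an active `session`:
#
#   for cache in session.query(Cache).filter(Cache.cluster_id==cluster.id):
#       # Each row is a CacheBuiltin or CacheMemcached instance,
#       # depending on its cache_type discriminator
#       print(cache.name, cache.cache_type)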
# ################################################################################################################################
class ConnDefAMQP(Base):
""" An AMQP connection definition.
"""
__tablename__ = 'conn_def_amqp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('conn_def_amqp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
host = Column(String(200), nullable=False)
port = Column(Integer(), nullable=False)
vhost = Column(String(200), nullable=False)
username = Column(String(200), nullable=False)
password = Column(String(200), nullable=False)
frame_max = Column(Integer(), nullable=False)
heartbeat = Column(Integer(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('amqp_conn_defs', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, host=None, port=None, vhost=None, username=None, password=None, frame_max=None,
heartbeat=None, cluster_id=None, cluster=None):
self.id = id
self.name = name
self.host = host
self.port = port
self.vhost = vhost
self.username = username
self.password = password
self.frame_max = frame_max
self.heartbeat = heartbeat
self.cluster_id = cluster_id
self.cluster = cluster
# ################################################################################################################################
class ConnDefWMQ(Base):
""" A IBM MQ connection definition.
"""
__tablename__ = 'conn_def_wmq'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('conn_def_wmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
# TODO is_active = Column(Boolean(), nullable=False)
host = Column(String(200), nullable=False)
port = Column(Integer, nullable=False)
queue_manager = Column(String(200), nullable=True)
channel = Column(String(200), nullable=False)
cache_open_send_queues = Column(Boolean(), nullable=False)
cache_open_receive_queues = Column(Boolean(), nullable=False)
use_shared_connections = Column(Boolean(), nullable=False)
dynamic_queue_template = Column(String(200), nullable=False, server_default='SYSTEM.DEFAULT.MODEL.QUEUE') # We're not actually using it yet
ssl = Column(Boolean(), nullable=False)
ssl_cipher_spec = Column(String(200))
ssl_key_repository = Column(String(200))
needs_mcd = Column(Boolean(), nullable=False)
use_jms = Column(Boolean(), nullable=False)
max_chars_printed = Column(Integer, nullable=False)
username = Column(String(100), nullable=True)
password = Column(String(200), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('wmq_conn_defs', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, host=None, port=None, queue_manager=None, channel=None, cache_open_send_queues=None,
cache_open_receive_queues=None, use_shared_connections=None, ssl=None, ssl_cipher_spec=None, ssl_key_repository=None,
needs_mcd=None, max_chars_printed=None, cluster_id=None, cluster=None, username=None, password=None, use_jms=None):
self.id = id
self.name = name
self.host = host
self.queue_manager = queue_manager
self.channel = channel
self.port = port
self.cache_open_receive_queues = cache_open_receive_queues
self.cache_open_send_queues = cache_open_send_queues
self.use_shared_connections = use_shared_connections
self.ssl = ssl
self.ssl_cipher_spec = ssl_cipher_spec
self.ssl_key_repository = ssl_key_repository
self.needs_mcd = needs_mcd
self.max_chars_printed = max_chars_printed
self.cluster_id = cluster_id
self.cluster = cluster
self.username = username
self.password = password
self.use_jms = use_jms
# ################################################################################################################################
class OutgoingAMQP(Base):
""" An outgoing AMQP connection.
"""
__tablename__ = 'out_amqp'
__table_args__ = (UniqueConstraint('name', 'def_id'), {})
id = Column(Integer, Sequence('out_amqp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
delivery_mode = Column(SmallInteger(), nullable=False)
priority = Column(SmallInteger(), server_default=str(AMQP.DEFAULT.PRIORITY), nullable=False)
content_type = Column(String(200), nullable=True)
content_encoding = Column(String(200), nullable=True)
expiration = Column(Integer(), nullable=True)
user_id = Column(String(200), nullable=True)
app_id = Column(String(200), nullable=True)
pool_size = Column(SmallInteger(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
def_id = Column(Integer, ForeignKey('conn_def_amqp.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(ConnDefAMQP, backref=backref('out_conns_amqp', cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, delivery_mode=None, priority=None, content_type=None,
content_encoding=None, expiration=None, user_id=None, app_id=None, def_id=None, delivery_mode_text=None,
def_name=None):
self.id = id
self.name = name
self.is_active = is_active
self.delivery_mode = delivery_mode
self.priority = priority
self.content_type = content_type
self.content_encoding = content_encoding
self.expiration = expiration
self.user_id = user_id
self.app_id = app_id
self.def_id = def_id
self.delivery_mode_text = delivery_mode_text # Not used by the DB
self.def_name = def_name # Not used by the DB
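# An illustrative sketch (comment only) - outgoing AMQP connections point to
# their definition through def_id, so listing a cluster's outgoing AMQP
# connections means joining through ConnDefAMQP (assuming an active `session`):
#
#   q = session.query(OutgoingAMQP).\
#       join(ConnDefAMQP, OutgoingAMQP.def_id==ConnDefAMQP.id).\
#       filter(ConnDefAMQP.cluster_id==cluster.id)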
# ################################################################################################################################
class OutgoingFTP(Base):
""" An outgoing FTP connection.
"""
__tablename__ = 'out_ftp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_ftp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(200), nullable=False)
user = Column(String(200), nullable=True)
password = Column(String(200), nullable=True)
acct = Column(String(200), nullable=True)
timeout = Column(Integer, nullable=True)
port = Column(Integer, server_default=str(FTP_PORT), nullable=False)
dircache = Column(Boolean(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_ftp', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, host=None, user=None, password=None, acct=None, timeout=None,
port=None, dircache=None, cluster_id=None):
self.id = id
self.name = name
self.is_active = is_active
self.host = host
self.user = user
self.password = password
self.acct = acct
self.timeout = timeout
self.port = port
self.dircache = dircache
self.cluster_id = cluster_id
# ################################################################################################################################
class OutgoingOdoo(Base):
""" An outgoing Odoo connection.
"""
__tablename__ = 'out_odoo'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_odoo_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(200), nullable=False)
port = Column(Integer(), nullable=False, server_default=str(ODOO.DEFAULT.PORT))
user = Column(String(200), nullable=False)
database = Column(String(200), nullable=False)
protocol = Column(String(200), nullable=False)
pool_size = Column(Integer(), nullable=False, server_default=str(ODOO.DEFAULT.POOL_SIZE))
password = Column(String(400), nullable=False)
client_type = Column(String(40), nullable=False, server_default=str(ODOO.CLIENT_TYPE.OPENERP_CLIENT_LIB))
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_odoo', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, cluster=None):
self.cluster = cluster
self.protocol_name = None # Not used by the DB
# ################################################################################################################################
class OutgoingSAP(Base):
""" An outgoing SAP RFC connection.
"""
__tablename__ = 'out_sap'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_sap_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(200), nullable=False)
sysnr = Column(String(3), nullable=True, server_default=str(SAP.DEFAULT.INSTANCE))
user = Column(String(200), nullable=False)
client = Column(String(4), nullable=False)
sysid = Column(String(4), nullable=False)
password = Column(String(400), nullable=False)
pool_size = Column(Integer(), nullable=False, server_default=str(SAP.DEFAULT.POOL_SIZE))
router = Column(String(400), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_sap', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, cluster=None):
self.cluster = cluster
# ################################################################################################################################
class OutgoingSTOMP(Base):
""" An outgoing STOMP connection.
"""
__tablename__ = 'out_stomp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_stomp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
username = Column(String(200), nullable=True, server_default=STOMP.DEFAULT.USERNAME)
password = Column(String(200), nullable=True)
address = Column(String(200), nullable=False, server_default=STOMP.DEFAULT.ADDRESS)
proto_version = Column(String(20), nullable=False, server_default=STOMP.DEFAULT.PROTOCOL)
timeout = Column(Integer(), nullable=False, server_default=str(STOMP.DEFAULT.TIMEOUT))
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_stomp', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, cluster=None):
self.cluster = cluster
# ################################################################################################################################
class OutgoingWMQ(Base):
""" An outgoing IBM MQ connection.
"""
__tablename__ = 'out_wmq'
__table_args__ = (UniqueConstraint('name', 'def_id'), {})
id = Column(Integer, Sequence('out_wmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
delivery_mode = Column(SmallInteger(), nullable=False)
priority = Column(SmallInteger(), server_default=str(WMQ_DEFAULT_PRIORITY), nullable=False)
expiration = Column(String(20), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
def_id = Column(Integer, ForeignKey('conn_def_wmq.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(ConnDefWMQ, backref=backref('out_conns_wmq', cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, delivery_mode=None, priority=None, expiration=None, def_id=None,
cluster=None, delivery_mode_text=None, def_name=None):
self.id = id
self.name = name
self.is_active = is_active
self.delivery_mode = delivery_mode
self.priority = priority
self.expiration = expiration
self.def_id = def_id
self.cluster = cluster
self.delivery_mode_text = delivery_mode_text # Not used by the DB
self.def_name = def_name # Not used by DB
self.def_name_full_text = None # Not used by DB
# ################################################################################################################################
class OutgoingZMQ(Base):
""" An outgoing Zero MQ connection.
"""
__tablename__ = 'out_zmq'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('out_zmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
address = Column(String(200), nullable=False)
socket_type = Column(String(20), nullable=False)
socket_method = Column(String(20), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('out_conns_zmq', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, address=None, socket_type=None, cluster_id=None, cluster=None):
self.id = id
self.name = name
self.is_active = is_active
self.socket_type = socket_type
self.address = address
self.cluster_id = cluster_id
self.cluster = cluster
# ################################################################################################################################
class ChannelAMQP(Base):
""" An incoming AMQP connection.
"""
__tablename__ = 'channel_amqp'
__table_args__ = (UniqueConstraint('name', 'def_id'), {})
id = Column(Integer, Sequence('channel_amqp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
queue = Column(String(200), nullable=False)
consumer_tag_prefix = Column(String(200), nullable=False)
pool_size = Column(Integer, nullable=False)
ack_mode = Column(String(20), nullable=False)
prefetch_count = Column(Integer, nullable=False)
data_format = Column(String(20), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('channels_amqp', order_by=name, cascade='all, delete, delete-orphan'))
def_id = Column(Integer, ForeignKey('conn_def_amqp.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(ConnDefAMQP, backref=backref('channels_amqp', cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, queue=None, consumer_tag_prefix=None, def_id=None, def_name=None,
service_name=None, data_format=None):
self.id = id
self.name = name
self.is_active = is_active
self.queue = queue
self.consumer_tag_prefix = consumer_tag_prefix
self.def_id = def_id
self.def_name = def_name # Not used by the DB
self.service_name = service_name # Not used by the DB
self.data_format = data_format
# ################################################################################################################################
class ChannelSTOMP(Base):
""" An incoming STOMP connection.
"""
__tablename__ = 'channel_stomp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('channel_stomp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
username = Column(String(200), nullable=True, server_default=STOMP.DEFAULT.USERNAME)
password = Column(String(200), nullable=True)
address = Column(String(200), nullable=False, server_default=STOMP.DEFAULT.ADDRESS)
proto_version = Column(String(20), nullable=False, server_default=STOMP.DEFAULT.PROTOCOL)
timeout = Column(Integer(), nullable=False, server_default=str(STOMP.DEFAULT.TIMEOUT))
sub_to = Column(Text, nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('channels_stomp', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('channels_stomp', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class ChannelWMQ(Base):
""" An incoming IBM MQ connection.
"""
__tablename__ = 'channel_wmq'
__table_args__ = (UniqueConstraint('name', 'def_id'), {})
id = Column(Integer, Sequence('channel_wmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
queue = Column(String(200), nullable=False)
data_format = Column(String(20), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('channels_wmq', order_by=name, cascade='all, delete, delete-orphan'))
def_id = Column(Integer, ForeignKey('conn_def_wmq.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(ConnDefWMQ, backref=backref('channels_wmq', cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, queue=None, def_id=None, def_name=None, service_name=None,
data_format=None):
self.id = id
self.name = name
self.is_active = is_active
self.queue = queue
self.def_id = def_id
self.def_name = def_name # Not used by the DB
self.service_name = service_name # Not used by the DB
self.data_format = data_format
# ################################################################################################################################
class ChannelZMQ(Base):
""" An incoming Zero MQ connection.
"""
__tablename__ = 'channel_zmq'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('channel_zmq_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
address = Column(String(200), nullable=False)
socket_type = Column(String(20), nullable=False)
sub_key = Column(String(200), nullable=True)
data_format = Column(String(20), nullable=True)
socket_method = Column(String(20), nullable=False)
pool_strategy = Column(String(20), nullable=False)
service_source = Column(String(20), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('channels_zmq', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('channels_zmq', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, address=None, socket_type=None, socket_type_text=None, sub_key=None,
service_name=None, data_format=None):
self.id = id
self.name = name
self.is_active = is_active
self.address = address
self.socket_type = socket_type
self.socket_type_text = socket_type_text # Not used by the DB
self.sub_key = sub_key
self.service_name = service_name # Not used by the DB
self.data_format = data_format
# ################################################################################################################################
class DeploymentPackage(Base):
""" A package to be deployed onto a server, either a plain .py/.pyw or
a Distutils2 archive.
"""
__tablename__ = 'deployment_package'
id = Column(Integer, Sequence('depl_package_seq'), primary_key=True)
deployment_time = Column(DateTime(), nullable=False)
details = Column(String(2000), nullable=False)
payload_name = Column(String(200), nullable=False)
payload = Column(LargeBinary(5000000), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=False, primary_key=False)
server = relationship(
Server, backref=backref('originating_deployment_packages',
order_by=deployment_time, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, deployment_time=None, details=None, payload_name=None, payload=None):
self.id = id
self.deployment_time = deployment_time
self.details = details
self.payload_name = payload_name
self.payload = payload
# ################################################################################################################################
class DeploymentStatus(Base):
""" Whether a server has already deployed a given package.
"""
__tablename__ = 'deployment_status'
__table_args__ = (UniqueConstraint('package_id', 'server_id'), {})
id = Column(Integer, Sequence('depl_status_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
package_id = Column(
Integer, ForeignKey('deployment_package.id', ondelete='CASCADE'), nullable=False, primary_key=False)
package = relationship(
DeploymentPackage, backref=backref('deployment_status_list', order_by=package_id, cascade='all, delete, delete-orphan'))
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=False, primary_key=False)
server = relationship(
Server, backref=backref('deployment_status_list', order_by=server_id, cascade='all, delete, delete-orphan'))
# See zato.common.DEPLOYMENT_STATUS
status = Column(String(20), nullable=False)
status_change_time = Column(DateTime(), nullable=False)
def __init__(self, package_id=None, server_id=None, status=None, status_change_time=None):
self.package_id = package_id
self.server_id = server_id
self.status = status
self.status_change_time = status_change_time
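# An illustrative sketch (comment only) - assuming `session`, `package` and
# `server` rows exist and DEPLOYMENT_STATUS is imported from zato.common,
# a server could record that it has deployed a package like this:
#
#   ds = session.query(DeploymentStatus).\
#       filter_by(package_id=package.id, server_id=server.id).\
#       one()
#   ds.status = DEPLOYMENT_STATUS.DEPLOYED
#   ds.status_change_time = datetime.utcnow()
#   session.commit()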
# ################################################################################################################################
class MsgNamespace(Base):
""" A message namespace, used in XPath, for instance.
"""
__tablename__ = 'msg_ns'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('msg_ns_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(String(500), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('namespaces', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, value=None, cluster_id=None):
self.id = id
self.name = name
self.value = value
self.cluster_id = cluster_id
# ################################################################################################################################
class XPath(Base):
""" An XPath expression to run against XML messages.
"""
__tablename__ = 'msg_xpath'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('msg_xpath_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(String(1500), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('xpaths', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, value=None, cluster_id=None):
self.id = id
self.name = name
self.value = value
self.cluster_id = cluster_id
# ################################################################################################################################
class JSONPointer(Base):
""" An XPath-list expression to run against JSON messages.
"""
__tablename__ = 'msg_json_pointer'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('msg_json_pointer_seq'), primary_key=True)
name = Column(String(200), nullable=False)
value = Column(String(1500), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('json_pointers', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, value=None, cluster_id=None):
self.id = id
self.name = name
self.value = value
self.cluster_id = cluster_id
# ################################################################################################################################
class OpenStackSwift(Base):
""" A connection to OpenStack's Swift (no longer used, to be removed).
"""
__tablename__ = 'os_swift'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('os_swift_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
pool_size = Column(Integer, nullable=False)
auth_url = Column(String(200), nullable=False)
auth_version = Column(String(200), nullable=False)
user = Column(String(200), nullable=True)
secret_key = Column(String(200), nullable=True)
retries = Column(Integer, nullable=False)
is_snet = Column(Boolean(), nullable=False)
starting_backoff = Column(Integer, nullable=False)
max_backoff = Column(Integer, nullable=False)
tenant_name = Column(String(200), nullable=True)
should_validate_cert = Column(Boolean(), nullable=False)
cacert = Column(String(200), nullable=True)
should_retr_ratelimit = Column(Boolean(), nullable=False)
needs_tls_compr = Column(Boolean(), nullable=False)
custom_options = Column(String(2000), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('openstack_swift_conns', order_by=name, cascade='all, delete, delete-orphan'))
def __init__(self, id=None, name=None, is_active=None, auth_url=None, auth_version=None, user=None, key=None, retries=None,
is_snet=None, starting_backoff=None, max_backoff=None, tenant_name=None, should_validate_cert=None,
cacert=None, should_retr_ratelimit=None, needs_tls_compr=None, custom_options=None):
self.id = id
self.name = name
self.is_active = is_active
self.auth_url = auth_url
self.auth_version = auth_version
self.user = user
        self.secret_key = key # The underlying column is secret_key, not key
self.retries = retries
self.is_snet = is_snet
self.starting_backoff = starting_backoff
self.max_backoff = max_backoff
self.tenant_name = tenant_name
self.should_validate_cert = should_validate_cert
self.cacert = cacert
self.should_retr_ratelimit = should_retr_ratelimit
self.needs_tls_compr = needs_tls_compr
self.custom_options = custom_options
# ################################################################################################################################
class AWSS3(Base):
""" An outgoing connection to AWS S3.
"""
__tablename__ = 'aws_s3'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('aws_s3_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
pool_size = Column(Integer, nullable=False, default=CLOUD.AWS.S3.DEFAULTS.POOL_SIZE)
address = Column(String(200), nullable=False, default=CLOUD.AWS.S3.DEFAULTS.ADDRESS)
debug_level = Column(Integer, nullable=False, default=CLOUD.AWS.S3.DEFAULTS.DEBUG_LEVEL)
suppr_cons_slashes = Column(Boolean(), nullable=False, default=True)
content_type = Column(String(200), nullable=False, default=CLOUD.AWS.S3.DEFAULTS.CONTENT_TYPE)
    metadata_ = Column(String(2000), nullable=True) # Cannot be 'metadata' because that name is reserved by SQLAlchemy
bucket = Column(String(2000), nullable=True)
encrypt_at_rest = Column(Boolean(), nullable=False, default=False)
storage_class = Column(String(200), nullable=False, default=CLOUD.AWS.S3.STORAGE_CLASS.DEFAULT)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=False)
security = relationship(SecurityBase, backref=backref('aws_s3_conns', order_by=is_active, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('aws_s3_conns', order_by=name, cascade='all, delete, delete-orphan'))
def to_json(self):
return to_json(self)
# ################################################################################################################################
class Notification(Base):
""" A base class for all notifications, be it cloud, FTP-based or others.
"""
__tablename__ = 'notif'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
__mapper_args__ = {'polymorphic_on': 'notif_type'}
id = Column(Integer, Sequence('sec_base_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False, default=True)
notif_type = Column(String(45), nullable=False)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
interval = Column(Integer, nullable=False, default=NOTIF.DEFAULT.CHECK_INTERVAL)
name_pattern = Column(String(2000), nullable=True, default=NOTIF.DEFAULT.NAME_PATTERN)
name_pattern_neg = Column(Boolean(), nullable=True, default=False)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
get_data = Column(Boolean(), nullable=True, default=False)
get_data_patt = Column(String(2000), nullable=True, default=NOTIF.DEFAULT.GET_DATA_PATTERN)
get_data_patt_neg = Column(Boolean(), nullable=True, default=False)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship(Service, backref=backref('notification_list', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('notification_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class NotificationOpenStackSwift(Notification):
""" Stores OpenStack Swift notifications (no longer used).
"""
__tablename__ = 'notif_os_swift'
__mapper_args__ = {'polymorphic_identity': 'openstack_swift'}
id = Column(Integer, ForeignKey('notif.id'), primary_key=True)
containers = Column(String(16380), nullable=False)
def_id = Column(Integer, ForeignKey('os_swift.id'), primary_key=True)
definition = relationship(
OpenStackSwift, backref=backref('notif_oss_list', order_by=id, cascade='all, delete, delete-orphan'))
def to_json(self):
return to_json(self)
# ################################################################################################################################
class NotificationSQL(Notification):
""" Stores SQL notifications.
"""
__tablename__ = 'notif_sql'
__mapper_args__ = {'polymorphic_identity': 'sql'}
id = Column(Integer, ForeignKey('notif.id'), primary_key=True)
query = Column(Text, nullable=False)
def_id = Column(Integer, ForeignKey('sql_pool.id'), primary_key=True)
definition = relationship(
SQLConnectionPool, backref=backref('notif_sql_list', order_by=id, cascade='all, delete, delete-orphan'))
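# An illustrative sketch (comment only) - the notif_type discriminator is
# filled in by the mapper on insert, so creating an SQL notification only
# needs the base and subclass columns (assuming `session`, `service`,
# `cluster` and an existing SQLConnectionPool row `sql_pool`):
#
#   notif = NotificationSQL()
#   notif.name = 'billing.new-invoices'
#   notif.is_active = True
#   notif.interval = NOTIF.DEFAULT.CHECK_INTERVAL
#   notif.query = 'SELECT id FROM invoices WHERE is_new = true'
#   notif.definition = sql_pool
#   notif.service = service
#   notif.cluster = cluster
#   session.add(notif)
#   session.commit()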
# ################################################################################################################################
class CassandraConn(Base):
""" Connections to Cassandra.
"""
__tablename__ = 'conn_def_cassandra'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('conn_def_cassandra_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
contact_points = Column(String(400), nullable=False, default=CASSANDRA.DEFAULT.CONTACT_POINTS)
port = Column(Integer, nullable=False, default=CASSANDRA.DEFAULT.PORT)
exec_size = Column(Integer, nullable=False, default=CASSANDRA.DEFAULT.EXEC_SIZE)
proto_version = Column(Integer, nullable=False, default=CASSANDRA.DEFAULT.PROTOCOL_VERSION)
cql_version = Column(Integer, nullable=True)
default_keyspace = Column(String(400), nullable=False)
username = Column(String(200), nullable=True)
password = Column(String(200), nullable=True)
tls_ca_certs = Column(String(200), nullable=True)
tls_client_cert = Column(String(200), nullable=True)
tls_client_priv_key = Column(String(200), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('cassandra_conn_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class ElasticSearch(Base):
__tablename__ = 'search_es'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('search_es_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False, default=True)
hosts = Column(String(400), nullable=False)
timeout = Column(Integer(), nullable=False)
body_as = Column(String(45), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('search_es_conns', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class Solr(Base):
__tablename__ = 'search_solr'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('search_solr_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False, default=True)
address = Column(String(400), nullable=False)
timeout = Column(Integer(), nullable=False)
ping_path = Column(String(40), nullable=False)
options = Column(String(800), nullable=True)
pool_size = Column(Integer(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('search_solr_conns', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class CassandraQuery(Base):
""" Cassandra query templates.
"""
__tablename__ = 'query_cassandra'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('query_cassandra_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
value = Column(LargeBinary(40000), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('cassandra_queries', order_by=name, cascade='all, delete, delete-orphan'))
def_id = Column(Integer, ForeignKey('conn_def_cassandra.id', ondelete='CASCADE'), nullable=False)
def_ = relationship(CassandraConn, backref=backref('cassandra_queries', cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class SMTP(Base):
__tablename__ = 'email_smtp'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('email_smtp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(400), nullable=False)
port = Column(Integer(), nullable=False)
timeout = Column(Integer(), nullable=False)
is_debug = Column(Boolean(), nullable=False)
username = Column(String(400), nullable=True)
password = Column(String(400), nullable=True)
mode = Column(String(20), nullable=False)
ping_address = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('smtp_conns', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class IMAP(Base):
__tablename__ = 'email_imap'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('email_imap_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
host = Column(String(400), nullable=False)
port = Column(Integer(), nullable=False)
timeout = Column(Integer(), nullable=False)
debug_level = Column(Integer(), nullable=False)
username = Column(String(400), nullable=True)
password = Column(String(400), nullable=True)
mode = Column(String(20), nullable=False)
get_criteria = Column(String(2000), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('imap_conns', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RBACRole(Base):
""" All the roles known within a particular cluster.
"""
__tablename__ = 'rbac_role'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('rbac_role_seq'), primary_key=True)
name = Column(String(200), nullable=False)
parent_id = Column(Integer, ForeignKey('rbac_role.id', ondelete='CASCADE'), nullable=True)
parent = relationship('RBACRole', backref=backref('children'), remote_side=[id])
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rbac_roles', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RBACPermission(Base):
""" Permissions defined in a given cluster.
"""
__tablename__ = 'rbac_perm'
__table_args__ = (UniqueConstraint('name', 'cluster_id'), {})
id = Column(Integer, Sequence('rbac_perm_seq'), primary_key=True)
name = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rbac_permissions', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RBACClientRole(Base):
""" Mappings between clients and roles they have.
"""
__tablename__ = 'rbac_client_role'
__table_args__ = (UniqueConstraint('client_def', 'role_id', 'cluster_id'), {})
id = Column(Integer, Sequence('rbac_cli_rol_seq'), primary_key=True)
name = Column(String(400), nullable=False)
client_def = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
role_id = Column(Integer, ForeignKey('rbac_role.id', ondelete='CASCADE'), nullable=False)
role = relationship(RBACRole, backref=backref('rbac_client_roles', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rbac_client_roles', order_by=client_def, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RBACRolePermission(Base):
""" Mappings between roles and permissions they have on given services.
"""
__tablename__ = 'rbac_role_perm'
__table_args__ = (UniqueConstraint('role_id', 'perm_id', 'service_id', 'cluster_id'), {})
id = Column(Integer, Sequence('rbac_role_perm_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
role_id = Column(Integer, ForeignKey('rbac_role.id', ondelete='CASCADE'), nullable=False)
role = relationship(RBACRole, backref=backref('rbac_role_perms', order_by=id, cascade='all, delete, delete-orphan'))
perm_id = Column(Integer, ForeignKey('rbac_perm.id', ondelete='CASCADE'), nullable=False)
perm = relationship(RBACPermission, backref=backref('rbac_role_perms', order_by=id, cascade='all, delete, delete-orphan'))
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=False)
service = relationship('Service', backref=backref('role_perm', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rbac_role_permissions', order_by=id, cascade='all, delete, delete-orphan'))
def get_name(self):
return '{}/{}/{}/{}'.format(self.id, self.role_id, self.perm_id, self.service_id)
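    # E.g. a row with id=1, role_id=2, perm_id=3 and service_id=4
    # has get_name() == '1/2/3/4'.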
# ################################################################################################################################
class KVData(Base):
""" Key/value data table.
"""
__tablename__ = 'kv_data'
__table_args__ = (Index('key_clust_id_idx', 'key', 'cluster_id', unique=True, mysql_length={'key':767}),)
id = Column(Integer, Sequence('kv_data_id_seq'), primary_key=True)
key = Column(LargeBinary(), nullable=False)
value = Column(LargeBinary(), nullable=True)
data_type = Column(String(200), nullable=False, default='text')
creation_time = Column(DateTime(), nullable=False)
expiry_time = Column(DateTime(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=True)
cluster = relationship(Cluster, backref=backref('kv_data', order_by=key, cascade='all, delete, delete-orphan'))
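# An illustrative sketch (comment only) - KVData defines no custom __init__,
# so the declarative base's default constructor applies (assuming an active
# `session` and datetime imported):
#
#   item = KVData(key=b'conf.currency', value=b'EUR',
#       creation_time=datetime.utcnow(), cluster_id=cluster.id)
#   session.add(item)
#   session.commit()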
# ################################################################################################################################
class ChannelWebSocket(Base):
""" A WebSocket connection definition.
"""
__tablename__ = 'channel_web_socket'
__table_args__ = (UniqueConstraint('name', 'cluster_id'),
UniqueConstraint('address', 'cluster_id'), {})
id = Column(Integer, Sequence('web_socket_chan_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False)
is_out = Column(Boolean(), nullable=False, default=sa_false())
address = Column(String(200), nullable=False)
data_format = Column(String(20), nullable=False)
new_token_wait_time = Column(Integer(), nullable=False)
token_ttl = Column(Integer(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
service = relationship('Service', backref=backref('web_socket', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('web_socket_list', order_by=name, cascade='all, delete, delete-orphan'))
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=True)
def __init__(self, id=None, name=None, is_active=None, is_internal=None, address=None, data_format=None,
new_token_wait_time=None, token_ttl=None, service_id=None, service=None, cluster_id=None, cluster=None,
security_id=None, security=None):
self.id = id
self.name = name
self.is_active = is_active
self.is_internal = is_internal
self.address = address
self.data_format = data_format
self.new_token_wait_time = new_token_wait_time
self.token_ttl = token_ttl
self.service_id = service_id
self.service = service
self.cluster_id = cluster_id
self.cluster = cluster
self.security_id = security_id
self.security = security
self.service_name = None # Not used by DB
self.sec_type = None # Not used by DB
# ################################################################################################################################
class WebSocketClient(Base):
""" An active WebSocket client - currently connected to a Zato server process.
"""
__tablename__ = 'web_socket_client'
__table_args__ = (
Index('wscl_pub_client_idx', 'cluster_id', 'pub_client_id', unique=True),
Index('wscl_cli_ext_n_idx', 'cluster_id', 'ext_client_name', unique=False),
Index('wscl_cli_ext_i_idx', 'cluster_id', 'ext_client_id', unique=False),
Index('wscl_pr_addr_idx', 'cluster_id', 'peer_address', unique=False),
Index('wscl_pr_fqdn_idx', 'cluster_id', 'peer_fqdn', unique=False),
{})
# This ID is for SQL
id = Column(Integer, Sequence('web_socket_cli_seq'), primary_key=True)
is_internal = Column(Boolean(), nullable=False)
# This one is assigned by Zato
pub_client_id = Column(String(200), nullable=False)
# These are assigned by clients themselves
ext_client_id = Column(String(200), nullable=False)
ext_client_name = Column(String(200), nullable=True)
local_address = Column(String(400), nullable=False)
peer_address = Column(String(400), nullable=False)
peer_fqdn = Column(String(400), nullable=False)
connection_time = Column(DateTime, nullable=False)
last_seen = Column(DateTime, nullable=False)
server_proc_pid = Column(Integer, nullable=False)
server_name = Column(String(200), nullable=False) # References server.name
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
channel_id = Column(Integer, ForeignKey('channel_web_socket.id', ondelete='CASCADE'), nullable=False)
channel = relationship(
ChannelWebSocket, backref=backref('clients', order_by=local_address, cascade='all, delete, delete-orphan'))
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=False)
server = relationship(
Server, backref=backref('server_web_socket_clients', order_by=local_address, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(
Cluster, backref=backref('web_socket_client_list', order_by=last_seen, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class WebSocketClientPubSubKeys(Base):
""" Associates currently active WebSocket clients with subscription keys.
"""
__tablename__ = 'web_socket_cli_ps_keys'
__table_args__ = (
Index('wscl_psk_cli', 'cluster_id', 'client_id', unique=False),
Index('wscl_psk_sk', 'cluster_id', 'sub_key', unique=False),
{})
id = Column(Integer, Sequence('web_socket_cli_ps_seq'), primary_key=True)
# The same as in web_socket_sub.sub_key
sub_key = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
client_id = Column(Integer, ForeignKey('web_socket_client.id', ondelete='CASCADE'), nullable=False)
client = relationship(
WebSocketClient, backref=backref('web_socket_cli_ps_keys', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref(
'web_socket_cli_ps_keys', order_by=id, cascade='all, delete, delete-orphan'))
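# An illustrative sketch (comment only) - given a WebSocketClient row
# `client`, the subscription keys it currently holds can be collected with:
#
#   sub_keys = [row.sub_key for row in session.query(WebSocketClientPubSubKeys).filter_by(
#       cluster_id=client.cluster_id, client_id=client.id)]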
# ################################################################################################################################
class WebSocketSubscription(Base):
""" Persistent subscriptions pertaining to a given long-running, possibly restartable, WebSocket connection.
"""
__tablename__ = 'web_socket_sub'
__table_args__ = (
Index('wssub_channel_idx', 'cluster_id', 'channel_id', unique=False),
Index('wssub_subkey_idx', 'cluster_id', 'sub_key', unique=True),
Index('wssub_extcli_idx', 'cluster_id', 'ext_client_id', unique=False),
Index('wssub_subkey_chan_idx', 'cluster_id', 'sub_key', 'channel_id', unique=True),
{})
id = Column(Integer, Sequence('web_socket_sub_seq'), primary_key=True)
is_internal = Column(Boolean(), nullable=False)
ext_client_id = Column(String(200), nullable=False)
    # Each transient, per-connection web_socket_cli_ps_keys.sub_key will refer to this column
sub_key = Column(String(200), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
channel_id = Column(Integer, ForeignKey('channel_web_socket.id', ondelete='CASCADE'), nullable=True)
channel = relationship(
ChannelWebSocket, backref=backref('web_socket_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
subscription_id = Column(Integer, ForeignKey('pubsub_sub.id', ondelete='CASCADE'), nullable=False)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('web_socket_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class PubSubEndpoint(Base):
""" An individual endpoint participating in publish/subscribe scenarios.
"""
__tablename__ = 'pubsub_endpoint'
__table_args__ = (
Index('pubsb_endp_clust_idx', 'cluster_id', unique=False),
Index('pubsb_endp_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_endp_name_idx', 'cluster_id', 'name', unique=True),
UniqueConstraint('cluster_id', 'security_id'),
UniqueConstraint('cluster_id', 'service_id'),
UniqueConstraint('cluster_id', 'ws_channel_id'),
{})
id = Column(Integer, Sequence('pubsub_endp_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_internal = Column(Boolean(), nullable=False, server_default=sa_false())
    is_active = Column(Boolean(), nullable=False, server_default=sa_true()) # Unused for now
endpoint_type = Column(String(40), nullable=False) # WSX, REST, AMQP and other types
last_seen = Column(BigInteger(), nullable=True)
last_pub_time = Column(BigInteger(), nullable=True)
last_sub_time = Column(BigInteger(), nullable=True)
last_deliv_time = Column(BigInteger(), nullable=True)
# Endpoint's role, e.g. publisher, subscriber or both
role = Column(String(40), nullable=False)
# Tags describing this endpoint
    tags = Column(Text, nullable=True) # Unused for now
# Patterns for topics that this endpoint may subscribe to
topic_patterns = Column(Text, nullable=True)
# Patterns for tags of publishers
pub_tag_patterns = Column(Text, nullable=True) # Unused for now
# Patterns for tags of messages
message_tag_patterns = Column(Text, nullable=True) # Unused for now
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# Endpoint is a service
service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
# Identifies the endpoint through its security definition, e.g. a username/password combination.
security_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=True)
security = relationship(SecurityBase, backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
# Identifies the endpoint through a reference to a generic connection
gen_conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=True)
gen_conn = relationship('GenericConn', backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
# Identifies the endpoint through a long-running WebSockets channel
ws_channel_id = Column(Integer, ForeignKey('channel_web_socket.id', ondelete='CASCADE'), nullable=True)
ws_channel = relationship(
ChannelWebSocket, backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('pubsub_endpoints', order_by=id, cascade='all, delete, delete-orphan'))
sec_type = None # Not used by DB
sec_name = None # Not used by DB
ws_channel_name = None # Not used by DB
service_name = None # Not used by DB
# ################################################################################################################################
class PubSubTopic(Base):
""" A topic in pub/sub.
"""
__tablename__ = 'pubsub_topic'
__table_args__ = (
Index('pubsb_tp_clust_idx', 'cluster_id', unique=False),
Index('pubsb_tp_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_tp_name_idx', 'cluster_id', 'name', unique=True),
{})
id = Column(Integer, Sequence('pubsub_topic_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False, default=False)
max_depth_gd = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.TOPIC_MAX_DEPTH_GD)
max_depth_non_gd = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.TOPIC_MAX_DEPTH_NON_GD)
depth_check_freq = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.DEPTH_CHECK_FREQ)
has_gd = Column(Boolean(), nullable=False) # Guaranteed delivery
is_api_sub_allowed = Column(Boolean(), nullable=False)
# How many messages to buffer in RAM before they are actually saved in SQL / pushed to tasks
pub_buffer_size_gd = Column(Integer(), nullable=False, server_default=str(PUBSUB.DEFAULT.PUB_BUFFER_SIZE_GD))
task_sync_interval = Column(Integer(), nullable=False, server_default=str(PUBSUB.DEFAULT.TASK_SYNC_INTERVAL))
task_delivery_interval = Column(Integer(), nullable=False, server_default=str(PUBSUB.DEFAULT.TASK_DELIVERY_INTERVAL))
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# A hook service invoked during publications to this specific topic
hook_service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('pubsub_topics', order_by=name, cascade='all, delete, delete-orphan'))
# Not used by DB
ext_client_id = None
last_pub_time = None
pub_time = None
ext_pub_time = None
last_pub_msg_id = None
last_endpoint_id = None
last_endpoint_name = None
last_pub_has_gd = None
last_pub_server_pid = None
last_pub_server_name = None
# ################################################################################################################################
class PubSubEndpointTopic(Base):
""" A list of topics to which a given endpoint has ever published along with metadata about the latest publication.
There is one row for each existing publisher and topic ever in use.
"""
__tablename__ = 'pubsub_endp_topic'
__table_args__ = (
Index('pubsb_endpt_clust_idx', 'cluster_id', unique=False),
Index('pubsb_endpt_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_endpt_msgid_idx', 'cluster_id', 'pub_msg_id', unique=True),
Index('pubsb_endpt_clsendtp_idx', 'cluster_id', 'endpoint_id', 'topic_id', unique=True),
{})
id = Column(Integer, Sequence('pubsub_endpt_seq'), primary_key=True)
pub_pattern_matched = Column(Text, nullable=False)
last_pub_time = Column(Numeric(20, 7, asdecimal=False), nullable=False)
pub_msg_id = Column(String(200), nullable=False)
pub_correl_id = Column(String(200), nullable=True)
in_reply_to = Column(String(200), nullable=True)
ext_client_id = Column(Text(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
endpoint_id = Column(Integer, ForeignKey('pubsub_endpoint.id', ondelete='CASCADE'), nullable=True)
endpoint = relationship(
PubSubEndpoint, backref=backref('pubsub_endpoint_topics', order_by=endpoint_id, cascade='all, delete, delete-orphan'))
topic_id = Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=False)
topic = relationship(
PubSubTopic, backref=backref('pubsub_endpoint_topics', order_by=topic_id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('pubsub_endpoint_topics', order_by=cluster_id,
cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class PubSubMessage(Base):
""" An individual message published to a topic.
"""
__tablename__ = 'pubsub_message'
__table_args__ = (
# This index is needed for FKs from other tables,
# otherwise with MySQL we get error 1215 'Cannot add foreign key constraint'
Index('pubsb_msg_pubmsg_id_idx', 'pub_msg_id', unique=True),
Index('pubsb_msg_pubmsg_clu_id_idx', 'cluster_id', 'pub_msg_id', unique=True),
Index('pubsb_msg_inreplyto_id_idx', 'cluster_id', 'in_reply_to', unique=False),
Index('pubsb_msg_correl_id_idx', 'cluster_id', 'pub_correl_id', unique=False),
{})
# For SQL joins
id = Column(Integer, Sequence('pubsub_msg_seq'), primary_key=True)
# Publicly visible message identifier
pub_msg_id = Column(String(200), nullable=False)
# Publicly visible correlation ID
pub_correl_id = Column(String(200), nullable=True)
    # Publicly visible ID of the message that the current message is a response to
in_reply_to = Column(String(200), nullable=True)
# ID of an external client on whose behalf the endpoint published the message
ext_client_id = Column(Text(), nullable=True)
    # Groups messages that belong logically together - useful if multiple messages
    # are published with the same timestamp by the same client yet still need
    # to be ordered correctly.
group_id = Column(Text(), nullable=True)
position_in_group = Column(Integer, nullable=True)
# What matching pattern allowed an endpoint to publish this message
pub_pattern_matched = Column(Text, nullable=False)
pub_time = Column(Numeric(20, 7, asdecimal=False), nullable=False) # When the row was created
ext_pub_time = Column(Numeric(20, 7, asdecimal=False), nullable=True) # When the message was created by publisher
expiration_time = Column(Numeric(20, 7, asdecimal=False), nullable=True)
last_updated = Column(Numeric(20, 7, asdecimal=False), nullable=True)
data = Column(Text(2 * 10 ** 9), nullable=False) # 2 GB to prompt a promotion to LONGTEXT under MySQL
data_prefix = Column(Text(), nullable=False)
data_prefix_short = Column(String(200), nullable=False)
data_format = Column(String(200), nullable=False, server_default=PUBSUB.DEFAULT.DATA_FORMAT)
mime_type = Column(String(200), nullable=False, server_default=PUBSUB.DEFAULT.MIME_TYPE)
size = Column(Integer, nullable=False)
priority = Column(Integer, nullable=False, server_default=str(PUBSUB.PRIORITY.DEFAULT))
expiration = Column(BigInteger, nullable=False, server_default='0')
has_gd = Column(Boolean(), nullable=False, server_default=sa_true()) # Guaranteed delivery
    # Is the message in at least one delivery queue, meaning that there is at least one
    # subscriber to whom this message will be sent, so the message is no longer considered
    # to be available in the topic for other subscribers to receive it,
    # i.e. it has already been transported to all subscriber queues (possibly to only one).
is_in_sub_queue = Column(Boolean(), nullable=False, server_default=sa_false())
# User-defined arbitrary context data
user_ctx = Column(_JSON(), nullable=True)
# Zato-defined arbitrary context data
zato_ctx = Column(_JSON(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
published_by_id = Column(Integer, ForeignKey('pubsub_endpoint.id', ondelete='CASCADE'), nullable=False)
published_by = relationship(
PubSubEndpoint, backref=backref('pubsub_msg_list', order_by=id, cascade='all, delete, delete-orphan'))
topic_id = Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=True)
topic = relationship(
PubSubTopic, backref=backref('pubsub_msg_list', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('pubsub_messages', order_by=id, cascade='all, delete, delete-orphan'))
pub_time_utc = None # Not used by DB
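# ################################################################################################################################

# An illustrative query sketch (not part of the original model). Given a session, it counts GD messages still
# available in a topic, i.e. ones not yet moved to any subscriber queue, mirroring the is_in_sub_queue comment
# above. Expiry handling is simplified - rows with a NULL expiration_time are ignored here.

def _example_topic_gd_depth(session, cluster_id, topic_id, now):
    return session.query(PubSubMessage).\
        filter(PubSubMessage.cluster_id==cluster_id).\
        filter(PubSubMessage.topic_id==topic_id).\
        filter(PubSubMessage.is_in_sub_queue==False).\
        filter(PubSubMessage.expiration_time > now).\
        count()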
# ################################################################################################################################
class PubSubSubscription(Base):
""" Stores high-level information about topics an endpoint subscribes to.
"""
__tablename__ = 'pubsub_sub'
__table_args__ = (
Index('pubsb_sub_clust_idx', 'cluster_id', unique=False),
Index('pubsb_sub_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_sub_clust_endpt_idx', 'cluster_id', 'endpoint_id', 'topic_id', unique=False),
Index('pubsb_sub_clust_subk', 'sub_key', unique=True),
{})
id = Column(Integer, Sequence('pubsub_sub_seq'), primary_key=True)
is_internal = Column(Boolean(), nullable=False, default=False)
creation_time = Column(Numeric(20, 7, asdecimal=False), nullable=False)
sub_key = Column(String(200), nullable=False) # Externally visible ID of this subscription
sub_pattern_matched = Column(Text, nullable=False)
deliver_by = Column(Text, nullable=True) # Delivery order, e.g. by priority, date etc.
ext_client_id = Column(Text, nullable=True) # Subscriber's ID as it is stored by that external system
is_durable = Column(Boolean(), nullable=False, default=True) # For now always True = survives cluster restarts
has_gd = Column(Boolean(), nullable=False) # Guaranteed delivery
active_status = Column(String(200), nullable=False, default=PUBSUB.QUEUE_ACTIVE_STATUS.FULLY_ENABLED.id)
is_staging_enabled = Column(Boolean(), nullable=False, default=False)
delivery_method = Column(String(200), nullable=False, default=PUBSUB.DELIVERY_METHOD.NOTIFY.id)
delivery_data_format = Column(String(200), nullable=False, default=DATA_FORMAT.JSON)
delivery_endpoint = Column(Text, nullable=True)
    # This is updated only periodically, e.g. once an hour, rather than each time the subscriber is seen,
    # so the value is not the exact time of the last interaction with the subscriber but rather a time,
    # within a certain range (default=60 minutes), at which any action was last carried out with the subscriber.
    # For WSX subscribers, this value will never be less than their ping timeout.
last_interaction_time = Column(Numeric(20, 7, asdecimal=False), nullable=True)
last_interaction_type = Column(String(200), nullable=True)
last_interaction_details = Column(Text, nullable=True)
# How many messages to deliver in a single batch for that endpoint
delivery_batch_size = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.DELIVERY_BATCH_SIZE)
    # If delivery_batch_size is 1, whether such a single message delivered to the endpoint
    # should be sent as-is or wrapped in a single-element list.
wrap_one_msg_in_list = Column(Boolean(), nullable=False)
# How many bytes to send at most in a single delivery
delivery_max_size = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.DELIVERY_MAX_SIZE) # Unused for now
# How many times to retry delivery for a single message
delivery_max_retry = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.DELIVERY_MAX_RETRY)
# Should a failed delivery of a single message block the entire delivery queue
# until that particular message has been successfully delivered.
delivery_err_should_block = Column(Boolean(), nullable=False)
# How many seconds to wait on a TCP socket error
wait_sock_err = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.WAIT_TIME_SOCKET_ERROR)
# How many seconds to wait on an error other than a TCP socket one
wait_non_sock_err = Column(Integer(), nullable=False, default=PUBSUB.DEFAULT.WAIT_TIME_NON_SOCKET_ERROR)
# A hook service invoked before messages are delivered for this specific subscription
hook_service_id = Column(Integer, ForeignKey('service.id', ondelete='CASCADE'), nullable=True)
# REST/POST
out_http_method = Column(Text, nullable=True, default='POST') # E.g. POST or PATCH
# AMQP
amqp_exchange = Column(Text, nullable=True)
amqp_routing_key = Column(Text, nullable=True)
# Flat files
files_directory_list = Column(Text, nullable=True)
# FTP
ftp_directory_list = Column(Text, nullable=True)
# SMS - Twilio
sms_twilio_from = Column(Text, nullable=True)
sms_twilio_to_list = Column(Text, nullable=True)
# SMTP
smtp_subject = Column(Text, nullable=True)
smtp_from = Column(Text, nullable=True)
smtp_to_list = Column(Text, nullable=True)
smtp_body = Column(Text, nullable=True)
smtp_is_html = Column(Boolean(), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
topic_id = Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=False)
topic = relationship(
PubSubTopic, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
endpoint_id = Column(Integer, ForeignKey('pubsub_endpoint.id', ondelete='CASCADE'), nullable=True)
endpoint = relationship(
PubSubEndpoint, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_job_id = Column(Integer, ForeignKey('job.id', ondelete='CASCADE'), nullable=True)
out_job = relationship(
Job, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_http_soap_id = Column(Integer, ForeignKey('http_soap.id', ondelete='CASCADE'), nullable=True)
out_http_soap = relationship(
HTTPSOAP, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_smtp_id = Column(Integer, ForeignKey('email_smtp.id', ondelete='CASCADE'), nullable=True)
out_smtp = relationship(
SMTP, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_amqp_id = Column(Integer, ForeignKey('out_amqp.id', ondelete='CASCADE'), nullable=True)
out_amqp = relationship(
OutgoingAMQP, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
out_gen_conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=True)
out_gen_conn = relationship(
'GenericConn', backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
ws_channel_id = Column(Integer, ForeignKey('channel_web_socket.id', ondelete='CASCADE'), nullable=True)
ws_channel = relationship(
ChannelWebSocket, backref=backref('pubsub_ws_subs', order_by=id, cascade='all, delete, delete-orphan'))
# Server that will run the delivery task for this subscription
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=True)
server = relationship(
Server, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=True)
cluster = relationship(
Cluster, backref=backref('pubsub_sub_list', order_by=id, cascade='all, delete, delete-orphan'))
name = None # Not used by DB
topic_name = None # Not used by DB
total_depth = None # Not used by DB
current_depth_gd = None # Not used by DB
current_depth_non_gd = None # Not used by DB
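# ################################################################################################################################

# An illustrative sketch (not part of the original model) of how a delivery task could fetch messages waiting
# for a given sub_key. It joins the per-subscriber queue (PubSubEndpointEnqueuedMessage, defined below) with
# message bodies through the pub_msg_id string FK.

def _example_fetch_queue_messages(session, cluster_id, sub_key, batch_size=50):
    return session.query(PubSubMessage).\
        join(PubSubEndpointEnqueuedMessage, PubSubEndpointEnqueuedMessage.pub_msg_id==PubSubMessage.pub_msg_id).\
        filter(PubSubEndpointEnqueuedMessage.cluster_id==cluster_id).\
        filter(PubSubEndpointEnqueuedMessage.sub_key==sub_key).\
        filter(PubSubEndpointEnqueuedMessage.is_deliverable==True).\
        order_by(PubSubMessage.priority.desc(), PubSubMessage.pub_time).\
        limit(batch_size).\
        all()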
# ################################################################################################################################
class PubSubEndpointEnqueuedMessage(Base):
""" A queue of messages for an individual endpoint subscribed to a topic.
"""
__tablename__ = 'pubsub_endp_msg_queue'
__table_args__ = (
Index('pubsb_enms_q_pubmid_idx', 'cluster_id', 'pub_msg_id', unique=False),
Index('pubsb_enms_q_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_enms_q_endp_idx', 'cluster_id', 'endpoint_id', unique=False),
Index('pubsb_enms_q_subs_idx', 'cluster_id', 'sub_key', unique=False),
Index('pubsb_enms_q_endptp_idx', 'cluster_id', 'endpoint_id', 'topic_id', unique=False),
{})
__mapper_args__ = {
'confirm_deleted_rows': False
}
id = Column(Integer, Sequence('pubsub_msg_seq'), primary_key=True)
    creation_time = Column(Numeric(20, 7, asdecimal=False), nullable=False) # When the message was enqueued
delivery_count = Column(Integer, nullable=False, server_default='0')
last_delivery_time = Column(Numeric(20, 7, asdecimal=False), nullable=True)
is_in_staging = Column(Boolean(), nullable=False, server_default=sa_false())
sub_pattern_matched = Column(Text, nullable=False)
    # A flag indicating whether this message is deliverable at all - it will be set to False
    # after delivery_count reaches the subscription's max retries or if a hook service decides so.
is_deliverable = Column(Boolean(), nullable=False, server_default=sa_true())
delivery_status = Column(Integer, nullable=False, server_default=str(PUBSUB.DELIVERY_STATUS.INITIALIZED))
delivery_time = Column(Numeric(20, 7, asdecimal=False), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
pub_msg_id = Column(String(200), ForeignKey('pubsub_message.pub_msg_id', ondelete='CASCADE'), nullable=False)
endpoint_id = Column(Integer, ForeignKey('pubsub_endpoint.id', ondelete='CASCADE'), nullable=False)
endpoint = relationship(PubSubEndpoint,
backref=backref('pubsub_endp_q_list', order_by=id, cascade='all, delete, delete-orphan'))
topic_id = Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=False)
topic = relationship(PubSubTopic, backref=backref('pubsub_endp_q_list', order_by=id, cascade='all, delete, delete-orphan'))
sub_key = Column(String(200), ForeignKey('pubsub_sub.sub_key', ondelete='CASCADE'), nullable=False)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('pubsub_endpoint_queues', order_by=id, cascade='all, delete, delete-orphan'))
queue_name = None # Not used by DB
# ################################################################################################################################
class PubSubEndpointQueueInteraction(Base):
""" A series of interactions with a message queue's endpoint.
"""
__tablename__ = 'pubsub_endp_msg_q_inter'
__table_args__ = (
Index('pubsb_enms_qi_id_idx', 'cluster_id', 'id', unique=True),
Index('pubsb_enms_qi_endptp_idx', 'cluster_id', 'queue_id', unique=False),
{})
id = Column(Integer, Sequence('pubsub_msg_seq'), primary_key=True)
entry_timestamp = Column(Numeric(20, 7, asdecimal=False), nullable=False) # When the row was created
inter_type = Column(String(200), nullable=False)
inter_details = Column(Text, nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
queue_id = Column(Integer, ForeignKey('pubsub_endp_msg_queue.id', ondelete='CASCADE'), nullable=False)
queue = relationship(
PubSubEndpointEnqueuedMessage, backref=backref(
'pubsub_endpoint_queue_interactions', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(
Cluster, backref=backref('pubsub_endpoint_queue_interactions', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class PubSubChannel(Base):
""" An N:N mapping between arbitrary channels and topics to which their messages should be sent.
"""
__tablename__ = 'pubsub_channel'
__table_args__ = (UniqueConstraint('cluster_id', 'conn_id', 'conn_type', 'topic_id'), {})
id = Column(Integer, Sequence('pubsub_channel_seq'), primary_key=True)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False)
conn_id = Column(String(100), nullable=False)
conn_type = Column(String(100), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
topic_id = Column(Integer, ForeignKey('pubsub_topic.id', ondelete='CASCADE'), nullable=False)
topic = relationship(
PubSubTopic, backref=backref('pubsub_channel_list', order_by=id, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('pubsub_channel_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class SMSTwilio(Base):
""" Outgoing SMS connections with Twilio.
"""
__tablename__ = 'sms_twilio'
__table_args__ = (
UniqueConstraint('name', 'cluster_id'),
{})
id = Column(Integer, Sequence('sms_twilio_id_seq'), primary_key=True)
name = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False, default=False)
account_sid = Column(String(200), nullable=False)
auth_token = Column(String(200), nullable=False)
default_from = Column(String(200), nullable=True)
default_to = Column(String(200), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('sms_twilio_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericObject(Base):
""" A generic data object.
"""
__tablename__ = 'generic_object'
__table_args__ = (
Index('gen_obj_uq_name_type', 'name', 'type_', 'cluster_id', unique=True,
mysql_length={'name':191, 'type_':191}),
Index('gen_obj_par_id', 'cluster_id', 'parent_id', 'parent_type', unique=False,
mysql_length={'parent_id':191, 'parent_type':191}),
Index('gen_obj_cat_id', 'cluster_id', 'category_id', unique=False,
mysql_length={'category_id':191}),
Index('gen_obj_cat_subcat_id', 'cluster_id', 'category_id', 'subcategory_id', unique=False,
mysql_length={'category_id':191, 'subcategory_id':191}),
Index('gen_obj_cat_name', 'cluster_id', 'category_name', unique=False,
mysql_length={'category_name':191}),
Index('gen_obj_cat_subc_name', 'cluster_id', 'category_name', 'subcategory_name', unique=False,
mysql_length={'category_name':191, 'subcategory_name':191}),
Index('gen_obj_par_obj_id', 'cluster_id', 'parent_object_id', unique=False),
{})
id = Column(Integer, Sequence('generic_object_seq'), primary_key=True)
name = Column(Text(191), nullable=False)
type_ = Column(Text(191), nullable=False)
subtype = Column(Text(191), nullable=True)
category_id = Column(Text(191), nullable=True)
subcategory_id = Column(Text(191), nullable=True)
creation_time = Column(DateTime, nullable=False)
last_modified = Column(DateTime, nullable=False)
category_name = Column(Text(191), nullable=True)
subcategory_name = Column(Text(191), nullable=True)
# This references back to generic objects
parent_object_id = Column(Integer, nullable=True)
# This may reference objects other than the current model
parent_id = Column(Text(191), nullable=True)
parent_type = Column(Text(191), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
generic_conn_def_id = Column(Integer, ForeignKey('generic_conn_def.id', ondelete='CASCADE'), nullable=True)
generic_conn_def_sec_id = Column(Integer, ForeignKey('generic_conn_def_sec.id', ondelete='CASCADE'), nullable=True)
generic_conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=True)
generic_conn_sec_id = Column(Integer, ForeignKey('generic_conn_sec.id', ondelete='CASCADE'), nullable=True)
generic_conn_client_id = Column(Integer, ForeignKey('generic_conn_client.id', ondelete='CASCADE'), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_object_list', order_by=name, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConnDef(Base):
""" Generic connection definitions - with details kept in JSON.
"""
__tablename__ = 'generic_conn_def'
__table_args__ = (
UniqueConstraint('name', 'type_', 'cluster_id'),
{})
id = Column(Integer, Sequence('generic_conn_def_seq'), primary_key=True)
name = Column(String(200), nullable=False)
type_ = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False, default=False)
cache_expiry = Column(Integer, nullable=True, default=0)
address = Column(Text(), nullable=True)
port = Column(Integer, nullable=True)
timeout = Column(Integer, nullable=True)
data_format = Column(String(60), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# Both are needed because some connections can be duplex
is_channel = Column(Boolean(), nullable=False)
is_outconn = Column(Boolean(), nullable=False)
version = Column(String(200), nullable=True)
extra = Column(Text(), nullable=True)
pool_size = Column(Integer(), nullable=False)
# This can be used if only one security definition should be assigned to the object
username = Column(String(1000), nullable=True)
username_type = Column(String(45), nullable=True)
secret = Column(String(1000), nullable=True)
secret_type = Column(String(45), nullable=True)
# Is RBAC enabled for the object
sec_use_rbac = Column(Boolean(), nullable=False, default=False)
cache_id = Column(Integer, ForeignKey('cache.id', ondelete='CASCADE'), nullable=True)
cache = relationship('Cache', backref=backref('generic_conn_def_list', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_conn_def_list', order_by=id, cascade='all, delete, delete-orphan'))
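# ################################################################################################################################

# An illustrative sketch (not part of the original model). Generic connection definitions keep type-specific
# details in the opaque1 JSON column, so attributes without a dedicated column can be stored as serialised
# JSON - the dumps/loads pair below is just one possible convention.

def _example_set_opaque_attrs(conn_def, **attrs):
    from json import dumps
    conn_def.opaque1 = dumps(attrs)

def _example_get_opaque_attrs(conn_def):
    from json import loads
    return loads(conn_def.opaque1) if conn_def.opaque1 else {}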
# ################################################################################################################################
class GenericConnDefSec(Base):
""" N:N security mappings for generic connection definitions.
"""
__tablename__ = 'generic_conn_def_sec'
__table_args__ = (
UniqueConstraint('conn_def_id', 'sec_base_id', 'cluster_id'),
{})
id = Column(Integer, Sequence('generic_conn_def_sec_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
conn_def_id = Column(Integer, ForeignKey('generic_conn_def.id', ondelete='CASCADE'), nullable=False)
conn_def = relationship(GenericConnDef, backref=backref('generic_conn_def_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
sec_base_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=False)
sec_base = relationship(SecurityBase, backref=backref('generic_conn_def_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_conn_def_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConn(Base):
""" Generic connections - with details kept in JSON.
"""
__tablename__ = 'generic_conn'
__table_args__ = (
UniqueConstraint('name', 'type_', 'cluster_id'),
{})
id = Column(Integer, Sequence('generic_conn_def_seq'), primary_key=True)
name = Column(String(200), nullable=False)
type_ = Column(String(200), nullable=False)
is_active = Column(Boolean(), nullable=False)
is_internal = Column(Boolean(), nullable=False, default=False)
cache_expiry = Column(Integer, nullable=True, default=0)
address = Column(Text(), nullable=True)
port = Column(Integer, nullable=True)
timeout = Column(Integer, nullable=True)
data_format = Column(String(60), nullable=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
# Both are needed because some connections can be duplex
is_channel = Column(Boolean(), nullable=False)
is_outconn = Column(Boolean(), nullable=False)
version = Column(String(200), nullable=True)
extra = Column(Text(), nullable=True)
pool_size = Column(Integer(), nullable=False)
# This can be used if only one security definition should be assigned to the object
username = Column(String(1000), nullable=True)
username_type = Column(String(45), nullable=True)
secret = Column(String(1000), nullable=True)
secret_type = Column(String(45), nullable=True)
# Is RBAC enabled for the object
sec_use_rbac = Column(Boolean(), nullable=False, default=False)
# Some connections will have a connection definition assigned
conn_def_id = Column(Integer, ForeignKey('generic_conn_def.id', ondelete='CASCADE'), nullable=True)
conn_def = relationship(GenericConnDef, backref=backref('generic_conn_def_list',
order_by=id, cascade='all, delete, delete-orphan'))
cache_id = Column(Integer, ForeignKey('cache.id', ondelete='CASCADE'), nullable=True)
cache = relationship('Cache', backref=backref('generic_conn_list', order_by=name, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_conn_list', order_by=id, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConnSec(Base):
""" N:N security mappings for generic connections.
"""
__tablename__ = 'generic_conn_sec'
__table_args__ = (
UniqueConstraint('conn_id', 'sec_base_id', 'cluster_id'),
{})
id = Column(Integer, Sequence('generic_conn_sec_seq'), primary_key=True)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=False)
conn = relationship(GenericConn, backref=backref('generic_conn_list', order_by=id,
cascade='all, delete, delete-orphan'))
sec_base_id = Column(Integer, ForeignKey('sec_base.id', ondelete='CASCADE'), nullable=False)
sec_base = relationship(SecurityBase, backref=backref('generic_conn_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('generic_conn_sec_list', order_by=id,
cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class GenericConnClient(Base):
""" A live client connection.
"""
__tablename__ = 'generic_conn_client'
__table_args__ = (
Index('gen_conn_cli_idx', 'cluster_id', 'pub_client_id', unique=False),
Index('gen_conn_cli_ext_n_idx', 'cluster_id', 'ext_client_name', unique=False),
Index('gen_conn_cli_ext_i_idx', 'cluster_id', 'ext_client_id', unique=False),
Index('gen_conn_cli_pr_addr_idx', 'cluster_id', 'peer_address', unique=False),
Index('gen_conn_cli_pr_fqdn_idx', 'cluster_id', 'peer_fqdn', unique=False),
{})
# This ID is for SQL
id = Column(Integer, Sequence('generic_conn_client_seq'), primary_key=True)
is_internal = Column(Boolean(), nullable=False)
# This one is assigned by Zato
pub_client_id = Column(String(200), nullable=False)
# These are assigned by clients themselves
ext_client_id = Column(String(200), nullable=False)
ext_client_name = Column(String(200), nullable=True)
local_address = Column(String(400), nullable=False)
peer_address = Column(String(400), nullable=False)
peer_fqdn = Column(String(400), nullable=False)
connection_time = Column(DateTime, nullable=False)
last_seen = Column(DateTime, nullable=False)
server_proc_pid = Column(Integer, nullable=True)
server_name = Column(String(200), nullable=True) # References server.name
conn_id = Column(Integer, ForeignKey('generic_conn.id', ondelete='CASCADE'), nullable=False)
conn = relationship(
GenericConn, backref=backref('clients', order_by=local_address, cascade='all, delete, delete-orphan'))
server_id = Column(Integer, ForeignKey('server.id', ondelete='CASCADE'), nullable=True)
server = relationship(
Server, backref=backref('gen_conn_clients', order_by=local_address, cascade='all, delete, delete-orphan'))
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(
Cluster, backref=backref('gen_conn_clients', order_by=last_seen, cascade='all, delete, delete-orphan'))
# ################################################################################################################################
class RateLimitState(Base):
""" Rate limiting persistent storage for exact definitions.
"""
__tablename__ = 'rate_limit_state'
__table_args__ = (
Index('rate_lim_obj_idx', 'object_type', 'object_id', 'period', 'last_network', unique=True,
mysql_length={'object_type':191, 'object_id':191, 'period':191, 'last_network':191}),
{})
id = Column(Integer(), Sequence('rate_limit_state_seq'), primary_key=True)
object_type = Column(Text(191), nullable=False)
object_id = Column(Text(191), nullable=False)
period = Column(Text(), nullable=False)
requests = Column(Integer(), nullable=False, server_default='0')
last_cid = Column(Text(), nullable=False)
last_request_time_utc = Column(DateTime(), nullable=False)
last_from = Column(Text(), nullable=False)
last_network = Column(Text(), nullable=False)
# JSON data is here
opaque1 = Column(_JSON(), nullable=True)
cluster_id = Column(Integer, ForeignKey('cluster.id', ondelete='CASCADE'), nullable=False)
cluster = relationship(Cluster, backref=backref('rate_limit_state_list', order_by=id, cascade='all, delete, delete-orphan'))
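# ################################################################################################################################

# An illustrative sketch (not part of the original model) of bumping the per-period request counter for an
# object/network pair, creating the row first if needed. The server's actual upsert strategy is not shown
# here - this is only one way to do it.

def _example_update_rate_limit(session, cluster_id, object_type, object_id, period, network, cid, from_, now):
    state = session.query(RateLimitState).\
        filter_by(cluster_id=cluster_id, object_type=object_type, object_id=object_id,
            period=period, last_network=network).\
        first()
    if not state:
        state = RateLimitState(cluster_id=cluster_id, object_type=object_type, object_id=object_id,
            period=period, last_network=network, requests=0)
        session.add(state)
    state.requests += 1
    state.last_cid = cid
    state.last_request_time_utc = now
    state.last_from = from_
    session.commit()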
# ################################################################################################################################
# End of file: src/zato/common/odb/model/__init__.py
# ################################################################################################################################
# stdlib
from typing import Optional as optional
# Zato
from zato.common.typing_ import dataclass
# ################################################################################################################################
# ################################################################################################################################
class Default:
# This is relative to server.conf's main.work_dir
fs_data_path = 'events'
# Sync database to disk once in that many events ..
sync_threshold = 30_000
# .. or once in that many seconds.
sync_interval = 30
# ################################################################################################################################
# ################################################################################################################################
class EventInfo:
class EventType:
service_request = 1_000_000
service_response = 1_000_001
class ObjectType:
service = 2_000_000
# ################################################################################################################################
# ################################################################################################################################
# All event actions possible
class Action:
Ping = b'01'
PingReply = b'02'
Push = b'03'
GetTable = b'04'
GetTableReply = b'05'
SyncState = b'06'
LenAction = len(Ping)
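# ################################################################################################################################

# An illustrative sketch (not an official API). Each frame on the wire is a fixed-size two-byte action code
# followed by an optional payload and a trailing newline (see the client's send method in client.py), so a
# received line can be split back into its parts like this:

def _example_parse_frame(frame):
    # type: (bytes) -> tuple
    action = frame[:Action.LenAction]
    data = frame[Action.LenAction:]
    return action, data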
# ################################################################################################################################
# ################################################################################################################################
@dataclass(init=False)
class PushCtx:
id: str
cid: str
timestamp: str
event_type: int
source_type: optional[str] = None
source_id: optional[str] = None
object_type: int
object_id: str
recipient_type: optional[str] = None
recipient_id: optional[str] = None
total_time_ms: int
def __hash__(self):
return hash(self.id)
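# ################################################################################################################################

# An illustrative sketch (not part of the original module). Because the dataclass is declared with init=False,
# a context object is populated attribute by attribute before being pushed - all literal values below are
# made up.

def _example_build_push_ctx():
    ctx = PushCtx()
    ctx.id = 'evt-1'
    ctx.cid = 'cid-1'
    ctx.timestamp = '2021-01-01T00:00:00'
    ctx.event_type = EventInfo.EventType.service_response
    ctx.object_type = EventInfo.ObjectType.service
    ctx.object_id = 'my.service'
    ctx.total_time_ms = 7
    return ctx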
# ################################################################################################################################
# ################################################################################################################################
# End of file: src/zato/common/events/common.py
# ################################################################################################################################
# stdlib
import socket
from datetime import datetime
from logging import getLogger
# gevent
from gevent import sleep
from gevent.lock import RLock
# orjson
from orjson import dumps
# simdjson
from simdjson import loads
# Zato
from zato.common.events.common import Action
from zato.common.typing_ import asdict
from zato.common.util.api import new_cid
from zato.common.util.tcp import read_from_socket, SocketReaderCtx, wait_until_port_taken
# ################################################################################################################################
# ################################################################################################################################
if 0:
from zato.common.events.common import PushCtx
PushCtx = PushCtx
# ################################################################################################################################
# ################################################################################################################################
logger = getLogger(__name__)
# ################################################################################################################################
# ################################################################################################################################
utcnow = datetime.utcnow
# ################################################################################################################################
# ################################################################################################################################
class Client:
def __init__(self, host, port):
# type: (str, int) -> None
self.host = host
self.port = port
self.remote_addr_str = '{}:{}'.format(self.host, self.port)
self.socket = None # type: socket.socket
self.peer_name = '<Client-peer_name-default>'
self.peer_name_str = '<Client-peer_name_str-default>'
self.conn_id = 'zstrcl' + new_cid(bytes=4)
self.max_wait_time = 30
self.max_msg_size = 30_000_000
self.read_buffer_size = 30_000_000
self.recv_timeout = 30
self.should_log_messages = False
self.is_connected = False
self.lock = RLock()
# ################################################################################################################################
def connect(self):
# For later use
start = utcnow()
with self.lock:
if self.is_connected:
return
self.socket = socket.socket(type=socket.SOCK_STREAM)
while not self.is_connected:
logger.info('Connecting to %s', self.remote_addr_str)
try:
self.socket.connect((self.host, self.port))
self.peer_name = self.socket.getpeername()
self.peer_name_str = '{}:{}'.format(*self.peer_name)
except Exception as e:
logger.info('Connection error `%s` (%s) -> %s', e.args, utcnow() - start, self.remote_addr_str)
sleep(1)
else:
logger.info('Connected to %s after %s', self.remote_addr_str, utcnow() - start)
self.is_connected = True
# ################################################################################################################################
def send(self, action, data=b''):
        # type: (bytes, bytes) -> None
with self.lock:
try:
self.socket.sendall(action + data + b'\n')
except Exception as e:
self.is_connected = False
logger.warning('Socket send error `%s` -> %s', e.args, self.remote_addr_str)
self.close()
self.connect()
# ################################################################################################################################
def read(self):
# type: () -> bytes
with self.lock:
# Build a receive context ..
ctx = SocketReaderCtx(
self.conn_id,
self.socket,
self.max_wait_time,
self.max_msg_size,
self.read_buffer_size,
self.recv_timeout,
self.should_log_messages
)
# .. wait for the reply and return it.
return read_from_socket(ctx)
# ################################################################################################################################
def ping(self):
logger.info('Pinging %s (%s)', self.peer_name_str, self.conn_id)
# Send the ping message ..
self.send(Action.Ping)
# .. wait for the reply ..
response = self.read()
# .. and raise an exception in case of any error.
if response and response != Action.PingReply:
raise ValueError('Unexpected response received from `{}` -> `{}`'.format(self.peer_name, response))
# ################################################################################################################################
def push(self, ctx):
# type: (PushCtx) -> None
# Serialise the context to dict ..
data = asdict(ctx)
# .. now to JSON ..
data = dumps(data)
# .. and send it across (there will be no response).
self.send(Action.Push, data)
# ################################################################################################################################
def get_table(self):
# Request the tabulated data ..
self.send(Action.GetTable)
# .. wait for the reply ..
response = self.read()
# .. and raise an exception in case of any error.
if response and (not response.startswith(Action.GetTableReply)):
raise ValueError('Unexpected response received from `{}` -> `{}`'.format(self.peer_name, response))
table = response[Action.LenAction:]
return loads(table) if table else None
# ################################################################################################################################
def sync_state(self):
# Request that the database sync its state with persistent storage ..
self.send(Action.SyncState)
# .. wait for the reply
self.read()
# ################################################################################################################################
def close(self):
self.socket.close()
# ################################################################################################################################
def run(self):
# Make sure that we have a port to connect to ..
wait_until_port_taken(self.port, 5)
# .. do connect now ..
self.connect()
# .. and ping the remote end to confirm that we have connectivity.
self.ping()
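# ################################################################################################################################

# A minimal usage sketch (not part of the original module). The host and port are made up - in practice they
# point to the in-process events server started elsewhere.

def _example_client_usage():
    client = Client('127.0.0.1', 34567)
    client.run()                # Waits for the port, connects and pings
    table = client.get_table()  # Tabulated statistics, already deserialised from JSON
    client.sync_state()         # Ask the server to persist its state to disk
    client.close()
    return table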
# ################################################################################################################################
# ################################################################################################################################
# End of file: src/zato/common/events/client.py
# ################################################################################################################################
# stdlib
from tempfile import gettempdir
from threading import Thread
# pyftpdlib
from pyftpdlib.authorizers import DummyAuthorizer as _DummyAuthorizer
from pyftpdlib.handlers import FTPHandler as _FTPHandler
from pyftpdlib.servers import FTPServer as _ImplFTPServer
# ################################################################################################################################
# ################################################################################################################################
class config:
port = 11021
username = '111'
password = '222'
directory = gettempdir()
# ################################################################################################################################
# ################################################################################################################################
def create_ftp_server():
# type: () -> _ImplFTPServer
authorizer = _DummyAuthorizer()
authorizer.add_user(config.username, config.password, config.directory, 'elradfmwMT')
handler = _FTPHandler
handler.authorizer = authorizer
handler.banner = 'Welcome to Zato'
handler.log_prefix = '[%(username)s]@%(remote_ip)s'
address = ('', config.port)
server = _ImplFTPServer(address, handler)
server.max_cons = 10
server.max_cons_per_ip = 10
return server
# ################################################################################################################################
# ################################################################################################################################
class FTPServer(Thread):
def __init__(self):
self.impl = create_ftp_server()
Thread.__init__(self, target=self.impl.serve_forever)
self.setDaemon(True)
def stop(self):
self.impl.close_all()
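# ################################################################################################################################

# A minimal usage sketch (not part of the original module) showing how a test could talk to the embedded
# server with the standard library's ftplib.

def _example_ftp_usage():
    from ftplib import FTP
    server = FTPServer()
    server.start()
    ftp = FTP()
    ftp.connect('127.0.0.1', config.port)
    ftp.login(config.username, config.password)
    files = ftp.nlst()
    ftp.quit()
    server.stop()
    return files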
# ################################################################################################################################
# ################################################################################################################################
# End of file: src/zato/common/test/ftp.py
# ################################################################################################################################
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import socket
import ssl
from http.client import OK
from tempfile import NamedTemporaryFile
from threading import Thread
try:
    from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer # Python 2
except ImportError:
    from http.server import BaseHTTPRequestHandler, HTTPServer # Python 3
# psutil
import psutil
# Python 2/3 compatibility
from past.builtins import xrange
# Zato
from zato.common.api import ZATO_OK
from zato.common.test.tls_material import ca_cert, server1_cert, server1_key
def get_free_port(start=20001, end=50000):
taken = []
for c in psutil.net_connections(kind='inet'):
if c.status == psutil.CONN_LISTEN:
taken.append(c.laddr[1])
for port in xrange(start, end):
if port not in taken:
return port
class _HTTPHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(OK)
        self.send_header('Content-Type', 'application/json')
        self.end_headers()
        self.wfile.write(b'\n')
        self.wfile.write('"{}"'.format(ZATO_OK).encode('utf8'))
do_DELETE = do_OPTIONS = do_POST = do_PUT = do_PATCH = do_GET
def log_message(self, *ignored_args, **ignored_kwargs):
        pass # Base class logs to stderr and we want to silence it
class _TLSServer(HTTPServer):
def __init__(self, cert_reqs, ca_cert):
self.port = get_free_port()
self.cert_reqs = cert_reqs
        self.ca_cert = ca_cert
HTTPServer.__init__(self, ('0.0.0.0', self.port), _HTTPHandler)
def server_bind(self):
with NamedTemporaryFile(prefix='zato-tls', delete=False) as server1_key_tf:
server1_key_tf.write(server1_key)
server1_key_tf.flush()
with NamedTemporaryFile(prefix='zato-tls', delete=False) as server1_cert_tf:
server1_cert_tf.write(server1_cert)
server1_cert_tf.flush()
with NamedTemporaryFile(prefix='zato-tls', delete=False) as ca_cert_tf:
ca_cert_tf.write(ca_cert)
ca_cert_tf.flush()
self.socket = ssl.wrap_socket(
self.socket, server1_key_tf.name, server1_cert_tf.name, True, self.cert_reqs, ca_certs=ca_cert_tf.name)
if self.allow_reuse_address:
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
self.socket.bind(self.server_address)
self.server_address = self.socket.getsockname()
class TLSServer(Thread):
def __init__(self, cert_reqs=ssl.CERT_NONE, ca_cert=None):
Thread.__init__(self)
self.setDaemon(True)
self.server = None
self.cert_reqs = cert_reqs
        self.ca_cert = ca_cert
def get_port(self):
return self.server.port
def stop(self):
self.server.server_close()
def run(self):
self.server = _TLSServer(self.cert_reqs, self.ca_cert)
        self.server.serve_forever()
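# ################################################################################################################################

# A minimal usage sketch (not part of the original module), Python 3 only. Certificate verification is
# disabled because the TLS material used by the server is self-signed test data; the private
# ssl._create_unverified_context helper is used for brevity.

def _example_tls_usage():
    import ssl as _ssl
    from time import sleep
    from urllib.request import urlopen
    server = TLSServer()
    server.start()
    sleep(0.2) # Give the server thread a moment to bind
    response = urlopen('https://localhost:{}/'.format(server.get_port()), context=_ssl._create_unverified_context())
    data = response.read()
    server.stop()
    return data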
# ################################################################################################################################
# End of file: src/zato/common/test/tls.py
# ################################################################################################################################

from __future__ import absolute_import, division, print_function, unicode_literals
# # ##############################################################################################################################
#
# ** WARNING ** WARNING ** WARNING **
#
# Crypto material below is not safe for use outside of Zato's own unittests. Don't use it anywhere else.
#
# # ##############################################################################################################################
ca_key = """
-----BEGIN RSA PRIVATE KEY-----
MIIJKAIBAAKCAgEAvx4Np3z+u6MJkXfqRby7nNk5ucqDFHY0ZB4Tj+0xM1AKQP80
4YtPAkTrGSnpjJqGl8LlG+NYy8WWrYggObuwXpgkcPjG2TkzlCDXW2gnzFUuI/iM
StTl7dZMH6/MG89eTnWeruglkH4Dp3kx+MfkpFLcnPr2IkL4/drfZsFXhYMdLTj/
zkq5mkx32kzh77AIdlO2COZ3qTIF9LtZS9X6RwXSowWkT+KPCortI79mjJyiQ2K+
H18N75adcdWmDVHQL+HlEVee1NR2THRVaYsf9yGFcjD0EOJPZsv2GGKzIR0eiQOa
4nK5ZS40uqoGsB7hj+j3aC+2QncXNYPm9Rzjp/NQBG5RCczFefJ6X5Fu1VTSf4AX
b9Qln4UsWHbqbkHpuFivVtgg8yyjQTJmqZme62xdyZv+B58pXbPQf7H/eVHxJaO2
LFV0tIrYNIc9VDFiRPmUjGIXb2zX5X5p2vcy5/osmd4UJQA+cJLSOsEbPntfDDaN
zKEzTMH4dI+5qO/8raoIhTSv7CbvUzI6sb3C8NltGOs+l24UhI/apjDJs3YAwiq8
PZomOgXSCHqVYkGE9dUK9x5SRNH8dLXOv3TFoqxvWBvZ0moaRIiaUWNEyKlyUB3/
ePXZSOvJT+4bwB9j/eFFzxseaC1TSNlKHz0SLkGqW2rfV6TZYiUtq0RQOVsCAwEA
AQKCAgEAkBIbwPLdJJ3XDK4VENUhqS+n8ILVJYumGwMBRDJOMJxZ3n2EY7IcsV06
zRFnWfLzG1x0Uf2kZphg6hgAEwWGAwk2D2O6ou4YD8ZiEioGNmbQDZXETHUJj61I
XWqstxovwX2xTbD7AF2+a3VVUnF9ztIYNM6K1XEfOl7QoimFzMP2Lq0VSXHTUJns
j8f9Wi6dcnXQeA0kj4uCKedBfYWk0f11uCb8oqvroMrx0UzsBXveZmX9ZLDHVKF5
tuKT9t6Bzla/0771oQM42pGoAZQ7WJUQf/CfTEsOCDQhJGjjGEdXSXpKPAK396pJ
XZ3mxMXCzDWWrBerkZctC86PQJ+yjQ2viLsLaF/pHMe4g6vn8yqalJDOOzpal+Jx
XFAD10oslzzfBrSaL5kl8Gem/qitFAO032hPW3lUVRgsSJ9ilHYqdIrERqROeDED
yVntTTnqCjyNaHhkZl483z+wWam7skGp6G/OWI8ZveSMKRj2g+8mFcEv1ocJ+Z8h
gAS4YLQqWhtWr2zFZ6YK/Vd3PmNwyaFjZIQ5vpOESyAiQzo/SXj4MQ4FFCLUOEH7
z39ZL6GmWwSEgOBq850yPAGGflcR7dwTDIZTvffZ81wpup9IJaaginkkoan6GT7t
LCtcDqXJpoNhA/lLLQVD2E6QQE3YM5ztkFvqqhgLRMr4f7JU5IECggEBAPdYD1Lw
FukDO1wuGpvZygyum3q8CFhucRPy0PFICYRuyx3ZeZC8Y1s3Dydl1QmDX//9a8+e
E3ae3/zbbS8tLjkNpHPTUkXrRsZ3f4Pkju2efkpvdLxCBXScGhRNE4jv0d/6gniM
7EgvvutELoxBRE3GFiORhSpa+vWOdMD/aKb7uJ6QNmzpfVHzPo9KfQqDHf4cn3wr
Kd8AYpGXn8n0xEsAZMVtrpxRHII3kigCw/9N6GX+jeCPP6IiaoeSWYfC8Iza6YNI
St5XDpI8bFs5MPIV8rlM+6IJoz+3z5nh/wb92h48N0znsLWUqR0bciAP1vmSJMSw
MTLJrwMwhlobyrECggEBAMXOQ0atRNPKjW21JsdM9cuaWaZ0PC5vhC9OYyONlk8R
Ve91xqBJXeklrIgda/SCyFYMfRHBNM1yT42JmS0ubQtWu6wpUQfx6U2XT4ZEEZCq
fQG5LpVUlLdHZN/yp5VIuWITh2BFFGB+jYPvZOmX76kuuvbfDjOACh5aSuPjSIgf
X22WeNah06a6m2Qh27nIWOh4glk3xMrnHuHGj/GgvrTucjcIcs2elkzM92T0P3rU
wuJlp5VgQXCSoPikvShvArh1PBO042kQ28SYbya/mjW47RspiAJQQzvm1DVsi8FI
FXm/vu0fSHjWs18ypBYQHeyJeu/qWLxxlt7Dp3sQL8sCggEAHjY2YPYMhlert322
KFU41cW6Hgq7pjmPPFWLaf1XlEKIMtQughxQsoKOKkzI8cmHP1hwA8MWM4YCa/tN
YdbN75AYB0HHqdysH2/XNoADaUjTujnU823JBs5ObS5g9Xf9lbMenqTv831Jf6kr
WlxagHlymNOchWjpgHbvEefgm4zhpxSMYU8/zHO+r3f0wAT18+UBIgSPr7p3T7tK
fDuWgmbA6FCWZGeP6OPqyVJVKGkWuuaV49j7d81mX7rjjq6j/UB8B1ocMv5FPF1/
CsF4lglSRYn+rnMo6o6EIBK3uN3m94x5YL5oGjXXVkPU88+bfY55SUEQMVjrNKOH
tZfxcQKCAQAmdIwlwGfCGP3X10D7vB2JAK/vKWfNy0ZSgBXMAqm3I3KmhCoiXUER
o45gRAAJ4Ccce38RJZOjYVbP+HE8FGuEqc8AkGO9fK1TtVfzjWYwzsRQwnSo+XGU
FCArXZxw7FuGEq/d6nAktlXC0Za3xx8DsB8PAZxcLMdK0Vj/5t7h/99oibliWMGy
B1NQazixbJ7ESzFkMPBkVfxt/lFbs1mACV9RDaZsDSnBMpPiH437zkM5CnRDGRx/
yzHaRQS1SKepvrj4R9FySqG/Hbd2PAe57ALEphVYBcycZ6rX3Atrfx0Vt05iARPw
0iS7HDhERcvbgXrSC6hGsnqXQkhcJ3BzAoIBAEd3ZQEWhPu/ChO8AUbtXK3p+G8n
s6C7lv8eh+S39+rWYNeuzyvXfVtHSseWG7TWDnDne3WmbQr4nlsLCcsp4wZYxTB+
ysfQnv1qXOQeR4FGGrJ2x9co2rXuIqLiECWY+4IAo5vsjMVi8ZZ4Alj1l78Lg02W
WYI7lUgWGFaz6QfMZBCQ08Xnys6ueRZuR8SvI2SvStvehVeCpqOHP8uLxjBkLmSA
uosil5GtOP9pgnn+W1tkscTSsTIgsCF3i9qDD7XYdtEDZel80ugDn3SUYbkLRgpi
q39wvU1nNWuOlUvW4Eg0ofYIWdgffJvRGLJNJ6+KhBovnkA54JJg1Stwokc=
-----END RSA PRIVATE KEY-----
""".strip()
ca_cert = """
-----BEGIN CERTIFICATE-----
MIIFoDCCA4igAwIBAgIJAMpUuR9ijhIRMA0GCSqGSIb3DQEBBQUAMBsxCzAJBgNV
BAYTAkFVMQwwCgYDVQQDEwNDQTIwHhcNMTQwNzIwMTgyMTU2WhcNMjQwNzE3MTgy
MTU2WjAbMQswCQYDVQQGEwJBVTEMMAoGA1UEAxMDQ0EyMIICIjANBgkqhkiG9w0B
AQEFAAOCAg8AMIICCgKCAgEAnMEaU26+UqOtQkiDkCiJdfB/Pv4sL7yef3iE9Taf
bpuTPdheqzkeR9NHxklyjKMjrAlVrIDu1D4ZboIDmgcq1Go4OCWhTwriFrATJYlp
LZhOlzd5/hC0SCJ1HljR4/mOWsVj/KanftMYzSNADjQ0cxVtPguj/H8Y7CDlQxQ4
d6I1+JPGCUIwG3HfSwC5Lxqp/QLUC6OuKqatwDetaE7+t9Ei6LXrFvOg6rPb4cuQ
jymzWnql0Q1NEOGyifbhXaQgO6mM5DaT/q3XtakqviUZDLbIo4IWJAmvlB8tbcbP
wzku+6jEBhkdTAzAb6K6evTK4wUUSrHTE6vF/PHq5+KLrGReX/NrCgdTH/LB/Aux
817IF2St4ohiI8XVtWoC/Ye94c1ju/LBWIFPZAxFoNJJ5zvlLwJN8/o1wuIVNQ3p
4FWTXVArmSOGEmQL48UTUFq/VKJeoDstUoyIsKnBn4uRMcYPIsMh1VF6Heayq1T9
eO2Uwkw75IZVLVA9WaXnCIc07peDREFbyWtyKzpDa2Bh8bLVQ/tyB+sBJkO2lGPb
PMRZl50IhdD7JENNfTG89LCBNioPDNQXN9q3XQYSZgQ9H70Zp+Y3/ipXvIAelPwq
Uyg7YoIjOTqFF25g2c/XSrwSpKCr22lb1vkCLUT7pA0tslMVdULo1FkkkfIDDiHs
FC8CAwEAAaOB5jCB4zAdBgNVHQ4EFgQUmh+yIUO2PG/fMMMjXjestsQPg48wSwYD
VR0jBEQwQoAUmh+yIUO2PG/fMMMjXjestsQPg4+hH6QdMBsxCzAJBgNVBAYTAkFV
MQwwCgYDVQQDEwNDQTKCCQDKVLkfYo4SETAPBgNVHRMBAf8EBTADAQH/MBEGCWCG
SAGG+EIBAQQEAwIBBjAJBgNVHRIEAjAAMCsGCWCGSAGG+EIBDQQeFhxUaW55Q0Eg
R2VuZXJhdGVkIENlcnRpZmljYXRlMAkGA1UdEQQCMAAwDgYDVR0PAQH/BAQDAgEG
MA0GCSqGSIb3DQEBBQUAA4ICAQBJsjzUBFUOpuOGz3n2MwcH8IaDtHyKNTsvhuT8
rS2zVpnexUdRgMzdG0K0mgfKLc+t/oEt8oe08ZtRnpj1FVJLvz68cPbEBxsqkTWi
Kf65vtTtoZidVBnpIC4Tq7Kx0XQXg8h+3iykqFF6ObqxZix/V9hs3QDRnTNiWGE7
thGCAWWVy1r56nkS91uhQhSWt471FevmdxOdf7+4Df8OsQGcPF6sH/TQcOVgDc20
EiapNMpRxQmhyOI7HBZdYGmHM6okGTf/mtUFhBLKDfdLfBHoGhUINiv939O6M6X3
LFserZ9DEd9IIOTsvYQyWhJDijekEtvBfehwp1NjQcity/l/pwUajw/NUok56Dj7
jHBjHJSSgb5xJ9EMrtJ2Qm2a5pUZXwF2cJIxBjQR5bufJpgiYPRjzxbncStuibps
JjSGwiGvoyGbg2xLw7sSI7C2G9KGMtwbS4Di1/e0M1WfFg/ibT3Z1VhqtEL6Yr+m
CG6rI1BBiPfJqqeryLg8q9a4CQFA+vhXSzvly/pT7jZcLyXc/6pCU6GqjFZDaiGI
sBQseOvrJQ1CouAMnwc9Z8vxOOThqtMTZsGGawi+16+5NmpLwW53V/wtHUZuk39F
29ICmBRa3wrCyhNMb+AFhaPjO34jtRGqeOJA98eS29GooycDnh/Uwi3txZu6DNmZ
NVRV1A==
-----END CERTIFICATE-----
""".strip()
ca_cert_invalid = """
-----BEGIN CERTIFICATE-----
MIIF4TCCA8mgAwIBAgIJAOnUIE1WsqOoMA0GCSqGSIb3DQEBBQUAMDAxCzAJBgNV
BAYTAkRFMSEwHwYDVQQDExh6YXRvLnVuaXR0ZXN0LmNhLmludmFsaWQwIBcNMTQw
ODAxMTYyNTMwWhgPMjExNDA3MDgxNjI1MzBaMDAxCzAJBgNVBAYTAkRFMSEwHwYD
VQQDExh6YXRvLnVuaXR0ZXN0LmNhLmludmFsaWQwggIiMA0GCSqGSIb3DQEBAQUA
A4ICDwAwggIKAoICAQCevYnCOWEf3ez1utRrUuoBDxRI8VhokIg+q6QcUQyuoTsg
ofxgVTnJC9rO/S3xXRUN0cfAbA3wzPvctTvRCcZP+3KZvL58mOfGK6GTIq2Fe2LW
tD7YPIaQRsWCWTTy/jKr9CLRqyJ+TVQLjU/CG4MCyUZ/I9+XATPMLy5ew8l24G99
Q1hYk0aB2jEtOGFV3zH4JHD2SlDgrZozcVIkVSRUPMVL8tqNZpLwohV8D4mr58ZB
0ll8SnnT4nZAGb4pgOEUgjials38gBHp3PhNhLG1BN6MdZGDqHjpI3n8T9VX3uhm
wv6nYeKG8/SqqjKgq30pEqH/gGjOBAqjdAOi7DTbsq+n6Xk0bDWEUGJG+2D8Odfu
AntUm1xpfEEKABQ/JO91HdMIi6bU+Rp30cAxBJqFl3GJt2ypADqh+h3q2vWbZtR1
XgW3j/GzhxzzgGfJ0bqZmDq/bOlcm1zbB43jiUdjay3C+HKUDmnYEkKY0+Ar9gXm
QKBgFYEnstNt2ceJiMXhrInFMMLdmHnuiQsGYHPXUvQJQqWcr1a8BSP11AXqf55p
wyONLNcKsPIqS8q0OOK89CLzsqUso7tpDYFy237nOKE9ZBMn8NtlRd9UfCLQPC6p
5lFo3/QZsuucVmKZzD2iSSIeCeTDzZsozycOkj/Cr5m4V1S4TmBQl0eA4lIlWQID
AQABo4H7MIH4MB0GA1UdDgQWBBRU926sfA4IdgQogtv3jPjcj6dYBTBgBgNVHSME
WTBXgBRU926sfA4IdgQogtv3jPjcj6dYBaE0pDIwMDELMAkGA1UEBhMCREUxITAf
BgNVBAMTGHphdG8udW5pdHRlc3QuY2EuaW52YWxpZIIJAOnUIE1WsqOoMA8GA1Ud
EwEB/wQFMAMBAf8wEQYJYIZIAYb4QgEBBAQDAgEGMAkGA1UdEgQCMAAwKwYJYIZI
AYb4QgENBB4WHFRpbnlDQSBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwCQYDVR0RBAIw
ADAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggIBAAQY6PF59X5eZ0ju
1J3KwqD41AZPymcNUAn423vQjwyN6h/gavyjyO/a9ZRw/Duk0AZ9Oca7U2ufOG5c
QtDksi2Ukk5hQNBo/UDqguPM/+buvNtGF/ibWzzIKj6YxMfFRzrTic+qAEBMli3z
UrcrgUQo//iBVx5EYMM/xZm+49Cl6wFJntzM3AL3uvphcR8vvRs9ieEpm11KtMtt
G9j/6gsOGH7INX3VRUM9MdxXF45gt/R7Xm915Juh3Qt4ZYrD+eKjuL3JypB56Tb9
7cWaLffzHKGwePYedJXczvQb5nPkgrSYN1SZQoOxaN+f3q3tkn9RcL4zsteoOHSm
PJkYTdXkUMluopXFjPOPolNKljEs8Ys0ow+6QT/PLSlGBgH7L/gUWtgxzOpd6NNK
8NES9aZtL+xpmmLkciWH/tXt9s+9+vzCUwEuXF8uvPieJgjgW6hVxFofJpyGy2Vz
ZNxG+oBSP8fXDQyNM1PFTVSdP2xVzX2VIhnZOoqUTPAbFHYlsyvXnybcourP2Jtv
Ytm+X6SGyQR4eo8wGtXHqu1H8R4/LyFuLy7Xb/ILk/Sp9F1MklNWYUA59d3PlG/a
Ds0Vj2YzSEyStP1a+HaahUZEj0+3/W/x+f8068HyWsVDGa2/9U8IZwn7+C7pK9fN
wSQh3r/cB+X3alAbvPwTlzyNXFu1
-----END CERTIFICATE-----
""".strip()
client1_key = """
-----BEGIN RSA PRIVATE KEY-----
MIIJKQIBAAKCAgEAzBsxWVTPDi8jBQFVofwMBoSdKvE+VYe+S6w+bTSUekL+pvzf
pirRGD7owGcySKgzgZ4Jj8yGEk4tjVxCwq5epEL06XLP5XMEKzk0TMYu+aINcZ2v
YCrW3Sr6/GZ9PWw3oHK2pul7g+o1sMPFtOcM1sRfVG5LdXXDXclRd5QTPO2FTrDP
cTr5LoC1jtOAJhJ0XqQd/LOV/95j4+k0ypOCCkFI5kl9caZSnaG7xMrSsssLkBrk
a99hSN4CB+1/A0vZUsPRIb4yzudlVzn/w7aWKMOQYxLrFE/NJ+4fTiJ9bBpL20jE
yiq87kCgVRbx1tVo2Rzyp+bQcDvcKZ6YXrZqj4I9s5JPiygvdnxB5dPegYBFWYkK
eosZO8gTif53vZz7yQIiYU768FYpbRW7UVWY2fk+MGBIj0hlCsclPUh66SZYiRlm
OaxaufMC4m5ofS4zJNs5HryynvnyTwxqde4iPvukPxuQKASs+z25kSWjqv8R9HqW
ct6i0GQNKO1FbcuiX8vlRjXB6bMYEJbSgccESe1yZTSIWvnw0ihFA0K+7K3NsERs
IAEdbRxREzgW6eDbTMU8wsaudQsyKHzvZD+blejkgXUEyFO554u9m8VINk/JCmdA
P95a+XumFnoNrUQ9n2c4kHfFgT6dzB5i5okXMm1GFrx7d2VLdVBCSAYKIO0CAwEA
AQKCAgEAlmu3+9j328E7ctXf8Uha6HbVia9PPCOVSBnBzCPDBTPYjuKwRLsrbE34
pMupEEj95Jm+/D5D1LvO8G49OVLepvo9msqlkrkoKl63P5mTRyB5/fCzLhGEVmz1
mgxCYoEdod7I48wQ3lA+j25Ih6D8Ik+I3iWG8SL//1997b2wS+fUpgDCcPWAbRgo
NgGDYQuavaEABJupgW+5eF8HLAB4BuzEOAuTKq3kFw353veHPoNLm0FmdGWlQdlz
77nFMH22xTtRJigRM9DvK9CvwOIQWix+fbWUkFybmsDwS1o5yvC6VPqVJVVH9eKl
BvCo/KY85j1iTAFcPkqvX/Dk5HBVqOrmx4NQU5o/9eJnSknfcGAdsWr3952wrHxa
kGjjkwsp6fBb/NkVqJuODgzSC7XwJR0D4OwnzTuzcoi2uXwjDohAJEYd6M8rITP1
6RckzXu9upM3bh4cFnv76TF9Dbca0paBb9VPeXSUZYMZazwsXYlETWDLZjhX9RLv
CA2pk1gBSorMyqx8KOLfH2Lx8ZbB9QBdqU6WAUz00cO72TiVw2dbU8Gp34BO78N2
mpahflg98WnRLQhzb6iwcCXHzfVdHUYsHcALq5vBh4RkDK74xzXp4sjE0za3BiqA
MaO+0+Tsfw7loyXMWXimXFazxD3FZ/YLWQPNlEGJMOma/94DBEECggEBAObaShP9
9RzbpiHltH6/JIOI5f61agc7vyCnHQ9TUejOsUbXrgcsWnVcqSdNa0azpGpqtwKO
S2haF+DviKF+zM6znJJ41AyBjqIyBipDAKcF8Tervq2dPP/16SEMO/D1CX0IwFUd
M2Si1eWU49bk/y7fkH5zw/0xJXLXrKyDSBTaiyRj6+KGj6h2uJPmRhStlgvuyufu
PD0TcffBOP9tx5HfkWcGmnPJrZZ+ehe4Kn5q8BR4W11V64/a03ALbx+2f6DcOU48
8m3O9tXucExjOuDUOC9JZXMQucUEtrOMADnIMLXEjYjW/VbV5jP+QYCj+Er028Ip
xoNXjSwyFgduYd0CggEBAOJXCJ0eo9EUSJgSH1ZKPyziRCmhmnEXBVwQOPOYOO73
rPHWdpjG+HUkQSWOsFxa3Yjia9r9z3DA8ynVzPnmbG8Otc4i2IN/S4ksbFFjHtjs
F0hQBFmYI21VqkUqK8iFOOwQacFmyYs8lqg7PnQhS7GoQsnbnHE0HOpe9zjQr6Fl
T5AY6bJ9cdhXPnap/2LLP08wpNcaW0XbKWRT0+hAl5WvZry3ftn7ubNstF/HAUTU
bxLBn0CYMtTg/jAGyYtj5MvNLFGUFGx3Lg51mBS3TZWstOeF/7sAD5w453VjVWKy
Qkj4OkWJRxxbB5fuJVGrqTXc/SNh/+z25iuUX0EAMlECggEAVklhRve1loPDJQhm
3rkzPLb+wKWua+W5Gstb4U6TXyFiwcf8FFJPvW5VC4u0fUjIO76HiT0GkoqaQklG
GJb8loYsD9N57vK+DYIFK+a/Z66g6t4W922+Ty3rZZ7dCMOOOF39BdNUUllK+fUc
9EXD3BFUQO+kYg7soHBc6l5nouPM/l0a3iDNsXouo5l+uFvpqawny2kQuwN5pdFj
LJYr8ipOfuPI9156s7WyjQsZVwdBlWUnQUvMMIjqXwbnEkN0kPu/r664LrMdL/lf
oC225DJujb4xXUDzLuEEKTg7HV3mVwqQnIU/TCXHVcfDVAH13I6JVZmnyZAABHT0
JvLrQQKCAQEAmiRboWU0ezctGSN+Y+28iHyvnwqHe20KIWCK6JpKa7QQ+8HqkrEu
k9hU5Zb/VGYtaQOKIGGp3EgLUfpg1e+u+RMzjWb9vM/8STcPrX2rjF98m6qiy8Fo
nxUwGFpX5v+TfHDRFP1DVKe2kmuGZOAoBJ1qnr4JFK9A4fw6sV6tvWSZgrD0trHn
zkXcLEQpwMZaHzwphrRUZIaU8daFAi67DR2fAfaVVS6xkRf+3xtQKefinQtvwTXl
qERx15NHvr4RGxpnjEckgZnIq+A56iHLnJs5uFLxjhDEkMfQGYnEpKpxqfAi/yg2
XYFA8p8kmzIk0qHlYytid6bNqfApzsKrgQKCAQAqDHO2DSVZEiqpG9ony4WcRTMY
lZ85e3S1gCWDwDHfhGBFLgg7JgmqVsM6De1s6+gcSRK8wXVJzRbF4XWkBLmXU2Nr
FS4ZCFoSPDUFrETtd7X5a6UL14gkpmFxNp3NEfIkGHFemti2U2Iv+v2E/A23sQbR
oAhWdJru5/ToQEGSS2lThIxebj8laedmKoyI2c4muxwvkB3grrSN1FNDs7bmUSTP
CKyAiZSy8T+YHPL4r9Up5M86LRbUvHmIVy7kJaYjQTGeqNJFPX0WMqb6xTm3VA7G
4Zfx4Q3uMFdRgGHQIhwIIYe14sw8ImHbAyRKuXT0Noo/ETmWCaVZzi8pil9M
-----END RSA PRIVATE KEY-----
""".strip()
client1_cert = """
-----BEGIN CERTIFICATE-----
MIIF0DCCA7igAwIBAgIBBzANBgkqhkiG9w0BAQUFADAbMQswCQYDVQQGEwJBVTEM
MAoGA1UEAxMDQ0EyMB4XDTE0MDkxMzEyNDIwOVoXDTIxMTIxNTEyNDIwOVowNzEL
MAkGA1UEBhMCQVUxEDAOBgNVBAMTB0NsaWVudDQxFjAUBgkqhkiG9w0BCQEWB0Ns
aWVudDQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDMGzFZVM8OLyMF
AVWh/AwGhJ0q8T5Vh75LrD5tNJR6Qv6m/N+mKtEYPujAZzJIqDOBngmPzIYSTi2N
XELCrl6kQvTpcs/lcwQrOTRMxi75og1xna9gKtbdKvr8Zn09bDegcram6XuD6jWw
w8W05wzWxF9Ubkt1dcNdyVF3lBM87YVOsM9xOvkugLWO04AmEnRepB38s5X/3mPj
6TTKk4IKQUjmSX1xplKdobvEytKyywuQGuRr32FI3gIH7X8DS9lSw9EhvjLO52VX
Of/DtpYow5BjEusUT80n7h9OIn1sGkvbSMTKKrzuQKBVFvHW1WjZHPKn5tBwO9wp
nphetmqPgj2zkk+LKC92fEHl096BgEVZiQp6ixk7yBOJ/ne9nPvJAiJhTvrwVilt
FbtRVZjZ+T4wYEiPSGUKxyU9SHrpJliJGWY5rFq58wLibmh9LjMk2zkevLKe+fJP
DGp17iI++6Q/G5AoBKz7PbmRJaOq/xH0epZy3qLQZA0o7UVty6Jfy+VGNcHpsxgQ
ltKBxwRJ7XJlNIha+fDSKEUDQr7src2wRGwgAR1tHFETOBbp4NtMxTzCxq51CzIo
fO9kP5uV6OSBdQTIU7nni72bxUg2T8kKZ0A/3lr5e6YWeg2tRD2fZziQd8WBPp3M
HmLmiRcybUYWvHt3ZUt1UEJIBgog7QIDAQABo4IBATCB/jAJBgNVHRMEAjAAMBEG
CWCGSAGG+EIBAQQEAwIEsDArBglghkgBhvhCAQ0EHhYcVGlueUNBIEdlbmVyYXRl
ZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUffrp+KrDJFGTgUARU2M+RvvRlJkwSwYD
VR0jBEQwQoAUmh+yIUO2PG/fMMMjXjestsQPg4+hH6QdMBsxCzAJBgNVBAYTAkFV
MQwwCgYDVQQDEwNDQTKCCQDKVLkfYo4SETAJBgNVHRIEAjAAMBIGA1UdEQQLMAmB
B0NsaWVudDQwDgYDVR0PAQH/BAQDAgWgMBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMC
MA0GCSqGSIb3DQEBBQUAA4ICAQAuspxaskmlZSNIaK4qE2gUWLm37Otr6hwJdP4P
s6B4jkjMW5n2gQ0ZjtWVXEG2xA771pTqL9XNtqBdUGRNBs3tj2lSp5n7KTuxilVX
S79EaoOVr7/vbEGscgrpRcIhYhXxS9JxdL64drWJybjMBuw945lxPmYA8G3bW3LN
R40raEN//gui9Hb0hIW+2uu/WM8Hw60Gmc50q5mQh3A3n8ZUocFxKkUfb3tLqeG3
cgqCYgUctTqISJsHbQkTI594rRhQeYyaGirg0t2OgeVaXBX+7HBnDAomR1VPxahU
hhqxc8cE6l6ufIKusljOYljWydcgcinJnwGyH/gxSdMCItolPj4gAiVvCbJ/Pu38
GNlgCPc1pfJ2vSgzoUeMr5HLTx/jwfNpHDE3on/qtiaYCWWkZqKJOC/0Nq2Jz9lM
jvbWTSnQ+oRq7B/5cH02u+M2dcuZFrrmosQq680Ov8K/f4/jBjwGgFXg46fCXzsR
mNc0s6Dx3nS2ecIocDQfR7cy+oqYVHQOhvBrp6zSbb2H265D8i82jV/i5j6DbZ6P
s/Ab7xtyW6AwGr6O+s9Wix4w6vVKds7uq5lTUIjjl5dw6JcHjpBmlmPsKvQH2izx
1fLOfvz9aFHvvXEKFqpptwd9ZQL2KpmNIrOp7jrnpQ1e18zbL8HnX6W4V0rKUAn4
svkkFA==
-----END CERTIFICATE-----
""".strip()
server1_key = """
-----BEGIN RSA PRIVATE KEY-----
MIIJKgIBAAKCAgEAtvCbou2IcBSHbMdFWJ4PzPYsxrsprli027OLFEPXs6a3X7L9
z2gNL9BuK7Zh/XK9XNAPYYsjYWVkP0O4JbyK4rH2kOPXuUIYGFztz2BwXPvDLjlr
uqNVWAbil3g7EIUqcRJfxkx6aZRG6KlWOfGsJHGd46pUDRF79WupkSauC3t0EgqH
C18WcDuQtCkYVxoFiRflfkLdjVl2TD2RcXOBvDnxj1N5668HyVHsEU32l0xfOByq
LeLl5z4uk+DrgvmwOFVi/4ij2uSm/+oa2rKFFPLlWUbeUtdiEHQ3Sw+6aY0+95gH
sUjMXfqzIF6/Yo/nlk6JjGh4FLaJyDCyOj8MGdG7kgvDl5Ho1cmJip63Y/z95aRf
4gtrZq0nD7upwyZC6XlWS7jr6V7Pd0KrRT9bLbrLeCZEZ1rWiM4KItM8GViolRSY
aRyJgQOMh5F0jIV9w9Ai9Oxta72jmCaSFozwQyjWL3CqCxCUsvIFiEQEdiGaGFRs
3DehWI1dHpCmgrTtChCIu1+bEMogl75d1VPYKAHhRcblFySkwjbgpatajxEkKxmb
tKeg2kuH8josU+3hxCyj+66JwzfiYt1UNh1uHzrOvheosl3p+5JpotBuVAm4+ofP
anEydEXEg9ORxYD08Ddqql62QGO8QUMLt+SwcdWRQRQkjAxvX0lFotMI/eUCAwEA
AQKCAgEApgyTWDm+o+0eVzAw05T0xpeUYPY1iRjfYKQBU22Y9moW+/hdxMqvXX0U
4vxyyThChWIc8+71OExtx7bSCP6wGcBrC2yjvHYvpL2E5bylgODMcsKP9CKZLoNh
XRc2lXIp6eRBpp54Zii+jCRYLdQc6h9urt1F2W7LUyJcEXJIfAecfVus5Dd1CH4o
hD7g5v6pk5xrJEXRD6HqbJ1dzNqJIa5+ghfFDJYcvTFs0vAvKXma3DW4ilnvUAvy
/ysi2gmFWDy41TTTdbYhlxyJL4TmovMuFfDrj8oMKt8x6SHnlDMuulH2eYaYaZ1K
xdD6ap4wGRBEbXvNsw9U1K7Ot2vOsH+AUK46bZfkw+Oe28j6i342gL/o29z6BwSe
GP+an+VeCS87WUuYCzGugucVBU7UnbGkXyYXbSpYS1h0FrSxElqCTxXBmteo4KJL
uWo3iQXg7ik8gpPG89Xo5c8tylEVEvA9wLB7lZNPURsY9QNXLyYGffJuW8AYFJyv
ymhdiVtLNV5rBUgXmjl+g8g416u6Oj/sx5NfcCQTCw04q5LbCeiHW/KsvIhV3eHz
mj7kQ/OrAtdwZA7ER3mhm7cXqw0EutA+p+HZ87BWYi5HBV7eOgxrxHTw9SK4OIFt
OhKH6l0nghsI/P7PNBR3b+yySFkrn06ctttYCLm6NRYqRoWFrmkCggEBAOOYJOHw
bT/EgJM3vugXl6DKQs4OnfmRdQ2T08HWHCu6tMzEp7veNtE2oAz39XezpOK+gclJ
VGnGBLiZC2eTAsyb7WxAbmRW2Q17+K9NC1SXpYvFDFFaWI65sQciiZBdDZlDzUJw
NlIXgKfJSuAuqXx78slcQuV69Ii7CYys3AbbPeGgKVEqOHGn74hFhUlmWpoE2lM9
tr2p5pZMdKBIe98dyFnzPbBB81dbIfILzH5wSWJLGPuSWhB28a0cY21OAczd59Eq
FyYMTItdk5X8bZLjj0NZ803WWq1usl+X5z3Kr/2aQvV/FRJH26/UBz8z2Pqdk67D
WhBLavhTrj1k68sCggEBAM3Ftj5fr2BKV7TiGroZnLDi+9UdOE344K6OI/sM85/m
YcUJWGxJFTVgOIpMtIJQ9CxHc9xhTabFSGzJ6VOLYW4r5EbiBFY3WrL4lrUeOIzF
XAxBJQR8vt1d/wQD7h0WKDSimpToM4wOcFzEHEkyB9bVbyw2sWj+bM+sD8O5Q0gv
a5Z1W406Ssn+z1gvVBM3MDbUqrrzTTXqHvWOwdDvkxb1eIY++Kco5FIhy7NecdT1
oV+8GfOUCFMqLXTRrHg7atQgS7vcehsILuQqhXs0y3PSbbemVgLLG9E0CZ+w/zbX
HBu14Hhjj4XogSJi+HC5uyUTafNmq0bYhL29wCax5w8CggEBANAC7CK8VX1koYbr
+kWx2lmQwsIFxgilEvCX3YBZqmGlQT2ttwgTrtJENL/lmKoQvHCoYYKQzN/npcT5
y9ycFoDfOn4n3T1Dyxlx5vaBWgu0lg9Kx1lLU4kO2meE/2m8QoOD3oQMfvlElcfE
R/ThcPJfbqTu+A049WpKWA4Epwx1MPeYJGsURYZLULehopJVRBVkvg46Z1ytfhx8
QFOGLADd/ZGIqScA/+ElX78TXZFqGwgFTw4O0tYdgAER4yWxmB+f6RHYgFO8BfGS
UyNQFO2dogCSo7bOZQ4CEHEiKqzlJTiJ1wz9W0rb9kObbAwt3PAhOSsPTK973oac
JLHkHUUCggEAa3ZfsL9j5ZOtrkeO0bXigPZpsmiqKP5ayI5u+ANRkCZO1QoGZbbd
Hpz7qi5Y7t28Rwuh1Gv0k63gHwBrnDfkUBcYBnSu8x/BfEoa2sfHnKzNX5D99hP3
0b/vGHe8+O/DW4m31SBXG0PHJos8gnVgZq/ceWiuyjhlNyeSrBKqsp4hP9hWUbEp
scgjHNjKvaZKxbfW2f+KSSfVt0QwsB8N4CWeJe3pCdNvOf1wVmJybFdDSa4Al5at
qlESoDmIKtpM9i9PnfKMymVBp+MVBr0Rq5Evv4Nc0+SiyGS2yfEzt74rbcVUT0sf
fz1ngz/Qo3474Cb9ZCIwPLWCzVy1Zv/tvQKCAQEAv8uxjmM/CqtKDW9c/z4Z4y6O
squI4AjCgbml8VzC2aS1zQwbCsq0KmGYVgYALKT4dSH+B+6koy+J5GPpVX9xL0Zq
MZJlo1Hmi2hDW+gi/w+Q62iRdqO+SoqbFZJ5aX4iF3dyX9rvDyOzRFr+kddtuQ6y
tru00ATHMp2hix8LoKDo8dLY9bX6Y9RmgWAVOYbFHm4OB9wE2fya3feo6O3znJY9
EqlYKE0bzcHQQzeT0+Lh9+1KLBg6B6jfyAscVKmSgJyEHLW7gzgF/h10py8XMEVj
syS6C3/DMznzpQSyjdTHqdiGuLfagF9oHxRaRacXaxLP2CzILIUFIaEIvJevYg==
-----END RSA PRIVATE KEY-----
""".strip()
server1_cert = """
-----BEGIN CERTIFICATE-----
MIIFwTCCA6mgAwIBAgIBBjANBgkqhkiG9w0BAQUFADAbMQswCQYDVQQGEwJBVTEM
MAoGA1UEAxMDQ0EyMB4XDTE0MDkxMzEyNDEzN1oXDTIxMTIxNTEyNDEzN1owOTEL
MAkGA1UEBhMCQVUxEjAQBgNVBAMTCWxvY2FsaG9zdDEWMBQGCSqGSIb3DQEJARYH
U2VydmVyNDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALbwm6LtiHAU
h2zHRVieD8z2LMa7Ka5YtNuzixRD17Omt1+y/c9oDS/Qbiu2Yf1yvVzQD2GLI2Fl
ZD9DuCW8iuKx9pDj17lCGBhc7c9gcFz7wy45a7qjVVgG4pd4OxCFKnESX8ZMemmU
RuipVjnxrCRxneOqVA0Re/VrqZEmrgt7dBIKhwtfFnA7kLQpGFcaBYkX5X5C3Y1Z
dkw9kXFzgbw58Y9TeeuvB8lR7BFN9pdMXzgcqi3i5ec+LpPg64L5sDhVYv+Io9rk
pv/qGtqyhRTy5VlG3lLXYhB0N0sPummNPveYB7FIzF36syBev2KP55ZOiYxoeBS2
icgwsjo/DBnRu5ILw5eR6NXJiYqet2P8/eWkX+ILa2atJw+7qcMmQul5Vku46+le
z3dCq0U/Wy26y3gmRGda1ojOCiLTPBlYqJUUmGkciYEDjIeRdIyFfcPQIvTsbWu9
o5gmkhaM8EMo1i9wqgsQlLLyBYhEBHYhmhhUbNw3oViNXR6QpoK07QoQiLtfmxDK
IJe+XdVT2CgB4UXG5RckpMI24KWrWo8RJCsZm7SnoNpLh/I6LFPt4cQso/uuicM3
4mLdVDYdbh86zr4XqLJd6fuSaaLQblQJuPqHz2pxMnRFxIPTkcWA9PA3aqpetkBj
vEFDC7fksHHVkUEUJIwMb19JRaLTCP3lAgMBAAGjgfEwge4wCQYDVR0TBAIwADAR
BglghkgBhvhCAQEEBAMCBkAwKwYJYIZIAYb4QgENBB4WHFRpbnlDQSBHZW5lcmF0
ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFHewxkloJzmR2Cj0/za/ZHS0yl92MEsG
A1UdIwREMEKAFJofsiFDtjxv3zDDI143rLbED4OPoR+kHTAbMQswCQYDVQQGEwJB
VTEMMAoGA1UEAxMDQ0EyggkAylS5H2KOEhEwCQYDVR0SBAIwADASBgNVHREECzAJ
gQdTZXJ2ZXI0MBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMBMA0GCSqGSIb3DQEBBQUA
A4ICAQA+EAj846j4u/PZvLITPX/kI1+8Y9JIULKwdQ2v8O5mMf9In2Pk9MQ+81RP
rpDZo3ZsfkkdoAR7j5ZeTdMargFAeyErfJpZ5Fv4LryaNotJB0/iG8vcWpOJ7qa7
bae+5hQ0vzAoeIxg7kRXN2noSyHHhd3riddOp3/TxetKoFdWSjjnMXqBvZbYzUcf
asdKMXKcvZlan01f+zV8CkR7+Scd+5uW33lNHnUmCzeGA5G8z1vA05u9TVAkwU5r
XbdJbUjCE3d+X/jkaS5IvhBu6tKSA1YFcD9Brh8CmMjtCWLk8ETv+78WJzqyjiaT
OisFTUI/jC18dKgFyyehEeeYo5SZO7BIsNgplDX2UOumQwZYdUX4M3ObRt2n33Fb
ReVhPf39oCDSOGEckRGeJX6ydVRjWJHC/qT3gDKaMPZd5lN0M1BOqyAFakM0oU/7
VPf9dUQsw/BeUvm+34hE382JIefzBA32SsyfQjNf6L6tV1JYEfeaebSI+cIny9me
lfvTgPmoabqCXVN03hyppf7/0tD8BpitC9ghFrN61oJLEgJOJ9tLuQz0h5gbxeZP
mOAkPcQs5FMuzNmP/amLSfCFfdUT5iIqZ3uIAsqnw0ftp8OOEAdyoC4/vgVx3y6b
BOX+H+pK1aZXjNzcacyPSawHJTvqexNJFWV167okb1BmOFJL9w==
-----END CERTIFICATE-----
""".strip() | zato-common-holocene | /zato-common-holocene-3.2.1.tar.gz/zato-common-holocene-3.2.1/src/zato/common/test/tls_material.py | tls_material.py |
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import os
from datetime import datetime
from tempfile import NamedTemporaryFile
from random import choice, randint
from unittest import TestCase
from uuid import uuid4
# Bunch
from bunch import Bunch, bunchify
# mock
from mock import MagicMock, Mock
# nose
from nose.tools import eq_
# six
from six import string_types
# SQLAlchemy
from sqlalchemy import create_engine
# Zato
from zato.common.api import CHANNEL, DATA_FORMAT, SIMPLE_IO
from zato.common.ext.configobj_ import ConfigObj
from zato.common.json_internal import loads
from zato.common.log_message import CID_LENGTH
from zato.common.odb import model
from zato.common.odb.model import Cluster, ElasticSearch
from zato.common.odb.api import SessionWrapper, SQLConnectionPool
from zato.common.odb.query import search_es_list
from zato.common.simpleio_ import get_bytes_to_str_encoding, get_sio_server_config, simple_io_conf_contents
from zato.common.py23_ import maxint
from zato.common.util.api import is_port_taken, new_cid
from zato.server.service import Service
# Zato - Cython
from zato.simpleio import CySimpleIO
# Python 2/3 compatibility
from past.builtins import basestring, cmp, unicode, xrange
# ################################################################################################################################
if 0:
from zato.common.util.search import SearchResults
SearchResults = SearchResults
# ################################################################################################################################
test_class_name = '<my-test-class>'
# ################################################################################################################################
class test_odb_data:
cluster_id = 1
name = 'my.name'
is_active = True
es_hosts = 'my.hosts'
es_timeout = 111
es_body_as = 'my.body_as'
# ################################################################################################################################
def rand_bool():
return choice((True, False))
# ################################################################################################################################
def rand_csv(count=3):
return ','.join(str(elem) for elem in rand_int(count=count))
# ################################################################################################################################
def rand_dict():
out = {}
funcs = [rand_bool, rand_int, rand_string]
for x in range(rand_int(30)):
out[choice(funcs)()] = choice(funcs)()
return out
# ################################################################################################################################
def rand_list():
out = []
funcs = [rand_bool, rand_int, rand_string]
for x in range(rand_int(30)):
out.append(choice(funcs)())
return out
# ################################################################################################################################
def rand_list_of_dicts():
out = []
for x in range(rand_int(30)):
out.append(rand_dict())
return out
# ################################################################################################################################
def rand_opaque():
return rand_object()
rand_nested = rand_opaque
# ################################################################################################################################
def rand_datetime(to_string=True):
    value = datetime.utcnow() # Current time is as random as any other
return value.isoformat() if to_string else value
# ################################################################################################################################
def rand_int(start=1, stop=100, count=1):
if count == 1:
return randint(start, stop)
else:
return [randint(start, stop) for x in range(count)]
# ################################################################################################################################
def rand_float(start=1.0, stop=100.0):
return float(rand_int(start, stop))
# ################################################################################################################################
def rand_string(count=1, prefix=''):
prefix = ('-' + prefix + '-') if prefix else ''
if count == 1:
return 'a' + prefix + uuid4().hex
else:
return ['a' + prefix + uuid4().hex for x in range(count)]
# ################################################################################################################################
def rand_unicode():
return u'ϠϡϢϣϤϥϦϧϨϩϪϫϬϭ'
# ################################################################################################################################
def rand_object():
return object()
# ################################################################################################################################
def rand_date_utc(as_string=False):
value = datetime.utcnow() # Now is as random as any other date
if as_string:
return value.isoformat()
return value
# ################################################################################################################################
def is_like_cid(cid):
""" Raises ValueError if the cid given on input does not look like a genuine CID
produced by zato.common.util.new_cid
"""
if not isinstance(cid, string_types):
        raise ValueError('CID `{}` should be string-like instead of `{}`'.format(cid, type(cid)))
len_given = len(cid)
if len_given != CID_LENGTH:
raise ValueError('CID `{}` should have length `{}` instead of `{}`'.format(cid, CID_LENGTH, len_given))
return True
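
# A quick illustration of the contract above - output of new_cid() passes,
# while anything that is not a string raises:
#
#   is_like_cid(new_cid()) # -> True
#   is_like_cid(123)       # -> raises ValueError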
# ################################################################################################################################
def get_free_tcp_port(start=40000, stop=40500):
""" Iterates between start and stop, returning first free TCP port. Must not be used except for tests because
it comes with a race condition - another process may want to bind the port we find before our caller does.
"""
for port in xrange(start, stop):
if not is_port_taken(port):
return port
else:
raise Exception('Could not find any free TCP port between {} and {}'.format(start, stop))
# ################################################################################################################################
def enrich_with_static_config(object_):
""" Adds to an object (service instance or class) all attributes that are added by service store.
Useful during tests since there is no service store around to do it.
"""
object_.component_enabled_ibm_mq = True
object_.component_enabled_zeromq = True
object_.component_enabled_patterns = True
object_.component_enabled_target_matcher = True
object_.component_enabled_invoke_matcher = True
object_.component_enabled_sms = True
object_.get_name()
def target_match(*args, **kwargs):
return True
is_allowed = target_match
object_._worker_config = Bunch(out_odoo=None, out_soap=None)
object_._worker_store = Bunch(
sql_pool_store=None, outgoing_web_sockets=None, cassandra_api=None,
cassandra_query_api=None, email_smtp_api=None, email_imap_api=None, search_es_api=None, search_solr_api=None,
target_matcher=Bunch(target_match=target_match, is_allowed=is_allowed), invoke_matcher=Bunch(is_allowed=is_allowed),
vault_conn_api=None, sms_twilio_api=None)
# ################################################################################################################################
class Expected(object):
""" A container for the data a test expects the service to return.
"""
def __init__(self):
self.data = []
def add(self, item):
self.data.append(item)
def get_data(self):
if not self.data or len(self.data) > 1:
return self.data
else:
return self.data[0]
# ################################################################################################################################
class FakeBrokerClient(object):
def __init__(self):
self.publish_args = []
self.publish_kwargs = []
self.invoke_async_args = []
self.invoke_async_kwargs = []
def publish(self, *args, **kwargs):
raise NotImplementedError()
def invoke_async(self, *args, **kwargs):
self.invoke_async_args.append(args)
self.invoke_async_kwargs.append(kwargs)
# ################################################################################################################################
class FakeKVDB(object):
class FakeConn(object):
def __init__(self):
self.setnx_args = None
self.setnx_return_value = True
self.expire_args = None
self.delete_args = None
def return_none(self, *ignored_args, **ignored_kwargs):
return None
get = hget = return_none
def setnx(self, *args):
self.setnx_args = args
return self.setnx_return_value
def expire(self, *args):
self.expire_args = args
def delete(self, args):
self.delete_args = args
def __init__(self):
self.conn = self.FakeConn()
def translate(self, *ignored_args, **ignored_kwargs):
raise NotImplementedError()
# ################################################################################################################################
class FakeServices(object):
def __getitem__(self, ignored):
return {'slow_threshold': 1234}
# ################################################################################################################################
class FakeServiceStore(object):
def __init__(self, name_to_impl_name=None, impl_name_to_service=None):
self.services = FakeServices()
self.name_to_impl_name = name_to_impl_name or {}
self.impl_name_to_service = impl_name_to_service or {}
def new_instance(self, impl_name, is_active=True):
return self.impl_name_to_service[impl_name](), is_active
# ################################################################################################################################
class FakeServer(object):
""" A fake mock server used in test cases.
"""
def __init__(self, service_store_name_to_impl_name=None, service_store_impl_name_to_service=None, worker_store=None):
self.kvdb = FakeKVDB()
self.service_store = FakeServiceStore(service_store_name_to_impl_name, service_store_impl_name_to_service)
self.worker_store = worker_store
self.fs_server_config = Bunch()
self.fs_server_config.misc = Bunch()
self.fs_server_config.misc.zeromq_connect_sleep = 0.1
self.fs_server_config.misc.internal_services_may_be_deleted = False
self.repo_location = rand_string()
self.delivery_store = None
self.user_config = Bunch()
self.static_config = Bunch()
self.time_util = Bunch()
self.servers = []
self.ipc_api = None
self.component_enabled = Bunch()
# ################################################################################################################################
class SIOElemWrapper(object):
""" Makes comparison between two SIOElem elements use their names.
"""
def __init__(self, value):
self.value = value
def __cmp__(self, other):
# Compare to either other's name or to other directly. In the latter case it means it's a plain string name
# of a SIO attribute.
return cmp(self.value.name, getattr(other, 'name', other))
# ################################################################################################################################
class ServiceTestCase(TestCase):
def __init__(self, *args, **kwargs):
self.maxDiff = None
super(ServiceTestCase, self).__init__(*args, **kwargs)
def invoke(self, class_, request_data, expected, mock_data={}, channel=CHANNEL.HTTP_SOAP, job_type=None,
data_format=DATA_FORMAT.JSON, service_store_name_to_impl_name=None, service_store_impl_name_to_service=None):
""" Sets up a service's invocation environment, then invokes and returns
an instance of the service.
"""
class_.component_enabled_cassandra = True
class_.component_enabled_email = True
class_.component_enabled_search = True
class_.component_enabled_msg_path = True
class_.has_sio = getattr(class_, 'SimpleIO', False)
instance = class_()
server = MagicMock()
server.component_enabled.stats = False
worker_store = MagicMock()
worker_store.worker_config = MagicMock
worker_store.worker_config.outgoing_connections = MagicMock(return_value=(None, None, None, None))
worker_store.worker_config.cloud_aws_s3 = MagicMock(return_value=None)
worker_store.invoke_matcher.is_allowed = MagicMock(return_value=True)
simple_io_config = {
'int_parameters': SIMPLE_IO.INT_PARAMETERS.VALUES,
'int_parameter_suffixes': SIMPLE_IO.INT_PARAMETERS.SUFFIXES,
'bool_parameter_prefixes': SIMPLE_IO.BOOL_PARAMETERS.SUFFIXES,
}
class_.update(
instance, channel, FakeServer(service_store_name_to_impl_name, service_store_impl_name_to_service, worker_store),
None, worker_store, new_cid(), request_data, request_data, simple_io_config=simple_io_config,
data_format=data_format, job_type=job_type)
def get_data(self, *ignored_args, **ignored_kwargs):
return expected.get_data()
instance.get_data = get_data
for attr_name, mock_path_data_list in mock_data.items():
setattr(instance, attr_name, Mock())
attr = getattr(instance, attr_name)
for mock_path_data in mock_path_data_list:
for path, value in mock_path_data.items():
split = path.split('.')
new_path = '.return_value.'.join(elem for elem in split) + '.return_value'
attr.configure_mock(**{new_path:value})
broker_client_publish = getattr(self, 'broker_client_publish', None)
if broker_client_publish:
instance.broker_client = FakeBrokerClient()
instance.broker_client.publish = broker_client_publish
def set_response_func(*args, **kwargs):
pass
instance.handle()
instance.update_handle(
set_response_func, instance, request_data, channel, data_format, None, server, None, worker_store, new_cid(),
None)
return instance
def _check_sio_request_input(self, instance, request_data):
for k, v in request_data.items():
            self.assertEqual(getattr(instance.request.input, k), v)
sio_keys = set(getattr(instance.SimpleIO, 'input_required', []))
sio_keys.update(set(getattr(instance.SimpleIO, 'input_optional', [])))
given_keys = set(request_data.keys())
diff = sio_keys ^ given_keys
self.assertFalse(diff, 'There should be no difference between sio_keys {} and given_keys {}, diff {}'.format(
sio_keys, given_keys, diff))
def check_impl(self, service_class, request_data, response_data, response_elem, mock_data={}):
expected_data = sorted(response_data.items())
instance = self.invoke(service_class, request_data, None, mock_data)
self._check_sio_request_input(instance, request_data)
if response_data:
if not isinstance(instance.response.payload, basestring):
response = loads(instance.response.payload.getvalue())[response_elem] # Raises KeyError if 'response_elem' doesn't match
else:
response = loads(instance.response.payload)[response_elem]
self.assertEqual(sorted(response.items()), expected_data)
def check_impl_list(self, service_class, item_class, request_data, # noqa
response_data, request_elem, response_elem, mock_data={}): # noqa
expected_keys = response_data.keys()
expected_data = tuple(response_data for x in range(rand_int(10)))
expected = Expected()
for datum in expected_data:
item = item_class()
for key in expected_keys:
value = getattr(datum, key)
setattr(item, key, value)
expected.add(item)
instance = self.invoke(service_class, request_data, expected, mock_data)
response = loads(instance.response.payload.getvalue())[response_elem]
for idx, item in enumerate(response):
expected = expected_data[idx]
given = Bunch(item)
for key in expected_keys:
given_value = getattr(given, key)
expected_value = getattr(expected, key)
eq_(given_value, expected_value)
self._check_sio_request_input(instance, request_data)
def wrap_force_type(self, elem):
return SIOElemWrapper(elem)
# ################################################################################################################################
class ODBTestCase(TestCase):
def setUp(self):
engine_url = 'sqlite:///:memory:'
pool_name = 'ODBTestCase.pool'
config = {
'engine': 'sqlite',
'sqlite_path': ':memory:',
'fs_sql_config': {
'engine': {
'ping_query': 'SELECT 1'
}
}
}
# Create a standalone engine ..
self.engine = create_engine(engine_url)
# .. all ODB objects for that engine..
model.Base.metadata.create_all(self.engine)
# .. an SQL pool too ..
self.pool = SQLConnectionPool(pool_name, config, config)
# .. a session wrapper on top of everything ..
self.session_wrapper = SessionWrapper()
self.session_wrapper.init_session(pool_name, config, self.pool)
# .. and all ODB objects for that wrapper's engine too ..
model.Base.metadata.create_all(self.session_wrapper.pool.engine)
# Unrelated to the above, used in individual tests
self.ODBTestModelClass = ElasticSearch
def tearDown(self):
model.Base.metadata.drop_all(self.engine)
self.ODBTestModelClass = None
def get_session(self):
return self.session_wrapper.session()
def get_sample_odb_orm_result(self, is_list):
# type: (bool) -> object
cluster = Cluster()
cluster.id = test_odb_data.cluster_id
cluster.name = 'my.cluster'
cluster.odb_type = 'sqlite'
cluster.broker_host = 'my.broker.host'
cluster.broker_port = 1234
cluster.lb_host = 'my.lb.host'
cluster.lb_port = 5678
cluster.lb_agent_port = 9012
es = self.ODBTestModelClass()
es.name = test_odb_data.name
es.is_active = test_odb_data.is_active
es.hosts = test_odb_data.es_hosts
es.timeout = test_odb_data.es_timeout
es.body_as = test_odb_data.es_body_as
es.cluster_id = test_odb_data.cluster_id
session = self.session_wrapper._session
session.add(cluster)
session.add(es)
session.commit()
session = self.session_wrapper._session
result = search_es_list(session, test_odb_data.cluster_id) # type: tuple
result = result[0] # type: SearchResults
# This is a one-element tuple of ElasticSearch ORM objects
result = result.result # type: tuple
return result if is_list else result[0]
# ################################################################################################################################
class MyODBService(Service):
class SimpleIO:
output = 'cluster_id', 'is_active', 'name'
# ################################################################################################################################
class MyODBServiceWithResponseElem(MyODBService):
class SimpleIO(MyODBService.SimpleIO):
response_elem = 'my_response_elem'
# ################################################################################################################################
class MyZatoClass:
def to_zato(self):
return {
'cluster_id': test_odb_data.cluster_id,
'is_active': test_odb_data.is_active,
'name': test_odb_data.name,
}
# ################################################################################################################################
# ################################################################################################################################
class BaseSIOTestCase(TestCase):
# ################################################################################################################################
def setUp(self):
self.maxDiff = maxint
# ################################################################################################################################
def get_server_config(self, needs_response_elem=False):
with NamedTemporaryFile(delete=False) as f:
contents = simple_io_conf_contents.format(bytes_to_str_encoding=get_bytes_to_str_encoding())
if isinstance(contents, unicode):
contents = contents.encode('utf8')
f.write(contents)
f.flush()
            temporary_file_name = f.name
sio_fs_config = ConfigObj(temporary_file_name)
sio_fs_config = bunchify(sio_fs_config)
os.remove(temporary_file_name)
sio_server_config = get_sio_server_config(sio_fs_config)
if not needs_response_elem:
sio_server_config.response_elem = None
return sio_server_config
# ################################################################################################################################
def get_sio(self, declaration, class_):
sio = CySimpleIO(self.get_server_config(), declaration)
sio.build(class_)
return sio
# ################################################################################################################################
# ################################################################################################################################

# File: src/zato/common/test/__init__.py (zato-common-holocene 3.2.1)
# stdlib
from operator import itemgetter
# Zato
from zato.common.odb.query import pubsub_endpoint_queue_list_by_sub_keys
# ################################################################################################################################
if 0:
from typing import Union as union
from zato.server.base.parallel import ParallelServer
ParallelServer = ParallelServer
union = union
# ################################################################################################################################
def make_short_msg_copy_from_dict(msg, data_prefix_len, data_prefix_short_len):
out_msg = {}
out_msg['msg_id'] = msg['pub_msg_id']
out_msg['in_reply_to'] = msg.get('in_reply_to')
out_msg['data'] = msg['data'][:data_prefix_len]
out_msg['data_prefix_short'] = out_msg['data'][:data_prefix_short_len]
out_msg['size'] = msg['size']
out_msg['pub_pattern_matched'] = msg['pub_pattern_matched']
out_msg['sub_pattern_matched'] = msg['sub_pattern_matched']
out_msg['pub_time'] = msg['pub_time']
out_msg['expiration'] = msg['expiration']
out_msg['expiration_time'] = msg['expiration_time']
out_msg['topic_id'] = msg['topic_id']
out_msg['topic_name'] = msg['topic_name']
out_msg['cluster_id'] = msg['cluster_id']
out_msg['published_by_id'] = msg['published_by_id']
out_msg['delivery_status'] = msg['delivery_status']
out_msg['server_name'] = msg['server_name']
out_msg['server_pid'] = msg['server_pid']
out_msg['has_gd'] = msg['has_gd']
out_msg['recv_time'] = msg['recv_time']
out_msg['sub_key'] = msg['sub_key']
return out_msg
# ################################################################################################################################
def make_short_msg_copy_from_msg(msg, data_prefix_len, data_prefix_short_len):
out_msg = {}
out_msg['msg_id'] = msg.pub_msg_id
out_msg['in_reply_to'] = msg.in_reply_to
out_msg['data'] = msg.data[:data_prefix_len]
out_msg['data_prefix_short'] = out_msg['data'][:data_prefix_short_len]
out_msg['size'] = msg.size
out_msg['pub_pattern_matched'] = msg.pub_pattern_matched
out_msg['sub_pattern_matched'] = msg.sub_pattern_matched
out_msg['pub_time'] = msg.pub_time
out_msg['expiration'] = msg.expiration
out_msg['expiration_time'] = msg.expiration_time
out_msg['topic_id'] = msg.topic_id
out_msg['topic_name'] = msg.topic_name
out_msg['cluster_id'] = msg.cluster_id
out_msg['published_by_id'] = msg.published_by_id
out_msg['delivery_status'] = msg.delivery_status
out_msg['server_name'] = msg.server_name
out_msg['server_pid'] = msg.server_pid
out_msg['has_gd'] = msg.has_gd
out_msg['recv_time'] = msg.recv_time
out_msg['sub_key'] = msg.sub_key
return out_msg
# ################################################################################################################################
def get_last_topics(topic_list, as_list=True):
# type: (list, bool) -> union[dict, list]
# Response to produce
out = {}
    for item in topic_list: # type: dict
# Local alias
topic_id = item['topic_id'] # type: int
# .. we may have visited this topic already ..
previous = out.get(topic_id, {}) # type: dict
# .. if we have ..
if previous:
if item['pub_time'] > previous['pub_time']:
out[topic_id] = item
# .. otherwise, we can just set the current one ..
else:
out[topic_id] = item
if as_list:
out = sorted(out.values(), key=itemgetter('pub_time'), reverse=True)
return out
else:
return out
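
# A quick, hypothetical illustration of the dedup-by-latest behaviour above -
# given two entries for the same topic, the one with the later pub_time wins:
#
#   get_last_topics([
#       {'topic_id': 1, 'pub_time': 1.0},
#       {'topic_id': 1, 'pub_time': 2.0},
#   ], as_list=False)
#
#   # -> {1: {'topic_id': 1, 'pub_time': 2.0}}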
# ################################################################################################################################
def get_last_pub_metadata(server, topic_id_list):
# type: (ParallelServer, list) -> dict
# Make sure we have a list on input
if isinstance(topic_id_list, list):
input_topic_id = None
is_single_topic = False
else:
input_topic_id = int(topic_id_list)
is_single_topic = True
topic_id_list = [topic_id_list]
# Always use integers for topic IDs
topic_id_list = [int(elem) for elem in topic_id_list]
# Look up topic metadata in all the servers ..
response = server.rpc.invoke_all('zato.pubsub.topic.get-topic-metadata', {'topic_id_list':topic_id_list})
# Produce our response
out = get_last_topics(response.data, as_list=False)
if is_single_topic:
return out.get(input_topic_id) or {}
else:
return out
# ################################################################################################################################
def get_endpoint_metadata(server, endpoint_id):
# type: (ParallelServer, int) -> dict
# All topics from all PIDs
topic_list = []
response = server.rpc.invoke_all('zato.pubsub.endpoint.get-endpoint-metadata', {'endpoint_id':endpoint_id})
for pid_response in response.data: # type: dict
for pid_topic_list in pid_response.values(): # type: list
for topic_data in pid_topic_list: # type: dict
topic_list.append(topic_data)
return get_last_topics(topic_list, as_list=True)
# ################################################################################################################################
def get_topic_sub_keys_from_sub_keys(session, cluster_id, sub_key_list):
topic_sub_keys = {}
for item in pubsub_endpoint_queue_list_by_sub_keys(session, cluster_id, sub_key_list):
sub_keys = topic_sub_keys.setdefault(item.topic_name, [])
sub_keys.append(item.sub_key)
return topic_sub_keys
# ################################################################################################################################

# File: src/zato/common/util/pubsub.py (zato-common-holocene 3.2.1)
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
from collections import deque
from datetime import datetime
from itertools import count
# gevent
from gevent.lock import RLock
# ################################################################################################################################
class default:
max_size = 1000
# ################################################################################################################################
class Event(object):
""" An individual event emitted to an event log.
"""
__slots__ = 'log_id', 'event_id', 'name', 'timestamp', 'ctx'
def __init__(self, log_id, event_id, name, ctx, _utcnow=datetime.utcnow):
self.log_id = log_id
self.event_id = event_id
self.name = name
self.ctx = ctx
self.timestamp = _utcnow()
def __repr__(self):
return '<{} at {} log:{} id:{} n:{} t:{}>'.format(self.__class__.__name__, hex(id(self)),
self.log_id, self.event_id, self.name, self.timestamp)
def to_dict(self):
return {
'log_id': self.log_id,
'event_id': self.event_id,
'name': self.name,
'timestamp': self.timestamp.isoformat(),
'ctx': None if self.ctx is None else repr(self.ctx)
}
# ################################################################################################################################
class EventLog(object):
""" A backlog of max_size events of arbitrary nature described by attributes such as ID, name, timestamp and opaque context.
"""
def __init__(self, log_id, max_size=default.max_size):
self.log_id = log_id
self.event_id_counter = count(1)
self.lock = RLock()
self.events = deque(maxlen=max_size)
# ################################################################################################################################
def emit(self, name, ctx=None):
self.events.append(Event(self.log_id, next(self.event_id_counter), name, ctx))
# ################################################################################################################################
def get_event_list(self):
return [elem.to_dict() for elem in self.events]
# ################################################################################################################################
if __name__ == '__main__':
el = EventLog('aaa')
for x in range(1, 50):
el.emit('aaa-{}'.format(x))
    print(list(reversed(el.events)))

# File: src/zato/common/util/event.py (zato-common-holocene 3.2.1)
# ################################################################################################################################
# ################################################################################################################################
if 0:
from typing import Callable
Callable = Callable
# ################################################################################################################################
# ################################################################################################################################
_search_attrs = 'num_pages', 'cur_page', 'prev_page', 'next_page', 'has_prev_page', 'has_next_page', 'page_size', 'total'
# ################################################################################################################################
# ################################################################################################################################
class SearchResults(object):
def __init__(self, q, result, columns, total):
# type: (object, object, object, int) -> None
self.q = q
self.result = result
self.total = total
self.columns = columns # type: list
self.num_pages = 0
self.cur_page = 0
self.prev_page = 0
self.next_page = 0
self.has_prev_page = False
self.has_next_page = False
self.page_size = None # type: int
# ################################################################################################################################
def __iter__(self):
return iter(self.result)
# ################################################################################################################################
def __repr__(self):
        # To avoid circular imports - this is OK because we very rarely repr(self) anyway
from zato.common.util.api import make_repr
return make_repr(self)
# ################################################################################################################################
def set_data(self, cur_page, page_size):
num_pages, rest = divmod(self.total, page_size)
        # A non-zero rest means some results did not fit a full page
if rest:
num_pages += 1
self.num_pages = num_pages
        self.cur_page = cur_page + 1 # Adding 1 because the external API is 1-indexed
self.prev_page = self.cur_page - 1 if self.cur_page > 1 else 0
self.next_page = self.cur_page + 1 if self.cur_page < self.num_pages else None
self.has_prev_page = self.prev_page >= 1
self.has_next_page = bool(self.next_page and self.next_page <= self.num_pages) or False
self.page_size = page_size
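
    # A hypothetical walk-through of the arithmetic above - with total=25 and
    # page_size=10, divmod gives (2, 5) and the non-zero rest bumps num_pages
    # to 3; set_data(0, 10) then yields cur_page=1, prev_page=0, next_page=2,
    # has_prev_page=False and has_next_page=True.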
# ################################################################################################################################
@staticmethod
def from_list(
data_list, # type: list
cur_page, # type: int
page_size, # type: int
needs_sort=False, # type: bool
post_process_func=None, # type: Callable
sort_key=None, # type: object
needs_reverse=True # type: bool
):
cur_page = cur_page - 1 if cur_page else 0 # We index lists from 0
        # Set it here because data_list may be shortened below to at most page_size elements
total = len(data_list)
        # Proceed only if any data was collected at all
if data_list:
# We need to sort the output ..
if needs_sort:
data_list.sort(key=sort_key, reverse=needs_reverse)
# .. the output may be already sorted but we may perhaps need to reverse it.
else:
if needs_reverse:
data_list.reverse()
start = cur_page * page_size
end = start + page_size
data_list = data_list[start:end]
if post_process_func:
post_process_func(data_list)
search_results = SearchResults(None, data_list, None, total)
search_results.set_data(cur_page, page_size)
return search_results
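
    # A short usage sketch (hypothetical data) - paginate 25 integers, 10 per
    # page, asking for page 2; note the external API is 1-indexed:
    #
    #   results = SearchResults.from_list(list(range(25)), 2, 10, needs_reverse=False)
    #   results.total        # 25
    #   results.num_pages    # 3
    #   results.cur_page     # 2
    #   len(results.result)  # 10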
# ################################################################################################################################
def to_dict(self, _search_attrs=_search_attrs):
out = {}
out['result'] = self.result
for name in _search_attrs:
out[name] = getattr(self, name, None)
return out
# ################################################################################################################################
# ################################################################################################################################

# File: src/zato/common/util/search.py (zato-common-holocene 3.2.1)
# stdlib
import math
from datetime import timedelta
from operator import itemgetter
# Humanize
from humanize import precisedelta
# numpy
import numpy as np
# Zato
from zato.common.api import StatsKey
# ################################################################################################################################
# ################################################################################################################################
float_stats = ('item_max', 'item_min', 'item_mean', 'item_total_time')
# ################################################################################################################################
# ################################################################################################################################
def tmean(data, limit_from=None, limit_to=None):
""" Trimmed mean - includes only elements up to the input limit, if it is given at all.
"""
data = data if isinstance(data, list) else [data]
if limit_from or limit_to:
_data = []
for elem in data:
if limit_from:
if elem < limit_from:
continue
if limit_to:
if elem > limit_to:
continue
_data.append(elem)
data = _data[:]
count = len(data)
total = sum(data)
return total / count if count else 0
# ################################################################################################################################
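# A small, hypothetical illustration of the trimming above - 100 exceeds
# limit_to and is dropped, so only [1, 2, 3] contribute to the mean:
#
#   tmean([1, 2, 3, 100], limit_to=10) # -> 2.0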
# ################################################################################################################################
#
# Taken from https://code.activestate.com/recipes/511478-finding-the-percentile-of-the-values/
#
# Original code by Wai Yip Tung, licensed under the Python Foundation License
#
def percentile(data, percent, key=lambda x:x):
"""
Find the percentile of a list of values.
@parameter data - a list of values
@parameter percent - a float value from 0.0 to 1.0.
@parameter key - optional key function to compute value from each element of data.
@return - the percentile of the values
"""
if not data:
return 0
data.sort()
k = (len(data)-1) * percent
f = math.floor(k)
c = math.ceil(k)
if f == c:
return key(data[int(k)])
d0 = key(data[int(f)]) * (c-k)
d1 = key(data[int(c)]) * (k-f)
return d0 + d1
# ################################################################################################################################
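# A hypothetical example of the interpolation above - with five sorted values,
# the 95th percentile falls between the last two elements:
#
#   percentile([10, 20, 30, 40, 50], 0.5)  # -> 30
#   percentile([10, 20, 30, 40, 50], 0.95) # -> 48.0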
# ################################################################################################################################
def collect_current_usage(data):
# type: (list) -> dict
# For later use
usage = 0
last_duration = None
last_timestamp = ''
usage_min = None
usage_max = None
usage_mean = None
# Make sure we always have a list to iterate over (rather than None)
data = data or []
for elem in data:
if elem is None:
continue
usage += elem[StatsKey.PerKeyValue]
if elem[StatsKey.PerKeyLastTimestamp] > last_timestamp:
last_timestamp = elem[StatsKey.PerKeyLastTimestamp]
last_duration = elem[StatsKey.PerKeyLastDuration]
if usage_min:
usage_min = min([usage_min, elem[StatsKey.PerKeyMin]])
else:
usage_min = elem[StatsKey.PerKeyMin]
if usage_max:
usage_max = max([usage_max, elem[StatsKey.PerKeyMax]])
else:
usage_max = elem[StatsKey.PerKeyMax]
if usage_mean:
usage_mean = np.mean([usage_mean, elem[StatsKey.PerKeyMean]])
else:
usage_mean = elem[StatsKey.PerKeyMean]
usage_mean = round(usage_mean, 3)
return {
StatsKey.PerKeyValue: usage,
StatsKey.PerKeyLastDuration: last_duration,
StatsKey.PerKeyLastTimestamp: last_timestamp,
StatsKey.PerKeyMin: usage_min,
StatsKey.PerKeyMax: usage_max,
StatsKey.PerKeyMean: usage_mean,
}
# ################################################################################################################################
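# A sketch of the expected input shape - keys come from StatsKey, the values
# below are made up; each element is one partial aggregate to be combined:
#
#   data = [{
#       StatsKey.PerKeyValue: 3,
#       StatsKey.PerKeyLastTimestamp: '2021-01-01T00:00:00',
#       StatsKey.PerKeyLastDuration: 12.5,
#       StatsKey.PerKeyMin: 1.0,
#       StatsKey.PerKeyMax: 20.0,
#       StatsKey.PerKeyMean: 7.5,
#   }]
#   collect_current_usage(data) # -> combined totals, min/max/mean and the latest timestamp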
# ################################################################################################################################
def should_include_in_table_stats(service_name):
# type: (str) -> bool
if service_name.startswith('pub.zato'):
return False
elif service_name.startswith('zato'):
return False
else:
return True
# ################################################################################################################################
# ################################################################################################################################
def combine_table_data(data, round_digits=2):
# type: (list, int) -> dict
# Response to return
out = []
# How many objects we have seen, e.g. how many individual services
total_object_id = 0
# Total usage across all events
total_usage = 0
# Total time spent in all the events (in ms)
total_time = 0
# Total mean time across all objects
total_mean = 0
# First pass, filter out objects with known unneeded names
# and collect total usage of each object and of objects as a whole.
for pid_response in data: # type: dict
if pid_response:
for object_name, stats in pid_response.items(): # type: (str, dict)
if should_include_in_table_stats(object_name):
# Update per object counters
# Total usage needs to be an integer
stats['item_total_usage'] = int(stats['item_total_usage'])
# These are always floats that we need to round up
for name in float_stats:
stats[name] = round(stats[name], round_digits)
# Add to totals
total_usage += stats['item_total_usage']
total_mean += stats['item_mean']
total_time += stats['item_total_time']
total_object_id += 1
# Finally, add the results so that they can be used in further steps
item = dict(stats)
item['name'] = object_name
out.append(item)
    # We know how many objects we have so we can now compute the mean across all of them
if total_object_id:
total_mean = total_mean / total_object_id
# In this pass, we can attach additional per-object statistics
for item in out: # type: dict
item_usage_share = item['item_total_usage'] / total_usage * 100
item_usage_share = round(item_usage_share, round_digits)
item_time_share = item['item_total_time'] / total_time * 100
item_time_share = round(item_time_share, round_digits)
item['item_usage_share'] = item_usage_share
item['item_time_share'] = item_time_share
item['item_total_usage_human'] = item['item_total_usage'] # Currently, this is the same
total_time_delta_min_unit = 'milliseconds' if item['item_total_time'] < 1 else 'seconds'
total_time_delta = timedelta(milliseconds=item['item_total_time'])
total_time_delta = precisedelta(total_time_delta, minimum_unit=total_time_delta_min_unit)
item['item_total_time_human'] = total_time_delta
# Sort by the most interesting attribute
out.sort(key=itemgetter('item_time_share'), reverse=True)
return out
# ################################################################################################################################
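# A sketch of the expected input (hypothetical names and numbers) - a list of
# per-PID dicts, each mapping an object name to its per-item statistics:
#
#   data = [{
#       'my.service': {
#           'item_total_usage': 10.0,
#           'item_total_time': 250.0,
#           'item_mean': 25.0,
#           'item_min': 5.0,
#           'item_max': 50.0,
#       }
#   }]
#   combine_table_data(data) # -> rows sorted by item_time_share, with *_human fields added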
# ################################################################################################################################

# File: src/zato/common/util/stats.py (zato-common-holocene 3.2.1)