repo_name (stringlengths 5–100) | ref (stringlengths 12–67) | path (stringlengths 4–244) | copies (stringlengths 1–8) | content (stringlengths 0–1.05M ⌀) |
---|---|---|---|---|
wsmith323/staticmodel | refs/heads/master | staticmodel/django/models/__init__.py | 1 | from .fields import StaticModelCharField, StaticModelIntegerField, StaticModelTextField
|
codificat/sos | refs/heads/master | sos/plugins/ssh.py | 5 | # Copyright (C) 2007 Red Hat, Inc., Eugene Teo <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class Ssh(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin):
"""Secure shell service
"""
plugin_name = 'ssh'
profiles = ('services', 'security', 'identity')
def setup(self):
self.add_copy_spec([
"/etc/ssh/ssh_config",
"/etc/ssh/sshd_config"
])
# vim: et ts=4 sw=4
|
MSA-Argentina/recuento_web2py | refs/heads/master | models/app_settings.py | 1 | # coding: utf8
# Global settings:
TITULO = "Recuento PASO 2013"
SUBTITULO = " - ".join(["Elecciones Nacionales",
"Primarias Abiertas Simultáneas y Obligatorias",
"Argentina", "11 de Agosto 2013"])
# connection string for the main database:
PG_CNNSTR = "postgres://reingart:clave@localhost/recuento"
|
lparsons/bioconda-recipes | refs/heads/master | bootstrap.py | 14 | #!/usr/bin/env python
import os
import sys
import subprocess as sp
import argparse
if sys.version_info.major == 3:
PY3 = True
from urllib.request import urlretrieve
else:
PY3 = False
from urllib import urlretrieve
usage = """
The easy way to test recipes is by using `circleci build`. However, this does
not allow testing recipes locally with mulled-build (due to the technicalities
of running docker within docker and the CircleCI client).
This script makes it easy to do mulled-build tests. It works by using the same
code used in the .circleci/setup.sh script to build an isolated Miniconda
environment and a custom `activate` script.
Set up the environment like this:
./bootstrap.py /tmp/miniconda
It creates an activate script at ~/.config/bioconda/activate. So you can then use:
source ~/.config/bioconda/activate
and then use that isolated root environment independent of any other conda
installations you might have.
"""
ap = argparse.ArgumentParser(usage)
ap.add_argument('bootstrap', help='''Location to which a new Miniconda
installation plus bioconda-utils should be installed. This will
be separate from any existing conda installations.''')
ap.add_argument('--no-docker', action='store_true', help='''By default we
expect Docker to be present. Use this arg to disable that
behavior. This will reduce functionality, but is useful if
you're unable to install docker.''')
args = ap.parse_args()
# This is the "common" step in the CircleCI config which gets the versions of
# Miniconda and bioconda-utils that we're using.
urlretrieve(
'https://raw.githubusercontent.com/bioconda/bioconda-common/master/common.sh',
filename='.circleci/common.sh')
# TODO: this mimics the override in the "common" job in .circleci/config.yaml
with open('.circleci/common.sh', 'w') as fout:
fout.write("MINICONDA_VER=py37_4.8.3\nBIOCONDA_UTILS_TAG=master\n")
local_config_path = os.path.expanduser('~/.config/bioconda/activate')
def _write_custom_activate(install_path):
"""
Once the isolated Miniconda version has been installed, copy its activate
script over to a custom location, and then hard-code the paths and PS1. We
don't need a matching `deactivate` because the activate script properly
keeps track of the new location.
"""
config_dir = os.path.dirname(local_config_path)
if not os.path.exists(config_dir):
os.makedirs(config_dir)
activate = os.path.join(install_path, 'miniconda/bin/activate')
lines = [i.rstrip() for i in open(activate)]
# The following is code from cb2; disabling but keeping it around for now:
if 0:
# Exact matches to lines we want to replace in the activate script, leading
# space included.
substitutions = [
(
'_CONDA_DIR=$(dirname "$_SCRIPT_LOCATION")',
'_CONDA_DIR="{0}/miniconda/bin"'.format(install_path)
),
(
' export PS1="(${CONDA_DEFAULT_ENV}) $PS1"',
' export PS1="(BIOCONDA-UTILS) $PS1"',
)
]
for orig, sub in substitutions:
# Be very picky so that we'll know if/when the activate script changes.
try:
pos = lines.index(orig)
except ValueError:
raise ValueError(
"Expecting '{0}' to be in {1} but couldn't find it"
.format(orig, activate)
)
lines[pos] = sub
with open(local_config_path, 'w') as fout:
for line in lines:
fout.write(line + '\n')
use_docker = "true"
if args.no_docker:
use_docker = "false"
env = {
'WORKSPACE': args.bootstrap,
'BOOTSTRAP': "true",
'USE_DOCKER': use_docker,
'PATH': os.environ.get('PATH', ""),
'HTTPS_PROXY': os.environ.get('HTTPS_PROXY', ""),
'https_proxy': os.environ.get('https_proxy', "")
}
sp.check_call(['.circleci/setup.sh'], env=env)
_write_custom_activate(args.bootstrap)
print("""
An isolated version of bioconda-utils has been installed to {0}. This is
separate from any other conda installations you might have.
To use it, source this custom activate script:
source ~/.config/bioconda/activate
When done:
source deactivate
""")
|
rwbogl/gbg | refs/heads/master | pycparser/pycparser/ast_transforms.py | 43 | #------------------------------------------------------------------------------
# pycparser: ast_transforms.py
#
# Some utilities used by the parser to create a friendlier AST.
#
# Copyright (C) 2008-2015, Eli Bendersky
# License: BSD
#------------------------------------------------------------------------------
from . import c_ast
def fix_switch_cases(switch_node):
""" The 'case' statements in a 'switch' come out of parsing with one
child node, so subsequent statements are just tucked to the parent
Compound. Additionally, consecutive (fall-through) case statements
come out messy. This is a peculiarity of the C grammar. The following:
switch (myvar) {
    case 10:
        k = 10;
        p = k + 1;
        return 10;
    case 20:
    case 30:
        return 20;
    default:
        break;
}
Creates this tree (pseudo-dump):
    Switch
        ID: myvar
        Compound:
            Case 10:
                k = 10
            p = k + 1
            return 10
            Case 20:
                Case 30:
                    return 20
            Default:
                break
The goal of this transform is to fix this mess, turning it into the
following:
    Switch
        ID: myvar
        Compound:
            Case 10:
                k = 10
                p = k + 1
                return 10
            Case 20:
            Case 30:
                return 20
            Default:
                break
A fixed AST node is returned. The argument may be modified.
"""
assert isinstance(switch_node, c_ast.Switch)
if not isinstance(switch_node.stmt, c_ast.Compound):
return switch_node
# The new Compound child for the Switch, which will collect children in the
# correct order
new_compound = c_ast.Compound([], switch_node.stmt.coord)
# The last Case/Default node
last_case = None
# Goes over the children of the Compound below the Switch, adding them
# either directly below new_compound or below the last Case as appropriate
for child in switch_node.stmt.block_items:
if isinstance(child, (c_ast.Case, c_ast.Default)):
# If it's a Case/Default:
# 1. Add it to the Compound and mark as "last case"
# 2. If its immediate child is also a Case or Default, promote it
# to a sibling.
new_compound.block_items.append(child)
_extract_nested_case(child, new_compound.block_items)
last_case = new_compound.block_items[-1]
else:
# Other statements are added as children to the last case, if it
# exists.
if last_case is None:
new_compound.block_items.append(child)
else:
last_case.stmts.append(child)
switch_node.stmt = new_compound
return switch_node
def _extract_nested_case(case_node, stmts_list):
""" Recursively extract consecutive Case statements that are made nested
by the parser and add them to the stmts_list.
"""
if isinstance(case_node.stmts[0], (c_ast.Case, c_ast.Default)):
stmts_list.append(case_node.stmts.pop())
_extract_nested_case(stmts_list[-1], stmts_list)
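# Illustrative usage sketch (hypothetical snippet and attribute chain, not
# part of this module): parse a small function with pycparser and run the
# transform on the Switch node it contains.
#   from pycparser import c_parser
#   ast = c_parser.CParser().parse(
#       'int f(int x) { switch (x) { case 1: return 1; default: return 0; } }')
#   switch_node = ast.ext[0].body.block_items[0]   # the c_ast.Switch
#   fixed = fix_switch_cases(switch_node)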
|
mdworks2016/work_development | refs/heads/master | Python/05_FirstPython/Chapter9_WebApp/fppython_develop/lib/python3.7/site-packages/setuptools/windows_support.py | 1015 | import platform
import ctypes
def windows_only(func):
if platform.system() != 'Windows':
return lambda *args, **kwargs: None
return func
@windows_only
def hide_file(path):
"""
Set the hidden attribute on a file or directory.
From http://stackoverflow.com/questions/19622133/
`path` must be text.
"""
__import__('ctypes.wintypes')
SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW
SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD
SetFileAttributes.restype = ctypes.wintypes.BOOL
FILE_ATTRIBUTE_HIDDEN = 0x02
ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN)
if not ret:
raise ctypes.WinError()
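# Example usage (hypothetical path; the decorator above makes this a no-op
# on non-Windows platforms):
#   hide_file(u'C:\\projects\\.metadata')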
|
andrewyoung1991/abjad | refs/heads/master | abjad/tools/labeltools/test/test_labeltools_label_logical_ties_in_expr_with_logical_tie_durations.py | 2 | # -*- encoding: utf-8 -*-
from abjad import *
def test_labeltools_label_logical_ties_in_expr_with_logical_tie_durations_01():
staff = Staff("c'8 c'8 c'8 c'8")
scoretools.FixedDurationTuplet(Duration(2, 8), staff[:3])
tie = spannertools.Tie()
attach(tie, staff.select_leaves()[:2])
tie = spannertools.Tie()
attach(tie, staff.select_leaves()[2:])
labeltools.label_logical_ties_in_expr_with_logical_tie_durations(staff)
assert systemtools.TestManager.compare(
staff,
r'''
\new Staff {
\times 2/3 {
c'8 ~
_ \markup {
\column
{
\small
1/4
\small
1/6
}
}
c'8
c'8 ~
_ \markup {
\column
{
\small
1/4
\small
5/24
}
}
}
c'8
}
'''
)
assert inspect_(staff).is_well_formed() |
trafferty/utils | refs/heads/master | python/parseXfdSSLog.py | 1 | #!/usr/bin/env python
import sys
import time
import re
import argparse
import json
import matplotlib.pyplot as plt
import numpy as np
import datetime as dt
def parseXfdSSLog(xfdLog, output_path, generic=False):
'''
import re
p = re.compile(ur'2015-..-..\ (?P<start_ts>[0-9:,]*).*?\n2015-..-..\ (?P<end_ts>[0-9:,]*)\ INFO\ RequestProcessor-. xfd\.XPM\ -\ XaarCmdAPI:\ Calling bXaarScorpionXUSBBusy')
test_str = u"2015-08-27 20:24:02,442 INFO RequestProcessor-4 xfd.xfdservice - Vector 0 is -13.0525085, -0.0352770499999826 to 17.7094003, -0.0352770499999826\n2015-08-27 20:24:02,462 INFO RequestProcessor-4 xfd.xfdservice - Dispensing pattern for tcs:0 recipe:-1 swathe:0\n2015-08-27 20:24:02,465 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling XaarScorpionAllowGetTemperatures...call success\n2015-08-27 20:24:02,469 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionGetStatusData...call success\n2015-08-27 20:24:02,471 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionGetPCBTemperature...call success\n2015-08-27 20:24:02,475 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionGetStatusData...call success\n2015-08-27 20:24:02,477 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionGetPCBTemperature...call success\n2015-08-27 20:24:02,479 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling XaarScorpionAllowGetTemperatures...call success\n2015-08-27 20:24:02,481 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionXUSBBusy...call success\n2015-08-27 20:24:02,514 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionEnablePrintMode...call success\n2015-08-27 20:24:02,524 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetupSwatheBlockParametersUpdated...call success\n2015-08-27 20:24:02,560 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetPrintDataParametersUpdated...call success\n2015-08-27 20:24:02,570 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetupSwatheBlockParametersUpdated...call success\n2015-08-27 20:24:02,606 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetPrintDataParametersUpdated...call success\n2015-08-27 20:24:02,606 INFO RequestProcessor-4 xfd.printmanagerXPM - Setting up DDFS. 
DDFSValue = 1341370\n2015-08-27 20:24:02,606 INFO RequestProcessor-4 xfd.printmanagerXPM - swathe.internalEncoderFrequency_Hz = 28347.04149014728\n2015-08-27 20:24:02,606 INFO RequestProcessor-4 xfd.printmanagerXPM - DDFSMultiplier = 0.3356\n2015-08-27 20:24:02,606 INFO RequestProcessor-4 xfd.printmanagerXPM - cycleMode = 3\n2015-08-27 20:24:02,606 INFO RequestProcessor-4 xfd.printmanagerXPM - encoderDivide = 47\n2015-08-27 20:24:02,606 INFO RequestProcessor-4 xfd.printmanagerXPM - ...DDFSValue (multiplied together) = 1341370\n2015-08-27 20:24:02,611 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetDDFSValueSEPD...call success\n2015-08-27 20:24:02,611 INFO RequestProcessor-4 xfd.printmanagerXPM - Using internal encoder frequency of 28347.04149014728 Hz\n2015-08-27 20:24:02,611 INFO RequestProcessor-4 xfd.printmanagerXPM - SetDDFSEnable = 1\n2015-08-27 20:24:02,615 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetDDFSEnable...call success\n2015-08-27 20:24:02,615 INFO RequestProcessor-4 xfd.printmanagerXPM - SetPDInternal = 0\n2015-08-27 20:24:02,618 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetPDInternalSEPD...call success\n2015-08-27 20:24:02,620 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionXUSBBusy...call success\n2015-08-27 20:24:02,622 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionXUSBBusy...call success\n2015-08-27 20:24:02,627 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionEnablePrintMode...call success\n2015-08-27 20:24:02,629 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionXUSBBusy...call success\n2015-08-27 20:34:02,442 INFO RequestProcessor-4 xfd.xfdservice - getDropRecipeSwatheVectors 0 rec:-1\n2015-08-27 20:34:02,442 INFO RequestProcessor-4 xfd.xfdservice - Vector 0 is -13.0525085, -0.0352770499999826 to 17.7094003, -0.0352770499999826\n2015-08-27 20:34:02,464 INFO RequestProcessor-4 xfd.xfdservice - Dispensing pattern for tcs:0 recipe:-1 swathe:0\n2015-08-27 20:34:02,467 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling XaarScorpionAllowGetTemperatures...call success\n2015-08-27 20:34:02,471 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionGetStatusData...call success\n2015-08-27 20:34:02,473 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionGetPCBTemperature...call success\n2015-08-27 20:34:02,477 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionGetStatusData...call success\n2015-08-27 20:34:02,479 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionGetPCBTemperature...call success\n2015-08-27 20:34:02,481 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling XaarScorpionAllowGetTemperatures...call success\n2015-08-27 20:34:02,483 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionXUSBBusy...call success\n2015-08-27 20:34:02,516 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionEnablePrintMode...call success\n2015-08-27 20:34:02,526 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetupSwatheBlockParametersUpdated...call success\n2015-08-27 20:34:02,562 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetPrintDataParametersUpdated...call success\n2015-08-27 20:34:02,572 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetupSwatheBlockParametersUpdated...call success\n2015-08-27 20:34:02,608 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling 
bXaarScorpionSetPrintDataParametersUpdated...call success\n2015-08-27 20:34:02,608 INFO RequestProcessor-4 xfd.printmanagerXPM - Setting up DDFS. DDFSValue = 1341370\n2015-08-27 20:34:02,608 INFO RequestProcessor-4 xfd.printmanagerXPM - swathe.internalEncoderFrequency_Hz = 28347.04149014728\n2015-08-27 20:34:02,608 INFO RequestProcessor-4 xfd.printmanagerXPM - DDFSMultiplier = 0.3356\n2015-08-27 20:34:02,608 INFO RequestProcessor-4 xfd.printmanagerXPM - cycleMode = 3\n2015-08-27 20:34:02,608 INFO RequestProcessor-4 xfd.printmanagerXPM - encoderDivide = 47\n2015-08-27 20:34:02,608 INFO RequestProcessor-4 xfd.printmanagerXPM - ...DDFSValue (multiplied together) = 1341370\n2015-08-27 20:34:02,613 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetDDFSValueSEPD...call success\n2015-08-27 20:34:02,613 INFO RequestProcessor-4 xfd.printmanagerXPM - Using internal encoder frequency of 28347.04149014728 Hz\n2015-08-27 20:34:02,613 INFO RequestProcessor-4 xfd.printmanagerXPM - SetDDFSEnable = 1\n2015-08-27 20:34:02,617 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetDDFSEnable...call success\n2015-08-27 20:34:02,617 INFO RequestProcessor-4 xfd.printmanagerXPM - SetPDInternal = 0\n2015-08-27 20:34:02,620 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionSetPDInternalSEPD...call success\n2015-08-27 20:34:02,622 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionXUSBBusy...call success\n2015-08-27 20:34:02,624 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionXUSBBusy...call success\n2015-08-27 20:34:02,629 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionEnablePrintMode...call success\n2015-08-27 20:34:02,631 INFO RequestProcessor-4 xfd.XPM - XaarCmdAPI: Calling bXaarScorpionXUSBBusy...call success\n2015-08-27 20:44:02,444 INFO RequestProcessor-4 xfd.xfdservice - getDropRecipeSwatheVectors 0 rec:-1\n2015-08-27 20:44:02,444 INFO RequestProcessor-4 xfd.xfdservice - Vector 0 is -13.0525085, -0.0352770499999826 to 17.7094003, -0.0352770499999826\n2015-08-27 20:44:02,461 INFO RequestProcessor-4 xfd.xfdservice - Dispensing pattern for tcs:0 recipe:-1 swathe:0\n:\n"
re.findall(p, test_str)
'''
XUSBBusy_pattern=ur'2015-..-..\ (?P<start_ts>[0-9:,]*).*?\n2015-..-..\ (?P<end_ts>[0-9:,]*)\ INFO\ RequestProcessor-. xfd\.XPM\ -\ XaarCmdAPI:\ Calling bXaarScorpionXUSBBusy'
EnablePrintMode_pattern=ur'2015-..-..\ (?P<start_ts>[0-9:,]*).*?\n2015-..-..\ (?P<end_ts>[0-9:,]*)\ INFO\ RequestProcessor-. xfd\.XPM\ -\ XaarCmdAPI:\ Calling bXaarScorpionEnablePrintMode'
f = open(xfdLog, 'r')
buf = f.read()
f.close()
print "File (%s) opened and read into buffer, length of buf: %d" % (xfdLog, len(buf))
#XUSBBusy_sets = [x.groupdict() for x in re.finditer(XUSBBusy_pattern, buf, re.DOTALL)]
XUSBBusy_sets = [x.groupdict() for x in re.finditer(XUSBBusy_pattern, buf)]
print "Parsing log for XUSBBusy calls...found %d records." % (len(XUSBBusy_sets))
if len(XUSBBusy_sets) > 0: print " >> Date range: %s - %s" % (XUSBBusy_sets[0]['start_ts'], XUSBBusy_sets[-1]['start_ts'])
#EnablePrintMode_sets = [x.groupdict() for x in re.finditer(EnablePrintMode_pattern, buf, re.DOTALL)]
EnablePrintMode_sets = [x.groupdict() for x in re.finditer(EnablePrintMode_pattern, buf)]
print "Parsing log for EnablePrintMode calls...found %d records." % (len(EnablePrintMode_sets))
if len(EnablePrintMode_sets) > 0: print " >> Date range: %s - %s" % (EnablePrintMode_sets[0]['start_ts'], EnablePrintMode_sets[-1]['start_ts'])
timestamp_format = "%H:%M:%S,%f"
processing_times_XUSBBusy = []
processing_times_EnablePrintMode = []
for XUSBBusy_set in XUSBBusy_sets:
'''
[{u'end_ts': u'20:24:02,481', u'start_ts': u'20:24:02,442'},
{u'end_ts': u'20:24:02,620', u'start_ts': u'20:24:02,514'},
{u'end_ts': u'20:24:02,629', u'start_ts': u'20:24:02,622'},
{u'end_ts': u'20:34:02,483', u'start_ts': u'20:34:02,442'},
{u'end_ts': u'20:34:02,622', u'start_ts': u'20:34:02,516'},
{u'end_ts': u'20:34:02,631', u'start_ts': u'20:34:02,624'}]
'''
start_ts = dt.datetime.strptime(XUSBBusy_set['start_ts'], timestamp_format)
end_ts = dt.datetime.strptime(XUSBBusy_set['end_ts'], timestamp_format)
time_delta = end_ts-start_ts
delta_ms = time_delta.total_seconds() * 1000
processing_times_XUSBBusy.append(delta_ms)
for EnablePrintMode_set in EnablePrintMode_sets:
start_ts = dt.datetime.strptime(EnablePrintMode_set['start_ts'], timestamp_format)
end_ts = dt.datetime.strptime(EnablePrintMode_set['end_ts'], timestamp_format)
time_delta = end_ts-start_ts
delta_ms = time_delta.total_seconds() * 1000
processing_times_EnablePrintMode.append(delta_ms)
processing_times_XUSBBusy_np = np.array(processing_times_XUSBBusy)
processing_times_EnablePrintMode_np = np.array(processing_times_EnablePrintMode)
fig = plt.figure(figsize=(10*2,5))
if len(processing_times_XUSBBusy) > 0:
plt.plot(processing_times_XUSBBusy, color='b')
plt.hlines(processing_times_XUSBBusy_np.mean(), 0, len(processing_times_XUSBBusy), color='r', linewidth=2, linestyle='--')
plt.text(len(processing_times_XUSBBusy)+10, processing_times_XUSBBusy_np.mean(), ("mean=%.f" % (np.round(processing_times_XUSBBusy_np.mean()))), fontsize=18)
plt.text(len(processing_times_XUSBBusy)+10, processing_times_XUSBBusy_np.mean()+processing_times_XUSBBusy_np.max()/2, ("stdev=%.2f" % (processing_times_XUSBBusy_np.std())), fontsize=18)
plt.ylabel('time (ms)')
plt.title('XUSB Busy call timing')
plt.legend()
plt.show()
fig = plt.figure(figsize=(10*2,5))
if len(processing_times_EnablePrintMode) > 0:
plt.plot(processing_times_EnablePrintMode, color='g')
plt.hlines(processing_times_EnablePrintMode_np.mean(), 0, len(processing_times_EnablePrintMode), color='r', linewidth=2, linestyle='--')
plt.text(len(processing_times_EnablePrintMode)+10, processing_times_EnablePrintMode_np.mean(), ("mean=%.f" % (np.round(processing_times_EnablePrintMode_np.mean()))), fontsize=18)
plt.text(len(processing_times_EnablePrintMode)+10, processing_times_EnablePrintMode_np.mean()+processing_times_EnablePrintMode_np.max()/2, ("stdev=%.2f" % (processing_times_EnablePrintMode_np.std())), fontsize=18)
plt.ylabel('time (ms)')
plt.title('EnablePrintMode call timing')
plt.legend()
plt.show()
if __name__ == "__main__":
'''
parseXfdSSLog.py -i file_to_parse
'''
parser = argparse.ArgumentParser(description='open process log file, parse it according to parse function')
parser.add_argument('-i', '--in_file', dest='in_file', type=str,
help='input file...if not specified then use stdin')
parser.add_argument('-o', '--output_path', dest='output_path', type=str,
help='output path...if not specified then will use /tmp', default='/tmp')
args = parser.parse_args()
if args.in_file:
parseXfdSSLog(args.in_file, args.output_path)
else:
parser.print_help()
sys.exit(1)
|
harwee/electrum-xvg-tor | refs/heads/master | lib/asn1tinydecoder.py | 15 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
# This is a simple and fast ASN1 decoder without external libraries.
#
# In order to browse through the ASN1 structure you need only 3
# functions allowing you to navigate:
# asn1_node_root(...), asn1_node_next(...) and asn1_node_first_child(...)
#
####################### BEGIN ASN1 DECODER ############################
# Author: Jens Getreu, 8.11.2014
##### NAVIGATE
# The following 4 functions are all you need to parse an ASN1 structure
# gets the first ASN1 structure in der
def asn1_node_root(der):
return asn1_read_length(der,0)
# gets the next ASN1 structure following (ixs,ixf,ixl)
def asn1_node_next(der, (ixs,ixf,ixl)):
return asn1_read_length(der,ixl+1)
# opens the container (ixs,ixf,ixl) and returns the first ASN1 inside
def asn1_node_first_child(der, (ixs,ixf,ixl)):
if ord(der[ixs]) & 0x20 != 0x20:
raise ValueError('Error: can only open constructed types. '
+'Found type: 0x'+der[ixs].encode("hex"))
return asn1_read_length(der,ixf)
# is true if one ASN1 chunk is inside another chunk.
def asn1_node_is_child_of((ixs,ixf,ixl), (jxs,jxf,jxl)):
return ( (ixf <= jxs ) and (jxl <= ixl) ) or \
( (jxf <= ixs ) and (ixl <= jxl) )
##### END NAVIGATE
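# A minimal walk-through of the navigation functions (illustrative DER bytes,
# assuming a Python 2 byte string as input):
#   der = '\x30\x06\x02\x01\x05\x02\x01\x07'   # SEQUENCE { INTEGER 5, INTEGER 7 }
#   root = asn1_node_root(der)                 # (0, 2, 7): the whole SEQUENCE
#   first = asn1_node_first_child(der, root)   # (2, 4, 4): the INTEGER 5
#   second = asn1_node_next(der, first)        # (5, 7, 7): the INTEGER 7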
##### ACCESS PRIMITIVES
# get content and verify type byte
def asn1_get_value_of_type(der,(ixs,ixf,ixl),asn1_type):
asn1_type_table = {
'BOOLEAN': 0x01, 'INTEGER': 0x02,
'BIT STRING': 0x03, 'OCTET STRING': 0x04,
'NULL': 0x05, 'OBJECT IDENTIFIER': 0x06,
'SEQUENCE': 0x70, 'SET': 0x71,
'PrintableString': 0x13, 'IA5String': 0x16,
'UTCTime': 0x17, 'ENUMERATED': 0x0A,
'UTF8String': 0x0C,
}
if asn1_type_table[asn1_type] != ord(der[ixs]):
raise ValueError('Error: Expected type was: '+
hex(asn1_type_table[asn1_type])+
' Found: 0x'+der[ixs].encode('hex'))
return der[ixf:ixl+1]
# get value
def asn1_get_value(der,(ixs,ixf,ixl)):
return der[ixf:ixl+1]
# get type+length+value
def asn1_get_all(der,(ixs,ixf,ixl)):
return der[ixs:ixl+1]
##### END ACCESS PRIMITIVES
##### HELPER FUNCTIONS
# converter
def bitstr_to_bytestr(bitstr):
if bitstr[0] != '\x00':
raise ValueError('Error: only 00 padded bitstr can be converted to bytestr!')
return bitstr[1:]
# converter
def bytestr_to_int(s):
# converts bytestring to integer
i = 0
for char in s:
i <<= 8
i |= ord(char)
return i
# ix points to the first byte of the asn1 structure
# Returns first byte pointer, first content byte pointer and last.
def asn1_read_length(der,ix):
first= ord(der[ix+1])
if (ord(der[ix+1]) & 0x80) == 0:
length = first
ix_first_content_byte = ix+2
ix_last_content_byte = ix_first_content_byte + length -1
else:
lengthbytes = first & 0x7F
length = bytestr_to_int(der[ix+2:ix+2+lengthbytes])
ix_first_content_byte = ix+2+lengthbytes
ix_last_content_byte = ix_first_content_byte + length -1
return (ix,ix_first_content_byte,ix_last_content_byte)
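# For example (illustrative): a long-form length such as
# der = '\x04\x81\x80' + 'A'*128 (an OCTET STRING of 128 bytes) gives
# asn1_read_length(der, 0) == (0, 3, 130): one extra length byte (0x81),
# then the length itself (0x80 = 128).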
##### END HELPER FUNCTIONS
####################### END ASN1 DECODER ############################
|
supersven/intellij-community | refs/heads/master | python/testData/formatter/continuationIndentForCallInStatementPart.py | 83 | for item in really_long_name_of_the_function_with_a_lot_of_patams(
param1, param2, param3):
pass |
abstract-open-solutions/account-financial-tools | refs/heads/8.0 | account_auto_fy_sequence/__openerp__.py | 13 | # coding=utf-8
##############################################################################
#
# account_auto_fy_sequence module for Odoo
# Copyright (C) 2014 ACSONE SA/NV (<http://acsone.eu>)
# @author Stéphane Bidoul <[email protected]>
#
# account_auto_fy_sequence is free software:
# you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License v3 or later
# as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# account_auto_fy_sequence is distributed
# in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License v3 or later for more details.
#
# You should have received a copy of the GNU Affero General Public License
# v3 or later along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Automatic Fiscal Year Sequences',
'version': '8.0.0.1.0',
'category': 'Accounting',
'author': "ACSONE SA/NV,Odoo Community Association (OCA)",
'website': 'http://acsone.eu',
'depends': ['account'],
'data': [
'views/ir_sequence_view.xml',
],
'installable': True,
'application': False,
'auto_install': False,
'license': 'AGPL-3',
}
|
BayanGroup/sentry | refs/heads/master | src/sentry/ratelimits/base.py | 25 | from __future__ import absolute_import
class RateLimiter(object):
def validate(self):
"""
Validates the settings for this backend (i.e. such as proper connection
info).
Raise ``InvalidConfiguration`` if there is a configuration error.
"""
def is_limited(self, project, key, limit):
return False
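# Illustrative subclass sketch (hypothetical, not shipped with this module):
# a concrete backend would override ``is_limited`` to compare a stored
# counter for (project, key) against ``limit``, e.g.:
#   class CounterRateLimiter(RateLimiter):
#       def is_limited(self, project, key, limit):
#           return current_count(project, key) > limit  # current_count is assumed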
|
GoogleCloudPlatform/python-docs-samples | refs/heads/master | appengine/standard/ndb/queries/snippets_models.py | 9 | # Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.ext import ndb
class Account(ndb.Model):
username = ndb.StringProperty()
userid = ndb.IntegerProperty()
email = ndb.StringProperty()
class Address(ndb.Model):
type = ndb.StringProperty() # E.g., 'home', 'work'
street = ndb.StringProperty()
city = ndb.StringProperty()
class Contact(ndb.Model):
name = ndb.StringProperty()
addresses = ndb.StructuredProperty(Address, repeated=True)
class Article(ndb.Model):
title = ndb.StringProperty()
stars = ndb.IntegerProperty()
tags = ndb.StringProperty(repeated=True)
class ArticleWithDifferentDatastoreName(ndb.Model):
title = ndb.StringProperty('t')
class Employee(ndb.Model):
full_name = ndb.StringProperty('n')
retirement_age = ndb.IntegerProperty('r')
class Manager(ndb.Model):
pass
class FlexEmployee(ndb.Expando):
name = ndb.StringProperty()
age = ndb.IntegerProperty()
class Bar(ndb.Model):
pass
class Message(ndb.Model):
content = ndb.StringProperty()
userid = ndb.IntegerProperty()
|
dchud/sentinel | refs/heads/master | canary/ui/__init__.py | 1 | # $Id$
_q_exports = [
'error',
'search',
'record',
'news',
'canary_png',
'advanced_search',
'opensearch',
'unapi',
'about',
'admin',
'edit',
'login',
'logout',
'register',
'verify',
'user',
'assistant',
'reaper',
'resetpass',
]
import cStringIO
import sys
from quixote.errors import PublishError
from quixote.publish import get_publisher
from quixote.util import StaticFile
from canary.qx_defs import NotLoggedInError
from canary.qx_utils import MyStaticFile, load_static_exports
from canary.ui import about, admin, assistant, edit, user, record_ui, opensearch
from canary.ui.browse_ui import Browse
from canary.ui.pages import _q_index, _q_exception_handler, not_found, reaper
from canary.ui.pages import login_general, login_yale, logout
from canary.ui.pages import register, verify, resetpass
from canary.ui.pages import news, robots, unapi, TempImage
from canary.ui.search import search, advanced_search
from canary.ui.summary_ui import Summary
record = record_ui
config = get_publisher().config
this_module = sys.modules[__name__]
login = getattr(this_module, 'login_%s' % config.authn_mode)
def error (request):
raise PublishError(public_msg = "Oops, an error occurred.")
def _q_lookup (request, name=''):
if name == 'favicon.ico':
return request.redirect('/images/favicon.ico')
elif name == 'wdd_styles.css':
return StaticFile(config.static_html_dir + '/wdd_styles.css',
mime_type='text/css', cache_time=300)
elif name == 'wdd_print.css':
return StaticFile(config.static_html_dir + '/wdd_print.css',
mime_type='text/css', cache_time=300)
elif name.endswith('.js'):
return StaticFile(config.static_html_dir + '/%s' % name,
mime_type='text/javascript', cache_time=300)
elif name == 'robots.txt':
if config.enable_robots_txt:
return robots()
else:
return not_found()
elif name == 'browse':
return Browse(request)
elif name == 'timage':
return TempImage()
elif name == 'summary':
return Summary(request)
else:
return not_found()
|
robert-sandor/student_management | refs/heads/develop | run.py | 1 | import os
from app import app
# print('http://student-management-robert-sandor-2.c9users.io/')
# app.run(host=os.getenv('IP', '0.0.0.0'), port=int(os.getenv('PORT', 8080)),
#         debug=True)
app.run(debug=True)
|
pauldeng/nilmtk | refs/heads/master | nilmtk/tests/test_timeframe.py | 5 | #!/usr/bin/python
from __future__ import print_function, division
import unittest
import pandas as pd
from nilmtk.timeframe import TimeFrame, merge_timeframes
class TestTimeFrame(unittest.TestCase):
def test_date_setting(self):
TimeFrame()
TimeFrame("2012-01-01", "2013-01-01")
# test identical start and end dates
with self.assertRaises(ValueError):
TimeFrame("2012-01-01", "2012-01-01")
TimeFrame(start="2011-01-01")
TimeFrame(end="2011-01-01")
# test end date after start date
with self.assertRaises(ValueError):
TimeFrame("2012-01-01", "2011-01-01")
tf = TimeFrame()
tf.end = "2011-01-01"
tf.start = "2010-01-01"
with self.assertRaises(ValueError):
tf.start = "2012-01-01"
def test_time_delta(self):
tf = TimeFrame("2012-01-01 00:00:00", "2013-01-01 00:00:00")
self.assertEqual(tf.timedelta.total_seconds(), 60*60*24*366)
def test_intersection(self):
tf = TimeFrame("2012-01-01 00:00:00", "2013-01-01 00:00:00")
self.assertFalse(tf.empty)
new_tf = tf.intersection(tf)
self.assertEqual(tf, new_tf)
self.assertFalse(new_tf.empty)
new_tf = tf.intersection(TimeFrame())
self.assertEqual(tf, new_tf)
self.assertFalse(new_tf.empty)
new_tf = tf.intersection(TimeFrame(start="1990-01-01"))
self.assertEqual(tf, new_tf)
self.assertFalse(new_tf.empty)
new_tf = tf.intersection(TimeFrame(end="2100-01-01"))
self.assertEqual(tf, new_tf)
self.assertFalse(new_tf.empty)
small_tf = TimeFrame("2012-01-05 00:00:00", "2012-01-06 00:00:00")
new_tf = tf.intersection(small_tf)
self.assertEqual(small_tf, new_tf)
self.assertFalse(new_tf.empty)
large_tf = TimeFrame("2010-01-01 00:00:00", "2014-01-01 00:00:00")
new_tf = tf.intersection(large_tf)
self.assertEqual(tf, new_tf)
self.assertFalse(new_tf.empty)
disjoint = TimeFrame("2015-01-01", "2016-01-01")
new_tf = tf.intersection(disjoint)
self.assertTrue(new_tf.empty)
# try intersecting with empty TF
new_tf = tf.intersection(new_tf)
self.assertTrue(new_tf.empty)
disjoint = TimeFrame("2015-01-01", "2016-01-01")
tf.enabled = False
new_tf = tf.intersection(disjoint)
self.assertEqual(new_tf, disjoint)
self.assertFalse(new_tf.empty)
tf.enabled = True
# crop into the start of tf
new_start = "2012-01-05 04:05:06"
new_tf = tf.intersection(TimeFrame(start=new_start, end="2014-01-01"))
self.assertEqual(new_tf, TimeFrame(start=new_start, end=tf.end))
self.assertFalse(new_tf.empty)
# crop into the end of tf
new_end = "2012-01-07 04:05:06"
new_tf = tf.intersection(TimeFrame(start="2011-01-01", end=new_end))
self.assertEqual(new_tf, TimeFrame(start=tf.start, end=new_end))
self.assertFalse(new_tf.empty)
def test_adjacent(self):
# overlap
tf1 = TimeFrame("2011-01-01 00:00:00", "2011-02-01 00:00:00")
tf2 = TimeFrame("2011-02-01 00:00:00", "2011-03-01 00:00:00")
self.assertTrue(tf1.adjacent(tf2))
self.assertTrue(tf2.adjacent(tf1))
# no overlap
tf1 = TimeFrame("2011-01-01 00:00:00", "2011-02-01 00:00:00")
tf2 = TimeFrame("2011-02-01 00:00:01", "2011-03-01 00:00:00")
self.assertFalse(tf1.adjacent(tf2))
self.assertFalse(tf2.adjacent(tf1))
# no overlap but gap specified
tf1 = TimeFrame("2011-01-01 00:00:00", "2011-02-01 00:00:00")
tf2 = TimeFrame("2011-02-01 00:00:01", "2011-03-01 00:00:00")
self.assertTrue(tf1.adjacent(tf2, gap=1))
self.assertTrue(tf2.adjacent(tf1, gap=1))
self.assertTrue(tf1.adjacent(tf2, gap=100))
self.assertTrue(tf2.adjacent(tf1, gap=100))
def test_union(self):
# overlap
def test_u(ts1, ts2, ts3, ts4):
ts1 = pd.Timestamp(ts1)
ts2 = pd.Timestamp(ts2)
ts3 = pd.Timestamp(ts3)
ts4 = pd.Timestamp(ts4)
tf1 = TimeFrame(ts1, ts2)
tf2 = TimeFrame(ts3, ts4)
union = tf1.union(tf2)
self.assertEqual(union.start, ts1)
self.assertEqual(union.end, ts4)
test_u("2011-01-01 00:00:00", "2011-02-01 00:00:00",
"2011-02-01 00:00:00", "2011-03-01 00:00:00")
test_u("2011-01-01 00:00:00", "2011-01-15 00:00:00",
"2011-02-01 00:00:00", "2011-03-01 00:00:00")
def test_merge_timeframes(self):
tfs = [TimeFrame("2010-01-01", "2011-01-01"),
TimeFrame("2011-01-01", "2011-06-01"),
TimeFrame("2012-01-01", "2013-01-01")]
merged = merge_timeframes(tfs)
correct_answer = [TimeFrame("2010-01-01", "2011-06-01"),
TimeFrame("2012-01-01", "2013-01-01")]
self.assertEqual(merged, correct_answer)
if __name__ == '__main__':
unittest.main()
|
betoesquivel/fil2014 | refs/heads/master | build/django/tests/urlpatterns_reverse/urls_error_handlers.py | 79 | # Used by the ErrorHandlerResolutionTests test case.
from django.conf.urls import patterns
urlpatterns = patterns('')
handler400 = 'urlpatterns_reverse.views.empty_view'
handler404 = 'urlpatterns_reverse.views.empty_view'
handler500 = 'urlpatterns_reverse.views.empty_view'
|
bennibaermann/stickandrun | refs/heads/master | config.py | 1 | # -*- coding: utf-8 -*-
# set which part of the game you want to debug, set all for maximum output
# DEBUG = ('init', 'font', 'track', 'path', 'station', 'passenger', 'random' )
DEBUG = ('init','font' )
BLACK = ( 0, 0, 0)
VERYLIGHTGREY= (220, 220, 220)
LIGHTGREY= (200, 200, 200)
WHITE = (255, 255, 255)
BLUE = ( 0, 0, 255)
GREEN = ( 0, 255, 0)
RED = (255, 0, 0)
MAGENTA = (255, 0, 255)
CYAN = ( 0, 255, 255)
YELLOW = (255, 255, 0)
(MAX_X,MAX_Y) = (400,400)
STATUSHEIGHT = 20
RIGHT_OFFSET = 100
STARTPOS = (20,MAX_Y-100)
HEROSIZE = 10
STICKSPEED = 3
FPS = 30
FONTSIZE = 18 # size of the default font used
MIN_GAP = 10 # minimal gap between towers and between towers and border
MIN_WIDTH = 10 # minimal width of tower
MAX_WIDTH = 50 # maximal width of tower
PERFECT = 5 # range of perfect match
|
polyaxon/polyaxon | refs/heads/master | core/polyaxon/polyflow/joins/__init__.py | 1 | #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import polyaxon_sdk
from marshmallow import fields
from polyaxon.polyflow.params.params import ParamSchema, ParamValueMixin
from polyaxon.schemas.base import BaseCamelSchema, BaseConfig
from polyaxon.schemas.fields.ref_or_obj import RefOrObject
class JoinParamSchema(ParamSchema):
ref = None
@staticmethod
def schema_config():
return V1JoinParam
class V1JoinParam(BaseConfig, ParamValueMixin, polyaxon_sdk.V1JoinParam):
SCHEMA = JoinParamSchema
IDENTIFIER = "join_param"
REDUCED_ATTRIBUTES = [
"contextOnly",
"connection",
"toInit",
]
@property
def is_literal(self):
return False
@property
def is_ref(self):
return True
@property
def is_template_ref(self):
return False
@property
def is_join_ref(self):
return True
@property
def is_runs_ref(self):
return False
@property
def is_ops_ref(self):
return False
@property
def is_dag_ref(self):
return False
class JoinSchema(BaseCamelSchema):
query = fields.Str(required=True)
sort = fields.Str(allow_none=True)
limit = RefOrObject(fields.Int(allow_none=True))
offset = RefOrObject(fields.Int(allow_none=True))
params = fields.Dict(
keys=fields.Str(), values=fields.Nested(JoinParamSchema), allow_none=True
)
@staticmethod
def schema_config():
return V1Join
class V1Join(BaseConfig, polyaxon_sdk.V1Join):
"""Joins allow to query several runs based on a search specification.
The result of the join will be a list of values based on the results from executing the search.
A Join corresponds to a valid [query specification](/docs/core/query-language/),
the result of the search will be used to resolve
the params defined in the join.
```yaml
>>> joins:
>>> - query: "metrics.loss: <0.01"
>>> sort: "metrics.loss"
>>> limit: 5
>>> params:
>>> all_param1:
>>> value: inputs.param1
>>> all_result1:
>>> value: outputs.result1
>>> all_result2:
>>> value: outputs.result2
>>> tensorboard_paths:
>>> value: artifacts.tensorboard
>>> contextOnly: true
>>> - query: "metrics.accuracy: >.9"
>>> sort: "-metrics.accuracy"
>>> params:
>>> all_inputs:
>>> value: inputs
>>> all_outputs:
>>> value: outputs
>>> run_artifact_paths:
>>> value: artifacts.base
>>> uuids:
>>> value: globals.uuid
>>> contextOnly: true
>>> files:
>>> value: {files: ["subpath/files", "subpath2/file2"]}
>>> toInit: true
```
```python
>>> joins = [
>>> V1Join(
>>> query="metrics.loss: <0.01",
>>> sort="metrics.loss",
>>> limit=5,
>>> params={
>>> "all_param1": V1JoinParam(value="inputs.param1"),
>>> "all_result1": V1JoinParam(value="outputs.result1"),
>>> "all_result2": V1JoinParam(value="outputs.result2"),
>>> "tensorboard_paths": V1JoinParam(
>>> value="artifacts.tensorboard", context_only=True
>>> ),
>>> },
>>> ),
>>> V1Join(
>>> query="metrics.accuracy: >.9",
>>> sort="-metrics.accuracy",
>>> params={
>>> "all_inputs": V1JoinParam(value="inputs"),
>>> "all_outputs": V1JoinParam(value="outputs"),
>>> "run_artifact_paths": V1JoinParam(value="artifacts")
>>> "uuids": V1JoinParam(value="globals.uuid"),
>>> "artifacts": V1JoinParam(
>>> value={"files": ["subpath/files", "subpath2/file2"]},
>>> to_init=True,
>>> ),
>>> }
>>> )
>>> ]
```
This will instruct Polyaxon to perform 2 searches.
Each search will expose the params to be used similar to the default
[params section](/docs/core/specification/params/).
Polyaxon will validate the params of each search against the IO (inputs/outputs) definition.
Users should make sure that their IO definitions specify `isList: true`,
unless the type is `artifacts`.
If a param is based on `contexts`, `inputs`, `outputs`, or `artifacts`
Polyaxon will turn that param into a list by querying
that field from all runs in the search result:
```python
>>> {
>>> "all_param1": [val_run_1, val_run_223, val_run_234, ...],
>>> ...
>>> }
```
When the param is of type [ArtifactsType](/docs/core/specification/types/#v1artifactstype),
all files and dirs will be concatenated in a single list,
each value will be prefixed with the uuid (run path) of each run in the query result:
```python
>>> {
>>> "artifacts": {
>>> "file": [
>>> "run_3/subpath/files", "run_3/subpath2/file2",
>>> "run_4/subpath/files", "run_4/subpath2/file2",
>>> ...
>>> ],
>>> }
>>> ...
>>> }
```
> **Note**: the difference between using `artifacts.lineage_name`
> and [ArtifactsType](/docs/core/specification/types/#v1artifactstype),
> is that the former will only expose the path(s) based on any lineage logged
> during the runtime, the latter is a manual way of selecting specific files and dirs.
## Fields
### query
A valid query respecting
[Polyaxon Query Language](/docs/core/query-language/runs/#query)
```yaml
>>> joins:
>>> - query: "metrics.loss: <0.01, project.name: {{ globals.project_name}}, kind: job"
```
### sort
A valid sort respecting
[Polyaxon Query Language](/docs/core/query-language/runs/#sort)
```yaml
>>> joins:
>>> - sort: "created_at, -metrics.loss"
```
### limit
The maximum number of runs to join based on the query/sort condition.
> **Note**: at the moment we have a hard limit, `5000`, on the number of upstream runs to join.
```yaml
>>> joins:
>>> - limit: "10"
```
### offset
An optional integer used for pagination.
```yaml
>>> joins:
>>> - offset: "100"
```
### params
Similar to the default [params specification](/docs/core/specification/params/)
with the exception that it does not accept the `ref` key.
The reference is generated automatically based on the search performed by Polyaxon.
The fields supported: `value`, `context_only`, `connection`, `to_init`
"""
SCHEMA = JoinSchema
IDENTIFIER = "join"
REDUCED_ATTRIBUTES = [
"sort",
"limit",
"offset",
]
|
FrameBenchers/django | refs/heads/master | blog/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
openfisca/openfisca-qt | refs/heads/master | openfisca_qt/scripts/validation/check_num_table.py | 1 | # -*- coding:utf-8 -*-
#
# This file is part of OpenFisca.
# OpenFisca is a socio-fiscal microsimulation software
# Copyright © 2011 Clément Schaff, Mahdi Ben Jelloul
# Licensed under the terms of the GPLv3 or later license
# (see openfisca/__init__.py for details)
# Script to compute the aggregates for all the referenced years
import pdb
import os
from openfisca_core.simulations import SurveySimulation
from openfisca_france.data.sources.config import destination_dir
from openfisca_qt.plugins.survey.aggregates import Aggregates
from openfisca_qt.plugins.survey.inequality import Inequality
from pandas import ExcelWriter, ExcelFile, HDFStore
import pandas.rpy.common as com
fname_all = "aggregates_inflated_loyers.xlsx"
fname_all = os.path.join(destination_dir, fname_all)
num_output = None
def compar_num_table():
writer = None
years = range(2006,2007)
tot1 = 0
tot3 = 0
filename = destination_dir+'output3.h5'
store = HDFStore(filename)
for year in years:
yr = str(year)
# fname = "Agg_%s.%s" %(str(yr), "xls")
simu = SurveySimulation()
simu.set_config(year = yr)
simu.set_param()
import time
deb3 = time.clock()
sous_ech = [6000080, 6000080, 6000195, 6000195, 6000288, 6000288, 6000499, 6000499, 6000531, 6000531, 6000542, 6000542]
sous_ech = [6000191, 6000191, 6000531, 6000614, 6000195, 6000195, 6000499, 6000499, 6000531, 6000614, 6000531,
6000614, 6000531, 6000531, 6000195, 6000195, 6000288, 6000288, 6000499, 6000499, 6000531, 6000542,
6000542, 6000614, 6000191]
#al
sous_ech = [6000122, 6000865, 6001256]
# typ_men
sous_ech = [6006630, 6006753, 6008508]
# foy
sous_ech = [6036028, 6028397, 6019248]
sous_ech = None
simu.set_survey(num_table=3, subset=sous_ech)
simu.compute()
agg3 = Aggregates()
for ent in ['ind','men','foy','fam']:
tab = simu.output_table.table3[ent]
renam={}
renam['wprm_'+ent] = 'wprm'
tab = tab.rename(columns=renam)
agg3.set_simulation(simu)
agg3.compute()
fin3 = time.clock()
# if writer is None:
# writer = ExcelWriter(str(fname_all))
fname_all = os.path.join(destination_dir, 'agg3.xlsx')
agg3.aggr_frame.to_excel(fname_all, yr, index= False, header= True)
# export to csv to run compar in R
for ent in ['ind','men','foy','fam']:
dir_name = destination_dir + ent +'.csv'
tab = simu.output_table.table3[ent]
renam ={}
renam['wprm_'+ent] = 'wprm'
if ent=='ind':
ident = ["idmen","quimen","idfam","quifam","idfoy","quifoy"]
else:
ident = ["idmen","idfam","idfoy"]
for nom in ident:
renam[nom+'_'+ent] = nom
tab = tab.rename(columns=renam)
order_var = ident+list(tab.columns - ident)
tab.sort(['idmen','idfam','idfoy']).ix[:num_output,order_var].to_csv(dir_name)
deb1 = time.clock()
simu.set_survey(num_table=1, subset=sous_ech)
simu.compute()
agg = Aggregates()
agg.set_simulation(simu)
agg.compute()
fin1 = time.clock()
# export to csv to run compar in R
dir_name = destination_dir + 'en1' +'.csv'
tab = simu.output_table.table
tab.drop(['idfam_fam','idfam_foy','idfam_men','idfoy_fam','idfoy_foy','idfoy_men','idmen_men','idmen_fam','idmen_foy','wprm_foy','wprm_fam'],
axis=1, inplace=True)
renam ={}
ent = 'ind'
renam['wprm_'+ent] = 'wprm'
ident = ["noi","idmen","quimen","idfam","quifam","idfoy","quifoy"]
for nom in ident:
renam[nom+'_'+ent] = nom
tab = tab.rename(columns=renam)
order_var = ident+list(tab.columns - ident)
tab.sort(['idmen','idfam','idfoy']).ix[:num_output,order_var].to_csv(dir_name)
# if writer is None:
# writer = ExcelWriter(str(fname_all))
fname_all = os.path.join(destination_dir, 'agg1.xlsx')
agg.aggr_frame.to_excel(fname_all, yr, index= False, header= True)
del simu
del agg
import gc
gc.collect()
tot1 += fin1 - deb1
tot3 += fin3 - deb3
print "Time to process 1 table :" +str(fin1 - deb1)
print "Time to process 3 table :" +str(fin3 - deb3)
print tot1, tot3, tot3- tot1
if __name__ == '__main__':
compar_num_table()
|
ashwinirudrappa/zulip | refs/heads/master | zerver/context_processors.py | 126 | from __future__ import absolute_import
from django.conf import settings
import ujson
from zproject.backends import password_auth_enabled, dev_auth_enabled, google_auth_enabled
def add_settings(request):
realm = request.user.realm if hasattr(request.user, "realm") else None
return {
# We use the not_voyager variable name so that templates
# will render even if the appropriate context is not provided
# to the template
'not_voyager': not settings.VOYAGER,
'zulip_com': settings.ZULIP_COM,
'zulip_admin': settings.ZULIP_ADMINISTRATOR,
'login_url': settings.HOME_NOT_LOGGED_IN,
'only_sso': settings.ONLY_SSO,
'external_api_path': settings.EXTERNAL_API_PATH,
'external_api_uri': settings.EXTERNAL_API_URI,
'external_uri_scheme': settings.EXTERNAL_URI_SCHEME,
'api_site_required': settings.EXTERNAL_API_PATH != "api.zulip.com",
'email_integration_enabled': settings.EMAIL_GATEWAY_BOT != "",
'email_gateway_example': settings.EMAIL_GATEWAY_EXAMPLE,
'password_auth_enabled': password_auth_enabled(realm),
'dev_auth_enabled': dev_auth_enabled(),
'google_auth_enabled': google_auth_enabled(),
}
def add_metrics(request):
return {
'dropboxAppKey': settings.DROPBOX_APP_KEY
}
|
jeremyh/eo-datasets | refs/heads/eodatasets3 | versioneer.py | 2 | # Version: 0.18
# flake8: noqa
"""The Versioneer - like a rocketeer, but for versions.
The Versioneer
==============
* like a rocketeer, but for versions!
* https://github.com/warner/python-versioneer
* Brian Warner
* License: Public Domain
* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
* [![Latest Version]
(https://pypip.in/version/versioneer/badge.svg?style=flat)
](https://pypi.python.org/pypi/versioneer/)
* [![Build Status]
(https://travis-ci.org/warner/python-versioneer.png?branch=master)
](https://travis-ci.org/warner/python-versioneer)
This is a tool for managing a recorded version number in distutils-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
system, and maybe making new tarballs.
## Quick Install
* `pip install versioneer` to somewhere in your $PATH
* add a `[versioneer]` section to your setup.cfg (see below)
* run `versioneer install` in your source tree, commit the results
## Version Identifiers
Source trees come from a variety of places:
* a version-control system checkout (mostly used by developers)
* a nightly tarball, produced by build automation
* a snapshot tarball, produced by a web-based VCS browser, like github's
"tarball from tag" feature
* a release tarball, produced by "setup.py sdist", distributed through PyPI
Within each source tree, the version identifier (either a string or a number,
this tool is format-agnostic) can come from a variety of places:
* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
about recent "tags" and an absolute revision-id
* the name of the directory into which the tarball was unpacked
* an expanded VCS keyword ($Id$, etc)
* a `_version.py` created by some earlier build step
For released software, the version identifier is closely related to a VCS
tag. Some projects use tag names that include more than just the version
string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
needs to strip the tag prefix to extract the version identifier. For
unreleased software (between tags), the version identifier should provide
enough information to help developers recreate the same tree, while also
giving them an idea of roughly how old the tree is (after version 1.2, before
version 1.3). Many VCS systems can report a description that captures this,
for example `git describe --tags --dirty --always` reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
uncommitted changes).
The version identifier is used for multiple purposes:
* to allow the module to self-identify its version: `myproject.__version__`
* to choose a name and prefix for a 'setup.py sdist' tarball
## Theory of Operation
Versioneer works by adding a special `_version.py` file into your source
tree, where your `__init__.py` can import it. This `_version.py` knows how to
dynamically ask the VCS tool for version information at import time.
`_version.py` also contains `$Revision$` markers, and the installation
process marks `_version.py` to have this marker rewritten with a tag name
during the `git archive` command. As a result, generated tarballs will
contain enough information to get the proper version.
To allow `setup.py` to compute a version too, a `versioneer.py` is added to
the top level of your source tree, next to `setup.py` and the `setup.cfg`
that configures it. This overrides several distutils/setuptools commands to
compute the version when invoked, and changes `setup.py build` and `setup.py
sdist` to replace `_version.py` with a small static file that contains just
the generated version data.
## Installation
See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
## Version-String Flavors
Code which uses Versioneer can learn about its version string at runtime by
importing `_version` from your main `__init__.py` file and running the
`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
import the top-level `versioneer.py` and run `get_versions()`.
Both functions return a dictionary with different flavors of version
information:
* `['version']`: A condensed version string, rendered using the selected
style. This is the most commonly used value for the project's version
string. The default "pep440" style yields strings like `0.11`,
`0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
below for alternative styles.
* `['full-revisionid']`: detailed revision identifier. For Git, this is the
full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
commit date in ISO 8601 format. This will be None if the date is not
available.
* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
this is only accurate if run in a VCS checkout, otherwise it is likely to
be False or None
* `['error']`: if the version string could not be computed, this will be set
to a string describing the problem, otherwise it will be None. It may be
useful to throw an exception in setup.py if this is set, to avoid e.g.
creating tarballs with a version string of "unknown".
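For example (illustrative values only, reusing the identifiers quoted
above), a dirty checkout two commits past the "0.11" tag might yield:
    {'version': '0.11+2.g1076c97.dirty',
     'full-revisionid': '1076c978a8d3cfc70f408fe5974aa6c092c949ac',
     'dirty': True,
     'error': None,
     'date': '2018-01-01T12:34:56+0000'}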
Some variants are more useful than others. Including `full-revisionid` in a
bug report should allow developers to reconstruct the exact code being tested
(or indicate the presence of local changes that should be shared with the
developers). `version` is suitable for display in an "about" box or a CLI
`--version` output: it can be easily compared against release notes and lists
of bugs fixed in various releases.
The installer adds the following text to your `__init__.py` to place a basic
version in `YOURPROJECT.__version__`:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
## Styles
The setup.cfg `style=` configuration controls how the VCS information is
rendered into a version string.
The default style, "pep440", produces a PEP440-compliant string, equal to the
un-prefixed tag name for actual releases, and containing an additional "local
version" section with more detail for in-between builds. For Git, this is
TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
that this commit is two revisions ("+2") beyond the "0.11" tag. For released
software (exactly equal to a known tag), the identifier will only contain the
stripped tag, e.g. "0.11".
Other styles are available. See [details.md](details.md) in the Versioneer
source tree for descriptions.
## Debugging
Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
to return a version of "0+unknown". To investigate the problem, run `setup.py
version`, which will run the version-lookup code in a verbose mode, and will
display the full contents of `get_versions()` (including the `error` string,
which may help identify what went wrong).
## Known Limitations
Some situations are known to cause problems for Versioneer. This details the
most significant ones. More can be found on Github
[issues page](https://github.com/warner/python-versioneer/issues).
### Subprojects
Versioneer has limited support for source trees in which `setup.py` is not in
the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are
two common reasons why `setup.py` might not be in the root:
* Source trees which contain multiple subprojects, such as
[Buildbot](https://github.com/buildbot/buildbot), which contains both
"master" and "slave" subprojects, each with their own `setup.py`,
`setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
distributions (and upload multiple independently-installable tarballs).
* Source trees whose main purpose is to contain a C library, but which also
provide bindings to Python (and perhaps other languages) in subdirectories.
Versioneer will look for `.git` in parent directories, and most operations
should get the right version string. However `pip` and `setuptools` have bugs
and implementation details which frequently cause `pip install .` from a
subproject directory to fail to find a correct version string (so it usually
defaults to `0+unknown`).
`pip install --editable .` should work correctly. `setup.py install` might
work too.
Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
some later version.
[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
this issue. The discussion in
[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
issue from the Versioneer side in more detail.
[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
pip to let Versioneer work correctly.
Versioneer-0.16 and earlier only looked for a `.git` directory next to the
`setup.cfg`, so subprojects were completely unsupported with those releases.
### Editable installs with setuptools <= 18.5
`setup.py develop` and `pip install --editable .` allow you to install a
project into a virtualenv once, then continue editing the source code (and
test) without re-installing after every change.
"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
convenient way to specify executable scripts that should be installed along
with the python package.
These both work as expected when using modern setuptools. When using
setuptools-18.5 or earlier, however, certain operations will cause
`pkg_resources.DistributionNotFound` errors when running the entrypoint
script, which must be resolved by re-installing the package. This happens
when the install happens with one version, then the egg_info data is
regenerated while a different version is checked out. Many setup.py commands
cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
a different virtualenv), so this can be surprising.
[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
this one, but upgrading to a newer version of setuptools should probably
resolve it.
### Unicode version strings
While Versioneer works (and is continually tested) with both Python 2 and
Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
Newer releases probably generate unicode version strings on py2. It's not
clear that this is wrong, but it may be surprising for applications when they
write these strings to a network connection or include them in bytes-oriented
APIs like cryptographic checksums.
[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
this question.
## Updating Versioneer
To upgrade your project to a new release of Versioneer, do the following:
* install the new Versioneer (`pip install -U versioneer` or equivalent)
* edit `setup.cfg`, if necessary, to include any new configuration settings
indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
* re-run `versioneer install` in your source tree, to replace
`SRC/_version.py`
* commit any changed files
## Future Directions
This tool is designed to be easily extended to other version-control
systems: all VCS-specific components are in separate directories like
src/git/ . The top-level `versioneer.py` script is assembled from these
components by running make-versioneer.py . In the future, make-versioneer.py
will take a VCS name as an argument, and will construct a version of
`versioneer.py` that is specific to the given VCS. It might also take the
configuration arguments that are currently provided manually during
installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
## License
To make Versioneer easier to embed, all its code is dedicated to the public
domain. The `_version.py` that it creates is also in the public domain.
Specifically, both are released under the Creative Commons "Public Domain
Dedication" license (CC0-1.0), as described in
https://creativecommons.org/publicdomain/zero/1.0/ .
"""
from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno
import json
import os
import re
import subprocess
import sys
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_root():
"""Get the project root directory.
We require that all commands are run from the project root, i.e. the
directory that contains setup.py, setup.cfg, and versioneer.py .
"""
root = os.path.realpath(os.path.abspath(os.getcwd()))
setup_py = os.path.join(root, "setup.py")
versioneer_py = os.path.join(root, "versioneer.py")
if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
# allow 'python path/to/setup.py COMMAND'
root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
setup_py = os.path.join(root, "setup.py")
versioneer_py = os.path.join(root, "versioneer.py")
if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
err = (
"Versioneer was unable to run the project root directory. "
"Versioneer requires setup.py to be executed from "
"its immediate directory (like 'python setup.py COMMAND'), "
"or in a way that lets it use sys.argv[0] to find the root "
"(like 'python path/to/setup.py COMMAND')."
)
raise VersioneerBadRootError(err)
try:
# Certain runtime workflows (setup.py install/develop in a setuptools
# tree) execute all dependencies in a single python process, so
# "versioneer" may be imported multiple times, and python's shared
# module-import table will cache the first one. So we can't use
# os.path.dirname(__file__), as that will find whichever
# versioneer.py was first imported, even in later projects.
me = os.path.realpath(os.path.abspath(__file__))
me_dir = os.path.normcase(os.path.splitext(me)[0])
vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
if me_dir != vsr_dir:
print(
"Warning: build in %s is using versioneer.py from %s"
% (os.path.dirname(me), versioneer_py)
)
except NameError:
pass
return root
def get_config_from_root(root):
"""Read the project setup.cfg file to determine Versioneer config."""
# This might raise EnvironmentError (if setup.cfg is missing), or
# configparser.NoSectionError (if it lacks a [versioneer] section), or
# configparser.NoOptionError (if it lacks "VCS="). See the docstring at
# the top of versioneer.py for instructions on writing your setup.cfg .
setup_cfg = os.path.join(root, "setup.cfg")
parser = configparser.SafeConfigParser()
with open(setup_cfg, "r") as f:
parser.readfp(f)
VCS = parser.get("versioneer", "VCS") # mandatory
def get(parser, name):
if parser.has_option("versioneer", name):
return parser.get("versioneer", name)
return None
cfg = VersioneerConfig()
cfg.VCS = VCS
cfg.style = get(parser, "style") or ""
cfg.versionfile_source = get(parser, "versionfile_source")
cfg.versionfile_build = get(parser, "versionfile_build")
cfg.tag_prefix = get(parser, "tag_prefix")
if cfg.tag_prefix in ("''", '""'):
cfg.tag_prefix = ""
cfg.parentdir_prefix = get(parser, "parentdir_prefix")
cfg.verbose = get(parser, "verbose")
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
# these dictionaries contain VCS-specific tools
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen(
[c] + args,
cwd=cwd,
env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr else None),
)
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
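# Example (illustrative): run_command(["git"], ["rev-parse", "HEAD"], cwd=root)
# returns (stdout, 0) on success, (None, returncode) when the command exits
# non-zero, and (None, None) if none of the listed commands could be found.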
LONG_VERSION_PY[
"git"
] = '''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "%(STYLE)s"
cfg.tag_prefix = "%(TAG_PREFIX)s"
cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %%s" %% dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% dispcmd)
print("stdout was %%s" %% stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %%s but none started with prefix %%s" %%
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs - tags))
if verbose:
print("likely tags: %%s" %% ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %%s" %% r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %%s not under git control" %% root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%%s*" %% tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%%s'"
%% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%%s' doesn't start with prefix '%%s'"
print(fmt %% (full_tag, tag_prefix))
pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
%% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%%d" %% pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%%d" %% pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%%s" %% pieces["short"]
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%%s" %% pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%%s'" %% style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
'''
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG) :] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r"\d", r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix) :]
if verbose:
print("picking %s" % r)
return {
"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False,
"error": None,
"date": date,
}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {
"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False,
"error": "no suitable tags",
"date": None,
}
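# Example (illustrative): in an exported git-archive tarball, the refnames
# keyword may expand to ' (HEAD -> master, tag: 0.11)'; with tag_prefix ""
# the function above then returns {'version': '0.11', 'dirty': False, ...}.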
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(
GITS,
[
"describe",
"--tags",
"--dirty",
"--always",
"--long",
"--match",
"%s*" % tag_prefix,
],
cwd=root,
)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[: git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % (
full_tag,
tag_prefix,
)
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix) :]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[
0
].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def do_vcs_install(manifest_in, versionfile_source, ipy):
"""Git-specific installation logic for Versioneer.
For Git, this means creating/changing .gitattributes to mark _version.py
for export-subst keyword substitution.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
files = [manifest_in, versionfile_source]
if ipy:
files.append(ipy)
try:
me = __file__
if me.endswith(".pyc") or me.endswith(".pyo"):
me = os.path.splitext(me)[0] + ".py"
versioneer_file = os.path.relpath(me)
except NameError:
versioneer_file = "versioneer.py"
files.append(versioneer_file)
present = False
try:
f = open(".gitattributes", "r")
for line in f.readlines():
if line.strip().startswith(versionfile_source):
if "export-subst" in line.strip().split()[1:]:
present = True
f.close()
except EnvironmentError:
pass
if not present:
f = open(".gitattributes", "a+")
f.write("%s export-subst\n" % versionfile_source)
f.close()
files.append(".gitattributes")
run_command(GITS, ["add", "--"] + files)
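# Example (illustrative): for versionfile_source = 'src/myproject/_version.py'
# this appends the line 'src/myproject/_version.py export-subst' to
# .gitattributes and stages the touched files with 'git add'.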
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {
"version": dirname[len(parentdir_prefix) :],
"full-revisionid": None,
"dirty": False,
"error": None,
"date": None,
}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print(
"Tried directories %s but none started with prefix %s"
% (str(rootdirs), parentdir_prefix)
)
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
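# Example (illustrative): with parentdir_prefix = 'myproject-', an sdist
# unpacked into /tmp/myproject-1.2.3/ yields
# {'version': '1.2.3', 'full-revisionid': None, 'dirty': False, ...}.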
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.18) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
version_json = '''
%s
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
"""
def versions_from_file(filename):
"""Try to determine the version from _version.py if present."""
try:
with open(filename) as f:
contents = f.read()
except EnvironmentError:
raise NotThisMethod("unable to read _version.py")
mo = re.search(
r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S
)
if not mo:
mo = re.search(
r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S
)
if not mo:
raise NotThisMethod("no version_json in _version.py")
return json.loads(mo.group(1))
def write_to_version_file(filename, versions):
"""Write the given version number to the given _version.py file."""
os.unlink(filename)
contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": "))
with open(filename, "w") as f:
f.write(SHORT_VERSION_PY % contents)
print("set %s to '%s'" % (filename, versions["version"]))
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
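# Example (illustrative): pieces = {'closest-tag': '0.11', 'distance': 2,
# 'short': '1076c97', 'dirty': True} renders here as '0.11+2.g1076c97.dirty'.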
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always --long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {
"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None,
}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {
"version": rendered,
"full-revisionid": pieces["long"],
"dirty": pieces["dirty"],
"error": None,
"date": pieces.get("date"),
}
class VersioneerBadRootError(Exception):
"""The project root directory is unknown or missing key files."""
def get_versions(verbose=False):
"""Get the project version from whatever source is available.
Returns dict with two keys: 'version' and 'full'.
"""
if "versioneer" in sys.modules:
# see the discussion in cmdclass.py:get_cmdclass()
del sys.modules["versioneer"]
root = get_root()
cfg = get_config_from_root(root)
assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
handlers = HANDLERS.get(cfg.VCS)
assert handlers, "unrecognized VCS '%s'" % cfg.VCS
verbose = verbose or cfg.verbose
assert (
cfg.versionfile_source is not None
), "please set versioneer.versionfile_source"
assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
versionfile_abs = os.path.join(root, cfg.versionfile_source)
# extract version from first of: _version.py, VCS command (e.g. 'git
# describe'), parentdir. This is meant to work for developers using a
# source checkout, for users of a tarball created by 'setup.py sdist',
# and for users of a tarball/zipball created by 'git archive' or github's
# download-from-tag feature or the equivalent in other VCSes.
get_keywords_f = handlers.get("get_keywords")
from_keywords_f = handlers.get("keywords")
if get_keywords_f and from_keywords_f:
try:
keywords = get_keywords_f(versionfile_abs)
ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
if verbose:
print("got version from expanded keyword %s" % ver)
return ver
except NotThisMethod:
pass
try:
ver = versions_from_file(versionfile_abs)
if verbose:
print("got version from file %s %s" % (versionfile_abs, ver))
return ver
except NotThisMethod:
pass
from_vcs_f = handlers.get("pieces_from_vcs")
if from_vcs_f:
try:
pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
ver = render(pieces, cfg.style)
if verbose:
print("got version from VCS %s" % ver)
return ver
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
if verbose:
print("got version from parentdir %s" % ver)
return ver
except NotThisMethod:
pass
if verbose:
print("unable to compute version")
return {
"version": "0+unknown",
"full-revisionid": None,
"dirty": None,
"error": "unable to compute version",
"date": None,
}
def get_version():
"""Get the short version string for this project."""
return get_versions()["version"]
def get_cmdclass():
"""Get the custom setuptools/distutils subclasses used by Versioneer."""
if "versioneer" in sys.modules:
del sys.modules["versioneer"]
# this fixes the "python setup.py develop" case (also 'install' and
# 'easy_install .'), in which subdependencies of the main project are
# built (using setup.py bdist_egg) in the same python process. Assume
# a main project A and a dependency B, which use different versions
# of Versioneer. A's setup.py imports A's Versioneer, leaving it in
# sys.modules by the time B's setup.py is executed, causing B to run
# with the wrong versioneer. Setuptools wraps the sub-dep builds in a
sandbox that restores sys.modules to its pre-build state, so the
# parent is protected against the child's "import versioneer". By
# removing ourselves from sys.modules here, before the child build
# happens, we protect the child from the parent's versioneer too.
# Also see https://github.com/warner/python-versioneer/issues/52
cmds = {}
# we add "version" to both distutils and setuptools
from distutils.core import Command
class cmd_version(Command):
description = "report generated version string"
user_options = []
boolean_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
vers = get_versions(verbose=True)
print("Version: %s" % vers["version"])
print(" full-revisionid: %s" % vers.get("full-revisionid"))
print(" dirty: %s" % vers.get("dirty"))
print(" date: %s" % vers.get("date"))
if vers["error"]:
print(" error: %s" % vers["error"])
cmds["version"] = cmd_version
# we override "build_py" in both distutils and setuptools
#
# most invocation pathways end up running build_py:
# distutils/build -> build_py
# distutils/install -> distutils/build ->..
# setuptools/bdist_wheel -> distutils/install ->..
# setuptools/bdist_egg -> distutils/install_lib -> build_py
# setuptools/install -> bdist_egg ->..
# setuptools/develop -> ?
# pip install:
# copies source tree to a tempdir before running egg_info/etc
# if .git isn't copied too, 'git describe' will fail
# then does setup.py bdist_wheel, or sometimes setup.py install
# setup.py egg_info -> ?
# we override different "build_py" commands for both environments
if "setuptools" in sys.modules:
from setuptools.command.build_py import build_py as _build_py
else:
from distutils.command.build_py import build_py as _build_py
class cmd_build_py(_build_py):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
_build_py.run(self)
# now locate _version.py in the new build/ directory and replace
# it with an updated value
if cfg.versionfile_build:
target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build)
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
cmds["build_py"] = cmd_build_py
if "cx_Freeze" in sys.modules: # cx_freeze enabled?
from cx_Freeze.dist import build_exe as _build_exe
# nczeczulin reports that py2exe won't like the pep440-style string
# as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
# setup(console=[{
# "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
# "product_version": versioneer.get_version(),
# ...
class cmd_build_exe(_build_exe):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
target_versionfile = cfg.versionfile_source
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
_build_exe.run(self)
os.unlink(target_versionfile)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(
LONG
% {
"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
}
)
cmds["build_exe"] = cmd_build_exe
del cmds["build_py"]
if "py2exe" in sys.modules: # py2exe enabled?
try:
from py2exe.distutils_buildexe import py2exe as _py2exe # py3
except ImportError:
from py2exe.build_exe import py2exe as _py2exe # py2
class cmd_py2exe(_py2exe):
def run(self):
root = get_root()
cfg = get_config_from_root(root)
versions = get_versions()
target_versionfile = cfg.versionfile_source
print("UPDATING %s" % target_versionfile)
write_to_version_file(target_versionfile, versions)
_py2exe.run(self)
os.unlink(target_versionfile)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(
LONG
% {
"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
}
)
cmds["py2exe"] = cmd_py2exe
# we override different "sdist" commands for both environments
if "setuptools" in sys.modules:
from setuptools.command.sdist import sdist as _sdist
else:
from distutils.command.sdist import sdist as _sdist
class cmd_sdist(_sdist):
def run(self):
versions = get_versions()
self._versioneer_generated_versions = versions
# unless we update this, the command will keep using the old
# version
self.distribution.metadata.version = versions["version"]
return _sdist.run(self)
def make_release_tree(self, base_dir, files):
root = get_root()
cfg = get_config_from_root(root)
_sdist.make_release_tree(self, base_dir, files)
# now locate _version.py in the new base_dir directory
# (remembering that it may be a hardlink) and replace it with an
# updated value
target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
print("UPDATING %s" % target_versionfile)
write_to_version_file(
target_versionfile, self._versioneer_generated_versions
)
cmds["sdist"] = cmd_sdist
return cmds
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix =
parentdir_prefix = myproject-
You will also need to edit your setup.py to use the results:
import versioneer
setup(version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(), ...)
Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.
[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =
"""
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
def do_setup():
"""Main VCS-independent setup function for installing Versioneer."""
root = get_root()
try:
cfg = get_config_from_root(root)
except (
EnvironmentError,
configparser.NoSectionError,
configparser.NoOptionError,
) as e:
if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
print("Adding sample versioneer config to setup.cfg", file=sys.stderr)
with open(os.path.join(root, "setup.cfg"), "a") as f:
f.write(SAMPLE_CONFIG)
print(CONFIG_ERROR, file=sys.stderr)
return 1
print(" creating %s" % cfg.versionfile_source)
with open(cfg.versionfile_source, "w") as f:
LONG = LONG_VERSION_PY[cfg.VCS]
f.write(
LONG
% {
"DOLLAR": "$",
"STYLE": cfg.style,
"TAG_PREFIX": cfg.tag_prefix,
"PARENTDIR_PREFIX": cfg.parentdir_prefix,
"VERSIONFILE_SOURCE": cfg.versionfile_source,
}
)
ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py")
if os.path.exists(ipy):
try:
with open(ipy, "r") as f:
old = f.read()
except EnvironmentError:
old = ""
if INIT_PY_SNIPPET not in old:
print(" appending to %s" % ipy)
with open(ipy, "a") as f:
f.write(INIT_PY_SNIPPET)
else:
print(" %s unmodified" % ipy)
else:
print(" %s doesn't exist, ok" % ipy)
ipy = None
# Make sure both the top-level "versioneer.py" and versionfile_source
# (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
# they'll be copied into source distributions. Pip won't be able to
# install the package without this.
manifest_in = os.path.join(root, "MANIFEST.in")
simple_includes = set()
try:
with open(manifest_in, "r") as f:
for line in f:
if line.startswith("include "):
for include in line.split()[1:]:
simple_includes.add(include)
except EnvironmentError:
pass
# That doesn't cover everything MANIFEST.in can do
# (http://docs.python.org/2/distutils/sourcedist.html#commands), so
# it might give some false negatives. Appending redundant 'include'
# lines is safe, though.
if "versioneer.py" not in simple_includes:
print(" appending 'versioneer.py' to MANIFEST.in")
with open(manifest_in, "a") as f:
f.write("include versioneer.py\n")
else:
print(" 'versioneer.py' already in MANIFEST.in")
if cfg.versionfile_source not in simple_includes:
print(
" appending versionfile_source ('%s') to MANIFEST.in"
% cfg.versionfile_source
)
with open(manifest_in, "a") as f:
f.write("include %s\n" % cfg.versionfile_source)
else:
print(" versionfile_source already in MANIFEST.in")
# Make VCS-specific changes. For git, this means creating/changing
# .gitattributes to mark _version.py for export-subst keyword
# substitution.
do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
return 0
def scan_setup_py():
"""Validate the contents of setup.py against Versioneer's expectations."""
found = set()
setters = False
errors = 0
with open("setup.py", "r") as f:
for line in f.readlines():
if "import versioneer" in line:
found.add("import")
if "versioneer.get_cmdclass()" in line:
found.add("cmdclass")
if "versioneer.get_version()" in line:
found.add("get_version")
if "versioneer.VCS" in line:
setters = True
if "versioneer.versionfile_source" in line:
setters = True
if len(found) != 3:
print("")
print("Your setup.py appears to be missing some important items")
print("(but I might be wrong). Please make sure it has something")
print("roughly like the following:")
print("")
print(" import versioneer")
print(" setup( version=versioneer.get_version(),")
print(" cmdclass=versioneer.get_cmdclass(), ...)")
print("")
errors += 1
if setters:
print("You should remove lines like 'versioneer.VCS = ' and")
print("'versioneer.versionfile_source = ' . This configuration")
print("now lives in setup.cfg, and should be removed from setup.py")
print("")
errors += 1
return errors
if __name__ == "__main__":
cmd = sys.argv[1]
if cmd == "setup":
errors = do_setup()
errors += scan_setup_py()
if errors:
sys.exit(1)
|
bigown/SOpt | refs/heads/master | Python/ConvertIntToByte.py | 2 | print((65).to_bytes(1, byteorder='big'))  # prints b'A' (one big-endian byte)
print(bytes([10,20,30,40,50,60,70,80,90,100]))  # prints b'\n\x14\x1e(2<FPZd'
#https://pt.stackoverflow.com/q/270545/101
|
LS80/script.module.pyrollbar | refs/heads/master | lib/rollbar/logger.py | 1 | """
Hooks for integrating with the python logging framework.
Usage:
import logging
import rollbar
from rollbar.logger import RollbarHandler
rollbar.init('ACCESS_TOKEN', 'ENVIRONMENT')
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# report ERROR and above to Rollbar
rollbar_handler = RollbarHandler()
rollbar_handler.setLevel(logging.ERROR)
# attach the handler to the logger
logger.addHandler(rollbar_handler)
"""
import copy
import logging
import threading
import time
import rollbar
class RollbarHandler(logging.Handler):
SUPPORTED_LEVELS = set(('debug', 'info', 'warning', 'error', 'critical'))
_history = threading.local()
def __init__(self,
access_token=None,
environment=None,
level=logging.INFO,
history_size=10,
history_level=logging.DEBUG):
logging.Handler.__init__(self)
if access_token is not None:
rollbar.init(access_token, environment)
self.notify_level = level
self.history_size = history_size
if history_size > 0:
self._history.records = []
self.setHistoryLevel(history_level)
def setLevel(self, level):
"""
Override so we set the effective level for which
log records we notify Rollbar about instead of which
records we save to the history.
"""
self.notify_level = level
def setHistoryLevel(self, level):
"""
Use this method to determine which records we record history
for. Use setLevel() to determine which level we report records
to Rollbar for.
"""
logging.Handler.setLevel(self, level)
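# Example (illustrative): with handler.setLevel(logging.ERROR) and
# handler.setHistoryLevel(logging.DEBUG), only ERROR and above are reported
# to Rollbar, but each report carries the last `history_size` DEBUG+ records.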
def emit(self, record):
level = record.levelname.lower()
if level not in self.SUPPORTED_LEVELS:
return
exc_info = record.exc_info
# use the original message, not the formatted one
message = record.msg
extra_data = {
'args': record.args,
'record': {
'created': record.created,
'funcName': record.funcName,
'lineno': record.lineno,
'module': record.module,
'name': record.name,
'pathname': record.pathname,
'process': record.process,
'processName': record.processName,
'relativeCreated': record.relativeCreated,
'thread': record.thread,
'threadName': record.threadName
}
}
extra_data.update(getattr(record, 'extra_data', {}))
payload_data = getattr(record, 'payload_data', {})
self._add_history(record, payload_data)
# after we've added the history data, check to see if the
# notify level is satisfied
if record.levelno < self.notify_level:
return
# Wait until we know we're going to send a report before trying to
# load the request
request = getattr(record, "request", None) or rollbar.get_request()
uuid = None
try:
# when not in an exception handler, exc_info == (None, None, None)
if exc_info and exc_info[0]:
if message:
message_template = {
'body': {
'trace': {
'exception': {
'description': message
}
}
}
}
payload_data = rollbar.dict_merge(payload_data, message_template)
uuid = rollbar.report_exc_info(exc_info,
level=level,
request=request,
extra_data=extra_data,
payload_data=payload_data)
else:
uuid = rollbar.report_message(message,
level=level,
request=request,
extra_data=extra_data,
payload_data=payload_data)
except:
self.handleError(record)
else:
if uuid:
record.rollbar_uuid = uuid
def _add_history(self, record, payload_data):
if hasattr(self._history, 'records'):
records = self._history.records
history = list(records[-self.history_size:])
if history:
history_data = [self._build_history_data(r) for r in history]
payload_data.setdefault('server', {})['history'] = history_data
records.append(record)
# prune the messages if we have too many
self._history.records = list(records[-self.history_size:])
def _build_history_data(self, record):
data = {'timestamp': record.created,
'message': record.getMessage()}
if hasattr(record, 'rollbar_uuid'):
data['uuid'] = record.rollbar_uuid
return data
|
thismatters/biometrics-scanner | refs/heads/master | headless.py | 1 | from time import sleep
from daqthread import DAQThread
import sys
import termios
import fcntl
import os
def myGetch():
fd = sys.stdin.fileno()
oldterm = termios.tcgetattr(fd)
newattr = termios.tcgetattr(fd)
newattr[3] = newattr[3] & ~termios.ICANON & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSANOW, newattr)
oldflags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, oldflags | os.O_NONBLOCK)
c = ''
try:
while 1:
try:
c = sys.stdin.read(1)
break
except IOError: pass
finally:
termios.tcsetattr(fd, termios.TCSAFLUSH, oldterm)
fcntl.fcntl(fd, fcntl.F_SETFL, oldflags)
return c
def print_instructions():
print "Press [space] to mark current time on record, press [q] to quit recording"
return myGetch()
if __name__ == '__main__':
run_duration = 6
print 'Starting'
daqThread = DAQThread()
daqThread.be_quiet()
daqThread.start()
print 'Started'
while 1:
keystroke = print_instructions()
if keystroke == ' ':
daqThread.redraw_lock()
daqThread.add_mark()
print 'Mark %d added at t=%.1f. \nCurrent pulse rate: %.1f BPM, \nCurrent dermal response: %.1f kOhms' % (daqThread.mark_count(), daqThread.get_last('time'), daqThread.get_last('bpm1'), daqThread.get_last('edr'))
daqThread.redraw_lock_release()
if keystroke == 'q' or keystroke == 'Q':
break
# minute_count = 0
# try:
# while minute_count < run_duration:
# sleep(60)
# daqThread.add_mark()
# minute_count += 1
# print 'Minute: %d of %d' % (minute_count, run_duration)
# except KeyboardInterrupt:
# ''''''
print 'Stopping'
daqThread.stop()
print 'Exiting'
exit()
|
adrfer/swift | refs/heads/master | utils/pass-pipeline/src/passes.py | 12 |
from pass_pipeline import Pass
# TODO: This should not be hard coded. Create a tool in the compiler that knows
# how to dump the passes and the pipelines themselves.
AADumper = Pass('AADumper')
ABCOpt = Pass('ABCOpt')
AllocBoxToStack = Pass('AllocBoxToStack')
CFGPrinter = Pass('CFGPrinter')
COWArrayOpts = Pass('COWArrayOpts')
CSE = Pass('CSE')
CapturePromotion = Pass('CapturePromotion')
CapturePropagation = Pass('CapturePropagation')
ClosureSpecializer = Pass('ClosureSpecializer')
CodeMotion = Pass('CodeMotion')
CopyForwarding = Pass('CopyForwarding')
DCE = Pass('DCE')
DeadFunctionElimination = Pass('DeadFunctionElimination')
DeadObjectElimination = Pass('DeadObjectElimination')
DefiniteInitialization = Pass('DefiniteInitialization')
DiagnoseUnreachable = Pass('DiagnoseUnreachable')
DiagnosticConstantPropagation = Pass('DiagnosticConstantPropagation')
EarlyInliner = Pass('EarlyInliner')
EmitDFDiagnostics = Pass('EmitDFDiagnostics')
FunctionSignatureOpts = Pass('FunctionSignatureOpts')
GlobalARCOpts = Pass('GlobalARCOpts')
GlobalLoadStoreOpts = Pass('GlobalLoadStoreOpts')
GlobalOpt = Pass('GlobalOpt')
IVInfoPrinter = Pass('IVInfoPrinter')
InOutDeshadowing = Pass('InOutDeshadowing')
InstCount = Pass('InstCount')
LICM = Pass('LICM')
LateInliner = Pass('LateInliner')
LoopInfoPrinter = Pass('LoopInfoPrinter')
LoopRotate = Pass('LoopRotate')
LowerAggregateInstrs = Pass('LowerAggregateInstrs')
MandatoryInlining = Pass('MandatoryInlining')
Mem2Reg = Pass('Mem2Reg')
NoReturnFolding = Pass('NoReturnFolding')
PerfInliner = Pass('PerfInliner')
PerformanceConstantPropagation = Pass('PerformanceConstantPropagation')
PredictableMemoryOptimizations = Pass('PredictableMemoryOptimizations')
SILCleanup = Pass('SILCleanup')
SILCombine = Pass('SILCombine')
SILLinker = Pass('SILLinker')
SROA = Pass('SROA')
SimplifyCFG = Pass('SimplifyCFG')
SpeculativeDevirtualizer = Pass('SpeculativeDevirtualizer')
SplitAllCriticalEdges = Pass('SplitAllCriticalEdges')
SplitNonCondBrCriticalEdges = Pass('SplitNonCondBrCriticalEdges')
StripDebugInfo = Pass('StripDebugInfo')
SwiftArrayOpts = Pass('SwiftArrayOpts')
PASSES = [
AADumper,
ABCOpt,
AllocBoxToStack,
CFGPrinter,
COWArrayOpts,
CSE,
CapturePromotion,
CapturePropagation,
ClosureSpecializer,
CodeMotion,
CopyForwarding,
DCE,
DeadFunctionElimination,
DeadObjectElimination,
DefiniteInitialization,
DiagnoseUnreachable,
DiagnosticConstantPropagation,
EarlyInliner,
EmitDFDiagnostics,
FunctionSignatureOpts,
GlobalARCOpts,
GlobalLoadStoreOpts,
GlobalOpt,
IVInfoPrinter,
InOutDeshadowing,
InstCount,
LICM,
LateInliner,
LoopInfoPrinter,
LoopRotate,
LowerAggregateInstrs,
MandatoryInlining,
Mem2Reg,
NoReturnFolding,
PerfInliner,
PerformanceConstantPropagation,
PredictableMemoryOptimizations,
SILCleanup,
SILCombine,
SILLinker,
SROA,
SimplifyCFG,
SpeculativeDevirtualizer,
SplitAllCriticalEdges,
SplitNonCondBrCriticalEdges,
StripDebugInfo,
SwiftArrayOpts,
]
|
yongshengwang/hue | refs/heads/master | desktop/core/ext-py/pyasn1-0.1.8/pyasn1/type/namedval.py | 172 | # ASN.1 named integers
from pyasn1 import error
__all__ = [ 'NamedValues' ]
class NamedValues:
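    """Bidirectional mapping between symbolic names and their values for
    ASN.1 named integers. Items may be given as (name, value) tuples or as
    bare names, which default to their 1-based position in the argument
    list."""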
def __init__(self, *namedValues):
        self.nameToValIdx = {}
        self.valToNameIdx = {}
self.namedValues = ()
automaticVal = 1
for namedValue in namedValues:
if isinstance(namedValue, tuple):
name, val = namedValue
else:
name = namedValue
val = automaticVal
if name in self.nameToValIdx:
raise error.PyAsn1Error('Duplicate name %s' % (name,))
self.nameToValIdx[name] = val
if val in self.valToNameIdx:
raise error.PyAsn1Error('Duplicate value %s=%s' % (name, val))
self.valToNameIdx[val] = name
self.namedValues = self.namedValues + ((name, val),)
automaticVal = automaticVal + 1
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, ', '.join([repr(x) for x in self.namedValues]))
def __str__(self): return str(self.namedValues)
def __eq__(self, other): return tuple(self) == tuple(other)
def __ne__(self, other): return tuple(self) != tuple(other)
def __lt__(self, other): return tuple(self) < tuple(other)
def __le__(self, other): return tuple(self) <= tuple(other)
def __gt__(self, other): return tuple(self) > tuple(other)
def __ge__(self, other): return tuple(self) >= tuple(other)
def __hash__(self): return hash(tuple(self))
def getName(self, value):
if value in self.valToNameIdx:
return self.valToNameIdx[value]
def getValue(self, name):
if name in self.nameToValIdx:
return self.nameToValIdx[name]
def __getitem__(self, i): return self.namedValues[i]
def __len__(self): return len(self.namedValues)
def __add__(self, namedValues):
return self.__class__(*self.namedValues + namedValues)
def __radd__(self, namedValues):
return self.__class__(*namedValues + tuple(self))
def clone(self, *namedValues):
return self.__class__(*tuple(self) + namedValues)
# XXX clone/subtype?
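# A short usage sketch (illustrative only, not part of the original module):
#
#   nv = NamedValues(('enabled', 1), ('disabled', 0))
#   nv.getValue('enabled')    # -> 1
#   nv.getName(0)             # -> 'disabled'
#   nv.clone(('unknown', 2))  # new NamedValues with one extra pair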
|
dushu1203/chromium.src | refs/heads/nw12 | tools/telemetry/telemetry/timeline/bounds_unittest.py | 95 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry.timeline import bounds
class BoundsTests(unittest.TestCase):
def testGetOverlap(self):
# Non overlap cases.
self.assertEquals(0, bounds.Bounds.GetOverlap(10, 20, 30, 40))
self.assertEquals(0, bounds.Bounds.GetOverlap(30, 40, 10, 20))
# Overlap cases.
self.assertEquals(10, bounds.Bounds.GetOverlap(10, 30, 20, 40))
self.assertEquals(10, bounds.Bounds.GetOverlap(20, 40, 10, 30))
# Inclusive cases.
self.assertEquals(10, bounds.Bounds.GetOverlap(10, 40, 20, 30))
self.assertEquals(10, bounds.Bounds.GetOverlap(20, 30, 10, 40))
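# Convenience guard (an addition; these tests normally run under Telemetry's
# own test runner) so the module can also be executed directly.
if __name__ == '__main__':
  unittest.main()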
|
hjanime/VisTrails | refs/heads/master | vistrails/db/versions/v1_0_4/persistence/sql/auto_gen.py | 1 | ###############################################################################
##
## Copyright (C) 2014-2015, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: [email protected]
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
"""generated automatically by auto_dao.py"""
from __future__ import division
from sql_dao import SQLDAO
from vistrails.db.versions.v1_0_4.domain import *
class DBMashupAliasSQLDAOBase(SQLDAO):
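    """Auto-generated DAO mapping 'mashup_alias' rows to DBMashupAlias
    objects and back."""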
def __init__(self, daoList):
self.daoList = daoList
self.table = 'mashup_alias'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_alias'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
parent = self.convertFromDB(row[2], 'long', 'int')
entity_id = self.convertFromDB(row[3], 'long', 'int')
entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
mashup_alias = DBMashupAlias(name=name,
id=id)
mashup_alias.db_parent = parent
mashup_alias.db_entity_id = entity_id
mashup_alias.db_entity_type = entity_type
mashup_alias.is_dirty = False
res[('mashup_alias', id)] = mashup_alias
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_alias'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
parent = self.convertFromDB(row[2], 'long', 'int')
entity_id = self.convertFromDB(row[3], 'long', 'int')
entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
mashup_alias = DBMashupAlias(name=name,
id=id)
mashup_alias.db_parent = parent
mashup_alias.db_entity_id = entity_id
mashup_alias.db_entity_type = entity_type
mashup_alias.is_dirty = False
res[('mashup_alias', id)] = mashup_alias
return res
def from_sql_fast(self, obj, all_objects):
if ('mashup', obj.db_parent) in all_objects:
p = all_objects[('mashup', obj.db_parent)]
p.db_add_alias(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_alias'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_alias'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_component is not None:
child = obj.db_component
child.db_mashup_alias = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'mashup_alias'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBGroupSQLDAOBase(SQLDAO):
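    """Auto-generated DAO mapping 'group_tbl' rows to DBGroup objects and
    back."""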
def __init__(self, daoList):
self.daoList = daoList
self.table = 'group_tbl'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_tbl'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
group = DBGroup(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
id=id)
group.db_parentType = parentType
group.db_entity_id = entity_id
group.db_entity_type = entity_type
group.db_parent = parent
group.is_dirty = False
res[('group', id)] = group
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_tbl'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
group = DBGroup(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
id=id)
group.db_parentType = parentType
group.db_entity_id = entity_id
group.db_entity_type = entity_type
group.db_parent = parent
group.is_dirty = False
res[('group', id)] = group
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_module(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_workflow is not None:
child = obj.db_workflow
child.db_group = obj.db_id
if obj.db_location is not None:
child = obj.db_location
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_functions:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_controlParameters:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'group_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBAddSQLDAOBase(SQLDAO):
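    """Auto-generated DAO mapping 'add_tbl' rows to DBAdd (add-operation)
    objects and back."""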
def __init__(self, daoList):
self.daoList = daoList
self.table = 'add_tbl'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'add_tbl'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
what = self.convertFromDB(row[1], 'str', 'varchar(255)')
objectId = self.convertFromDB(row[2], 'long', 'int')
parentObjId = self.convertFromDB(row[3], 'long', 'int')
parentObjType = self.convertFromDB(row[4], 'str', 'char(16)')
action = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
add = DBAdd(what=what,
objectId=objectId,
parentObjId=parentObjId,
parentObjType=parentObjType,
id=id)
add.db_action = action
add.db_entity_id = entity_id
add.db_entity_type = entity_type
add.is_dirty = False
res[('add', id)] = add
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'add_tbl'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
what = self.convertFromDB(row[1], 'str', 'varchar(255)')
objectId = self.convertFromDB(row[2], 'long', 'int')
parentObjId = self.convertFromDB(row[3], 'long', 'int')
parentObjType = self.convertFromDB(row[4], 'str', 'char(16)')
action = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
add = DBAdd(what=what,
objectId=objectId,
parentObjId=parentObjId,
parentObjType=parentObjType,
id=id)
add.db_action = action
add.db_entity_id = entity_id
add.db_entity_type = entity_type
add.is_dirty = False
res[('add', id)] = add
return res
def from_sql_fast(self, obj, all_objects):
if ('action', obj.db_action) in all_objects:
p = all_objects[('action', obj.db_action)]
p.db_add_operation(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'add_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_what') and obj.db_what is not None:
columnMap['what'] = \
self.convertToDB(obj.db_what, 'str', 'varchar(255)')
if hasattr(obj, 'db_objectId') and obj.db_objectId is not None:
columnMap['object_id'] = \
self.convertToDB(obj.db_objectId, 'long', 'int')
if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
columnMap['par_obj_id'] = \
self.convertToDB(obj.db_parentObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
columnMap['par_obj_type'] = \
self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
if hasattr(obj, 'db_action') and obj.db_action is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'add_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_what') and obj.db_what is not None:
columnMap['what'] = \
self.convertToDB(obj.db_what, 'str', 'varchar(255)')
if hasattr(obj, 'db_objectId') and obj.db_objectId is not None:
columnMap['object_id'] = \
self.convertToDB(obj.db_objectId, 'long', 'int')
if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
columnMap['par_obj_id'] = \
self.convertToDB(obj.db_parentObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
columnMap['par_obj_type'] = \
self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
if hasattr(obj, 'db_action') and obj.db_action is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_data is not None:
child = obj.db_data
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'add_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBGroupExecSQLDAOBase(SQLDAO):
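    """Auto-generated DAO mapping 'group_exec' rows to DBGroupExec objects
    and back."""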
def __init__(self, daoList):
self.daoList = daoList
self.table = 'group_exec'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_exec'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
cached = self.convertFromDB(row[3], 'int', 'int')
module_id = self.convertFromDB(row[4], 'long', 'int')
group_name = self.convertFromDB(row[5], 'str', 'varchar(255)')
group_type = self.convertFromDB(row[6], 'str', 'varchar(255)')
completed = self.convertFromDB(row[7], 'int', 'int')
error = self.convertFromDB(row[8], 'str', 'varchar(1023)')
machine_id = self.convertFromDB(row[9], 'long', 'int')
parentType = self.convertFromDB(row[10], 'str', 'char(32)')
entity_id = self.convertFromDB(row[11], 'long', 'int')
entity_type = self.convertFromDB(row[12], 'str', 'char(16)')
parent = self.convertFromDB(row[13], 'long', 'long')
group_exec = DBGroupExec(ts_start=ts_start,
ts_end=ts_end,
cached=cached,
module_id=module_id,
group_name=group_name,
group_type=group_type,
completed=completed,
error=error,
machine_id=machine_id,
id=id)
group_exec.db_parentType = parentType
group_exec.db_entity_id = entity_id
group_exec.db_entity_type = entity_type
group_exec.db_parent = parent
group_exec.is_dirty = False
res[('group_exec', id)] = group_exec
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_exec'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
cached = self.convertFromDB(row[3], 'int', 'int')
module_id = self.convertFromDB(row[4], 'long', 'int')
group_name = self.convertFromDB(row[5], 'str', 'varchar(255)')
group_type = self.convertFromDB(row[6], 'str', 'varchar(255)')
completed = self.convertFromDB(row[7], 'int', 'int')
error = self.convertFromDB(row[8], 'str', 'varchar(1023)')
machine_id = self.convertFromDB(row[9], 'long', 'int')
parentType = self.convertFromDB(row[10], 'str', 'char(32)')
entity_id = self.convertFromDB(row[11], 'long', 'int')
entity_type = self.convertFromDB(row[12], 'str', 'char(16)')
parent = self.convertFromDB(row[13], 'long', 'long')
group_exec = DBGroupExec(ts_start=ts_start,
ts_end=ts_end,
cached=cached,
module_id=module_id,
group_name=group_name,
group_type=group_type,
completed=completed,
error=error,
machine_id=machine_id,
id=id)
group_exec.db_parentType = parentType
group_exec.db_entity_id = entity_id
group_exec.db_entity_type = entity_type
group_exec.db_parent = parent
group_exec.is_dirty = False
res[('group_exec', id)] = group_exec
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow_exec':
p = all_objects[('workflow_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'loop_iteration':
p = all_objects[('loop_iteration', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'group_exec':
p = all_objects[('group_exec', obj.db_parent)]
p.db_add_item_exec(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_cached') and obj.db_cached is not None:
columnMap['cached'] = \
self.convertToDB(obj.db_cached, 'int', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_group_name') and obj.db_group_name is not None:
columnMap['group_name'] = \
self.convertToDB(obj.db_group_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_group_type') and obj.db_group_type is not None:
columnMap['group_type'] = \
self.convertToDB(obj.db_group_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None:
columnMap['machine_id'] = \
self.convertToDB(obj.db_machine_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'group_name', 'group_type', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'group_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_cached') and obj.db_cached is not None:
columnMap['cached'] = \
self.convertToDB(obj.db_cached, 'int', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_group_name') and obj.db_group_name is not None:
columnMap['group_name'] = \
self.convertToDB(obj.db_group_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_group_type') and obj.db_group_type is not None:
columnMap['group_type'] = \
self.convertToDB(obj.db_group_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None:
columnMap['machine_id'] = \
self.convertToDB(obj.db_machine_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_item_execs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'group_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBParameterSQLDAOBase(SQLDAO):
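    """Auto-generated DAO mapping 'parameter' rows to DBParameter objects
    and back."""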
def __init__(self, daoList):
self.daoList = daoList
self.table = 'parameter'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'parameter'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
type = self.convertFromDB(row[3], 'str', 'varchar(255)')
val = self.convertFromDB(row[4], 'str', 'mediumtext')
alias = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
parameter = DBParameter(pos=pos,
name=name,
type=type,
val=val,
alias=alias,
id=id)
parameter.db_parentType = parentType
parameter.db_entity_id = entity_id
parameter.db_entity_type = entity_type
parameter.db_parent = parent
parameter.is_dirty = False
res[('parameter', id)] = parameter
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'parameter'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
type = self.convertFromDB(row[3], 'str', 'varchar(255)')
val = self.convertFromDB(row[4], 'str', 'mediumtext')
alias = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
parameter = DBParameter(pos=pos,
name=name,
type=type,
val=val,
alias=alias,
id=id)
parameter.db_parentType = parentType
parameter.db_entity_id = entity_id
parameter.db_entity_type = entity_type
parameter.db_parent = parent
parameter.is_dirty = False
res[('parameter', id)] = parameter
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'function':
p = all_objects[('function', obj.db_parent)]
p.db_add_parameter(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_val') and obj.db_val is not None:
columnMap['val'] = \
self.convertToDB(obj.db_val, 'str', 'mediumtext')
if hasattr(obj, 'db_alias') and obj.db_alias is not None:
columnMap['alias'] = \
self.convertToDB(obj.db_alias, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'pos', 'name', 'type', 'val', 'alias', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_val') and obj.db_val is not None:
columnMap['val'] = \
self.convertToDB(obj.db_val, 'str', 'mediumtext')
if hasattr(obj, 'db_alias') and obj.db_alias is not None:
columnMap['alias'] = \
self.convertToDB(obj.db_alias, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBVistrailSQLDAOBase(SQLDAO):
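    """Auto-generated DAO mapping 'vistrail' rows to DBVistrail objects and
    back; on load and save it also records the entity id/type into
    global_props."""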
def __init__(self, daoList):
self.daoList = daoList
self.table = 'vistrail'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'entity_type', 'version', 'name', 'last_modified']
table = 'vistrail'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
name = self.convertFromDB(row[3], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
vistrail = DBVistrail(entity_type=entity_type,
version=version,
name=name,
last_modified=last_modified,
id=id)
vistrail.is_dirty = False
res[('vistrail', id)] = vistrail
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'entity_type', 'version', 'name', 'last_modified']
table = 'vistrail'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
name = self.convertFromDB(row[3], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
vistrail = DBVistrail(entity_type=entity_type,
version=version,
name=name,
last_modified=last_modified,
id=id)
vistrail.is_dirty = False
res[('vistrail', id)] = vistrail
return res
def from_sql_fast(self, obj, all_objects):
pass
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'entity_type', 'version', 'name', 'last_modified']
table = 'vistrail'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'entity_type', 'version', 'name', 'last_modified']
table = 'vistrail'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_actions:
child.db_vistrail = obj.db_id
for child in obj.db_tags:
child.db_vistrail = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_controlParameters:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_vistrailVariables:
child.db_vistrail = obj.db_id
for child in obj.db_parameter_explorations:
child.db_vistrail = obj.db_id
for child in obj.db_actionAnnotations:
child.db_vistrail = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'vistrail'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBModuleSQLDAOBase(SQLDAO):
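    """Auto-generated DAO mapping 'module' rows to DBModule objects and
    back."""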
def __init__(self, daoList):
self.daoList = daoList
self.table = 'module'
def getDao(self, dao):
return self.daoList[dao]
def get_sql_columns(self, db, global_props,lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
module = DBModule(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
id=id)
module.db_parentType = parentType
module.db_entity_id = entity_id
module.db_entity_type = entity_type
module.db_parent = parent
module.is_dirty = False
res[('module', id)] = module
return res
def get_sql_select(self, db, global_props,lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
module = DBModule(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
id=id)
module.db_parentType = parentType
module.db_entity_id = entity_id
module.db_entity_type = entity_type
module.db_parent = parent
module.is_dirty = False
res[('module', id)] = module
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_module(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_location is not None:
child = obj.db_location
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_functions:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_controlParameters:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_portSpecs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'module'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
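# DAO for the 'port' table: converts rows to DBPort objects and builds the
# SELECT/INSERT/UPDATE/DELETE statements for them.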
class DBPortSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'port'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
type = self.convertFromDB(row[1], 'str', 'varchar(255)')
moduleId = self.convertFromDB(row[2], 'long', 'int')
moduleName = self.convertFromDB(row[3], 'str', 'varchar(255)')
name = self.convertFromDB(row[4], 'str', 'varchar(255)')
signature = self.convertFromDB(row[5], 'str', 'varchar(4095)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
port = DBPort(type=type,
moduleId=moduleId,
moduleName=moduleName,
name=name,
signature=signature,
id=id)
port.db_parentType = parentType
port.db_entity_id = entity_id
port.db_entity_type = entity_type
port.db_parent = parent
port.is_dirty = False
res[('port', id)] = port
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
type = self.convertFromDB(row[1], 'str', 'varchar(255)')
moduleId = self.convertFromDB(row[2], 'long', 'int')
moduleName = self.convertFromDB(row[3], 'str', 'varchar(255)')
name = self.convertFromDB(row[4], 'str', 'varchar(255)')
signature = self.convertFromDB(row[5], 'str', 'varchar(4095)')
parentType = self.convertFromDB(row[6], 'str', 'char(32)')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
parent = self.convertFromDB(row[9], 'long', 'long')
port = DBPort(type=type,
moduleId=moduleId,
moduleName=moduleName,
name=name,
signature=signature,
id=id)
port.db_parentType = parentType
port.db_entity_id = entity_id
port.db_entity_type = entity_type
port.db_parent = parent
port.is_dirty = False
res[('port', id)] = port
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'connection':
p = all_objects[('connection', obj.db_parent)]
p.db_add_port(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_moduleId') and obj.db_moduleId is not None:
columnMap['moduleId'] = \
self.convertToDB(obj.db_moduleId, 'long', 'int')
if hasattr(obj, 'db_moduleName') and obj.db_moduleName is not None:
columnMap['moduleName'] = \
self.convertToDB(obj.db_moduleName, 'str', 'varchar(255)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_signature') and obj.db_signature is not None:
columnMap['signature'] = \
self.convertToDB(obj.db_signature, 'str', 'varchar(4095)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'type', 'moduleId', 'moduleName', 'name', 'signature', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_moduleId') and obj.db_moduleId is not None:
columnMap['moduleId'] = \
self.convertToDB(obj.db_moduleId, 'long', 'int')
if hasattr(obj, 'db_moduleName') and obj.db_moduleName is not None:
columnMap['moduleName'] = \
self.convertToDB(obj.db_moduleName, 'str', 'varchar(255)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_signature') and obj.db_signature is not None:
columnMap['signature'] = \
self.convertToDB(obj.db_signature, 'str', 'varchar(4095)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'port'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
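# DAO for the 'pe_function' table: functions recorded by a parameter
# exploration; the parent_id column holds the owning parameter_exploration.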
class DBPEFunctionSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'pe_function'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_function'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
module_id = self.convertFromDB(row[1], 'long', 'int')
port_name = self.convertFromDB(row[2], 'str', 'varchar(255)')
is_alias = self.convertFromDB(row[3], 'long', 'int')
parentType = self.convertFromDB(row[4], 'str', 'char(32)')
parameter_exploration = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
            pe_function = DBPEFunction(module_id=module_id,
                                       port_name=port_name,
                                       is_alias=is_alias,
                                       id=id)
pe_function.db_parentType = parentType
pe_function.db_parameter_exploration = parameter_exploration
pe_function.db_entity_id = entity_id
pe_function.db_entity_type = entity_type
pe_function.is_dirty = False
res[('pe_function', id)] = pe_function
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_function'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
module_id = self.convertFromDB(row[1], 'long', 'int')
port_name = self.convertFromDB(row[2], 'str', 'varchar(255)')
is_alias = self.convertFromDB(row[3], 'long', 'int')
parentType = self.convertFromDB(row[4], 'str', 'char(32)')
parameter_exploration = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
            pe_function = DBPEFunction(module_id=module_id,
                                       port_name=port_name,
                                       is_alias=is_alias,
                                       id=id)
pe_function.db_parentType = parentType
pe_function.db_parameter_exploration = parameter_exploration
pe_function.db_entity_id = entity_id
pe_function.db_entity_type = entity_type
pe_function.is_dirty = False
res[('pe_function', id)] = pe_function
return res
def from_sql_fast(self, obj, all_objects):
if ('parameter_exploration', obj.db_parameter_exploration) in all_objects:
p = all_objects[('parameter_exploration', obj.db_parameter_exploration)]
p.db_add_function(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_function'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_port_name') and obj.db_port_name is not None:
columnMap['port_name'] = \
self.convertToDB(obj.db_port_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_is_alias') and obj.db_is_alias is not None:
columnMap['is_alias'] = \
self.convertToDB(obj.db_is_alias, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_parameter_exploration') and obj.db_parameter_exploration is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parameter_exploration, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'module_id', 'port_name', 'is_alias', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_function'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_port_name') and obj.db_port_name is not None:
columnMap['port_name'] = \
self.convertToDB(obj.db_port_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_is_alias') and obj.db_is_alias is not None:
columnMap['is_alias'] = \
self.convertToDB(obj.db_is_alias, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_parameter_exploration') and obj.db_parameter_exploration is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parameter_exploration, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_parameters:
child.db_pe_function = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'pe_function'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
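# DAO for the 'workflow' table. Workflows are top-level entities, so loading
# or saving one also stamps its id and type into global_props (see below).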
class DBWorkflowSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'workflow'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id']
table = 'workflow'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
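            # a workflow is itself an entity: record its id and type in
            # global_props so subsequent child-table queries are scoped to it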
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_id = self.convertFromDB(row[1], 'long', 'int')
entity_type = self.convertFromDB(row[2], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
name = self.convertFromDB(row[3], 'str', 'varchar(255)')
version = self.convertFromDB(row[4], 'str', 'char(16)')
last_modified = self.convertFromDB(row[5], 'datetime', 'datetime')
vistrail_id = self.convertFromDB(row[6], 'long', 'int')
group = self.convertFromDB(row[7], 'long', 'int')
workflow = DBWorkflow(entity_type=entity_type,
name=name,
version=version,
last_modified=last_modified,
vistrail_id=vistrail_id,
id=id)
workflow.db_entity_id = entity_id
workflow.db_group = group
workflow.is_dirty = False
res[('workflow', id)] = workflow
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id']
table = 'workflow'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_id = self.convertFromDB(row[1], 'long', 'int')
entity_type = self.convertFromDB(row[2], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
name = self.convertFromDB(row[3], 'str', 'varchar(255)')
version = self.convertFromDB(row[4], 'str', 'char(16)')
last_modified = self.convertFromDB(row[5], 'datetime', 'datetime')
vistrail_id = self.convertFromDB(row[6], 'long', 'int')
group = self.convertFromDB(row[7], 'long', 'int')
workflow = DBWorkflow(entity_type=entity_type,
name=name,
version=version,
last_modified=last_modified,
vistrail_id=vistrail_id,
id=id)
workflow.db_entity_id = entity_id
workflow.db_group = group
workflow.is_dirty = False
res[('workflow', id)] = workflow
return res
def from_sql_fast(self, obj, all_objects):
if ('group', obj.db_group) in all_objects:
p = all_objects[('group', obj.db_group)]
p.db_add_workflow(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id']
table = 'workflow'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
columnMap['vistrail_id'] = \
self.convertToDB(obj.db_vistrail_id, 'long', 'int')
if hasattr(obj, 'db_group') and obj.db_group is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_group, 'long', 'int')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
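        # a newly inserted workflow receives its auto-generated id here;
        # propagate the id and entity type into global_props so that child
        # objects are saved under this entity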
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'entity_id', 'entity_type', 'name', 'version', 'last_modified', 'vistrail_id', 'parent_id']
table = 'workflow'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
columnMap['vistrail_id'] = \
self.convertToDB(obj.db_vistrail_id, 'long', 'int')
if hasattr(obj, 'db_group') and obj.db_group is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_group, 'long', 'int')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_connections:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_plugin_datas:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_others:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_modules:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'workflow'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
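# DAO for the 'mashup_action' table: actions belong to a mashuptrail, whose
# id is stored in the parent_id column.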
class DBMashupActionSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'mashup_action'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
prevId = self.convertFromDB(row[1], 'long', 'int')
date = self.convertFromDB(row[2], 'datetime', 'datetime')
user = self.convertFromDB(row[3], 'str', 'varchar(255)')
mashuptrail = self.convertFromDB(row[4], 'long', 'int')
entity_id = self.convertFromDB(row[5], 'long', 'int')
entity_type = self.convertFromDB(row[6], 'str', 'char(16)')
mashup_action = DBMashupAction(prevId=prevId,
date=date,
user=user,
id=id)
mashup_action.db_mashuptrail = mashuptrail
mashup_action.db_entity_id = entity_id
mashup_action.db_entity_type = entity_type
mashup_action.is_dirty = False
res[('mashup_action', id)] = mashup_action
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
prevId = self.convertFromDB(row[1], 'long', 'int')
date = self.convertFromDB(row[2], 'datetime', 'datetime')
user = self.convertFromDB(row[3], 'str', 'varchar(255)')
mashuptrail = self.convertFromDB(row[4], 'long', 'int')
entity_id = self.convertFromDB(row[5], 'long', 'int')
entity_type = self.convertFromDB(row[6], 'str', 'char(16)')
mashup_action = DBMashupAction(prevId=prevId,
date=date,
user=user,
id=id)
mashup_action.db_mashuptrail = mashuptrail
mashup_action.db_entity_id = entity_id
mashup_action.db_entity_type = entity_type
mashup_action.is_dirty = False
res[('mashup_action', id)] = mashup_action
return res
def from_sql_fast(self, obj, all_objects):
if ('mashuptrail', obj.db_mashuptrail) in all_objects:
p = all_objects[('mashuptrail', obj.db_mashuptrail)]
p.db_add_action(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_prevId') and obj.db_prevId is not None:
columnMap['prev_id'] = \
self.convertToDB(obj.db_prevId, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_mashuptrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'prev_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_prevId') and obj.db_prevId is not None:
columnMap['prev_id'] = \
self.convertToDB(obj.db_prevId, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_mashuptrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_mashup is not None:
child = obj.db_mashup
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'mashup_action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
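# DAO for DBChange objects, stored in 'change_tbl' (likely named that way to
# avoid the SQL keyword CHANGE).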
class DBChangeSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'change_tbl'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'what', 'old_obj_id', 'new_obj_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'change_tbl'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
what = self.convertFromDB(row[1], 'str', 'varchar(255)')
oldObjId = self.convertFromDB(row[2], 'long', 'int')
newObjId = self.convertFromDB(row[3], 'long', 'int')
parentObjId = self.convertFromDB(row[4], 'long', 'int')
parentObjType = self.convertFromDB(row[5], 'str', 'char(16)')
action = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
change = DBChange(what=what,
oldObjId=oldObjId,
newObjId=newObjId,
parentObjId=parentObjId,
parentObjType=parentObjType,
id=id)
change.db_action = action
change.db_entity_id = entity_id
change.db_entity_type = entity_type
change.is_dirty = False
res[('change', id)] = change
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'what', 'old_obj_id', 'new_obj_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'change_tbl'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
what = self.convertFromDB(row[1], 'str', 'varchar(255)')
oldObjId = self.convertFromDB(row[2], 'long', 'int')
newObjId = self.convertFromDB(row[3], 'long', 'int')
parentObjId = self.convertFromDB(row[4], 'long', 'int')
parentObjType = self.convertFromDB(row[5], 'str', 'char(16)')
action = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
change = DBChange(what=what,
oldObjId=oldObjId,
newObjId=newObjId,
parentObjId=parentObjId,
parentObjType=parentObjType,
id=id)
change.db_action = action
change.db_entity_id = entity_id
change.db_entity_type = entity_type
change.is_dirty = False
res[('change', id)] = change
return res
def from_sql_fast(self, obj, all_objects):
if ('action', obj.db_action) in all_objects:
p = all_objects[('action', obj.db_action)]
p.db_add_operation(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'what', 'old_obj_id', 'new_obj_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'change_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_what') and obj.db_what is not None:
columnMap['what'] = \
self.convertToDB(obj.db_what, 'str', 'varchar(255)')
if hasattr(obj, 'db_oldObjId') and obj.db_oldObjId is not None:
columnMap['old_obj_id'] = \
self.convertToDB(obj.db_oldObjId, 'long', 'int')
if hasattr(obj, 'db_newObjId') and obj.db_newObjId is not None:
columnMap['new_obj_id'] = \
self.convertToDB(obj.db_newObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
columnMap['par_obj_id'] = \
self.convertToDB(obj.db_parentObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
columnMap['par_obj_type'] = \
self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
if hasattr(obj, 'db_action') and obj.db_action is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'what', 'old_obj_id', 'new_obj_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'change_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_what') and obj.db_what is not None:
columnMap['what'] = \
self.convertToDB(obj.db_what, 'str', 'varchar(255)')
if hasattr(obj, 'db_oldObjId') and obj.db_oldObjId is not None:
columnMap['old_obj_id'] = \
self.convertToDB(obj.db_oldObjId, 'long', 'int')
if hasattr(obj, 'db_newObjId') and obj.db_newObjId is not None:
columnMap['new_obj_id'] = \
self.convertToDB(obj.db_newObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
columnMap['par_obj_id'] = \
self.convertToDB(obj.db_parentObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
columnMap['par_obj_type'] = \
self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
if hasattr(obj, 'db_action') and obj.db_action is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_data is not None:
child = obj.db_data
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'change_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
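# DAO for the 'package' table: packages belong to a registry via parent_id,
# and a new row's generated id is written back in set_sql_process.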
class DBPackageSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'package'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'name', 'identifier', 'codepath', 'load_configuration', 'version', 'description', 'parent_id', 'entity_id', 'entity_type']
table = 'package'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
identifier = self.convertFromDB(row[2], 'str', 'varchar(1023)')
codepath = self.convertFromDB(row[3], 'str', 'varchar(1023)')
load_configuration = self.convertFromDB(row[4], 'int', 'int')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
description = self.convertFromDB(row[6], 'str', 'varchar(1023)')
registry = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
package = DBPackage(name=name,
identifier=identifier,
codepath=codepath,
load_configuration=load_configuration,
version=version,
description=description,
id=id)
package.db_registry = registry
package.db_entity_id = entity_id
package.db_entity_type = entity_type
package.is_dirty = False
res[('package', id)] = package
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'name', 'identifier', 'codepath', 'load_configuration', 'version', 'description', 'parent_id', 'entity_id', 'entity_type']
table = 'package'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
identifier = self.convertFromDB(row[2], 'str', 'varchar(1023)')
codepath = self.convertFromDB(row[3], 'str', 'varchar(1023)')
load_configuration = self.convertFromDB(row[4], 'int', 'int')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
description = self.convertFromDB(row[6], 'str', 'varchar(1023)')
registry = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
package = DBPackage(name=name,
identifier=identifier,
codepath=codepath,
load_configuration=load_configuration,
version=version,
description=description,
id=id)
package.db_registry = registry
package.db_entity_id = entity_id
package.db_entity_type = entity_type
package.is_dirty = False
res[('package', id)] = package
return res
def from_sql_fast(self, obj, all_objects):
if ('registry', obj.db_registry) in all_objects:
p = all_objects[('registry', obj.db_registry)]
p.db_add_package(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'identifier', 'codepath', 'load_configuration', 'version', 'description', 'parent_id', 'entity_id', 'entity_type']
table = 'package'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_identifier') and obj.db_identifier is not None:
columnMap['identifier'] = \
self.convertToDB(obj.db_identifier, 'str', 'varchar(1023)')
if hasattr(obj, 'db_codepath') and obj.db_codepath is not None:
columnMap['codepath'] = \
self.convertToDB(obj.db_codepath, 'str', 'varchar(1023)')
if hasattr(obj, 'db_load_configuration') and obj.db_load_configuration is not None:
columnMap['load_configuration'] = \
self.convertToDB(obj.db_load_configuration, 'int', 'int')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_description') and obj.db_description is not None:
columnMap['description'] = \
self.convertToDB(obj.db_description, 'str', 'varchar(1023)')
if hasattr(obj, 'db_registry') and obj.db_registry is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_registry, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'identifier', 'codepath', 'load_configuration', 'version', 'description', 'parent_id', 'entity_id', 'entity_type']
table = 'package'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_identifier') and obj.db_identifier is not None:
columnMap['identifier'] = \
self.convertToDB(obj.db_identifier, 'str', 'varchar(1023)')
if hasattr(obj, 'db_codepath') and obj.db_codepath is not None:
columnMap['codepath'] = \
self.convertToDB(obj.db_codepath, 'str', 'varchar(1023)')
if hasattr(obj, 'db_load_configuration') and obj.db_load_configuration is not None:
columnMap['load_configuration'] = \
self.convertToDB(obj.db_load_configuration, 'int', 'int')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_description') and obj.db_description is not None:
columnMap['description'] = \
self.convertToDB(obj.db_description, 'str', 'varchar(1023)')
if hasattr(obj, 'db_registry') and obj.db_registry is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_registry, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_module_descriptors:
child.db_package = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'package'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
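# DAO for the 'loop_exec' table: a loop execution can hang off a
# workflow_exec, group_exec, or module_exec, as dispatched in from_sql_fast.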
class DBLoopExecSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'loop_exec'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'ts_start', 'ts_end', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'loop_exec'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
loop_exec = DBLoopExec(ts_start=ts_start,
ts_end=ts_end,
id=id)
loop_exec.db_parentType = parentType
loop_exec.db_entity_id = entity_id
loop_exec.db_entity_type = entity_type
loop_exec.db_parent = parent
loop_exec.is_dirty = False
res[('loop_exec', id)] = loop_exec
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'ts_start', 'ts_end', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'loop_exec'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
loop_exec = DBLoopExec(ts_start=ts_start,
ts_end=ts_end,
id=id)
loop_exec.db_parentType = parentType
loop_exec.db_entity_id = entity_id
loop_exec.db_entity_type = entity_type
loop_exec.db_parent = parent
loop_exec.is_dirty = False
res[('loop_exec', id)] = loop_exec
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow_exec':
p = all_objects[('workflow_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'group_exec':
p = all_objects[('group_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'module_exec':
p = all_objects[('module_exec', obj.db_parent)]
p.db_add_loop_exec(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'ts_start', 'ts_end', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'loop_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'ts_start', 'ts_end', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'loop_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_loop_iterations:
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'loop_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
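# DAO for DBConnection objects, stored in 'connection_tbl'; a connection's
# parent is either a workflow or an add/change operation.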
class DBConnectionSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'connection_tbl'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'connection_tbl'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
parentType = self.convertFromDB(row[1], 'str', 'char(32)')
entity_id = self.convertFromDB(row[2], 'long', 'int')
entity_type = self.convertFromDB(row[3], 'str', 'char(16)')
parent = self.convertFromDB(row[4], 'long', 'long')
connection = DBConnection(id=id)
connection.db_parentType = parentType
connection.db_entity_id = entity_id
connection.db_entity_type = entity_type
connection.db_parent = parent
connection.is_dirty = False
res[('connection', id)] = connection
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'connection_tbl'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
parentType = self.convertFromDB(row[1], 'str', 'char(32)')
entity_id = self.convertFromDB(row[2], 'long', 'int')
entity_type = self.convertFromDB(row[3], 'str', 'char(16)')
parent = self.convertFromDB(row[4], 'long', 'long')
connection = DBConnection(id=id)
connection.db_parentType = parentType
connection.db_entity_id = entity_id
connection.db_entity_type = entity_type
connection.db_parent = parent
connection.is_dirty = False
res[('connection', id)] = connection
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_connection(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'connection_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'connection_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_ports:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'connection_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
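# DAO for the 'action' table: version-tree actions that belong to a vistrail
# via the parent_id column.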
class DBActionSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'action'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
prevId = self.convertFromDB(row[1], 'long', 'int')
date = self.convertFromDB(row[2], 'datetime', 'datetime')
session = self.convertFromDB(row[3], 'long', 'int')
user = self.convertFromDB(row[4], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
action = DBAction(prevId=prevId,
date=date,
session=session,
user=user,
id=id)
action.db_vistrail = vistrail
action.db_entity_id = entity_id
action.db_entity_type = entity_type
action.is_dirty = False
res[('action', id)] = action
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
prevId = self.convertFromDB(row[1], 'long', 'int')
date = self.convertFromDB(row[2], 'datetime', 'datetime')
session = self.convertFromDB(row[3], 'long', 'int')
user = self.convertFromDB(row[4], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
action = DBAction(prevId=prevId,
date=date,
session=session,
user=user,
id=id)
action.db_vistrail = vistrail
action.db_entity_id = entity_id
action.db_entity_type = entity_type
action.is_dirty = False
res[('action', id)] = action
return res
def from_sql_fast(self, obj, all_objects):
if ('vistrail', obj.db_vistrail) in all_objects:
p = all_objects[('vistrail', obj.db_vistrail)]
p.db_add_action(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_prevId') and obj.db_prevId is not None:
columnMap['prev_id'] = \
self.convertToDB(obj.db_prevId, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_session') and obj.db_session is not None:
columnMap['session'] = \
self.convertToDB(obj.db_session, 'long', 'int')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'prev_id', 'date', 'session', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_prevId') and obj.db_prevId is not None:
columnMap['prev_id'] = \
self.convertToDB(obj.db_prevId, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_session') and obj.db_session is not None:
columnMap['session'] = \
self.convertToDB(obj.db_session, 'long', 'int')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_operations:
child.db_action = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'action'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
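# ---------------------------------------------------------------------------
# Minimal usage sketch for the generated DAO classes in this module (kept in
# comments so importing the module stays side-effect free).  `db` (an open
# database connection) and `dao_list` (the dao-name -> DAO mapping these
# classes are constructed with) are assumed to exist for illustration only:
#
#     dao = DBActionSQLDAOBase(dao_list)
#     props = {'entity_id': dao.convertToDB(1, 'long', 'int'),
#              'entity_type': dao.convertToDB('vistrail', 'str', 'char(16)')}
#     actions = dao.get_sql_columns(db, props)    # {('action', id): DBAction}
#     for action in actions.values():
#         dao.set_sql_columns(db, action, props)  # INSERT (new/copy) or UPDATE
# ---------------------------------------------------------------------------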
class DBPortSpecSQLDAOBase(SQLDAO):
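    """Auto-generated DAO mapping DBPortSpec objects to the 'port_spec' table.

    A port spec row can hang off a module, module_descriptor, add, or change
    parent (see from_sql_fast); its portSpecItems receive its id in
    to_sql_fast().
    """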
def __init__(self, daoList):
self.daoList = daoList
self.table = 'port_spec'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'name', 'type', 'optional', 'depth', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port_spec'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
type = self.convertFromDB(row[2], 'str', 'varchar(255)')
optional = self.convertFromDB(row[3], 'int', 'int')
depth = self.convertFromDB(row[4], 'int', 'int')
sort_key = self.convertFromDB(row[5], 'int', 'int')
min_conns = self.convertFromDB(row[6], 'int', 'int')
max_conns = self.convertFromDB(row[7], 'int', 'int')
parentType = self.convertFromDB(row[8], 'str', 'char(32)')
entity_id = self.convertFromDB(row[9], 'long', 'int')
entity_type = self.convertFromDB(row[10], 'str', 'char(16)')
parent = self.convertFromDB(row[11], 'long', 'long')
portSpec = DBPortSpec(name=name,
type=type,
optional=optional,
depth=depth,
sort_key=sort_key,
min_conns=min_conns,
max_conns=max_conns,
id=id)
portSpec.db_parentType = parentType
portSpec.db_entity_id = entity_id
portSpec.db_entity_type = entity_type
portSpec.db_parent = parent
portSpec.is_dirty = False
res[('portSpec', id)] = portSpec
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'name', 'type', 'optional', 'depth', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port_spec'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
type = self.convertFromDB(row[2], 'str', 'varchar(255)')
optional = self.convertFromDB(row[3], 'int', 'int')
depth = self.convertFromDB(row[4], 'int', 'int')
sort_key = self.convertFromDB(row[5], 'int', 'int')
min_conns = self.convertFromDB(row[6], 'int', 'int')
max_conns = self.convertFromDB(row[7], 'int', 'int')
parentType = self.convertFromDB(row[8], 'str', 'char(32)')
entity_id = self.convertFromDB(row[9], 'long', 'int')
entity_type = self.convertFromDB(row[10], 'str', 'char(16)')
parent = self.convertFromDB(row[11], 'long', 'long')
portSpec = DBPortSpec(name=name,
type=type,
optional=optional,
depth=depth,
sort_key=sort_key,
min_conns=min_conns,
max_conns=max_conns,
id=id)
portSpec.db_parentType = parentType
portSpec.db_entity_id = entity_id
portSpec.db_entity_type = entity_type
portSpec.db_parent = parent
portSpec.is_dirty = False
res[('portSpec', id)] = portSpec
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'module':
p = all_objects[('module', obj.db_parent)]
p.db_add_portSpec(obj)
elif obj.db_parentType == 'module_descriptor':
p = all_objects[('module_descriptor', obj.db_parent)]
p.db_add_portSpec(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'type', 'optional', 'depth', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port_spec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_optional') and obj.db_optional is not None:
columnMap['optional'] = \
self.convertToDB(obj.db_optional, 'int', 'int')
if hasattr(obj, 'db_depth') and obj.db_depth is not None:
columnMap['depth'] = \
self.convertToDB(obj.db_depth, 'int', 'int')
if hasattr(obj, 'db_sort_key') and obj.db_sort_key is not None:
columnMap['sort_key'] = \
self.convertToDB(obj.db_sort_key, 'int', 'int')
if hasattr(obj, 'db_min_conns') and obj.db_min_conns is not None:
columnMap['min_conns'] = \
self.convertToDB(obj.db_min_conns, 'int', 'int')
if hasattr(obj, 'db_max_conns') and obj.db_max_conns is not None:
columnMap['max_conns'] = \
self.convertToDB(obj.db_max_conns, 'int', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'type', 'optional', 'depth', 'sort_key', 'min_conns', 'max_conns', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'port_spec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_optional') and obj.db_optional is not None:
columnMap['optional'] = \
self.convertToDB(obj.db_optional, 'int', 'int')
if hasattr(obj, 'db_depth') and obj.db_depth is not None:
columnMap['depth'] = \
self.convertToDB(obj.db_depth, 'int', 'int')
if hasattr(obj, 'db_sort_key') and obj.db_sort_key is not None:
columnMap['sort_key'] = \
self.convertToDB(obj.db_sort_key, 'int', 'int')
if hasattr(obj, 'db_min_conns') and obj.db_min_conns is not None:
columnMap['min_conns'] = \
self.convertToDB(obj.db_min_conns, 'int', 'int')
if hasattr(obj, 'db_max_conns') and obj.db_max_conns is not None:
columnMap['max_conns'] = \
self.convertToDB(obj.db_max_conns, 'int', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_portSpecItems:
child.db_portSpec = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'port_spec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBLogSQLDAOBase(SQLDAO):
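    """Auto-generated DAO mapping DBLog objects to the 'log_tbl' table.

    The log is a top-level entity: loading or saving one also records its id
    and type in global_props ('entity_id'/'entity_type') so that child DAOs
    tag their rows with the owning entity.
    """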
def __init__(self, daoList):
self.daoList = daoList
self.table = 'log_tbl'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
table = 'log_tbl'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
name = self.convertFromDB(row[3], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
vistrail_id = self.convertFromDB(row[5], 'long', 'int')
log = DBLog(entity_type=entity_type,
version=version,
name=name,
last_modified=last_modified,
vistrail_id=vistrail_id,
id=id)
log.is_dirty = False
res[('log', id)] = log
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
table = 'log_tbl'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
name = self.convertFromDB(row[3], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
vistrail_id = self.convertFromDB(row[5], 'long', 'int')
log = DBLog(entity_type=entity_type,
version=version,
name=name,
last_modified=last_modified,
vistrail_id=vistrail_id,
id=id)
log.is_dirty = False
res[('log', id)] = log
return res
def from_sql_fast(self, obj, all_objects):
pass
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
table = 'log_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
columnMap['vistrail_id'] = \
self.convertToDB(obj.db_vistrail_id, 'long', 'int')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
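        # The log row is a top-level entity: if it was just INSERTed, adopt
        # the database-assigned id and publish it (and the entity type) via
        # global_props so subsequent child writes reference this entity.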
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'entity_type', 'version', 'name', 'last_modified', 'vistrail_id']
table = 'log_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
if hasattr(obj, 'db_vistrail_id') and obj.db_vistrail_id is not None:
columnMap['vistrail_id'] = \
self.convertToDB(obj.db_vistrail_id, 'long', 'int')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_workflow_execs:
child.db_log = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'log_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBLoopIterationSQLDAOBase(SQLDAO):
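    """Auto-generated DAO mapping DBLoopIteration objects to the
    'loop_iteration' table; iterations attach to their parent loop_exec and
    pass their id on to child item_execs in to_sql_fast().
    """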
def __init__(self, daoList):
self.daoList = daoList
self.table = 'loop_iteration'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_id', 'entity_id', 'entity_type']
table = 'loop_iteration'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
iteration = self.convertFromDB(row[3], 'int', 'int')
completed = self.convertFromDB(row[4], 'int', 'int')
error = self.convertFromDB(row[5], 'str', 'varchar(1023)')
            parent = self.convertFromDB(row[6], 'long', 'int')  # numeric loop_exec id; must match the ('loop_exec', id) keys used in from_sql_fast
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
loop_iteration = DBLoopIteration(ts_start=ts_start,
ts_end=ts_end,
iteration=iteration,
completed=completed,
error=error,
id=id)
loop_iteration.db_parent = parent
loop_iteration.db_entity_id = entity_id
loop_iteration.db_entity_type = entity_type
loop_iteration.is_dirty = False
res[('loop_iteration', id)] = loop_iteration
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_id', 'entity_id', 'entity_type']
table = 'loop_iteration'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
iteration = self.convertFromDB(row[3], 'int', 'int')
completed = self.convertFromDB(row[4], 'int', 'int')
error = self.convertFromDB(row[5], 'str', 'varchar(1023)')
            parent = self.convertFromDB(row[6], 'long', 'int')  # numeric loop_exec id; must match the ('loop_exec', id) keys used in from_sql_fast
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
loop_iteration = DBLoopIteration(ts_start=ts_start,
ts_end=ts_end,
iteration=iteration,
completed=completed,
error=error,
id=id)
loop_iteration.db_parent = parent
loop_iteration.db_entity_id = entity_id
loop_iteration.db_entity_type = entity_type
loop_iteration.is_dirty = False
res[('loop_iteration', id)] = loop_iteration
return res
def from_sql_fast(self, obj, all_objects):
if ('loop_exec', obj.db_parent) in all_objects:
p = all_objects[('loop_exec', obj.db_parent)]
p.db_add_loop_iteration(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_id', 'entity_id', 'entity_type']
table = 'loop_iteration'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_iteration') and obj.db_iteration is not None:
columnMap['iteration'] = \
self.convertToDB(obj.db_iteration, 'int', 'int')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'ts_start', 'ts_end', 'iteration', 'completed', 'error', 'parent_id', 'entity_id', 'entity_type']
table = 'loop_iteration'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_iteration') and obj.db_iteration is not None:
columnMap['iteration'] = \
self.convertToDB(obj.db_iteration, 'int', 'int')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
        if hasattr(obj, 'db_parent') and obj.db_parent is not None:
            columnMap['parent_id'] = \
                self.convertToDB(obj.db_parent, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_item_execs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'loop_iteration'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBPEParameterSQLDAOBase(SQLDAO):
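    """Auto-generated DAO mapping DBPEParameter objects to the 'pe_parameter'
    table; parameters attach to their parent pe_function via parent_id.
    """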
def __init__(self, daoList):
self.daoList = daoList
self.table = 'pe_parameter'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_parameter'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
interpolator = self.convertFromDB(row[2], 'str', 'varchar(255)')
value = self.convertFromDB(row[3], 'str', 'mediumtext')
dimension = self.convertFromDB(row[4], 'long', 'int')
parentType = self.convertFromDB(row[5], 'str', 'char(32)')
pe_function = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
pe_parameter = DBPEParameter(pos=pos,
interpolator=interpolator,
value=value,
dimension=dimension,
id=id)
pe_parameter.db_parentType = parentType
pe_parameter.db_pe_function = pe_function
pe_parameter.db_entity_id = entity_id
pe_parameter.db_entity_type = entity_type
pe_parameter.is_dirty = False
res[('pe_parameter', id)] = pe_parameter
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_parameter'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
interpolator = self.convertFromDB(row[2], 'str', 'varchar(255)')
value = self.convertFromDB(row[3], 'str', 'mediumtext')
dimension = self.convertFromDB(row[4], 'long', 'int')
parentType = self.convertFromDB(row[5], 'str', 'char(32)')
pe_function = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
pe_parameter = DBPEParameter(pos=pos,
interpolator=interpolator,
value=value,
dimension=dimension,
id=id)
pe_parameter.db_parentType = parentType
pe_parameter.db_pe_function = pe_function
pe_parameter.db_entity_id = entity_id
pe_parameter.db_entity_type = entity_type
pe_parameter.is_dirty = False
res[('pe_parameter', id)] = pe_parameter
return res
def from_sql_fast(self, obj, all_objects):
if ('pe_function', obj.db_pe_function) in all_objects:
p = all_objects[('pe_function', obj.db_pe_function)]
p.db_add_parameter(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_interpolator') and obj.db_interpolator is not None:
columnMap['interpolator'] = \
self.convertToDB(obj.db_interpolator, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'mediumtext')
if hasattr(obj, 'db_dimension') and obj.db_dimension is not None:
columnMap['dimension'] = \
self.convertToDB(obj.db_dimension, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_pe_function') and obj.db_pe_function is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_pe_function, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'pos', 'interpolator', 'value', 'dimension', 'parent_type', 'parent_id', 'entity_id', 'entity_type']
table = 'pe_parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_interpolator') and obj.db_interpolator is not None:
columnMap['interpolator'] = \
self.convertToDB(obj.db_interpolator, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'mediumtext')
if hasattr(obj, 'db_dimension') and obj.db_dimension is not None:
columnMap['dimension'] = \
self.convertToDB(obj.db_dimension, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_pe_function') and obj.db_pe_function is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_pe_function, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'pe_parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBWorkflowExecSQLDAOBase(SQLDAO):
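    """Auto-generated DAO mapping DBWorkflowExec objects to the
    'workflow_exec' table; executions attach to their parent log and hand
    their id to child annotations, machines, and item_execs.
    """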
def __init__(self, daoList):
self.daoList = daoList
self.table = 'workflow_exec'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type']
table = 'workflow_exec'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
user = self.convertFromDB(row[1], 'str', 'varchar(255)')
ip = self.convertFromDB(row[2], 'str', 'varchar(255)')
session = self.convertFromDB(row[3], 'long', 'int')
vt_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
ts_start = self.convertFromDB(row[5], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[6], 'datetime', 'datetime')
parent_id = self.convertFromDB(row[7], 'long', 'int')
parent_type = self.convertFromDB(row[8], 'str', 'varchar(255)')
parent_version = self.convertFromDB(row[9], 'long', 'int')
completed = self.convertFromDB(row[10], 'int', 'int')
name = self.convertFromDB(row[11], 'str', 'varchar(255)')
log = self.convertFromDB(row[12], 'long', 'int')
entity_id = self.convertFromDB(row[13], 'long', 'int')
entity_type = self.convertFromDB(row[14], 'str', 'char(16)')
workflow_exec = DBWorkflowExec(user=user,
ip=ip,
session=session,
vt_version=vt_version,
ts_start=ts_start,
ts_end=ts_end,
parent_id=parent_id,
parent_type=parent_type,
parent_version=parent_version,
completed=completed,
name=name,
id=id)
workflow_exec.db_log = log
workflow_exec.db_entity_id = entity_id
workflow_exec.db_entity_type = entity_type
workflow_exec.is_dirty = False
res[('workflow_exec', id)] = workflow_exec
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type']
table = 'workflow_exec'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
user = self.convertFromDB(row[1], 'str', 'varchar(255)')
ip = self.convertFromDB(row[2], 'str', 'varchar(255)')
session = self.convertFromDB(row[3], 'long', 'int')
vt_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
ts_start = self.convertFromDB(row[5], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[6], 'datetime', 'datetime')
parent_id = self.convertFromDB(row[7], 'long', 'int')
parent_type = self.convertFromDB(row[8], 'str', 'varchar(255)')
parent_version = self.convertFromDB(row[9], 'long', 'int')
completed = self.convertFromDB(row[10], 'int', 'int')
name = self.convertFromDB(row[11], 'str', 'varchar(255)')
log = self.convertFromDB(row[12], 'long', 'int')
entity_id = self.convertFromDB(row[13], 'long', 'int')
entity_type = self.convertFromDB(row[14], 'str', 'char(16)')
workflow_exec = DBWorkflowExec(user=user,
ip=ip,
session=session,
vt_version=vt_version,
ts_start=ts_start,
ts_end=ts_end,
parent_id=parent_id,
parent_type=parent_type,
parent_version=parent_version,
completed=completed,
name=name,
id=id)
workflow_exec.db_log = log
workflow_exec.db_entity_id = entity_id
workflow_exec.db_entity_type = entity_type
workflow_exec.is_dirty = False
res[('workflow_exec', id)] = workflow_exec
return res
def from_sql_fast(self, obj, all_objects):
if ('log', obj.db_log) in all_objects:
p = all_objects[('log', obj.db_log)]
p.db_add_workflow_exec(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type']
table = 'workflow_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_ip') and obj.db_ip is not None:
columnMap['ip'] = \
self.convertToDB(obj.db_ip, 'str', 'varchar(255)')
if hasattr(obj, 'db_session') and obj.db_session is not None:
columnMap['session'] = \
self.convertToDB(obj.db_session, 'long', 'int')
if hasattr(obj, 'db_vt_version') and obj.db_vt_version is not None:
columnMap['vt_version'] = \
self.convertToDB(obj.db_vt_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_parent_id') and obj.db_parent_id is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent_id, 'long', 'int')
if hasattr(obj, 'db_parent_type') and obj.db_parent_type is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parent_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_parent_version') and obj.db_parent_version is not None:
columnMap['parent_version'] = \
self.convertToDB(obj.db_parent_version, 'long', 'int')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_log') and obj.db_log is not None:
columnMap['log_id'] = \
self.convertToDB(obj.db_log, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'user', 'ip', 'session', 'vt_version', 'ts_start', 'ts_end', 'parent_id', 'parent_type', 'parent_version', 'completed', 'name', 'log_id', 'entity_id', 'entity_type']
table = 'workflow_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_ip') and obj.db_ip is not None:
columnMap['ip'] = \
self.convertToDB(obj.db_ip, 'str', 'varchar(255)')
if hasattr(obj, 'db_session') and obj.db_session is not None:
columnMap['session'] = \
self.convertToDB(obj.db_session, 'long', 'int')
if hasattr(obj, 'db_vt_version') and obj.db_vt_version is not None:
columnMap['vt_version'] = \
self.convertToDB(obj.db_vt_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_parent_id') and obj.db_parent_id is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent_id, 'long', 'int')
if hasattr(obj, 'db_parent_type') and obj.db_parent_type is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parent_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_parent_version') and obj.db_parent_version is not None:
columnMap['parent_version'] = \
self.convertToDB(obj.db_parent_version, 'long', 'int')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_log') and obj.db_log is not None:
columnMap['log_id'] = \
self.convertToDB(obj.db_log, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_machines:
child.db_workflow_exec = obj.db_id
for child in obj.db_item_execs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'workflow_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBLocationSQLDAOBase(SQLDAO):
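    """Auto-generated DAO mapping DBLocation objects to the 'location' table.

    A location can belong to a module, abstraction, group, add, or change
    parent, as dispatched in from_sql_fast().
    """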
def __init__(self, daoList):
self.daoList = daoList
self.table = 'location'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'location'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
x = self.convertFromDB(row[1], 'float', 'DECIMAL(18,12)')
y = self.convertFromDB(row[2], 'float', 'DECIMAL(18,12)')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
location = DBLocation(x=x,
y=y,
id=id)
location.db_parentType = parentType
location.db_entity_id = entity_id
location.db_entity_type = entity_type
location.db_parent = parent
location.is_dirty = False
res[('location', id)] = location
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'location'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
x = self.convertFromDB(row[1], 'float', 'DECIMAL(18,12)')
y = self.convertFromDB(row[2], 'float', 'DECIMAL(18,12)')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
location = DBLocation(x=x,
y=y,
id=id)
location.db_parentType = parentType
location.db_entity_id = entity_id
location.db_entity_type = entity_type
location.db_parent = parent
location.is_dirty = False
res[('location', id)] = location
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'module':
p = all_objects[('module', obj.db_parent)]
p.db_add_location(obj)
elif obj.db_parentType == 'abstraction':
p = all_objects[('abstraction', obj.db_parent)]
p.db_add_location(obj)
elif obj.db_parentType == 'group':
p = all_objects[('group', obj.db_parent)]
p.db_add_location(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'location'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_x') and obj.db_x is not None:
columnMap['x'] = \
self.convertToDB(obj.db_x, 'float', 'DECIMAL(18,12)')
if hasattr(obj, 'db_y') and obj.db_y is not None:
columnMap['y'] = \
self.convertToDB(obj.db_y, 'float', 'DECIMAL(18,12)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'x', 'y', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'location'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_x') and obj.db_x is not None:
columnMap['x'] = \
self.convertToDB(obj.db_x, 'float', 'DECIMAL(18,12)')
if hasattr(obj, 'db_y') and obj.db_y is not None:
columnMap['y'] = \
self.convertToDB(obj.db_y, 'float', 'DECIMAL(18,12)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'location'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBFunctionSQLDAOBase(SQLDAO):
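    """Auto-generated DAO mapping DBFunction objects to the 'function' table.

    A function can belong to a module, abstraction, group, add, or change
    parent; its parameters receive its id in to_sql_fast().
    """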
def __init__(self, daoList):
self.daoList = daoList
self.table = 'function'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'function'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
function = DBFunction(pos=pos,
name=name,
id=id)
function.db_parentType = parentType
function.db_entity_id = entity_id
function.db_entity_type = entity_type
function.db_parent = parent
function.is_dirty = False
res[('function', id)] = function
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'function'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
function = DBFunction(pos=pos,
name=name,
id=id)
function.db_parentType = parentType
function.db_entity_id = entity_id
function.db_entity_type = entity_type
function.db_parent = parent
function.is_dirty = False
res[('function', id)] = function
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'module':
p = all_objects[('module', obj.db_parent)]
p.db_add_function(obj)
elif obj.db_parentType == 'abstraction':
p = all_objects[('abstraction', obj.db_parent)]
p.db_add_function(obj)
elif obj.db_parentType == 'group':
p = all_objects[('group', obj.db_parent)]
p.db_add_function(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'function'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'pos', 'name', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'function'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_parameters:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'function'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBActionAnnotationSQLDAOBase(SQLDAO):
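    """Auto-generated DAO mapping DBActionAnnotation objects to the
    'action_annotation' table.  The key is stored in the 'akey' column
    (presumably because KEY is reserved in SQL); annotations attach to
    their parent vistrail.
    """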
def __init__(self, daoList):
self.daoList = daoList
self.table = 'action_annotation'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action_annotation'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'varchar(8191)')
action_id = self.convertFromDB(row[3], 'long', 'int')
date = self.convertFromDB(row[4], 'datetime', 'datetime')
user = self.convertFromDB(row[5], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
actionAnnotation = DBActionAnnotation(key=key,
value=value,
action_id=action_id,
date=date,
user=user,
id=id)
actionAnnotation.db_vistrail = vistrail
actionAnnotation.db_entity_id = entity_id
actionAnnotation.db_entity_type = entity_type
actionAnnotation.is_dirty = False
res[('actionAnnotation', id)] = actionAnnotation
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action_annotation'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'varchar(8191)')
action_id = self.convertFromDB(row[3], 'long', 'int')
date = self.convertFromDB(row[4], 'datetime', 'datetime')
user = self.convertFromDB(row[5], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
actionAnnotation = DBActionAnnotation(key=key,
value=value,
action_id=action_id,
date=date,
user=user,
id=id)
actionAnnotation.db_vistrail = vistrail
actionAnnotation.db_entity_id = entity_id
actionAnnotation.db_entity_type = entity_type
actionAnnotation.is_dirty = False
res[('actionAnnotation', id)] = actionAnnotation
return res
def from_sql_fast(self, obj, all_objects):
if ('vistrail', obj.db_vistrail) in all_objects:
p = all_objects[('vistrail', obj.db_vistrail)]
p.db_add_actionAnnotation(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action_annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['akey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action_id, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'action_annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['akey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action_id, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'action_annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
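# Note: the annotation key is stored in column 'akey', presumably to avoid
# the reserved word KEY in MySQL, but it surfaces as db_key and as the
# `key=` constructor argument of DBActionAnnotation.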
class DBControlParameterSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'control_parameter'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'name', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'control_parameter'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'mediumtext')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
controlParameter = DBControlParameter(name=name,
value=value,
id=id)
controlParameter.db_parentType = parentType
controlParameter.db_entity_id = entity_id
controlParameter.db_entity_type = entity_type
controlParameter.db_parent = parent
controlParameter.is_dirty = False
res[('controlParameter', id)] = controlParameter
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'name', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'control_parameter'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'mediumtext')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
controlParameter = DBControlParameter(name=name,
value=value,
id=id)
controlParameter.db_parentType = parentType
controlParameter.db_entity_id = entity_id
controlParameter.db_entity_type = entity_type
controlParameter.db_parent = parent
controlParameter.is_dirty = False
res[('controlParameter', id)] = controlParameter
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'vistrail':
p = all_objects[('vistrail', obj.db_parent)]
p.db_add_controlParameter(obj)
elif obj.db_parentType == 'module':
p = all_objects[('module', obj.db_parent)]
p.db_add_controlParameter(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'abstraction':
p = all_objects[('abstraction', obj.db_parent)]
p.db_add_controlParameter(obj)
elif obj.db_parentType == 'group':
p = all_objects[('group', obj.db_parent)]
p.db_add_controlParameter(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'control_parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'mediumtext')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'control_parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'mediumtext')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'control_parameter'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
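# Note: from_sql_fast() comes in two flavors in this file. Single-parent
# rows (action_annotation, tag, vistrail_variable) look their parent up
# under a fixed key such as ('vistrail', obj.db_vistrail), while polymorphic
# rows like control_parameter dispatch on db_parentType, so one table can
# hang rows off a vistrail, module, add, change, abstraction, or group. A
# hypothetical re-link pass over a mixed load, assuming `objects` came from
# the sketch near the top of this section and `dao_list` maps vtType strings
# to DAO instances:
#
#     for obj in objects.values():
#         dao_list[obj.vtType].from_sql_fast(obj, objects)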
class DBPluginDataSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'plugin_data'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'data', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'plugin_data'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
data = self.convertFromDB(row[1], 'str', 'varchar(8191)')
parentType = self.convertFromDB(row[2], 'str', 'char(32)')
entity_id = self.convertFromDB(row[3], 'long', 'int')
entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
parent = self.convertFromDB(row[5], 'long', 'long')
plugin_data = DBPluginData(data=data,
id=id)
plugin_data.db_parentType = parentType
plugin_data.db_entity_id = entity_id
plugin_data.db_entity_type = entity_type
plugin_data.db_parent = parent
plugin_data.is_dirty = False
res[('plugin_data', id)] = plugin_data
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'data', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'plugin_data'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
data = self.convertFromDB(row[1], 'str', 'varchar(8191)')
parentType = self.convertFromDB(row[2], 'str', 'char(32)')
entity_id = self.convertFromDB(row[3], 'long', 'int')
entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
parent = self.convertFromDB(row[5], 'long', 'long')
plugin_data = DBPluginData(data=data,
id=id)
plugin_data.db_parentType = parentType
plugin_data.db_entity_id = entity_id
plugin_data.db_entity_type = entity_type
plugin_data.db_parent = parent
plugin_data.is_dirty = False
res[('plugin_data', id)] = plugin_data
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_plugin_data(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'data', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'plugin_data'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_data') and obj.db_data is not None:
columnMap['data'] = \
self.convertToDB(obj.db_data, 'str', 'varchar(8191)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'data', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'plugin_data'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_data') and obj.db_data is not None:
columnMap['data'] = \
self.convertToDB(obj.db_data, 'str', 'varchar(8191)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'plugin_data'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBDeleteSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'delete_tbl'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'delete_tbl'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
what = self.convertFromDB(row[1], 'str', 'varchar(255)')
objectId = self.convertFromDB(row[2], 'long', 'int')
parentObjId = self.convertFromDB(row[3], 'long', 'int')
parentObjType = self.convertFromDB(row[4], 'str', 'char(16)')
action = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
delete = DBDelete(what=what,
objectId=objectId,
parentObjId=parentObjId,
parentObjType=parentObjType,
id=id)
delete.db_action = action
delete.db_entity_id = entity_id
delete.db_entity_type = entity_type
delete.is_dirty = False
res[('delete', id)] = delete
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'delete_tbl'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
what = self.convertFromDB(row[1], 'str', 'varchar(255)')
objectId = self.convertFromDB(row[2], 'long', 'int')
parentObjId = self.convertFromDB(row[3], 'long', 'int')
parentObjType = self.convertFromDB(row[4], 'str', 'char(16)')
action = self.convertFromDB(row[5], 'long', 'int')
entity_id = self.convertFromDB(row[6], 'long', 'int')
entity_type = self.convertFromDB(row[7], 'str', 'char(16)')
delete = DBDelete(what=what,
objectId=objectId,
parentObjId=parentObjId,
parentObjType=parentObjType,
id=id)
delete.db_action = action
delete.db_entity_id = entity_id
delete.db_entity_type = entity_type
delete.is_dirty = False
res[('delete', id)] = delete
return res
def from_sql_fast(self, obj, all_objects):
if ('action', obj.db_action) in all_objects:
p = all_objects[('action', obj.db_action)]
p.db_add_operation(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'delete_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_what') and obj.db_what is not None:
columnMap['what'] = \
self.convertToDB(obj.db_what, 'str', 'varchar(255)')
if hasattr(obj, 'db_objectId') and obj.db_objectId is not None:
columnMap['object_id'] = \
self.convertToDB(obj.db_objectId, 'long', 'int')
if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
columnMap['par_obj_id'] = \
self.convertToDB(obj.db_parentObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
columnMap['par_obj_type'] = \
self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
if hasattr(obj, 'db_action') and obj.db_action is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'what', 'object_id', 'par_obj_id', 'par_obj_type', 'action_id', 'entity_id', 'entity_type']
table = 'delete_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_what') and obj.db_what is not None:
columnMap['what'] = \
self.convertToDB(obj.db_what, 'str', 'varchar(255)')
if hasattr(obj, 'db_objectId') and obj.db_objectId is not None:
columnMap['object_id'] = \
self.convertToDB(obj.db_objectId, 'long', 'int')
if hasattr(obj, 'db_parentObjId') and obj.db_parentObjId is not None:
columnMap['par_obj_id'] = \
self.convertToDB(obj.db_parentObjId, 'long', 'int')
if hasattr(obj, 'db_parentObjType') and obj.db_parentObjType is not None:
columnMap['par_obj_type'] = \
self.convertToDB(obj.db_parentObjType, 'str', 'char(16)')
if hasattr(obj, 'db_action') and obj.db_action is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'delete_tbl'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
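# Note: the backing table is named 'delete_tbl' rather than 'delete',
# presumably because DELETE is a reserved word in SQL; loaded objects still
# register under the ('delete', id) key.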
class DBVistrailVariableSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'vistrail_variable'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['name', 'uuid', 'package', 'module', 'namespace', 'value', 'parent_id', 'entity_id', 'entity_type']
table = 'vistrail_variable'
whereMap = global_props
orderBy = 'name'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
name = self.convertFromDB(row[0], 'str', 'varchar(255)')
uuid = self.convertFromDB(row[1], 'str', 'char(36)')
package = self.convertFromDB(row[2], 'str', 'varchar(255)')
module = self.convertFromDB(row[3], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[4], 'str', 'varchar(255)')
value = self.convertFromDB(row[5], 'str', 'varchar(8191)')
vistrail = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
vistrailVariable = DBVistrailVariable(uuid=uuid,
package=package,
module=module,
namespace=namespace,
value=value,
name=name)
vistrailVariable.db_vistrail = vistrail
vistrailVariable.db_entity_id = entity_id
vistrailVariable.db_entity_type = entity_type
vistrailVariable.is_dirty = False
res[('vistrailVariable', name)] = vistrailVariable
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['name', 'uuid', 'package', 'module', 'namespace', 'value', 'parent_id', 'entity_id', 'entity_type']
table = 'vistrail_variable'
whereMap = global_props
orderBy = 'name'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
name = self.convertFromDB(row[0], 'str', 'varchar(255)')
uuid = self.convertFromDB(row[1], 'str', 'char(36)')
package = self.convertFromDB(row[2], 'str', 'varchar(255)')
module = self.convertFromDB(row[3], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[4], 'str', 'varchar(255)')
value = self.convertFromDB(row[5], 'str', 'varchar(8191)')
vistrail = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
vistrailVariable = DBVistrailVariable(uuid=uuid,
package=package,
module=module,
namespace=namespace,
value=value,
name=name)
vistrailVariable.db_vistrail = vistrail
vistrailVariable.db_entity_id = entity_id
vistrailVariable.db_entity_type = entity_type
vistrailVariable.is_dirty = False
res[('vistrailVariable', name)] = vistrailVariable
return res
def from_sql_fast(self, obj, all_objects):
if ('vistrail', obj.db_vistrail) in all_objects:
p = all_objects[('vistrail', obj.db_vistrail)]
p.db_add_vistrailVariable(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['name', 'uuid', 'package', 'module', 'namespace', 'value', 'parent_id', 'entity_id', 'entity_type']
table = 'vistrail_variable'
whereMap = {}
whereMap.update(global_props)
if obj.db_name is not None:
keyStr = self.convertToDB(obj.db_name, 'str', 'varchar(255)')
whereMap['name'] = keyStr
columnMap = {}
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_uuid') and obj.db_uuid is not None:
columnMap['uuid'] = \
self.convertToDB(obj.db_uuid, 'str', 'char(36)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(255)')
if hasattr(obj, 'db_module') and obj.db_module is not None:
columnMap['module'] = \
self.convertToDB(obj.db_module, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['name', 'uuid', 'package', 'module', 'namespace', 'value', 'parent_id', 'entity_id', 'entity_type']
table = 'vistrail_variable'
whereMap = {}
whereMap.update(global_props)
if obj.db_name is not None:
keyStr = self.convertToDB(obj.db_name, 'str', 'varchar(255)')
whereMap['name'] = keyStr
columnMap = {}
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_uuid') and obj.db_uuid is not None:
columnMap['uuid'] = \
self.convertToDB(obj.db_uuid, 'str', 'char(36)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(255)')
if hasattr(obj, 'db_module') and obj.db_module is not None:
columnMap['module'] = \
self.convertToDB(obj.db_module, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'vistrail_variable'
whereMap = {}
whereMap.update(global_props)
if obj.db_name is not None:
keyStr = self.convertToDB(obj.db_name, 'str', 'varchar(255)')
whereMap['name'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
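# Note: unlike the integer-keyed tables above, vistrail_variable is keyed on
# its `name` column; whereMap, ORDER BY, and the result-dict key all use the
# variable name. A hypothetical delete, reusing `db`, `props`, and
# `dao_list` from the earlier sketch:
#
#     dao = DBVistrailVariableSQLDAOBase(dao_list)
#     var = DBVistrailVariable(uuid=None, package=None, module=None,
#                              namespace=None, value=None, name='resolution')
#     dao.delete_sql_column(db, var, props)   # DELETE ... WHERE name = ...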
class DBModuleDescriptorSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'module_descriptor'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
table = 'module_descriptor'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
package = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
base_descriptor_id = self.convertFromDB(row[6], 'long', 'int')
            # parent package id (the parent_id column); renamed from
            # `package`, which clobbered the name string read from row[2]
            parent = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
module_descriptor = DBModuleDescriptor(name=name,
package=package,
namespace=namespace,
package_version=package_version,
version=version,
base_descriptor_id=base_descriptor_id,
id=id)
            module_descriptor.db_package = parent
module_descriptor.db_entity_id = entity_id
module_descriptor.db_entity_type = entity_type
module_descriptor.is_dirty = False
res[('module_descriptor', id)] = module_descriptor
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
table = 'module_descriptor'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
package = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package_version = self.convertFromDB(row[4], 'str', 'varchar(255)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
base_descriptor_id = self.convertFromDB(row[6], 'long', 'int')
            # parent package id (the parent_id column); renamed from
            # `package`, which clobbered the name string read from row[2]
            parent = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
module_descriptor = DBModuleDescriptor(name=name,
package=package,
namespace=namespace,
package_version=package_version,
version=version,
base_descriptor_id=base_descriptor_id,
id=id)
            module_descriptor.db_package = parent
module_descriptor.db_entity_id = entity_id
module_descriptor.db_entity_type = entity_type
module_descriptor.is_dirty = False
res[('module_descriptor', id)] = module_descriptor
return res
def from_sql_fast(self, obj, all_objects):
if ('package', obj.db_package) in all_objects:
p = all_objects[('package', obj.db_package)]
p.db_add_module_descriptor(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
table = 'module_descriptor'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package_version') and obj.db_package_version is not None:
columnMap['package_version'] = \
self.convertToDB(obj.db_package_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_base_descriptor_id') and obj.db_base_descriptor_id is not None:
columnMap['base_descriptor_id'] = \
self.convertToDB(obj.db_base_descriptor_id, 'long', 'int')
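        # NB: db_package is written twice, as the package name string into
        # 'package' above and as the parent package id into 'parent_id'
        # below; a quirk of the generated schema mapping.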
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_package, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'package', 'namespace', 'package_version', 'version', 'base_descriptor_id', 'parent_id', 'entity_id', 'entity_type']
table = 'module_descriptor'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package_version') and obj.db_package_version is not None:
columnMap['package_version'] = \
self.convertToDB(obj.db_package_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_base_descriptor_id') and obj.db_base_descriptor_id is not None:
columnMap['base_descriptor_id'] = \
self.convertToDB(obj.db_base_descriptor_id, 'long', 'int')
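        # NB: as in set_sql_columns, db_package doubles as both the package
        # name ('package') and the parent package id ('parent_id').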
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_package, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_portSpecs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'module_descriptor'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBTagSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'tag'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'tag'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[2], 'long', 'int')
entity_id = self.convertFromDB(row[3], 'long', 'int')
entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
tag = DBTag(name=name,
id=id)
tag.db_vistrail = vistrail
tag.db_entity_id = entity_id
tag.db_entity_type = entity_type
tag.is_dirty = False
res[('tag', id)] = tag
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'tag'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[2], 'long', 'int')
entity_id = self.convertFromDB(row[3], 'long', 'int')
entity_type = self.convertFromDB(row[4], 'str', 'char(16)')
tag = DBTag(name=name,
id=id)
tag.db_vistrail = vistrail
tag.db_entity_id = entity_id
tag.db_entity_type = entity_type
tag.is_dirty = False
res[('tag', id)] = tag
return res
def from_sql_fast(self, obj, all_objects):
if ('vistrail', obj.db_vistrail) in all_objects:
p = all_objects[('vistrail', obj.db_vistrail)]
p.db_add_tag(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'tag'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'parent_id', 'entity_id', 'entity_type']
table = 'tag'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'tag'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBPortSpecItemSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'port_spec_item'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'pos', 'module', 'package', 'namespace', 'label', '_default', '_values', 'entry_type', 'parent_id', 'entity_id', 'entity_type']
table = 'port_spec_item'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
module = self.convertFromDB(row[2], 'str', 'varchar(255)')
package = self.convertFromDB(row[3], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[4], 'str', 'varchar(255)')
label = self.convertFromDB(row[5], 'str', 'varchar(4095)')
default = self.convertFromDB(row[6], 'str', 'varchar(4095)')
values = self.convertFromDB(row[7], 'str', 'mediumtext')
entry_type = self.convertFromDB(row[8], 'str', 'varchar(255)')
portSpec = self.convertFromDB(row[9], 'long', 'int')
entity_id = self.convertFromDB(row[10], 'long', 'int')
entity_type = self.convertFromDB(row[11], 'str', 'char(16)')
portSpecItem = DBPortSpecItem(pos=pos,
module=module,
package=package,
namespace=namespace,
label=label,
default=default,
values=values,
entry_type=entry_type,
id=id)
portSpecItem.db_portSpec = portSpec
portSpecItem.db_entity_id = entity_id
portSpecItem.db_entity_type = entity_type
portSpecItem.is_dirty = False
res[('portSpecItem', id)] = portSpecItem
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'pos', 'module', 'package', 'namespace', 'label', '_default', '_values', 'entry_type', 'parent_id', 'entity_id', 'entity_type']
table = 'port_spec_item'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
pos = self.convertFromDB(row[1], 'long', 'int')
module = self.convertFromDB(row[2], 'str', 'varchar(255)')
package = self.convertFromDB(row[3], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[4], 'str', 'varchar(255)')
label = self.convertFromDB(row[5], 'str', 'varchar(4095)')
default = self.convertFromDB(row[6], 'str', 'varchar(4095)')
values = self.convertFromDB(row[7], 'str', 'mediumtext')
entry_type = self.convertFromDB(row[8], 'str', 'varchar(255)')
portSpec = self.convertFromDB(row[9], 'long', 'int')
entity_id = self.convertFromDB(row[10], 'long', 'int')
entity_type = self.convertFromDB(row[11], 'str', 'char(16)')
portSpecItem = DBPortSpecItem(pos=pos,
module=module,
package=package,
namespace=namespace,
label=label,
default=default,
values=values,
entry_type=entry_type,
id=id)
portSpecItem.db_portSpec = portSpec
portSpecItem.db_entity_id = entity_id
portSpecItem.db_entity_type = entity_type
portSpecItem.is_dirty = False
res[('portSpecItem', id)] = portSpecItem
return res
def from_sql_fast(self, obj, all_objects):
if ('portSpec', obj.db_portSpec) in all_objects:
p = all_objects[('portSpec', obj.db_portSpec)]
p.db_add_portSpecItem(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'pos', 'module', 'package', 'namespace', 'label', '_default', '_values', 'entry_type', 'parent_id', 'entity_id', 'entity_type']
table = 'port_spec_item'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_module') and obj.db_module is not None:
columnMap['module'] = \
self.convertToDB(obj.db_module, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_label') and obj.db_label is not None:
columnMap['label'] = \
self.convertToDB(obj.db_label, 'str', 'varchar(4095)')
if hasattr(obj, 'db_default') and obj.db_default is not None:
columnMap['_default'] = \
self.convertToDB(obj.db_default, 'str', 'varchar(4095)')
if hasattr(obj, 'db_values') and obj.db_values is not None:
columnMap['_values'] = \
self.convertToDB(obj.db_values, 'str', 'mediumtext')
if hasattr(obj, 'db_entry_type') and obj.db_entry_type is not None:
columnMap['entry_type'] = \
self.convertToDB(obj.db_entry_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_portSpec') and obj.db_portSpec is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_portSpec, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'pos', 'module', 'package', 'namespace', 'label', '_default', '_values', 'entry_type', 'parent_id', 'entity_id', 'entity_type']
table = 'port_spec_item'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_module') and obj.db_module is not None:
columnMap['module'] = \
self.convertToDB(obj.db_module, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_label') and obj.db_label is not None:
columnMap['label'] = \
self.convertToDB(obj.db_label, 'str', 'varchar(4095)')
if hasattr(obj, 'db_default') and obj.db_default is not None:
columnMap['_default'] = \
self.convertToDB(obj.db_default, 'str', 'varchar(4095)')
if hasattr(obj, 'db_values') and obj.db_values is not None:
columnMap['_values'] = \
self.convertToDB(obj.db_values, 'str', 'mediumtext')
if hasattr(obj, 'db_entry_type') and obj.db_entry_type is not None:
columnMap['entry_type'] = \
self.convertToDB(obj.db_entry_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_portSpec') and obj.db_portSpec is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_portSpec, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'port_spec_item'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
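# Note: the port_spec_item columns '_default' and '_values' carry a leading
# underscore, presumably to dodge the SQL keywords DEFAULT and VALUES; the
# Python attributes remain db_default and db_values.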
class DBMashupComponentSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'mashup_component'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id', 'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal', 'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq', 'parent', 'alias_id', 'entity_id', 'entity_type']
table = 'mashup_component'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
vtid = self.convertFromDB(row[1], 'long', 'int')
vttype = self.convertFromDB(row[2], 'str', 'varchar(255)')
vtparent_type = self.convertFromDB(row[3], 'str', 'char(32)')
vtparent_id = self.convertFromDB(row[4], 'long', 'int')
vtpos = self.convertFromDB(row[5], 'long', 'int')
vtmid = self.convertFromDB(row[6], 'long', 'int')
pos = self.convertFromDB(row[7], 'long', 'int')
type = self.convertFromDB(row[8], 'str', 'varchar(255)')
val = self.convertFromDB(row[9], 'str', 'mediumtext')
minVal = self.convertFromDB(row[10], 'str', 'varchar(255)')
maxVal = self.convertFromDB(row[11], 'str', 'varchar(255)')
stepSize = self.convertFromDB(row[12], 'str', 'varchar(255)')
strvaluelist = self.convertFromDB(row[13], 'str', 'mediumtext')
widget = self.convertFromDB(row[14], 'str', 'varchar(255)')
seq = self.convertFromDB(row[15], 'int', 'int')
parent = self.convertFromDB(row[16], 'str', 'varchar(255)')
mashup_alias = self.convertFromDB(row[17], 'long', 'int')
entity_id = self.convertFromDB(row[18], 'long', 'int')
entity_type = self.convertFromDB(row[19], 'str', 'char(16)')
mashup_component = DBMashupComponent(vtid=vtid,
vttype=vttype,
vtparent_type=vtparent_type,
vtparent_id=vtparent_id,
vtpos=vtpos,
vtmid=vtmid,
pos=pos,
type=type,
val=val,
minVal=minVal,
maxVal=maxVal,
stepSize=stepSize,
strvaluelist=strvaluelist,
widget=widget,
seq=seq,
parent=parent,
id=id)
mashup_component.db_mashup_alias = mashup_alias
mashup_component.db_entity_id = entity_id
mashup_component.db_entity_type = entity_type
mashup_component.is_dirty = False
res[('mashup_component', id)] = mashup_component
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id', 'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal', 'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq', 'parent', 'alias_id', 'entity_id', 'entity_type']
table = 'mashup_component'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
vtid = self.convertFromDB(row[1], 'long', 'int')
vttype = self.convertFromDB(row[2], 'str', 'varchar(255)')
vtparent_type = self.convertFromDB(row[3], 'str', 'char(32)')
vtparent_id = self.convertFromDB(row[4], 'long', 'int')
vtpos = self.convertFromDB(row[5], 'long', 'int')
vtmid = self.convertFromDB(row[6], 'long', 'int')
pos = self.convertFromDB(row[7], 'long', 'int')
type = self.convertFromDB(row[8], 'str', 'varchar(255)')
val = self.convertFromDB(row[9], 'str', 'mediumtext')
minVal = self.convertFromDB(row[10], 'str', 'varchar(255)')
maxVal = self.convertFromDB(row[11], 'str', 'varchar(255)')
stepSize = self.convertFromDB(row[12], 'str', 'varchar(255)')
strvaluelist = self.convertFromDB(row[13], 'str', 'mediumtext')
widget = self.convertFromDB(row[14], 'str', 'varchar(255)')
seq = self.convertFromDB(row[15], 'int', 'int')
parent = self.convertFromDB(row[16], 'str', 'varchar(255)')
mashup_alias = self.convertFromDB(row[17], 'long', 'int')
entity_id = self.convertFromDB(row[18], 'long', 'int')
entity_type = self.convertFromDB(row[19], 'str', 'char(16)')
mashup_component = DBMashupComponent(vtid=vtid,
vttype=vttype,
vtparent_type=vtparent_type,
vtparent_id=vtparent_id,
vtpos=vtpos,
vtmid=vtmid,
pos=pos,
type=type,
val=val,
minVal=minVal,
maxVal=maxVal,
stepSize=stepSize,
strvaluelist=strvaluelist,
widget=widget,
seq=seq,
parent=parent,
id=id)
mashup_component.db_mashup_alias = mashup_alias
mashup_component.db_entity_id = entity_id
mashup_component.db_entity_type = entity_type
mashup_component.is_dirty = False
res[('mashup_component', id)] = mashup_component
return res
def from_sql_fast(self, obj, all_objects):
if ('mashup_alias', obj.db_mashup_alias) in all_objects:
p = all_objects[('mashup_alias', obj.db_mashup_alias)]
p.db_add_component(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id', 'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal', 'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq', 'parent', 'alias_id', 'entity_id', 'entity_type']
table = 'mashup_component'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_vtid') and obj.db_vtid is not None:
columnMap['vtid'] = \
self.convertToDB(obj.db_vtid, 'long', 'int')
if hasattr(obj, 'db_vttype') and obj.db_vttype is not None:
columnMap['vttype'] = \
self.convertToDB(obj.db_vttype, 'str', 'varchar(255)')
if hasattr(obj, 'db_vtparent_type') and obj.db_vtparent_type is not None:
columnMap['vtparent_type'] = \
self.convertToDB(obj.db_vtparent_type, 'str', 'char(32)')
if hasattr(obj, 'db_vtparent_id') and obj.db_vtparent_id is not None:
columnMap['vtparent_id'] = \
self.convertToDB(obj.db_vtparent_id, 'long', 'int')
if hasattr(obj, 'db_vtpos') and obj.db_vtpos is not None:
columnMap['vtpos'] = \
self.convertToDB(obj.db_vtpos, 'long', 'int')
if hasattr(obj, 'db_vtmid') and obj.db_vtmid is not None:
columnMap['vtmid'] = \
self.convertToDB(obj.db_vtmid, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_val') and obj.db_val is not None:
columnMap['val'] = \
self.convertToDB(obj.db_val, 'str', 'mediumtext')
if hasattr(obj, 'db_minVal') and obj.db_minVal is not None:
columnMap['minVal'] = \
self.convertToDB(obj.db_minVal, 'str', 'varchar(255)')
if hasattr(obj, 'db_maxVal') and obj.db_maxVal is not None:
columnMap['maxVal'] = \
self.convertToDB(obj.db_maxVal, 'str', 'varchar(255)')
if hasattr(obj, 'db_stepSize') and obj.db_stepSize is not None:
columnMap['stepSize'] = \
self.convertToDB(obj.db_stepSize, 'str', 'varchar(255)')
if hasattr(obj, 'db_strvaluelist') and obj.db_strvaluelist is not None:
columnMap['strvaluelist'] = \
self.convertToDB(obj.db_strvaluelist, 'str', 'mediumtext')
if hasattr(obj, 'db_widget') and obj.db_widget is not None:
columnMap['widget'] = \
self.convertToDB(obj.db_widget, 'str', 'varchar(255)')
if hasattr(obj, 'db_seq') and obj.db_seq is not None:
columnMap['seq'] = \
self.convertToDB(obj.db_seq, 'int', 'int')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent'] = \
self.convertToDB(obj.db_parent, 'str', 'varchar(255)')
if hasattr(obj, 'db_mashup_alias') and obj.db_mashup_alias is not None:
columnMap['alias_id'] = \
self.convertToDB(obj.db_mashup_alias, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'vtid', 'vttype', 'vtparent_type', 'vtparent_id', 'vtpos', 'vtmid', 'pos', 'type', 'val', 'minVal', 'maxVal', 'stepSize', 'strvaluelist', 'widget', 'seq', 'parent', 'alias_id', 'entity_id', 'entity_type']
table = 'mashup_component'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_vtid') and obj.db_vtid is not None:
columnMap['vtid'] = \
self.convertToDB(obj.db_vtid, 'long', 'int')
if hasattr(obj, 'db_vttype') and obj.db_vttype is not None:
columnMap['vttype'] = \
self.convertToDB(obj.db_vttype, 'str', 'varchar(255)')
if hasattr(obj, 'db_vtparent_type') and obj.db_vtparent_type is not None:
columnMap['vtparent_type'] = \
self.convertToDB(obj.db_vtparent_type, 'str', 'char(32)')
if hasattr(obj, 'db_vtparent_id') and obj.db_vtparent_id is not None:
columnMap['vtparent_id'] = \
self.convertToDB(obj.db_vtparent_id, 'long', 'int')
if hasattr(obj, 'db_vtpos') and obj.db_vtpos is not None:
columnMap['vtpos'] = \
self.convertToDB(obj.db_vtpos, 'long', 'int')
if hasattr(obj, 'db_vtmid') and obj.db_vtmid is not None:
columnMap['vtmid'] = \
self.convertToDB(obj.db_vtmid, 'long', 'int')
if hasattr(obj, 'db_pos') and obj.db_pos is not None:
columnMap['pos'] = \
self.convertToDB(obj.db_pos, 'long', 'int')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_val') and obj.db_val is not None:
columnMap['val'] = \
self.convertToDB(obj.db_val, 'str', 'mediumtext')
if hasattr(obj, 'db_minVal') and obj.db_minVal is not None:
columnMap['minVal'] = \
self.convertToDB(obj.db_minVal, 'str', 'varchar(255)')
if hasattr(obj, 'db_maxVal') and obj.db_maxVal is not None:
columnMap['maxVal'] = \
self.convertToDB(obj.db_maxVal, 'str', 'varchar(255)')
if hasattr(obj, 'db_stepSize') and obj.db_stepSize is not None:
columnMap['stepSize'] = \
self.convertToDB(obj.db_stepSize, 'str', 'varchar(255)')
if hasattr(obj, 'db_strvaluelist') and obj.db_strvaluelist is not None:
columnMap['strvaluelist'] = \
self.convertToDB(obj.db_strvaluelist, 'str', 'mediumtext')
if hasattr(obj, 'db_widget') and obj.db_widget is not None:
columnMap['widget'] = \
self.convertToDB(obj.db_widget, 'str', 'varchar(255)')
if hasattr(obj, 'db_seq') and obj.db_seq is not None:
columnMap['seq'] = \
self.convertToDB(obj.db_seq, 'int', 'int')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent'] = \
self.convertToDB(obj.db_parent, 'str', 'varchar(255)')
if hasattr(obj, 'db_mashup_alias') and obj.db_mashup_alias is not None:
columnMap['alias_id'] = \
self.convertToDB(obj.db_mashup_alias, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'mashup_component'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
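# DAO for the 'mashup' table: maps DBMashup objects to and from SQL rows
# and builds the corresponding SELECT/INSERT/UPDATE/DELETE statements.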
class DBMashupSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'mashup'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'name', 'version', 'type', 'vtid', 'layout', 'geometry', 'has_seq', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
version = self.convertFromDB(row[2], 'long', 'int')
type = self.convertFromDB(row[3], 'str', 'varchar(255)')
vtid = self.convertFromDB(row[4], 'long', 'int')
layout = self.convertFromDB(row[5], 'str', 'mediumtext')
geometry = self.convertFromDB(row[6], 'str', 'mediumtext')
has_seq = self.convertFromDB(row[7], 'int', 'int')
parent = self.convertFromDB(row[8], 'long', 'int')
entity_id = self.convertFromDB(row[9], 'long', 'int')
entity_type = self.convertFromDB(row[10], 'str', 'char(16)')
mashup = DBMashup(name=name,
version=version,
type=type,
vtid=vtid,
layout=layout,
geometry=geometry,
has_seq=has_seq,
id=id)
mashup.db_parent = parent
mashup.db_entity_id = entity_id
mashup.db_entity_type = entity_type
mashup.is_dirty = False
res[('mashup', id)] = mashup
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'name', 'version', 'type', 'vtid', 'layout', 'geometry', 'has_seq', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
version = self.convertFromDB(row[2], 'long', 'int')
type = self.convertFromDB(row[3], 'str', 'varchar(255)')
vtid = self.convertFromDB(row[4], 'long', 'int')
layout = self.convertFromDB(row[5], 'str', 'mediumtext')
geometry = self.convertFromDB(row[6], 'str', 'mediumtext')
has_seq = self.convertFromDB(row[7], 'int', 'int')
parent = self.convertFromDB(row[8], 'long', 'int')
entity_id = self.convertFromDB(row[9], 'long', 'int')
entity_type = self.convertFromDB(row[10], 'str', 'char(16)')
mashup = DBMashup(name=name,
version=version,
type=type,
vtid=vtid,
layout=layout,
geometry=geometry,
has_seq=has_seq,
id=id)
mashup.db_parent = parent
mashup.db_entity_id = entity_id
mashup.db_entity_type = entity_type
mashup.is_dirty = False
res[('mashup', id)] = mashup
return res
def from_sql_fast(self, obj, all_objects):
if ('mashup_action', obj.db_parent) in all_objects:
p = all_objects[('mashup_action', obj.db_parent)]
p.db_add_mashup(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'version', 'type', 'vtid', 'layout', 'geometry', 'has_seq', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'long', 'int')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_vtid') and obj.db_vtid is not None:
columnMap['vtid'] = \
self.convertToDB(obj.db_vtid, 'long', 'int')
if hasattr(obj, 'db_layout') and obj.db_layout is not None:
columnMap['layout'] = \
self.convertToDB(obj.db_layout, 'str', 'mediumtext')
if hasattr(obj, 'db_geometry') and obj.db_geometry is not None:
columnMap['geometry'] = \
self.convertToDB(obj.db_geometry, 'str', 'mediumtext')
if hasattr(obj, 'db_has_seq') and obj.db_has_seq is not None:
columnMap['has_seq'] = \
self.convertToDB(obj.db_has_seq, 'int', 'int')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'version', 'type', 'vtid', 'layout', 'geometry', 'has_seq', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'long', 'int')
if hasattr(obj, 'db_type') and obj.db_type is not None:
columnMap['type'] = \
self.convertToDB(obj.db_type, 'str', 'varchar(255)')
if hasattr(obj, 'db_vtid') and obj.db_vtid is not None:
columnMap['vtid'] = \
self.convertToDB(obj.db_vtid, 'long', 'int')
if hasattr(obj, 'db_layout') and obj.db_layout is not None:
columnMap['layout'] = \
self.convertToDB(obj.db_layout, 'str', 'mediumtext')
if hasattr(obj, 'db_geometry') and obj.db_geometry is not None:
columnMap['geometry'] = \
self.convertToDB(obj.db_geometry, 'str', 'mediumtext')
if hasattr(obj, 'db_has_seq') and obj.db_has_seq is not None:
columnMap['has_seq'] = \
self.convertToDB(obj.db_has_seq, 'int', 'int')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_aliases:
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'mashup'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
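# DAO for the 'machine' table: persists DBMachine records (host name, OS,
# architecture, processor, RAM) attached to a workflow execution log.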
class DBMachineSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'machine'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'name', 'os', 'architecture', 'processor', 'ram', 'vt_id', 'log_id', 'entity_id', 'entity_type']
table = 'machine'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
os = self.convertFromDB(row[2], 'str', 'varchar(255)')
architecture = self.convertFromDB(row[3], 'str', 'varchar(255)')
processor = self.convertFromDB(row[4], 'str', 'varchar(255)')
ram = self.convertFromDB(row[5], 'int', 'bigint')
vistrailId = self.convertFromDB(row[6], 'long', 'int')
workflow_exec = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
machine = DBMachine(name=name,
os=os,
architecture=architecture,
processor=processor,
ram=ram,
id=id)
machine.db_vistrailId = vistrailId
machine.db_workflow_exec = workflow_exec
machine.db_entity_id = entity_id
machine.db_entity_type = entity_type
machine.is_dirty = False
res[('machine', id)] = machine
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'name', 'os', 'architecture', 'processor', 'ram', 'vt_id', 'log_id', 'entity_id', 'entity_type']
table = 'machine'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'varchar(255)')
os = self.convertFromDB(row[2], 'str', 'varchar(255)')
architecture = self.convertFromDB(row[3], 'str', 'varchar(255)')
processor = self.convertFromDB(row[4], 'str', 'varchar(255)')
ram = self.convertFromDB(row[5], 'int', 'bigint')
vistrailId = self.convertFromDB(row[6], 'long', 'int')
workflow_exec = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
machine = DBMachine(name=name,
os=os,
architecture=architecture,
processor=processor,
ram=ram,
id=id)
machine.db_vistrailId = vistrailId
machine.db_workflow_exec = workflow_exec
machine.db_entity_id = entity_id
machine.db_entity_type = entity_type
machine.is_dirty = False
res[('machine', id)] = machine
return res
def from_sql_fast(self, obj, all_objects):
if ('workflow_exec', obj.db_workflow_exec) in all_objects:
p = all_objects[('workflow_exec', obj.db_workflow_exec)]
p.db_add_machine(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'os', 'architecture', 'processor', 'ram', 'vt_id', 'log_id', 'entity_id', 'entity_type']
table = 'machine'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_os') and obj.db_os is not None:
columnMap['os'] = \
self.convertToDB(obj.db_os, 'str', 'varchar(255)')
if hasattr(obj, 'db_architecture') and obj.db_architecture is not None:
columnMap['architecture'] = \
self.convertToDB(obj.db_architecture, 'str', 'varchar(255)')
if hasattr(obj, 'db_processor') and obj.db_processor is not None:
columnMap['processor'] = \
self.convertToDB(obj.db_processor, 'str', 'varchar(255)')
if hasattr(obj, 'db_ram') and obj.db_ram is not None:
columnMap['ram'] = \
self.convertToDB(obj.db_ram, 'int', 'bigint')
if hasattr(obj, 'db_vistrailId') and obj.db_vistrailId is not None:
columnMap['vt_id'] = \
self.convertToDB(obj.db_vistrailId, 'long', 'int')
if hasattr(obj, 'db_workflow_exec') and obj.db_workflow_exec is not None:
columnMap['log_id'] = \
self.convertToDB(obj.db_workflow_exec, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'os', 'architecture', 'processor', 'ram', 'vt_id', 'log_id', 'entity_id', 'entity_type']
table = 'machine'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_os') and obj.db_os is not None:
columnMap['os'] = \
self.convertToDB(obj.db_os, 'str', 'varchar(255)')
if hasattr(obj, 'db_architecture') and obj.db_architecture is not None:
columnMap['architecture'] = \
self.convertToDB(obj.db_architecture, 'str', 'varchar(255)')
if hasattr(obj, 'db_processor') and obj.db_processor is not None:
columnMap['processor'] = \
self.convertToDB(obj.db_processor, 'str', 'varchar(255)')
if hasattr(obj, 'db_ram') and obj.db_ram is not None:
columnMap['ram'] = \
self.convertToDB(obj.db_ram, 'int', 'bigint')
if hasattr(obj, 'db_vistrailId') and obj.db_vistrailId is not None:
columnMap['vt_id'] = \
self.convertToDB(obj.db_vistrailId, 'long', 'int')
if hasattr(obj, 'db_workflow_exec') and obj.db_workflow_exec is not None:
columnMap['log_id'] = \
self.convertToDB(obj.db_workflow_exec, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'machine'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
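# DAO for the 'other' table: persists DBOther key/value pairs; the key is
# stored in the 'okey' column (presumably to avoid SQL's reserved word KEY).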
class DBOtherSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'other'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'okey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'other'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
other = DBOther(key=key,
value=value,
id=id)
other.db_parentType = parentType
other.db_entity_id = entity_id
other.db_entity_type = entity_type
other.db_parent = parent
other.is_dirty = False
res[('other', id)] = other
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'okey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'other'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
other = DBOther(key=key,
value=value,
id=id)
other.db_parentType = parentType
other.db_entity_id = entity_id
other.db_entity_type = entity_type
other.db_parent = parent
other.is_dirty = False
res[('other', id)] = other
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_other(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'okey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'other'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['okey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'okey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'other'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['okey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'other'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
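# DAO for the 'abstraction' table: persists DBAbstraction modules (name,
# namespace, package, version) and re-links them to their parent workflow
# or add/change operation on load.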
class DBAbstractionSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'abstraction'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'internal_version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'abstraction'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
internal_version = self.convertFromDB(row[6], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[7], 'str', 'char(32)')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
parent = self.convertFromDB(row[10], 'long', 'long')
abstraction = DBAbstraction(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
internal_version=internal_version,
id=id)
abstraction.db_parentType = parentType
abstraction.db_entity_id = entity_id
abstraction.db_entity_type = entity_type
abstraction.db_parent = parent
abstraction.is_dirty = False
res[('abstraction', id)] = abstraction
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'internal_version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'abstraction'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
cache = self.convertFromDB(row[1], 'int', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
namespace = self.convertFromDB(row[3], 'str', 'varchar(255)')
package = self.convertFromDB(row[4], 'str', 'varchar(511)')
version = self.convertFromDB(row[5], 'str', 'varchar(255)')
internal_version = self.convertFromDB(row[6], 'str', 'varchar(255)')
parentType = self.convertFromDB(row[7], 'str', 'char(32)')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
parent = self.convertFromDB(row[10], 'long', 'long')
abstraction = DBAbstraction(cache=cache,
name=name,
namespace=namespace,
package=package,
version=version,
internal_version=internal_version,
id=id)
abstraction.db_parentType = parentType
abstraction.db_entity_id = entity_id
abstraction.db_entity_type = entity_type
abstraction.db_parent = parent
abstraction.is_dirty = False
res[('abstraction', id)] = abstraction
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_module(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'internal_version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'abstraction'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_internal_version') and obj.db_internal_version is not None:
columnMap['internal_version'] = \
self.convertToDB(obj.db_internal_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'cache', 'name', 'namespace', 'package', 'version', 'internal_version', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'abstraction'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_cache') and obj.db_cache is not None:
columnMap['cache'] = \
self.convertToDB(obj.db_cache, 'int', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_namespace') and obj.db_namespace is not None:
columnMap['namespace'] = \
self.convertToDB(obj.db_namespace, 'str', 'varchar(255)')
if hasattr(obj, 'db_package') and obj.db_package is not None:
columnMap['package'] = \
self.convertToDB(obj.db_package, 'str', 'varchar(511)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_internal_version') and obj.db_internal_version is not None:
columnMap['internal_version'] = \
self.convertToDB(obj.db_internal_version, 'str', 'varchar(255)')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
if obj.db_location is not None:
child = obj.db_location
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_functions:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_controlParameters:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'abstraction'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
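# DAO for the 'mashuptrail' table. Unlike most tables here, a mashuptrail is
# a top-level entity: its id doubles as the entity_id that scopes its child
# rows, so inserts feed the generated id back into global_props.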
class DBMashuptrailSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'mashuptrail'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'name', 'version', 'vt_version', 'last_modified', 'entity_type']
table = 'mashuptrail'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'char(36)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
vtVersion = self.convertFromDB(row[3], 'long', 'int')
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
mashuptrail = DBMashuptrail(name=name,
version=version,
vtVersion=vtVersion,
last_modified=last_modified,
id=id)
mashuptrail.db_entity_type = entity_type
mashuptrail.is_dirty = False
res[('mashuptrail', id)] = mashuptrail
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'name', 'version', 'vt_version', 'last_modified', 'entity_type']
table = 'mashuptrail'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
name = self.convertFromDB(row[1], 'str', 'char(36)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
vtVersion = self.convertFromDB(row[3], 'long', 'int')
last_modified = self.convertFromDB(row[4], 'datetime', 'datetime')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
mashuptrail = DBMashuptrail(name=name,
version=version,
vtVersion=vtVersion,
last_modified=last_modified,
id=id)
mashuptrail.db_entity_type = entity_type
mashuptrail.is_dirty = False
res[('mashuptrail', id)] = mashuptrail
return res
def from_sql_fast(self, obj, all_objects):
pass
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'name', 'version', 'vt_version', 'last_modified', 'entity_type']
table = 'mashuptrail'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'char(36)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_vtVersion') and obj.db_vtVersion is not None:
columnMap['vt_version'] = \
self.convertToDB(obj.db_vtVersion, 'long', 'int')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
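        # A freshly inserted mashuptrail takes its id from the INSERT and
        # publishes it as the entity_id that scopes the trail's child rows.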
if obj.db_id is None:
obj.db_id = lastId
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'name', 'version', 'vt_version', 'last_modified', 'entity_type']
table = 'mashuptrail'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'char(36)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_vtVersion') and obj.db_vtVersion is not None:
columnMap['vt_version'] = \
self.convertToDB(obj.db_vtVersion, 'long', 'int')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_actions:
child.db_mashuptrail = obj.db_id
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_actionAnnotations:
child.db_mashuptrail = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'mashuptrail'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
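# DAO for the 'registry' table. Like mashuptrail, a registry is a top-level
# entity: its id and entity_type are written back into global_props so that
# child package rows are scoped to it.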
class DBRegistrySQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'registry'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified']
table = 'registry'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
root_descriptor_id = self.convertFromDB(row[3], 'long', 'int')
name = self.convertFromDB(row[4], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[5], 'datetime', 'datetime')
registry = DBRegistry(entity_type=entity_type,
version=version,
root_descriptor_id=root_descriptor_id,
name=name,
last_modified=last_modified,
id=id)
registry.is_dirty = False
res[('registry', id)] = registry
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified']
table = 'registry'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
global_props['entity_id'] = self.convertToDB(id, 'long', 'int')
entity_type = self.convertFromDB(row[1], 'str', 'char(16)')
global_props['entity_type'] = self.convertToDB(entity_type, 'str', 'char(16)')
version = self.convertFromDB(row[2], 'str', 'char(16)')
root_descriptor_id = self.convertFromDB(row[3], 'long', 'int')
name = self.convertFromDB(row[4], 'str', 'varchar(255)')
last_modified = self.convertFromDB(row[5], 'datetime', 'datetime')
registry = DBRegistry(entity_type=entity_type,
version=version,
root_descriptor_id=root_descriptor_id,
name=name,
last_modified=last_modified,
id=id)
registry.is_dirty = False
res[('registry', id)] = registry
return res
def from_sql_fast(self, obj, all_objects):
pass
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified']
table = 'registry'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_root_descriptor_id') and obj.db_root_descriptor_id is not None:
columnMap['root_descriptor_id'] = \
self.convertToDB(obj.db_root_descriptor_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
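        # A freshly inserted registry takes its id from the INSERT and
        # publishes its id/entity_type so child package rows are scoped to it.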
if obj.db_id is None:
obj.db_id = lastId
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'entity_type', 'version', 'root_descriptor_id', 'name', 'last_modified']
table = 'registry'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_version') and obj.db_version is not None:
columnMap['version'] = \
self.convertToDB(obj.db_version, 'str', 'char(16)')
if hasattr(obj, 'db_root_descriptor_id') and obj.db_root_descriptor_id is not None:
columnMap['root_descriptor_id'] = \
self.convertToDB(obj.db_root_descriptor_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_last_modified') and obj.db_last_modified is not None:
columnMap['last_modified'] = \
self.convertToDB(obj.db_last_modified, 'datetime', 'datetime')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
if obj.db_id is None:
obj.db_id = lastId
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
global_props['entity_type'] = self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_id') and obj.db_id is not None:
global_props['entity_id'] = self.convertToDB(obj.db_id, 'long', 'int')
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_packages:
child.db_registry = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'registry'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
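# DAO for the 'annotation' table: persists DBAnnotation key/value pairs (key
# stored as 'akey') and re-attaches them to any of several parent types.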
class DBAnnotationSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'annotation'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'annotation'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'mediumtext')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
annotation = DBAnnotation(key=key,
value=value,
id=id)
annotation.db_parentType = parentType
annotation.db_entity_id = entity_id
annotation.db_entity_type = entity_type
annotation.db_parent = parent
annotation.is_dirty = False
res[('annotation', id)] = annotation
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'annotation'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'mediumtext')
parentType = self.convertFromDB(row[3], 'str', 'char(32)')
entity_id = self.convertFromDB(row[4], 'long', 'int')
entity_type = self.convertFromDB(row[5], 'str', 'char(16)')
parent = self.convertFromDB(row[6], 'long', 'long')
annotation = DBAnnotation(key=key,
value=value,
id=id)
annotation.db_parentType = parentType
annotation.db_entity_id = entity_id
annotation.db_entity_type = entity_type
annotation.db_parent = parent
annotation.is_dirty = False
res[('annotation', id)] = annotation
return res
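    # Re-attach a loaded annotation to its parent, dispatching on the stored
    # parent_type discriminator.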
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'vistrail':
p = all_objects[('vistrail', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'workflow':
p = all_objects[('workflow', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'module':
p = all_objects[('module', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'workflow_exec':
p = all_objects[('workflow_exec', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'module_exec':
p = all_objects[('module_exec', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'group_exec':
p = all_objects[('group_exec', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'add':
p = all_objects[('add', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'change':
p = all_objects[('change', obj.db_parent)]
p.db_add_data(obj)
elif obj.db_parentType == 'action':
p = all_objects[('action', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'abstraction':
p = all_objects[('abstraction', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'mashuptrail':
p = all_objects[('mashuptrail', obj.db_parent)]
p.db_add_annotation(obj)
elif obj.db_parentType == 'group':
p = all_objects[('group', obj.db_parent)]
p.db_add_annotation(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['akey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'mediumtext')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
        self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'akey', 'value', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['akey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'mediumtext')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
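# DAO for the 'parameter_exploration' table: persists DBParameterExploration
# records and links them back to their owning vistrail via 'parent_id'.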
class DBParameterExplorationSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'parameter_exploration'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type']
table = 'parameter_exploration'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
action_id = self.convertFromDB(row[1], 'long', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
date = self.convertFromDB(row[3], 'datetime', 'datetime')
user = self.convertFromDB(row[4], 'str', 'varchar(255)')
dims = self.convertFromDB(row[5], 'str', 'varchar(255)')
layout = self.convertFromDB(row[6], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
parameter_exploration = DBParameterExploration(action_id=action_id,
name=name,
date=date,
user=user,
dims=dims,
layout=layout,
id=id)
parameter_exploration.db_vistrail = vistrail
parameter_exploration.db_entity_id = entity_id
parameter_exploration.db_entity_type = entity_type
parameter_exploration.is_dirty = False
res[('parameter_exploration', id)] = parameter_exploration
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type']
table = 'parameter_exploration'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
action_id = self.convertFromDB(row[1], 'long', 'int')
name = self.convertFromDB(row[2], 'str', 'varchar(255)')
date = self.convertFromDB(row[3], 'datetime', 'datetime')
user = self.convertFromDB(row[4], 'str', 'varchar(255)')
dims = self.convertFromDB(row[5], 'str', 'varchar(255)')
layout = self.convertFromDB(row[6], 'str', 'varchar(255)')
vistrail = self.convertFromDB(row[7], 'long', 'int')
entity_id = self.convertFromDB(row[8], 'long', 'int')
entity_type = self.convertFromDB(row[9], 'str', 'char(16)')
parameter_exploration = DBParameterExploration(action_id=action_id,
name=name,
date=date,
user=user,
dims=dims,
layout=layout,
id=id)
parameter_exploration.db_vistrail = vistrail
parameter_exploration.db_entity_id = entity_id
parameter_exploration.db_entity_type = entity_type
parameter_exploration.is_dirty = False
res[('parameter_exploration', id)] = parameter_exploration
return res
def from_sql_fast(self, obj, all_objects):
if ('vistrail', obj.db_vistrail) in all_objects:
p = all_objects[('vistrail', obj.db_vistrail)]
p.db_add_parameter_exploration(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type']
table = 'parameter_exploration'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_dims') and obj.db_dims is not None:
columnMap['dims'] = \
self.convertToDB(obj.db_dims, 'str', 'varchar(255)')
if hasattr(obj, 'db_layout') and obj.db_layout is not None:
columnMap['layout'] = \
self.convertToDB(obj.db_layout, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'action_id', 'name', 'date', 'user', 'dims', 'layout', 'parent_id', 'entity_id', 'entity_type']
table = 'parameter_exploration'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action_id, 'long', 'int')
if hasattr(obj, 'db_name') and obj.db_name is not None:
columnMap['name'] = \
self.convertToDB(obj.db_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_dims') and obj.db_dims is not None:
columnMap['dims'] = \
self.convertToDB(obj.db_dims, 'str', 'varchar(255)')
if hasattr(obj, 'db_layout') and obj.db_layout is not None:
columnMap['layout'] = \
self.convertToDB(obj.db_layout, 'str', 'varchar(255)')
if hasattr(obj, 'db_vistrail') and obj.db_vistrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_vistrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_functions:
child.db_parameter_exploration = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'parameter_exploration'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBMashupActionAnnotationSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'mashup_action_annotation'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action_annotation'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'varchar(8191)')
action_id = self.convertFromDB(row[3], 'long', 'int')
date = self.convertFromDB(row[4], 'datetime', 'datetime')
user = self.convertFromDB(row[5], 'str', 'varchar(255)')
mashuptrail = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
mashup_actionAnnotation = DBMashupActionAnnotation(key=key,
value=value,
action_id=action_id,
date=date,
user=user,
id=id)
mashup_actionAnnotation.db_mashuptrail = mashuptrail
mashup_actionAnnotation.db_entity_id = entity_id
mashup_actionAnnotation.db_entity_type = entity_type
mashup_actionAnnotation.is_dirty = False
res[('mashup_actionAnnotation', id)] = mashup_actionAnnotation
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action_annotation'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
key = self.convertFromDB(row[1], 'str', 'varchar(255)')
value = self.convertFromDB(row[2], 'str', 'varchar(8191)')
action_id = self.convertFromDB(row[3], 'long', 'int')
date = self.convertFromDB(row[4], 'datetime', 'datetime')
user = self.convertFromDB(row[5], 'str', 'varchar(255)')
mashuptrail = self.convertFromDB(row[6], 'long', 'int')
entity_id = self.convertFromDB(row[7], 'long', 'int')
entity_type = self.convertFromDB(row[8], 'str', 'char(16)')
mashup_actionAnnotation = DBMashupActionAnnotation(key=key,
value=value,
action_id=action_id,
date=date,
user=user,
id=id)
mashup_actionAnnotation.db_mashuptrail = mashuptrail
mashup_actionAnnotation.db_entity_id = entity_id
mashup_actionAnnotation.db_entity_type = entity_type
mashup_actionAnnotation.is_dirty = False
res[('mashup_actionAnnotation', id)] = mashup_actionAnnotation
return res
def from_sql_fast(self, obj, all_objects):
if ('mashuptrail', obj.db_mashuptrail) in all_objects:
p = all_objects[('mashuptrail', obj.db_mashuptrail)]
p.db_add_actionAnnotation(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action_annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['akey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action_id, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_mashuptrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'akey', 'value', 'action_id', 'date', 'user', 'parent_id', 'entity_id', 'entity_type']
table = 'mashup_action_annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_key') and obj.db_key is not None:
columnMap['akey'] = \
self.convertToDB(obj.db_key, 'str', 'varchar(255)')
if hasattr(obj, 'db_value') and obj.db_value is not None:
columnMap['value'] = \
self.convertToDB(obj.db_value, 'str', 'varchar(8191)')
if hasattr(obj, 'db_action_id') and obj.db_action_id is not None:
columnMap['action_id'] = \
self.convertToDB(obj.db_action_id, 'long', 'int')
if hasattr(obj, 'db_date') and obj.db_date is not None:
columnMap['date'] = \
self.convertToDB(obj.db_date, 'datetime', 'datetime')
if hasattr(obj, 'db_user') and obj.db_user is not None:
columnMap['user'] = \
self.convertToDB(obj.db_user, 'str', 'varchar(255)')
if hasattr(obj, 'db_mashuptrail') and obj.db_mashuptrail is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_mashuptrail, 'long', 'int')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
pass
def delete_sql_column(self, db, obj, global_props):
table = 'mashup_action_annotation'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
class DBModuleExecSQLDAOBase(SQLDAO):
def __init__(self, daoList):
self.daoList = daoList
self.table = 'module_exec'
def getDao(self, dao):
return self.daoList[dao]
    def get_sql_columns(self, db, global_props, lock=False):
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module_exec'
whereMap = global_props
orderBy = 'id'
dbCommand = self.createSQLSelect(table, columns, whereMap, orderBy, lock)
data = self.executeSQL(db, dbCommand, True)
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
cached = self.convertFromDB(row[3], 'int', 'int')
module_id = self.convertFromDB(row[4], 'long', 'int')
module_name = self.convertFromDB(row[5], 'str', 'varchar(255)')
completed = self.convertFromDB(row[6], 'int', 'int')
error = self.convertFromDB(row[7], 'str', 'varchar(1023)')
machine_id = self.convertFromDB(row[8], 'long', 'int')
parentType = self.convertFromDB(row[9], 'str', 'char(32)')
entity_id = self.convertFromDB(row[10], 'long', 'int')
entity_type = self.convertFromDB(row[11], 'str', 'char(16)')
parent = self.convertFromDB(row[12], 'long', 'long')
module_exec = DBModuleExec(ts_start=ts_start,
ts_end=ts_end,
cached=cached,
module_id=module_id,
module_name=module_name,
completed=completed,
error=error,
machine_id=machine_id,
id=id)
module_exec.db_parentType = parentType
module_exec.db_entity_id = entity_id
module_exec.db_entity_type = entity_type
module_exec.db_parent = parent
module_exec.is_dirty = False
res[('module_exec', id)] = module_exec
return res
    def get_sql_select(self, db, global_props, lock=False):
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module_exec'
whereMap = global_props
orderBy = 'id'
return self.createSQLSelect(table, columns, whereMap, orderBy, lock)
def process_sql_columns(self, data, global_props):
res = {}
for row in data:
id = self.convertFromDB(row[0], 'long', 'int')
ts_start = self.convertFromDB(row[1], 'datetime', 'datetime')
ts_end = self.convertFromDB(row[2], 'datetime', 'datetime')
cached = self.convertFromDB(row[3], 'int', 'int')
module_id = self.convertFromDB(row[4], 'long', 'int')
module_name = self.convertFromDB(row[5], 'str', 'varchar(255)')
completed = self.convertFromDB(row[6], 'int', 'int')
error = self.convertFromDB(row[7], 'str', 'varchar(1023)')
machine_id = self.convertFromDB(row[8], 'long', 'int')
parentType = self.convertFromDB(row[9], 'str', 'char(32)')
entity_id = self.convertFromDB(row[10], 'long', 'int')
entity_type = self.convertFromDB(row[11], 'str', 'char(16)')
parent = self.convertFromDB(row[12], 'long', 'long')
module_exec = DBModuleExec(ts_start=ts_start,
ts_end=ts_end,
cached=cached,
module_id=module_id,
module_name=module_name,
completed=completed,
error=error,
machine_id=machine_id,
id=id)
module_exec.db_parentType = parentType
module_exec.db_entity_id = entity_id
module_exec.db_entity_type = entity_type
module_exec.db_parent = parent
module_exec.is_dirty = False
res[('module_exec', id)] = module_exec
return res
def from_sql_fast(self, obj, all_objects):
if obj.db_parentType == 'workflow_exec':
p = all_objects[('workflow_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'group_exec':
p = all_objects[('group_exec', obj.db_parent)]
p.db_add_item_exec(obj)
elif obj.db_parentType == 'loop_iteration':
p = all_objects[('loop_iteration', obj.db_parent)]
p.db_add_item_exec(obj)
def set_sql_columns(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_cached') and obj.db_cached is not None:
columnMap['cached'] = \
self.convertToDB(obj.db_cached, 'int', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_module_name') and obj.db_module_name is not None:
columnMap['module_name'] = \
self.convertToDB(obj.db_module_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None:
columnMap['machine_id'] = \
self.convertToDB(obj.db_machine_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
lastId = self.executeSQL(db, dbCommand, False)
def set_sql_command(self, db, obj, global_props, do_copy=True):
if not do_copy and not obj.is_dirty:
return None
columns = ['id', 'ts_start', 'ts_end', 'cached', 'module_id', 'module_name', 'completed', 'error', 'machine_id', 'parent_type', 'entity_id', 'entity_type', 'parent_id']
table = 'module_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
columnMap = {}
if hasattr(obj, 'db_id') and obj.db_id is not None:
columnMap['id'] = \
self.convertToDB(obj.db_id, 'long', 'int')
if hasattr(obj, 'db_ts_start') and obj.db_ts_start is not None:
columnMap['ts_start'] = \
self.convertToDB(obj.db_ts_start, 'datetime', 'datetime')
if hasattr(obj, 'db_ts_end') and obj.db_ts_end is not None:
columnMap['ts_end'] = \
self.convertToDB(obj.db_ts_end, 'datetime', 'datetime')
if hasattr(obj, 'db_cached') and obj.db_cached is not None:
columnMap['cached'] = \
self.convertToDB(obj.db_cached, 'int', 'int')
if hasattr(obj, 'db_module_id') and obj.db_module_id is not None:
columnMap['module_id'] = \
self.convertToDB(obj.db_module_id, 'long', 'int')
if hasattr(obj, 'db_module_name') and obj.db_module_name is not None:
columnMap['module_name'] = \
self.convertToDB(obj.db_module_name, 'str', 'varchar(255)')
if hasattr(obj, 'db_completed') and obj.db_completed is not None:
columnMap['completed'] = \
self.convertToDB(obj.db_completed, 'int', 'int')
if hasattr(obj, 'db_error') and obj.db_error is not None:
columnMap['error'] = \
self.convertToDB(obj.db_error, 'str', 'varchar(1023)')
if hasattr(obj, 'db_machine_id') and obj.db_machine_id is not None:
columnMap['machine_id'] = \
self.convertToDB(obj.db_machine_id, 'long', 'int')
if hasattr(obj, 'db_parentType') and obj.db_parentType is not None:
columnMap['parent_type'] = \
self.convertToDB(obj.db_parentType, 'str', 'char(32)')
if hasattr(obj, 'db_entity_id') and obj.db_entity_id is not None:
columnMap['entity_id'] = \
self.convertToDB(obj.db_entity_id, 'long', 'int')
if hasattr(obj, 'db_entity_type') and obj.db_entity_type is not None:
columnMap['entity_type'] = \
self.convertToDB(obj.db_entity_type, 'str', 'char(16)')
if hasattr(obj, 'db_parent') and obj.db_parent is not None:
columnMap['parent_id'] = \
self.convertToDB(obj.db_parent, 'long', 'long')
columnMap.update(global_props)
if obj.is_new or do_copy:
dbCommand = self.createSQLInsert(table, columnMap)
else:
dbCommand = self.createSQLUpdate(table, columnMap, whereMap)
return dbCommand
def set_sql_process(self, obj, global_props, lastId):
pass
def to_sql_fast(self, obj, do_copy=True):
for child in obj.db_annotations:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
for child in obj.db_loop_execs:
child.db_parentType = obj.vtType
child.db_parent = obj.db_id
def delete_sql_column(self, db, obj, global_props):
table = 'module_exec'
whereMap = {}
whereMap.update(global_props)
if obj.db_id is not None:
keyStr = self.convertToDB(obj.db_id, 'long', 'int')
whereMap['id'] = keyStr
dbCommand = self.createSQLDelete(table, whereMap)
self.executeSQL(db, dbCommand, False)
"""generated automatically by auto_dao.py"""
class SQLDAOListBase(dict):
def __init__(self, daos=None):
if daos is not None:
dict.update(self, daos)
if 'mashup_alias' not in self:
self['mashup_alias'] = DBMashupAliasSQLDAOBase(self)
if 'group' not in self:
self['group'] = DBGroupSQLDAOBase(self)
if 'add' not in self:
self['add'] = DBAddSQLDAOBase(self)
if 'group_exec' not in self:
self['group_exec'] = DBGroupExecSQLDAOBase(self)
if 'parameter' not in self:
self['parameter'] = DBParameterSQLDAOBase(self)
if 'vistrail' not in self:
self['vistrail'] = DBVistrailSQLDAOBase(self)
if 'module' not in self:
self['module'] = DBModuleSQLDAOBase(self)
if 'port' not in self:
self['port'] = DBPortSQLDAOBase(self)
if 'pe_function' not in self:
self['pe_function'] = DBPEFunctionSQLDAOBase(self)
if 'workflow' not in self:
self['workflow'] = DBWorkflowSQLDAOBase(self)
if 'mashup_action' not in self:
self['mashup_action'] = DBMashupActionSQLDAOBase(self)
if 'change' not in self:
self['change'] = DBChangeSQLDAOBase(self)
if 'package' not in self:
self['package'] = DBPackageSQLDAOBase(self)
if 'loop_exec' not in self:
self['loop_exec'] = DBLoopExecSQLDAOBase(self)
if 'connection' not in self:
self['connection'] = DBConnectionSQLDAOBase(self)
if 'action' not in self:
self['action'] = DBActionSQLDAOBase(self)
if 'portSpec' not in self:
self['portSpec'] = DBPortSpecSQLDAOBase(self)
if 'log' not in self:
self['log'] = DBLogSQLDAOBase(self)
if 'loop_iteration' not in self:
self['loop_iteration'] = DBLoopIterationSQLDAOBase(self)
if 'pe_parameter' not in self:
self['pe_parameter'] = DBPEParameterSQLDAOBase(self)
if 'workflow_exec' not in self:
self['workflow_exec'] = DBWorkflowExecSQLDAOBase(self)
if 'location' not in self:
self['location'] = DBLocationSQLDAOBase(self)
if 'function' not in self:
self['function'] = DBFunctionSQLDAOBase(self)
if 'actionAnnotation' not in self:
self['actionAnnotation'] = DBActionAnnotationSQLDAOBase(self)
if 'controlParameter' not in self:
self['controlParameter'] = DBControlParameterSQLDAOBase(self)
if 'plugin_data' not in self:
self['plugin_data'] = DBPluginDataSQLDAOBase(self)
if 'delete' not in self:
self['delete'] = DBDeleteSQLDAOBase(self)
if 'vistrailVariable' not in self:
self['vistrailVariable'] = DBVistrailVariableSQLDAOBase(self)
if 'module_descriptor' not in self:
self['module_descriptor'] = DBModuleDescriptorSQLDAOBase(self)
if 'tag' not in self:
self['tag'] = DBTagSQLDAOBase(self)
if 'portSpecItem' not in self:
self['portSpecItem'] = DBPortSpecItemSQLDAOBase(self)
if 'mashup_component' not in self:
self['mashup_component'] = DBMashupComponentSQLDAOBase(self)
if 'mashup' not in self:
self['mashup'] = DBMashupSQLDAOBase(self)
if 'machine' not in self:
self['machine'] = DBMachineSQLDAOBase(self)
if 'other' not in self:
self['other'] = DBOtherSQLDAOBase(self)
if 'abstraction' not in self:
self['abstraction'] = DBAbstractionSQLDAOBase(self)
if 'mashuptrail' not in self:
self['mashuptrail'] = DBMashuptrailSQLDAOBase(self)
if 'registry' not in self:
self['registry'] = DBRegistrySQLDAOBase(self)
if 'annotation' not in self:
self['annotation'] = DBAnnotationSQLDAOBase(self)
if 'parameter_exploration' not in self:
self['parameter_exploration'] = DBParameterExplorationSQLDAOBase(self)
if 'mashup_actionAnnotation' not in self:
self['mashup_actionAnnotation'] = DBMashupActionAnnotationSQLDAOBase(self)
if 'module_exec' not in self:
self['module_exec'] = DBModuleExecSQLDAOBase(self)
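# A minimal usage sketch, not part of the generated code: the registry wires
# every DAO to the same dict, so cross-references resolved through getDao()
# stay within one registry, and each DAO can emit its SQL without a live
# connection. The db handle (None) and the entity filter below are
# assumptions about the caller's connection layer.
if __name__ == '__main__':
    daos = SQLDAOListBase()
    annotation_dao = daos['annotation']
    # restrict the SELECT to one entity, mirroring how global_props is used
    global_props = {'entity_id': annotation_dao.convertToDB(1, 'long', 'int')}
    print annotation_dao.get_sql_select(None, global_props)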
|
malkoto1/just_cook | refs/heads/master | SQLAlchemy-1.0.4/test/orm/test_default_strategies.py | 29 | from test.orm import _fixtures
from sqlalchemy import testing
from sqlalchemy.orm import mapper, relationship, create_session
from sqlalchemy import util
import sqlalchemy as sa
from sqlalchemy.testing import eq_, assert_raises_message
class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
def _assert_fully_loaded(self, users):
# verify everything loaded, with no additional sql needed
def go():
# comparison with no additional sql
eq_(users, self.static.user_all_result)
# keywords are not part of self.static.user_all_result, so
# verify all the item keywords were loaded, with no more sql.
# 'any' verifies at least some items have keywords; we build
# a list for any([...]) instead of any(...) to prove we've
# iterated all the items with no sql.
f = util.flatten_iterator
assert any([i.keywords for i in
f([o.items for o in f([u.orders for u in users])])])
self.assert_sql_count(testing.db, go, 0)
def _assert_addresses_loaded(self, users):
# verify all the addresses were joined loaded with no more sql
def go():
for u, static in zip(users, self.static.user_all_result):
eq_(u.addresses, static.addresses)
self.assert_sql_count(testing.db, go, 0)
def _downgrade_fixture(self):
users, Keyword, items, order_items, orders, Item, User, \
Address, keywords, item_keywords, Order, addresses = \
self.tables.users, self.classes.Keyword, self.tables.items, \
self.tables.order_items, self.tables.orders, \
self.classes.Item, self.classes.User, self.classes.Address, \
self.tables.keywords, self.tables.item_keywords, \
self.classes.Order, self.tables.addresses
mapper(Address, addresses)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
keywords=relationship(Keyword, secondary=item_keywords,
lazy='subquery',
order_by=item_keywords.c.keyword_id)))
mapper(Order, orders, properties=dict(
items=relationship(Item, secondary=order_items, lazy='subquery',
order_by=order_items.c.item_id)))
mapper(User, users, properties=dict(
addresses=relationship(Address, lazy='joined',
order_by=addresses.c.id),
orders=relationship(Order, lazy='joined',
order_by=orders.c.id)))
return create_session()
def _upgrade_fixture(self):
users, Keyword, items, order_items, orders, Item, User, \
Address, keywords, item_keywords, Order, addresses = \
self.tables.users, self.classes.Keyword, self.tables.items, \
self.tables.order_items, self.tables.orders, \
self.classes.Item, self.classes.User, self.classes.Address, \
self.tables.keywords, self.tables.item_keywords, \
self.classes.Order, self.tables.addresses
mapper(Address, addresses)
mapper(Keyword, keywords)
mapper(Item, items, properties=dict(
keywords=relationship(Keyword, secondary=item_keywords,
lazy='select',
order_by=item_keywords.c.keyword_id)))
mapper(Order, orders, properties=dict(
items=relationship(Item, secondary=order_items, lazy=True,
order_by=order_items.c.item_id)))
mapper(User, users, properties=dict(
addresses=relationship(Address, lazy=True,
order_by=addresses.c.id),
orders=relationship(Order,
order_by=orders.c.id)))
return create_session()
def test_downgrade_baseline(self):
"""Mapper strategy defaults load as expected
(compare to rest of DefaultStrategyOptionsTest downgrade tests)."""
sess = self._downgrade_fixture()
users = []
# test _downgrade_fixture mapper defaults, 3 queries (2 subquery loads).
def go():
users[:] = sess.query(self.classes.User)\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 3)
# all loaded with no additional sql
self._assert_fully_loaded(users)
def test_disable_eagerloads(self):
"""Mapper eager load strategy defaults can be shut off
with enable_eagerloads(False)."""
# While this isn't testing a mapper option, it is included
        # as a baseline reference for how the XYZload('*') options
        # should work, namely, they shouldn't affect later queries
        # (see the other test_select_* tests below)
sess = self._downgrade_fixture()
users = []
# demonstrate that enable_eagerloads loads with only 1 sql
def go():
users[:] = sess.query(self.classes.User)\
.enable_eagerloads(False)\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 1)
# demonstrate that users[0].orders must now be loaded with 3 sql
# (need to lazyload, and 2 subquery: 3 total)
def go():
users[0].orders
self.assert_sql_count(testing.db, go, 3)
def test_last_one_wins(self):
sess = self._downgrade_fixture()
users = []
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.subqueryload('*'))\
.options(sa.orm.joinedload(self.classes.User.addresses))\
.options(sa.orm.lazyload('*'))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 1)
# verify all the addresses were joined loaded (no more sql)
self._assert_addresses_loaded(users)
def test_star_must_be_alone(self):
sess = self._downgrade_fixture()
User = self.classes.User
opt = sa.orm.subqueryload('*', User.addresses)
assert_raises_message(
sa.exc.ArgumentError,
"Wildcard token cannot be followed by another entity",
sess.query(User).options, opt
)
def test_global_star_ignored_no_entities_unbound(self):
sess = self._downgrade_fixture()
User = self.classes.User
opt = sa.orm.lazyload('*')
q = sess.query(User.name).options(opt)
eq_(q.all(), [('jack',), ('ed',), ('fred',), ('chuck',)])
def test_global_star_ignored_no_entities_bound(self):
sess = self._downgrade_fixture()
User = self.classes.User
opt = sa.orm.Load(User).lazyload('*')
q = sess.query(User.name).options(opt)
eq_(q.all(), [('jack',), ('ed',), ('fred',), ('chuck',)])
def test_select_with_joinedload(self):
"""Mapper load strategy defaults can be downgraded with
lazyload('*') option, while explicit joinedload() option
is still honored"""
sess = self._downgrade_fixture()
users = []
# lazyload('*') shuts off 'orders' subquery: only 1 sql
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.lazyload('*'))\
.options(sa.orm.joinedload(self.classes.User.addresses))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 1)
# verify all the addresses were joined loaded (no more sql)
self._assert_addresses_loaded(users)
# users[0] has orders, which need to lazy load, and 2 subquery:
# (same as with test_disable_eagerloads): 3 total sql
def go():
users[0].orders
self.assert_sql_count(testing.db, go, 3)
def test_select_with_subqueryload(self):
"""Mapper load strategy defaults can be downgraded with
lazyload('*') option, while explicit subqueryload() option
is still honored"""
sess = self._downgrade_fixture()
users = []
        # now test lazyload('*') combined with subqueryload():
        # shuts off the 'addresses' load AND the orders.items load: 2 sql expected
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.lazyload('*'))\
.options(sa.orm.subqueryload(self.classes.User.orders))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 2)
# Verify orders have already been loaded: 0 sql
def go():
for u, static in zip(users, self.static.user_all_result):
assert len(u.orders) == len(static.orders)
self.assert_sql_count(testing.db, go, 0)
# Verify lazyload('*') prevented orders.items load
# users[0].orders[0] has 3 items, each with keywords: 2 sql
# ('items' and 'items.keywords' subquery)
def go():
for i in users[0].orders[0].items:
i.keywords
self.assert_sql_count(testing.db, go, 2)
# lastly, make sure they actually loaded properly
eq_(users, self.static.user_all_result)
def test_noload_with_joinedload(self):
"""Mapper load strategy defaults can be downgraded with
noload('*') option, while explicit joinedload() option
is still honored"""
sess = self._downgrade_fixture()
users = []
# test noload('*') shuts off 'orders' subquery, only 1 sql
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.noload('*'))\
.options(sa.orm.joinedload(self.classes.User.addresses))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 1)
# verify all the addresses were joined loaded (no more sql)
self._assert_addresses_loaded(users)
# User.orders should have loaded "noload" (meaning [])
def go():
for u in users:
assert u.orders == []
self.assert_sql_count(testing.db, go, 0)
def test_noload_with_subqueryload(self):
"""Mapper load strategy defaults can be downgraded with
noload('*') option, while explicit subqueryload() option
is still honored"""
sess = self._downgrade_fixture()
users = []
# test noload('*') option combined with subqueryload()
# shuts off 'addresses' load AND orders.items load: 2 sql expected
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.noload('*'))\
.options(sa.orm.subqueryload(self.classes.User.orders))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 2)
def go():
# Verify orders have already been loaded: 0 sql
for u, static in zip(users, self.static.user_all_result):
assert len(u.orders) == len(static.orders)
# Verify noload('*') prevented orders.items load
# and set 'items' to []
for u in users:
for o in u.orders:
assert o.items == []
self.assert_sql_count(testing.db, go, 0)
def test_joined(self):
"""Mapper load strategy defaults can be upgraded with
joinedload('*') option."""
sess = self._upgrade_fixture()
users = []
# test upgrade all to joined: 1 sql
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.joinedload('*'))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 1)
# verify everything loaded, with no additional sql needed
self._assert_fully_loaded(users)
def test_joined_path_wildcards(self):
sess = self._upgrade_fixture()
users = []
# test upgrade all to joined: 1 sql
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.joinedload('.*'))\
.options(sa.orm.joinedload("addresses.*"))\
.options(sa.orm.joinedload("orders.*"))\
.options(sa.orm.joinedload("orders.items.*"))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 1)
self._assert_fully_loaded(users)
def test_joined_with_lazyload(self):
"""Mapper load strategy defaults can be upgraded with
joinedload('*') option, while explicit lazyload() option
is still honored"""
sess = self._upgrade_fixture()
users = []
# test joined all but 'keywords': upgraded to 1 sql
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.lazyload('orders.items.keywords'))\
.options(sa.orm.joinedload('*'))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 1)
# everything (but keywords) loaded ok
# (note self.static.user_all_result contains no keywords)
def go():
eq_(users, self.static.user_all_result)
self.assert_sql_count(testing.db, go, 0)
# verify the items were loaded, while item.keywords were not
def go():
# redundant with last test, but illustrative
users[0].orders[0].items[0]
self.assert_sql_count(testing.db, go, 0)
def go():
users[0].orders[0].items[0].keywords
self.assert_sql_count(testing.db, go, 1)
def test_joined_with_subqueryload(self):
"""Mapper load strategy defaults can be upgraded with
joinedload('*') option, while explicit subqueryload() option
is still honored"""
sess = self._upgrade_fixture()
users = []
# test upgrade all but 'addresses', which is subquery loaded (2 sql)
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.subqueryload(self.classes.User.addresses))\
.options(sa.orm.joinedload('*'))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 2)
# verify everything loaded, with no additional sql needed
self._assert_fully_loaded(users)
def test_subquery(self):
"""Mapper load strategy defaults can be upgraded with
subqueryload('*') option."""
sess = self._upgrade_fixture()
users = []
# test upgrade all to subquery: 1 sql + 4 relationships = 5
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.subqueryload('*'))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 5)
# verify everything loaded, with no additional sql needed
self._assert_fully_loaded(users)
def test_subquery_path_wildcards(self):
sess = self._upgrade_fixture()
users = []
# test upgrade all to subquery: 1 sql + 4 relationships = 5
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.subqueryload('.*'))\
.options(sa.orm.subqueryload('addresses.*'))\
.options(sa.orm.subqueryload('orders.*'))\
.options(sa.orm.subqueryload('orders.items.*'))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 5)
# verify everything loaded, with no additional sql needed
self._assert_fully_loaded(users)
def test_subquery_with_lazyload(self):
"""Mapper load strategy defaults can be upgraded with
subqueryload('*') option, while explicit lazyload() option
is still honored"""
sess = self._upgrade_fixture()
users = []
# test subquery all but 'keywords' (1 sql + 3 relationships = 4)
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.lazyload('orders.items.keywords'))\
.options(sa.orm.subqueryload('*'))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 4)
# no more sql
# (note self.static.user_all_result contains no keywords)
def go():
eq_(users, self.static.user_all_result)
self.assert_sql_count(testing.db, go, 0)
# verify the item.keywords were not loaded
def go():
users[0].orders[0].items[0]
self.assert_sql_count(testing.db, go, 0)
def go():
users[0].orders[0].items[0].keywords
self.assert_sql_count(testing.db, go, 1)
def test_subquery_with_joinedload(self):
"""Mapper load strategy defaults can be upgraded with
subqueryload('*') option, while multiple explicit
joinedload() options are still honored"""
sess = self._upgrade_fixture()
users = []
# test upgrade all but 'addresses' & 'orders', which are joinedloaded
# (1 sql + items + keywords = 3)
def go():
users[:] = sess.query(self.classes.User)\
.options(sa.orm.joinedload(self.classes.User.addresses))\
.options(sa.orm.joinedload(self.classes.User.orders))\
.options(sa.orm.subqueryload('*'))\
.order_by(self.classes.User.id)\
.all()
self.assert_sql_count(testing.db, go, 3)
# verify everything loaded, with no additional sql needed
self._assert_fully_loaded(users)
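# A standalone sketch of the pattern these tests exercise (User stands for
# any mapped class with an 'addresses' relationship; the session is assumed
# to be supplied by the caller): a wildcard option sets the strategy for
# every relationship not named by a more specific option, and the most
# specific (or last-applied, per test_last_one_wins) option wins.
def example_wildcard_options(session, User):
    # everything lazy except User.addresses, which stays joined-loaded
    return session.query(User)\
        .options(sa.orm.lazyload('*'))\
        .options(sa.orm.joinedload(User.addresses))\
        .all()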
|
johankaito/fufuka | refs/heads/master | microblog/flask/venv/lib/python2.7/site-packages/kazoo/recipe/__init__.py | 9480 | #
|
igemsoftware/SYSU-Software2013 | refs/heads/master | project/Python27/Lib/chunk.py | 386 | """Simple class to read IFF chunks.
An IFF chunk (used in formats such as AIFF, TIFF, RMFF (RealMedia File
Format)) has the following structure:
+----------------+
| ID (4 bytes) |
+----------------+
| size (4 bytes) |
+----------------+
| data |
| ... |
+----------------+
The ID is a 4-byte string which identifies the type of chunk.
The size field (a 32-bit value, encoded using big-endian byte order)
gives the size of the whole chunk, including the 8-byte header.
Usually an IFF-type file consists of one or more chunks.  The proposed
usage of the Chunk class defined here is to create an instance at the
start of each chunk and read from it until it reaches the end, after
which a new instance can be created.  At the end of the file, creating
a new instance will fail with an EOFError exception.
Usage:
while True:
try:
chunk = Chunk(file)
except EOFError:
break
chunktype = chunk.getname()
while True:
data = chunk.read(nbytes)
if not data:
            break
# do something with data
The interface is file-like. The implemented methods are:
read, close, seek, tell, isatty.
Extra methods are: skip() (called by close, skips to the end of the chunk),
getname() (returns the name (ID) of the chunk)
The __init__ method has one required argument, a file-like object
(including a chunk instance), and one optional argument, a flag which
specifies whether or not chunks are aligned on 2-byte boundaries. The
default is 1, i.e. aligned.
"""
class Chunk:
def __init__(self, file, align=True, bigendian=True, inclheader=False):
import struct
self.closed = False
self.align = align # whether to align to word (2-byte) boundaries
if bigendian:
strflag = '>'
else:
strflag = '<'
self.file = file
self.chunkname = file.read(4)
if len(self.chunkname) < 4:
raise EOFError
try:
self.chunksize = struct.unpack(strflag+'L', file.read(4))[0]
except struct.error:
raise EOFError
if inclheader:
self.chunksize = self.chunksize - 8 # subtract header
self.size_read = 0
try:
self.offset = self.file.tell()
except (AttributeError, IOError):
self.seekable = False
else:
self.seekable = True
def getname(self):
"""Return the name (ID) of the current chunk."""
return self.chunkname
def getsize(self):
"""Return the size of the current chunk."""
return self.chunksize
def close(self):
if not self.closed:
self.skip()
self.closed = True
def isatty(self):
if self.closed:
raise ValueError, "I/O operation on closed file"
return False
def seek(self, pos, whence=0):
"""Seek to specified position into the chunk.
Default position is 0 (start of chunk).
If the file is not seekable, this will result in an error.
"""
if self.closed:
raise ValueError, "I/O operation on closed file"
if not self.seekable:
raise IOError, "cannot seek"
if whence == 1:
pos = pos + self.size_read
elif whence == 2:
pos = pos + self.chunksize
if pos < 0 or pos > self.chunksize:
raise RuntimeError
self.file.seek(self.offset + pos, 0)
self.size_read = pos
def tell(self):
if self.closed:
raise ValueError, "I/O operation on closed file"
return self.size_read
def read(self, size=-1):
"""Read at most size bytes from the chunk.
If size is omitted or negative, read until the end
of the chunk.
"""
if self.closed:
raise ValueError, "I/O operation on closed file"
if self.size_read >= self.chunksize:
return ''
if size < 0:
size = self.chunksize - self.size_read
if size > self.chunksize - self.size_read:
size = self.chunksize - self.size_read
data = self.file.read(size)
self.size_read = self.size_read + len(data)
if self.size_read == self.chunksize and \
self.align and \
(self.chunksize & 1):
dummy = self.file.read(1)
self.size_read = self.size_read + len(dummy)
return data
def skip(self):
"""Skip the rest of the chunk.
If you are not interested in the contents of the chunk,
this method should be called so that the file points to
the start of the next chunk.
"""
if self.closed:
raise ValueError, "I/O operation on closed file"
if self.seekable:
try:
n = self.chunksize - self.size_read
# maybe fix alignment
if self.align and (self.chunksize & 1):
n = n + 1
self.file.seek(n, 1)
self.size_read = self.size_read + n
return
except IOError:
pass
while self.size_read < self.chunksize:
n = min(8192, self.chunksize - self.size_read)
dummy = self.read(n)
if not dummy:
raise EOFError
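if __name__ == '__main__':
    # Minimal demonstration, not part of the stdlib module: walk the
    # top-level chunks of a big-endian IFF file (e.g. an AIFF file) named
    # on the command line and print each chunk ID with its size.
    import sys
    with open(sys.argv[1], 'rb') as f:
        while True:
            try:
                chunk = Chunk(f)
            except EOFError:
                break
            print chunk.getname(), chunk.getsize()
            chunk.close()  # skip unread data, aligning to the next chunk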
|
pepeportela/edx-platform | refs/heads/master | openedx/core/djangoapps/cache_toolbox/templatetags/cache_toolbox.py | 35 | """
Implementation of custom django template tags for
automatically caching template fragments.
"""
from django import template
from django.core.cache import cache
from django.template import Node, TemplateSyntaxError, Variable
from django.template import resolve_variable
register = template.Library() # pylint: disable=invalid-name
class CacheNode(Node):
"""
Subclass of django's template Node class that
caches the rendered value of a template fragment. This is a
simpler implementation of django.templatetags.cache.CacheNode.
"""
def __init__(self, nodelist, expire_time, key):
self.nodelist = nodelist
self.expire_time = Variable(expire_time)
self.key = key
def render(self, context):
key = resolve_variable(self.key, context)
expire_time = int(self.expire_time.resolve(context))
value = cache.get(key)
if value is None:
value = self.nodelist.render(context)
cache.set(key, value, expire_time)
return value
@register.tag
def cachedeterministic(parser, token):
"""
This will cache the contents of a template fragment for a given amount of
time, just like {% cache .. %} except that the key is deterministic and not
mangled or run through MD5.
Usage::
{% cachedeterministic [expire_time] [key] %}
.. some expensive processing ..
{% endcachedeterministic %}
"""
nodelist = parser.parse(('endcachedeterministic',))
parser.delete_first_token()
tokens = token.contents.split()
if len(tokens) != 3:
raise TemplateSyntaxError(u"'%r' tag requires 2 arguments." % tokens[0])
return CacheNode(nodelist, tokens[1], tokens[2])
class ShowIfCachedNode(Node):
"""
Subclass of django's template Node class that
renders the cached value for the given key, only
if already cached.
"""
def __init__(self, key):
self.key = key
def render(self, context):
key = resolve_variable(self.key, context)
return cache.get(key) or ''
@register.tag
def showifcached(parser, token): # pylint: disable=unused-argument
"""
Show content if it exists in the cache, otherwise display nothing.
The key is entirely deterministic and not mangled or run through MD5 (cf.
{% cache %})
Usage::
{% showifcached [key] %}
"""
tokens = token.contents.split()
if len(tokens) != 2:
raise TemplateSyntaxError(u"'%r' tag requires 1 argument." % tokens[0])
return ShowIfCachedNode(tokens[1])
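# A usage sketch in template form (the 'widget' variable and its cache_key
# attribute are hypothetical): cache an expensive fragment for 300 seconds
# under a deterministic key, then re-display it elsewhere only if it is
# still cached.
#
#     {% cachedeterministic 300 widget.cache_key %}
#         ... expensive rendering ...
#     {% endcachedeterministic %}
#
#     {% showifcached widget.cache_key %}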
|
noironetworks/horizon | refs/heads/master | openstack_dashboard/dashboards/project/overview/urls.py | 14 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import url
from openstack_dashboard.dashboards.project.overview import views
urlpatterns = [
url(r'^$', views.ProjectOverview.as_view(), name='index'),
url(r'^warning$', views.WarningView.as_view(), name='warning'),
]
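# Usage sketch (assumes horizon's standard dashboard/panel URL namespacing;
# the reverse import location depends on the Django version in use):
#
#     from django.core.urlresolvers import reverse
#     reverse('horizon:project:overview:index')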
|
nrwahl2/ansible | refs/heads/devel | lib/ansible/modules/storage/netapp/netapp_e_hostgroup.py | 33 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: netapp_e_hostgroup
version_added: "2.2"
short_description: Manage NetApp Storage Array Host Groups
author: Kevin Hulquest (@hulquest)
description:
- Create, update or destroy host groups on a NetApp E-Series storage array.
options:
api_username:
required: true
description:
- The username to authenticate with the SANtricity WebServices Proxy or embedded REST API.
api_password:
required: true
description:
- The password to authenticate with the SANtricity WebServices Proxy or embedded REST API.
api_url:
required: true
description:
- The url to the SANtricity WebServices Proxy or embedded REST API.
validate_certs:
required: false
default: true
description:
- Should https certificates be validated?
ssid:
required: true
description:
- The ID of the array to manage (as configured on the web services proxy).
state:
required: true
description:
- Whether the specified host group should exist or not.
choices: ['present', 'absent']
name:
required: false
description:
    - The name of the host group to manage. Either this or C(id) must be supplied.
new_name:
required: false
description:
- specify this when you need to update the name of a host group
id:
required: false
description:
- The id number of the host group to manage. Either this or C(name) must be supplied.
  hosts:
required: false
description:
- a list of host names/labels to add to the group
'''
EXAMPLES = '''
- name: Configure Hostgroup
netapp_e_hostgroup:
ssid: "{{ ssid }}"
api_url: "{{ netapp_api_url }}"
api_username: "{{ netapp_api_username }}"
api_password: "{{ netapp_api_password }}"
validate_certs: "{{ netapp_api_validate_certs }}"
    state: present
    name: example_hostgroup
    hosts:
      - HostA
      - HostB
'''
RETURN = '''
clusterRef:
description: The unique identification value for this object. Other objects may use this reference value to refer to the cluster.
returned: always except when state is absent
type: string
sample: "3233343536373839303132333100000000000000"
confirmLUNMappingCreation:
description: If true, indicates that creation of LUN-to-volume mappings should require careful confirmation from the end-user, since such a mapping
will alter the volume access rights of other clusters, in addition to this one.
returned: always
type: boolean
sample: false
hosts:
description: A list of the hosts that are part of the host group after all operations.
returned: always except when state is absent
type: list
sample: ["HostA","HostB"]
id:
description: The id number of the hostgroup
returned: always except when state is absent
type: string
sample: "3233343536373839303132333100000000000000"
isSAControlled:
description: If true, indicates that I/O accesses from this cluster are subject to the storage array's default LUN-to-volume mappings. If false,
indicates that I/O accesses from the cluster are subject to cluster-specific LUN-to-volume mappings.
returned: always except when state is absent
type: boolean
sample: false
label:
description: The user-assigned, descriptive label string for the cluster.
returned: always
type: string
sample: "MyHostGroup"
name:
description: same as label
returned: always except when state is absent
type: string
sample: "MyHostGroup"
protectionInformationCapableAccessMethod:
description: This field is true if the host has a PI capable access method.
returned: always except when state is absent
type: boolean
sample: true
'''
HEADERS = {
"Content-Type": "application/json",
"Accept": "application/json"
}
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible.module_utils._text import to_native
from ansible.module_utils.urls import open_url
def request(url, data=None, headers=None, method='GET', use_proxy=True,
force=False, last_mod_time=None, timeout=10, validate_certs=True,
url_username=None, url_password=None, http_agent=None, force_basic_auth=True, ignore_errors=False):
try:
r = open_url(url=url, data=data, headers=headers, method=method, use_proxy=use_proxy,
force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs,
url_username=url_username, url_password=url_password, http_agent=http_agent,
force_basic_auth=force_basic_auth)
except HTTPError as e:
r = e.fp
try:
raw_data = r.read()
if raw_data:
data = json.loads(raw_data)
else:
raw_data = None
    except Exception:
if ignore_errors:
pass
else:
raise Exception(raw_data)
resp_code = r.getcode()
if resp_code >= 400 and not ignore_errors:
raise Exception(resp_code, data)
else:
return resp_code, data
def group_exists(module, id_type, ident, ssid, api_url, user, pwd):
rc, data = get_hostgroups(module, ssid, api_url, user, pwd)
for group in data:
if group[id_type] == ident:
return True, data
else:
continue
return False, data
def get_hostgroups(module, ssid, api_url, user, pwd):
groups = "storage-systems/%s/host-groups" % ssid
url = api_url + groups
try:
rc, data = request(url, headers=HEADERS, url_username=user, url_password=pwd)
return rc, data
except HTTPError as e:
module.fail_json(msg="Failed to get host groups. Id [%s]. Error [%s]." % (ssid, to_native(e)))
def get_hostref(module, ssid, name, api_url, user, pwd):
all_hosts = 'storage-systems/%s/hosts' % ssid
url = api_url + all_hosts
try:
rc, data = request(url, method='GET', headers=HEADERS, url_username=user, url_password=pwd)
except Exception as e:
module.fail_json(msg="Failed to get hosts. Id [%s]. Error [%s]." % (ssid, to_native(e)))
for host in data:
if host['name'] == name:
return host['hostRef']
else:
continue
module.fail_json(msg="No host with the name %s could be found" % name)
def create_hostgroup(module, ssid, name, api_url, user, pwd, hosts=None):
groups = "storage-systems/%s/host-groups" % ssid
url = api_url + groups
hostrefs = []
if hosts:
for host in hosts:
href = get_hostref(module, ssid, host, api_url, user, pwd)
hostrefs.append(href)
post_data = json.dumps(dict(name=name, hosts=hostrefs))
try:
rc, data = request(url, method='POST', data=post_data, headers=HEADERS, url_username=user, url_password=pwd)
except Exception as e:
module.fail_json(msg="Failed to create host group. Id [%s]. Error [%s]." % (ssid, to_native(e)))
return rc, data
def update_hostgroup(module, ssid, name, api_url, user, pwd, hosts=None, new_name=None):
gid = get_hostgroup_id(module, ssid, name, api_url, user, pwd)
groups = "storage-systems/%s/host-groups/%s" % (ssid, gid)
url = api_url + groups
hostrefs = []
if hosts:
for host in hosts:
href = get_hostref(module, ssid, host, api_url, user, pwd)
hostrefs.append(href)
if new_name:
post_data = json.dumps(dict(name=new_name, hosts=hostrefs))
else:
post_data = json.dumps(dict(hosts=hostrefs))
try:
rc, data = request(url, method='POST', data=post_data, headers=HEADERS, url_username=user, url_password=pwd)
except Exception as e:
module.fail_json(msg="Failed to update host group. Group [%s]. Id [%s]. Error [%s]." % (gid, ssid,
to_native(e)))
return rc, data
def delete_hostgroup(module, ssid, group_id, api_url, user, pwd):
groups = "storage-systems/%s/host-groups/%s" % (ssid, group_id)
url = api_url + groups
# TODO: Loop through hosts, do mapping to href, make new list to pass to data
try:
rc, data = request(url, method='DELETE', headers=HEADERS, url_username=user, url_password=pwd)
except Exception as e:
module.fail_json(msg="Failed to delete host group. Group [%s]. Id [%s]. Error [%s]." % (group_id, ssid, to_native(e)))
return rc, data
def get_hostgroup_id(module, ssid, name, api_url, user, pwd):
all_groups = 'storage-systems/%s/host-groups' % ssid
url = api_url + all_groups
rc, data = request(url, method='GET', headers=HEADERS, url_username=user, url_password=pwd)
for hg in data:
if hg['name'] == name:
return hg['id']
else:
continue
module.fail_json(msg="A hostgroup with the name %s could not be found" % name)
def get_hosts_in_group(module, ssid, group_name, api_url, user, pwd):
all_groups = 'storage-systems/%s/host-groups' % ssid
g_url = api_url + all_groups
try:
g_rc, g_data = request(g_url, method='GET', headers=HEADERS, url_username=user, url_password=pwd)
except Exception as e:
module.fail_json(
msg="Failed in first step getting hosts from group. Group: [%s]. Id [%s]. Error [%s]." % (group_name,
ssid,
to_native(e)))
all_hosts = 'storage-systems/%s/hosts' % ssid
h_url = api_url + all_hosts
try:
h_rc, h_data = request(h_url, method='GET', headers=HEADERS, url_username=user, url_password=pwd)
except Exception as e:
module.fail_json(
msg="Failed in second step getting hosts from group. Group: [%s]. Id [%s]. Error [%s]." % (
group_name,
ssid,
to_native(e)))
hosts_in_group = []
for hg in g_data:
if hg['name'] == group_name:
clusterRef = hg['clusterRef']
for host in h_data:
if host['clusterRef'] == clusterRef:
hosts_in_group.append(host['name'])
return hosts_in_group
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=False),
new_name=dict(required=False),
ssid=dict(required=True),
id=dict(required=False),
state=dict(required=True, choices=['present', 'absent']),
hosts=dict(required=False, type='list'),
api_url=dict(required=True),
api_username=dict(required=True),
            validate_certs=dict(required=False, type='bool', default=True),
api_password=dict(required=True, no_log=True)
),
supports_check_mode=False,
mutually_exclusive=[['name', 'id']],
required_one_of=[['name', 'id']]
)
name = module.params['name']
new_name = module.params['new_name']
ssid = module.params['ssid']
id_num = module.params['id']
state = module.params['state']
hosts = module.params['hosts']
user = module.params['api_username']
pwd = module.params['api_password']
api_url = module.params['api_url']
if not api_url.endswith('/'):
api_url += '/'
if name:
id_type = 'name'
id_key = name
elif id_num:
id_type = 'id'
id_key = id_num
exists, group_data = group_exists(module, id_type, id_key, ssid, api_url, user, pwd)
if state == 'present':
if not exists:
try:
rc, data = create_hostgroup(module, ssid, name, api_url, user, pwd, hosts)
except Exception as e:
module.fail_json(msg="Failed to create a host group. Id [%s]. Error [%s]." % (ssid, to_native(e)))
hosts = get_hosts_in_group(module, ssid, name, api_url, user, pwd)
module.exit_json(changed=True, hosts=hosts, **data)
else:
current_hosts = get_hosts_in_group(module, ssid, name, api_url, user, pwd)
if not current_hosts:
current_hosts = []
if not hosts:
hosts = []
if set(current_hosts) != set(hosts):
try:
rc, data = update_hostgroup(module, ssid, name, api_url, user, pwd, hosts, new_name)
except Exception as e:
module.fail_json(
msg="Failed to update host group. Group: [%s]. Id [%s]. Error [%s]." % (name, ssid, to_native(e)))
module.exit_json(changed=True, hosts=hosts, **data)
else:
for group in group_data:
if group['name'] == name:
module.exit_json(changed=False, hosts=current_hosts, **group)
elif state == 'absent':
if exists:
hg_id = get_hostgroup_id(module, ssid, name, api_url, user, pwd)
try:
rc, data = delete_hostgroup(module, ssid, hg_id, api_url, user, pwd)
except Exception as e:
module.fail_json(
msg="Failed to delete host group. Group: [%s]. Id [%s]. Error [%s]." % (name, ssid, to_native(e)))
module.exit_json(changed=True, msg="Host Group deleted")
else:
module.exit_json(changed=False, msg="Host Group is already absent")
if __name__ == '__main__':
main()
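# For reference, a hypothetical playbook task exercising the argument_spec
# above (the module name is assumed; hosts, credentials and URL are made up):
#
#   - name: Ensure a host group exists
#     netapp_e_hostgroup:
#       ssid: "1"
#       name: "database_hosts"
#       state: present
#       hosts: ["host1", "host2"]
#       api_url: "https://192.168.1.100:8443/devmgr/v2"
#       api_username: "admin"
#       api_password: "secret"
#       validate_certs: no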
|
bob123bob/Sick-Beard | refs/heads/development | sickbeard/clients/requests/__init__.py | 56 | # -*- coding: utf-8 -*-
# __
# /__) _ _ _ _ _/ _
# / ( (- (/ (/ (- _) / _)
# /
"""
requests HTTP library
~~~~~~~~~~~~~~~~~~~~~
Requests is an HTTP library, written in Python, for human beings. Basic GET
usage:
>>> import requests
>>> r = requests.get('http://python.org')
>>> r.status_code
200
>>> 'Python is a programming language' in r.content
True
... or POST:
>>> payload = dict(key1='value1', key2='value2')
>>> r = requests.post("http://httpbin.org/post", data=payload)
>>> print r.text
{
...
"form": {
"key2": "value2",
"key1": "value1"
},
...
}
The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>.
:copyright: (c) 2013 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
"""
__title__ = 'requests'
__version__ = '1.2.0'
__build__ = 0x010200
__author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013 Kenneth Reitz'
from . import utils
from .models import Request, Response, PreparedRequest
from .api import request, get, head, post, patch, put, delete, options
from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
RequestException, Timeout, URLRequired,
TooManyRedirects, HTTPError, ConnectionError
)
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
|
o3project/ryu-oe | refs/heads/master | ryu/services/protocols/bgp/utils/__init__.py | 12133432 | |
be-cloud-be/horizon-addons | refs/heads/9.0 | server/addons/l10n_hr/__openerp__.py | 18 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Author: Goran Kliska
# mail: goran.kliska(AT)slobodni-programi.hr
# Copyright (C) 2011- Slobodni programi d.o.o., Zagreb
# Contributions:
# Tomislav Bošnjaković, Storm Computers d.o.o. :
# - account types
{
"name": "Croatia - Accounting (RRIF 2012)",
"description": """
Croatian localisation.
======================
Author: Goran Kliska, Slobodni programi d.o.o., Zagreb
https://www.slobodni-programi.hr
Contributions:
Tomislav Bošnjaković, Storm Computers: account types
Ivan Vađić, Slobodni programi: account types
Description:
Croatian Chart of Accounts (RRIF ver.2012)
RRIF-ov računski plan za poduzetnike za 2012.
Vrste konta
Kontni plan prema RRIF-u, dorađen u smislu kraćenja naziva i dodavanja analitika
Porezne grupe prema poreznoj prijavi
Porezi PDV obrasca
Ostali porezi
Osnovne fiskalne pozicije
Izvori podataka:
https://www.rrif.hr/dok/preuzimanje/rrif-rp2011.rar
https://www.rrif.hr/dok/preuzimanje/rrif-rp2012.rar
""",
"version": "13.0",
"author": "OpenERP Croatian Community",
'category': 'Localization',
"website": "https://code.launchpad.net/openobject-croatia",
'depends': [
'account',
],
'data': [
'data/account_chart_template.xml',
'data/account.account.type.csv',
'data/account.account.template.csv',
'data/account_chart_tag.xml',
'data/account.tax.template.csv',
'data/fiscal_position_template.xml',
'data/account_chart_template.yml',
],
"demo": [],
'test': [],
"active": False,
"installable": True,
}
|
anudeepsharma/autorest | refs/heads/master | src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyDate/setup.py | 28 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# coding: utf-8
from setuptools import setup, find_packages
NAME = "autorestdatetestservice"
VERSION = "1.0.0"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
REQUIRES = ["msrest>=0.2.0"]
setup(
name=NAME,
version=VERSION,
description="AutoRestDateTestService",
author_email="",
url="",
keywords=["Swagger", "AutoRestDateTestService"],
install_requires=REQUIRES,
packages=find_packages(),
include_package_data=True,
long_description="""\
Test Infrastructure for AutoRest
"""
)
|
noironetworks/neutron | refs/heads/master | neutron/tests/unit/services/logapi/common/test_sg_callback.py | 2 | # Copyright (c) 2018 Fujitsu Limited
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron.services.logapi.common import sg_callback
from neutron.services.logapi.drivers import base as log_driver_base
from neutron.services.logapi.drivers import manager as driver_mgr
from neutron.tests import base
FAKE_DRIVER = None
class FakeDriver(log_driver_base.DriverBase):
@staticmethod
def create():
return FakeDriver(
name='fake_driver',
vif_types=[],
vnic_types=[],
supported_logging_types=['security_group'],
requires_rpc=True
)
def fake_register():
global FAKE_DRIVER
if not FAKE_DRIVER:
FAKE_DRIVER = FakeDriver.create()
driver_mgr.register(resources.SECURITY_GROUP_RULE,
sg_callback.SecurityGroupRuleCallBack)
class TestSecurityGroupRuleCallback(base.BaseTestCase):
def setUp(self):
super(TestSecurityGroupRuleCallback, self).setUp()
self.driver_manager = driver_mgr.LoggingServiceDriverManager()
@mock.patch.object(sg_callback.SecurityGroupRuleCallBack, 'handle_event')
def test_handle_event(self, mock_sg_cb):
fake_register()
self.driver_manager.register_driver(FAKE_DRIVER)
registry.notify(
resources.SECURITY_GROUP_RULE, events.AFTER_CREATE, mock.ANY)
mock_sg_cb.assert_called_once_with(
resources.SECURITY_GROUP_RULE, events.AFTER_CREATE, mock.ANY)
mock_sg_cb.reset_mock()
registry.notify('fake_resource', events.AFTER_DELETE, mock.ANY)
mock_sg_cb.assert_not_called()
|
willharris/django | refs/heads/master | tests/utils_tests/test_no_submodule.py | 737 | # Used to test for modules which don't have submodules.
|
alexwlchan/pygmentizr | refs/heads/master | pygmentizr/forms.py | 1 | # -*- encoding: utf-8 -*-
from flask.ext.wtf import Form
from wtforms import SelectField, TextAreaField
from wtforms.validators import DataRequired
from pygmentizr import app
from pygmentizr.renderer import STYLE_OPTIONS
class SnippetForm(Form):
"""
Form for handling code snippets.
"""
language = SelectField(
'language',
validators=[DataRequired()],
choices=[(k, k) for k in app.config['SELECTED_LEXERS']],
)
code = TextAreaField('code_snippet', validators=[DataRequired()])
style = SelectField(
'style',
validators=[DataRequired()],
choices=[(k, k) for k in STYLE_OPTIONS]
)
|
TFenby/python-mode | refs/heads/develop | pymode/libs2/rope/refactor/introduce_factory.py | 19 | import rope.base.exceptions
import rope.base.pyobjects
from rope.base import libutils
from rope.base import taskhandle, evaluate
from rope.base.change import (ChangeSet, ChangeContents)
from rope.refactor import rename, occurrences, sourceutils, importutils
class IntroduceFactory(object):
def __init__(self, project, resource, offset):
self.project = project
self.offset = offset
this_pymodule = self.project.get_pymodule(resource)
self.old_pyname = evaluate.eval_location(this_pymodule, offset)
if self.old_pyname is None or \
not isinstance(self.old_pyname.get_object(),
rope.base.pyobjects.PyClass):
raise rope.base.exceptions.RefactoringError(
'Introduce factory should be performed on a class.')
self.old_name = self.old_pyname.get_object().get_name()
self.pymodule = self.old_pyname.get_object().get_module()
self.resource = self.pymodule.get_resource()
def get_changes(self, factory_name, global_factory=False, resources=None,
task_handle=taskhandle.NullTaskHandle()):
"""Get the changes this refactoring makes
`factory_name` indicates the name of the factory function to
be added. If `global_factory` is `True` the factory will be
global otherwise a static method is added to the class.
`resources` can be a list of `rope.base.resource.File`\s that
this refactoring should be applied on; if `None` all python
files in the project are searched.
"""
if resources is None:
resources = self.project.get_python_files()
changes = ChangeSet('Introduce factory method <%s>' % factory_name)
job_set = task_handle.create_jobset('Collecting Changes',
len(resources))
self._change_module(resources, changes, factory_name,
global_factory, job_set)
return changes
def get_name(self):
"""Return the name of the class"""
return self.old_name
def _change_module(self, resources, changes,
factory_name, global_, job_set):
if global_:
replacement = '__rope_factory_%s_' % factory_name
else:
replacement = self._new_function_name(factory_name, global_)
for file_ in resources:
job_set.started_job(file_.path)
if file_ == self.resource:
self._change_resource(changes, factory_name, global_)
job_set.finished_job()
continue
changed_code = self._rename_occurrences(file_, replacement,
global_)
if changed_code is not None:
if global_:
new_pymodule = libutils.get_string_module(
self.project, changed_code, self.resource)
modname = libutils.modname(self.resource)
changed_code, imported = importutils.add_import(
self.project, new_pymodule, modname, factory_name)
changed_code = changed_code.replace(replacement, imported)
changes.add_change(ChangeContents(file_, changed_code))
job_set.finished_job()
def _change_resource(self, changes, factory_name, global_):
class_scope = self.old_pyname.get_object().get_scope()
source_code = self._rename_occurrences(
self.resource, self._new_function_name(factory_name,
global_), global_)
if source_code is None:
source_code = self.pymodule.source_code
else:
self.pymodule = libutils.get_string_module(
self.project, source_code, resource=self.resource)
lines = self.pymodule.lines
start = self._get_insertion_offset(class_scope, lines)
result = source_code[:start]
result += self._get_factory_method(lines, class_scope,
factory_name, global_)
result += source_code[start:]
changes.add_change(ChangeContents(self.resource, result))
def _get_insertion_offset(self, class_scope, lines):
start_line = class_scope.get_end()
if class_scope.get_scopes():
start_line = class_scope.get_scopes()[-1].get_end()
start = lines.get_line_end(start_line) + 1
return start
def _get_factory_method(self, lines, class_scope,
factory_name, global_):
unit_indents = ' ' * sourceutils.get_indent(self.project)
if global_:
if self._get_scope_indents(lines, class_scope) > 0:
raise rope.base.exceptions.RefactoringError(
'Cannot make global factory method for nested classes.')
return ('\ndef %s(*args, **kwds):\n%sreturn %s(*args, **kwds)\n' %
(factory_name, unit_indents, self.old_name))
unindented_factory = \
('@staticmethod\ndef %s(*args, **kwds):\n' % factory_name +
'%sreturn %s(*args, **kwds)\n' % (unit_indents, self.old_name))
indents = self._get_scope_indents(lines, class_scope) + \
sourceutils.get_indent(self.project)
return '\n' + sourceutils.indent_lines(unindented_factory, indents)
def _get_scope_indents(self, lines, scope):
return sourceutils.get_indents(lines, scope.get_start())
def _new_function_name(self, factory_name, global_):
if global_:
return factory_name
else:
return self.old_name + '.' + factory_name
def _rename_occurrences(self, file_, changed_name, global_factory):
finder = occurrences.create_finder(self.project, self.old_name,
self.old_pyname, only_calls=True)
result = rename.rename_in_module(finder, changed_name, resource=file_,
replace_primary=global_factory)
return result
IntroduceFactoryRefactoring = IntroduceFactory
|
rec/echomesh | refs/heads/master | code/python/echomesh/util/dict/Setter.py | 1 | from __future__ import absolute_import, division, print_function, unicode_literals
import six
def setter(table, *address):
  # Walk to the container holding the final key; return (None, None) if an
  # intermediate key is missing or a value along the way isn't subscriptable.
  for part in address[:-1]:
    try:
      table = table[part]
    except (TypeError, LookupError):
      return None, None
  return table, address[-1]
def apply_list(table, function, *addresses):
  for address in addresses:
    # Resolve each address from the root table; don't rebind `table`, or
    # later addresses would be looked up in the wrong (inner) container and
    # the root table would no longer be what gets returned.
    inner, part = setter(table, *address)
    if inner:
      inner[part] = function(inner[part])
  return table
def apply_dict(table, function, addresses):
def recurse(table, addresses, key):
try:
items = six.iteritems(addresses)
except:
table[key] = function(table[key])
else:
for subkey, subaddresses in items:
recurse(table[key] if key else table, subaddresses, subkey)
recurse(table, addresses, None)
return table
def list_to_dict(*addresses):
return apply_list({}, lambda x: True, *addresses)
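# A minimal usage sketch for apply_list (hypothetical data):
#
#   table = {'a': {'b': 1}, 'c': {'d': 2}}
#   apply_list(table, lambda x: x * 10, ('a', 'b'), ('c', 'd'))
#   # table is now {'a': {'b': 10}, 'c': {'d': 20}}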
|
dxq-git/zulip | refs/heads/master | api/integrations/trac/zulip_trac.py | 115 | # -*- coding: utf-8 -*-
# Copyright © 2012 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Zulip trac plugin -- sends zulips when tickets change.
#
# Install by copying this file and zulip_trac_config.py to the trac
# plugins/ subdirectory, customizing the constants in
# zulip_trac_config.py, and then adding "zulip_trac" to the
# components section of the conf/trac.ini file, like so:
#
# [components]
# zulip_trac = enabled
#
# You may then need to restart trac (or restart Apache) for the bot
# (or changes to the bot) to actually be loaded by trac.
from trac.core import Component, implements
from trac.ticket import ITicketChangeListener
import sys
import os.path
sys.path.insert(0, os.path.dirname(__file__))
import zulip_trac_config as config
VERSION = "0.9"
if config.ZULIP_API_PATH is not None:
sys.path.append(config.ZULIP_API_PATH)
import zulip
client = zulip.Client(
email=config.ZULIP_USER,
site=config.ZULIP_SITE,
api_key=config.ZULIP_API_KEY,
client="ZulipTrac/" + VERSION)
def markdown_ticket_url(ticket, heading="ticket"):
return "[%s #%s](%s/%s)" % (heading, ticket.id, config.TRAC_BASE_TICKET_URL, ticket.id)
def markdown_block(desc):
return "\n\n>" + "\n> ".join(desc.split("\n")) + "\n"
def truncate(string, length):
if len(string) <= length:
return string
return string[:length - 3] + "..."
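# e.g. truncate("abcdefgh", 6) returns "abc..." (still 6 characters).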
def trac_subject(ticket):
return truncate("#%s: %s" % (ticket.id, ticket.values.get("summary")), 60)
def send_update(ticket, content):
client.send_message({
"type": "stream",
"to": config.STREAM_FOR_NOTIFICATIONS,
"content": content,
"subject": trac_subject(ticket)
})
class ZulipPlugin(Component):
implements(ITicketChangeListener)
def ticket_created(self, ticket):
"""Called when a ticket is created."""
content = "%s created %s in component **%s**, priority **%s**:\n" % \
(ticket.values.get("reporter"), markdown_ticket_url(ticket),
ticket.values.get("component"), ticket.values.get("priority"))
# Include the full subject if it will be truncated
if len(ticket.values.get("summary")) > 60:
content += "**%s**\n" % (ticket.values.get("summary"),)
if ticket.values.get("description") != "":
content += "%s" % (markdown_block(ticket.values.get("description")),)
send_update(ticket, content)
def ticket_changed(self, ticket, comment, author, old_values):
"""Called when a ticket is modified.
`old_values` is a dictionary containing the previous values of the
fields that have changed.
"""
if not (set(old_values.keys()).intersection(set(config.TRAC_NOTIFY_FIELDS)) or
(comment and "comment" in set(config.TRAC_NOTIFY_FIELDS))):
return
content = "%s updated %s" % (author, markdown_ticket_url(ticket))
if comment:
content += ' with comment: %s\n\n' % (markdown_block(comment),)
else:
content += ":\n\n"
field_changes = []
for key in old_values.keys():
if key == "description":
content += '- Changed %s from %s to %s' % (key, markdown_block(old_values.get(key)),
markdown_block(ticket.values.get(key)))
elif old_values.get(key) == "":
field_changes.append('%s: => **%s**' % (key, ticket.values.get(key)))
elif ticket.values.get(key) == "":
field_changes.append('%s: **%s** => ""' % (key, old_values.get(key)))
else:
field_changes.append('%s: **%s** => **%s**' % (key, old_values.get(key),
ticket.values.get(key)))
content += ", ".join(field_changes)
send_update(ticket, content)
def ticket_deleted(self, ticket):
"""Called when a ticket is deleted."""
content = "%s was deleted." % markdown_ticket_url(ticket, heading="Ticket")
send_update(ticket, content)
|
croach/Flask-Fixtures | refs/heads/master | tests/test_fixtures.py | 1 | """
test_fixtures
~~~~~~~~~~~~~
A set of tests that check the default functionality of Flask-Fixtures.
:copyright: (c) 2015 Christopher Roach <[email protected]>.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
import unittest
from myapp import app
from myapp.models import db, Book, Author
from flask_fixtures import FixturesMixin
# Configure the app with the testing configuration
app.config.from_object('myapp.config.TestConfig')
class TestMyApp(unittest.TestCase, FixturesMixin):
"""A basic set of tests to make sure that fixtures works.
"""
# Specify the fixtures file(s) you want to load
fixtures = ['authors.json']
# Specify the Flask app and database we want to use for this set of tests
app = app
db = db
# Your tests go here
def test_add_author(self):
# Add another author on the fly
author = Author()
author.first_name = 'George'
author.last_name = 'Orwell'
self.db.session.add(author)
self.db.session.commit()
# Make sure to inherit from the FixturesMixin class
class TestMyAppWithUserDefinedFunctions(unittest.TestCase, FixturesMixin):
"""Tests that functions like setUp and tearDown work
"""
# Specify the fixtures file(s) you want to load
fixtures = ['authors.json']
# Specify the Flask app and database we want to use for this set of tests
app = app
db = db
def setUp(self):
# Make sure that the user defined setUp method runs after the fixtures
# setup function (i.e., the database should be setup already)
assert Author.query.count() == 1
assert Book.query.count() == 3
# Add another author on the fly
author = Author()
author.first_name = 'George'
author.last_name = 'Orwell'
self.db.session.add(author)
self.db.session.commit()
def tearDown(self):
# This should run before the fixtures tear down function, so the
# database should still contain all the fixtures data
assert Author.query.count() == 2
assert Book.query.count() == 3
def test_authors(self):
authors = Author.query.all()
assert len(authors) == Author.query.count() == 2
assert len(authors[0].books) == 3
def test_books(self):
books = Book.query.all()
assert len(books) == Book.query.count() == 3
gibson = Author.query.filter(Author.last_name=='Gibson').one()
for book in books:
assert book.author == gibson
class TestMyAppWithRequestContext(TestMyAppWithUserDefinedFunctions):
"""Tests that manually pushing a request context works.
Just as with the app context test (see above), fixtures should work when
the user manually pushes a request context onto the stack, e.g., when the
developer uses the `test_request_context()` context manager.
"""
# Make sure the app object is None, so this test will fail if we don't
# have an app context on the stack
app = None
def setUp(self):
self.ctx = app.test_request_context()
self.ctx.push()
super(TestMyAppWithRequestContext, self).setUp()
def tearDown(self):
super(TestMyAppWithRequestContext, self).tearDown()
self.ctx.pop()
# Only run this test if we are using a version of Flask that supports app
# contexts (i.e., Flask >= 0.9)
if hasattr(app, 'app_context'):
class TestMyAppWithAppContext(TestMyAppWithUserDefinedFunctions):
"""Tests that manually pushing a app context works.
The normal way to specify a Flask application to test is to set it equal
to the `app` class variable. However, this could also be done by creating
an app context and pushing onto the stack as well. This test makes sure
that everything works, even when this method is used.
"""
# Make sure the app object is None, so this test will fail if we don't
# have an app context on the stack
app = None
def setUp(self):
self.ctx = app.app_context()
self.ctx.push()
super(TestMyAppWithAppContext, self).setUp()
def tearDown(self):
super(TestMyAppWithAppContext, self).tearDown()
self.ctx.pop()
|
rmmh/skybot | refs/heads/master | plugins/imdb.py | 1 | # IMDb lookup plugin by Ghetto Wizard (2011).
from __future__ import unicode_literals
from util import hook, http
# http://www.omdbapi.com/apikey.aspx
@hook.api_key("omdbapi")
@hook.command
def imdb(inp, api_key=None):
""".imdb <movie> -- gets information about <movie> from IMDb"""
if not api_key:
return None
content = http.get_json("https://www.omdbapi.com/", t=inp, apikey=api_key)
if content["Response"] == "Movie Not Found":
return "movie not found"
elif content["Response"] == "True":
content["URL"] = "http://www.imdb.com/title/%(imdbID)s" % content
out = "\x02%(Title)s\x02 (%(Year)s) (%(Genre)s): %(Plot)s"
if content["Runtime"] != "N/A":
out += " \x02%(Runtime)s\x02."
if content["imdbRating"] != "N/A" and content["imdbVotes"] != "N/A":
out += " \x02%(imdbRating)s/10\x02 with \x02%(imdbVotes)s\x02 votes."
out += " %(URL)s"
return out % content
else:
return "unknown error"
|
vmthunder/virtman | refs/heads/master | virtman/openstack/__init__.py | 12133432 | |
snailwalker/python | refs/heads/master | renzongxian/0023/mysite/guestbook/migrations/__init__.py | 12133432 | |
nimbis/django-central-message | refs/heads/master | central_message/__init__.py | 12133432 | |
spiralsyzygy/cloudbuster | refs/heads/master | inventory/api/__init__.py | 12133432 | |
carl-mastrangelo/grpc | refs/heads/master | test/core/bad_ssl/gen_build_yaml.py | 5 | #!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates the appropriate build.json data for all the end2end tests."""
import collections
import yaml
TestOptions = collections.namedtuple('TestOptions', 'flaky cpu_cost')
default_test_options = TestOptions(False, 1.0)
# maps test names to options
BAD_CLIENT_TESTS = {
'cert': default_test_options._replace(cpu_cost=0.1),
# Disabling this test because it does not link correctly as written
# 'alpn': default_test_options._replace(cpu_cost=0.1),
}
def main():
json = {
        '#': 'generated with test/bad_ssl/gen_build_yaml.py',
'libs': [
{
'name': 'bad_ssl_test_server',
'build': 'private',
'language': 'c',
'src': ['test/core/bad_ssl/server_common.cc'],
'headers': ['test/core/bad_ssl/server_common.h'],
'vs_proj_dir': 'test',
'platforms': ['linux', 'posix', 'mac'],
'deps': [
'grpc_test_util',
'grpc',
'gpr'
]
}
],
'targets': [
{
'name': 'bad_ssl_%s_server' % t,
'build': 'test',
'language': 'c',
'run': False,
'src': ['test/core/bad_ssl/servers/%s.cc' % t],
'vs_proj_dir': 'test/bad_ssl',
'platforms': ['linux', 'posix', 'mac'],
'deps': [
'bad_ssl_test_server',
'grpc_test_util',
'grpc',
'gpr'
]
}
for t in sorted(BAD_CLIENT_TESTS.keys())] + [
{
'name': 'bad_ssl_%s_test' % t,
'cpu_cost': BAD_CLIENT_TESTS[t].cpu_cost,
'build': 'test',
'language': 'c',
'src': ['test/core/bad_ssl/bad_ssl_test.cc'],
'vs_proj_dir': 'test',
'platforms': ['linux', 'posix', 'mac'],
'deps': [
'grpc_test_util',
'grpc',
'gpr'
]
}
for t in sorted(BAD_CLIENT_TESTS.keys())]}
print yaml.dump(json)
if __name__ == '__main__':
main()
|
FOSSRIT/Nova | refs/heads/master | controllers/match.py | 1 | # coding: utf8
# try something like
def index():
return dict(message="Welcome to matchmaking!")
def browse():
if request.args(0):
nodeCategory = db(db.matchingCategory.namePlural == request.args(0)).select().first()
if request.args(1):
matchedNodes = db(
(db.matchingAttribute.category==nodeCategory) &
(db.matchingAttribute.value == request.args(1).replace("_", " ")) &
(db.node.id == db.matchingAttribute.node)
).select(db.matchingAttribute.ALL, db.node.name, db.node.url, db.node.picFile).as_list()
return dict(category=nodeCategory.as_dict(), matchedNodes=matchedNodes)
else:
count = db.matchingAttribute.value.count()
categoryValues = [{'value':x.matchingAttribute.value, 'count':x[count]} \
for x in nodeCategory.matchingAttribute.select(db.matchingAttribute.value, count, groupby=db.matchingAttribute.value)]
return dict(category=nodeCategory.as_dict(), categoryValues=categoryValues)
else:
return dict(categories=db(db.matchingCategory.id>0).select(orderby=db.matchingCategory.namePlural).as_list())
@auth.requires_login()
def addAttribute():
if len(request.args) != 3 and len(request.args) != 4:
raise HTTP(404, "Unexpected Request")
nodeCategory = db(db.matchingCategory.name == request.args(0)).select().first()
if not nodeCategory:
raise HTTP(404, "Category Not Found")
node = get_node_or_404(request.args(1))
if not can_edit(node):
raise HTTP(403, "Not allowed to edit this node's Attributes")
if "provides" == request.args(2):
provides = True
elif "wants" == request.args(2):
provides = False
else:
raise HTTP(404, "Unknown mode")
db.matchingAttribute.value.widget = SQLFORM.widgets.autocomplete(
request, db.matchingAttribute.value,limitby=(0,10), min_length=2,
db=db(db.matchingAttribute.category==nodeCategory), keyword="w2p_autocomplete_matchingattr", distinct=True)
submit_str = "Add Desired %s" if request.args(2) == "wants" else "Add %s"
db.matchingAttribute.value.label = nodeCategory.name
if request.args(3):
attrMatch = db(
(db.matchingAttribute.category==nodeCategory) &
(db.matchingAttribute.node == node) &
(db.matchingAttribute.provides == provides) &
(db.matchingAttribute.value == request.args(3).replace("_", " "))).select().first()
submit_str = "Update Desired %s" if request.args(2) == "wants" else "Update %s"
if not attrMatch:
raise HTTP(404, "Attribute Not Found")
else:
attrMatch = None
form = SQLFORM(db.matchingAttribute, attrMatch, showid = False, submit_button=submit_str % nodeCategory.name, deletable=True)
form.vars.category = nodeCategory
form.vars.node = node
form.vars.provides = provides
if form.accepts(request.vars, session):
if attrMatch:
if form.vars.delete_this_record:
db.syslog.insert(action="Removed Match Element", target=node, target2=attrMatch.id)
else:
db.syslog.insert(action="Edited Match Element", target=node, target2=attrMatch.id)
else:
db.syslog.insert(action="Added Match Element", target=node, target2=form.vars.id)
return LOAD("match","viewNode",args=[node.url])
return dict(node=node.as_dict(), category=nodeCategory.as_dict(), form=form)
def viewNode():
node = get_node_or_404(request.args(0))
match = []
for category in db(db.matchingCategory.id>0).select():
match.append(
{"category":category,
"provides":db((db.matchingAttribute.category==category) &
(db.matchingAttribute.node==node)&(db.matchingAttribute.provides==True)).select(db.matchingAttribute.value, db.matchingAttribute.description).as_list(),
"wants":db((db.matchingAttribute.category==category) &
(db.matchingAttribute.node==node)&(db.matchingAttribute.provides==False)).select().as_list(),
})
return dict(match=match, node=node.as_dict(), can_edit=can_edit(node))
def findMatch():
if len(request.args) != 2:
raise HTTP(404, "Unexpected Request")
nodeCategory = db(db.matchingCategory.namePlural == request.args(0)).select(
db.matchingCategory.id,db.matchingCategory.name,db.matchingCategory.namePlural).first()
if not nodeCategory:
raise HTTP(404, "Category Not Found")
node = get_node_or_404(request.args(1))
attrs = db((db.matchingAttribute.category == nodeCategory) & (db.matchingAttribute.node == node)).select()
skillsIOffer = []
skillsIWant = []
for attr in attrs:
if attr.provides:
skillsIOffer.append(attr.value)
else:
skillsIWant.append(attr.value)
imLookingFor = {}
skillsIWantCount = len(skillsIWant)
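    # Nodes providing what I want: match % = (my wanted attributes that the
    # candidate provides) / (total attributes I want) * 100.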
for match in db((db.matchingAttribute.category == nodeCategory) &
(db.matchingAttribute.value.belongs(skillsIWant)) &
(db.matchingAttribute.provides == True)).select():
if match.node.id in imLookingFor:
imLookingFor[match.node.id]["attrs"].append({"name":match.value, "description":match.description})
else:
ct = db((db.matchingAttribute.node==match.node)&
(db.matchingAttribute.category == nodeCategory) &
(db.matchingAttribute.value.belongs(skillsIWant)) &
(db.matchingAttribute.provides == True)).count()
imLookingFor[match.node.id] = {
"node":match.node,
"match":(float(ct)/skillsIWantCount)*100,
"matchCt": ct,
"totalCt": skillsIWantCount,
"attrs":[{"name":match.value, "description":match.description}]}
lookingForMe = {}
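    # Nodes wanting what I offer: match % = (candidate's wants covered by my
    # offered attributes) / (candidate's total wants) * 100, capped at 100.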
for match in db((db.matchingAttribute.category == nodeCategory) &
(db.matchingAttribute.value.belongs(skillsIOffer)) &
(db.matchingAttribute.provides == False)).select():
ctTotal = db((db.matchingAttribute.node==match.node)&
(db.matchingAttribute.category == nodeCategory) &
(db.matchingAttribute.provides == False)).count()
ctLessMatch = db((db.matchingAttribute.node==match.node)&
(db.matchingAttribute.category == nodeCategory) &
~(db.matchingAttribute.value.belongs(skillsIOffer)) &
(db.matchingAttribute.provides == False)).count()
if match.node.id in lookingForMe :
lookingForMe[match.node.id]["attrs"].append({"name":match.value, "description":match.description})
else:
lookingForMe[match.node.id] = {
"node":match.node,
"match":min((float(ctTotal - ctLessMatch)/ctTotal)*100, 100.0),
"matchCt": ctTotal - ctLessMatch,
"totalCt": ctTotal,
"attrs":[{"name":match.value, "description":match.description}]}
return dict(node=node,category=nodeCategory.as_dict(),imLookingFor=imLookingFor, lookingForMe=lookingForMe )
|
jmr0/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/wptserve/tests/functional/test_response.py | 109 | import unittest
from types import MethodType
import wptserve
from .base import TestUsingServer
def send_body_as_header(self):
if self._response.add_required_headers:
self.write_default_headers()
self.write("X-Body: ")
self._headers_complete = True
class TestResponse(TestUsingServer):
def test_head_without_body(self):
@wptserve.handlers.handler
def handler(request, response):
response.writer.end_headers = MethodType(send_body_as_header,
response.writer,
wptserve.response.ResponseWriter)
return [("X-Test", "TEST")], "body\r\n"
route = ("GET", "/test/test_head_without_body", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="HEAD")
self.assertEqual("6", resp.info()['Content-Length'])
self.assertEqual("TEST", resp.info()['x-Test'])
self.assertEqual("", resp.info()['x-body'])
def test_head_with_body(self):
@wptserve.handlers.handler
def handler(request, response):
response.send_body_for_head_request = True
response.writer.end_headers = MethodType(send_body_as_header,
response.writer,
wptserve.response.ResponseWriter)
return [("X-Test", "TEST")], "body\r\n"
route = ("GET", "/test/test_head_with_body", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="HEAD")
self.assertEqual("6", resp.info()['Content-Length'])
self.assertEqual("TEST", resp.info()['x-Test'])
self.assertEqual("body", resp.info()['X-Body'])
if __name__ == '__main__':
unittest.main()
|
fhe-odoo/odoo | refs/heads/8.0 | addons/l10n_multilang/account.py | 348 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
#in this file, we mostly add the tag translate=True on existing fields that we now want to be translated
class account_account_template(osv.osv):
_inherit = 'account.account.template'
_columns = {
'name': fields.char('Name', required=True, select=True, translate=True),
}
class account_account(osv.osv):
_inherit = 'account.account'
_columns = {
'name': fields.char('Name', required=True, select=True, translate=True),
}
class account_tax(osv.osv):
_inherit = 'account.tax'
_columns = {
'name': fields.char('Tax Name', required=True, select=True, translate=True),
}
class account_tax_template(osv.osv):
_inherit = 'account.tax.template'
_columns = {
'name': fields.char('Tax Name', required=True, select=True, translate=True),
}
class account_tax_code_template(osv.osv):
_inherit = 'account.tax.code.template'
_columns = {
'name': fields.char('Tax Case Name', required=True, translate=True),
}
class account_chart_template(osv.osv):
_inherit = 'account.chart.template'
_columns={
'name': fields.char('Name', required=True, translate=True),
'spoken_languages': fields.char('Spoken Languages', help="State here the languages for which the translations of templates could be loaded at the time of installation of this localization module and copied in the final object when generating them from templates. You must provide the language codes separated by ';'"),
}
_order = 'name'
class account_fiscal_position(osv.osv):
_inherit = 'account.fiscal.position'
_columns = {
'name': fields.char('Fiscal Position', required=True, translate=True),
'note': fields.text('Notes', translate=True),
}
class account_fiscal_position_template(osv.osv):
_inherit = 'account.fiscal.position.template'
_columns = {
'name': fields.char('Fiscal Position Template', required=True, translate=True),
'note': fields.text('Notes', translate=True),
}
class account_journal(osv.osv):
_inherit = 'account.journal'
_columns = {
'name': fields.char('Journal Name', required=True, translate=True),
}
class account_analytic_account(osv.osv):
_inherit = 'account.analytic.account'
_columns = {
'name': fields.char('Account Name', required=True, translate=True),
}
class account_analytic_journal(osv.osv):
_inherit = 'account.analytic.journal'
_columns = {
'name': fields.char('Journal Name', required=True, translate=True),
}
|
kdwink/intellij-community | refs/heads/master | python/testData/refactoring/extractmethod/AsyncDef.before.py | 54 | async def foo(x):
<selection>y = await x</selection>
return await y
|
Srisai85/scikit-learn | refs/heads/master | examples/ensemble/plot_voting_probas.py | 316 | """
===========================================================
Plot class probabilities calculated by the VotingClassifier
===========================================================
Plot the class probabilities of the first sample in a toy dataset
predicted by three different classifiers and averaged by the
`VotingClassifier`.
First, three exemplary classifiers are initialized (`LogisticRegression`,
`GaussianNB`, and `RandomForestClassifier`) and used to initialize a
soft-voting `VotingClassifier` with weights `[1, 1, 5]`, which means that
the predicted probabilities of the `GaussianNB` (the third estimator) count
5 times as much as those of the other classifiers when the averaged
probability is calculated.
To visualize the probability weighting, we fit each classifier on the training
set and plot the predicted class probabilities for the first sample in this
example dataset.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import VotingClassifier
clf1 = LogisticRegression(random_state=123)
clf2 = RandomForestClassifier(random_state=123)
clf3 = GaussianNB()
X = np.array([[-1.0, -1.0], [-1.2, -1.4], [-3.4, -2.2], [1.1, 1.2]])
y = np.array([1, 1, 2, 2])
eclf = VotingClassifier(estimators=[('lr', clf1), ('rf', clf2), ('gnb', clf3)],
voting='soft',
weights=[1, 1, 5])
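# With these positional weights, the soft-voting probability of each class is
# the weighted average of the estimators' predicted probabilities:
#   p_avg = (1 * p_lr + 1 * p_rf + 5 * p_gnb) / (1 + 1 + 5)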
# predict class probabilities for all classifiers
probas = [c.fit(X, y).predict_proba(X) for c in (clf1, clf2, clf3, eclf)]
# get class probabilities for the first sample in the dataset
class1_1 = [pr[0, 0] for pr in probas]
class2_1 = [pr[0, 1] for pr in probas]
# plotting
N = 4 # number of groups
ind = np.arange(N) # group positions
width = 0.35 # bar width
fig, ax = plt.subplots()
# bars for classifier 1-3
p1 = ax.bar(ind, np.hstack(([class1_1[:-1], [0]])), width, color='green')
p2 = ax.bar(ind + width, np.hstack(([class2_1[:-1], [0]])), width, color='lightgreen')
# bars for VotingClassifier
p3 = ax.bar(ind, [0, 0, 0, class1_1[-1]], width, color='blue')
p4 = ax.bar(ind + width, [0, 0, 0, class2_1[-1]], width, color='steelblue')
# plot annotations
plt.axvline(2.8, color='k', linestyle='dashed')
ax.set_xticks(ind + width)
ax.set_xticklabels(['LogisticRegression\nweight 1',
                    'RandomForestClassifier\nweight 1',
                    'GaussianNB\nweight 5',
'VotingClassifier\n(average probabilities)'],
rotation=40,
ha='right')
plt.ylim([0, 1])
plt.title('Class probabilities for sample 1 by different classifiers')
plt.legend([p1[0], p2[0]], ['class 1', 'class 2'], loc='upper left')
plt.show()
|
nguyenkims/satellizer-demo | refs/heads/master | app.py | 1 | """This sample is published as part of the blog article at www.toptal.com/blog
Visit www.toptal.com/blog and subscribe to our newsletter to read great posts
"""
import json
import os
import flask
import jwt
import requests
from datetime import datetime, timedelta
from flask import Flask, jsonify, request
from flask_sqlalchemy import SQLAlchemy
from jwt import DecodeError, ExpiredSignature
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.config['TOKEN_SECRET'] = 'very secret'
app.config['FACEBOOK_SECRET'] = os.environ.get('FACEBOOK_SECRET')
db = SQLAlchemy(app)
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(100), nullable=False)
facebook_id = db.Column(db.String(100)) # facebook_id
password = db.Column(db.String(100))
def token(self):
payload = {
'sub': self.id,
'iat': datetime.utcnow(),
'exp': datetime.utcnow() + timedelta(days=14)
}
token = jwt.encode(payload, app.config['TOKEN_SECRET'])
return token.decode('unicode_escape')
if os.path.exists('app.db'):
os.remove('app.db')
db.create_all()
@app.route('/auth/signup', methods=['POST'])
def signup():
data = request.json
email = data["email"]
password = data["password"]
user = User(email=email, password=password)
db.session.add(user)
db.session.commit()
return jsonify(token=user.token())
@app.route('/auth/login', methods=['POST'])
def login():
data = request.json
email = data.get("email")
password = data.get("password")
user = User.query.filter_by(email=email).first()
if not user:
return jsonify(error="No such user"), 404
if user.password == password:
return jsonify(token=user.token()), 200
else:
return jsonify(error="Wrong email or password"), 400
@app.route('/user')
def user_info():
if not request.headers.get('Authorization'):
return jsonify(error='Authorization header missing'), 401
token = request.headers.get('Authorization').split()[1]
try:
payload = jwt.decode(token, app.config['TOKEN_SECRET'])
except DecodeError:
return jsonify(error='Invalid token'), 401
except ExpiredSignature:
return jsonify(error='Expired token'), 401
else:
user_id = payload['sub']
user = User.query.filter_by(id=user_id).first()
if user is None:
return jsonify(error='Should not happen ...'), 500
return jsonify(id=user.id, email=user.email), 200
return jsonify(error="never reach here..."), 500
@app.route('/auth/facebook', methods=['POST'])
def auth_facebook():
access_token_url = 'https://graph.facebook.com/v2.8/oauth/access_token'
graph_api_url = 'https://graph.facebook.com/v2.8/me?fields=id,email'
params = {
'client_id': request.json['clientId'],
'redirect_uri': request.json['redirectUri'],
'client_secret': app.config['FACEBOOK_SECRET'],
'code': request.json['code']
}
    # Step 1. Exchange authorization code for access token.
r = requests.get(access_token_url, params=params)
    # use json.loads instead of urlparse.parse_qsl
access_token = json.loads(r.text)
# Step 2. Retrieve information about the current user.
r = requests.get(graph_api_url, params=access_token)
profile = json.loads(r.text)
# Step 3. Create a new account or return an existing one.
user = User.query.filter_by(facebook_id=profile['id']).first()
if user:
return jsonify(token=user.token())
u = User(facebook_id=profile['id'], email=profile['email'])
db.session.add(u)
db.session.commit()
return jsonify(token=u.token())
@app.route('/islive')
def islive():
return "it's live"
@app.route('/')
def index():
return flask.redirect('/static/index.html')
if __name__ == '__main__':
app.run(debug=True, port=5003, host='0.0.0.0')
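# Example interaction with the routes above (hypothetical values):
#
#   curl -X POST -H 'Content-Type: application/json' \
#        -d '{"email": "[email protected]", "password": "pw"}' \
#        http://localhost:5003/auth/signup
#   => {"token": "<jwt>"}
#
#   curl -H 'Authorization: Bearer <jwt>' http://localhost:5003/user
#   => {"id": 1, "email": "[email protected]"}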
|
nodegit/node-gyp | refs/heads/master | gyp/pylib/gyp/win_tool.py | 8 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions for Windows builds.
These functions are executed via gyp-win-tool when using the ninja generator.
"""
from __future__ import print_function
import os
import re
import shutil
import subprocess
import stat
import string
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
PY3 = bytes != str
# A regex matching an argument corresponding to the output filename passed to
# link.exe.
_LINK_EXE_OUT_ARG = re.compile("/OUT:(?P<out>.+)$", re.IGNORECASE)
def main(args):
executor = WinTool()
exit_code = executor.Dispatch(args)
if exit_code is not None:
sys.exit(exit_code)
class WinTool(object):
"""This class performs all the Windows tooling steps. The methods can either
be executed directly, or dispatched from an argument list."""
def _UseSeparateMspdbsrv(self, env, args):
"""Allows to use a unique instance of mspdbsrv.exe per linker instead of a
shared one."""
if len(args) < 1:
raise Exception("Not enough arguments")
if args[0] != "link.exe":
return
# Use the output filename passed to the linker to generate an endpoint name
# for mspdbsrv.exe.
endpoint_name = None
for arg in args:
m = _LINK_EXE_OUT_ARG.match(arg)
if m:
endpoint_name = re.sub(
r"\W+", "", "%s_%d" % (m.group("out"), os.getpid())
)
break
if endpoint_name is None:
return
# Adds the appropriate environment variable. This will be read by link.exe
# to know which instance of mspdbsrv.exe it should connect to (if it's
# not set then the default endpoint is used).
env["_MSPDBSRV_ENDPOINT_"] = endpoint_name
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:
raise Exception("Not enough arguments")
method = "Exec%s" % self._CommandifyName(args[0])
return getattr(self, method)(*args[1:])
def _CommandifyName(self, name_string):
"""Transforms a tool name like recursive-mirror to RecursiveMirror."""
return name_string.title().replace("-", "")
def _GetEnv(self, arch):
"""Gets the saved environment from a file for a given architecture."""
# The environment is saved as an "environment block" (see CreateProcess
# and msvs_emulation for details). We convert to a dict here.
# Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
pairs = open(arch).read()[:-2].split("\0")
kvs = [item.split("=", 1) for item in pairs]
return dict(kvs)
def ExecStamp(self, path):
"""Simple stamp command."""
open(path, "w").close()
def ExecRecursiveMirror(self, source, dest):
"""Emulation of rm -rf out && cp -af in out."""
if os.path.exists(dest):
if os.path.isdir(dest):
def _on_error(fn, path, excinfo):
# The operation failed, possibly because the file is set to
# read-only. If that's why, make it writable and try the op again.
if not os.access(path, os.W_OK):
os.chmod(path, stat.S_IWRITE)
fn(path)
shutil.rmtree(dest, onerror=_on_error)
else:
if not os.access(dest, os.W_OK):
# Attempt to make the file writable before deleting it.
os.chmod(dest, stat.S_IWRITE)
os.unlink(dest)
if os.path.isdir(source):
shutil.copytree(source, dest)
else:
shutil.copy2(source, dest)
def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
"""Filter diagnostic output from link that looks like:
' Creating library ui.dll.lib and object ui.dll.exp'
This happens when there are exports from the dll or exe.
"""
env = self._GetEnv(arch)
if use_separate_mspdbsrv == "True":
self._UseSeparateMspdbsrv(env, args)
if sys.platform == "win32":
args = list(args) # *args is a tuple by default, which is read-only.
args[0] = args[0].replace("/", "\\")
# https://docs.python.org/2/library/subprocess.html:
# "On Unix with shell=True [...] if args is a sequence, the first item
# specifies the command string, and any additional items will be treated as
# additional arguments to the shell itself. That is to say, Popen does the
# equivalent of:
# Popen(['/bin/sh', '-c', args[0], args[1], ...])"
# For that reason, since going through the shell doesn't seem necessary on
# non-Windows don't do that there.
link = subprocess.Popen(
args,
shell=sys.platform == "win32",
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
out, _ = link.communicate()
if PY3:
out = out.decode("utf-8")
for line in out.splitlines():
if (
not line.startswith(" Creating library ")
and not line.startswith("Generating code")
and not line.startswith("Finished generating code")
):
print(line)
return link.returncode
def ExecLinkWithManifests(
self,
arch,
embed_manifest,
out,
ldcmd,
resname,
mt,
rc,
intermediate_manifest,
*manifests
):
"""A wrapper for handling creating a manifest resource and then executing
a link command."""
# The 'normal' way to do manifests is to have link generate a manifest
# based on gathering dependencies from the object files, then merge that
# manifest with other manifests supplied as sources, convert the merged
# manifest to a resource, and then *relink*, including the compiled
# version of the manifest resource. This breaks incremental linking, and
# is generally overly complicated. Instead, we merge all the manifests
# provided (along with one that includes what would normally be in the
# linker-generated one, see msvs_emulation.py), and include that into the
# first and only link. We still tell link to generate a manifest, but we
# only use that to assert that our simpler process did not miss anything.
variables = {
"python": sys.executable,
"arch": arch,
"out": out,
"ldcmd": ldcmd,
"resname": resname,
"mt": mt,
"rc": rc,
"intermediate_manifest": intermediate_manifest,
"manifests": " ".join(manifests),
}
add_to_ld = ""
if manifests:
subprocess.check_call(
"%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo "
"-manifest %(manifests)s -out:%(out)s.manifest" % variables
)
if embed_manifest == "True":
subprocess.check_call(
"%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest"
" %(out)s.manifest.rc %(resname)s" % variables
)
subprocess.check_call(
"%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s "
"%(out)s.manifest.rc" % variables
)
add_to_ld = " %(out)s.manifest.res" % variables
subprocess.check_call(ldcmd + add_to_ld)
# Run mt.exe on the theoretically complete manifest we generated, merging
# it with the one the linker generated to confirm that the linker
# generated one does not add anything. This is strictly unnecessary for
# correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
# used in a #pragma comment.
if manifests:
# Merge the intermediate one with ours to .assert.manifest, then check
# that .assert.manifest is identical to ours.
subprocess.check_call(
"%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo "
"-manifest %(out)s.manifest %(intermediate_manifest)s "
"-out:%(out)s.assert.manifest" % variables
)
assert_manifest = "%(out)s.assert.manifest" % variables
our_manifest = "%(out)s.manifest" % variables
# Load and normalize the manifests. mt.exe sometimes removes whitespace,
# and sometimes doesn't unfortunately.
            with open(our_manifest, "r") as our_f:
                with open(assert_manifest, "r") as assert_f:
                    # str.translate(None, ...) only works on Python 2; strip
                    # whitespace with a regex so this also runs on Python 3.
                    our_data = re.sub(r"\s+", "", our_f.read())
                    assert_data = re.sub(r"\s+", "", assert_f.read())
if our_data != assert_data:
os.unlink(out)
def dump(filename):
print(filename, file=sys.stderr)
print("-----", file=sys.stderr)
with open(filename, "r") as f:
print(f.read(), file=sys.stderr)
print("-----", file=sys.stderr)
dump(intermediate_manifest)
dump(our_manifest)
dump(assert_manifest)
sys.stderr.write(
'Linker generated manifest "%s" added to final manifest "%s" '
'(result in "%s"). '
"Were /MANIFEST switches used in #pragma statements? "
% (intermediate_manifest, our_manifest, assert_manifest)
)
return 1
def ExecManifestWrapper(self, arch, *args):
"""Run manifest tool with environment set. Strip out undesirable warning
(some XML blocks are recognized by the OS loader, but not the manifest
tool)."""
env = self._GetEnv(arch)
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
out, _ = popen.communicate()
if PY3:
out = out.decode("utf-8")
for line in out.splitlines():
if line and "manifest authoring warning 81010002" not in line:
print(line)
return popen.returncode
def ExecManifestToRc(self, arch, *args):
"""Creates a resource file pointing a SxS assembly manifest.
|args| is tuple containing path to resource file, path to manifest file
and resource name which can be "1" (for executables) or "2" (for DLLs)."""
manifest_path, resource_path, resource_name = args
with open(resource_path, "w") as output:
output.write(
'#include <windows.h>\n%s RT_MANIFEST "%s"'
% (resource_name, os.path.abspath(manifest_path).replace("\\", "/"))
)
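    # e.g. (hypothetical arguments, normally dispatched as "manifest-to-rc"):
    #   ExecManifestToRc("environment.x86", "my.dll.manifest", "my.rc", "2")
    # writes my.rc containing: 2 RT_MANIFEST ".../my.dll.manifest"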
def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, *flags):
"""Filter noisy filenames output from MIDL compile step that isn't
quietable via command line flags.
"""
args = (
["midl", "/nologo"]
+ list(flags)
+ [
"/out",
outdir,
"/tlb",
tlb,
"/h",
h,
"/dlldata",
dlldata,
"/iid",
iid,
"/proxy",
proxy,
idl,
]
)
env = self._GetEnv(arch)
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
out, _ = popen.communicate()
if PY3:
out = out.decode("utf-8")
# Filter junk out of stdout, and write filtered versions. Output we want
# to filter is pairs of lines that look like this:
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
# objidl.idl
lines = out.splitlines()
prefixes = ("Processing ", "64 bit Processing ")
processing = set(os.path.basename(x) for x in lines if x.startswith(prefixes))
for line in lines:
if not line.startswith(prefixes) and line not in processing:
print(line)
return popen.returncode
def ExecAsmWrapper(self, arch, *args):
"""Filter logo banner from invocations of asm.exe."""
env = self._GetEnv(arch)
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
out, _ = popen.communicate()
if PY3:
out = out.decode("utf-8")
for line in out.splitlines():
if (
not line.startswith("Copyright (C) Microsoft Corporation")
and not line.startswith("Microsoft (R) Macro Assembler")
and not line.startswith(" Assembling: ")
and line
):
print(line)
return popen.returncode
def ExecRcWrapper(self, arch, *args):
"""Filter logo banner from invocations of rc.exe. Older versions of RC
don't support the /nologo flag."""
env = self._GetEnv(arch)
popen = subprocess.Popen(
args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
out, _ = popen.communicate()
if PY3:
out = out.decode("utf-8")
for line in out.splitlines():
if (
not line.startswith("Microsoft (R) Windows (R) Resource Compiler")
and not line.startswith("Copyright (C) Microsoft Corporation")
and line
):
print(line)
return popen.returncode
def ExecActionWrapper(self, arch, rspfile, *dir):
"""Runs an action command line from a response file using the environment
for |arch|. If |dir| is supplied, use that as the working directory."""
env = self._GetEnv(arch)
# TODO(scottmg): This is a temporary hack to get some specific variables
# through to actions that are set after gyp-time. http://crbug.com/333738.
for k, v in os.environ.items():
if k not in env:
env[k] = v
args = open(rspfile).read()
dir = dir[0] if dir else None
return subprocess.call(args, shell=True, env=env, cwd=dir)
def ExecClCompile(self, project_dir, selected_files):
"""Executed by msvs-ninja projects when the 'ClCompile' target is used to
build selected C/C++ files."""
project_dir = os.path.relpath(project_dir, BASE_DIR)
selected_files = selected_files.split(";")
ninja_targets = [
os.path.join(project_dir, filename) + "^^" for filename in selected_files
]
cmd = ["ninja.exe"]
cmd.extend(ninja_targets)
return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
|
sss/calibre-at-bzr | refs/heads/upstream/master | src/calibre/ebooks/pdf/from_comic.py | 24 | from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal [email protected]'
__docformat__ = 'restructuredtext en'
'Convert a comic in CBR/CBZ format to pdf'
import sys
from functools import partial
from calibre.ebooks.lrf.comic.convert_from import do_convert, option_parser, config, main as _main
convert = partial(do_convert, output_format='pdf')
main = partial(_main, output_format='pdf')
if __name__ == '__main__':
sys.exit(main())
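# The two names referenced under the dead branch below are imported for
# re-export; mentioning them keeps unused-import checkers quiet (an
# assumption about the original author's intent).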
if False:
option_parser
config
|
azureplus/hue | refs/heads/master | desktop/core/ext-py/Django-1.6.10/django/contrib/gis/gdal/prototypes/generation.py | 219 | """
This module contains functions that generate ctypes prototypes for the
GDAL routines.
"""
from ctypes import c_char_p, c_double, c_int, c_void_p
from django.contrib.gis.gdal.prototypes.errcheck import (
check_arg_errcode, check_errcode, check_geom, check_geom_offset,
check_pointer, check_srs, check_str_arg, check_string, check_const_string)
class gdal_char_p(c_char_p):
pass
def double_output(func, argtypes, errcheck=False, strarg=False):
"Generates a ctypes function that returns a double value."
func.argtypes = argtypes
func.restype = c_double
if errcheck: func.errcheck = check_arg_errcode
if strarg: func.errcheck = check_str_arg
return func
def geom_output(func, argtypes, offset=None):
"""
Generates a function that returns a Geometry either by reference
or directly (if the return_geom keyword is set to True).
"""
# Setting the argument types
func.argtypes = argtypes
if not offset:
# When a geometry pointer is directly returned.
func.restype = c_void_p
func.errcheck = check_geom
else:
# Error code returned, geometry is returned by-reference.
func.restype = c_int
def geomerrcheck(result, func, cargs):
return check_geom_offset(result, func, cargs, offset)
func.errcheck = geomerrcheck
return func
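# Illustrative usage (an assumption -- `lgdal` and the OGR entry points live
# in the prototypes modules that import these helpers):
#
#   clone = geom_output(lgdal.OGR_G_Clone, [c_void_p])               # direct
#   centroid = geom_output(lgdal.OGR_G_Centroid,
#                          [c_void_p, c_void_p], offset=-1)    # by-reference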
def int_output(func, argtypes):
"Generates a ctypes function that returns an integer value."
func.argtypes = argtypes
func.restype = c_int
return func
def srs_output(func, argtypes):
"""
Generates a ctypes prototype for the given function with
the given C arguments that returns a pointer to an OGR
Spatial Reference System.
"""
func.argtypes = argtypes
func.restype = c_void_p
func.errcheck = check_srs
return func
def const_string_output(func, argtypes, offset=None, decoding=None):
func.argtypes = argtypes
if offset:
func.restype = c_int
else:
func.restype = c_char_p
def _check_const(result, func, cargs):
res = check_const_string(result, func, cargs, offset=offset)
if res and decoding:
res = res.decode(decoding)
return res
func.errcheck = _check_const
return func
def string_output(func, argtypes, offset=-1, str_result=False, decoding=None):
"""
Generates a ctypes prototype for the given function with the
given argument types that returns a string from a GDAL pointer.
    When `str_result` is True, the returned pointer is wrapped so that
    the error-checking routine can free the allocated string via the
    GDAL library routine VSIFree.
"""
func.argtypes = argtypes
if str_result:
# Use subclass of c_char_p so the error checking routine
# can free the memory at the pointer's address.
func.restype = gdal_char_p
else:
# Error code is returned
func.restype = c_int
# Dynamically defining our error-checking function with the
# given offset.
def _check_str(result, func, cargs):
res = check_string(result, func, cargs,
offset=offset, str_result=str_result)
if res and decoding:
res = res.decode(decoding)
return res
func.errcheck = _check_str
return func
def void_output(func, argtypes, errcheck=True):
"""
    For functions that return either nothing or an error code that
    needs to be examined.
"""
if argtypes: func.argtypes = argtypes
if errcheck:
# `errcheck` keyword may be set to False for routines that
# return void, rather than a status code.
func.restype = c_int
func.errcheck = check_errcode
else:
func.restype = None
return func
def voidptr_output(func, argtypes):
"For functions that return c_void_p."
func.argtypes = argtypes
func.restype = c_void_p
func.errcheck = check_pointer
return func
|
shibinp/google_python | refs/heads/master | basic/string2.py | 1 | #!/usr/bin/python2.4 -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Additional basic string exercises
# D. verbing
# Given a string, if its length is at least 3,
# add 'ing' to its end.
# Unless it already ends in 'ing', in which case
# add 'ly' instead.
# If the string length is less than 3, leave it unchanged.
# Return the resulting string.
def verbing(s):
if len(s) >= 3:
if s[-3:] != 'ing': s = s + 'ing'
else: s = s + 'ly'
return s
# E. not_bad
# Given a string, find the first appearance of the
# substring 'not' and 'bad'. If the 'bad' follows
# the 'not', replace the whole 'not'...'bad' substring
# with 'good'.
# Return the resulting string.
# So 'This dinner is not that bad!' yields:
# This dinner is good!
def not_bad(s):
  a = s.find("not")
  b = s.find("bad")
  if a != -1 and b != -1 and b > a:
    s = s[:a] + "good" + s[b+3:]
  return s
# F. front_back
# Consider dividing a string into two halves.
# If the length is even, the front and back halves are the same length.
# If the length is odd, we'll say that the extra char goes in the front half.
# e.g. 'abcde', the front half is 'abc', the back half 'de'.
# Given 2 strings, a and b, return a string of the form
# a-front + b-front + a-back + b-back
def front_back(a, b):
a_middle = len(a) / 2
b_middle = len(b) / 2
if len(a) % 2 == 1: # add 1 if length is odd
a_middle = a_middle + 1
if len(b) % 2 == 1:
b_middle = b_middle + 1
return a[:a_middle] + b[:b_middle] + a[a_middle:] + b[b_middle:]
# Simple provided test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# main() calls the above functions with interesting inputs,
# using the above test() to check if the result is correct or not.
def main():
print 'verbing'
test(verbing('hail'), 'hailing')
test(verbing('swiming'), 'swimingly')
test(verbing('do'), 'do')
print
print 'not_bad'
test(not_bad('This movie is not so bad'), 'This movie is good')
test(not_bad('This dinner is not that bad!'), 'This dinner is good!')
test(not_bad('This tea is not hot'), 'This tea is not hot')
test(not_bad("It's bad yet not"), "It's bad yet not")
print
print 'front_back'
test(front_back('abcd', 'xy'), 'abxcdy')
test(front_back('abcde', 'xyz'), 'abcxydez')
test(front_back('Kitten', 'Donut'), 'KitDontenut')
if __name__ == '__main__':
main()
|
JCBarahona/edX | refs/heads/master | common/djangoapps/course_modes/migrations/0005_auto__add_field_coursemode_expiration_datetime.py | 114 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'CourseMode.expiration_datetime'
db.add_column('course_modes_coursemode', 'expiration_datetime',
self.gf('django.db.models.fields.DateTimeField')(default=None, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'CourseMode.expiration_datetime'
db.delete_column('course_modes_coursemode', 'expiration_datetime')
models = {
'course_modes.coursemode': {
'Meta': {'unique_together': "(('course_id', 'mode_slug', 'currency'),)", 'object_name': 'CourseMode'},
'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'usd'", 'max_length': '8'}),
'expiration_date': ('django.db.models.fields.DateField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'expiration_datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'min_price': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'mode_display_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'mode_slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'suggested_prices': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'default': "''", 'max_length': '255', 'blank': 'True'})
}
}
complete_apps = ['course_modes']
|
jasonxmueller/gcc-python-plugin | refs/heads/master | tests/cpychecker/refcounts/ticket-20/script.py | 623 | # -*- coding: utf-8 -*-
# Copyright 2011 David Malcolm <[email protected]>
# Copyright 2011 Red Hat, Inc.
#
# This is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see
# <http://www.gnu.org/licenses/>.
from libcpychecker import main
main(verify_refcounting=True,
dump_traces=True,
show_traces=False)
|
tescalada/npyscreen-restructure | refs/heads/master | npyscreen/global_options.py | 1 | # encoding: utf-8
DISABLE_ALL_COLORS = False
# See the safe_string function in widget. At the moment the encoding is not safe
ASCII_ONLY = False |
lsaffre/lino | refs/heads/master | lino/modlib/notify/fixtures/demo2.py | 1 | # Copyright 2016-2018 Rumma & Ko Ltd
# License: BSD (see file COPYING for details)
"""Emits a notification "The database has been initialized." to every
user.
"""
import datetime
from django.utils import translation
from atelier.utils import i2t
from lino.api import dd, rt, _
from django.conf import settings
from django.utils.timezone import make_aware
def objects():
now = datetime.datetime.combine(dd.today(), i2t(548))
if settings.USE_TZ:
now = make_aware(now)
mt = rt.models.notify.MessageTypes.system
for u in rt.models.users.User.objects.order_by('username'):
# if u.user_type.has_required_roles()
with translation.override(u.language):
yield rt.models.notify.Message.create_message(
u, subject=_("The database has been initialized."),
mail_mode=u.mail_mode, created=now, message_type=mt,
sent=now)
|
frank-tancf/scikit-learn | refs/heads/master | sklearn/utils/graph.py | 289 | """
Graph utilities and algorithms
Graphs are represented with their adjacency matrices, preferably using
sparse matrices.
"""
# Authors: Aric Hagberg <[email protected]>
# Gael Varoquaux <[email protected]>
# Jake Vanderplas <[email protected]>
# License: BSD 3 clause
import numpy as np
from scipy import sparse
from .validation import check_array
from .graph_shortest_path import graph_shortest_path
###############################################################################
# Path and connected component analysis.
# Code adapted from networkx
def single_source_shortest_path_length(graph, source, cutoff=None):
"""Return the shortest path length from source to all reachable nodes.
Returns a dictionary of shortest path lengths keyed by target.
Parameters
----------
    graph : sparse matrix or 2D array (preferably LIL matrix)
Adjacency matrix of the graph
source : node label
Starting node for path
cutoff : integer, optional
Depth to stop the search - only
paths of length <= cutoff are returned.
Examples
--------
>>> from sklearn.utils.graph import single_source_shortest_path_length
>>> import numpy as np
>>> graph = np.array([[ 0, 1, 0, 0],
... [ 1, 0, 1, 0],
... [ 0, 1, 0, 1],
... [ 0, 0, 1, 0]])
>>> single_source_shortest_path_length(graph, 0)
{0: 0, 1: 1, 2: 2, 3: 3}
>>> single_source_shortest_path_length(np.ones((6, 6)), 2)
{0: 1, 1: 1, 2: 0, 3: 1, 4: 1, 5: 1}
"""
if sparse.isspmatrix(graph):
graph = graph.tolil()
else:
graph = sparse.lil_matrix(graph)
seen = {} # level (number of hops) when seen in BFS
level = 0 # the current level
    next_level = [source]      # list of nodes to check at next level
while next_level:
this_level = next_level # advance to next level
        next_level = set()  # and start a new set (fringe)
for v in this_level:
if v not in seen:
seen[v] = level # set the level of vertex v
next_level.update(graph.rows[v])
if cutoff is not None and cutoff <= level:
break
level += 1
return seen # return all path lengths as dictionary
if hasattr(sparse, 'connected_components'):
connected_components = sparse.connected_components
else:
from .sparsetools import connected_components
###############################################################################
# Graph laplacian
def graph_laplacian(csgraph, normed=False, return_diag=False):
""" Return the Laplacian matrix of a directed graph.
For non-symmetric graphs the out-degree is used in the computation.
Parameters
----------
csgraph : array_like or sparse matrix, 2 dimensions
compressed-sparse graph, with shape (N, N).
normed : bool, optional
If True, then compute normalized Laplacian.
return_diag : bool, optional
If True, then return diagonal as well as laplacian.
Returns
-------
lap : ndarray
The N x N laplacian matrix of graph.
diag : ndarray
The length-N diagonal of the laplacian matrix.
diag is returned only if return_diag is True.
Notes
-----
The Laplacian matrix of a graph is sometimes referred to as the
"Kirchoff matrix" or the "admittance matrix", and is useful in many
parts of spectral graph theory. In particular, the eigen-decomposition
of the laplacian matrix can give insight into many properties of the graph.
For non-symmetric directed graphs, the laplacian is computed using the
out-degree of each node.
"""
if csgraph.ndim != 2 or csgraph.shape[0] != csgraph.shape[1]:
raise ValueError('csgraph must be a square matrix or array')
if normed and (np.issubdtype(csgraph.dtype, np.int)
or np.issubdtype(csgraph.dtype, np.uint)):
csgraph = check_array(csgraph, dtype=np.float64, accept_sparse=True)
if sparse.isspmatrix(csgraph):
return _laplacian_sparse(csgraph, normed=normed,
return_diag=return_diag)
else:
return _laplacian_dense(csgraph, normed=normed,
return_diag=return_diag)
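# Illustrative check (a sketch, not part of the module): for the path graph
# 0 - 1 - 2, the unnormalized laplacian is the degree matrix minus the
# adjacency matrix:
#
#   >>> graph_laplacian(np.array([[0, 1, 0],
#   ...                           [1, 0, 1],
#   ...                           [0, 1, 0]]))
#   array([[ 1, -1,  0],
#          [-1,  2, -1],
#          [ 0, -1,  1]])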
def _laplacian_sparse(graph, normed=False, return_diag=False):
n_nodes = graph.shape[0]
    if graph.format != 'coo':
lap = (-graph).tocoo()
else:
lap = -graph.copy()
diag_mask = (lap.row == lap.col)
if not diag_mask.sum() == n_nodes:
# The sparsity pattern of the matrix has holes on the diagonal,
# we need to fix that
diag_idx = lap.row[diag_mask]
diagonal_holes = list(set(range(n_nodes)).difference(diag_idx))
new_data = np.concatenate([lap.data, np.ones(len(diagonal_holes))])
new_row = np.concatenate([lap.row, diagonal_holes])
new_col = np.concatenate([lap.col, diagonal_holes])
lap = sparse.coo_matrix((new_data, (new_row, new_col)),
shape=lap.shape)
diag_mask = (lap.row == lap.col)
lap.data[diag_mask] = 0
w = -np.asarray(lap.sum(axis=1)).squeeze()
if normed:
w = np.sqrt(w)
w_zeros = (w == 0)
w[w_zeros] = 1
lap.data /= w[lap.row]
lap.data /= w[lap.col]
lap.data[diag_mask] = (1 - w_zeros[lap.row[diag_mask]]).astype(
lap.data.dtype)
else:
lap.data[diag_mask] = w[lap.row[diag_mask]]
if return_diag:
return lap, w
return lap
def _laplacian_dense(graph, normed=False, return_diag=False):
n_nodes = graph.shape[0]
lap = -np.asarray(graph) # minus sign leads to a copy
# set diagonal to zero
lap.flat[::n_nodes + 1] = 0
w = -lap.sum(axis=0)
if normed:
w = np.sqrt(w)
w_zeros = (w == 0)
w[w_zeros] = 1
lap /= w
lap /= w[:, np.newaxis]
lap.flat[::n_nodes + 1] = (1 - w_zeros).astype(lap.dtype)
else:
lap.flat[::n_nodes + 1] = w.astype(lap.dtype)
if return_diag:
return lap, w
return lap
|
loco-odoo/localizacion_co | refs/heads/master | openerp/addons/account/wizard/account_journal_select.py | 385 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class account_journal_select(osv.osv_memory):
"""
Account Journal Select
"""
_name = "account.journal.select"
_description = "Account Journal Select"
def action_open_window(self, cr, uid, ids, context=None):
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
if context is None:
context = {}
result = mod_obj.get_object_reference(cr, uid, 'account', 'action_move_line_select')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id])[0]
cr.execute('select journal_id, period_id from account_journal_period where id=%s', (context['active_id'],))
res = cr.fetchone()
if res:
journal_id, period_id = res
result['domain'] = str([('journal_id', '=', journal_id), ('period_id', '=', period_id)])
result['context'] = str({'journal_id': journal_id, 'period_id': period_id})
return result
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
qedsoftware/commcare-hq | refs/heads/master | corehq/apps/api/resources/v0_3.py | 1 | from tastypie import fields
from casexml.apps.case.models import CommCareCase
from corehq.apps.api.resources import DomainSpecificResourceMixin
from corehq.apps.api.resources import HqBaseResource
from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.api.resources.meta import CustomResourceMeta
from corehq.apps.api.util import object_does_not_exist, get_obj
from corehq.apps.cloudcare.api import es_filter_cases
from corehq.apps.users.models import Permissions
from corehq.form_processor.exceptions import CaseNotFound
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
class CaseListFilters(object):
format = 'json'
def __init__(self, params):
self.filters = dict((k, v) for k, v in params.items())
#hacky hack for v0.3.
#for v0.4, the API will explicitly require name and type
#for this version, magically behind the scenes override the query for case_name and case_type to be name, type
#note, on return output, the name will return as case_name, and type will return as case_type
if 'case_name' in self.filters:
self.filters['name'] = self.filters['case_name']
del(self.filters['case_name'])
if 'case_type' in self.filters:
self.filters['type'] = self.filters['case_type']
del(self.filters['case_type'])
if 'format' in self.filters:
self.format = self.filters['format']
del self.filters['format']
if 'order_by' in self.filters:
del self.filters['order_by']
class CommCareCaseResource(HqBaseResource, DomainSpecificResourceMixin):
type = "case"
id = fields.CharField(attribute='case_id', readonly=True, unique=True)
case_id = id
user_id = fields.CharField(attribute='user_id', null=True)
date_modified = fields.CharField(attribute='date_modified', default="1900-01-01")
closed = fields.BooleanField(attribute='closed')
date_closed = fields.CharField(attribute='closed_on', null=True)
server_date_modified = fields.CharField(attribute='server_date_modified', default="1900-01-01")
server_date_opened = fields.CharField(attribute='server_date_opened', null=True)
xform_ids = fields.ListField(attribute='xform_ids')
properties = fields.DictField()
def dehydrate_properties(self, bundle):
return bundle.obj.get_properties_in_api_format()
indices = fields.DictField()
def dehydrate_indices(self, bundle):
return bundle.obj.get_index_map()
def detail_uri_kwargs(self, bundle_or_obj):
return {
'pk': get_obj(bundle_or_obj).case_id
}
def obj_get(self, bundle, **kwargs):
case_id = kwargs['pk']
try:
return CaseAccessors(kwargs['domain']).get_case(case_id)
except CaseNotFound:
raise object_does_not_exist("CommCareCase", case_id)
def obj_get_list(self, bundle, domain, **kwargs):
filters = CaseListFilters(bundle.request.GET)
return es_filter_cases(domain, filters=filters.filters)
class Meta(CustomResourceMeta):
authentication = RequirePermissionAuthentication(Permissions.edit_data)
object_class = CommCareCase
resource_name = 'case'
list_allowed_methods = ['get']
detail_allowed_methods = ['get']
|
flakey-bit/plugin.audio.spotify | refs/heads/master | resources/playbackservice.py | 1 | # -*- coding: utf8 -*-
from __future__ import print_function, unicode_literals
import os, os.path
import xbmc, xbmcgui
import threading
import weakref
import re
import traceback
from utils import *
load_all_libraries()
from spotify import MainLoop, ConnectionState, ErrorType, Bitrate, link
from spotify import track as _track
from spotify.utils.loaders import load_track, load_albumbrowse
from spotify.session import Session, SessionCallbacks
from spotifyproxy.httpproxy import ProxyRunner
from spotifyproxy.audio import BufferManager
from threading import Event
class Application:
__vars = None
def __init__(self):
self.__vars = {}
def set_var(self, name, value):
self.__vars[name] = value
def has_var(self, name):
return name in self.__vars
def get_var(self, name):
return self.__vars[name]
def remove_var(self, name):
del self.__vars[name]
class Callbacks(SessionCallbacks):
__mainloop = None
__audio_buffer = None
__logout_event = None
__app = None
def __init__(self, mainloop, audio_buffer, app):
self.__mainloop = mainloop
self.__audio_buffer = audio_buffer
self.__app = app
def logged_in(self, session, error_num):
logMsg('logged in: {0:d}'.format(error_num))
self.__app.set_var('login_last_error', error_num)
if error_num != ErrorType.Ok:
self.__app.get_var('connstate_event').set()
def logged_out(self, session):
logMsg('logged out')
self.__app.get_var('logout_event').set()
def connection_error(self, session, error):
logMsg('connection error: {0:d}'.format(error))
def message_to_user(self, session, data):
logMsg('message to user: {0}'.format(data))
def log_message(self, session, data):
logMsg("Spotify Callbacks: %s" %data, True)
pass
def streaming_error(self, session, error):
logMsg('streaming error: {0:d}'.format(error))
def play_token_lost(self, session):
self.__audio_buffer.stop()
#Only stop if we're actually playing spotify content
if xbmc.getInfoLabel("MusicPlayer.(0).Property(spotifytrackid)"):
xbmc.executebuiltin('PlayerControl(stop)')
def end_of_track(self, session):
self.__audio_buffer.set_track_ended()
def notify_main_thread(self, session):
self.__mainloop.notify()
def music_delivery(self, session, data, num_samples, sample_type, sample_rate, num_channels):
return self.__audio_buffer.music_delivery( data, num_samples, sample_type, sample_rate, num_channels)
def connectionstate_changed(self, session):
self.__app.get_var('connstate_event').set()
def search_complete(self, result):
pass
class MainLoopRunner(threading.Thread):
__mainloop = None
__session = None
__proxy = None
def __init__(self, mainloop, session):
threading.Thread.__init__(self)
self.__mainloop = mainloop
self.__session = weakref.proxy(session)
def run(self):
self.__mainloop.loop(self.__session)
def stop(self):
self.__mainloop.quit()
self.join(4)
def get_audio_buffer_size():
buffer_size = 10
try:
crossfadevalue = getJSON('Settings.GetSettingValue', '{"setting":"musicplayer.crossfade"}')
buffer_size += crossfadevalue.get("value")
except:
logMsg('Failed reading crossfade setting. Using default value.')
return buffer_size
def set_settings(session):
session.preferred_bitrate(Bitrate.Rate320k)
    session.set_volume_normalization(True)
def do_login(session, app):
#Get the last error if we have one
if app.has_var('login_last_error'):
prev_error = app.get_var('login_last_error')
else:
prev_error = 0
#Get login details from settings
username = SETTING("username").decode("utf-8")
password = SETTING("password").decode("utf-8")
#If no previous errors and we have a remembered user
logMsg('Checking remembered_user ..')
if prev_error == 0 and try_decode(session.remembered_user()) == username:
session.relogin()
        logMsg("Cached session found")
else:
#do login with stored credentials
session.login(username, password, True)
return session
def login_get_last_error(app):
if app.has_var('login_last_error'):
return app.get_var('login_last_error')
else:
return 0
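# wait_for_connstate blocks until the session reaches the requested state,
# bailing out early if shutdown is requested or if a *new* login error
# (different from the one seen on entry) arrives; the connstate event is
# pulsed from the session callbacks.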
def wait_for_connstate(session, app, state):
last_login_error = login_get_last_error(app)
cs = app.get_var('connstate_event')
def continue_loop():
cur_login_error = login_get_last_error(app)
return (
not app.get_var('exit_requested') and
session.connectionstate() != state and (
last_login_error == cur_login_error or
cur_login_error == ErrorType.Ok
)
)
while continue_loop():
cs.wait(5)
cs.clear()
return session.connectionstate() == state
def get_next_track(sess_obj):
next_trackid = xbmc.getInfoLabel("MusicPlayer.(1).Property(spotifytrackid)")
if next_trackid:
#Try loading it as a spotify track
link_obj = link.create_from_string("spotify:track:%s" %next_trackid)
if link_obj:
return load_track(sess_obj, link_obj.as_track())
#Try to parse as a local track
link_obj = link.create_from_string("spotify:local:%s" %next_trackid)
if link_obj:
local_track = link_obj.as_track()
return load_track(sess_obj, local_track.get_playable(sess_obj))
else: return None
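# Invoked by the proxy when the current stream finishes: look up the next
# queued spotify track id from the player and, if it is playable, pre-open
# it in the audio buffer so the next track can start without delay.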
def get_preloader_callback(session, buffer):
session = weakref.proxy(session)
def preloader():
next_track = get_next_track(session)
if next_track:
ta = next_track.get_availability(session)
if ta == _track.TrackAvailability.Available:
buffer.open(session, next_track)
return preloader
def main():
try:
app = Application()
logout_event = Event()
connstate_event = Event()
monitor = xbmc.Monitor()
app.set_var('logout_event', logout_event)
app.set_var('login_last_error', ErrorType.Ok)
app.set_var('connstate_event', connstate_event)
app.set_var('exit_requested', False)
app.set_var('monitor', monitor)
data_dir, cache_dir, settings_dir = check_dirs()
#Initialize spotify stuff
ml = MainLoop()
buf = BufferManager(get_audio_buffer_size())
callbacks = Callbacks(ml, buf, app)
sess = Session(
callbacks,
app_key=appkey,
user_agent="python ctypes bindings",
settings_location=settings_dir,
cache_location=cache_dir,
initially_unload_playlists=False
)
set_settings(sess)
ml_runner = MainLoopRunner(ml, sess)
ml_runner.start()
#Set the exit flag if login was cancelled
if not do_login(sess, app):
WINDOW.setProperty("Spotify.ServiceReady","error")
app.set_var('exit_requested', True)
elif wait_for_connstate(sess, app, ConnectionState.LoggedIn):
proxy_runner = ProxyRunner(sess, buf, host='127.0.0.1', allow_ranges=True)
proxy_runner.start()
logMsg('starting proxy at port {0}'.format(proxy_runner.get_port()) )
preloader_cb = get_preloader_callback(sess, buf)
logMsg('Setting callback ..')
proxy_runner.set_stream_end_callback(preloader_cb)
user_agent = try_decode('Spotify/{0} (XBMC/{1})'.format(ADDON_VERSION, xbmc.getInfoLabel("System.BuildVersion"))).decode('utf-8', 'ignore')
logMsg('Obtaining user token ..')
playtoken = proxy_runner.get_user_token(user_agent)
header_dict = {
'User-Agent': user_agent,
'x-csrf-token': playtoken
}
logMsg('Encoding header ..')
url_headers = urlencode(header_dict)
WINDOW.setProperty("Spotify.PlayToken",url_headers)
WINDOW.setProperty("Spotify.PlayServer","%s:%s" %(proxy_runner.get_host(),proxy_runner.get_port()))
WINDOW.setProperty("Spotify.ServiceReady","ready")
            # wait until abort is requested
while not app.get_var('exit_requested'):
if monitor.abortRequested() or xbmc.abortRequested:
logMsg("Shutdown requested!")
app.set_var('exit_requested', True)
monitor.waitForAbort(0.5)
logMsg("Shutting down background processing...")
#Playback and proxy deinit sequence
xbmc.executebuiltin('PlayerControl(stop)')
logMsg('Clearing stream / stopping ..')
proxy_runner.clear_stream_end_callback()
proxy_runner.stop()
buf.cleanup()
#Clear some vars and collect garbage
proxy_runner = None
preloader_cb = None
#Logout
logMsg('Logging out ..')
if sess.user():
sess.logout()
logout_event.wait(2)
#Stop main loop
error = login_get_last_error(app)
WINDOW.setProperty("Spotify.LastError",str(login_get_last_error(app)))
ml_runner.stop()
except (Exception) as ex:
if str(ex) != '':
# trace = traceback.format_exc()
logMsg("TRACE: " + ( ''.join(traceback.format_stack()) ) )
logMsg("EXCEPTION in background service: " + str(ex))
# logMsg("STACK: %s" %trace, True)
if "Unable to find" in str(ex):
WINDOW.setProperty("Spotify.LastError","999")
else:
error = str(ex)
WINDOW.clearProperty("Spotify.ServiceReady")
WINDOW.setProperty("Spotify.LastError",error)
|
Soya93/Extract-Refactoring | refs/heads/master | python/testData/refactoring/changeSignature/classMethod.before.py | 73 | class A:
def fo<caret>o(self, a):
pass
class B(A):
def foo(self, a):
pass
class С(A):
def foo(self, a):
pass
a = A()
a.foo(1)
b = B()
b.foo(2) |
mcdenhoed/redo | refs/heads/master | actor.py | 1 | import pygame
import sys, os
class Actor(pygame.sprite.Sprite):
    grav = 25  # 2.9
maxVel = 120
velDamp = .1
accDamp = .35
accDefault = 3
groundAcc = 30
airAcc = 10
left, right, onGround, onWall = False, False, False, False
def __init__(self, acc):
pygame.sprite.Sprite.__init__(self)
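        # Despite its name, 'acc' here is the initial (x, y) position.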
x, y = acc
self.pos = [x,y]
self.vel = [0.0,0.0]
self.acc = [0.0, Actor.grav]
self.theta = 0.0
self.dtheta = 0.0
imgpath = os.path.join("assets", "images", "rect.png")
self.image = pygame.Surface((30,30), pygame.SRCALPHA, 32).convert_alpha()#pygame.image.load(imgpath).convert_alpha()
self.image.fill((0,0,0,100))
self.rect = self.image.get_rect()
self.initialpos = self.rect.center = self.pos
def setLocation(self, pos):
x,y = pos
self.pos = [x,y]
self.vel = [0,0]
self.rect.center = self.pos
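    # Jump applies an upward impulse (screen y grows downward, hence the
    # negative velocity) and is allowed only from the ground or a wall.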
def jump(self):
if self.onGround is True or self.onWall is True:
self.vel[1] = -200
self.onGround, self.onWall = False, False
def offset(self, x, y):
self.pos = [a[0] + a[1] for a in zip(self.pos, [x,y])]
self.rect.center = self.pos
def update(self, offset=[0.0, 0.0]):
        # Euler step: new position = old position + frame offset + damped velocity.
        self.pos = [p + o + Actor.velDamp * v for p, o, v in zip(self.pos, offset, self.vel)]
if abs(self.vel[0]) > Actor.maxVel and self.acc[0]*self.vel[0] > 0:
self.acc[0] = 0
self.vel = [a[0]+Actor.accDamp*a[1] for a in zip(self.vel, self.acc)]
if not (self.left or self.right):
if (self.onGround):
self.acc[0] = -.4*self.vel[0]
else:
self.acc[0] = -.12*self.vel[0]
self.rect.center = self.pos
if self.left:
self.leftPress()
elif self.right:
self.rightPress()
def leftPress(self):
if self.onGround: self.acc[0] = -Actor.groundAcc
else: self.acc[0] = -Actor.airAcc
def rightPress(self):
if self.onGround: self.acc[0] = Actor.groundAcc
else: self.acc[0] = Actor.airAcc
def reset(self):
self.pos = self.initialpos
self.rect.center = self.pos
|
jsteemann/arangodb | refs/heads/devel | 3rdParty/V8-4.3.61/third_party/python_26/Lib/test/test_profile.py | 51 | """Test suite for the profile module."""
import os
import sys
import pstats
import unittest
from StringIO import StringIO
from test.test_support import run_unittest
import profile
from test.profilee import testfunc, timer
class ProfileTest(unittest.TestCase):
profilerclass = profile.Profile
methodnames = ['print_stats', 'print_callers', 'print_callees']
expected_output = {}
@classmethod
def do_profiling(cls):
results = []
prof = cls.profilerclass(timer, 0.001)
start_timer = timer()
prof.runctx("testfunc()", globals(), locals())
results.append(timer() - start_timer)
for methodname in cls.methodnames:
s = StringIO()
stats = pstats.Stats(prof, stream=s)
stats.strip_dirs().sort_stats("stdname")
getattr(stats, methodname)()
results.append(s.getvalue())
return results
def test_cprofile(self):
results = self.do_profiling()
self.assertEqual(results[0], 1000)
for i, method in enumerate(self.methodnames):
self.assertEqual(results[i+1], self.expected_output[method],
"Stats.%s output for %s doesn't fit expectation!" %
(method, self.profilerclass.__name__))
def regenerate_expected_output(filename, cls):
filename = filename.rstrip('co')
print 'Regenerating %s...' % filename
results = cls.do_profiling()
newfile = []
with open(filename, 'r') as f:
for line in f:
newfile.append(line)
if line[:6] == '#--cut':
break
with open(filename, 'w') as f:
f.writelines(newfile)
for i, method in enumerate(cls.methodnames):
f.write('%s.expected_output[%r] = """\\\n%s"""\n' % (
cls.__name__, method, results[i+1]))
f.write('\nif __name__ == "__main__":\n main()\n')
def test_main():
run_unittest(ProfileTest)
def main():
if '-r' not in sys.argv:
test_main()
else:
regenerate_expected_output(__file__, ProfileTest)
# Don't remove this comment. Everything below it is auto-generated.
#--cut--------------------------------------------------------------------------
ProfileTest.expected_output['print_stats'] = """\
127 function calls (107 primitive calls) in 999.749 CPU seconds
Ordered by: standard name
ncalls tottime percall cumtime percall filename:lineno(function)
4 -0.004 -0.001 -0.004 -0.001 :0(append)
4 -0.004 -0.001 -0.004 -0.001 :0(exc_info)
12 -0.024 -0.002 11.964 0.997 :0(hasattr)
8 -0.008 -0.001 -0.008 -0.001 :0(range)
1 0.000 0.000 0.000 0.000 :0(setprofile)
1 -0.002 -0.002 999.751 999.751 <string>:1(<module>)
0 0.000 0.000 profile:0(profiler)
1 -0.002 -0.002 999.749 999.749 profile:0(testfunc())
28 27.972 0.999 27.972 0.999 profilee.py:110(__getattr__)
1 269.996 269.996 999.753 999.753 profilee.py:25(testfunc)
23/3 149.937 6.519 169.917 56.639 profilee.py:35(factorial)
20 19.980 0.999 19.980 0.999 profilee.py:48(mul)
2 39.986 19.993 599.814 299.907 profilee.py:55(helper)
4 115.984 28.996 119.964 29.991 profilee.py:73(helper1)
2 -0.006 -0.003 139.942 69.971 profilee.py:84(helper2_indirect)
8 311.976 38.997 399.896 49.987 profilee.py:88(helper2)
8 63.968 7.996 79.944 9.993 profilee.py:98(subhelper)
"""
ProfileTest.expected_output['print_callers'] = """\
Ordered by: standard name
Function was called by...
:0(append) <- profilee.py:73(helper1)(4) 119.964
:0(exc_info) <- profilee.py:73(helper1)(4) 119.964
:0(hasattr) <- profilee.py:73(helper1)(4) 119.964
profilee.py:88(helper2)(8) 399.896
:0(range) <- profilee.py:98(subhelper)(8) 79.944
:0(setprofile) <- profile:0(testfunc())(1) 999.749
<string>:1(<module>) <- profile:0(testfunc())(1) 999.749
profile:0(profiler) <-
profile:0(testfunc()) <- profile:0(profiler)(1) 0.000
profilee.py:110(__getattr__) <- :0(hasattr)(12) 11.964
profilee.py:98(subhelper)(16) 79.944
profilee.py:25(testfunc) <- <string>:1(<module>)(1) 999.751
profilee.py:35(factorial) <- profilee.py:25(testfunc)(1) 999.753
profilee.py:35(factorial)(20) 169.917
profilee.py:84(helper2_indirect)(2) 139.942
profilee.py:48(mul) <- profilee.py:35(factorial)(20) 169.917
profilee.py:55(helper) <- profilee.py:25(testfunc)(2) 999.753
profilee.py:73(helper1) <- profilee.py:55(helper)(4) 599.814
profilee.py:84(helper2_indirect) <- profilee.py:55(helper)(2) 599.814
profilee.py:88(helper2) <- profilee.py:55(helper)(6) 599.814
profilee.py:84(helper2_indirect)(2) 139.942
profilee.py:98(subhelper) <- profilee.py:88(helper2)(8) 399.896
"""
ProfileTest.expected_output['print_callees'] = """\
Ordered by: standard name
Function called...
:0(append) ->
:0(exc_info) ->
:0(hasattr) -> profilee.py:110(__getattr__)(12) 27.972
:0(range) ->
:0(setprofile) ->
<string>:1(<module>) -> profilee.py:25(testfunc)(1) 999.753
profile:0(profiler) -> profile:0(testfunc())(1) 999.749
profile:0(testfunc()) -> :0(setprofile)(1) 0.000
<string>:1(<module>)(1) 999.751
profilee.py:110(__getattr__) ->
profilee.py:25(testfunc) -> profilee.py:35(factorial)(1) 169.917
profilee.py:55(helper)(2) 599.814
profilee.py:35(factorial) -> profilee.py:35(factorial)(20) 169.917
profilee.py:48(mul)(20) 19.980
profilee.py:48(mul) ->
profilee.py:55(helper) -> profilee.py:73(helper1)(4) 119.964
profilee.py:84(helper2_indirect)(2) 139.942
profilee.py:88(helper2)(6) 399.896
profilee.py:73(helper1) -> :0(append)(4) -0.004
:0(exc_info)(4) -0.004
:0(hasattr)(4) 11.964
profilee.py:84(helper2_indirect) -> profilee.py:35(factorial)(2) 169.917
profilee.py:88(helper2)(2) 399.896
profilee.py:88(helper2) -> :0(hasattr)(8) 11.964
profilee.py:98(subhelper)(8) 79.944
profilee.py:98(subhelper) -> :0(range)(8) -0.008
profilee.py:110(__getattr__)(16) 27.972
"""
if __name__ == "__main__":
main()
|
google/grumpy | refs/heads/master | lib/itertools_test.py | 7 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import weetest
def TestCycle():
want = []
got = []
for x in itertools.cycle(()):
got.append(x)
assert got == want, 'empty cycle yields no elements'
arg = (0, 1, 2)
want = (0, 1, 2) * 10
got = []
limit = 10 * len(arg)
counter = 0
for x in itertools.cycle((0, 1, 2)):
got.append(x)
counter += 1
if counter == limit:
break
assert tuple(got) == want, 'tuple(cycle%s) == %s, want %s' % (arg, tuple(got), want)
def TestDropwhile():
r = range(10)
cases = [
((lambda x: x < 5, r), (5, 6, 7, 8, 9)),
((lambda x: True, r), ()),
((lambda x: False, r), tuple(r)),
]
for args, want in cases:
got = tuple(itertools.dropwhile(*args))
assert got == want, 'tuple(dropwhile%s) == %s, want %s' % (args, got, want)
def TestChain():
r = range(10)
cases = [
([r], tuple(r)),
([r, r], tuple(r) + tuple(r)),
([], ())
]
for args, want in cases:
got = tuple(itertools.chain(*args))
assert got == want, 'tuple(chain%s) == %s, want %s' % (args, got, want)
def TestFromIterable():
r = range(10)
cases = [
([r], tuple(r)),
([r, r], tuple(r) + tuple(r)),
([], ())
]
for args, want in cases:
got = tuple(itertools.chain.from_iterable(args))
assert got == want, 'tuple(from_iterable%s) == %s, want %s' % (args, got, want)
def TestIFilter():
r = range(10)
cases = [
((lambda x: x < 5, r), (0, 1, 2, 3, 4)),
((lambda x: False, r), ()),
((lambda x: True, r), tuple(r)),
((None, r), (1, 2, 3, 4, 5, 6, 7, 8, 9))
]
for args, want in cases:
got = tuple(itertools.ifilter(*args))
assert got == want, 'tuple(ifilter%s) == %s, want %s' % (args, got, want)
def TestIFilterFalse():
r = range(10)
cases = [
((lambda x: x < 5, r), (5, 6, 7, 8, 9)),
((lambda x: False, r), tuple(r)),
((lambda x: True, r), ()),
((None, r), (0,))
]
for args, want in cases:
got = tuple(itertools.ifilterfalse(*args))
assert got == want, 'tuple(ifilterfalse%s) == %s, want %s' % (args, got, want)
def TestISlice():
r = range(10)
cases = [
((r, 5), (0, 1, 2, 3, 4)),
((r, 25, 30), ()),
((r, 1, None, 3), (1, 4, 7)),
]
for args, want in cases:
got = tuple(itertools.islice(*args))
assert got == want, 'tuple(islice%s) == %s, want %s' % (args, got, want)
def TestIZipLongest():
cases = [
(('abc', range(6)), (('a', 0), ('b', 1), ('c', 2), (None, 3), (None, 4), (None, 5))),
((range(6), 'abc'), ((0, 'a'), (1, 'b'), (2, 'c'), (3, None), (4, None), (5, None))),
(([1, None, 3], 'ab', range(1)), ((1, 'a', 0), (None, 'b', None), (3, None, None))),
]
for args, want in cases:
got = tuple(itertools.izip_longest(*args))
assert got == want, 'tuple(izip_longest%s) == %s, want %s' % (args, got, want)
def TestProduct():
cases = [
(([1, 2], ['a', 'b']), ((1, 'a'), (1, 'b'), (2, 'a'), (2, 'b'))),
(([1], ['a', 'b']), ((1, 'a'), (1, 'b'))),
(([],), ()),
]
for args, want in cases:
got = tuple(itertools.product(*args))
assert got == want, 'tuple(product%s) == %s, want %s' % (args, got, want)
def TestPermutations():
cases = [
(('AB',), (('A', 'B'), ('B', 'A'))),
(('ABC', 2), (('A', 'B'), ('A', 'C'), ('B', 'A'), ('B', 'C'), ('C', 'A'), ('C', 'B'))),
((range(3),), ((0, 1, 2), (0, 2, 1), (1, 0, 2), (1, 2, 0), (2, 0, 1), (2, 1, 0))),
(([],), ((),)),
(([], 0), ((),)),
((range(3), 4), ()),
]
for args, want in cases:
got = tuple(itertools.permutations(*args))
assert got == want, 'tuple(permutations%s) == %s, want %s' % (args, got, want)
def TestCombinations():
cases = [
((range(4), 3), ((0, 1, 2), (0, 1, 3), (0, 2, 3), (1, 2, 3))),
]
for args, want in cases:
got = tuple(itertools.combinations(*args))
assert got == want, 'tuple(combinations%s) == %s, want %s' % (args, got, want)
def TestCombinationsWithReplacement():
cases = [
(([-12], 2), (((-12, -12),))),
(('AB', 3), (('A', 'A', 'A'), ('A', 'A', 'B'), ('A', 'B', 'B'), ('B', 'B', 'B'))),
(([], 2), ()),
(([], 0), ((),))
]
for args, want in cases:
got = tuple(itertools.combinations_with_replacement(*args))
assert got == want, 'tuple(combinations_with_replacement%s) == %s, want %s' % (args, got, want)
def TestGroupBy():
cases = [
(([1, 2, 2, 3, 3, 3, 4, 4, 4, 4],), [(1, [1]), (2, [2, 2]), (3, [3, 3, 3]), (4, [4, 4, 4, 4])]),
((['aa', 'ab', 'abc', 'bcd', 'abcde'], len), [(2, ['aa', 'ab']), (3, ['abc', 'bcd']), (5, ['abcde'])]),
]
for args, want in cases:
got = [(k, list(v)) for k, v in itertools.groupby(*args)]
assert got == want, 'groupby %s == %s, want %s' % (args, got, want)
def TestTakewhile():
r = range(10)
cases = [
((lambda x: x % 2 == 0, r), (0,)),
((lambda x: True, r), tuple(r)),
((lambda x: False, r), ())
]
for args, want in cases:
got = tuple(itertools.takewhile(*args))
assert got == want, 'tuple(takewhile%s) == %s, want %s' % (args, got, want)
if __name__ == '__main__':
weetest.RunTests()
|
pierrebeaucamp/Exercism-Python | refs/heads/master | word-count/word_count_test.py | 9 | # -*- coding: utf-8 -*-
import unittest
from wordcount import word_count
# to be backwards compatible with the old Python 2.X
def decode_if_needed(string):
try:
return string.decode('utf-8')
except AttributeError:
return string
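# A minimal word_count sketch that satisfies the cases below (an assumption,
# not the exercise's reference solution): lowercase the phrase, pull out runs
# of word characters excluding underscores, and count them.
#
#   import re
#   from collections import Counter
#
#   def word_count(phrase):
#       return Counter(re.findall(r"[^\W_]+", phrase.lower(), re.UNICODE))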
class WordCountTests(unittest.TestCase):
def test_count_one_word(self):
self.assertEqual(
{'word': 1},
word_count('word')
)
def test_count_one_of_each(self):
self.assertEqual(
{'one': 1, 'of': 1, 'each': 1},
word_count('one of each')
)
def test_count_multiple_occurences(self):
self.assertEqual(
{'one': 1, 'fish': 4, 'two': 1, 'red': 1, 'blue': 1},
word_count('one fish two fish red fish blue fish')
)
def test_preserves_punctuation(self):
self.assertEqual(
{'car': 1, 'carpet': 1, 'as': 1, 'java': 1, 'javascript': 1},
word_count('car : carpet as java : javascript!!&@$%^&')
)
def test_include_numbers(self):
self.assertEqual(
{'testing': 2, '1': 1, '2': 1},
word_count('testing 1 2 testing')
)
def test_mixed_case(self):
self.assertEqual(
[2, 3],
sorted(list(word_count('go Go GO Stop stop').values()))
)
def test_multiple_spaces(self):
self.assertEqual(
{'wait': 1, 'for': 1, 'it': 1},
word_count('wait for it')
)
def test_newlines(self):
self.assertEqual(
{'rah': 2, 'ah': 3, 'roma': 2, 'ma': 1, 'ga': 2, 'oh': 1, 'la': 2,
'want': 1, 'your': 1, 'bad': 1, 'romance': 1},
word_count('rah rah ah ah ah\nroma roma ma\n'
'ga ga oh la la\nwant your bad romance')
)
def test_tabs(self):
self.assertEqual(
{'rah': 2, 'ah': 3, 'roma': 2, 'ma': 1, 'ga': 2, 'oh': 1, 'la': 2,
'want': 1, 'your': 1, 'bad': 1, 'romance': 1},
word_count('rah rah ah ah ah\troma roma ma\tga ga oh la la\t'
'want your bad romance')
)
def test_non_alphanumeric(self):
self.assertEqual(
{'hey': 1, 'my': 1, 'spacebar': 1, 'is': 1, 'broken': 1},
word_count('hey,my_spacebar_is_broken.')
)
def test_unicode(self):
self.assertEqual(
{decode_if_needed('до'): 1, decode_if_needed('свидания'): 1},
word_count('до🖖свидания!')
)
if __name__ == '__main__':
unittest.main()
|
damien-dg/horizon | refs/heads/master | tools/install_venv.py | 99 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2010 OpenStack Foundation
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
import install_venv_common as install_venv # noqa
def print_help(venv, root):
help = """
OpenStack development environment setup is complete.
OpenStack development uses virtualenv to track and manage Python
dependencies while in development and testing.
To activate the OpenStack virtualenv for the extent of your current shell
session you can run:
$ source %s/bin/activate
Or, if you prefer, you can run commands in the virtualenv on a case by case
basis by running:
$ %s/tools/with_venv.sh <your command>
Also, make test will automatically use the virtualenv.
"""
print(help % (venv, root))
def main(argv):
root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
if os.environ.get('tools_path'):
root = os.environ['tools_path']
venv = os.path.join(root, '.venv')
if os.environ.get('venv'):
venv = os.environ['venv']
pip_requires = os.path.join(root, 'requirements.txt')
test_requires = os.path.join(root, 'test-requirements.txt')
py_version = "python%s.%s" % (sys.version_info[0], sys.version_info[1])
project = 'OpenStack'
install = install_venv.InstallVenv(root, venv, pip_requires, test_requires,
py_version, project)
options = install.parse_args(argv)
install.check_python_version()
install.check_dependencies()
install.create_virtualenv(no_site_packages=options.no_site_packages)
install.install_dependencies()
print_help(venv, root)
if __name__ == '__main__':
main(sys.argv)
|
highweb-project/highweb-webcl-html5spec | refs/heads/highweb-20160310 | testing/legion/lib/comm_server/base_handler.py | 18 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Base handler class for all category handlers."""
class BaseHandler(object):
"""Sets up default verb handlers for the child class."""
def do_PUT(self, request):
request.send_response(501)
def do_POST(self, request):
request.send_response(501)
def do_GET(self, request):
request.send_response(501)
def do_DELETE(self, request):
request.send_response(501)
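# Example subclass (an assumption, not part of Chromium): a concrete handler
# overrides only the verbs it supports; anything else falls through to the
# 501 (Not Implemented) defaults above.
#
#   class EchoHandler(BaseHandler):
#       def do_GET(self, request):
#           request.send_response(200)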
|
PanDAWMS/panda-bigmon-lsst | refs/heads/wenaus | lsst/admin/views.py | 1 | import logging, re, json, commands, os, copy
from datetime import datetime, timedelta
import time
from django.http import HttpResponse
from django.shortcuts import render_to_response, render, redirect
from django.template import RequestContext, loader
from django.db.models import Count
from django import forms
from django.views.decorators.csrf import csrf_exempt
from django.utils import timezone
from django.utils.cache import patch_cache_control, patch_response_headers
from core.common.settings import STATIC_URL, FILTER_UI_ENV, defaultDatetimeFormat
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from core.common.models import RequestStat
from core.common.settings.config import ENV
from time import gmtime, strftime
from core.common.models import Users
from lsst.views import initRequest
from lsst.views import extensibleURL
from django.http import HttpResponseRedirect
def login(request):
if 'userdn' in request.session:
userrec = Users.objects.filter(dn__startswith=request.session['userdn']).values()
if len(userrec) > 0:
request.session['username'] = userrec[0]['name']
return True,None
else:
data = {
'viewParams' : request.session['viewParams'],
'requestParams' : request.session['requestParams'],
"errormessage" : "Sorry, we could not find your DN '%s' in database" % request.session['userdn'],\
}
return False, render_to_response('adError.html', data, RequestContext(request))
else:
try:
url="https://"+request.META['SERVER_NAME']+request.META['REQUEST_URI']
except:
url=''
data = {
'viewParams' : request.session['viewParams'],
'requestParams' : request.session['requestParams'],
'url': url,
"errormessage" : "No valid client certificate found.",\
}
return False, render_to_response('adError.html', data, RequestContext(request))
def adMain(request):
valid, response = initRequest(request)
if not valid: return response
valid, response = login(request)
if not valid: return response
data = {\
'request': request,
'user': request.session['username'],
'url' : request.path,\
}
return render_to_response('adMain.html', data, RequestContext(request))
def listReqPlot(request):
valid, response = initRequest(request)
if not valid: return response
valid, response = login(request)
if not valid: return response
sortby='id'
if 'sortby' in request.GET:
sortby=request.GET['sortby']
LAST_N_HOURS_MAX=7*24
limit=5000
if 'hours' in request.session['requestParams']:
LAST_N_HOURS_MAX = int(request.session['requestParams']['hours'])
if 'days' in request.session['requestParams']:
LAST_N_HOURS_MAX = int(request.session['requestParams']['days'])*24
if u'display_limit' in request.session['requestParams']:
display_limit = int(request.session['requestParams']['display_limit'])
else:
display_limit = 1000
nmax = display_limit
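    # 'flag' below is the request-histogram bin width in hours: coarser bins
    # for longer windows (12h for a week or more, 6h for 2+ days, else 2h).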
if LAST_N_HOURS_MAX>=168:
flag=12
elif LAST_N_HOURS_MAX>=48:
flag=6
else:
flag=2
startdate = None
if not startdate:
startdate = timezone.now() - timedelta(hours=LAST_N_HOURS_MAX)
enddate = None
if enddate == None:
enddate = timezone.now()#.strftime(defaultDatetimeFormat)
query = { 'qtime__range' : [startdate.strftime(defaultDatetimeFormat), enddate.strftime(defaultDatetimeFormat)] }
values = 'urls', 'qtime','remote','qduration','duration'
reqs=[]
reqs = RequestStat.objects.filter(**query).order_by(sortby).reverse().values(*values)
reqHist = {}
drHist =[]
mons=[]
for req in reqs:
mon={}
#mon['duration'] = (req['qduration'] - req['qtime']).seconds
mon['duration'] = req['duration']
mon['urls'] = req['urls']
mon['remote'] = req['remote']
mon['qduration']=req['qduration'].strftime('%Y-%m-%d %H:%M:%S')
mon['qtime'] = req['qtime'].strftime('%Y-%m-%d %H:%M:%S')
mons.append(mon)
##plot
tm=req['qtime']
tm = tm - timedelta(hours=tm.hour % flag, minutes=tm.minute, seconds=tm.second, microseconds=tm.microsecond)
if not tm in reqHist: reqHist[tm] = 0
reqHist[tm] += 1
##plot -view duration
dr=int(mon['duration'])
drHist.append(dr)
kys = reqHist.keys()
kys.sort()
reqHists = []
for k in kys:
reqHists.append( [ k, reqHist[k] ] )
drcount=[[x,drHist.count(x)] for x in set(drHist)]
drcount.sort()
#do paging
paginator = Paginator(mons, 200)
page = request.GET.get('page')
try:
reqPages = paginator.page(page)
except PageNotAnInteger:
reqPages = paginator.page(1)
except EmptyPage:
reqPages = paginator.page(paginator.num_pages)
url= request.get_full_path()
if url.count('?')>0:
url += '&'
else:
url += '?'
data = {\
'request' : request,
'viewParams' : request.session['viewParams'],
'requestParams' : request.session['requestParams'],
'mons': mons[:nmax],
#'nmax': nmax,
'request': request,
'user': request.session['username'],
'reqPages': reqPages,
'url' : url,
'drHist': drcount,
'reqHist': reqHists,\
}
return render_to_response('req_plot.html', data, RequestContext(request))
def drLinechart(request):
valid, response = initRequest(request)
if not valid: return response
LAST_N_HOURS_MAX=7*24
if 'hours' in request.session['requestParams']:
LAST_N_HOURS_MAX = int(request.session['requestParams']['hours'])
if 'days' in request.session['requestParams']:
LAST_N_HOURS_MAX = int(request.session['requestParams']['days'])*24
startdate = None
if not startdate:
startdate = timezone.now() - timedelta(hours=LAST_N_HOURS_MAX)
enddate = None
if enddate == None:
enddate = timezone.now()#.strftime(defaultDatetimeFormat)
#query = { 'qtime__range' : [startdate.strftime(defaultDatetimeFormat), enddate.strftime(defaultDatetimeFormat)] }
##top queries
tquery=['home','task/','user/','job/','tasks/','wns/','dash/','sites/','errors/','incidents/']
i=0
itms=[0]*10
for tq in tquery:
query = { 'qtime__range' : [startdate.strftime(defaultDatetimeFormat), enddate.strftime(defaultDatetimeFormat)] }
        pls = plots(query, tq)
        if len(pls) == 0: continue
        itms[i] = pls  # reuse the already-fetched rows instead of querying again
        i += 1
data = {\
'request' : request,
'viewParams' : request.session['viewParams'],
'requestParams' : request.session['requestParams'],
'itm': itms,\
}
return render_to_response('plots.html', data, RequestContext(request))
def plots(query,tq):
if tq=='home':
query['urls__iendswith'] = 'ch/'
else:
query['urls__icontains'] = tq
values = 'qtime','duration'
reqs=[]
reqs = RequestStat.objects.filter(**query).order_by('qtime').values(*values)
mons=[]
for req in reqs:
mon={}
mon['duration'] = req['duration']
mon['url'] = tq.replace("/","")
mon['qtime'] = req['qtime'].strftime('%H:%M')
mons.append(mon)
return mons
|
gtoonstra/airflow | refs/heads/master | tests/hooks/test_pig_hook.py | 14 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from airflow.hooks.pig_hook import PigCliHook
try:
from unittest import mock
except ImportError:
try:
import mock
except ImportError:
mock = None
class TestPigCliHook(unittest.TestCase):
def setUp(self):
super(TestPigCliHook, self).setUp()
self.extra_dejson = mock.MagicMock()
self.extra_dejson.get.return_value = None
self.conn = mock.MagicMock()
self.conn.extra_dejson = self.extra_dejson
conn = self.conn
class SubPigCliHook(PigCliHook):
def get_connection(self, id):
return conn
self.pig_hook = SubPigCliHook
def test_init(self):
self.pig_hook()
self.extra_dejson.get.assert_called_with('pig_properties', '')
@mock.patch('subprocess.Popen')
def test_run_cli_success(self, popen_mock):
proc_mock = mock.MagicMock()
proc_mock.returncode = 0
proc_mock.stdout.readline.return_value = b''
popen_mock.return_value = proc_mock
hook = self.pig_hook()
stdout = hook.run_cli("")
self.assertEqual(stdout, "")
@mock.patch('subprocess.Popen')
def test_run_cli_fail(self, popen_mock):
proc_mock = mock.MagicMock()
proc_mock.returncode = 1
proc_mock.stdout.readline.return_value = b''
popen_mock.return_value = proc_mock
hook = self.pig_hook()
from airflow.exceptions import AirflowException
self.assertRaises(AirflowException, hook.run_cli, "")
@mock.patch('subprocess.Popen')
def test_run_cli_with_properties(self, popen_mock):
test_properties = "one two"
proc_mock = mock.MagicMock()
proc_mock.returncode = 0
proc_mock.stdout.readline.return_value = b''
popen_mock.return_value = proc_mock
hook = self.pig_hook()
hook.pig_properties = test_properties
stdout = hook.run_cli("")
self.assertEqual(stdout, "")
popen_first_arg = popen_mock.call_args[0][0]
for pig_prop in test_properties.split():
self.assertIn(pig_prop, popen_first_arg)
@mock.patch('subprocess.Popen')
def test_run_cli_verbose(self, popen_mock):
test_stdout_lines = [b"one", b"two", b""]
test_stdout_strings = [s.decode('utf-8') for s in test_stdout_lines]
proc_mock = mock.MagicMock()
proc_mock.returncode = 0
proc_mock.stdout.readline = mock.Mock(side_effect=test_stdout_lines)
popen_mock.return_value = proc_mock
hook = self.pig_hook()
stdout = hook.run_cli("", verbose=True)
self.assertEqual(stdout, "".join(test_stdout_strings))
def test_kill_no_sp(self):
sp_mock = mock.Mock()
hook = self.pig_hook()
hook.sp = sp_mock
hook.kill()
self.assertFalse(sp_mock.kill.called)
def test_kill_sp_done(self):
sp_mock = mock.Mock()
sp_mock.poll.return_value = 0
hook = self.pig_hook()
hook.sp = sp_mock
hook.kill()
self.assertFalse(sp_mock.kill.called)
def test_kill(self):
sp_mock = mock.Mock()
sp_mock.poll.return_value = None
hook = self.pig_hook()
hook.sp = sp_mock
hook.kill()
self.assertTrue(sp_mock.kill.called)
|
david-abel/simple_rl | refs/heads/master | simple_rl/tasks/maze_1d/Maze1DStateClass.py | 1 | from simple_rl.mdp.StateClass import State
class Maze1DState(State):
''' Class for 1D Maze POMDP States '''
def __init__(self, name):
self.name = name
is_terminal = name == 'goal'
State.__init__(self, data=name, is_terminal=is_terminal)
def __hash__(self):
return hash(tuple(self.data))
def __str__(self):
return '1DMazeState::{}'.format(self.data)
def __repr__(self):
return self.__str__()
def __eq__(self, other):
return isinstance(other, Maze1DState) and self.data == other.data
|
manishpatell/erpcustomizationssaiimpex123qwe | refs/heads/master | addons/base_action_rule/base_action_rule.py | 54 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
import time
import logging
import openerp
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT
_logger = logging.getLogger(__name__)
DATE_RANGE_FUNCTION = {
'minutes': lambda interval: timedelta(minutes=interval),
'hour': lambda interval: timedelta(hours=interval),
'day': lambda interval: timedelta(days=interval),
# timedelta() has no 'months' keyword, so the original lambda raised a
# TypeError; relativedelta handles real month arithmetic
'month': lambda interval: relativedelta(months=interval),
False: lambda interval: timedelta(0),
}
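# Usage sketch (illustrative only): DATE_RANGE_FUNCTION['day'](3) yields
# timedelta(days=3) and DATE_RANGE_FUNCTION['month'](2) yields
# relativedelta(months=+2); both can be added directly to a datetime.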
def get_datetime(date_str):
'''Return a datetime from a date string or a datetime string'''
# complete date time if date_str contains only a date
if ' ' not in date_str:
date_str = date_str + " 00:00:00"
return datetime.strptime(date_str, DEFAULT_SERVER_DATETIME_FORMAT)
class base_action_rule(osv.osv):
""" Base Action Rules """
_name = 'base.action.rule'
_description = 'Action Rules'
_order = 'sequence'
_columns = {
'name': fields.char('Rule Name', required=True),
'model_id': fields.many2one('ir.model', 'Related Document Model',
required=True, domain=[('osv_memory', '=', False)]),
'model': fields.related('model_id', 'model', type="char", string='Model'),
'create_date': fields.datetime('Create Date', readonly=1),
'active': fields.boolean('Active',
help="When unchecked, the rule is hidden and will not be executed."),
'sequence': fields.integer('Sequence',
help="Gives the sequence order when displaying a list of rules."),
'kind': fields.selection(
[('on_create', 'On Creation'),
('on_write', 'On Update'),
('on_create_or_write', 'On Creation & Update'),
('on_time', 'Based on Timed Condition')],
string='When to Run'),
'trg_date_id': fields.many2one('ir.model.fields', string='Trigger Date',
help="When should the condition be triggered. If present, will be checked by the scheduler. If empty, will be checked at creation and update.",
domain="[('model_id', '=', model_id), ('ttype', 'in', ('date', 'datetime'))]"),
'trg_date_range': fields.integer('Delay after trigger date',
help="Delay after the trigger date." \
"You can put a negative number if you need a delay before the" \
"trigger date, like sending a reminder 15 minutes before a meeting."),
'trg_date_range_type': fields.selection([('minutes', 'Minutes'), ('hour', 'Hours'),
('day', 'Days'), ('month', 'Months')], 'Delay type'),
'trg_date_calendar_id': fields.many2one(
'resource.calendar', 'Use Calendar',
help='When calculating a day-based timed condition, it is possible to use a calendar to compute the date based on working days.',
ondelete='set null',
),
'act_user_id': fields.many2one('res.users', 'Set Responsible'),
'act_followers': fields.many2many("res.partner", string="Add Followers"),
'server_action_ids': fields.many2many('ir.actions.server', string='Server Actions',
domain="[('model_id', '=', model_id)]",
help="Examples: email reminders, call object service, etc."),
'filter_pre_id': fields.many2one('ir.filters', string='Before Update Filter',
ondelete='restrict',
domain="[('model_id', '=', model_id.model)]",
help="If present, this condition must be satisfied before the update of the record."),
'filter_id': fields.many2one('ir.filters', string='Filter',
ondelete='restrict',
domain="[('model_id', '=', model_id.model)]",
help="If present, this condition must be satisfied before executing the action rule."),
'last_run': fields.datetime('Last Run', readonly=1, copy=False),
}
_defaults = {
'active': True,
'trg_date_range_type': 'day',
}
def onchange_kind(self, cr, uid, ids, kind, context=None):
clear_fields = []
if kind in ['on_create', 'on_create_or_write']:
clear_fields = ['filter_pre_id', 'trg_date_id', 'trg_date_range', 'trg_date_range_type']
elif kind in ['on_write', 'on_create_or_write']:
clear_fields = ['trg_date_id', 'trg_date_range', 'trg_date_range_type']
elif kind == 'on_time':
clear_fields = ['filter_pre_id']
return {'value': dict.fromkeys(clear_fields, False)}
def _filter(self, cr, uid, action, action_filter, record_ids, context=None):
""" filter the list record_ids that satisfy the action filter """
if record_ids and action_filter:
assert action.model == action_filter.model_id, "Filter model different from action rule model"
model = self.pool[action_filter.model_id]
domain = [('id', 'in', record_ids)] + eval(action_filter.domain)
ctx = dict(context or {})
ctx.update(eval(action_filter.context))
record_ids = model.search(cr, uid, domain, context=ctx)
return record_ids
def _process(self, cr, uid, action, record_ids, context=None):
""" process the given action on the records """
model = self.pool[action.model_id.model]
# modify records
values = {}
if 'date_action_last' in model._fields:
values['date_action_last'] = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)
if action.act_user_id and 'user_id' in model._fields:
values['user_id'] = action.act_user_id.id
if values:
model.write(cr, uid, record_ids, values, context=context)
if action.act_followers and hasattr(model, 'message_subscribe'):
follower_ids = map(int, action.act_followers)
model.message_subscribe(cr, uid, record_ids, follower_ids, context=context)
# execute server actions
if action.server_action_ids:
server_action_ids = map(int, action.server_action_ids)
for record in model.browse(cr, uid, record_ids, context):
action_server_obj = self.pool.get('ir.actions.server')
ctx = dict(context, active_model=model._name, active_ids=[record.id], active_id=record.id)
action_server_obj.run(cr, uid, server_action_ids, context=ctx)
return True
def _register_hook(self, cr, ids=None):
""" Wrap the methods `create` and `write` of the models specified by
the rules given by `ids` (or all existing rules if `ids` is `None`).
"""
#
# Note: the patched methods create and write must be defined inside
# another function, otherwise their closure may be wrong. For instance,
# the function create refers to the outer variable 'create', which you
# expect to be bound to create itself. But that expectation is wrong if
# create is defined inside a loop; in that case, the variable 'create'
# is bound to the last function defined by the loop.
#
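# A minimal illustration of the pitfall (hypothetical, not used by this
# module):
#
#     fns = [lambda: i for i in range(3)]
#     [f() for f in fns]   # -> [2, 2, 2]: every closure sees the last i
#
# Wrapping each definition in a factory, as make_create/make_write do
# below, gives every closure its own enclosing scope.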
def make_create():
""" instanciate a create method that processes action rules """
def create(self, cr, uid, vals, context=None, **kwargs):
# avoid loops or cascading actions
if context and context.get('action'):
return create.origin(self, cr, uid, vals, context=context)
# call original method with a modified context
context = dict(context or {}, action=True)
new_id = create.origin(self, cr, uid, vals, context=context, **kwargs)
# as it is a new record, we do not consider the actions that have a prefilter
action_model = self.pool.get('base.action.rule')
action_dom = [('model', '=', self._name),
('kind', 'in', ['on_create', 'on_create_or_write'])]
action_ids = action_model.search(cr, uid, action_dom, context=context)
# check postconditions, and execute actions on the records that satisfy them
for action in action_model.browse(cr, uid, action_ids, context=context):
if action_model._filter(cr, uid, action, action.filter_id, [new_id], context=context):
action_model._process(cr, uid, action, [new_id], context=context)
return new_id
return create
def make_write():
""" instanciate a write method that processes action rules """
def write(self, cr, uid, ids, vals, context=None, **kwargs):
# avoid loops or cascading actions
if context and context.get('action'):
return write.origin(self, cr, uid, ids, vals, context=context)
# modify context
context = dict(context or {}, action=True)
ids = [ids] if isinstance(ids, (int, long, str)) else ids
# retrieve the action rules to possibly execute
action_model = self.pool.get('base.action.rule')
action_dom = [('model', '=', self._name),
('kind', 'in', ['on_write', 'on_create_or_write'])]
action_ids = action_model.search(cr, uid, action_dom, context=context)
actions = action_model.browse(cr, uid, action_ids, context=context)
# check preconditions
pre_ids = {}
for action in actions:
pre_ids[action] = action_model._filter(cr, uid, action, action.filter_pre_id, ids, context=context)
# call original method
write.origin(self, cr, uid, ids, vals, context=context, **kwargs)
# check postconditions, and execute actions on the records that satisfy them
for action in actions:
post_ids = action_model._filter(cr, uid, action, action.filter_id, pre_ids[action], context=context)
if post_ids:
action_model._process(cr, uid, action, post_ids, context=context)
return True
return write
updated = False
if ids is None:
ids = self.search(cr, SUPERUSER_ID, [])
for action_rule in self.browse(cr, SUPERUSER_ID, ids):
model = action_rule.model_id.model
model_obj = self.pool.get(model)
if model_obj and not hasattr(model_obj, 'base_action_ruled'):
# monkey-patch methods create and write
model_obj._patch_method('create', make_create())
model_obj._patch_method('write', make_write())
model_obj.base_action_ruled = True
updated = True
return updated
def _update_cron(self, cr, uid, context=None):
try:
cron = self.pool['ir.model.data'].get_object(
cr, uid, 'base_action_rule', 'ir_cron_crm_action', context=context)
except ValueError:
return False
return cron.toggle(model=self._name, domain=[('kind', '=', 'on_time')])
def create(self, cr, uid, vals, context=None):
res_id = super(base_action_rule, self).create(cr, uid, vals, context=context)
if self._register_hook(cr, [res_id]):
openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
self._update_cron(cr, uid, context=context)
return res_id
def write(self, cr, uid, ids, vals, context=None):
if isinstance(ids, (int, long)):
ids = [ids]
super(base_action_rule, self).write(cr, uid, ids, vals, context=context)
if self._register_hook(cr, ids):
openerp.modules.registry.RegistryManager.signal_registry_change(cr.dbname)
self._update_cron(cr, uid, context=context)
return True
def unlink(self, cr, uid, ids, context=None):
res = super(base_action_rule, self).unlink(cr, uid, ids, context=context)
self._update_cron(cr, uid, context=context)
return res
def onchange_model_id(self, cr, uid, ids, model_id, context=None):
data = {'model': False, 'filter_pre_id': False, 'filter_id': False}
if model_id:
model = self.pool.get('ir.model').browse(cr, uid, model_id, context=context)
data.update({'model': model.model})
return {'value': data}
def _check_delay(self, cr, uid, action, record, record_dt, context=None):
if action.trg_date_calendar_id and action.trg_date_range_type == 'day':
start_dt = get_datetime(record_dt)
action_dt = self.pool['resource.calendar'].schedule_days_get_date(
cr, uid, action.trg_date_calendar_id.id, action.trg_date_range,
day_date=start_dt, compute_leaves=True, context=context
)
else:
delay = DATE_RANGE_FUNCTION[action.trg_date_range_type](action.trg_date_range)
action_dt = get_datetime(record_dt) + delay
return action_dt
def _check(self, cr, uid, automatic=False, use_new_cursor=False, context=None):
""" This Function is called by scheduler. """
context = context or {}
# retrieve all the action rules to run based on a timed condition
action_dom = [('kind', '=', 'on_time')]
action_ids = self.search(cr, uid, action_dom, context=context)
for action in self.browse(cr, uid, action_ids, context=context):
now = datetime.now()
if action.last_run:
last_run = get_datetime(action.last_run)
else:
last_run = datetime.utcfromtimestamp(0)
# retrieve all the records that satisfy the action's condition
model = self.pool[action.model_id.model]
domain = []
ctx = dict(context)
if action.filter_id:
domain = eval(action.filter_id.domain)
ctx.update(eval(action.filter_id.context))
if 'lang' not in ctx:
# Filters might be language-sensitive, attempt to reuse creator lang
# as we are usually running this as super-user in background
[filter_meta] = action.filter_id.get_metadata()
user_id = filter_meta['write_uid'] and filter_meta['write_uid'][0] or \
filter_meta['create_uid'][0]
ctx['lang'] = self.pool['res.users'].browse(cr, uid, user_id).lang
record_ids = model.search(cr, uid, domain, context=ctx)
# determine when action should occur for the records
date_field = action.trg_date_id.name
if date_field == 'date_action_last' and 'create_date' in model._fields:
get_record_dt = lambda record: record[date_field] or record.create_date
else:
get_record_dt = lambda record: record[date_field]
# process action on the records that should be executed
for record in model.browse(cr, uid, record_ids, context=context):
record_dt = get_record_dt(record)
if not record_dt:
continue
action_dt = self._check_delay(cr, uid, action, record, record_dt, context=context)
if last_run <= action_dt < now:
try:
context = dict(context or {}, action=True)
self._process(cr, uid, action, [record.id], context=context)
except Exception:
import traceback
_logger.error(traceback.format_exc())
action.write({'last_run': now.strftime(DEFAULT_SERVER_DATETIME_FORMAT)})
if automatic:
# auto-commit for batch processing
cr.commit()
|
odoomrp/odoomrp-wip | refs/heads/8.0 | mrp_bom_sale_pack/__openerp__.py | 16 | # -*- coding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Mrp Bom Sale Pack",
"version": "8.0.1.0.0",
"author": "OdooMRP team, "
"AvanzOSC, "
"Serv. Tecnol. Avanzados - Pedro M. Baeza",
"website": "www.odoomrp.com",
"category": "Sales Management",
"license": "AGPL-3",
"contributors": ["Esther Martín <[email protected]>",
"Pedro M. Baeza <[email protected]>",
"Ana Juaristi <[email protected]>",
"Oihane Crucelaegui <[email protected]>"],
"depends": ["base", "sale", "mrp", "sale_stock"],
"data": ["security/ir.model.access.csv",
"views/mrp_bom_sale_pack_view.xml",
"views/sale_order_mrp_view.xml"],
"installable": True
}
|
AngryBork/apex-sigma-plugins | refs/heads/master | fun/jokes/csshumor.py | 3 | import aiohttp
from lxml import html
async def csshumor(cmd, message, args):
url = 'https://csshumor.com/'
async with aiohttp.ClientSession() as session:
async with session.get(url) as data:
data = await data.text()
root = html.fromstring(data)
codeblock = root.cssselect('.crayon-code')[0]
codeblock_content = codeblock.text_content()
await message.channel.send(f'```css\n{codeblock_content}\n```')
|
marctc/django | refs/heads/master | tests/syndication_tests/models.py | 281 | from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Entry(models.Model):
title = models.CharField(max_length=200)
updated = models.DateTimeField()
published = models.DateTimeField()
class Meta:
ordering = ('updated',)
def __str__(self):
return self.title
def get_absolute_url(self):
return "/blog/%s/" % self.pk
@python_2_unicode_compatible
class Article(models.Model):
title = models.CharField(max_length=200)
entry = models.ForeignKey(Entry, models.CASCADE)
def __str__(self):
return self.title
|
GUR9000/KerasNeuralFingerprint | refs/heads/master | keras/activations.py | 7 | from __future__ import absolute_import
from . import backend as K
def softmax(x):
ndim = K.ndim(x)
if ndim == 2:
return K.softmax(x)
elif ndim == 3:
e = K.exp(x - K.max(x, axis=-1, keepdims=True))
s = K.sum(e, axis=-1, keepdims=True)
return e / s
else:
raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. ' +
'Here, ndim=' + str(ndim))
def softplus(x):
return K.softplus(x)
def softsign(x):
return K.softsign(x)
def relu(x, alpha=0., max_value=None):
return K.relu(x, alpha=alpha, max_value=max_value)
def tanh(x):
return K.tanh(x)
def sigmoid(x):
return K.sigmoid(x)
def hard_sigmoid(x):
return K.hard_sigmoid(x)
def linear(x):
'''
The function returns the variable that is passed in, so all types work.
'''
return x
from .utils.generic_utils import get_from_module
def get(identifier):
if identifier is None:
return linear
return get_from_module(identifier, globals(), 'activation function')
|
bop/rango | refs/heads/master | lib/python2.7/site-packages/django/core/management/commands/sqlsequencereset.py | 242 | from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import AppCommand
from django.db import connections, models, DEFAULT_DB_ALIAS
class Command(AppCommand):
help = 'Prints the SQL statements for resetting sequences for the given app name(s).'
option_list = AppCommand.option_list + (
make_option('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS, help='Nominates a database to print the '
'SQL for. Defaults to the "default" database.'),
)
output_transaction = True
def handle_app(self, app, **options):
connection = connections[options.get('database')]
return '\n'.join(connection.ops.sequence_reset_sql(self.style, models.get_models(app, include_auto_created=True)))
|
SachaMPS/django-cms | refs/heads/develop | cms/management/commands/subcommands/uninstall.py | 61 | # -*- coding: utf-8 -*-
from django.core.management.base import LabelCommand
from django.utils.six.moves import input
from cms.management.commands.subcommands.base import SubcommandsCommand
from cms.models import Page
from cms.models.pluginmodel import CMSPlugin
from cms.plugin_pool import plugin_pool
class UninstallApphooksCommand(LabelCommand):
args = "APPHOK_NAME"
label = 'apphook name (eg SampleApp)'
help = 'Uninstalls (sets to null) specified apphooks for all pages'
def handle_label(self, label, **options):
queryset = Page.objects.filter(application_urls=label)
number_of_apphooks = queryset.count()
if number_of_apphooks > 0:
if options.get('interactive'):
confirm = input("""
You have requested to remove %d %r apphooks.
Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel: """ % (number_of_apphooks, label))
else:
confirm = 'yes'
if confirm == 'yes':
queryset.update(application_urls=None)
self.stdout.write(u'%d %r apphooks uninstalled\n' % (number_of_apphooks, label))
else:
self.stdout.write(u'no %r apphooks found\n' % label)
class UninstallPluginsCommand(LabelCommand):
args = "PLUGIN_NAME"
label = 'plugin name (eg SamplePlugin)'
help = 'Uninstalls (deletes) specified plugins from the CMSPlugin model'
def handle_label(self, label, **options):
plugin_pool.get_all_plugins()
queryset = CMSPlugin.objects.filter(plugin_type=label)
number_of_plugins = queryset.count()
if number_of_plugins > 0:
if options.get('interactive'):
confirm = input("""
You have requested to remove %d %r plugins.
Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel: """ % (number_of_plugins, label))
else:
confirm = 'yes'
if confirm == 'yes':
queryset.delete()
self.stdout.write(u'%d %r plugins uninstalled\n' % (number_of_plugins, label))
else:
self.stdout.write(u'Aborted')
else:
self.stdout.write(u'no %r plugins found\n' % label)
class UninstallCommand(SubcommandsCommand):
help = 'Uninstall commands'
subcommands = {
'apphooks': UninstallApphooksCommand,
'plugins': UninstallPluginsCommand
}
|
StalkR/misc | refs/heads/master | burp/extensions/template_extension.py | 1 | """Burp editor extension template.
The only thing you need to change is CHANGEME in the import line.
It creates a new tab in the proxy tab for some requests and responses, defined
by your criteria. In this tab, you define which text you want displayed. In the
repeater tab, you may edit that text, and the request will be rebuilt according
to your logic.
This design allows to minimize the amount of Python code interacting with Burp,
which runs the extension with Jython, and is slow to load, unload and debug.
By abstracting the interesting logic (present and edit requests) in a separate
Python module with no Burp dependency, you can add your own tests and develop
your extension faster.
Note Jython does not handle exceptions inside callbacks so try/except will not
work. Exceptions will be propagated to Java which then shows a Java stack trace
with no Python information, so you do not know where it was raised. Develop
your module so that there are no exceptions by checking for errors before.
Another interesting fact: calls from Python to Java are slow, so by putting
the parsing logic in a separate library rather than using some of the helpers
methods exposed by burp, it is noticeably faster.
"""
from burp import IBurpExtender
from burp import IMessageEditorTabFactory
from burp import IMessageEditorTab
import CHANGEME as editor
class BurpExtender(IBurpExtender, IMessageEditorTabFactory):
def registerExtenderCallbacks(self, callbacks):
self.callbacks = callbacks
callbacks.setExtensionName(editor.NAME)
callbacks.registerMessageEditorTabFactory(self)
def createNewInstance(self, controller, editable):
return EditorTab(self, controller, editable)
class EditorTab(IMessageEditorTab):
def __init__(self, extender, controller, editable):
self.editor = extender.callbacks.createTextEditor()
self.editor.setEditable(editable)
def getTabCaption(self):
return editor.TITLE
def getUiComponent(self):
return self.editor.getComponent()
def isEnabled(self, content, isRequest):
s = content.tostring()
r = editor.Request.Parse(s) if isRequest else editor.Response.Parse(s)
return r.Enabled() if r else False
def setMessage(self, content, isRequest):
if not content:
return
s = content.tostring()
r = editor.Request.Parse(s) if isRequest else editor.Response.Parse(s)
self.editor.setText(r.Text())
self.r = r
def getMessage(self):
if not self.editor.isTextModified():
return self.editor.getText()
# we rely on setMessage being called before to set self.r
self.r.Load(self.editor.getText().tostring())
return self.r.String()
def isModified(self):
return self.editor.isTextModified()
def getSelectedData(self):
return self.editor.getSelectedText()
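# A minimal sketch of the module imported as CHANGEME above (hypothetical;
# the names and behavior are assumptions inferred from how this template
# calls it, not a shipped API). Your editor module must expose NAME and
# TITLE constants plus Request and Response classes implementing Parse,
# Enabled, Text, Load and String:
#
#     NAME = 'My extension'          # extension name registered with Burp
#     TITLE = 'My tab'               # caption of the editor tab
#
#     class Request(object):
#         @classmethod
#         def Parse(cls, raw):       # return an instance, or None to skip
#             ...
#         def Enabled(self):         # True if the tab should be shown
#             ...
#         def Text(self):            # text displayed in the editor tab
#             ...
#         def Load(self, text):      # rebuild state from the edited text
#             ...
#         def String(self):          # serialize back to a raw request
#             ...
#
#     class Response(Request):       # same interface for responses
#         ...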
|
JonathanStein/odoo | refs/heads/8.0 | addons/claim_from_delivery/__openerp__.py | 261 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Claim on Deliveries',
'version' : '1.0',
'author' : 'OpenERP SA',
'category' : 'Warehouse Management',
'depends' : ['base', 'crm_claim', 'stock'],
'demo' : [],
'description': """
Create a claim from a delivery order.
=====================================
Adds a Claim link to the delivery order.
""",
'data' : [
'claim_delivery_view.xml',
'claim_delivery_data.xml',],
'auto_install': False,
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Mappy/pycnikr | refs/heads/master | pycnikr/style_sheets/example2.py | 1 | """
This file is a python style sheet which describes layers and styles for Mapnik.
Describing layers and styles this way is simple and easy to read.
It is not usable by Mapnik directly; you have to translate it with Pycnik.
To try it out, execute pycnik_sample.py.
"""
from pycnik.model import *
# Standard zoom level
Map.TILE_SIZE = 256
Map.LEVEL_NUMBER = 20
Map.ZOOM_FACTOR = 2
# Map definition
#Map.background_color = 'rgb(70,130,180)' # steelblue
Map.background_color = 'rgb(0,0,0)' # black
Map.srs = '+init=epsg:3857' # pseudo mercator
Map.minimum_version = '2.0'
Map.buffer_size = 128
# Layers
countries = Layer('countries')
countries.datasource = {
'type': 'shape',
'file': '/srv/pycnikr/pycnikr/geo_data/ne_110m_admin_0_countries_merc.shp',
}
zoom_style = {
POLYGON: {
'fill': 'rgb(195,211,188)'
},
}
# Assign the same style for all zoom levels
for zoom in xrange(20):
countries.style('countries')[zoom] = zoom_style
|
credativUK/OCB | refs/heads/7.0-local | addons/document_webdav/__init__.py | 58 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import webdav
import webdav_server
import document_webdav
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
hoosteeno/fjord | refs/heads/master | vendor/packages/Babel-2.1.1/babel/plural.py | 136 | # -*- coding: utf-8 -*-
"""
babel.plural
~~~~~~~~~~~~
CLDR Plural support. See UTS #35.
:copyright: (c) 2013 by the Babel Team.
:license: BSD, see LICENSE for more details.
"""
import re
_plural_tags = ('zero', 'one', 'two', 'few', 'many', 'other')
_fallback_tag = 'other'
class PluralRule(object):
"""Represents a set of language pluralization rules. The constructor
accepts a list of (tag, expr) tuples or a dict of CLDR rules. The
resulting object is callable and accepts one parameter with a positive or
negative number (both integer and float) for the number that indicates the
plural form for a string and returns the tag for the format:
>>> rule = PluralRule({'one': 'n is 1'})
>>> rule(1)
'one'
>>> rule(2)
'other'
Currently the CLDR defines these tags: zero, one, two, few, many and
other where other is an implicit default. Rules should be mutually
exclusive; for a given numeric value, only one rule should apply (i.e.
the condition should only be true for one of the plural rule elements.
"""
__slots__ = ('abstract', '_func')
def __init__(self, rules):
"""Initialize the rule instance.
:param rules: a list of ``(tag, expr)``) tuples with the rules
conforming to UTS #35 or a dict with the tags as keys
and expressions as values.
:raise RuleError: if the expression is malformed
"""
if isinstance(rules, dict):
rules = rules.items()
found = set()
self.abstract = []
for key, expr in sorted(list(rules)):
if key not in _plural_tags:
raise ValueError('unknown tag %r' % key)
elif key in found:
raise ValueError('tag %r defined twice' % key)
found.add(key)
self.abstract.append((key, _Parser(expr).ast))
def __repr__(self):
rules = self.rules
return '<%s %r>' % (
type(self).__name__,
', '.join(['%s: %s' % (tag, rules[tag]) for tag in _plural_tags
if tag in rules])
)
@classmethod
def parse(cls, rules):
"""Create a `PluralRule` instance for the given rules. If the rules
are a `PluralRule` object, that object is returned.
:param rules: the rules as list or dict, or a `PluralRule` object
:raise RuleError: if the expression is malformed
"""
if isinstance(rules, cls):
return rules
return cls(rules)
@property
def rules(self):
"""The `PluralRule` as a dict of unicode plural rules.
>>> rule = PluralRule({'one': 'n is 1'})
>>> rule.rules
{'one': 'n is 1'}
"""
_compile = _UnicodeCompiler().compile
return dict([(tag, _compile(ast)) for tag, ast in self.abstract])
tags = property(lambda x: frozenset([i[0] for i in x.abstract]), doc="""
A set of explicitly defined tags in this rule. The implicit default
``'other'`` rule is not part of this set unless there is an explicit
rule for it.""")
def __getstate__(self):
return self.abstract
def __setstate__(self, abstract):
self.abstract = abstract
def __call__(self, n):
if not hasattr(self, '_func'):
self._func = to_python(self)
return self._func(n)
def to_javascript(rule):
"""Convert a list/dict of rules or a `PluralRule` object into a JavaScript
function. This function depends on no external library:
>>> to_javascript({'one': 'n is 1'})
"(function(n) { return (n == 1) ? 'one' : 'other'; })"
Implementation detail: The function generated will probably evaluate
expressions involved into range operations multiple times. This has the
advantage that external helper functions are not required and is not a
big performance hit for these simple calculations.
:param rule: the rules as list or dict, or a `PluralRule` object
:raise RuleError: if the expression is malformed
"""
to_js = _JavaScriptCompiler().compile
result = ['(function(n) { return ']
for tag, ast in PluralRule.parse(rule).abstract:
result.append('%s ? %r : ' % (to_js(ast), tag))
result.append('%r; })' % _fallback_tag)
return ''.join(result)
def to_python(rule):
"""Convert a list/dict of rules or a `PluralRule` object into a regular
Python function. This is useful in situations where you need a real
function and don't care about the actual rule object:
>>> func = to_python({'one': 'n is 1', 'few': 'n in 2..4'})
>>> func(1)
'one'
>>> func(3)
'few'
>>> func = to_python({'one': 'n in 1,11', 'few': 'n in 3..10,13..19'})
>>> func(11)
'one'
>>> func(15)
'few'
:param rule: the rules as list or dict, or a `PluralRule` object
:raise RuleError: if the expression is malformed
"""
namespace = {
'IN': in_range_list,
'WITHIN': within_range_list,
'MOD': cldr_modulo
}
to_python = _PythonCompiler().compile
result = ['def evaluate(n):']
for tag, ast in PluralRule.parse(rule).abstract:
# the str() call is to coerce the tag to the native string. It's
# a limited ascii restricted set of tags anyways so that is fine.
result.append(' if (%s): return %r' % (to_python(ast), str(tag)))
result.append(' return %r' % _fallback_tag)
code = compile('\n'.join(result), '<rule>', 'exec')
eval(code, namespace)
return namespace['evaluate']
def to_gettext(rule):
"""The plural rule as gettext expression. The gettext expression is
technically limited to integers and returns indices rather than tags.
>>> to_gettext({'one': 'n is 1', 'two': 'n is 2'})
'nplurals=3; plural=((n == 1) ? 0 : (n == 2) ? 1 : 2)'
:param rule: the rules as list or dict, or a `PluralRule` object
:raise RuleError: if the expression is malformed
"""
rule = PluralRule.parse(rule)
used_tags = rule.tags | set([_fallback_tag])
_compile = _GettextCompiler().compile
_get_index = [tag for tag in _plural_tags if tag in used_tags].index
result = ['nplurals=%d; plural=(' % len(used_tags)]
for tag, ast in rule.abstract:
result.append('%s ? %d : ' % (_compile(ast), _get_index(tag)))
result.append('%d)' % _get_index(_fallback_tag))
return ''.join(result)
def in_range_list(num, range_list):
"""Integer range list test. This is the callback for the "in" operator
of the UTS #35 pluralization rule language:
>>> in_range_list(1, [(1, 3)])
True
>>> in_range_list(3, [(1, 3)])
True
>>> in_range_list(3, [(1, 3), (5, 8)])
True
>>> in_range_list(1.2, [(1, 4)])
False
>>> in_range_list(10, [(1, 4)])
False
>>> in_range_list(10, [(1, 4), (6, 8)])
False
"""
return num == int(num) and within_range_list(num, range_list)
def within_range_list(num, range_list):
"""Float range test. This is the callback for the "within" operator
of the UTS #35 pluralization rule language:
>>> within_range_list(1, [(1, 3)])
True
>>> within_range_list(1.0, [(1, 3)])
True
>>> within_range_list(1.2, [(1, 4)])
True
>>> within_range_list(8.8, [(1, 4), (7, 15)])
True
>>> within_range_list(10, [(1, 4)])
False
>>> within_range_list(10.5, [(1, 4), (20, 30)])
False
"""
return any(num >= min_ and num <= max_ for min_, max_ in range_list)
def cldr_modulo(a, b):
"""Javaish modulo. This modulo operator returns the value with the sign
of the dividend rather than the divisor like Python does:
>>> cldr_modulo(-3, 5)
-3
>>> cldr_modulo(-3, -5)
-3
>>> cldr_modulo(3, 5)
3
"""
reverse = 0
if a < 0:
a *= -1
reverse = 1
if b < 0:
b *= -1
rv = a % b
if reverse:
rv *= -1
return rv
class RuleError(Exception):
"""Raised if a rule is malformed."""
class _Parser(object):
"""Internal parser. This class can translate a single rule into an abstract
tree of tuples. It implements the following grammar::
condition = and_condition ('or' and_condition)*
and_condition = relation ('and' relation)*
relation = is_relation | in_relation | within_relation | 'n' <EOL>
is_relation = expr 'is' ('not')? value
in_relation = expr ('not')? 'in' range_list
within_relation = expr ('not')? 'within' range_list
expr = 'n' ('mod' value)?
range_list = (range | value) (',' range_list)*
value = digit+
digit = 0|1|2|3|4|5|6|7|8|9
range = value'..'value
- Whitespace can occur between or around any of the above tokens.
- Rules should be mutually exclusive; for a given numeric value, only one
rule should apply (i.e. the condition should only be true for one of
the plural rule elements).
- The in and within relations can take comma-separated lists, such as:
'n in 3,5,7..15'.
The translator parses the expression on instantiation into an attribute
called `ast`.
"""
_rules = [
(None, re.compile(r'\s+(?u)')),
('word', re.compile(r'\b(and|or|is|(?:with)?in|not|mod|n)\b')),
('value', re.compile(r'\d+')),
('comma', re.compile(r',')),
('ellipsis', re.compile(r'\.\.'))
]
def __init__(self, string):
string = string.lower()
result = []
pos = 0
end = len(string)
while pos < end:
for tok, rule in self._rules:
match = rule.match(string, pos)
if match is not None:
pos = match.end()
if tok:
result.append((tok, match.group()))
break
else:
raise RuleError('malformed CLDR pluralization rule. '
'Got unexpected %r' % string[pos])
self.tokens = result[::-1]
self.ast = self.condition()
if self.tokens:
raise RuleError('Expected end of rule, got %r' %
self.tokens[-1][1])
def test(self, type, value=None):
return self.tokens and self.tokens[-1][0] == type and \
(value is None or self.tokens[-1][1] == value)
def skip(self, type, value=None):
if self.test(type, value):
return self.tokens.pop()
def expect(self, type, value=None, term=None):
token = self.skip(type, value)
if token is not None:
return token
if term is None:
term = repr(value is None and type or value)
if not self.tokens:
raise RuleError('expected %s but end of rule reached' % term)
raise RuleError('expected %s but got %r' % (term, self.tokens[-1][1]))
def condition(self):
op = self.and_condition()
while self.skip('word', 'or'):
op = 'or', (op, self.and_condition())
return op
def and_condition(self):
op = self.relation()
while self.skip('word', 'and'):
op = 'and', (op, self.relation())
return op
def relation(self):
left = self.expr()
if self.skip('word', 'is'):
return self.skip('word', 'not') and 'isnot' or 'is', \
(left, self.value())
negated = self.skip('word', 'not')
method = 'in'
if self.skip('word', 'within'):
method = 'within'
else:
self.expect('word', 'in', term="'within' or 'in'")
rv = 'relation', (method, left, self.range_list())
if negated:
rv = 'not', (rv,)
return rv
def range_or_value(self):
left = self.value()
if self.skip('ellipsis'):
return (left, self.value())
else:
return (left, left)
def range_list(self):
range_list = [self.range_or_value()]
while self.skip('comma'):
range_list.append(self.range_or_value())
return 'range_list', range_list
def expr(self):
self.expect('word', 'n')
if self.skip('word', 'mod'):
return 'mod', (('n', ()), self.value())
return 'n', ()
def value(self):
return 'value', (int(self.expect('value')[1]),)
def _binary_compiler(tmpl):
"""Compiler factory for the `_Compiler`."""
return lambda self, l, r: tmpl % (self.compile(l), self.compile(r))
def _unary_compiler(tmpl):
"""Compiler factory for the `_Compiler`."""
return lambda self, x: tmpl % self.compile(x)
class _Compiler(object):
"""The compilers are able to transform the expressions into multiple
output formats.
"""
def compile(self, arg):
op, args = arg
return getattr(self, 'compile_' + op)(*args)
compile_n = lambda x: 'n'
compile_value = lambda x, v: str(v)
compile_and = _binary_compiler('(%s && %s)')
compile_or = _binary_compiler('(%s || %s)')
compile_not = _unary_compiler('(!%s)')
compile_mod = _binary_compiler('(%s %% %s)')
compile_is = _binary_compiler('(%s == %s)')
compile_isnot = _binary_compiler('(%s != %s)')
def compile_relation(self, method, expr, range_list):
raise NotImplementedError()
class _PythonCompiler(_Compiler):
"""Compiles an expression to Python."""
compile_and = _binary_compiler('(%s and %s)')
compile_or = _binary_compiler('(%s or %s)')
compile_not = _unary_compiler('(not %s)')
compile_mod = _binary_compiler('MOD(%s, %s)')
def compile_relation(self, method, expr, range_list):
compile_range_list = '[%s]' % ','.join(
['(%s, %s)' % tuple(map(self.compile, range_))
for range_ in range_list[1]])
return '%s(%s, %s)' % (method.upper(), self.compile(expr),
compile_range_list)
class _GettextCompiler(_Compiler):
"""Compile into a gettext plural expression."""
def compile_relation(self, method, expr, range_list):
rv = []
expr = self.compile(expr)
for item in range_list[1]:
if item[0] == item[1]:
rv.append('(%s == %s)' % (
expr,
self.compile(item[0])
))
else:
min, max = map(self.compile, item)
rv.append('(%s >= %s && %s <= %s)' % (
expr,
min,
expr,
max
))
return '(%s)' % ' || '.join(rv)
class _JavaScriptCompiler(_GettextCompiler):
"""Compiles the expression to plain of JavaScript."""
def compile_relation(self, method, expr, range_list):
code = _GettextCompiler.compile_relation(
self, method, expr, range_list)
if method == 'in':
expr = self.compile(expr)
code = '(parseInt(%s) == %s && %s)' % (expr, expr, code)
return code
class _UnicodeCompiler(_Compiler):
"""Returns a unicode pluralization rule again."""
compile_is = _binary_compiler('%s is %s')
compile_isnot = _binary_compiler('%s is not %s')
compile_and = _binary_compiler('%s and %s')
compile_or = _binary_compiler('%s or %s')
compile_mod = _binary_compiler('%s mod %s')
def compile_not(self, relation):
return self.compile_relation(negated=True, *relation[1])
def compile_relation(self, method, expr, range_list, negated=False):
ranges = []
for item in range_list[1]:
if item[0] == item[1]:
ranges.append(self.compile(item[0]))
else:
ranges.append('%s..%s' % tuple(map(self.compile, item)))
return '%s%s %s %s' % (
self.compile(expr), negated and ' not' or '',
method, ','.join(ranges)
)
|
Taywee/aix-scripts | refs/heads/master | os400pwgen.py | 1 | #!/usr/bin/python3
# To the extent possible under law, the author(s) have dedicated all copyright
# and related and neighboring rights to this software to the public domain
# worldwide. This software is distributed without any warranty.
# You should have received a copy of the CC0 Public Domain Dedication along
# with this software. If not, see
# <http://creativecommons.org/publicdomain/zero/1.0/>.
'''os400pwgen.py: generate secure as possible level-0 passwords for as400 users'''
import string
import random
sets = [set(string.ascii_uppercase), set(string.digits), set('$@#_')]
newset = set()
output = []
rand = random.SystemRandom()
# This is done by first taking a random item from each set and putting it into
# the output, then removing it, and then adding the set to the full list to be
# sampled. We do this to have a full password with at least one element from
# each set, and no repeating characters. This is, in fact, less secure, but
# password policies demand it, annoyingly.
for s in sets:
item = rand.sample(s, 1)[0]
s.remove(item)
output.append(item)
newset.update(s)
output.extend(rand.sample(newset, 7))
rand.shuffle(output)
print(''.join(output))
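# Example output (illustrative only): A4$KQ79MWE
# i.e. 10 characters containing at least one uppercase letter, one digit
# and one of $@#_, with no repeated characters.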
|
Teamxrtc/webrtc-streaming-node | refs/heads/master | third_party/webrtc/src/chromium/src/tools/cygprofile/symbol_extractor_unittest.py | 23 | #!/usr/bin/python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import symbol_extractor
import unittest
class TestSymbolInfo(unittest.TestCase):
def testIgnoresBlankLine(self):
symbol_info = symbol_extractor._FromObjdumpLine('')
self.assertIsNone(symbol_info)
def testIgnoresMalformedLine(self):
# This line is too short.
line = ('00c1b228 F .text 00000060 _ZN20trace_event')
symbol_info = symbol_extractor._FromObjdumpLine(line)
self.assertIsNone(symbol_info)
# This line has the wrong marker.
line = '00c1b228 l f .text 00000060 _ZN20trace_event'
symbol_info = symbol_extractor._FromObjdumpLine(line)
self.assertIsNone(symbol_info)
def testAssertionErrorOnInvalidLines(self):
# This line has an invalid scope.
line = ('00c1b228 z F .text 00000060 _ZN20trace_event')
self.assertRaises(AssertionError, symbol_extractor._FromObjdumpLine, line)
# This line has too many fields.
line = ('00c1b228 l F .text 00000060 _ZN20trace_event too many')
self.assertRaises(AssertionError, symbol_extractor._FromObjdumpLine, line)
# This line has invalid characters in the symbol.
line = ('00c1b228 l F .text 00000060 _ZN20trace_$bad')
self.assertRaises(AssertionError, symbol_extractor._FromObjdumpLine, line)
def testSymbolInfo(self):
line = ('00c1c05c l F .text 0000002c '
'_GLOBAL__sub_I_chrome_main_delegate.cc')
test_name = '_GLOBAL__sub_I_chrome_main_delegate.cc'
test_offset = 0x00c1c05c
test_size = 0x2c
test_section = '.text'
symbol_info = symbol_extractor._FromObjdumpLine(line)
self.assertIsNotNone(symbol_info)
self.assertEquals(test_offset, symbol_info.offset)
self.assertEquals(test_size, symbol_info.size)
self.assertEquals(test_name, symbol_info.name)
self.assertEquals(test_section, symbol_info.section)
def testHiddenSymbol(self):
line = ('00c1c05c l F .text 0000002c '
'.hidden _GLOBAL__sub_I_chrome_main_delegate.cc')
test_name = '_GLOBAL__sub_I_chrome_main_delegate.cc'
test_offset = 0x00c1c05c
test_size = 0x2c
test_section = '.text'
symbol_info = symbol_extractor._FromObjdumpLine(line)
self.assertIsNotNone(symbol_info)
self.assertEquals(test_offset, symbol_info.offset)
self.assertEquals(test_size, symbol_info.size)
self.assertEquals(test_name, symbol_info.name)
self.assertEquals(test_section, symbol_info.section)
class TestSymbolInfosFromStream(unittest.TestCase):
def testSymbolInfosFromStream(self):
lines = ['Garbage',
'',
'00c1c05c l F .text 0000002c first',
'',
'more garbage',
'00155 g F .text 00000012 second']
symbol_infos = symbol_extractor._SymbolInfosFromStream(lines)
self.assertEquals(len(symbol_infos), 2)
first = symbol_extractor.SymbolInfo('first', 0x00c1c05c, 0x2c, '.text')
self.assertEquals(first, symbol_infos[0])
second = symbol_extractor.SymbolInfo('second', 0x00155, 0x12, '.text')
self.assertEquals(second, symbol_infos[1])
class TestSymbolInfoMappings(unittest.TestCase):
def setUp(self):
self.symbol_infos = [
symbol_extractor.SymbolInfo('firstNameAtOffset', 0x42, 42, '.text'),
symbol_extractor.SymbolInfo('secondNameAtOffset', 0x42, 42, '.text'),
symbol_extractor.SymbolInfo('thirdSymbol', 0x64, 20, '.text')]
def testGroupSymbolInfosByOffset(self):
offset_to_symbol_info = symbol_extractor.GroupSymbolInfosByOffset(
self.symbol_infos)
self.assertEquals(len(offset_to_symbol_info), 2)
self.assertIn(0x42, offset_to_symbol_info)
self.assertEquals(offset_to_symbol_info[0x42][0], self.symbol_infos[0])
self.assertEquals(offset_to_symbol_info[0x42][1], self.symbol_infos[1])
self.assertIn(0x64, offset_to_symbol_info)
self.assertEquals(offset_to_symbol_info[0x64][0], self.symbol_infos[2])
def testCreateNameToSymbolInfo(self):
name_to_symbol_info = symbol_extractor.CreateNameToSymbolInfo(
self.symbol_infos)
self.assertEquals(len(name_to_symbol_info), 3)
for i in range(3):
name = self.symbol_infos[i].name
self.assertIn(name, name_to_symbol_info)
self.assertEquals(self.symbol_infos[i], name_to_symbol_info[name])
def testSymbolCollisions(self):
symbol_infos_with_collision = list(self.symbol_infos)
symbol_infos_with_collision.append(symbol_extractor.SymbolInfo(
'secondNameAtOffset', 0x84, 42, '.text'))
# The symbol added above should not affect the output.
name_to_symbol_info = symbol_extractor.CreateNameToSymbolInfo(
self.symbol_infos)
self.assertEquals(len(name_to_symbol_info), 3)
for i in range(3):
name = self.symbol_infos[i].name
self.assertIn(name, name_to_symbol_info)
self.assertEquals(self.symbol_infos[i], name_to_symbol_info[name])
if __name__ == '__main__':
unittest.main()
|
aiifabbf/pycairo | refs/heads/master | examples/gtk/png_view.py | 14 | #!/usr/bin/env python
"""Display a png file
"""
import sys
import cairo
import gtk
def expose_event(widget, event, surface):
ctx = widget.window.cairo_create()
ctx.set_source_surface(surface, 0,0)
ctx.paint()
if len(sys.argv) != 2:
raise SystemExit('usage: png_view.py png_file')
filename = sys.argv[1]
surface = cairo.ImageSurface.create_from_png(filename)
Width = surface.get_width()
Height = surface.get_height()
win = gtk.Window()
win.connect('destroy', gtk.main_quit)
drawingarea = gtk.DrawingArea()
win.add(drawingarea)
drawingarea.connect('expose_event', expose_event, surface)
drawingarea.set_size_request(Width,Height)
win.show_all()
gtk.main()
|
jgoclawski/django | refs/heads/master | tests/forms_tests/urls.py | 452 | from django.conf.urls import url
from .views import ArticleFormView
urlpatterns = [
url(r'^model_form/(?P<pk>[0-9]+)/$', ArticleFormView.as_view(), name="article_form"),
]
|
IV-GII/SocialCookies | refs/heads/master | ENV1/lib/python2.7/site-packages/django/contrib/flatpages/__init__.py | 12133432 | |
CapstoneGrader/codeta | refs/heads/master | codeta/forms/__init__.py | 12133432 | |
smmribeiro/intellij-community | refs/heads/master | python/helpers/py3only/docutils/languages/de.py | 200 | # $Id: de.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: Gunnar Schwant <[email protected]>
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
German language mappings for language-dependent features of Docutils.
"""
__docformat__ = 'reStructuredText'
labels = {
'author': 'Autor',
'authors': 'Autoren',
'organization': 'Organisation',
'address': 'Adresse',
'contact': 'Kontakt',
'version': 'Version',
'revision': 'Revision',
'status': 'Status',
'date': 'Datum',
'dedication': 'Widmung',
'copyright': 'Copyright',
'abstract': 'Zusammenfassung',
'attention': 'Achtung!',
'caution': 'Vorsicht!',
'danger': '!GEFAHR!',
'error': 'Fehler',
'hint': 'Hinweis',
'important': 'Wichtig',
'note': 'Bemerkung',
'tip': 'Tipp',
'warning': 'Warnung',
'contents': 'Inhalt'}
"""Mapping of node class name to label text."""
bibliographic_fields = {
'autor': 'author',
'autoren': 'authors',
'organisation': 'organization',
'adresse': 'address',
'kontakt': 'contact',
'version': 'version',
'revision': 'revision',
'status': 'status',
'datum': 'date',
'copyright': 'copyright',
'widmung': 'dedication',
'zusammenfassung': 'abstract'}
"""German (lowcased) to canonical name mapping for bibliographic fields."""
author_separators = [';', ',']
"""List of separator strings for the 'Authors' bibliographic field. Tried in
order."""
|