repo_name (stringlengths 6-100) | path (stringlengths 4-294) | copies (stringlengths 1-5) | size (stringlengths 4-6) | content (stringlengths 606-896k) | license (stringclasses 15 values) | var_hash (int64 -9,223,186,179,200,150,000 to 9,223,291,175B) | doc_hash (int64 -9,223,304,365,658,930,000 to 9,223,309,051B) | line_mean (float64 3.5-99.8) | line_max (int64 13-999) | alpha_frac (float64 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---
dahlstrom-g/intellij-community
|
python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/btm_matcher.py
|
326
|
6834
|
"""A bottom-up tree matching algorithm implementation meant to speed
up 2to3's matching process. After the tree patterns are reduced to
their rarest linear path, a linear Aho-Corasick automaton is
created. The linear automaton traverses the linear paths from the
leaves to the root of the AST and returns a set of nodes for further
matching. This significantly reduces the number of candidate nodes."""
__author__ = "George Boutsioukis <[email protected]>"
import logging
import itertools
from collections import defaultdict
from . import pytree
from .btm_utils import reduce_tree
class BMNode(object):
"""Class for a node of the Aho-Corasick automaton used in matching"""
count = itertools.count()
def __init__(self):
self.transition_table = {}
self.fixers = []
self.id = next(BMNode.count)
self.content = ''
class BottomMatcher(object):
"""The main matcher class. After instantiating the patterns should
be added using the add_fixer method"""
def __init__(self):
self.match = set()
self.root = BMNode()
self.nodes = [self.root]
self.fixers = []
self.logger = logging.getLogger("RefactoringTool")
def add_fixer(self, fixer):
"""Reduces a fixer's pattern tree to a linear path and adds it
to the matcher(a common Aho-Corasick automaton). The fixer is
appended on the matching states and called when they are
reached"""
self.fixers.append(fixer)
tree = reduce_tree(fixer.pattern_tree)
linear = tree.get_linear_subpattern()
match_nodes = self.add(linear, start=self.root)
for match_node in match_nodes:
match_node.fixers.append(fixer)
def add(self, pattern, start):
"Recursively adds a linear pattern to the AC automaton"
#print("adding pattern", pattern, "to", start)
if not pattern:
#print("empty pattern")
return [start]
if isinstance(pattern[0], tuple):
#alternatives
#print("alternatives")
match_nodes = []
for alternative in pattern[0]:
#add all alternatives, and add the rest of the pattern
#to each end node
end_nodes = self.add(alternative, start=start)
for end in end_nodes:
match_nodes.extend(self.add(pattern[1:], end))
return match_nodes
else:
#single token
#not last
if pattern[0] not in start.transition_table:
#transition did not exist, create new
next_node = BMNode()
start.transition_table[pattern[0]] = next_node
else:
#transition exists already, follow
next_node = start.transition_table[pattern[0]]
if pattern[1:]:
end_nodes = self.add(pattern[1:], start=next_node)
else:
end_nodes = [next_node]
return end_nodes
def run(self, leaves):
"""The main interface with the bottom matcher. The tree is
traversed from the bottom using the constructed
automaton. Nodes are only checked once as the tree is
retraversed. When the automaton fails, we give it one more
shot(in case the above tree matches as a whole with the
rejected leaf), then we break for the next leaf. There is the
special case of multiple arguments(see code comments) where we
recheck the nodes
Args:
The leaves of the AST tree to be matched
Returns:
A dictionary of node matches with fixers as the keys
"""
current_ac_node = self.root
results = defaultdict(list)
for leaf in leaves:
current_ast_node = leaf
while current_ast_node:
current_ast_node.was_checked = True
for child in current_ast_node.children:
# multiple statements, recheck
if isinstance(child, pytree.Leaf) and child.value == u";":
current_ast_node.was_checked = False
break
if current_ast_node.type == 1:
#name
node_token = current_ast_node.value
else:
node_token = current_ast_node.type
if node_token in current_ac_node.transition_table:
#token matches
current_ac_node = current_ac_node.transition_table[node_token]
for fixer in current_ac_node.fixers:
#results is a defaultdict(list), so no need to seed the key
results[fixer].append(current_ast_node)
else:
#matching failed, reset automaton
current_ac_node = self.root
if (current_ast_node.parent is not None
and current_ast_node.parent.was_checked):
#the rest of the tree upwards has been checked, next leaf
break
#recheck the rejected node once from the root
if node_token in current_ac_node.transition_table:
#token matches
current_ac_node = current_ac_node.transition_table[node_token]
for fixer in current_ac_node.fixers:
results[fixer].append(current_ast_node)
current_ast_node = current_ast_node.parent
return results
def print_ac(self):
"Prints a graphviz diagram of the BM automaton(for debugging)"
print("digraph g{")
def print_node(node):
for subnode_key in node.transition_table.keys():
subnode = node.transition_table[subnode_key]
print("%d -> %d [label=%s] //%s" %
(node.id, subnode.id, type_repr(subnode_key), str(subnode.fixers)))
if subnode_key == 1:
print(subnode.content)
print_node(subnode)
print_node(self.root)
print("}")
# taken from pytree.py for debugging; only used by print_ac
_type_reprs = {}
def type_repr(type_num):
global _type_reprs
if not _type_reprs:
from .pygram import python_symbols
# printing tokens is possible but not as useful
# from .pgen2 import token // token.__dict__.items():
for name, val in python_symbols.__dict__.items():
if type(val) == int: _type_reprs[val] = name
return _type_reprs.setdefault(type_num, type_num)
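# A hedged sketch (hypothetical pattern tokens, not part of lib2to3) of what
# BottomMatcher.add builds: each token of a linear pattern becomes one
# transition in a trie of BMNode objects, shared between fixers.
#
#     bm = BottomMatcher()
#     ends = bm.add(['name', 257, 'args'], start=bm.root)
#     # bm.root.transition_table now maps 'name' -> BMNode, whose own table
#     # maps 257 -> BMNode, and so on; `ends` holds the final node, to which
#     # add_fixer would append the owning fixer.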
|
apache-2.0
| -2,602,448,573,515,517,000 | -2,800,807,042,735,513,600 | 39.678571 | 89 | 0.567164 | false |
mollstam/UnrealPy
|
UnrealPyEmbed/Source/Python/Lib/python27/test/test_tcl.py
|
11
|
29639
|
import unittest
import re
import sys
import os
from test import test_support
from subprocess import Popen, PIPE
# Skip this test if the _tkinter module wasn't built.
_tkinter = test_support.import_module('_tkinter')
# Make sure tkinter._fix runs to set up the environment
tkinter = test_support.import_fresh_module('Tkinter')
from Tkinter import Tcl
from _tkinter import TclError
try:
from _testcapi import INT_MAX, PY_SSIZE_T_MAX
except ImportError:
INT_MAX = PY_SSIZE_T_MAX = sys.maxsize
tcl_version = tuple(map(int, _tkinter.TCL_VERSION.split('.')))
_tk_patchlevel = None
def get_tk_patchlevel():
global _tk_patchlevel
if _tk_patchlevel is None:
tcl = Tcl()
patchlevel = tcl.call('info', 'patchlevel')
m = re.match(r'(\d+)\.(\d+)([ab.])(\d+)$', patchlevel)
major, minor, releaselevel, serial = m.groups()
major, minor, serial = int(major), int(minor), int(serial)
releaselevel = {'a': 'alpha', 'b': 'beta', '.': 'final'}[releaselevel]
if releaselevel == 'final':
_tk_patchlevel = major, minor, serial, releaselevel, 0
else:
_tk_patchlevel = major, minor, 0, releaselevel, serial
return _tk_patchlevel
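# A hedged illustration of the regex above on hypothetical patchlevel
# strings (final releases use '.', alpha/beta releases use 'a'/'b'):
#
#     >>> re.match(r'(\d+)\.(\d+)([ab.])(\d+)$', '8.5.8').groups()
#     ('8', '5', '.', '8')
#     >>> re.match(r'(\d+)\.(\d+)([ab.])(\d+)$', '8.6b1').groups()
#     ('8', '6', 'b', '1')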
class TkinterTest(unittest.TestCase):
def testFlattenLen(self):
# flatten(<object with no length>)
self.assertRaises(TypeError, _tkinter._flatten, True)
class TclTest(unittest.TestCase):
def setUp(self):
self.interp = Tcl()
self.wantobjects = self.interp.tk.wantobjects()
def testEval(self):
tcl = self.interp
tcl.eval('set a 1')
self.assertEqual(tcl.eval('set a'),'1')
def testEvalException(self):
tcl = self.interp
self.assertRaises(TclError,tcl.eval,'set a')
def testEvalException2(self):
tcl = self.interp
self.assertRaises(TclError,tcl.eval,'this is wrong')
def testCall(self):
tcl = self.interp
tcl.call('set','a','1')
self.assertEqual(tcl.call('set','a'),'1')
def testCallException(self):
tcl = self.interp
self.assertRaises(TclError,tcl.call,'set','a')
def testCallException2(self):
tcl = self.interp
self.assertRaises(TclError,tcl.call,'this','is','wrong')
def testSetVar(self):
tcl = self.interp
tcl.setvar('a','1')
self.assertEqual(tcl.eval('set a'),'1')
def testSetVarArray(self):
tcl = self.interp
tcl.setvar('a(1)','1')
self.assertEqual(tcl.eval('set a(1)'),'1')
def testGetVar(self):
tcl = self.interp
tcl.eval('set a 1')
self.assertEqual(tcl.getvar('a'),'1')
def testGetVarArray(self):
tcl = self.interp
tcl.eval('set a(1) 1')
self.assertEqual(tcl.getvar('a(1)'),'1')
def testGetVarException(self):
tcl = self.interp
self.assertRaises(TclError,tcl.getvar,'a')
def testGetVarArrayException(self):
tcl = self.interp
self.assertRaises(TclError,tcl.getvar,'a(1)')
def testUnsetVar(self):
tcl = self.interp
tcl.setvar('a',1)
self.assertEqual(tcl.eval('info exists a'),'1')
tcl.unsetvar('a')
self.assertEqual(tcl.eval('info exists a'),'0')
def testUnsetVarArray(self):
tcl = self.interp
tcl.setvar('a(1)',1)
tcl.setvar('a(2)',2)
self.assertEqual(tcl.eval('info exists a(1)'),'1')
self.assertEqual(tcl.eval('info exists a(2)'),'1')
tcl.unsetvar('a(1)')
self.assertEqual(tcl.eval('info exists a(1)'),'0')
self.assertEqual(tcl.eval('info exists a(2)'),'1')
def testUnsetVarException(self):
tcl = self.interp
self.assertRaises(TclError,tcl.unsetvar,'a')
def get_integers(self):
integers = (0, 1, -1, 2**31-1, -2**31)
if tcl_version >= (8, 4): # wideInt was added in Tcl 8.4
integers += (2**31, -2**31-1, 2**63-1, -2**63)
# bignum was added in Tcl 8.5, but its support is only usable since 8.5.8
if (get_tk_patchlevel() >= (8, 6, 0, 'final') or
(8, 5, 8) <= get_tk_patchlevel() < (8, 6)):
integers += (2**63, -2**63-1, 2**1000, -2**1000)
return integers
def test_getint(self):
tcl = self.interp.tk
for i in self.get_integers():
result = tcl.getint(' %d ' % i)
self.assertEqual(result, i)
self.assertIsInstance(result, type(int(result)))
if tcl_version >= (8, 5):
self.assertEqual(tcl.getint(' {:#o} '.format(i)), i)
self.assertEqual(tcl.getint(' %#o ' % i), i)
self.assertEqual(tcl.getint(' %#x ' % i), i)
if tcl_version < (8, 5): # bignum was added in Tcl 8.5
self.assertRaises(TclError, tcl.getint, str(2**1000))
self.assertEqual(tcl.getint(42), 42)
self.assertRaises(TypeError, tcl.getint)
self.assertRaises(TypeError, tcl.getint, '42', '10')
self.assertRaises(TypeError, tcl.getint, 42.0)
self.assertRaises(TclError, tcl.getint, 'a')
self.assertRaises((TypeError, ValueError, TclError),
tcl.getint, '42\0')
if test_support.have_unicode:
self.assertEqual(tcl.getint(unicode('42')), 42)
self.assertRaises((UnicodeEncodeError, ValueError, TclError),
tcl.getint, '42' + unichr(0xd800))
def test_getdouble(self):
tcl = self.interp.tk
self.assertEqual(tcl.getdouble(' 42 '), 42.0)
self.assertEqual(tcl.getdouble(' 42.5 '), 42.5)
self.assertEqual(tcl.getdouble(42.5), 42.5)
self.assertRaises(TypeError, tcl.getdouble)
self.assertRaises(TypeError, tcl.getdouble, '42.5', '10')
self.assertRaises(TypeError, tcl.getdouble, 42)
self.assertRaises(TclError, tcl.getdouble, 'a')
self.assertRaises((TypeError, ValueError, TclError),
tcl.getdouble, '42.5\0')
if test_support.have_unicode:
self.assertEqual(tcl.getdouble(unicode('42.5')), 42.5)
self.assertRaises((UnicodeEncodeError, ValueError, TclError),
tcl.getdouble, '42.5' + unichr(0xd800))
def test_getboolean(self):
tcl = self.interp.tk
self.assertIs(tcl.getboolean('on'), True)
self.assertIs(tcl.getboolean('1'), True)
self.assertIs(tcl.getboolean(u'on'), True)
self.assertIs(tcl.getboolean(u'1'), True)
self.assertIs(tcl.getboolean(42), True)
self.assertIs(tcl.getboolean(0), False)
self.assertIs(tcl.getboolean(42L), True)
self.assertIs(tcl.getboolean(0L), False)
self.assertRaises(TypeError, tcl.getboolean)
self.assertRaises(TypeError, tcl.getboolean, 'on', '1')
self.assertRaises(TypeError, tcl.getboolean, 1.0)
self.assertRaises(TclError, tcl.getboolean, 'a')
self.assertRaises((TypeError, ValueError, TclError),
tcl.getboolean, 'on\0')
if test_support.have_unicode:
self.assertIs(tcl.getboolean(unicode('on')), True)
self.assertRaises((UnicodeEncodeError, ValueError, TclError),
tcl.getboolean, 'on' + unichr(0xd800))
def testEvalFile(self):
tcl = self.interp
filename = "testEvalFile.tcl"
fd = open(filename,'w')
script = """set a 1
set b 2
set c [ expr $a + $b ]
"""
fd.write(script)
fd.close()
tcl.evalfile(filename)
os.remove(filename)
self.assertEqual(tcl.eval('set a'),'1')
self.assertEqual(tcl.eval('set b'),'2')
self.assertEqual(tcl.eval('set c'),'3')
def test_evalfile_null_in_result(self):
tcl = self.interp
with open(test_support.TESTFN, 'wb') as f:
self.addCleanup(test_support.unlink, test_support.TESTFN)
f.write("""
set a "a\0b"
set b "a\\0b"
""")
tcl.evalfile(test_support.TESTFN)
self.assertEqual(tcl.eval('set a'), 'a\xc0\x80b')
self.assertEqual(tcl.eval('set b'), 'a\xc0\x80b')
def testEvalFileException(self):
tcl = self.interp
filename = "doesnotexists"
try:
os.remove(filename)
except Exception,e:
pass
self.assertRaises(TclError,tcl.evalfile,filename)
def testPackageRequireException(self):
tcl = self.interp
self.assertRaises(TclError,tcl.eval,'package require DNE')
@unittest.skipUnless(sys.platform == 'win32', "only applies to Windows")
def testLoadWithUNC(self):
# Build a UNC path from the regular path.
# Something like
# \\%COMPUTERNAME%\c$\python27\python.exe
fullname = os.path.abspath(sys.executable)
if fullname[1] != ':':
self.skipTest('unusable path: %r' % fullname)
unc_name = r'\\%s\%s$\%s' % (os.environ['COMPUTERNAME'],
fullname[0],
fullname[3:])
with test_support.EnvironmentVarGuard() as env:
env.unset("TCL_LIBRARY")
cmd = '%s -c "import Tkinter; print Tkinter"' % (unc_name,)
try:
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
except WindowsError as e:
if e.winerror == 5:
self.skipTest('Not permitted to start the child process')
else:
raise
out_data, err_data = p.communicate()
msg = '\n\n'.join(['"Tkinter.py" not in output',
'Command:', cmd,
'stdout:', out_data,
'stderr:', err_data])
self.assertIn('Tkinter.py', out_data, msg)
self.assertEqual(p.wait(), 0, 'Non-zero exit code')
def test_exprstring(self):
tcl = self.interp
tcl.call('set', 'a', 3)
tcl.call('set', 'b', 6)
def check(expr, expected):
result = tcl.exprstring(expr)
self.assertEqual(result, expected)
self.assertIsInstance(result, str)
self.assertRaises(TypeError, tcl.exprstring)
self.assertRaises(TypeError, tcl.exprstring, '8.2', '+6')
self.assertRaises(TclError, tcl.exprstring, 'spam')
check('', '0')
check('8.2 + 6', '14.2')
check('3.1 + $a', '6.1')
check('2 + "$a.$b"', '5.6')
check('4*[llength "6 2"]', '8')
check('{word one} < "word $a"', '0')
check('4*2 < 7', '0')
check('hypot($a, 4)', '5.0')
check('5 / 4', '1')
check('5 / 4.0', '1.25')
check('5 / ( [string length "abcd"] + 0.0 )', '1.25')
check('20.0/5.0', '4.0')
check('"0x03" > "2"', '1')
check('[string length "a\xc2\xbd\xe2\x82\xac"]', '3')
check(r'[string length "a\xbd\u20ac"]', '3')
check('"abc"', 'abc')
check('"a\xc2\xbd\xe2\x82\xac"', 'a\xc2\xbd\xe2\x82\xac')
check(r'"a\xbd\u20ac"', 'a\xc2\xbd\xe2\x82\xac')
check(r'"a\0b"', 'a\xc0\x80b')
if tcl_version >= (8, 5): # bignum was added in Tcl 8.5
check('2**64', str(2**64))
def test_exprdouble(self):
tcl = self.interp
tcl.call('set', 'a', 3)
tcl.call('set', 'b', 6)
def check(expr, expected):
result = tcl.exprdouble(expr)
self.assertEqual(result, expected)
self.assertIsInstance(result, float)
self.assertRaises(TypeError, tcl.exprdouble)
self.assertRaises(TypeError, tcl.exprdouble, '8.2', '+6')
self.assertRaises(TclError, tcl.exprdouble, 'spam')
check('', 0.0)
check('8.2 + 6', 14.2)
check('3.1 + $a', 6.1)
check('2 + "$a.$b"', 5.6)
check('4*[llength "6 2"]', 8.0)
check('{word one} < "word $a"', 0.0)
check('4*2 < 7', 0.0)
check('hypot($a, 4)', 5.0)
check('5 / 4', 1.0)
check('5 / 4.0', 1.25)
check('5 / ( [string length "abcd"] + 0.0 )', 1.25)
check('20.0/5.0', 4.0)
check('"0x03" > "2"', 1.0)
check('[string length "a\xc2\xbd\xe2\x82\xac"]', 3.0)
check(r'[string length "a\xbd\u20ac"]', 3.0)
self.assertRaises(TclError, tcl.exprdouble, '"abc"')
if tcl_version >= (8, 5): # bignum was added in Tcl 8.5
check('2**64', float(2**64))
def test_exprlong(self):
tcl = self.interp
tcl.call('set', 'a', 3)
tcl.call('set', 'b', 6)
def check(expr, expected):
result = tcl.exprlong(expr)
self.assertEqual(result, expected)
self.assertIsInstance(result, int)
self.assertRaises(TypeError, tcl.exprlong)
self.assertRaises(TypeError, tcl.exprlong, '8.2', '+6')
self.assertRaises(TclError, tcl.exprlong, 'spam')
check('', 0)
check('8.2 + 6', 14)
check('3.1 + $a', 6)
check('2 + "$a.$b"', 5)
check('4*[llength "6 2"]', 8)
check('{word one} < "word $a"', 0)
check('4*2 < 7', 0)
check('hypot($a, 4)', 5)
check('5 / 4', 1)
check('5 / 4.0', 1)
check('5 / ( [string length "abcd"] + 0.0 )', 1)
check('20.0/5.0', 4)
check('"0x03" > "2"', 1)
check('[string length "a\xc2\xbd\xe2\x82\xac"]', 3)
check(r'[string length "a\xbd\u20ac"]', 3)
self.assertRaises(TclError, tcl.exprlong, '"abc"')
if tcl_version >= (8, 5): # bignum was added in Tcl 8.5
self.assertRaises(TclError, tcl.exprlong, '2**64')
def test_exprboolean(self):
tcl = self.interp
tcl.call('set', 'a', 3)
tcl.call('set', 'b', 6)
def check(expr, expected):
result = tcl.exprboolean(expr)
self.assertEqual(result, expected)
self.assertIsInstance(result, int)
self.assertNotIsInstance(result, bool)
self.assertRaises(TypeError, tcl.exprboolean)
self.assertRaises(TypeError, tcl.exprboolean, '8.2', '+6')
self.assertRaises(TclError, tcl.exprboolean, 'spam')
check('', False)
for value in ('0', 'false', 'no', 'off'):
check(value, False)
check('"%s"' % value, False)
check('{%s}' % value, False)
for value in ('1', 'true', 'yes', 'on'):
check(value, True)
check('"%s"' % value, True)
check('{%s}' % value, True)
check('8.2 + 6', True)
check('3.1 + $a', True)
check('2 + "$a.$b"', True)
check('4*[llength "6 2"]', True)
check('{word one} < "word $a"', False)
check('4*2 < 7', False)
check('hypot($a, 4)', True)
check('5 / 4', True)
check('5 / 4.0', True)
check('5 / ( [string length "abcd"] + 0.0 )', True)
check('20.0/5.0', True)
check('"0x03" > "2"', True)
check('[string length "a\xc2\xbd\xe2\x82\xac"]', True)
check(r'[string length "a\xbd\u20ac"]', True)
self.assertRaises(TclError, tcl.exprboolean, '"abc"')
if tcl_version >= (8, 5): # bignum was added in Tcl 8.5
check('2**64', True)
@unittest.skipUnless(tcl_version >= (8, 5), 'requires Tcl version >= 8.5')
def test_booleans(self):
tcl = self.interp
def check(expr, expected):
result = tcl.call('expr', expr)
if tcl.wantobjects():
self.assertEqual(result, expected)
self.assertIsInstance(result, int)
else:
self.assertIn(result, (expr, str(int(expected))))
self.assertIsInstance(result, str)
check('true', True)
check('yes', True)
check('on', True)
check('false', False)
check('no', False)
check('off', False)
check('1 < 2', True)
check('1 > 2', False)
def test_expr_bignum(self):
tcl = self.interp
for i in self.get_integers():
result = tcl.call('expr', str(i))
if self.wantobjects:
self.assertEqual(result, i)
self.assertIsInstance(result, (int, long))
if abs(result) < 2**31:
self.assertIsInstance(result, int)
else:
self.assertEqual(result, str(i))
self.assertIsInstance(result, str)
if tcl_version < (8, 5): # bignum was added in Tcl 8.5
self.assertRaises(TclError, tcl.call, 'expr', str(2**1000))
def test_passing_values(self):
def passValue(value):
return self.interp.call('set', '_', value)
self.assertEqual(passValue(True), True if self.wantobjects else '1')
self.assertEqual(passValue(False), False if self.wantobjects else '0')
self.assertEqual(passValue('string'), 'string')
self.assertEqual(passValue('string\xbd'), 'string\xbd')
self.assertEqual(passValue('string\xe2\x82\xac'), u'string\u20ac')
self.assertEqual(passValue(u'string'), u'string')
self.assertEqual(passValue(u'string\xbd'), u'string\xbd')
self.assertEqual(passValue(u'string\u20ac'), u'string\u20ac')
self.assertEqual(passValue('str\x00ing'), 'str\x00ing')
self.assertEqual(passValue('str\xc0\x80ing'), 'str\x00ing')
self.assertEqual(passValue(u'str\x00ing'), u'str\x00ing')
self.assertEqual(passValue(u'str\x00ing\xbd'), u'str\x00ing\xbd')
self.assertEqual(passValue(u'str\x00ing\u20ac'), u'str\x00ing\u20ac')
for i in self.get_integers():
self.assertEqual(passValue(i), i if self.wantobjects else str(i))
if tcl_version < (8, 5): # bignum was added in Tcl 8.5
self.assertEqual(passValue(2**1000), str(2**1000))
for f in (0.0, 1.0, -1.0, 1//3, 1/3.0,
sys.float_info.min, sys.float_info.max,
-sys.float_info.min, -sys.float_info.max):
if self.wantobjects:
self.assertEqual(passValue(f), f)
else:
self.assertEqual(float(passValue(f)), f)
if self.wantobjects:
f = passValue(float('nan'))
self.assertNotEqual(f, f)
self.assertEqual(passValue(float('inf')), float('inf'))
self.assertEqual(passValue(-float('inf')), -float('inf'))
else:
self.assertEqual(float(passValue(float('inf'))), float('inf'))
self.assertEqual(float(passValue(-float('inf'))), -float('inf'))
# XXX NaN representation can be not parsable by float()
self.assertEqual(passValue((1, '2', (3.4,))),
(1, '2', (3.4,)) if self.wantobjects else '1 2 3.4')
def test_user_command(self):
result = []
def testfunc(arg):
result.append(arg)
return arg
self.interp.createcommand('testfunc', testfunc)
self.addCleanup(self.interp.tk.deletecommand, 'testfunc')
def check(value, expected=None, eq=self.assertEqual):
if expected is None:
expected = value
del result[:]
r = self.interp.call('testfunc', value)
self.assertEqual(len(result), 1)
self.assertIsInstance(result[0], (str, unicode))
eq(result[0], expected)
self.assertIsInstance(r, (str, unicode))
eq(r, expected)
def float_eq(actual, expected):
self.assertAlmostEqual(float(actual), expected,
delta=abs(expected) * 1e-10)
check(True, '1')
check(False, '0')
check('string')
check('string\xbd')
check('string\xe2\x82\xac', u'string\u20ac')
check('')
check(u'string')
check(u'string\xbd')
check(u'string\u20ac')
check(u'')
check('str\xc0\x80ing', u'str\x00ing')
check('str\xc0\x80ing\xe2\x82\xac', u'str\x00ing\u20ac')
check(u'str\x00ing')
check(u'str\x00ing\xbd')
check(u'str\x00ing\u20ac')
for i in self.get_integers():
check(i, str(i))
if tcl_version < (8, 5): # bignum was added in Tcl 8.5
check(2**1000, str(2**1000))
for f in (0.0, 1.0, -1.0):
check(f, repr(f))
for f in (1/3.0, sys.float_info.min, sys.float_info.max,
-sys.float_info.min, -sys.float_info.max):
check(f, eq=float_eq)
check(float('inf'), eq=float_eq)
check(-float('inf'), eq=float_eq)
# XXX NaN representation can be not parsable by float()
check((), '')
check((1, (2,), (3, 4), '5 6', ()), '1 2 {3 4} {5 6} {}')
def test_splitlist(self):
splitlist = self.interp.tk.splitlist
call = self.interp.tk.call
self.assertRaises(TypeError, splitlist)
self.assertRaises(TypeError, splitlist, 'a', 'b')
self.assertRaises(TypeError, splitlist, 2)
testcases = [
('2', ('2',)),
('', ()),
('{}', ('',)),
('""', ('',)),
('a\n b\t\r c\n ', ('a', 'b', 'c')),
(u'a\n b\t\r c\n ', ('a', 'b', 'c')),
('a \xe2\x82\xac', ('a', '\xe2\x82\xac')),
(u'a \u20ac', ('a', '\xe2\x82\xac')),
('a\xc0\x80b c\xc0\x80d', ('a\xc0\x80b', 'c\xc0\x80d')),
('a {b c}', ('a', 'b c')),
(r'a b\ c', ('a', 'b c')),
(('a', 'b c'), ('a', 'b c')),
('a 2', ('a', '2')),
(('a', 2), ('a', 2)),
('a 3.4', ('a', '3.4')),
(('a', 3.4), ('a', 3.4)),
((), ()),
(call('list', 1, '2', (3.4,)),
(1, '2', (3.4,)) if self.wantobjects else
('1', '2', '3.4')),
]
if tcl_version >= (8, 5):
if not self.wantobjects:
expected = ('12', '\xe2\x82\xac', '\xe2\x82\xac', '3.4')
elif get_tk_patchlevel() < (8, 5, 5):
# Before 8.5.5 dicts were converted to lists through string
expected = ('12', u'\u20ac', u'\u20ac', '3.4')
else:
expected = (12, u'\u20ac', u'\u20ac', (3.4,))
testcases += [
(call('dict', 'create', 12, u'\u20ac', '\xe2\x82\xac', (3.4,)),
expected),
]
for arg, res in testcases:
self.assertEqual(splitlist(arg), res)
self.assertRaises(TclError, splitlist, '{')
def test_split(self):
split = self.interp.tk.split
call = self.interp.tk.call
self.assertRaises(TypeError, split)
self.assertRaises(TypeError, split, 'a', 'b')
self.assertRaises(TypeError, split, 2)
testcases = [
('2', '2'),
('', ''),
('{}', ''),
('""', ''),
('{', '{'),
('a\n b\t\r c\n ', ('a', 'b', 'c')),
(u'a\n b\t\r c\n ', ('a', 'b', 'c')),
('a \xe2\x82\xac', ('a', '\xe2\x82\xac')),
(u'a \u20ac', ('a', '\xe2\x82\xac')),
('a\xc0\x80b', 'a\xc0\x80b'),
('a\xc0\x80b c\xc0\x80d', ('a\xc0\x80b', 'c\xc0\x80d')),
('a {b c}', ('a', ('b', 'c'))),
(r'a b\ c', ('a', ('b', 'c'))),
(('a', 'b c'), ('a', ('b', 'c'))),
(('a', u'b c'), ('a', ('b', 'c'))),
('a 2', ('a', '2')),
(('a', 2), ('a', 2)),
('a 3.4', ('a', '3.4')),
(('a', 3.4), ('a', 3.4)),
(('a', (2, 3.4)), ('a', (2, 3.4))),
((), ()),
(call('list', 1, '2', (3.4,)),
(1, '2', (3.4,)) if self.wantobjects else
('1', '2', '3.4')),
]
if tcl_version >= (8, 5):
if not self.wantobjects:
expected = ('12', '\xe2\x82\xac', '\xe2\x82\xac', '3.4')
elif get_tk_patchlevel() < (8, 5, 5):
# Before 8.5.5 dicts were converted to lists through string
expected = ('12', u'\u20ac', u'\u20ac', '3.4')
else:
expected = (12, u'\u20ac', u'\u20ac', (3.4,))
testcases += [
(call('dict', 'create', 12, u'\u20ac', '\xe2\x82\xac', (3.4,)),
expected),
]
for arg, res in testcases:
self.assertEqual(split(arg), res)
def test_splitdict(self):
splitdict = tkinter._splitdict
tcl = self.interp.tk
arg = '-a {1 2 3} -something foo status {}'
self.assertEqual(splitdict(tcl, arg, False),
{'-a': '1 2 3', '-something': 'foo', 'status': ''})
self.assertEqual(splitdict(tcl, arg),
{'a': '1 2 3', 'something': 'foo', 'status': ''})
arg = ('-a', (1, 2, 3), '-something', 'foo', 'status', '{}')
self.assertEqual(splitdict(tcl, arg, False),
{'-a': (1, 2, 3), '-something': 'foo', 'status': '{}'})
self.assertEqual(splitdict(tcl, arg),
{'a': (1, 2, 3), 'something': 'foo', 'status': '{}'})
self.assertRaises(RuntimeError, splitdict, tcl, '-a b -c ')
self.assertRaises(RuntimeError, splitdict, tcl, ('-a', 'b', '-c'))
arg = tcl.call('list',
'-a', (1, 2, 3), '-something', 'foo', 'status', ())
self.assertEqual(splitdict(tcl, arg),
{'a': (1, 2, 3) if self.wantobjects else '1 2 3',
'something': 'foo', 'status': ''})
if tcl_version >= (8, 5):
arg = tcl.call('dict', 'create',
'-a', (1, 2, 3), '-something', 'foo', 'status', ())
if not self.wantobjects or get_tk_patchlevel() < (8, 5, 5):
# Before 8.5.5 dicts were converted to lists through string
expected = {'a': '1 2 3', 'something': 'foo', 'status': ''}
else:
expected = {'a': (1, 2, 3), 'something': 'foo', 'status': ''}
self.assertEqual(splitdict(tcl, arg), expected)
character_size = 4 if sys.maxunicode > 0xFFFF else 2
class BigmemTclTest(unittest.TestCase):
def setUp(self):
self.interp = Tcl()
@test_support.cpython_only
@unittest.skipUnless(INT_MAX < PY_SSIZE_T_MAX, "needs UINT_MAX < SIZE_MAX")
@test_support.precisionbigmemtest(size=INT_MAX + 1, memuse=5, dry_run=False)
def test_huge_string_call(self, size):
value = ' ' * size
self.assertRaises(OverflowError, self.interp.call, 'set', '_', value)
@test_support.cpython_only
@unittest.skipUnless(test_support.have_unicode, 'requires unicode support')
@unittest.skipUnless(INT_MAX < PY_SSIZE_T_MAX, "needs UINT_MAX < SIZE_MAX")
@test_support.precisionbigmemtest(size=INT_MAX + 1,
memuse=2*character_size + 2,
dry_run=False)
def test_huge_unicode_call(self, size):
value = unicode(' ') * size
self.assertRaises(OverflowError, self.interp.call, 'set', '_', value)
@test_support.cpython_only
@unittest.skipUnless(INT_MAX < PY_SSIZE_T_MAX, "needs UINT_MAX < SIZE_MAX")
@test_support.precisionbigmemtest(size=INT_MAX + 1, memuse=9, dry_run=False)
def test_huge_string_builtins(self, size):
value = '1' + ' ' * size
self.check_huge_string_builtins(value)
@test_support.cpython_only
@unittest.skipUnless(test_support.have_unicode, 'requires unicode support')
@unittest.skipUnless(INT_MAX < PY_SSIZE_T_MAX, "needs UINT_MAX < SIZE_MAX")
@test_support.precisionbigmemtest(size=INT_MAX + 1,
memuse=2*character_size + 7,
dry_run=False)
def test_huge_unicode_builtins(self, size):
value = unicode('1' + ' ' * size)
self.check_huge_string_builtins(value)
def check_huge_string_builtins(self, value):
self.assertRaises(OverflowError, self.interp.tk.getint, value)
self.assertRaises(OverflowError, self.interp.tk.getdouble, value)
self.assertRaises(OverflowError, self.interp.tk.getboolean, value)
self.assertRaises(OverflowError, self.interp.eval, value)
self.assertRaises(OverflowError, self.interp.evalfile, value)
self.assertRaises(OverflowError, self.interp.record, value)
self.assertRaises(OverflowError, self.interp.adderrorinfo, value)
self.assertRaises(OverflowError, self.interp.setvar, value, 'x', 'a')
self.assertRaises(OverflowError, self.interp.setvar, 'x', value, 'a')
self.assertRaises(OverflowError, self.interp.unsetvar, value)
self.assertRaises(OverflowError, self.interp.unsetvar, 'x', value)
self.assertRaises(OverflowError, self.interp.adderrorinfo, value)
self.assertRaises(OverflowError, self.interp.exprstring, value)
self.assertRaises(OverflowError, self.interp.exprlong, value)
self.assertRaises(OverflowError, self.interp.exprboolean, value)
self.assertRaises(OverflowError, self.interp.splitlist, value)
self.assertRaises(OverflowError, self.interp.split, value)
self.assertRaises(OverflowError, self.interp.createcommand, value, max)
self.assertRaises(OverflowError, self.interp.deletecommand, value)
def setUpModule():
if test_support.verbose:
tcl = Tcl()
print 'patchlevel =', tcl.call('info', 'patchlevel')
def test_main():
test_support.run_unittest(TclTest, TkinterTest, BigmemTclTest)
if __name__ == "__main__":
test_main()
|
mit
| -3,273,086,371,378,449,400 | 4,015,046,754,632,952,000 | 38.99865 | 80 | 0.532542 | false |
lstephen/construi
|
construi/errors.py
|
1
|
1545
|
import sys
import traceback
from typing import Any, Callable, Dict, NoReturn
import construi.console as console
from compose.errors import OperationFailedError
from compose.service import BuildError
from docker.errors import APIError
from .config import ConfigException, NoSuchTargetException
from .target import BuildFailedException
def show_error(fmt, arg=lambda e: "", show_traceback=False):
# type: (str, Callable[[Any], Any], bool) -> Callable[[Exception], None]
def f(e):
# type: (Exception) -> None
console.error(("\n" + fmt + "\n").format(arg(e)))
if show_traceback:
traceback.print_exc()
return f
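# A minimal sketch of the factory above, assuming a hypothetical exception
# type whose instances expose a .msg attribute:
#
#     handler = show_error("Configuration Error: {}", lambda e: e.msg)
#     handler(SomeConfigError("missing key"))  # prints the formatted message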
def on_keyboard_interrupt(e):
# type: (KeyboardInterrupt) -> None
console.warn("\nBuild Interrupted.")
def on_unhandled_exception(e):
# type: (Exception) -> NoReturn
raise e
HANDLERS = {
KeyboardInterrupt: on_keyboard_interrupt,
APIError: show_error("Docker Error: {}", lambda e: e.explanation),
OperationFailedError: show_error(
"Unexpected Error: {}", lambda e: e.msg, show_traceback=True
),
BuildError: show_error("Error building docker image."),
NoSuchTargetException: show_error("No such target: {}", lambda e: e.target),
ConfigException: show_error("Configuration Error: {}", lambda e: e.msg),
BuildFailedException: show_error("Build Failed."),
} # type: Dict[Any, Callable[[Any], None]]
def on_exception(e):
# type: (Exception) -> NoReturn
HANDLERS.get(type(e), on_unhandled_exception)(e)
sys.exit(1)
|
apache-2.0
| 4,957,551,427,214,988,000 | -2,446,776,460,216,278,000 | 28.711538 | 80 | 0.682201 | false |
garvitr/sympy
|
sympy/strategies/branch/tests/test_core.py
|
58
|
2416
|
from sympy.strategies.branch.core import (exhaust, debug, multiplex,
condition, notempty, chain, onaction, sfilter, yieldify, do_one,
identity)
from sympy.core.compatibility import get_function_name, range
def posdec(x):
if x > 0:
yield x-1
else:
yield x
def branch5(x):
if 0 < x < 5:
yield x-1
elif 5 < x < 10:
yield x+1
elif x == 5:
yield x+1
yield x-1
else:
yield x
even = lambda x: x%2 == 0
def inc(x):
yield x + 1
def one_to_n(n):
for i in range(n):
yield i
def test_exhaust():
brl = exhaust(branch5)
assert set(brl(3)) == set([0])
assert set(brl(7)) == set([10])
assert set(brl(5)) == set([0, 10])
def test_debug():
from sympy.core.compatibility import StringIO
file = StringIO()
rl = debug(posdec, file)
list(rl(5))
log = file.getvalue()
file.close()
assert get_function_name(posdec) in log
assert '5' in log
assert '4' in log
def test_multiplex():
brl = multiplex(posdec, branch5)
assert set(brl(3)) == set([2])
assert set(brl(7)) == set([6, 8])
assert set(brl(5)) == set([4, 6])
def test_condition():
brl = condition(even, branch5)
assert set(brl(4)) == set(branch5(4))
assert set(brl(5)) == set([])
def test_sfilter():
brl = sfilter(even, one_to_n)
assert set(brl(10)) == set([0, 2, 4, 6, 8])
def test_notempty():
def ident_if_even(x):
if even(x):
yield x
brl = notempty(ident_if_even)
assert set(brl(4)) == set([4])
assert set(brl(5)) == set([5])
def test_chain():
assert list(chain()(2)) == [2] # identity
assert list(chain(inc, inc)(2)) == [4]
assert list(chain(branch5, inc)(4)) == [4]
assert set(chain(branch5, inc)(5)) == set([5, 7])
assert list(chain(inc, branch5)(5)) == [7]
def test_onaction():
L = []
def record(fn, input, output):
L.append((input, output))
list(onaction(inc, record)(2))
assert L == [(2, 3)]
list(onaction(identity, record)(2))
assert L == [(2, 3)]
def test_yieldify():
inc = lambda x: x + 1
yinc = yieldify(inc)
assert list(yinc(3)) == [4]
def test_do_one():
def bad(expr):
raise ValueError()
yield False
assert list(do_one(inc)(3)) == [4]
assert list(do_one(inc, bad)(3)) == [4]
assert list(do_one(inc, posdec)(3)) == [4]
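# A minimal demonstration of exhaust's fixed-point behaviour, assuming this
# file is executed directly (hypothetical; the test runner does not use it):
if __name__ == '__main__':
    print(sorted(exhaust(branch5)(5)))  # 5 branches down to 0 and up to 10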
|
bsd-3-clause
| 2,487,779,981,402,664,000 | -3,725,254,205,702,953,000 | 22.009524 | 72 | 0.555877 | false |
tedi3231/openerp
|
build/lib/openerp/addons/project_mrp/__init__.py
|
68
|
1092
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import project_procurement
import project_mrp
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
| 4,258,098,064,661,350,000 | -3,348,697,538,537,579,500 | 42.68 | 78 | 0.621795 | false |
Jitsusama/lets-do-dns
|
tests/unit/acme_dns_auth/authenticate/test_cleanup_stage.py
|
1
|
2731
|
"""Tests the lets_do_dns.acme_dns_auth.authenticate.py module."""
from mock import call, ANY
import pytest
from lets_do_dns.environment import Environment
from lets_do_dns.acme_dns_auth.authenticate import Authenticate
def test_properly_initializes_resource(mocker):
stub_environment = mocker.MagicMock(
spec=Environment,
api_key='stub-api-key', domain='stub-domain', validation_key=None,
fqdn='stub-host.stub-domain', record_id=984567, post_cmd=None)
mock_resource = mocker.patch(
'lets_do_dns.acme_dns_auth.authenticate.Resource')
authentication = Authenticate(environment=stub_environment)
authentication.perform()
mock_resource.assert_called_once_with(
'stub-api-key', '_acme-challenge.stub-host',
'stub-domain', None, 984567)
def test_triggers_resource_delete_after_resource_init(mocker):
stub_environment = mocker.MagicMock(
spec=Environment,
api_key=None, domain='stub-domain', validation_key=None,
fqdn='stub-host.stub-domain', record_id=0, post_cmd=None)
mock_resource = mocker.patch(
'lets_do_dns.acme_dns_auth.authenticate.Resource')
authentication = Authenticate(environment=stub_environment)
authentication.perform()
initialize_then_delete = [
call(ANY, ANY, ANY, ANY, ANY),
call().delete()]
mock_resource.assert_has_calls(initialize_then_delete)
def test_does_not_call_sleep(mocker):
stub_environment = mocker.MagicMock(
spec=Environment,
api_key=None, domain='stub-domain', validation_key=None,
fqdn='stub-host.stub-domain', record_id=1, post_cmd=None)
mocker.patch('lets_do_dns.acme_dns_auth.authenticate.Resource')
mock_sleep = mocker.patch(
'lets_do_dns.acme_dns_auth.authenticate.sleep')
authentication = Authenticate(environment=stub_environment)
authentication.perform()
mock_sleep.assert_not_called()
@pytest.mark.parametrize(
'fqdn', ['stub-host1.stub-domain', 'stub-host2.stub-domain'])
def test_passes_postcmd_to_run(mocker, fqdn):
stub_environment = mocker.MagicMock(
spec=Environment,
api_key=None, domain='stub-domain', validation_key=None,
fqdn=fqdn, record_id=3, post_cmd='test-program --help')
mocker.patch('lets_do_dns.acme_dns_auth.authenticate.Resource')
mock_run = mocker.patch('lets_do_dns.acme_dns_auth.authenticate.run')
authentication = Authenticate(environment=stub_environment)
authentication.perform()
mock_run.assert_called_once_with(
'test-program --help',
env={'CERTBOT_HOSTNAME': fqdn,
'PATH': ("/bin:/sbin:/usr/bin:/usr/sbin:"
"/usr/local/bin:/usr/local/sbin")})
|
apache-2.0
| 7,843,739,848,194,001,000 | 3,401,985,618,185,654,000 | 33.56962 | 74 | 0.683266 | false |
WSCU/crazyflie_ros
|
lib/cflib/crtp/exceptions.py
|
31
|
1605
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Exception used when the URI is not for the current driver
(i.e. radio:// for the serial driver ...).
It basically means that another driver could do the job.
It does NOT mean that the URI is good or bad.
"""
__author__ = 'Bitcraze AB'
__all__ = ['WrongUriType', 'CommunicationException']
class WrongUriType (Exception):
""" Wrong type of URI for this interface """
pass
class CommunicationException (Exception):
""" Communication problem when communicating with a Crazyflie """
pass
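# A hedged sketch (hypothetical driver, not part of this module) of the
# intended use: a driver raises WrongUriType for URIs it does not own, so
# the link layer can try the next driver instead of reporting a failure.
#
#     class SerialDriver(object):
#         def connect(self, uri):
#             if not uri.startswith('serial://'):
#                 raise WrongUriType('not a serial URI')
#             ...  # open the serial port, or raise CommunicationException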
|
gpl-2.0
| 3,341,380,656,887,272,400 | 7,981,921,806,682,222,000 | 33.148936 | 69 | 0.613707 | false |
WoLpH/CouchPotatoServer
|
couchpotato/core/notifications/nmj/main.py
|
10
|
3675
|
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.encoding import tryUrlencode
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
import re
import telnetlib
try:
import xml.etree.cElementTree as etree
except ImportError:
import xml.etree.ElementTree as etree
log = CPLog(__name__)
class NMJ(Notification):
def __init__(self):
addEvent('renamer.after', self.addToLibrary)
addApiView(self.testNotifyName(), self.test)
addApiView('notify.nmj.auto_config', self.autoConfig)
def autoConfig(self, host = 'localhost', **kwargs):
mount = ''
try:
terminal = telnetlib.Telnet(host)
except Exception:
log.error('Warning: unable to get a telnet session to %s', host)
return self.failed()
log.debug('Connected to %s via telnet', host)
terminal.read_until('sh-3.00# ')
terminal.write('cat /tmp/source\n')
terminal.write('cat /tmp/netshare\n')
terminal.write('exit\n')
tnoutput = terminal.read_all()
match = re.search(r'(.+\.db)\r\n?(.+)(?=sh-3.00# cat /tmp/netshare)', tnoutput)
if match:
database = match.group(1)
device = match.group(2)
log.info('Found NMJ database %s on device %s', (database, device))
else:
log.error('Could not get current NMJ database on %s, NMJ is probably not running!', host)
return self.failed()
if device.startswith('NETWORK_SHARE/'):
match = re.search('.*(?=\r\n?%s)' % (re.escape(device[14:])), tnoutput)
if match:
mount = match.group().replace('127.0.0.1', host)
log.info('Found mounting url on the Popcorn Hour in configuration: %s', mount)
else:
log.error('Detected a network share on the Popcorn Hour, but could not get the mounting url')
return self.failed()
return {
'success': True,
'database': database,
'mount': mount,
}
def addToLibrary(self, message = None, group = None):
if self.isDisabled(): return
if not group: group = {}
host = self.conf('host')
mount = self.conf('mount')
database = self.conf('database')
if mount:
log.debug('Try to mount network drive via url: %s', mount)
try:
self.urlopen(mount)
except:
return False
params = {
'arg0': 'scanner_start',
'arg1': database,
'arg2': 'background',
'arg3': '',
}
params = tryUrlencode(params)
UPDATE_URL = 'http://%(host)s:8008/metadata_database?%(params)s'
updateUrl = UPDATE_URL % {'host': host, 'params': params}
try:
response = self.urlopen(updateUrl)
except:
return False
try:
et = etree.fromstring(response)
result = et.findtext('returnValue')
except SyntaxError, e:
log.error('Unable to parse XML returned from the Popcorn Hour: %s', e)
return False
if int(result) > 0:
log.error('Popcorn Hour returned an errorcode: %s', result)
return False
else:
log.info('NMJ started background scan')
return True
def failed(self):
return {
'success': False
}
def test(self, **kwargs):
return {
'success': self.addToLibrary()
}
|
gpl-3.0
| -7,295,658,608,551,993,000 | 1,278,027,828,111,699,200 | 29.371901 | 109 | 0.56 | false |
miguel-negrao/supercollider
|
external_libraries/simplejson-2.3.2/__init__.py
|
44
|
18618
|
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ')
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> from decimal import Decimal
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
... raise TypeError(repr(obj) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
__version__ = '2.3.2'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
'OrderedDict',
]
__author__ = 'Bob Ippolito <[email protected]>'
from decimal import Decimal
from .decoder import JSONDecoder, JSONDecodeError
from .encoder import JSONEncoder
def _import_OrderedDict():
import collections
try:
return collections.OrderedDict
except AttributeError:
import ordered_dict
return ordered_dict.OrderedDict
OrderedDict = _import_OrderedDict()
def _import_c_make_encoder():
try:
from ._speedups import make_encoder
return make_encoder
except ImportError:
return None
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
use_decimal=True,
namedtuple_as_object=True,
tuple_as_array=True,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, use_decimal=True,
namedtuple_as_object=True, tuple_as_array=True,
**kw):
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
``.write()``-supporting file-like object).
If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then some chunks written to ``fp``
may be ``unicode`` instances, subject to normal Python ``str`` to
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
to cause an error.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
in strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If *indent* is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
then it will be used instead of the default ``(', ', ': ')`` separators.
``(',', ':')`` is the most compact JSON representation.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *use_decimal* is true (default: ``True``) then decimal.Decimal
will be natively serialized to JSON with full precision.
If *namedtuple_as_object* is true (default: ``True``),
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
as JSON objects.
If *tuple_as_array* is true (default: ``True``),
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and use_decimal
and namedtuple_as_object and tuple_as_array and not kw):
iterable = _default_encoder.iterencode(obj)
else:
if cls is None:
cls = JSONEncoder
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding,
default=default, use_decimal=use_decimal,
namedtuple_as_object=namedtuple_as_object,
tuple_as_array=tuple_as_array,
**kw).iterencode(obj)
# could accelerate with writelines in some versions of Python, at
# a debuggability cost
for chunk in iterable:
fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
encoding='utf-8', default=None, use_decimal=True,
namedtuple_as_object=True,
tuple_as_array=True,
**kw):
"""Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
will be skipped instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the return value will be a
``unicode`` instance subject to normal Python ``str`` to ``unicode``
coercion rules instead of being escaped to an ASCII ``str``.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
then it will be used instead of the default ``(', ', ': ')`` separators.
``(',', ':')`` is the most compact JSON representation.
``encoding`` is the character encoding for str instances, default is UTF-8.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *use_decimal* is true (default: ``True``) then decimal.Decimal
will be natively serialized to JSON with full precision.
If *namedtuple_as_object* is true (default: ``True``),
:class:`tuple` subclasses with ``_asdict()`` methods will be encoded
as JSON objects.
If *tuple_as_array* is true (default: ``True``),
:class:`tuple` (and subclasses) will be encoded as JSON arrays.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
encoding == 'utf-8' and default is None and use_decimal
and namedtuple_as_object and tuple_as_array and not kw):
return _default_encoder.encode(obj)
if cls is None:
cls = JSONEncoder
return cls(
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, encoding=encoding, default=default,
use_decimal=use_decimal,
namedtuple_as_object=namedtuple_as_object,
tuple_as_array=tuple_as_array,
**kw).encode(obj)
_default_decoder = JSONDecoder(encoding=None, object_hook=None,
object_pairs_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, namedtuple_as_object=True, tuple_as_array=True,
**kw):
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
a JSON document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg.
"""
return loads(fp.read(),
encoding=encoding, cls=cls, object_hook=object_hook,
parse_float=parse_float, parse_int=parse_int,
parse_constant=parse_constant, object_pairs_hook=object_pairs_hook,
use_decimal=use_decimal, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None,
use_decimal=False, **kw):
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
document) to a Python object.
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
If *use_decimal* is true (default: ``False``) then it implies
parse_float=decimal.Decimal for parity with ``dump``.
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
kwarg.
"""
if (cls is None and encoding is None and object_hook is None and
parse_int is None and parse_float is None and
parse_constant is None and object_pairs_hook is None
and not use_decimal and not kw):
return _default_decoder.decode(s)
if cls is None:
cls = JSONDecoder
if object_hook is not None:
kw['object_hook'] = object_hook
if object_pairs_hook is not None:
kw['object_pairs_hook'] = object_pairs_hook
if parse_float is not None:
kw['parse_float'] = parse_float
if parse_int is not None:
kw['parse_int'] = parse_int
if parse_constant is not None:
kw['parse_constant'] = parse_constant
if use_decimal:
if parse_float is not None:
raise TypeError("use_decimal=True implies parse_float=Decimal")
kw['parse_float'] = Decimal
return cls(encoding=encoding, **kw).decode(s)
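def _example_loads_hooks():
    # Hedged example (not part of simplejson itself): a minimal sketch of the
    # decoding hooks documented above. Assumes only the stdlib `decimal` and
    # `collections` modules; the helper name is illustrative.
    import collections
    import decimal
    data = loads('{"a": 1.5, "b": 2}',
                 parse_float=decimal.Decimal,
                 parse_int=float,
                 object_pairs_hook=collections.OrderedDict)
    # Floats arrive as Decimal, ints as float, and key order is preserved.
    assert data == collections.OrderedDict(
        [('a', decimal.Decimal('1.5')), ('b', 2.0)])
    return data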
def _toggle_speedups(enabled):
import simplejson.decoder as dec
import simplejson.encoder as enc
import simplejson.scanner as scan
c_make_encoder = _import_c_make_encoder()
if enabled:
dec.scanstring = dec.c_scanstring or dec.py_scanstring
enc.c_make_encoder = c_make_encoder
enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or
enc.py_encode_basestring_ascii)
scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner
else:
dec.scanstring = dec.py_scanstring
enc.c_make_encoder = None
enc.encode_basestring_ascii = enc.py_encode_basestring_ascii
scan.make_scanner = scan.py_make_scanner
dec.make_scanner = scan.make_scanner
global _default_decoder
_default_decoder = JSONDecoder(
encoding=None,
object_hook=None,
object_pairs_hook=None,
)
global _default_encoder
_default_encoder = JSONEncoder(
skipkeys=False,
ensure_ascii=True,
check_circular=True,
allow_nan=True,
indent=None,
separators=None,
encoding='utf-8',
default=None,
)
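def _example_toggle_speedups():
    # Hedged example (not part of simplejson itself): shows the effect of
    # _toggle_speedups above, which swaps the C scanner/encoder for the
    # pure-Python implementations -- e.g. to rule the C extension out while
    # debugging. The function name is illustrative.
    _toggle_speedups(False)           # force the pure-Python code paths
    assert loads('[1, 2, 3]') == [1, 2, 3]
    _toggle_speedups(True)            # restore C speedups where available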
|
gpl-3.0
| 4,405,071,892,640,513,500 | -394,382,010,334,133,600 | 38.95279 | 79 | 0.653078 | false |
ye11ow/phantomjs
|
src/breakpad/src/tools/gyp/test/library/gyptest-static.py
|
430
|
2241
|
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies simple build of a "Hello, world!" program with static libraries,
including verifying that libraries are rebuilt correctly when functions
move between libraries.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('library.gyp',
'-Dlibrary=static_library',
'-Dmoveable_function=lib1',
chdir='src')
test.relocate('src', 'relocate/src')
test.build('library.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello from program.c
Hello from lib1.c
Hello from lib2.c
Hello from lib1_moveable.c
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.run_gyp('library.gyp',
'-Dlibrary=static_library',
'-Dmoveable_function=lib2',
chdir='relocate/src')
# Update program.c to force a rebuild.
test.sleep()
contents = test.read('relocate/src/program.c')
contents = contents.replace('Hello', 'Hello again')
test.write('relocate/src/program.c', contents)
test.build('library.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello again from program.c
Hello from lib1.c
Hello from lib2.c
Hello from lib2_moveable.c
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.run_gyp('library.gyp',
'-Dlibrary=static_library',
'-Dmoveable_function=lib1',
chdir='relocate/src')
# Update program.c and lib2.c to force a rebuild.
test.sleep()
contents = test.read('relocate/src/program.c')
contents = contents.replace('again', 'again again')
test.write('relocate/src/program.c', contents)
# TODO(sgk): we have to force a rebuild of lib2 so that it weeds out
# the "moved" module. This should be done in gyp by adding a dependency
# on the generated .vcproj file itself.
test.touch('relocate/src/lib2.c')
test.build('library.gyp', test.ALL, chdir='relocate/src')
expect = """\
Hello again again from program.c
Hello from lib1.c
Hello from lib2.c
Hello from lib1_moveable.c
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)
test.pass_test()
|
bsd-3-clause
| -3,160,535,150,349,104,600 | -3,712,888,063,357,327,000 | 25.678571 | 73 | 0.696118 | false |
phoebe-project/phoebe2-docs
|
2.1/examples/minimal_contact_binary.py
|
1
|
5694
|
#!/usr/bin/env python
# coding: utf-8
# Minimal Contact Binary System
# ============================
#
# Setup
# -----------------------------
# Let's first make sure we have the latest version of PHOEBE 2.1 installed. (You can comment out this line if you don't use pip for your installation or don't want to update to the latest release).
# In[ ]:
get_ipython().system('pip install -I "phoebe>=2.1,<2.2"')
# As always, let's do imports and initialize a logger and a new bundle. See [Building a System](../tutorials/building_a_system.html) for more details.
# In[1]:
get_ipython().run_line_magic('matplotlib', 'inline')
# In[2]:
import phoebe
from phoebe import u # units
import numpy as np
import matplotlib.pyplot as plt
logger = phoebe.logger()
# Here we'll initialize a default binary, but ask for it to be created as a contact system.
# In[3]:
b_cb = phoebe.default_binary(contact_binary=True)
# We'll compare this to the default detached binary
# In[4]:
b_detached = phoebe.default_binary()
# Hierarchy
# -------------
# Let's first look at the hierarchy of the default detached binary, and then compare that to the hierarchy of the overcontact system
# In[5]:
print(b_detached.hierarchy)
# In[6]:
print(b_cb.hierarchy)
# As you can see, the overcontact system has an additional "component" with method "envelope" and component label "contact_envelope".
#
# Next let's look at the parameters in the envelope and star components. You can see that most of the parameters in the envelope class are constrained, while the equivalent radius of the primary is unconstrained. The value of the primary equivalent radius constrains the potential and fillout factor of the envelope, as well as the equivalent radius of the secondary.
# In[7]:
print(b_cb.filter(component='contact_envelope', kind='envelope', context='component'))
# In[8]:
print(b_cb.filter(component='primary', kind='star', context='component'))
# In[9]:
b_cb['requiv@primary'] = 1.5
# In[10]:
b_cb['pot@contact_envelope@component']
# In[11]:
b_cb['fillout_factor@contact_envelope@component']
# In[12]:
b_cb['requiv@secondary@component']
# Now, of course, if we didn't originally know we wanted a contact binary and built the default detached system, we could still turn it into a contact binary just by changing the hierarchy.
# In[13]:
b_detached.add_component('envelope', component='contact_envelope')
# In[14]:
hier = phoebe.hierarchy.binaryorbit(b_detached['binary'], b_detached['primary'], b_detached['secondary'], b_detached['contact_envelope'])
print(hier)
# In[15]:
b_detached.set_hierarchy(hier)
# In[16]:
print(b_detached.hierarchy)
# However, since our system was detached, the system is not overflowing, and therefore doesn't pass system checks
# In[17]:
b_detached.run_checks()
# And because of this, the potential and requiv@secondary constraints cannot be computed
# In[18]:
b_detached['pot@component']
# In[19]:
b_detached['requiv@secondary@component']
# Likewise, we can make a contact system detached again simply by removing the envelope from the hierarchy. The parameters themselves will still exist (unless you remove them), so you can always just change the hierarchy again to change back to an overcontact system.
# In[20]:
hier = phoebe.hierarchy.binaryorbit(b_detached['binary'], b_detached['primary'], b_detached['secondary'])
print(hier)
# In[21]:
b_detached.set_hierarchy(hier)
# In[22]:
print(b_detached.hierarchy)
# Although the constraints have been removed, PHOEBE has lost the original value of the secondary radius (because of the failed contact constraints), so we'll have to reset that here as well.
# In[23]:
b_detached['requiv@secondary'] = 1.0
# Adding Datasets
# ---------------------
# In[24]:
b_cb.add_dataset('mesh', times=[0], dataset='mesh01')
# In[25]:
b_cb.add_dataset('orb', times=np.linspace(0,1,201), dataset='orb01')
# In[26]:
b_cb.add_dataset('lc', times=np.linspace(0,1,21), dataset='lc01')
# In[27]:
b_cb.add_dataset('rv', times=np.linspace(0,1,21), dataset='rv01')
# For comparison, we'll do the same to our detached system
# In[28]:
b_detached.add_dataset('mesh', times=[0], dataset='mesh01')
# In[29]:
b_detached.add_dataset('orb', times=np.linspace(0,1,201), dataset='orb01')
# In[30]:
b_detached.add_dataset('lc', times=np.linspace(0,1,21), dataset='lc01')
# In[31]:
b_detached.add_dataset('rv', times=np.linspace(0,1,21), dataset='rv01')
# Running Compute
# --------------------
# In[32]:
b_cb.run_compute(irrad_method='none')
# In[33]:
b_detached.run_compute(irrad_method='none')
# Synthetics
# ------------------
# To ensure compatibility with computing synthetics in detached and semi-detached systems in Phoebe, the synthetic meshes for our overcontact system are attached to each component separately, instead of to the contact envelope.
# In[34]:
print(b_cb['mesh01@model'].components)
# In[35]:
print(b_detached['mesh01@model'].components)
# Plotting
# ---------------
# ### Meshes
# In[36]:
afig, mplfig = b_cb['mesh01@model'].plot(x='ws', show=True)
# In[37]:
afig, mplfig = b_detached['mesh01@model'].plot(x='ws', show=True)
# ### Orbits
# In[38]:
afig, mplfig = b_cb['orb01@model'].plot(x='ws',show=True)
# In[39]:
afig, mplfig = b_detached['orb01@model'].plot(x='ws',show=True)
# ### Light Curves
# In[40]:
afig, mplfig = b_cb['lc01@model'].plot(show=True)
# In[41]:
afig, mplfig = b_detached['lc01@model'].plot(show=True)
# ### RVs
# In[42]:
afig, mplfig = b_cb['rv01@model'].plot(show=True)
# In[43]:
afig, mplfig = b_detached['rv01@model'].plot(show=True)
# In[ ]:
|
gpl-3.0
| -1,010,169,458,772,014,100 | 4,462,138,588,708,154,400 | 15.946429 | 359 | 0.680541 | false |
caot/intellij-community
|
python/lib/Lib/site-packages/django/conf/locale/id/formats.py
|
78
|
1855
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j N Y'
DATETIME_FORMAT = "j N Y, G:i:s"
TIME_FORMAT = 'G:i:s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd-m-Y'
SHORT_DATETIME_FORMAT = 'd-m-Y G:i:s'
FIRST_DAY_OF_WEEK = 1 #Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d-%m-%y', '%d/%m/%y', # '25-10-09', '25/10/09'
    '%d-%m-%Y', '%d/%m/%Y', # '25-10-2009', '25/10/2009'
# '%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
# '%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
)
TIME_INPUT_FORMATS = (
'%H:%M:%S', # '14:30:59'
'%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
'%d-%m-%Y %H:%M:%S', # '25-10-2009 14:30:59'
'%d-%m-%Y %H:%M', # '25-10-2009 14:30'
'%d-%m-%Y', # '25-10-2009'
    '%d-%m-%y %H:%M:%S', # '25-10-09 14:30:59'
    '%d-%m-%y %H:%M', # '25-10-09 14:30'
    '%d-%m-%y', # '25-10-09'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
'%m/%d/%y', # '10/25/06'
    '%m/%d/%Y %H:%M:%S', # '10/25/2009 14:30:59'
    '%m/%d/%Y %H:%M', # '10/25/2009 14:30'
'%m/%d/%Y', # '10/25/2009'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
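# Hedged example (not part of Django): a minimal sketch of how the input
# formats above are consumed -- Django's date form fields try each format in
# order with strptime until one matches. The helper name is illustrative.
def _example_parse_date(value):
    from datetime import datetime
    for fmt in DATE_INPUT_FORMATS:
        try:
            return datetime.strptime(value, fmt).date()
        except ValueError:
            continue
    raise ValueError('no DATE_INPUT_FORMAT matched %r' % value)
# e.g. _example_parse_date('25-10-2009') -> datetime.date(2009, 10, 25)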
|
apache-2.0
| -4,476,468,122,330,762,000 | -7,730,917,557,009,450,000 | 38.468085 | 79 | 0.442588 | false |
erkrishna9/odoo
|
openerp/addons/base/tests/test_ir_actions.py
|
48
|
19830
|
import unittest2
from openerp.osv.orm import except_orm
import openerp.tests.common as common
from openerp.tools import mute_logger
class TestServerActionsBase(common.TransactionCase):
def setUp(self):
super(TestServerActionsBase, self).setUp()
cr, uid = self.cr, self.uid
# Models
self.ir_actions_server = self.registry('ir.actions.server')
self.ir_actions_client = self.registry('ir.actions.client')
self.ir_values = self.registry('ir.values')
self.ir_model = self.registry('ir.model')
self.ir_model_fields = self.registry('ir.model.fields')
self.res_partner = self.registry('res.partner')
self.res_country = self.registry('res.country')
# Data on which we will run the server action
self.test_country_id = self.res_country.create(cr, uid, {
'name': 'TestingCountry',
'code': 'TY',
'address_format': 'SuperFormat',
})
self.test_country = self.res_country.browse(cr, uid, self.test_country_id)
self.test_partner_id = self.res_partner.create(cr, uid, {
'name': 'TestingPartner',
'city': 'OrigCity',
'country_id': self.test_country_id,
})
self.test_partner = self.res_partner.browse(cr, uid, self.test_partner_id)
self.context = {
'active_id': self.test_partner_id,
'active_model': 'res.partner',
}
# Model data
self.res_partner_model_id = self.ir_model.search(cr, uid, [('model', '=', 'res.partner')])[0]
self.res_partner_name_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'name')])[0]
self.res_partner_city_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'city')])[0]
self.res_partner_country_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'country_id')])[0]
self.res_partner_parent_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'parent_id')])[0]
self.res_country_model_id = self.ir_model.search(cr, uid, [('model', '=', 'res.country')])[0]
self.res_country_name_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.country'), ('name', '=', 'name')])[0]
self.res_country_code_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.country'), ('name', '=', 'code')])[0]
        # create the server action that the tests below will run
self.act_id = self.ir_actions_server.create(cr, uid, {
'name': 'TestAction',
'condition': 'True',
'model_id': self.res_partner_model_id,
'state': 'code',
'code': 'obj.write({"comment": "MyComment"})',
})
class TestServerActions(TestServerActionsBase):
def test_00_action(self):
cr, uid = self.cr, self.uid
# Do: eval 'True' condition
self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
self.test_partner.refresh()
self.assertEqual(self.test_partner.comment, 'MyComment', 'ir_actions_server: invalid condition check')
self.test_partner.write({'comment': False})
# Do: eval False condition, that should be considered as True (void = True)
self.ir_actions_server.write(cr, uid, [self.act_id], {'condition': False})
self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
self.test_partner.refresh()
self.assertEqual(self.test_partner.comment, 'MyComment', 'ir_actions_server: invalid condition check')
# Do: create contextual action
self.ir_actions_server.create_action(cr, uid, [self.act_id])
# Test: ir_values created
ir_values_ids = self.ir_values.search(cr, uid, [('name', '=', 'Run TestAction')])
self.assertEqual(len(ir_values_ids), 1, 'ir_actions_server: create_action should have created an entry in ir_values')
ir_value = self.ir_values.browse(cr, uid, ir_values_ids[0])
self.assertEqual(ir_value.value, 'ir.actions.server,%s' % self.act_id, 'ir_actions_server: created ir_values should reference the server action')
self.assertEqual(ir_value.model, 'res.partner', 'ir_actions_server: created ir_values should be linked to the action base model')
# Do: remove contextual action
self.ir_actions_server.unlink_action(cr, uid, [self.act_id])
# Test: ir_values removed
ir_values_ids = self.ir_values.search(cr, uid, [('name', '=', 'Run TestAction')])
self.assertEqual(len(ir_values_ids), 0, 'ir_actions_server: unlink_action should remove the ir_values record')
def test_10_code(self):
cr, uid = self.cr, self.uid
self.ir_actions_server.write(cr, uid, self.act_id, {
'state': 'code',
'code': """partner_name = obj.name + '_code'
self.pool["res.partner"].create(cr, uid, {"name": partner_name}, context=context)
workflow"""
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: code server action correctly finished should return False')
pids = self.res_partner.search(cr, uid, [('name', 'ilike', 'TestingPartner_code')])
self.assertEqual(len(pids), 1, 'ir_actions_server: 1 new partner should have been created')
def test_20_trigger(self):
cr, uid = self.cr, self.uid
# Data: code server action (at this point code-based actions should work)
act_id2 = self.ir_actions_server.create(cr, uid, {
'name': 'TestAction2',
'type': 'ir.actions.server',
'condition': 'True',
'model_id': self.res_partner_model_id,
'state': 'code',
'code': 'obj.write({"comment": "MyComment"})',
})
act_id3 = self.ir_actions_server.create(cr, uid, {
'name': 'TestAction3',
'type': 'ir.actions.server',
'condition': 'True',
'model_id': self.res_country_model_id,
'state': 'code',
'code': 'obj.write({"code": "ZZ"})',
})
# Data: create workflows
partner_wf_id = self.registry('workflow').create(cr, uid, {
'name': 'TestWorkflow',
'osv': 'res.partner',
'on_create': True,
})
partner_act1_id = self.registry('workflow.activity').create(cr, uid, {
'name': 'PartnerStart',
'wkf_id': partner_wf_id,
'flow_start': True
})
partner_act2_id = self.registry('workflow.activity').create(cr, uid, {
'name': 'PartnerTwo',
'wkf_id': partner_wf_id,
'kind': 'function',
'action': 'True',
'action_id': act_id2,
})
partner_trs1_id = self.registry('workflow.transition').create(cr, uid, {
'signal': 'partner_trans',
'act_from': partner_act1_id,
'act_to': partner_act2_id
})
country_wf_id = self.registry('workflow').create(cr, uid, {
'name': 'TestWorkflow',
'osv': 'res.country',
'on_create': True,
})
country_act1_id = self.registry('workflow.activity').create(cr, uid, {
'name': 'CountryStart',
'wkf_id': country_wf_id,
'flow_start': True
})
country_act2_id = self.registry('workflow.activity').create(cr, uid, {
'name': 'CountryTwo',
'wkf_id': country_wf_id,
'kind': 'function',
'action': 'True',
'action_id': act_id3,
})
country_trs1_id = self.registry('workflow.transition').create(cr, uid, {
'signal': 'country_trans',
'act_from': country_act1_id,
'act_to': country_act2_id
})
# Data: re-create country and partner to benefit from the workflows
self.test_country_id = self.res_country.create(cr, uid, {
'name': 'TestingCountry2',
'code': 'T2',
})
self.test_country = self.res_country.browse(cr, uid, self.test_country_id)
self.test_partner_id = self.res_partner.create(cr, uid, {
'name': 'TestingPartner2',
'country_id': self.test_country_id,
})
self.test_partner = self.res_partner.browse(cr, uid, self.test_partner_id)
self.context = {
'active_id': self.test_partner_id,
'active_model': 'res.partner',
}
# Run the action on partner object itself ('base')
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'trigger',
'use_relational_model': 'base',
'wkf_model_id': self.res_partner_model_id,
'wkf_model_name': 'res.partner',
'wkf_transition_id': partner_trs1_id,
})
self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
self.test_partner.refresh()
self.assertEqual(self.test_partner.comment, 'MyComment', 'ir_actions_server: incorrect signal trigger')
# Run the action on related country object ('relational')
self.ir_actions_server.write(cr, uid, [self.act_id], {
'use_relational_model': 'relational',
'wkf_model_id': self.res_country_model_id,
'wkf_model_name': 'res.country',
'wkf_field_id': self.res_partner_country_field_id,
'wkf_transition_id': country_trs1_id,
})
self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
self.test_country.refresh()
self.assertEqual(self.test_country.code, 'ZZ', 'ir_actions_server: incorrect signal trigger')
# Clear workflow cache, otherwise openerp will try to create workflows even if it has been deleted
from openerp.workflow import clear_cache
clear_cache(cr, uid)
def test_30_client(self):
cr, uid = self.cr, self.uid
client_action_id = self.registry('ir.actions.client').create(cr, uid, {
'name': 'TestAction2',
'tag': 'Test',
})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'client_action',
'action_id': client_action_id,
})
res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertEqual(res['name'], 'TestAction2', 'ir_actions_server: incorrect return result for a client action')
def test_40_crud_create(self):
cr, uid = self.cr, self.uid
_city = 'TestCity'
_name = 'TestNew'
# Do: create a new record in the same model and link it
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_create',
'use_create': 'new',
'link_new_record': True,
'link_field_id': self.res_partner_parent_field_id,
'fields_lines': [(0, 0, {'col1': self.res_partner_name_field_id, 'value': _name}),
(0, 0, {'col1': self.res_partner_city_field_id, 'value': _city})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new partner created
pids = self.res_partner.search(cr, uid, [('name', 'ilike', _name)])
self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
partner = self.res_partner.browse(cr, uid, pids[0])
self.assertEqual(partner.city, _city, 'ir_actions_server: TODO')
# Test: new partner linked
self.test_partner.refresh()
self.assertEqual(self.test_partner.parent_id.id, pids[0], 'ir_actions_server: TODO')
# Do: copy current record
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_create',
'use_create': 'copy_current',
'link_new_record': False,
'fields_lines': [(0, 0, {'col1': self.res_partner_name_field_id, 'value': 'TestCopyCurrent'}),
(0, 0, {'col1': self.res_partner_city_field_id, 'value': 'TestCity'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new partner created
        pids = self.res_partner.search(cr, uid, [('name', 'ilike', 'TestingPartner (copy)')]) # currently res_partner overrides default['name'] regardless of its value
self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
partner = self.res_partner.browse(cr, uid, pids[0])
self.assertEqual(partner.city, 'TestCity', 'ir_actions_server: TODO')
self.assertEqual(partner.country_id.id, self.test_partner.country_id.id, 'ir_actions_server: TODO')
# Do: create a new record in another model
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_create',
'use_create': 'new_other',
'crud_model_id': self.res_country_model_id,
'link_new_record': False,
'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'obj.name', 'type': 'equation'}),
(0, 0, {'col1': self.res_country_code_field_id, 'value': 'obj.name[0:2]', 'type': 'equation'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new country created
cids = self.res_country.search(cr, uid, [('name', 'ilike', 'TestingPartner')])
self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
country = self.res_country.browse(cr, uid, cids[0])
self.assertEqual(country.code, 'TE', 'ir_actions_server: TODO')
# Do: copy a record in another model
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_create',
'use_create': 'copy_other',
'crud_model_id': self.res_country_model_id,
'link_new_record': False,
'ref_object': 'res.country,%s' % self.test_country_id,
'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'NewCountry', 'type': 'value'}),
(0, 0, {'col1': self.res_country_code_field_id, 'value': 'NY', 'type': 'value'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new country created
cids = self.res_country.search(cr, uid, [('name', 'ilike', 'NewCountry')])
self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
country = self.res_country.browse(cr, uid, cids[0])
self.assertEqual(country.code, 'NY', 'ir_actions_server: TODO')
self.assertEqual(country.address_format, 'SuperFormat', 'ir_actions_server: TODO')
def test_50_crud_write(self):
cr, uid = self.cr, self.uid
_name = 'TestNew'
# Do: create a new record in the same model and link it
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_write',
'use_write': 'current',
'fields_lines': [(0, 0, {'col1': self.res_partner_name_field_id, 'value': _name})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new partner created
pids = self.res_partner.search(cr, uid, [('name', 'ilike', _name)])
self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
partner = self.res_partner.browse(cr, uid, pids[0])
self.assertEqual(partner.city, 'OrigCity', 'ir_actions_server: TODO')
# Do: copy current record
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'use_write': 'other',
'crud_model_id': self.res_country_model_id,
'ref_object': 'res.country,%s' % self.test_country_id,
'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'obj.name', 'type': 'equation'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new country created
cids = self.res_country.search(cr, uid, [('name', 'ilike', 'TestNew')])
self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
# Do: copy a record in another model
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'use_write': 'expression',
'crud_model_id': self.res_country_model_id,
'write_expression': 'object.country_id',
'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'NewCountry', 'type': 'value'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new country created
cids = self.res_country.search(cr, uid, [('name', 'ilike', 'NewCountry')])
self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
@mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
def test_60_multi(self):
cr, uid = self.cr, self.uid
# Data: 2 server actions that will be nested
act1_id = self.ir_actions_server.create(cr, uid, {
'name': 'Subaction1',
'model_id': self.res_partner_model_id,
'state': 'code',
'code': 'action = {"type": "ir.actions.act_window"}',
})
# Do: create a new record in the same model and link it
act2_id = self.ir_actions_server.create(cr, uid, {
'name': 'Subaction2',
'model_id': self.res_partner_model_id,
'state': 'object_create',
'use_create': 'copy_current',
})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'multi',
'child_ids': [(6, 0, [act1_id, act2_id])],
})
# Do: run the action
res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
# Test: new partner created
        pids = self.res_partner.search(cr, uid, [('name', 'ilike', 'TestingPartner (copy)')]) # currently res_partner overrides default['name'] regardless of its value
self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
# Test: action returned
self.assertEqual(res.get('type'), 'ir.actions.act_window', '')
# Test loops
self.assertRaises(except_orm, self.ir_actions_server.write, cr, uid, [self.act_id], {
'child_ids': [(6, 0, [self.act_id])]
})
if __name__ == '__main__':
unittest2.main()
|
agpl-3.0
| 3,236,952,717,117,221,400 | 2,796,973,148,649,707,000 | 48.699248 | 163 | 0.581594 | false |
RichardLitt/wyrd-django-dev
|
tests/regressiontests/localflavor/pt/tests.py
|
13
|
1059
|
from __future__ import unicode_literals
from django.contrib.localflavor.pt.forms import PTZipCodeField, PTPhoneNumberField
from django.test import SimpleTestCase
class PTLocalFlavorTests(SimpleTestCase):
def test_PTZipCodeField(self):
error_format = ['Enter a zip code in the format XXXX-XXX.']
valid = {
'3030-034': '3030-034',
'1003456': '1003-456',
}
invalid = {
'2A200': error_format,
'980001': error_format,
}
self.assertFieldOutput(PTZipCodeField, valid, invalid)
def test_PTPhoneNumberField(self):
error_format = ['Phone numbers must have 9 digits, or start by + or 00.']
valid = {
'917845189': '917845189',
'91 784 5189': '917845189',
'+351 91 111': '+35191111',
'00351873': '00351873',
}
invalid = {
'91 784 51 8': error_format,
'091 456 987 1': error_format,
}
self.assertFieldOutput(PTPhoneNumberField, valid, invalid)
|
bsd-3-clause
| -1,165,562,226,851,392,800 | -5,841,547,736,117,855,000 | 31.090909 | 82 | 0.578848 | false |
c86j224s/snippet
|
Python_asyncio_binary_echo/pyclient2/Lib/site-packages/pip/_internal/operations/freeze.py
|
8
|
10025
|
from __future__ import absolute_import
import collections
import logging
import os
import re
import warnings
from pip._vendor import pkg_resources, six
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import RequirementParseError
from pip._internal.exceptions import InstallationError
from pip._internal.req import InstallRequirement
from pip._internal.req.req_file import COMMENT_RE
from pip._internal.utils.deprecation import RemovedInPip11Warning
from pip._internal.utils.misc import (
dist_is_editable, get_installed_distributions,
)
logger = logging.getLogger(__name__)
def freeze(
requirement=None,
find_links=None, local_only=None, user_only=None, skip_regex=None,
isolated=False,
wheel_cache=None,
exclude_editable=False,
skip=()):
find_links = find_links or []
skip_match = None
if skip_regex:
skip_match = re.compile(skip_regex).search
dependency_links = []
for dist in pkg_resources.working_set:
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(
dist.get_metadata_lines('dependency_links.txt')
)
for link in find_links:
if '#egg=' in link:
dependency_links.append(link)
for link in find_links:
yield '-f %s' % link
installations = {}
for dist in get_installed_distributions(local_only=local_only,
skip=(),
user_only=user_only):
try:
req = FrozenRequirement.from_dist(
dist,
dependency_links
)
except RequirementParseError:
logger.warning(
"Could not parse requirement: %s",
dist.project_name
)
continue
if exclude_editable and req.editable:
continue
installations[req.name] = req
if requirement:
# the options that don't get turned into an InstallRequirement
# should only be emitted once, even if the same option is in multiple
# requirements files, so we need to keep track of what has been emitted
# so that we don't emit it again if it's seen again
emitted_options = set()
# keep track of which files a requirement is in so that we can
# give an accurate warning if a requirement appears multiple times.
req_files = collections.defaultdict(list)
for req_file_path in requirement:
with open(req_file_path) as req_file:
for line in req_file:
if (not line.strip() or
line.strip().startswith('#') or
(skip_match and skip_match(line)) or
line.startswith((
'-r', '--requirement',
'-Z', '--always-unzip',
'-f', '--find-links',
'-i', '--index-url',
'--pre',
'--trusted-host',
'--process-dependency-links',
'--extra-index-url'))):
line = line.rstrip()
if line not in emitted_options:
emitted_options.add(line)
yield line
continue
if line.startswith('-e') or line.startswith('--editable'):
if line.startswith('-e'):
line = line[2:].strip()
else:
line = line[len('--editable'):].strip().lstrip('=')
line_req = InstallRequirement.from_editable(
line,
isolated=isolated,
wheel_cache=wheel_cache,
)
else:
line_req = InstallRequirement.from_line(
COMMENT_RE.sub('', line).strip(),
isolated=isolated,
wheel_cache=wheel_cache,
)
if not line_req.name:
logger.info(
"Skipping line in requirement file [%s] because "
"it's not clear what it would install: %s",
req_file_path, line.strip(),
)
logger.info(
" (add #egg=PackageName to the URL to avoid"
" this warning)"
)
elif line_req.name not in installations:
# either it's not installed, or it is installed
# but has been processed already
if not req_files[line_req.name]:
logger.warning(
"Requirement file [%s] contains %s, but that "
"package is not installed",
req_file_path,
COMMENT_RE.sub('', line).strip(),
)
else:
req_files[line_req.name].append(req_file_path)
else:
yield str(installations[line_req.name]).rstrip()
del installations[line_req.name]
req_files[line_req.name].append(req_file_path)
# Warn about requirements that were included multiple times (in a
# single requirements file or in different requirements files).
for name, files in six.iteritems(req_files):
if len(files) > 1:
logger.warning("Requirement %s included multiple times [%s]",
name, ', '.join(sorted(set(files))))
yield(
'## The following requirements were added by '
'pip freeze:'
)
for installation in sorted(
installations.values(), key=lambda x: x.name.lower()):
if canonicalize_name(installation.name) not in skip:
yield str(installation).rstrip()
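def _example_freeze_to_stdout():
    # Hedged example (not part of pip): `freeze` above is a generator of
    # requirement lines, so a caller simply iterates it. A sketch assuming it
    # runs in an environment where pip's internals are importable; the skip
    # tuple is illustrative.
    for line in freeze(local_only=True, skip=('pip', 'setuptools', 'wheel')):
        print(line)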
class FrozenRequirement(object):
def __init__(self, name, req, editable, comments=()):
self.name = name
self.req = req
self.editable = editable
self.comments = comments
_rev_re = re.compile(r'-r(\d+)$')
_date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
@classmethod
def from_dist(cls, dist, dependency_links):
location = os.path.normcase(os.path.abspath(dist.location))
comments = []
from pip._internal.vcs import vcs, get_src_requirement
if dist_is_editable(dist) and vcs.get_backend_name(location):
editable = True
try:
req = get_src_requirement(dist, location)
except InstallationError as exc:
logger.warning(
"Error when trying to get requirement for VCS system %s, "
"falling back to uneditable format", exc
)
req = None
if req is None:
logger.warning(
'Could not determine repository location of %s', location
)
comments.append(
'## !! Could not determine repository location'
)
req = dist.as_requirement()
editable = False
else:
editable = False
req = dist.as_requirement()
specs = req.specs
assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
(specs, dist)
version = specs[0][1]
ver_match = cls._rev_re.search(version)
date_match = cls._date_re.search(version)
if ver_match or date_match:
svn_backend = vcs.get_backend('svn')
if svn_backend:
svn_location = svn_backend().get_location(
dist,
dependency_links,
)
if not svn_location:
logger.warning(
'Warning: cannot find svn location for %s', req,
)
comments.append(
'## FIXME: could not find svn URL in dependency_links '
'for this package:'
)
else:
warnings.warn(
"SVN editable detection based on dependency links "
"will be dropped in the future.",
RemovedInPip11Warning,
)
comments.append(
'# Installing as editable to satisfy requirement %s:' %
req
)
if ver_match:
rev = ver_match.group(1)
else:
rev = '{%s}' % date_match.group(1)
editable = True
req = '%s@%s#egg=%s' % (
svn_location,
rev,
cls.egg_name(dist)
)
return cls(dist.project_name, req, editable, comments)
@staticmethod
def egg_name(dist):
name = dist.egg_name()
match = re.search(r'-py\d\.\d$', name)
if match:
name = name[:match.start()]
return name
def __str__(self):
req = self.req
if self.editable:
req = '-e %s' % req
return '\n'.join(list(self.comments) + [str(req)]) + '\n'
|
apache-2.0
| -6,020,381,973,057,315,000 | 7,610,793,678,030,967,000 | 38.781746 | 79 | 0.468229 | false |
TeamSWAP/swap
|
external/pyinstaller/PyInstaller/cliutils/makespec.py
|
10
|
1488
|
#-----------------------------------------------------------------------------
# Copyright (c) 2013, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
"""
Automatically build spec files containing a description of the project
"""
import optparse
import os
import PyInstaller.makespec
import PyInstaller.compat
import PyInstaller.log
from PyInstaller.utils import misc
def run():
misc.check_not_running_as_root()
p = optparse.OptionParser(
usage='python %prog [opts] <scriptname> [<scriptname> ...]'
)
PyInstaller.makespec.__add_options(p)
PyInstaller.log.__add_options(p)
PyInstaller.compat.__add_obsolete_options(p)
opts, args = p.parse_args()
PyInstaller.log.__process_options(p, opts)
# Split pathex by using the path separator
temppaths = opts.pathex[:]
opts.pathex = []
for p in temppaths:
opts.pathex.extend(p.split(os.pathsep))
if not args:
p.error('Requires at least one scriptname file')
try:
name = PyInstaller.makespec.main(args, **opts.__dict__)
        print('wrote %s' % name)
        print('now run pyinstaller.py to build the executable')
except KeyboardInterrupt:
raise SystemExit("Aborted by user request.")
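# Hedged usage note (not part of this module): released PyInstaller versions
# expose this entry point as the `pyi-makespec` console script, e.g.
#
#   pyi-makespec --onefile hello.py
#
# which writes `hello.spec` for the separate build step mentioned above.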
|
apache-2.0
| -6,374,173,938,722,902,000 | -8,793,036,765,108,841,000 | 27.615385 | 78 | 0.618952 | false |
Alexey-T/CudaText
|
app/py/sys/urllib3/util/timeout.py
|
27
|
10003
|
from __future__ import absolute_import
import time
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
from ..exceptions import TimeoutStateError
# A sentinel value to indicate that no timeout was specified by the user in
# urllib3
_Default = object()
# Use time.monotonic if available.
current_time = getattr(time, "monotonic", time.time)
class Timeout(object):
"""Timeout configuration.
Timeouts can be defined as a default for a pool:
.. code-block:: python
timeout = Timeout(connect=2.0, read=7.0)
http = PoolManager(timeout=timeout)
response = http.request('GET', 'http://example.com/')
Or per-request (which overrides the default for the pool):
.. code-block:: python
response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
Timeouts can be disabled by setting all the parameters to ``None``:
.. code-block:: python
no_timeout = Timeout(connect=None, read=None)
        response = http.request('GET', 'http://example.com/', timeout=no_timeout)
:param total:
This combines the connect and read timeouts into one; the read timeout
will be set to the time leftover from the connect attempt. In the
event that both a connect timeout and a total are specified, or a read
timeout and a total are specified, the shorter timeout will be applied.
Defaults to None.
:type total: int, float, or None
:param connect:
The maximum amount of time (in seconds) to wait for a connection
attempt to a server to succeed. Omitting the parameter will default the
connect timeout to the system default, probably `the global default
timeout in socket.py
<http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
None will set an infinite timeout for connection attempts.
:type connect: int, float, or None
:param read:
The maximum amount of time (in seconds) to wait between consecutive
read operations for a response from the server. Omitting the parameter
will default the read timeout to the system default, probably `the
global default timeout in socket.py
<http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
None will set an infinite timeout.
:type read: int, float, or None
.. note::
Many factors can affect the total amount of time for urllib3 to return
an HTTP response.
For example, Python's DNS resolver does not obey the timeout specified
on the socket. Other factors that can affect total request time include
high CPU load, high swap, the program running at a low priority level,
or other behaviors.
In addition, the read and total timeouts only measure the time between
read operations on the socket connecting the client and the server,
not the total amount of time for the request to return a complete
response. For most requests, the timeout is raised because the server
has not sent the first byte in the specified time. This is not always
the case; if a server streams one byte every fifteen seconds, a timeout
of 20 seconds will not trigger, even though the request will take
several minutes to complete.
If your goal is to cut off any request after a set amount of wall clock
time, consider having a second "watcher" thread to cut off a slow
request.
"""
#: A sentinel object representing the default timeout value
DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
def __init__(self, total=None, connect=_Default, read=_Default):
self._connect = self._validate_timeout(connect, "connect")
self._read = self._validate_timeout(read, "read")
self.total = self._validate_timeout(total, "total")
self._start_connect = None
def __repr__(self):
return "%s(connect=%r, read=%r, total=%r)" % (
type(self).__name__,
self._connect,
self._read,
self.total,
)
# __str__ provided for backwards compatibility
__str__ = __repr__
@classmethod
def _validate_timeout(cls, value, name):
"""Check that a timeout attribute is valid.
:param value: The timeout value to validate
:param name: The name of the timeout attribute to validate. This is
used to specify in error messages.
:return: The validated and casted version of the given value.
:raises ValueError: If it is a numeric value less than or equal to
zero, or the type is not an integer, float, or None.
"""
if value is _Default:
return cls.DEFAULT_TIMEOUT
if value is None or value is cls.DEFAULT_TIMEOUT:
return value
if isinstance(value, bool):
raise ValueError(
"Timeout cannot be a boolean value. It must "
"be an int, float or None."
)
try:
float(value)
except (TypeError, ValueError):
raise ValueError(
"Timeout value %s was %s, but it must be an "
"int, float or None." % (name, value)
)
try:
if value <= 0:
raise ValueError(
"Attempted to set %s timeout to %s, but the "
"timeout cannot be set to a value less "
"than or equal to 0." % (name, value)
)
except TypeError:
# Python 3
raise ValueError(
"Timeout value %s was %s, but it must be an "
"int, float or None." % (name, value)
)
return value
@classmethod
def from_float(cls, timeout):
"""Create a new Timeout from a legacy timeout value.
The timeout value used by httplib.py sets the same timeout on the
connect(), and recv() socket requests. This creates a :class:`Timeout`
object that sets the individual timeouts to the ``timeout`` value
passed to this function.
:param timeout: The legacy timeout value.
:type timeout: integer, float, sentinel default object, or None
:return: Timeout object
:rtype: :class:`Timeout`
"""
return Timeout(read=timeout, connect=timeout)
def clone(self):
"""Create a copy of the timeout object
Timeout properties are stored per-pool but each request needs a fresh
Timeout object to ensure each one has its own start/stop configured.
:return: a copy of the timeout object
:rtype: :class:`Timeout`
"""
# We can't use copy.deepcopy because that will also create a new object
# for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
# detect the user default.
return Timeout(connect=self._connect, read=self._read, total=self.total)
def start_connect(self):
"""Start the timeout clock, used during a connect() attempt
:raises urllib3.exceptions.TimeoutStateError: if you attempt
to start a timer that has been started already.
"""
if self._start_connect is not None:
raise TimeoutStateError("Timeout timer has already been started.")
self._start_connect = current_time()
return self._start_connect
def get_connect_duration(self):
"""Gets the time elapsed since the call to :meth:`start_connect`.
:return: Elapsed time in seconds.
:rtype: float
:raises urllib3.exceptions.TimeoutStateError: if you attempt
to get duration for a timer that hasn't been started.
"""
if self._start_connect is None:
raise TimeoutStateError(
"Can't get connect duration for timer that has not started."
)
return current_time() - self._start_connect
@property
def connect_timeout(self):
"""Get the value to use when setting a connection timeout.
This will be a positive float or integer, the value None
(never timeout), or the default system timeout.
:return: Connect timeout.
:rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
"""
if self.total is None:
return self._connect
if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
return self.total
return min(self._connect, self.total)
@property
def read_timeout(self):
"""Get the value for the read timeout.
This assumes some time has elapsed in the connection timeout and
computes the read timeout appropriately.
If self.total is set, the read timeout is dependent on the amount of
time taken by the connect timeout. If the connection time has not been
established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
raised.
:return: Value to use for the read timeout.
:rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
:raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
has not yet been called on this object.
"""
if (
self.total is not None
and self.total is not self.DEFAULT_TIMEOUT
and self._read is not None
and self._read is not self.DEFAULT_TIMEOUT
):
# In case the connect timeout has not yet been established.
if self._start_connect is None:
return self._read
return max(0, min(self.total - self.get_connect_duration(), self._read))
elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
return max(0, self.total - self.get_connect_duration())
else:
return self._read
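def _example_timeout_budget():
    # Hedged example (not part of urllib3): a minimal sketch of how `total`
    # caps the per-phase timeouts computed by the properties above. The
    # function name is illustrative.
    t = Timeout(connect=2.0, read=7.0, total=5.0)
    assert t.connect_timeout == 2.0   # min(connect, total)
    t.start_connect()                 # start the clock for this request
    remaining = t.read_timeout        # what is left of `total`, capped by read
    assert 0 <= remaining <= 5.0
    return remaining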
|
mpl-2.0
| 7,900,443,729,840,546,000 | 8,975,743,096,573,655,000 | 36.324627 | 84 | 0.629211 | false |
alistairlow/tensorflow
|
tensorflow/python/framework/dtypes_test.py
|
10
|
14160
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.framework.dtypes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.framework import types_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
def _is_numeric_dtype_enum(datatype_enum):
non_numeric_dtypes = [types_pb2.DT_VARIANT,
types_pb2.DT_VARIANT_REF,
types_pb2.DT_INVALID,
types_pb2.DT_RESOURCE,
types_pb2.DT_RESOURCE_REF]
return datatype_enum not in non_numeric_dtypes
class TypesTest(test_util.TensorFlowTestCase):
def testAllTypesConstructible(self):
for datatype_enum in types_pb2.DataType.values():
if datatype_enum == types_pb2.DT_INVALID:
continue
self.assertEqual(datatype_enum,
dtypes.DType(datatype_enum).as_datatype_enum)
def testAllTypesConvertibleToDType(self):
for datatype_enum in types_pb2.DataType.values():
if datatype_enum == types_pb2.DT_INVALID:
continue
dt = dtypes.as_dtype(datatype_enum)
self.assertEqual(datatype_enum, dt.as_datatype_enum)
def testAllTypesConvertibleToNumpyDtype(self):
for datatype_enum in types_pb2.DataType.values():
if not _is_numeric_dtype_enum(datatype_enum):
continue
dtype = dtypes.as_dtype(datatype_enum)
numpy_dtype = dtype.as_numpy_dtype
_ = np.empty((1, 1, 1, 1), dtype=numpy_dtype)
if dtype.base_dtype != dtypes.bfloat16:
# NOTE(touts): Intentionally no way to feed a DT_BFLOAT16.
self.assertEqual(
dtypes.as_dtype(datatype_enum).base_dtype,
dtypes.as_dtype(numpy_dtype))
def testInvalid(self):
with self.assertRaises(TypeError):
dtypes.DType(types_pb2.DT_INVALID)
with self.assertRaises(TypeError):
dtypes.as_dtype(types_pb2.DT_INVALID)
def testNumpyConversion(self):
self.assertIs(dtypes.float32, dtypes.as_dtype(np.float32))
self.assertIs(dtypes.float64, dtypes.as_dtype(np.float64))
self.assertIs(dtypes.int32, dtypes.as_dtype(np.int32))
self.assertIs(dtypes.int64, dtypes.as_dtype(np.int64))
self.assertIs(dtypes.uint8, dtypes.as_dtype(np.uint8))
self.assertIs(dtypes.uint16, dtypes.as_dtype(np.uint16))
self.assertIs(dtypes.int16, dtypes.as_dtype(np.int16))
self.assertIs(dtypes.int8, dtypes.as_dtype(np.int8))
self.assertIs(dtypes.complex64, dtypes.as_dtype(np.complex64))
self.assertIs(dtypes.complex128, dtypes.as_dtype(np.complex128))
self.assertIs(dtypes.string, dtypes.as_dtype(np.object))
self.assertIs(dtypes.string,
dtypes.as_dtype(np.array(["foo", "bar"]).dtype))
self.assertIs(dtypes.bool, dtypes.as_dtype(np.bool))
with self.assertRaises(TypeError):
dtypes.as_dtype(np.dtype([("f1", np.uint), ("f2", np.int32)]))
def testRealDtype(self):
for dtype in [
dtypes.float32, dtypes.float64, dtypes.bool, dtypes.uint8, dtypes.int8,
dtypes.int16, dtypes.int32, dtypes.int64
]:
self.assertIs(dtype.real_dtype, dtype)
self.assertIs(dtypes.complex64.real_dtype, dtypes.float32)
self.assertIs(dtypes.complex128.real_dtype, dtypes.float64)
def testStringConversion(self):
self.assertIs(dtypes.float32, dtypes.as_dtype("float32"))
self.assertIs(dtypes.float64, dtypes.as_dtype("float64"))
self.assertIs(dtypes.int32, dtypes.as_dtype("int32"))
self.assertIs(dtypes.uint8, dtypes.as_dtype("uint8"))
self.assertIs(dtypes.uint16, dtypes.as_dtype("uint16"))
self.assertIs(dtypes.int16, dtypes.as_dtype("int16"))
self.assertIs(dtypes.int8, dtypes.as_dtype("int8"))
self.assertIs(dtypes.string, dtypes.as_dtype("string"))
self.assertIs(dtypes.complex64, dtypes.as_dtype("complex64"))
self.assertIs(dtypes.complex128, dtypes.as_dtype("complex128"))
self.assertIs(dtypes.int64, dtypes.as_dtype("int64"))
self.assertIs(dtypes.bool, dtypes.as_dtype("bool"))
self.assertIs(dtypes.qint8, dtypes.as_dtype("qint8"))
self.assertIs(dtypes.quint8, dtypes.as_dtype("quint8"))
self.assertIs(dtypes.qint32, dtypes.as_dtype("qint32"))
self.assertIs(dtypes.bfloat16, dtypes.as_dtype("bfloat16"))
self.assertIs(dtypes.float32_ref, dtypes.as_dtype("float32_ref"))
self.assertIs(dtypes.float64_ref, dtypes.as_dtype("float64_ref"))
self.assertIs(dtypes.int32_ref, dtypes.as_dtype("int32_ref"))
self.assertIs(dtypes.uint8_ref, dtypes.as_dtype("uint8_ref"))
self.assertIs(dtypes.int16_ref, dtypes.as_dtype("int16_ref"))
self.assertIs(dtypes.int8_ref, dtypes.as_dtype("int8_ref"))
self.assertIs(dtypes.string_ref, dtypes.as_dtype("string_ref"))
self.assertIs(dtypes.complex64_ref, dtypes.as_dtype("complex64_ref"))
self.assertIs(dtypes.complex128_ref, dtypes.as_dtype("complex128_ref"))
self.assertIs(dtypes.int64_ref, dtypes.as_dtype("int64_ref"))
self.assertIs(dtypes.bool_ref, dtypes.as_dtype("bool_ref"))
self.assertIs(dtypes.qint8_ref, dtypes.as_dtype("qint8_ref"))
self.assertIs(dtypes.quint8_ref, dtypes.as_dtype("quint8_ref"))
self.assertIs(dtypes.qint32_ref, dtypes.as_dtype("qint32_ref"))
self.assertIs(dtypes.bfloat16_ref, dtypes.as_dtype("bfloat16_ref"))
with self.assertRaises(TypeError):
dtypes.as_dtype("not_a_type")
def testDTypesHaveUniqueNames(self):
dtypez = []
names = set()
for datatype_enum in types_pb2.DataType.values():
if datatype_enum == types_pb2.DT_INVALID:
continue
dtype = dtypes.as_dtype(datatype_enum)
dtypez.append(dtype)
names.add(dtype.name)
self.assertEqual(len(dtypez), len(names))
def testIsInteger(self):
self.assertEqual(dtypes.as_dtype("int8").is_integer, True)
self.assertEqual(dtypes.as_dtype("int16").is_integer, True)
self.assertEqual(dtypes.as_dtype("int32").is_integer, True)
self.assertEqual(dtypes.as_dtype("int64").is_integer, True)
self.assertEqual(dtypes.as_dtype("uint8").is_integer, True)
self.assertEqual(dtypes.as_dtype("uint16").is_integer, True)
self.assertEqual(dtypes.as_dtype("complex64").is_integer, False)
self.assertEqual(dtypes.as_dtype("complex128").is_integer, False)
self.assertEqual(dtypes.as_dtype("float").is_integer, False)
self.assertEqual(dtypes.as_dtype("double").is_integer, False)
self.assertEqual(dtypes.as_dtype("string").is_integer, False)
self.assertEqual(dtypes.as_dtype("bool").is_integer, False)
self.assertEqual(dtypes.as_dtype("bfloat16").is_integer, False)
self.assertEqual(dtypes.as_dtype("qint8").is_integer, False)
self.assertEqual(dtypes.as_dtype("qint16").is_integer, False)
self.assertEqual(dtypes.as_dtype("qint32").is_integer, False)
self.assertEqual(dtypes.as_dtype("quint8").is_integer, False)
self.assertEqual(dtypes.as_dtype("quint16").is_integer, False)
def testIsFloating(self):
self.assertEqual(dtypes.as_dtype("int8").is_floating, False)
self.assertEqual(dtypes.as_dtype("int16").is_floating, False)
self.assertEqual(dtypes.as_dtype("int32").is_floating, False)
self.assertEqual(dtypes.as_dtype("int64").is_floating, False)
self.assertEqual(dtypes.as_dtype("uint8").is_floating, False)
self.assertEqual(dtypes.as_dtype("uint16").is_floating, False)
self.assertEqual(dtypes.as_dtype("complex64").is_floating, False)
self.assertEqual(dtypes.as_dtype("complex128").is_floating, False)
self.assertEqual(dtypes.as_dtype("float32").is_floating, True)
self.assertEqual(dtypes.as_dtype("float64").is_floating, True)
self.assertEqual(dtypes.as_dtype("string").is_floating, False)
self.assertEqual(dtypes.as_dtype("bool").is_floating, False)
self.assertEqual(dtypes.as_dtype("bfloat16").is_integer, False)
self.assertEqual(dtypes.as_dtype("qint8").is_floating, False)
self.assertEqual(dtypes.as_dtype("qint16").is_floating, False)
self.assertEqual(dtypes.as_dtype("qint32").is_floating, False)
self.assertEqual(dtypes.as_dtype("quint8").is_floating, False)
self.assertEqual(dtypes.as_dtype("quint16").is_floating, False)
def testIsComplex(self):
self.assertEqual(dtypes.as_dtype("int8").is_complex, False)
self.assertEqual(dtypes.as_dtype("int16").is_complex, False)
self.assertEqual(dtypes.as_dtype("int32").is_complex, False)
self.assertEqual(dtypes.as_dtype("int64").is_complex, False)
self.assertEqual(dtypes.as_dtype("uint8").is_complex, False)
self.assertEqual(dtypes.as_dtype("uint16").is_complex, False)
self.assertEqual(dtypes.as_dtype("complex64").is_complex, True)
self.assertEqual(dtypes.as_dtype("complex128").is_complex, True)
self.assertEqual(dtypes.as_dtype("float32").is_complex, False)
self.assertEqual(dtypes.as_dtype("float64").is_complex, False)
self.assertEqual(dtypes.as_dtype("string").is_complex, False)
self.assertEqual(dtypes.as_dtype("bool").is_complex, False)
self.assertEqual(dtypes.as_dtype("bfloat16").is_complex, False)
self.assertEqual(dtypes.as_dtype("qint8").is_complex, False)
self.assertEqual(dtypes.as_dtype("qint16").is_complex, False)
self.assertEqual(dtypes.as_dtype("qint32").is_complex, False)
self.assertEqual(dtypes.as_dtype("quint8").is_complex, False)
self.assertEqual(dtypes.as_dtype("quint16").is_complex, False)
def testIsUnsigned(self):
self.assertEqual(dtypes.as_dtype("int8").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("int16").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("int32").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("int64").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("uint8").is_unsigned, True)
self.assertEqual(dtypes.as_dtype("uint16").is_unsigned, True)
self.assertEqual(dtypes.as_dtype("float32").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("float64").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("bool").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("string").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("complex64").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("complex128").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("bfloat16").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("qint8").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("qint16").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("qint32").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("quint8").is_unsigned, False)
self.assertEqual(dtypes.as_dtype("quint16").is_unsigned, False)
def testMinMax(self):
# make sure min/max evaluates for all data types that have min/max
for datatype_enum in types_pb2.DataType.values():
if not _is_numeric_dtype_enum(datatype_enum):
continue
dtype = dtypes.as_dtype(datatype_enum)
numpy_dtype = dtype.as_numpy_dtype
# ignore types for which there are no minimum/maximum (or we cannot
# compute it, such as for the q* types)
if (dtype.is_quantized or dtype.base_dtype == dtypes.bool or
dtype.base_dtype == dtypes.string or
dtype.base_dtype == dtypes.complex64 or
dtype.base_dtype == dtypes.complex128):
continue
print("%s: %s - %s" % (dtype, dtype.min, dtype.max))
# check some values that are known
      if numpy_dtype == np.bool_:
        self.assertEqual(dtype.min, 0)
        self.assertEqual(dtype.max, 1)
      if numpy_dtype == np.int8:
        self.assertEqual(dtype.min, -128)
        self.assertEqual(dtype.max, 127)
      if numpy_dtype == np.int16:
        self.assertEqual(dtype.min, -32768)
        self.assertEqual(dtype.max, 32767)
      if numpy_dtype == np.int32:
        self.assertEqual(dtype.min, -2147483648)
        self.assertEqual(dtype.max, 2147483647)
      if numpy_dtype == np.int64:
        self.assertEqual(dtype.min, -9223372036854775808)
        self.assertEqual(dtype.max, 9223372036854775807)
      if numpy_dtype == np.uint8:
        self.assertEqual(dtype.min, 0)
        self.assertEqual(dtype.max, 255)
      if numpy_dtype == np.uint16:
        if dtype == dtypes.uint16:
          self.assertEqual(dtype.min, 0)
          self.assertEqual(dtype.max, 65535)
        elif dtype == dtypes.bfloat16:
          self.assertEqual(dtype.min, 0)
          self.assertEqual(dtype.max, 4294967295)
      if numpy_dtype == np.uint32:
        self.assertEqual(dtype.min, 0)
        self.assertEqual(dtype.max, 4294967295)
      if numpy_dtype == np.uint64:
        self.assertEqual(dtype.min, 0)
        self.assertEqual(dtype.max, 18446744073709551615)
      if numpy_dtype in (np.float16, np.float32, np.float64):
        self.assertEqual(dtype.min, np.finfo(numpy_dtype).min)
        self.assertEqual(dtype.max, np.finfo(numpy_dtype).max)
  def testRepr(self):
    for enum, name in dtypes._TYPE_TO_STRING.items():
      if enum > 100:
        continue
      dtype = dtypes.DType(enum)
      self.assertEqual(repr(dtype), "tf." + name)
      import tensorflow as tf  # imported here so eval(repr(dtype)) can resolve "tf.*"
      dtype2 = eval(repr(dtype))
      self.assertEqual(type(dtype2), dtypes.DType)
      self.assertEqual(dtype, dtype2)
def testEqWithNonTFTypes(self):
self.assertNotEqual(dtypes.int32, int)
self.assertNotEqual(dtypes.float64, 2.1)
if __name__ == "__main__":
googletest.main()
|
apache-2.0
| 8,883,778,270,837,274,000 | -2,767,033,826,623,895,000 | 46.676768 | 80 | 0.695833 | false |
basicthinker/Sexain-MemController
|
gem5-stable/src/dev/BadDevice.py
|
69
|
1789
|
# Copyright (c) 2005-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
from m5.params import *
from Device import BasicPioDevice
class BadDevice(BasicPioDevice):
type = 'BadDevice'
cxx_header = "dev/baddev.hh"
devicename = Param.String("Name of device to error on")
|
apache-2.0
| -204,069,563,891,544,220 | 5,404,549,690,934,032,000 | 50.114286 | 72 | 0.787032 | false |
isolver/MarkWrite
|
distribution/MarkWrite/runapp.py
|
2
|
1605
|
# -*- coding: utf-8 -*-
from __future__ import division
#
# This file is part of the open-source MarkWrite application.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import markwrite
import sys
if sys.platform == 'win32':
# Work around so that MarkWrite app icon is also used as task bar icon.
# http://stackoverflow.com/questions/1551605/how-to-set-applications-taskbar-icon-in-windows-7/1552105#1552105
import ctypes
myappid = u'isolver.markwrite.editor.version' # arbitrary string
ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(myappid)
import pyqtgraph as pg
from markwrite.gui.mainwin import MarkWriteMainWindow
# Switch plot views to the configured background and foreground colors
pg.setConfigOption('background', markwrite.SETTINGS['plotviews_background_color'])
pg.setConfigOption('foreground', markwrite.SETTINGS['plotviews_foreground_color'])
wmwin = MarkWriteMainWindow(markwrite.app)
MarkWriteMainWindow._appdirs = markwrite.appdirs
wmwin.show()
status = markwrite.app.exec_()
|
gpl-3.0
| 3,713,506,043,947,756,000 | 8,169,105,918,568,434,000 | 41.263158 | 114 | 0.775078 | false |
kmmartins/xbmc
|
lib/libUPnP/Neptune/Extras/Tools/Logging/NeptuneLogConsole.py
|
265
|
2923
|
#!/usr/bin/env python
from socket import *
from optparse import OptionParser
UDP_ADDR = "0.0.0.0"
UDP_PORT = 7724
BUFFER_SIZE = 65536
#HEADER_KEYS = ['Logger', 'Level', 'Source-File', 'Source-Function', 'Source-Line', 'TimeStamp']
HEADER_KEYS = {
    'mini': ('Level',),  # trailing comma makes this a tuple, not a bare string
'standard': ('Logger', 'Level', 'Source-Function'),
'long': ('Logger', 'Level', 'Source-File', 'Source-Line', 'Source-Function'),
'all': ('Logger', 'Level', 'Source-File', 'Source-Line', 'Source-Function', 'TimeStamp'),
'custom': ()
}
Senders = {}
class LogRecord:
def __init__(self, data):
offset = 0
self.headers = {}
for line in data.split("\r\n"):
offset += len(line)+2
if ':' not in line: break
key,value=line.split(":",1)
self.headers[key] = value.strip()
self.body = data[offset:]
def __getitem__(self, index):
return self.headers[index]
def format(self, sender_index, keys):
parts = ['['+str(sender_index)+']']
if 'Level' in keys:
parts.append('['+self.headers['Level']+']')
if 'Logger' in keys:
parts.append(self.headers['Logger'])
if 'TimeStamp' in keys:
parts.append(self.headers['TimeStamp'])
if 'Source-File' in keys:
if 'Source-Line' in keys:
parts.append(self.headers['Source-File']+':'+self.headers['Source-Line'])
else:
parts.append(self.headers['Source-File'])
if 'Source-Function' in keys:
parts.append(self.headers['Source-Function'])
parts.append(self.body)
return ' '.join(parts)
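# A sketch of the datagram layout LogRecord expects (header names taken from
# HEADER_KEYS above; the values are illustrative, not from a real sender):
#     Logger: MyApp\r\n
#     Level: INFO\r\n
#     Source-Function: main\r\n
#     first line without a ':' ends the headers; the rest is the body
# Parsing stops at the first line containing no ':', and everything after
# that offset becomes self.body.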
class Listener:
def __init__(self, format='standard', port=UDP_PORT):
self.socket = socket(AF_INET,SOCK_DGRAM)
self.socket.bind((UDP_ADDR, port))
self.format_keys = HEADER_KEYS[format]
def listen(self):
while True:
data,addr = self.socket.recvfrom(BUFFER_SIZE)
sender_index = len(Senders.keys())
if addr in Senders:
sender_index = Senders[addr]
else:
print "### NEW SENDER:", addr
Senders[addr] = sender_index
record = LogRecord(data)
print record.format(sender_index, self.format_keys)
### main
parser = OptionParser(usage="%prog [options]")
parser.add_option("-p", "--port", dest="port", help="port number to listen on", type="int", default=UDP_PORT)
parser.add_option("-f", "--format", dest="format", help="log format (mini, standard, long, or all)", choices=('mini', 'standard', 'long', 'all'), default='standard')
(options, args) = parser.parse_args()
print "Listening on port", options.port
l = Listener(format=options.format, port=options.port)
l.listen()
|
gpl-2.0
| 81,988,384,282,787,790 | 6,561,067,361,782,414,000 | 34.646341 | 165 | 0.567225 | false |
imgmix/django-registration
|
registration/backends/simple/urls.py
|
18
|
1601
|
"""
URLconf for registration and activation, using django-registration's
one-step backend.
If the default behavior of these views is acceptable to you, simply
use a line like this in your root URLconf to set up the default URLs
for registration::
(r'^accounts/', include('registration.backends.simple.urls')),
This will also automatically set up the views in
``django.contrib.auth`` at sensible default locations.
If you'd like to customize registration behavior, feel free to set up
your own URL patterns for these views instead.
"""
from django.conf.urls import include
from django.conf.urls import patterns
from django.conf.urls import url
from django.conf import settings
from django.views.generic.base import TemplateView
from .views import RegistrationView
urlpatterns = patterns('',
url(r'^register/closed/$',
TemplateView.as_view(template_name='registration/registration_closed.html'),
name='registration_disallowed'),
url(r'^register/complete/$',
TemplateView.as_view(template_name='registration/registration_complete.html'),
name='registration_complete'),
)
if getattr(settings, 'INCLUDE_REGISTER_URL', True):
urlpatterns += patterns('',
url(r'^register/$',
RegistrationView.as_view(),
name='registration_register'),
)
if getattr(settings, 'INCLUDE_AUTH_URLS', True):
urlpatterns += patterns('',
(r'', include('registration.auth_urls')),
)
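# A customization sketch (hypothetical project urls.py, not part of this
# module): parameterize RegistrationView instead of including these patterns,
# e.g.
#     url(r'^register/$',
#         RegistrationView.as_view(success_url='/welcome/'),
#         name='registration_register'),
# success_url is an assumed class-based-view attribute; adjust it to the
# django-registration version in use.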
|
bsd-3-clause
| -8,989,767,190,875,004,000 | 2,557,205,302,093,003,300 | 32.354167 | 105 | 0.655215 | false |
ThoriumGroup/thorium
|
setup.py
|
1
|
7980
|
#!/usr/bin/env python
"""
setup.py
========
This is a generic as possible setup.py template. The goal is to retrieve almost
all of the information from the main module file, rather than relying on values
explicitly entered here.
## Usage
This setup.py script needs to be modified in the following ways:
- `MAIN_FILE` needs to be pointed at the main metadata file; this can be done
easily by modifying the second arg.
- `setup` kwargs need to be modified:
    - `classifiers` needs to be modified to suit your project.
    - `keywords` needs to be modified to suit your project.
- If you have files that need to be included (such as `LICENSE`), you need to
create a MANIFEST.in file and `include FILENAME` them.
Other than that, all the metadata should live in your main file, just like
the example below.
## Metadata Example
The following should be placed in your project module's __init__.py file:
::
__author__ = "Ivan Busquets"
__author_email__ = "[email protected]"
__copyright__ = "Copyright 2011, Ivan Busquets"
__credits__ = ["Ivan Busquets", "Sean Wallitsch", ]
__license__ = "MIT"
__version__ = "1.2"
__maintainer__ = "Sean Wallitsch"
__maintainer_email__ = "[email protected]"
__module_name__ = "animatedSnap3D"
__short_desc__ = "An extension to Nuke's 'snap' options for animated verts"
__status__ = "Development"
__url__ = 'http://github.com/ThoriumGroup/animatedSnap3D'
Note: At this time `credits` is unused.
"""
# ==============================================================================
# IMPORTS
# ==============================================================================
from setuptools import setup, find_packages
import codecs
import os
import re
# ==============================================================================
# GLOBALS
# ==============================================================================
HERE = os.path.abspath(os.path.dirname(__file__))
MAIN_FILE = os.path.join(HERE, 'thorium', '__init__.py')
# Get the long description from the relevant file
with codecs.open('README.rst', encoding='utf-8') as readme_file:
LONG_DESCRIPTION = readme_file.read()
# ==============================================================================
# PRIVATE FUNCTIONS
# ==============================================================================
def _find_metadata(filepath):
"""Reads all the metadata from a source file by opening manually.
Why open and read it and not import?
https://groups.google.com/d/topic/pypa-dev/0PkjVpcxTzQ/discussion
Args:
filepath : (str)
Filepath to the file containing the metadata.
Returns:
{str: str}
Dictionary with metadata keys and values.
Raises:
RuntimeError
Cannot proceed if version or module_name not found
"""
# Open in Latin-1 so that we avoid encoding errors.
# Use codecs.open for Python 2 compatibility
with codecs.open(filepath, 'r', 'latin1') as meta_file:
metadata_file = meta_file.read()
metadata = {}
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
metadata_file, re.M)
author_match = re.search(r"^__author__ = ['\"]([^'\"]*)['\"]",
metadata_file, re.M)
author_email_match = re.search(r"^__author_email__ = ['\"]([^'\"]*)['\"]",
metadata_file, re.M)
copyright_match = re.search(r"^__copyright__ = ['\"]([^'\"]*)['\"]",
metadata_file, re.M)
credits_match = re.search(r"^__credits__ = ['\"]([^'\"]*)['\"]",
metadata_file, re.M)
license_match = re.search(r"^__license__ = ['\"]([^'\"]*)['\"]",
metadata_file, re.M)
maint_match = re.search(r"^__maintainer__ = ['\"]([^'\"]*)['\"]",
metadata_file, re.M)
maint_email_match = re.search(r"^__maintainer_email__ = ['\"]([^'\"]*)['\"]",
metadata_file, re.M)
module_name_match = re.search(r"^__module_name__ = ['\"]([^'\"]*)['\"]",
metadata_file, re.M)
short_desc_match = re.search(r"^__short_desc__ = ['\"]([^'\"]*)['\"]",
metadata_file, re.M)
status_match = re.search(r"^__status__ = ['\"]([^'\"]*)['\"]",
metadata_file, re.M)
url_match = re.search(r"^__url__ = ['\"]([^'\"]*)['\"]",
metadata_file, re.M)
if not version_match or not module_name_match:
raise RuntimeError("Unable to find version or module_name string.")
if author_match:
metadata['author'] = author_match.group(1)
if author_email_match:
metadata['author_email'] = author_email_match.group(1)
if copyright_match:
metadata['copyright'] = copyright_match.group(1)
if credits_match:
metadata['credits'] = credits_match.group(1)
if license_match:
metadata['license'] = license_match.group(1)
if maint_match:
metadata['maintainer'] = maint_match.group(1)
if maint_email_match:
metadata['maintainer_email'] = maint_email_match.group(1)
if module_name_match:
metadata['module_name'] = module_name_match.group(1)
if short_desc_match:
metadata['short_desc'] = short_desc_match.group(1)
if status_match:
metadata['status'] = status_match.group(1)
if version_match:
metadata['version'] = version_match.group(1)
if url_match:
metadata['url'] = url_match.group(1)
return metadata
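# A minimal usage sketch for _find_metadata (illustrative, not part of the
# build): given a module file whose metadata block includes, e.g.,
#     __version__ = "1.2"
#     __module_name__ = "thorium"
# the call _find_metadata(path) returns a dict like
#     {'version': '1.2', 'module_name': 'thorium'}
# plus whichever of the other optional dunder fields were present.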
# ==============================================================================
# MAIN
# ==============================================================================
metadata = _find_metadata(MAIN_FILE)
setup(
name=metadata['module_name'],
version=metadata['version'],
description=metadata.get('short_desc', ''),
long_description=LONG_DESCRIPTION,
# The project URL.
url=metadata.get('url', ''),
# Author & Maintainer details
author=metadata.get('author', ''),
author_email=metadata.get('author_email', ''),
maintainer=metadata.get('maintainer', ''),
maintainer_email=metadata.get('maintainer_email', ''),
# Choose your license
license=metadata.get('license', ''),
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
# Indicate who your project is intended for
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Topic :: Multimedia :: Graphics',
'Topic :: Multimedia :: Video',
'Topic :: Software Development :: Libraries :: Python Modules',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
# OS
'Operating System :: OS Independent',
# Language
'Natural Language :: English',
],
# What does your project relate to?
keywords='film tv color vfx nuke',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages.
packages=find_packages(exclude=['tests']),
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
package_data={},
include_package_data=True,
# Targeted OS
platforms='any',
)
|
mit
| 5,906,062,170,837,130,000 | 1,076,208,261,235,634,400 | 34.784753 | 81 | 0.545865 | false |
Ironarcher/casso-backend
|
lib/flask/module.py
|
850
|
1363
|
# -*- coding: utf-8 -*-
"""
flask.module
~~~~~~~~~~~~
Implements a class that represents module blueprints.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
from .blueprints import Blueprint
def blueprint_is_module(bp):
"""Used to figure out if something is actually a module"""
return isinstance(bp, Module)
class Module(Blueprint):
"""Deprecated module support. Until Flask 0.6 modules were a different
name of the concept now available as blueprints in Flask. They are
essentially doing the same but have some bad semantics for templates and
static files that were fixed with blueprints.
.. versionchanged:: 0.7
Modules were deprecated in favor for blueprints.
"""
def __init__(self, import_name, name=None, url_prefix=None,
static_path=None, subdomain=None):
if name is None:
assert '.' in import_name, 'name required if package name ' \
'does not point to a submodule'
name = import_name.rsplit('.', 1)[1]
Blueprint.__init__(self, name, import_name, url_prefix=url_prefix,
subdomain=subdomain, template_folder='templates')
if os.path.isdir(os.path.join(self.root_path, 'static')):
self._static_folder = 'static'
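# Deprecated-usage sketch (module name illustrative): the old Flask pattern
#     admin = Module(__name__, name='admin', url_prefix='/admin')
# now simply constructs a Blueprint with template_folder='templates' and,
# when a static/ directory exists next to the module, a static folder too.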
|
apache-2.0
| 8,851,059,215,051,427,000 | 599,650,876,936,918,500 | 31.452381 | 76 | 0.634629 | false |
geekboxzone/lollipop_external_chromium_org_third_party_WebKit
|
Tools/Scripts/webkitpy/layout_tests/port/config.py
|
68
|
2993
|
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# FIXME: Remove this file altogether. It's useless in a Blink checkout.
import logging
from webkitpy.common import webkit_finder
_log = logging.getLogger(__name__)
class Config(object):
_FLAGS_FROM_CONFIGURATIONS = {
"Debug": "--debug",
"Release": "--release",
}
def __init__(self, executive, filesystem, port_implementation=None):
self._executive = executive
self._filesystem = filesystem
self._webkit_finder = webkit_finder.WebKitFinder(self._filesystem)
self._default_configuration = None
self._build_directories = {}
self._port_implementation = port_implementation
def build_directory(self, configuration):
"""Returns the path to the build directory for the configuration."""
if configuration:
flags = ["--configuration", self.flag_for_configuration(configuration)]
else:
configuration = ""
flags = []
if self._port_implementation:
flags.append('--' + self._port_implementation)
if not self._build_directories.get(configuration):
self._build_directories[configuration] = self._webkit_finder.path_from_webkit_base('out', configuration)
return self._build_directories[configuration]
def flag_for_configuration(self, configuration):
return self._FLAGS_FROM_CONFIGURATIONS[configuration]
def default_configuration(self):
return 'Release'
|
bsd-3-clause
| 569,399,918,490,875,970 | 2,071,804,570,626,363,400 | 40 | 116 | 0.719011 | false |
vadimtk/chrome4sdp
|
build/android/gyp/util/build_device.py
|
2
|
3283
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" A simple device interface for build steps.
"""
import logging
import os
import re
import sys
from util import build_utils
BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..', '..')
sys.path.append(BUILD_ANDROID_DIR)
from pylib.device import adb_wrapper
from pylib.device import device_errors
from pylib.device import device_utils
def GetAttachedDevices():
return [a.GetDeviceSerial()
for a in adb_wrapper.AdbWrapper.Devices()]
class BuildDevice(object):
def __init__(self, configuration):
self.id = configuration['id']
self.description = configuration['description']
self.install_metadata = configuration['install_metadata']
self.device = device_utils.DeviceUtils(self.id)
def RunShellCommand(self, *args, **kwargs):
return self.device.RunShellCommand(*args, **kwargs)
def PushChangedFiles(self, *args, **kwargs):
return self.device.PushChangedFiles(*args, **kwargs)
def GetSerialNumber(self):
return self.id
def Install(self, *args, **kwargs):
return self.device.Install(*args, **kwargs)
def InstallSplitApk(self, *args, **kwargs):
return self.device.InstallSplitApk(*args, **kwargs)
def GetInstallMetadata(self, apk_package):
"""Gets the metadata on the device for the apk_package apk."""
# Matches lines like:
# -rw-r--r-- system system 7376582 2013-04-19 16:34 \
# org.chromium.chrome.shell.apk
# -rw-r--r-- system system 7376582 2013-04-19 16:34 \
# org.chromium.chrome.shell-1.apk
apk_matcher = lambda s: re.match('.*%s(-[0-9]*)?.apk$' % apk_package, s)
matches = filter(apk_matcher, self.install_metadata)
return matches[0] if matches else None
def GetConfigurationForDevice(device_id):
device = device_utils.DeviceUtils(device_id)
configuration = None
has_root = False
is_online = device.IsOnline()
if is_online:
cmd = 'ls -l /data/app; getprop ro.build.description'
cmd_output = device.RunShellCommand(cmd)
has_root = not 'Permission denied' in cmd_output[0]
if not has_root:
# Disable warning log messages from EnableRoot()
logging.getLogger().disabled = True
try:
device.EnableRoot()
has_root = True
except device_errors.CommandFailedError:
has_root = False
finally:
logging.getLogger().disabled = False
cmd_output = device.RunShellCommand(cmd)
configuration = {
'id': device_id,
'description': cmd_output[-1],
'install_metadata': cmd_output[:-1],
}
return configuration, is_online, has_root
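# Shape of the configuration returned above (all values illustrative only):
#     {'id': '0123456789abcdef',
#      'description': '<output of getprop ro.build.description>',
#      'install_metadata': ['-rw-r--r-- system system ... org.example.apk']}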
def WriteConfigurations(configurations, path):
# Currently we only support installing to the first device.
build_utils.WriteJson(configurations[:1], path, only_if_changed=True)
def ReadConfigurations(path):
return build_utils.ReadJson(path)
def GetBuildDevice(configurations):
assert len(configurations) == 1
return BuildDevice(configurations[0])
def GetBuildDeviceFromPath(path):
configurations = ReadConfigurations(path)
if len(configurations) > 0:
    return GetBuildDevice(configurations)  # reuse the configurations already read
return None
|
bsd-3-clause
| 4,432,735,159,422,179,300 | 250,831,173,475,667,940 | 28.576577 | 76 | 0.697228 | false |
Fudan-University/sakai
|
reference/library/src/webapp/editor/FCKeditor/editor/filemanager/connectors/py/connector.py
|
126
|
4239
|
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector( FCKeditorConnectorBase,
GetFoldersCommandMixin,
GetFoldersAndFilesCommandMixin,
CreateFolderCommandMixin,
UploadFileCommandMixin,
BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
"The Standard connector class."
def doResponse(self):
"Main function. Process the request, set headers and return a string as response."
s = ""
# Check if this connector is disabled
if not(Config.Enabled):
return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
# Make sure we have valid inputs
for key in ("Command","Type","CurrentFolder"):
if not self.request.has_key (key):
return
# Get command, resource type and current folder
command = self.request.get("Command")
resourceType = self.request.get("Type")
currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
# Check for invalid paths
if currentFolder is None:
if (command == "FileUpload"):
return self.sendUploadResults( errorNo = 102, customMsg = "" )
else:
return self.sendError(102, "")
# Check if it is an allowed command
if ( not command in Config.ConfigAllowedCommands ):
return self.sendError( 1, 'The %s command isn\'t allowed' % command )
if ( not resourceType in Config.ConfigAllowedTypes ):
return self.sendError( 1, 'Invalid type specified' )
# Setup paths
if command == "QuickUpload":
self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
else:
self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
self.webUserFilesFolder = Config.FileTypesPath[resourceType]
if not self.userFilesFolder: # no absolute path given (dangerous...)
self.userFilesFolder = mapServerPath(self.environ,
self.webUserFilesFolder)
# Ensure that the directory exists.
if not os.path.exists(self.userFilesFolder):
try:
self.createServerFolder( self.userFilesFolder )
except:
return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
# File upload doesn't have to return XML, so intercept here
if (command == "FileUpload"):
return self.uploadFile(resourceType, currentFolder)
# Create Url
url = combinePaths( self.webUserFilesFolder, currentFolder )
# Begin XML
s += self.createXmlHeader(command, resourceType, currentFolder, url)
# Execute the command
selector = {"GetFolders": self.getFolders,
"GetFoldersAndFiles": self.getFoldersAndFiles,
"CreateFolder": self.createFolder,
}
s += selector[command](resourceType, currentFolder)
s += self.createXmlFooter()
return s
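# Request sketch for doResponse (parameter names as validated above; the
# resource type and folder values are illustrative):
#   connector.py?Command=GetFolders&Type=File&CurrentFolder=/docs/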
# Running from command line (plain old CGI)
if __name__ == '__main__':
try:
# Create a Connector Instance
conn = FCKeditorConnector()
data = conn.doResponse()
for header in conn.headers:
print '%s: %s' % header
print
print data
except:
print "Content-Type: text/plain"
print
import cgi
cgi.print_exception()
|
apache-2.0
| -4,380,830,258,635,485,700 | 3,505,162,292,759,804,400 | 33.033058 | 205 | 0.710545 | false |
ChakshuGautam/coursera-dl
|
coursera/test/test_parsing.py
|
15
|
3627
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test functionality of coursera module.
"""
import json
import os.path
import pytest
from six import iteritems
from mock import patch, Mock, mock_open
from coursera import coursera_dl
# JSon Handling
@pytest.fixture
def get_page(monkeypatch):
monkeypatch.setattr(coursera_dl, 'get_page', Mock())
@pytest.fixture
def json_path():
return os.path.join(os.path.dirname(__file__), "fixtures", "json")
def test_that_should_not_dl_if_file_exist(get_page, json_path):
coursera_dl.get_page = Mock()
coursera_dl.download_about(object(), "matrix-002", json_path)
assert coursera_dl.get_page.called is False
def test_that_we_parse_and_write_json_correctly(get_page, json_path):
unprocessed_json = os.path.join(os.path.dirname(__file__),
"fixtures", "json", "unprocessed.json")
raw_data = open(unprocessed_json).read()
coursera_dl.get_page = lambda x, y: raw_data
open_mock = mock_open()
with patch('coursera.coursera_dl.open', open_mock, create=True):
coursera_dl.download_about(object(), "networksonline-002", json_path)
about_json = os.path.join(json_path, 'networksonline-002-about.json')
open_mock.assert_called_once_with(about_json, 'w')
data = json.loads(open_mock().write.call_args[0][0])
assert data['id'] == 394
assert data['shortName'] == 'networksonline'
# Test Syllabus Parsing
@pytest.fixture
def get_video(monkeypatch):
"""
    Mock some methods that would otherwise repeatedly create many web
    requests.
More specifically, we mock:
* the search for hidden videos
* the actual download of videos
"""
# Mock coursera_dl.grab_hidden_video_url
monkeypatch.setattr(coursera_dl, 'grab_hidden_video_url',
lambda session, href: None)
# Mock coursera_dl.get_video
monkeypatch.setattr(coursera_dl, 'get_video',
lambda session, href: None)
@pytest.mark.parametrize(
"filename,num_sections,num_lectures,num_resources,num_videos", [
("regular-syllabus.html", 23, 102, 502, 102),
("links-to-wikipedia.html", 5, 37, 158, 36),
("preview.html", 20, 106, 106, 106),
("sections-not-to-be-missed.html", 9, 61, 224, 61),
("sections-not-to-be-missed-2.html", 20, 121, 397, 121),
("parsing-datasci-001-with-bs4.html", 10, 97, 358, 97), # issue 134
("parsing-startup-001-with-bs4.html", 4, 44, 136, 44), # issue 137
("parsing-wealthofnations-001-with-bs4.html", 8, 74, 296, 74), # issue 131
("parsing-malsoftware-001-with-bs4.html", 3, 18, 56, 16), # issue 148
("multiple-resources-with-the-same-format.html", 18, 97, 478, 97),
]
)
def test_parse(get_video, filename, num_sections, num_lectures, num_resources, num_videos):
filename = os.path.join(os.path.dirname(__file__), "fixtures", "html",
filename)
with open(filename) as syllabus:
syllabus_page = syllabus.read()
sections = coursera_dl.parse_syllabus(None, syllabus_page, None)
# section count
assert len(sections) == num_sections
# lecture count
lectures = [lec for sec in sections for lec in sec[1]]
assert len(lectures) == num_lectures
# resource count
resources = [(res[0], len(res[1]))
for lec in lectures for res in iteritems(lec[1])]
assert sum(r for f, r in resources) == num_resources
# mp4 count
assert sum(r for f, r in resources if f == "mp4") == num_videos
|
lgpl-3.0
| 3,627,427,251,181,220,000 | -3,178,741,639,451,970,600 | 30.53913 | 91 | 0.629997 | false |
paulscherrerinstitute/snapshot
|
snapshot/gui/snapshot_gui.py
|
1
|
15841
|
#!/usr/bin/env python
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
import datetime
import json
import os
import sys
from PyQt5 import QtCore
from PyQt5.QtCore import Qt, QTimer
from PyQt5.QtWidgets import (
QAction,
QApplication,
QCheckBox,
QDialog,
QFormLayout,
QLabel,
QMainWindow,
QMenu,
QMessageBox,
QPlainTextEdit,
QSplitter,
QStatusBar,
QVBoxLayout,
QWidget,
)
from snapshot.ca_core import Snapshot
from snapshot.core import (
SnapshotError,
background_workers,
enable_tracing,
global_thread_pool,
since_start,
)
from snapshot.parser import ReqParseError, get_save_files, initialize_config
from .compare import SnapshotCompareWidget
from .restore import SnapshotRestoreWidget
from .save import SnapshotSaveWidget
from .utils import DetailedMsgBox, SnapshotConfigureDialog, make_separator
class SnapshotGui(QMainWindow):
"""
Main GUI class for Snapshot application. It needs separate working
thread where core of the application is running
"""
def __init__(self, config: dict = {}, parent=None):
"""
:param config: application settings
:param parent: parent QtObject
:return:
"""
QMainWindow.__init__(self, parent)
self.resize(1500, 850)
if not config or config['config_ok'] is False:
msg = "Loading configuration file failed! " \
"Do you want to continue without it?\n"
msg_window = DetailedMsgBox(msg, config['config_error'], 'Warning')
reply = msg_window.exec_()
if reply == QMessageBox.No:
QTimer.singleShot(0, lambda: self.close())
return
self.common_settings = config
if not config['req_file_path'] or not config['macros_ok']:
req_file_macros = config['req_file_macros']
req_file_path = config['req_file_path']
init_path = config['init_path']
configure_dialog = \
SnapshotConfigureDialog(self,
init_macros=req_file_macros,
init_path=os.path.join(init_path,
req_file_path))
configure_dialog.accepted.connect(self.set_request_file)
if configure_dialog.exec_() == QDialog.Rejected:
QTimer.singleShot(0, lambda: self.close())
return
# Before creating GUI, snapshot must be initialized.
self.snapshot = Snapshot()
# Create main GUI components:
# menu bar
# ______________________________
# | save_widget | restore_widget |
# | | |
# | autorefresh | |
# --------------------------------
# | compare_widget |
# --------------------------------
# | sts_log |
# ______________________________
# status_bar
#
# menu bar
menu_bar = self.menuBar()
file_menu = QMenu("File", menu_bar)
open_new_req_file_action = QAction("Open", file_menu)
open_new_req_file_action.setMenuRole(QAction.NoRole)
open_new_req_file_action.triggered.connect(self.open_new_req_file)
file_menu.addAction(open_new_req_file_action)
quit_action = QAction("Quit", file_menu)
quit_action.setMenuRole(QAction.NoRole)
quit_action.triggered.connect(self.close)
file_menu.addAction(quit_action)
menu_bar.addMenu(file_menu)
# Status components are needed by other GUI elements
self.status_log = SnapshotStatusLog(self)
self.common_settings["sts_log"] = self.status_log
self.status_bar = SnapshotStatus(self.common_settings, self)
self.common_settings["sts_info"] = self.status_bar
# Create status log show/hide control and add it to status bar
self.show_log_control = QCheckBox("Show status log")
self.show_log_control.setStyleSheet("background-color: transparent")
self.show_log_control.stateChanged.connect(self.status_log.setVisible)
self.status_log.setVisible(False)
self.status_bar.addPermanentWidget(self.show_log_control)
# Creating main layout
# Compare widget. Must be updated in case of file selection
self.compare_widget = SnapshotCompareWidget(self.snapshot,
self.common_settings, self)
self.compare_widget.pvs_filtered.connect(self.handle_pvs_filtered)
self.compare_widget.restore_requested.connect(
self._handle_restore_request)
self.save_widget = SnapshotSaveWidget(self.snapshot,
self.common_settings, self)
self.restore_widget = SnapshotRestoreWidget(self.snapshot,
self.common_settings, self)
self.restore_widget.files_updated.connect(self.handle_files_updated)
self.restore_widget.files_selected.connect(self.handle_selected_files)
self.save_widget.saved.connect(self.restore_widget.rebuild_file_list)
self.autorefresh = QCheckBox("Periodic PV update")
self.autorefresh.setChecked(True)
self.autorefresh.toggled.connect(self.toggle_autorefresh)
left_layout = QVBoxLayout()
left_layout.addWidget(self.save_widget)
left_layout.addStretch()
left_layout.addWidget(make_separator(self, 'horizontal'))
left_layout.addWidget(self.autorefresh)
left_widget = QWidget()
left_widget.setLayout(left_layout)
sr_splitter = QSplitter(self)
sr_splitter.addWidget(left_widget)
sr_splitter.addWidget(self.restore_widget)
sr_splitter.setStretchFactor(0, 1)
sr_splitter.setStretchFactor(1, 2)
main_splitter = QSplitter(self)
main_splitter.addWidget(sr_splitter)
main_splitter.addWidget(self.compare_widget)
main_splitter.addWidget(self.status_log)
main_splitter.setOrientation(Qt.Vertical)
main_splitter.setStretchFactor(0, 1)
main_splitter.setStretchFactor(1, 3)
# Set default widget and add status bar
self.setCentralWidget(main_splitter)
self.setStatusBar(self.status_bar)
# Show GUI and manage window properties
self.show()
self.setWindowTitle(
os.path.basename(self.common_settings["req_file_path"]) + ' - Snapshot')
        # Status log default height should be 100px; set it via the splitter
        widgets_sizes = main_splitter.sizes()
        widgets_sizes[main_splitter.indexOf(self.status_log)] = 100
main_splitter.setSizes(widgets_sizes)
# Schedule opening the request file for after the GUI is shown.
QTimer.singleShot(
100,
lambda: self.change_req_file(
self.common_settings['req_file_path'],
self.common_settings['req_file_macros'],))
def toggle_autorefresh(self, checked):
if checked:
background_workers.resume_one('pv_updater')
else:
background_workers.suspend_one('pv_updater')
def open_new_req_file(self):
configure_dialog = SnapshotConfigureDialog(self, init_path=self.common_settings['req_file_path'],
init_macros=self.common_settings['req_file_macros'])
configure_dialog.accepted.connect(self.change_req_file)
configure_dialog.exec_() # Do not act on rejected
def change_req_file(self, req_file_path, macros):
background_workers.suspend()
self.status_bar.set_status("Loading new request file ...", 0, "orange")
self.set_request_file(req_file_path, macros)
save_dir = self.common_settings['save_dir']
# Read snapshots and instantiate PVs in parallel
def getfiles(*args):
return get_save_files(*args)
future_files = global_thread_pool.submit(getfiles, save_dir,
req_file_path)
self.init_snapshot(req_file_path, macros)
if self.common_settings['save_dir'] == save_dir:
already_parsed_files = future_files.result()
else:
# Apparently init_snapshot() found that the request file was
# invalid, the save_dir changed, and we need to junk the
# already read snapfiles.
future_files.cancel()
already_parsed_files = get_save_files(
self.common_settings['save_dir'],
self.common_settings['req_file_path'])
# handle all gui components
self.restore_widget.handle_new_snapshot_instance(self.snapshot,
already_parsed_files)
self.save_widget.handle_new_snapshot_instance(self.snapshot)
self.compare_widget.handle_new_snapshot_instance(self.snapshot)
self.setWindowTitle(os.path.basename(req_file_path) + ' - Snapshot')
self.status_bar.set_status("New request file loaded.", 3000, "#64C864")
background_workers.resume()
since_start("GUI processing finished")
def set_request_file(self, path: str, macros: dict):
self.common_settings["req_file_path"] = path
self.common_settings["req_file_macros"] = macros
if not self.common_settings['save_dir']:
self.common_settings['save_dir'] = os.path.dirname(path)
def init_snapshot(self, req_file_path, req_macros=None):
self.snapshot.clear_pvs()
req_macros = req_macros or {}
reopen_config = False
try:
self.snapshot = Snapshot(req_file_path, req_macros)
self.set_request_file(req_file_path, req_macros)
except (ReqParseError, OSError) as e:
msg = 'Request file cannot be loaded. ' \
'See details for type of error.'
msg_window = DetailedMsgBox(msg, str(e), 'Warning', self,
QMessageBox.Ok)
msg_window.exec_()
reopen_config = True
except SnapshotError as e:
QMessageBox.warning(
self,
"Warning",
str(e),
QMessageBox.Ok,
QMessageBox.NoButton)
reopen_config = True
if reopen_config:
configure_dialog = SnapshotConfigureDialog(
self, init_path=req_file_path, init_macros=req_macros)
configure_dialog.accepted.connect(self.init_snapshot)
if configure_dialog.exec_() == QDialog.Rejected:
self.close()
# Merge request file metadata into common settings, replacing existing
# settings.
# TODO Labels and filters are only overridden if given in the request
# file, for backwards compatibility with config files. After config
# files are out of use, change this to always override old values.
req_labels = self.snapshot.req_file_metadata.get('labels', {})
if req_labels:
self.common_settings['force_default_labels'] = \
req_labels.get('force_default_labels', False)
self.common_settings['default_labels'] = \
req_labels.get('labels', [])
req_filters = self.snapshot.req_file_metadata.get('filters', {})
if req_filters:
filters = self.common_settings['predefined_filters']
for fltype in ('filters', 'rgx-filters'):
filters[fltype] = req_filters.get(fltype, [])
self.common_settings['machine_params'] = \
self.snapshot.req_file_metadata.get('machine_params', {})
# Metadata to be filled from snapshot files.
self.common_settings['existing_labels'] = []
self.common_settings['existing_params'] = []
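    # Sketch of the request-file metadata shape consumed above (keys as
    # referenced in this method; values illustrative only):
    #     {'labels': {'force_default_labels': False, 'labels': ['ref']},
    #      'filters': {'filters': ['PV:*'], 'rgx-filters': []},
    #      'machine_params': {...}}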
def handle_files_updated(self):
self.save_widget.update_labels()
self.compare_widget.clear_snap_files()
def handle_selected_files(self, selected_files):
# selected_files is a dict() with file names as keywords and
# dict() of pv data as value
self.compare_widget.new_selected_files(selected_files)
def _handle_restore_request(self, pvs_list):
self.restore_widget.do_restore(pvs_list)
def handle_pvs_filtered(self, pv_names_set):
# Yes, this merely sets the reference to the set of names, so
# technically, it needn't be done every time. But good luck tracking
# down who updated the list without this ;)
self.restore_widget.filtered_pvs = pv_names_set
# -------- Status widgets -----------
class SnapshotStatusLog(QWidget):
""" Command line like logger widget """
def __init__(self, parent=None):
QWidget.__init__(self, parent)
self.sts_log = QPlainTextEdit(self)
self.sts_log.setReadOnly(True)
layout = QVBoxLayout()
layout.setContentsMargins(10, 10, 10, 10)
layout.addWidget(self.sts_log)
self.setLayout(layout)
def log_msgs(self, msgs, msg_times):
if not isinstance(msgs, list):
msgs = [msgs]
if not isinstance(msg_times, list):
msg_times = [msg_times] * len(msgs)
msg_times = (datetime.datetime.fromtimestamp(
t).strftime('%H:%M:%S.%f') for t in msg_times)
self.sts_log.insertPlainText(
"\n".join(
"[{}] {}".format(
*
t) for t in zip(
msg_times,
msgs)) +
"\n")
self.sts_log.ensureCursorVisible()
class SnapshotStatus(QStatusBar):
def __init__(self, common_settings, parent=None):
QStatusBar.__init__(self, parent)
self.common_settings = common_settings
self.setSizeGripEnabled(False)
self.timer = QtCore.QTimer(self)
self.timer.timeout.connect(self.clear_status)
self.status_txt = QLabel()
self.status_txt.setStyleSheet("background-color: transparent")
self.addWidget(self.status_txt)
self.set_status()
def set_status(self, text="Ready", duration=0,
background="rgba(0, 0, 0, 30)"):
# Stop any existing timers
self.timer.stop()
if self.common_settings["force"]:
text = "[force mode] " + text
self.status_txt.setText(text)
style = "background-color : " + background
self.setStyleSheet(style)
# Force GUI updates to show status
QtCore.QCoreApplication.processEvents()
if duration:
self.timer.start(duration)
def clear_status(self):
self.set_status("Ready", 0, "rgba(0, 0, 0, 30)")
# This function should be called from outside, to start the gui
def start_gui(*args, **kwargs):
if kwargs.get('trace_execution'):
enable_tracing()
since_start("Interpreter started")
config = initialize_config(**kwargs)
app = QApplication(sys.argv)
# Load an application style
default_style_path = os.path.dirname(os.path.realpath(__file__))
default_style_path = os.path.join(default_style_path, "qss/default.qss")
app.setStyleSheet("file:///" + default_style_path)
    # IMPORTANT: a reference to the SnapshotGui object needs to be kept,
    # otherwise the GUI will not show up
_ = SnapshotGui(config)
since_start("GUI constructed")
sys.exit(app.exec_())
|
gpl-3.0
| 2,498,204,030,185,270,300 | 4,922,762,961,024,302,000 | 36.716667 | 105 | 0.596806 | false |
bzloink/psi4
|
conda/_conda_vers.py
|
3
|
1480
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2017 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""Dummy setup.py file solely for the purposes of getting an on-the-fly
computed version number into the conda recipe.
"""
import sys
from distutils.core import setup
def version_func():
import subprocess
command = 'python psi4/versioner.py --formatonly --format={versionlong}'
process = subprocess.Popen(command.split(), shell=False, stdout=subprocess.PIPE)
(out, err) = process.communicate()
if sys.version_info >= (3, 0):
return out.decode('utf-8').strip()
else:
return out.strip()
setup(
version=version_func(),
)
|
lgpl-3.0
| 9,170,106,331,771,376,000 | 4,358,292,418,266,059,000 | 29.204082 | 84 | 0.728378 | false |
sslattery/Chimera
|
doc/spn/fuel_assembly/sp3/fs_azilut02.py
|
1
|
16598
|
###############################################################################
## fs.py
## 9te [angband.ornl.gov]
## Wed Jan 12 10:37:50 2011
###############################################################################
## Copyright (C) 2008 Oak Ridge National Laboratory, UT-Battelle, LLC.
##---------------------------------------------------------------------------##
## generated by /data/denovo/production/head/setup/bin/pygen built on 20110112
###############################################################################
import os, sys, math, string
# pyspn equation type
from spn_fv import *
print_it = False
##---------------------------------------------------------------------------##
## MAIN
##---------------------------------------------------------------------------##
initialize(sys.argv)
if node() == 0:
print "Denovo - pyspn Python Front-End"
print "-------------------------------"
print "Release : %16s" % (release())
print "Release Date : %16s" % (release_date())
print "Build Date : %16s" % (build_date())
print
timer = Timer()
timer.start()
##---------------------------------------------------------------------------##
## XS DATA
####### UO2 Fuel-Clad Macroscopic Cross Sections ##########
## Transport-corrected Total Cross Sections
T_UO2 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
T_UO2[0] = 1.77949e-1
T_UO2[1] = 3.29805e-1
T_UO2[2] = 4.80388e-1
T_UO2[3] = 5.54367e-1
T_UO2[4] = 3.11801e-1
T_UO2[5] = 3.95168e-1
T_UO2[6] = 5.64406e-1
## Fission Cross Section
F_UO2 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
F_UO2[0] = 7.21206e-3
F_UO2[1] = 8.19301e-4
F_UO2[2] = 6.45320e-3
F_UO2[3] = 1.85648e-2
F_UO2[4] = 1.78084e-2
F_UO2[5] = 8.30348e-2
F_UO2[6] = 2.16004e-1
## Nu
N_UO2 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
N_UO2[0] = 2.78145
N_UO2[1] = 2.47443
N_UO2[2] = 2.43383
N_UO2[3] = 2.43380
N_UO2[4] = 2.43380
N_UO2[5] = 2.43380
N_UO2[6] = 2.43380
## Chi
C_UO2 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
C_UO2[0] = 5.87910e-1
C_UO2[1] = 4.11760e-1
C_UO2[2] = 3.39060e-4
C_UO2[3] = 1.17610e-7
C_UO2[4] = 0.00000000
C_UO2[5] = 0.00000000
C_UO2[6] = 0.00000000
## Scattering Matrix for UO2 Fuel-Clad (Macroscopic)
S_UO2 = [ [[]], [[]], [[]], [[]], [[]], [[]], [[]]]
S_UO2[0] = [[1.27537e-1]]
S_UO2[1] = [[4.23780e-2], [3.24456e-1]]
S_UO2[2] = [[9.43740e-6], [1.63140e-3], [4.50940e-1]]
S_UO2[3] = [[5.51630e-9], [3.14270e-9], [2.67920e-3], [4.52565e-1], [1.25250e-4]]
S_UO2[4] = [[0.00000000], [0.00000000], [0.00000000], [5.56640e-3], [2.71401e-1], [1.29680e-3]]
S_UO2[5] = [[0.00000000], [0.00000000], [0.00000000], [0.00000000], [1.02550e-2], [2.65802e-1], [8.54580e-3]]
S_UO2[6] = [[0.00000000], [0.00000000], [0.00000000], [0.00000000], [1.00210e-8], [1.68090e-2], [2.73080e-1]]
## Upscattering Matrix
U_UO2 = [ [], [], [], [], [], [], [] ]
U_UO2[0] = []
U_UO2[1] = []
U_UO2[2] = []
U_UO2[3] = [4]
U_UO2[4] = [5]
U_UO2[5] = [6]
U_UO2[6] = []
######## 4.3% MOX Fuel-Clad Macroscopic Cross-Sections ############
## Transport-corrected Total Cross Sections
T_MOX43 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
T_MOX43[0] = 1.78731e-1
T_MOX43[1] = 3.30849e-1
T_MOX43[2] = 4.83772e-1
T_MOX43[3] = 5.66922e-1
T_MOX43[4] = 4.26227e-1
T_MOX43[5] = 6.78997e-1
T_MOX43[6] = 6.82852e-1
## Fission Cross-Sections
F_MOX43 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
F_MOX43[0] = 7.62704e-3
F_MOX43[1] = 8.76898e-4
F_MOX43[2] = 5.69835e-3
F_MOX43[3] = 2.28872e-2
F_MOX43[4] = 1.07635e-2
F_MOX43[5] = 2.32757e-1
F_MOX43[6] = 2.48968e-1
## Nu Cross-Sections
N_MOX43 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
N_MOX43[0] = 2.85209
N_MOX43[1] = 2.89099
N_MOX43[2] = 2.85486
N_MOX43[3] = 2.86073
N_MOX43[4] = 2.85447
N_MOX43[5] = 2.86415
N_MOX43[6] = 2.86780
## Chi Cross-Sections
C_MOX43 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
C_MOX43[0] = 5.87910e-1
C_MOX43[1] = 4.11760e-1
C_MOX43[2] = 3.39060e-4
C_MOX43[3] = 1.17610e-7
C_MOX43[4] = 0.00000000
C_MOX43[5] = 0.00000000
C_MOX43[6] = 0.00000000
## Scattering Matrix for 4.3% MOX Fuel-Clad (Macroscopic)
S_MOX43 = [ [[]], [[]], [[]], [[]], [[]], [[]], [[]] ]
S_MOX43[0] = [[1.28876e-1]]
S_MOX43[1] = [[4.14130e-2], [3.25452e-1]]
S_MOX43[2] = [[8.22900e-6], [1.63950e-3], [4.53188e-1]]
S_MOX43[3] = [[5.04050e-9], [1.59820e-9], [2.61420e-3], [4.57173e-1], [1.60460e-4]]
S_MOX43[4] = [[0.00000000], [0.00000000], [0.00000000], [5.53940e-3], [2.76814e-1], [2.00510e-3]]
S_MOX43[5] = [[0.00000000], [0.00000000], [0.00000000], [0.00000000], [9.31270e-3], [2.52962e-1], [8.49480e-3]]
S_MOX43[6] = [[0.00000000], [0.00000000], [0.00000000], [0.00000000], [9.16560e-9], [1.48500e-2], [2.65007e-1]]
## Upscattering Matrix
U_MOX43 = [ [], [], [], [], [], [], [] ]
U_MOX43[0] = []
U_MOX43[1] = []
U_MOX43[2] = []
U_MOX43[3] = [4]
U_MOX43[4] = [5]
U_MOX43[5] = [6]
U_MOX43[6] = []
############### Moderator 1 Macroscopic Cross-Sections ################
## Transport-corrected Total Cross Section
T_MOD1 = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
T_MOD1[0] = 1.59206e-1
T_MOD1[1] = 4.12970e-1
T_MOD1[2] = 5.90310e-1
T_MOD1[3] = 5.84350e-1
T_MOD1[4] = 7.18000e-1
T_MOD1[5] = 1.25445
T_MOD1[6] = 2.65038
## Scattering Matrix for Moderator (Macroscopic)
S_MOD1 = [ [[]], [[]], [[]], [[]], [[]], [[]], [[]] ]
S_MOD1[0] = [[4.44777e-2]]
S_MOD1[1] = [[1.13400e-1], [2.82334e-1]]
S_MOD1[2] = [[7.23470e-4], [1.29940e-1], [3.45256e-1]]
S_MOD1[3] = [[3.74990e-6], [6.23400e-4], [2.24570e-1], [9.10284e-2], [7.14370e-5]]
S_MOD1[4] = [[5.31840e-8], [4.80020e-5], [1.69990e-2], [4.15510e-1], [1.39138e-1], [2.21570e-3]]
S_MOD1[5] = [[0.00000000], [7.44860e-6], [2.64430e-3], [6.37320e-2], [5.11820e-1], [6.99913e-1], [1.32440e-1]]
S_MOD1[6] = [[0.00000000], [1.04550e-6], [5.03440e-4], [1.21390e-2], [6.12290e-2], [5.37320e-1], [2.48070 ]]
## Upscattering Matrix
U_MOD1 = [ [], [], [], [], [], [], [] ]
U_MOD1[0] = []
U_MOD1[1] = []
U_MOD1[2] = []
U_MOD1[3] = [4]
U_MOD1[4] = [5]
U_MOD1[5] = [6]
U_MOD1[6] = []
################### Create nuf vectors
NUF_UO2 = []
NUF_MOX43 = []
for i in range(0, 7):
NUF_UO2.append( N_UO2[i] * F_UO2[i] )
NUF_MOX43.append( N_MOX43[i] * F_MOX43[i] )
##---------------------------------------------------------------------------##
## BUILD MESH
def build_mesh(N):
# vacuum = 0
# UO2 = 1
# MOX = 2
# moderator = 3
# UO2 pins
uo2_pin = Pincell()
uo2_ids = [1]
uo2_r = [0.4759]
uo2_pin.set_shells(uo2_ids, uo2_r, 3)
# MOX pins
mox_pin = Pincell()
mox_ids = [2]
mox_r = [0.4759]
mox_pin.set_shells(mox_ids, mox_r, 3)
# Make a 2x2 uo2 lattice and a 2x2 mox lattice
uo2_lat = Lattice(2)
mox_lat = Lattice(2)
# lattices are uniform
layout = [0, 0, 0, 0]
uo2_lat.set_pins(layout)
mox_lat.set_pins(layout)
# assign the pins in the lattices
uo2_lat.assign_pin(uo2_pin, 0)
mox_lat.assign_pin(mox_pin, 0)
# build the lattice
uo2_lat.build_lattice(N)
mox_lat.build_lattice(N)
# print out mixing tables
if print_it:
print "UO2 Lattice"
for m in xrange(uo2_lat.num_mixtures()):
vf = uo2_lat.f(m)
print "%4i" % (m),
for f in vf:
print "%9.6f" % (f),
print
print "MOX Lattice"
for m in xrange(mox_lat.num_mixtures()):
vf = mox_lat.f(m)
print "%4i" % (m),
for f in vf:
print "%9.6f" % (f),
print
# make the mixtable for the combined lattices by appending the mox table
# to the UO2 table (don't include the clean mixtures at the front of the
# table)
num_mixtures = uo2_lat.num_mixtures() + mox_lat.num_mixtures() - 4
table = Vec_Dbl(num_mixtures * 4)
ctr = 0
mox_offset = uo2_lat.num_mixtures()
# add UO2 mixtures
for m in xrange(uo2_lat.num_mixtures()):
vf = uo2_lat.f(m)
for f in vf:
table[ctr] = f
ctr = ctr + 1
# add MOX mixtures, skipping the clean mixes
for m in xrange(4, mox_lat.num_mixtures()):
vf = mox_lat.f(m)
for f in vf:
table[ctr] = f
ctr = ctr + 1
# make the cleanids
cleanids = [0, 1, 2, 3]
# the total core is 3x3 assemblies (2x2 fuel surrounded by water)
xylat = uo2_lat.xy_planes()
Nr = len(xylat) - 1
delta = Vec_Dbl(Nr, 0.0)
for i in xrange(Nr):
delta[i] = xylat[i+1] - xylat[i]
if Nr % 2 != 0:
print "Non-even lattices cells."
sys.exit(1)
# build the core planes
xycore = Vec_Dbl(int(2.5*Nr) + 1, 0.0)
for n in xrange(2):
for i in xrange(Nr):
index = i + n * Nr
xycore[index + 1] = xycore[index] + delta[i]
for i in xrange(Nr/2):
index = i + 2 * Nr
xycore[index + 1] = xycore[index] + delta[i]
# z-planes (14 in each assembly)
height = 14.28 * 1.5
Nz = 21
z = [0.0] * (Nz + 1)
dz = height / float(Nz)
for k in xrange(Nz):
z[k+1] = z[k] + dz
# get matids for each lattice
uo2ids = Vec_Int(uo2_lat.mixids())
moxids = Vec_Int(mox_lat.mixids())
# update the mox mixtures (leave clean zones alone)
for m in xrange(len(moxids)):
if moxids[m] > 3:
moxids[m] = moxids[m] + mox_offset - 4
# assign the matids
Nx = len(xycore) - 1
Ny = len(xycore) - 1
# arrangement
# |-----|-----|-----|
# | | | |
# | mod | mod | mod |
# | | | |
# |-----|-----|-----|
# | | | |
# | mox | uo2 | mod | y
# | | | |
# |-----|-----|-----|
# | | | |
# | uo2 | mox | mod |
# | | | |
# |-----|-----|-----|
# x
mixids = Vec_Int(Nx * Ny * Nz, 3)
kend = Nz / 2
# (0, 0) lattice
for k in xrange(kend):
for j in xrange(Nr):
for i in xrange(Nr):
lat_cell = i + j * Nr
cell = i + j * Ny + k * Nx * Ny
mixids[cell] = uo2ids[lat_cell]
# (1, 0) lattice
for k in xrange(kend):
for j in xrange(Nr):
for i in xrange(Nr):
lat_cell = i + j * Nr
cell = (i + Nr) + j * Ny + k * Nx * Ny
mixids[cell] = moxids[lat_cell]
# (0, 1) lattice
for k in xrange(kend):
for j in xrange(Nr):
for i in xrange(Nr):
lat_cell = i + j * Nr
cell = i + (j + Nr) * Ny + k * Nx * Ny
mixids[cell] = moxids[lat_cell]
# (1, 1) lattice
for k in xrange(kend):
for j in xrange(Nr):
for i in xrange(Nr):
lat_cell = i + j * Nr
cell = (i + Nr) + (j + Nr) * Ny + k * Nx * Ny
mixids[cell] = uo2ids[lat_cell]
return (xycore, z, mixids, cleanids, table)
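# Indexing sketch for the mixids returned above (matches the loops in this
# function): cell = i + j*Ny + k*Nx*Ny, so x varies fastest; the UO2/MOX
# lattices fill the lower-left 2x2 assemblies and the lower half of the z
# range, and every other cell keeps the default moderator mixture id 3.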
##---------------------------------------------------------------------------##
## DB
##---------------------------------------------------------------------------##
entries = {
"problem_type" : "FIXED_SOURCE",
"num_groups" : 7,
"downscatter" : False,
"Pn_order" : 0,
"tolerance" : 1.0e-3,
"max_itr" : 400,
"linear_solver_xml_file" : "azilut02.xml",
"boundary" : "reflect",
"boundary_db" : {"reflect" : [1, 0, 1, 0, 1, 0]},
"SPN_order" : 3
}
db = DB.from_dict(entries)
# decomposition
if nodes() == 1:
db.insert("num_blocks_i", 1)
db.insert("num_blocks_j", 1)
elif nodes() == 2:
db.insert("num_blocks_i", 2)
db.insert("num_blocks_j", 1)
elif nodes() == 16:
db.insert("num_blocks_i", 4)
db.insert("num_blocks_j", 4)
# Mesh
(r, z, mixids, cleanids, table) = build_mesh(10)
db.insert("x_edges", r)
db.insert("y_edges", r)
db.insert("z_edges", z)
##---------------------------------------------------------------------------##
## MANAGER
##---------------------------------------------------------------------------##
# make manager, material, and angles
manager = Manager()
mat = Mat()
# partition the problem
manager.partition(db, mat)
# get mapping and mesh objects
mapp = manager.get_map()
indexer = manager.get_indexer()
mesh = manager.get_mesh()
# global and local cell numbers
Gx = indexer.num_global(X)
Gy = indexer.num_global(Y)
Gz = mesh.num_cells_dim(Z)
Nx = mesh.num_cells_dim(X)
Ny = mesh.num_cells_dim(Y)
Nz = mesh.num_cells_dim(Z)
if node() == 0:
print ">>> Partitioned global mesh with %i x %i x %i cells" \
% (Gx, Gy, Gz)
##---------------------------------------------------------------------------##
## MATERIAL SETUP
##---------------------------------------------------------------------------##
# vacuum = 0
# UO2 = 1
# MOX = 2
# moderator = 3
# set database
xsdb = XS_DB(db)
xsdb.set_num(4)
xsdb.assign_zero(0)
for g in xrange(xsdb.num_groups()):
xsdb.assign_upscatter(1, g, T_UO2[g], U_UO2[g], S_UO2[g])
xsdb.assign_upscatter(2, g, T_MOX43[g], U_MOX43[g], S_MOX43[g])
xsdb.assign_upscatter(3, g, T_MOD1[g], U_MOD1[g], S_MOD1[g])
## Assign fission data
xsdb.assign_fission(1, NUF_UO2, C_UO2)
xsdb.assign_fission(2, NUF_MOX43, C_MOX43)
# make macro mixer
mixer = Macro_Mixer(xsdb)
mixer.set(cleanids, table)
# make the material database
mixer.mix_with_global_ids(mixids, mat)
##---------------------------------------------------------------------------##
## ENERGY PARTITIONING
##---------------------------------------------------------------------------##
manager.partition_energy(mat)
##---------------------------------------------------------------------------##
## SOURCE SETUP
##---------------------------------------------------------------------------##
# allocate source and problem state
source = Isotropic_Source()
manager.setup(source)
total = Gx * Gy * Gz
Ng = mat.num_groups()
srcids = Vec_Int(total, 0)
srcstr = Vec_Dbl(total, 0.0)
num_shapes = 2
shapes = Vec_Dbl(num_shapes * Ng, 0.0)
chi0 = xsdb.fission_data(1, 0, CHI)
chi1 = xsdb.fission_data(2, 0, CHI)
# source 0 spectrum -> UO2 Chi
# source 1 spectrum -> MOX Chi
# make shapes
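# shapes holds the two spectra back to back: entries [0, Ng) are the UO2 chi
# spectrum, entries [Ng, 2*Ng) are the MOX chi spectrum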
ctr = 0
for g in xrange(Ng):
shapes[ctr] = xsdb.fission_data(1, g, CHI)
ctr += 1
for g in xrange(Ng):
shapes[ctr] = xsdb.fission_data(2, g, CHI)
ctr += 1
# assign ids and strengths
for cell in xrange(total):
matid = mixids[cell]
if mat.assigned_fission(matid):
for g in xrange(Ng):
srcstr[cell] += mat.fission_data(matid, g, NU_SIGMA_F)
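        # pick the source spectrum by comparing the group-0 chi value against
        # the MOX chi; this relies on exact float equality of the stored data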
if mat.fission_data(matid, 0, CHI) == chi1:
srcids[cell] = 1
# set the source
source.set(num_shapes, shapes, srcids, srcstr)
##---------------------------------------------------------------------------##
## SOLVE
##---------------------------------------------------------------------------##
if node() == 0:
print ">>> Setup complete"
print ">>> Solving with %s differencing" % (manager.spatial_descriptor())
# solve the problem
manager.solve(source)
##---------------------------------------------------------------------------##
## OUTPUT
##---------------------------------------------------------------------------##
# make SILO output
silo = SILO()
silo.add_mixer(mixer)
silo.open("fs")
phi = Vec_Dbl(mesh.num_cells(), 0.0)
for g in xrange(Ng):
flux = manager.moments(g)
for cell in xrange(mesh.num_cells()):
phi[cell] = phi[cell] + flux.scalar_flux(cell)
silo.add("phi", phi)
silo.close()
##---------------------------------------------------------------------------##
## TIMING
##---------------------------------------------------------------------------##
# output final database (has class-dependent defaults)
db.output()
timer.stop()
time = timer.wall_clock()
keys = timer_keys()
if len(keys) > 0 and node() == 0:
print "\n"
print "TIMING : Problem ran in %16.6e seconds." % (time)
print "------------------------------------------------------------------"
for key in keys:
print "%30s : %16.6e %16.6e" % (key, timer_value(key) / time, timer_value(key))
print "------------------------------------------------------------------"
##---------------------------------------------------------------------------##
manager.close()
finalize()
###############################################################################
## end of fs.py
###############################################################################
|
bsd-3-clause
| -1,582,002,662,133,596,200 | -2,255,706,447,026,715,400 | 27.617241 | 111 | 0.457826 | false |
hejunbok/paparazzi
|
sw/ground_segment/python/udp_link/udp_link.py
|
21
|
8628
|
#!/usr/bin/env python
from ivy.std_api import *
import socket
import struct
import os
import logging
import sys
import threading
import time
sys.path.append(os.getenv("PAPARAZZI_HOME") + "/sw/lib/python")
import messages_xml_map
PING_PERIOD = 5.0
STATUS_PERIOD = 1.0
STX = 0x99
STX_TS = 0x98
DATALINK_PORT = 4243
DOWNLINK_PORT = 4242
class DownLinkStatus():
def __init__(self, ac_id, address):
self.ac_id = ac_id
self.address = address
self.rx_bytes = 0
self.rx_msgs = 0
self.run_time = 0
self.last_rx_bytes = 0
self.last_rx_msgs = 0
self.last_ping_time = 0
self.last_pong_time = 0
class IvyUdpLink():
def __init__(self):
self.InitIvy()
self.status_timer = threading.Timer(STATUS_PERIOD, self.sendStatus)
        self.ping_timer = threading.Timer(PING_PERIOD, self.sendPing)
self.ac_downlink_status = {}
self.rx_err = 0
messages_xml_map.ParseMessages()
self.data_types = {'float': ['f', 4],
'uint8': ['B', 1],
'uint16': ['H', 2],
'uint32': ['L', 4],
'int8': ['b', 1],
'int16': ['h', 2],
'int32': ['l', 4]
}
def Unpack(self, data_fields, type, start, length):
return struct.unpack(type, "".join(data_fields[start:start + length]))[0]
def InitIvy(self):
# initialising the bus
IvyInit("Link", # application name for Ivy
"READY", # ready message
0, # main loop is local (ie. using IvyMainloop)
lambda x, y: y, # handler called on connection/deconnection
lambda x, y: y # handler called when a diemessage is received
)
# starting the bus
logging.getLogger('Ivy').setLevel(logging.WARN)
IvyStart("")
IvyBindMsg(self.OnSettingMsg, "(^.* SETTING .*)")
def calculate_checksum(self, msg):
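        # Fletcher-style running checksum: ck_a is the byte sum, ck_b the
        # sum of the running ck_a values, both modulo 256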
ck_a = 0
ck_b = 0
# start char not included in checksum for pprz protocol
for c in msg[1:]:
ck_a = (ck_a + ord(c)) % 256
ck_b = (ck_b + ck_a) % 256
return (ck_a, ck_b)
def buildPprzMsg(self, msg_id, *args):
stx = STX
length = 6
sender = 0
msg_fields = messages_xml_map.message_dictionary_types["datalink"][msg_id]
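        # "=BBBB" packs the four header bytes: STX, total length, sender id,
        # message id; the payload format characters are appended below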
struct_string = "=BBBB"
typed_args = []
idx = 0
for msg_type in msg_fields:
struct_string += self.data_types[msg_type][0]
length += self.data_types[msg_type][1]
if (msg_type == "float"):
typed_args.append(float(args[idx]))
else:
typed_args.append(int(args[idx]))
idx += 1
msg = struct.pack(struct_string, stx, length, sender, msg_id, *typed_args)
(ck_a, ck_b) = self.calculate_checksum(msg)
msg = msg + struct.pack('=BB', ck_a, ck_b)
return msg
def OnSettingMsg(self, agent, *larg):
list = larg[0].split(' ')
sender = list[0]
msg_name = list[1]
ac_id = list[3]
args = list[2:]
msg_id = messages_xml_map.message_dictionary_name_id["datalink"][msg_name]
        if int(ac_id) in self.ac_downlink_status:
msgbuf = self.buildPprzMsg(msg_id, *args)
address = (self.ac_downlink_status[int(ac_id)].address[0], DATALINK_PORT)
self.server.sendto(msgbuf, address)
def sendPing(self):
for (ac_id, value) in self.ac_downlink_status.items():
msg_id = messages_xml_map.message_dictionary_name_id["datalink"]["PING"]
msgbuf = self.buildPprzMsg(msg_id)
address = (self.ac_downlink_status[int(ac_id)].address[0], DATALINK_PORT)
self.server.sendto(msgbuf, address)
value.last_ping_time = time.clock()
        self.ping_timer = threading.Timer(PING_PERIOD, self.sendPing)
self.ping_timer.start()
def sendStatus(self):
for (key, value) in self.ac_downlink_status.items():
IvySendMsg("%i DOWNLINK_STATUS %i %i %i %i %i %i %i" % (
value.ac_id,
value.run_time,
value.rx_bytes,
value.rx_msgs,
self.rx_err,
value.rx_bytes - value.last_rx_bytes,
value.rx_msgs - value.last_rx_msgs,
1000 * value.last_pong_time))
value.last_rx_bytes = value.rx_bytes
value.last_rx_msgs = value.rx_msgs
value.run_time = value.run_time + 1
self.status_timer = threading.Timer(STATUS_PERIOD, self.sendStatus)
self.status_timer.start()
def updateStatus(self, ac_id, length, address, isPong):
        if ac_id not in self.ac_downlink_status:
self.ac_downlink_status[ac_id] = DownLinkStatus(ac_id, address)
self.ac_downlink_status[ac_id].rx_msgs += 1
self.ac_downlink_status[ac_id].rx_bytes += length
if isPong:
self.ac_downlink_status[ac_id].last_pong_time = time.clock() - self.ac_downlink_status[ac_id].last_ping_time
def ProcessPacket(self, msg, address):
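        # A datagram may contain several PPRZ frames back to back; each frame
        # is laid out as parsed below:
        #   STX(1) | length(1) | [timestamp(4) if STX == STX_TS] | ac_id(1) |
        #   msg_id(1) | payload | ck_a(1) | ck_b(1)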
if len(msg) < 4:
self.rx_err = self.rx_err + 1
return
msg_offset = 0
while msg_offset < len(msg):
start_byte = ord(msg[msg_offset])
msg_start_idx = msg_offset
msg_offset = msg_offset + 1
if start_byte != STX and start_byte != STX_TS:
self.rx_err = self.rx_err + 1
return
msg_length = ord(msg[msg_offset])
msg_offset = msg_offset + 1
if (start_byte == STX_TS):
                timestamp = int(self.Unpack(msg, 'L', msg_offset, 4))  # parsed but currently unused
msg_offset = msg_offset + 4
ac_id = ord(msg[msg_offset])
msg_offset = msg_offset + 1
msg_id = ord(msg[msg_offset])
msg_offset = msg_offset + 1
msg_name = messages_xml_map.message_dictionary_id_name["telemetry"][msg_id]
msg_fields = messages_xml_map.message_dictionary_types["telemetry"][msg_id]
ivy_msg = "%i %s " % (ac_id, msg_name)
for field in msg_fields:
if field[-2:] == "[]":
baseType = field[:-2]
array_length = int(self.Unpack(msg, 'B', msg_offset, 1))
msg_offset = msg_offset + 1
for count in range(0, array_length):
array_value = str(
self.Unpack(msg, self.data_types[baseType][0], msg_offset, self.data_types[baseType][1]))
msg_offset = msg_offset + self.data_types[baseType][1]
if (count == array_length - 1):
ivy_msg += array_value + " "
else:
ivy_msg += array_value + ","
else:
ivy_msg += str(
self.Unpack(msg, self.data_types[field][0], msg_offset, self.data_types[field][1])) + " "
msg_offset = msg_offset + self.data_types[field][1]
if (msg_offset > len(msg)):
print "finished without parsing %s" % field
break
(ck_a, ck_b) = self.calculate_checksum(msg[msg_start_idx:msg_offset])
msg_ck_a = int(self.Unpack(msg, 'B', msg_offset, 1))
msg_offset += 1
msg_ck_b = int(self.Unpack(msg, 'B', msg_offset, 1))
msg_offset += 1
# check for valid checksum
if (ck_a, ck_b) == (msg_ck_a, msg_ck_b):
self.updateStatus(ac_id, msg_length, address,
msg_id == messages_xml_map.message_dictionary_name_id["telemetry"]["PONG"])
# strip off trailing whitespace
ivy_msg = ivy_msg[:-1]
IvySendMsg(ivy_msg)
def Run(self):
self.server = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.server.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
self.server.bind(('0.0.0.0', DOWNLINK_PORT))
self.status_timer.start()
self.ping_timer.start()
while True:
(msg, address) = self.server.recvfrom(2048)
self.ProcessPacket(msg, address)
def main():
udp_interface = IvyUdpLink()
udp_interface.Run()
if __name__ == '__main__':
main()
|
gpl-2.0
| 2,107,863,834,778,228,700 | -8,770,056,808,968,202,000 | 34.80083 | 120 | 0.521442 | false |
gurneyalex/odoo
|
addons/website/tests/test_qweb.py
|
3
|
6800
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import re
from odoo import http, tools
from odoo.addons.website.tools import MockRequest
from odoo.modules.module import get_module_resource
from odoo.tests.common import TransactionCase
class TestQweb(TransactionCase):
def _load(self, module, *args):
tools.convert_file(self.cr, 'website',
get_module_resource(module, *args),
{}, 'init', False, 'test', self.registry._assertion_report)
def test_qweb_cdn(self):
self._load('website', 'tests', 'template_qweb_test.xml')
website = self.env['website'].browse(1)
website.write({
"cdn_activated": True,
"cdn_url": "http://test.cdn"
})
demo = self.env['res.users'].search([('login', '=', 'demo')])[0]
demo.write({"signature": '''<span class="toto">
span<span class="fa"></span><img src="/web/image/1"/>
</span>'''})
demo_env = self.env(user=demo)
        html = demo_env['ir.qweb'].render('website.test_template', {"user": demo}, website_id=website.id)
html = html.strip().decode('utf8')
html = re.sub(r'\?unique=[^"]+', '', html).encode('utf8')
attachments = demo_env['ir.attachment'].search([('url', '=like', '/web/content/%-%/website.test_bundle.%')])
self.assertEqual(len(attachments), 2)
self.assertEqual(html, ("""<!DOCTYPE html>
<html>
<head>
<link rel="stylesheet" href="http://test.external.link/style1.css"/>
<link rel="stylesheet" href="http://test.external.link/style2.css"/>
<link type="text/css" rel="stylesheet" href="http://test.cdn%(css)s"/>
<meta/>
<script type="text/javascript" src="http://test.external.link/javascript1.js"></script>
<script type="text/javascript" src="http://test.external.link/javascript2.js"></script>
<script type="text/javascript" src="http://test.cdn%(js)s"></script>
</head>
<body>
<img src="http://test.external.link/img.png"/>
<img src="http://test.cdn/website/static/img.png"/>
<a href="http://test.external.link/link">x</a>
<a href="http://test.cdn/web/content/local_link">x</a>
<span style="background-image: url('http://test.cdn/web/image/2')">xxx</span>
<div widget="html"><span class="toto">
span<span class="fa"></span><img src="http://test.cdn/web/image/1">
</span></div>
<div widget="image"><img src="http://test.cdn/web/image/res.users/%(user_id)s/image_1920/%(filename)s" class="img img-fluid" alt="%(alt)s"/></div>
</body>
</html>""" % {
"js": attachments[0].url,
"css": attachments[1].url,
"user_id": demo.id,
"filename": "Marc%20Demo",
"alt": "Marc Demo",
}).encode('utf8'))
class TestQwebProcessAtt(TransactionCase):
def setUp(self):
super(TestQwebProcessAtt, self).setUp()
self.website = self.env['website'].browse(1)
self.env.ref('base.lang_fr').active = True
self.website.language_ids = self.env.ref('base.lang_en') + self.env.ref('base.lang_fr')
self.website.default_lang_id = self.env.ref('base.lang_en')
self.website.cdn_activated = True
self.website.cdn_url = "http://test.cdn"
self.website.cdn_filters = "\n".join(["^(/[a-z]{2}_[A-Z]{2})?/a$", "^(/[a-z]{2})?/a$", "^/b$"])
def _test_att(self, url, expect, tag='a', attribute='href'):
self.assertEqual(
self.env['ir.qweb']._post_processing_att(tag, {attribute: url}, {}),
expect
)
def test_process_att_no_request(self):
# no request so no URL rewriting
self._test_att('/', {'href': '/'})
self._test_att('/en/', {'href': '/en/'})
self._test_att('/fr/', {'href': '/fr/'})
# no URL rewritting for CDN
self._test_att('/a', {'href': '/a'})
def test_process_att_no_website(self):
with MockRequest(self.env):
# no website so URL rewriting
self._test_att('/', {'href': '/'})
self._test_att('/en/', {'href': '/en/'})
self._test_att('/fr/', {'href': '/fr/'})
# no URL rewritting for CDN
self._test_att('/a', {'href': '/a'})
def test_process_att_monolang_route(self):
with MockRequest(self.env, website=self.website, multilang=False):
# lang not changed in URL but CDN enabled
self._test_att('/a', {'href': 'http://test.cdn/a'})
self._test_att('/en/a', {'href': 'http://test.cdn/en/a'})
self._test_att('/b', {'href': 'http://test.cdn/b'})
self._test_att('/en/b', {'href': '/en/b'})
def test_process_att_no_request_lang(self):
with MockRequest(self.env, website=self.website):
self._test_att('/', {'href': '/'})
self._test_att('/en/', {'href': '/'})
self._test_att('/fr/', {'href': '/fr/'})
def test_process_att_with_request_lang(self):
with MockRequest(self.env, website=self.website, context={'lang': 'fr_FR'}):
self._test_att('/', {'href': '/fr/'})
self._test_att('/en/', {'href': '/'})
self._test_att('/fr/', {'href': '/fr/'})
def test_process_att_matching_cdn_and_lang(self):
with MockRequest(self.env, website=self.website):
# lang prefix is added before CDN
self._test_att('/a', {'href': 'http://test.cdn/a'})
self._test_att('/en/a', {'href': 'http://test.cdn/a'})
self._test_att('/fr/a', {'href': 'http://test.cdn/fr/a'})
self._test_att('/b', {'href': 'http://test.cdn/b'})
self._test_att('/en/b', {'href': 'http://test.cdn/b'})
self._test_att('/fr/b', {'href': '/fr/b'})
def test_process_att_no_route(self):
with MockRequest(self.env, website=self.website, context={'lang': 'fr_FR'}, routing=False):
# default on multilang=True if route is not /{module}/static/
self._test_att('/web/static/hi', {'href': '/web/static/hi'})
self._test_att('/my-page', {'href': '/fr/my-page'})
def test_process_att_url_crap(self):
with MockRequest(self.env, website=self.website):
match = http.root.get_db_router.return_value.bind.return_value.match
# #{fragment} is stripped from URL when testing route
self._test_att('/x#y?z', {'href': '/x#y?z'})
match.assert_called_with('/x', method='POST', query_args=None)
            match.reset_mock()
self._test_att('/x?y#z', {'href': '/x?y#z'})
match.assert_called_with('/x', method='POST', query_args='y')
|
agpl-3.0
| 3,506,843,397,490,566,700 | 907,845,395,139,432,100 | 44.333333 | 154 | 0.548971 | false |
jjmleiro/hue
|
desktop/core/ext-py/Django-1.6.10/django/forms/extras/widgets.py
|
117
|
4978
|
"""
Extra HTML Widget classes
"""
from __future__ import unicode_literals
import datetime
import re
from django.forms.widgets import Widget, Select
from django.utils import datetime_safe
from django.utils.dates import MONTHS
from django.utils.encoding import force_str
from django.utils.safestring import mark_safe
from django.utils.formats import get_format
from django.utils import six
from django.conf import settings
__all__ = ('SelectDateWidget',)
RE_DATE = re.compile(r'(\d{4})-(\d\d?)-(\d\d?)$')
def _parse_date_fmt():
fmt = get_format('DATE_FORMAT')
escaped = False
output = []
for char in fmt:
if escaped:
escaped = False
elif char == '\\':
escaped = True
elif char in 'Yy':
output.append('year')
#if not self.first_select: self.first_select = 'year'
elif char in 'bEFMmNn':
output.append('month')
#if not self.first_select: self.first_select = 'month'
elif char in 'dj':
output.append('day')
#if not self.first_select: self.first_select = 'day'
return output
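# For example, with DATE_FORMAT = 'N j, Y' this returns ['month', 'day', 'year'].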
class SelectDateWidget(Widget):
"""
A Widget that splits date input into three <select> boxes.
This also serves as an example of a Widget that has more than one HTML
element and hence implements value_from_datadict.
"""
none_value = (0, '---')
month_field = '%s_month'
day_field = '%s_day'
year_field = '%s_year'
def __init__(self, attrs=None, years=None, required=True):
# years is an optional list/tuple of years to use in the "year" select box.
self.attrs = attrs or {}
self.required = required
if years:
self.years = years
else:
this_year = datetime.date.today().year
self.years = range(this_year, this_year+10)
def render(self, name, value, attrs=None):
try:
year_val, month_val, day_val = value.year, value.month, value.day
except AttributeError:
year_val = month_val = day_val = None
if isinstance(value, six.string_types):
if settings.USE_L10N:
try:
input_format = get_format('DATE_INPUT_FORMATS')[0]
v = datetime.datetime.strptime(force_str(value), input_format)
year_val, month_val, day_val = v.year, v.month, v.day
except ValueError:
pass
else:
match = RE_DATE.match(value)
if match:
year_val, month_val, day_val = [int(v) for v in match.groups()]
choices = [(i, i) for i in self.years]
year_html = self.create_select(name, self.year_field, value, year_val, choices)
choices = list(six.iteritems(MONTHS))
month_html = self.create_select(name, self.month_field, value, month_val, choices)
choices = [(i, i) for i in range(1, 32)]
day_html = self.create_select(name, self.day_field, value, day_val, choices)
output = []
for field in _parse_date_fmt():
if field == 'year':
output.append(year_html)
elif field == 'month':
output.append(month_html)
elif field == 'day':
output.append(day_html)
return mark_safe('\n'.join(output))
def id_for_label(self, id_):
first_select = None
field_list = _parse_date_fmt()
if field_list:
first_select = field_list[0]
if first_select is not None:
return '%s_%s' % (id_, first_select)
else:
return '%s_month' % id_
def value_from_datadict(self, data, files, name):
y = data.get(self.year_field % name)
m = data.get(self.month_field % name)
d = data.get(self.day_field % name)
if y == m == d == "0":
return None
if y and m and d:
if settings.USE_L10N:
input_format = get_format('DATE_INPUT_FORMATS')[0]
try:
date_value = datetime.date(int(y), int(m), int(d))
except ValueError:
return '%s-%s-%s' % (y, m, d)
else:
date_value = datetime_safe.new_date(date_value)
return date_value.strftime(input_format)
else:
return '%s-%s-%s' % (y, m, d)
return data.get(name, None)
def create_select(self, name, field, value, val, choices):
if 'id' in self.attrs:
id_ = self.attrs['id']
else:
id_ = 'id_%s' % name
if not (self.required and val):
choices.insert(0, self.none_value)
local_attrs = self.build_attrs(id=field % id_)
s = Select(choices=choices)
select_html = s.render(field % name, val, local_attrs)
return select_html
|
apache-2.0
| -3,728,232,336,202,769,000 | 6,481,097,434,372,295,000 | 35.072464 | 90 | 0.544998 | false |
camptocamp/odoo
|
addons/account/project/wizard/account_analytic_journal_report.py
|
378
|
3164
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
class account_analytic_journal_report(osv.osv_memory):
_name = 'account.analytic.journal.report'
_description = 'Account Analytic Journal'
_columns = {
'date1': fields.date('Start of period', required=True),
'date2': fields.date('End of period', required=True),
'analytic_account_journal_id': fields.many2many('account.analytic.journal', 'account_analytic_journal_name', 'journal_line_id', 'journal_print_id', 'Analytic Journals', required=True),
}
_defaults = {
'date1': lambda *a: time.strftime('%Y-01-01'),
'date2': lambda *a: time.strftime('%Y-%m-%d')
}
def check_report(self, cr, uid, ids, context=None):
if context is None:
context = {}
data = self.read(cr, uid, ids)[0]
ids_list = []
if context.get('active_id',False):
ids_list.append(context.get('active_id',False))
else:
record = self.browse(cr,uid,ids[0],context=context)
for analytic_record in record.analytic_account_journal_id:
ids_list.append(analytic_record.id)
datas = {
'ids': ids_list,
'model': 'account.analytic.journal',
'form': data
}
context2 = context.copy()
context2['active_model'] = 'account.analytic.journal'
context2['active_ids'] = ids_list
return self.pool['report'].get_action(cr, uid, [], 'account.report_analyticjournal', data=datas, context=context2)
def default_get(self, cr, uid, fields, context=None):
if context is None:
context = {}
res = super(account_analytic_journal_report, self).default_get(cr, uid, fields, context=context)
        if 'active_ids' not in context:
journal_ids = self.pool.get('account.analytic.journal').search(cr, uid, [], context=context)
else:
journal_ids = context.get('active_ids')
if 'analytic_account_journal_id' in fields:
res.update({'analytic_account_journal_id': journal_ids})
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
| -7,511,414,624,349,327,000 | -1,539,813,810,621,608,000 | 41.756757 | 192 | 0.606195 | false |
priyaganti/rockstor-core
|
src/rockstor/smart_manager/views/detail_views.py
|
2
|
2971
|
"""
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from smart_manager.models import (ReplicaShare, ReplicaTrail, Replica,
ReceiveTrail,)
from smart_manager.serializers import (ReplicaShareSerializer,
ReplicaTrailSerializer,
ReplicaSerializer,
ReceiveTrailSerializer,)
import rest_framework_custom as rfc
from rest_framework.response import Response
class ReplicaShareDetailView(rfc.GenericView):
serializer_class = ReplicaShareSerializer
def get(self, *args, **kwargs):
try:
if ('sname' in self.kwargs):
data = ReplicaShare.objects.get(share=self.kwargs['sname'])
else:
data = ReplicaShare.objects.get(id=self.kwargs['rid'])
serialized_data = ReplicaShareSerializer(data)
return Response(serialized_data.data)
        except Exception:
return Response()
class ReplicaTrailDetailView(rfc.GenericView):
serializer_class = ReplicaTrailSerializer
def get(self, *args, **kwargs):
if ('rtid' in self.kwargs):
try:
return ReplicaTrail.objects.get(id=self.kwargs['rtid'])
            except Exception:
return Response()
class ReplicaDetailView(rfc.GenericView):
serializer_class = ReplicaSerializer
def get(self, *args, **kwargs):
if ('sname' in self.kwargs):
try:
data = Replica.objects.get(share=self.kwargs['sname'])
serialized_data = ReplicaSerializer(data)
return Response(serialized_data.data)
            except Exception:
return Response()
elif ('rid' in self.kwargs):
try:
data = Replica.objects.get(id=self.kwargs['rid'])
serialized_data = ReplicaSerializer(data)
return Response(serialized_data.data)
            except Exception:
return Response()
class ReceiveTrailDetailView(rfc.GenericView):
serializer_class = ReceiveTrailSerializer
def get(self, request, *args, **kwargs):
if ('rtid' in self.kwargs):
with self._handle_exception(request):
return ReceiveTrail.objects.get(id=self.kwargs['rtid'])
|
gpl-3.0
| 8,380,515,043,328,254,000 | 3,181,536,647,562,226,000 | 34.795181 | 75 | 0.629418 | false |
HydrelioxGitHub/home-assistant
|
homeassistant/components/intent_script/__init__.py
|
14
|
2945
|
"""Handle intents with scripts."""
import copy
import logging
import voluptuous as vol
from homeassistant.helpers import (
intent, template, script, config_validation as cv)
DOMAIN = 'intent_script'
CONF_INTENTS = 'intents'
CONF_SPEECH = 'speech'
CONF_ACTION = 'action'
CONF_CARD = 'card'
CONF_TYPE = 'type'
CONF_TITLE = 'title'
CONF_CONTENT = 'content'
CONF_TEXT = 'text'
CONF_ASYNC_ACTION = 'async_action'
DEFAULT_CONF_ASYNC_ACTION = False
CONFIG_SCHEMA = vol.Schema({
DOMAIN: {
cv.string: {
vol.Optional(CONF_ACTION): cv.SCRIPT_SCHEMA,
vol.Optional(CONF_ASYNC_ACTION,
default=DEFAULT_CONF_ASYNC_ACTION): cv.boolean,
vol.Optional(CONF_CARD): {
vol.Optional(CONF_TYPE, default='simple'): cv.string,
vol.Required(CONF_TITLE): cv.template,
vol.Required(CONF_CONTENT): cv.template,
},
vol.Optional(CONF_SPEECH): {
vol.Optional(CONF_TYPE, default='plain'): cv.string,
vol.Required(CONF_TEXT): cv.template,
}
}
}
}, extra=vol.ALLOW_EXTRA)
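# A minimal illustrative configuration.yaml entry matching the schema above
# (the intent name, sensor and message are made up for the example):
#
# intent_script:
#   GetTemperature:
#     speech:
#       text: "It is {{ states('sensor.temperature') }} degrees"
#     action:
#       - service: notify.notify
#         data_template:
#           message: "Someone asked for the temperature"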
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass, config):
"""Activate Alexa component."""
intents = copy.deepcopy(config[DOMAIN])
template.attach(hass, intents)
for intent_type, conf in intents.items():
if CONF_ACTION in conf:
conf[CONF_ACTION] = script.Script(
hass, conf[CONF_ACTION],
"Intent Script {}".format(intent_type))
intent.async_register(hass, ScriptIntentHandler(intent_type, conf))
return True
class ScriptIntentHandler(intent.IntentHandler):
"""Respond to an intent with a script."""
def __init__(self, intent_type, config):
"""Initialize the script intent handler."""
self.intent_type = intent_type
self.config = config
async def async_handle(self, intent_obj):
"""Handle the intent."""
speech = self.config.get(CONF_SPEECH)
card = self.config.get(CONF_CARD)
action = self.config.get(CONF_ACTION)
is_async_action = self.config.get(CONF_ASYNC_ACTION)
slots = {key: value['value'] for key, value
in intent_obj.slots.items()}
if action is not None:
if is_async_action:
intent_obj.hass.async_create_task(action.async_run(slots))
else:
await action.async_run(slots)
response = intent_obj.create_response()
if speech is not None:
response.async_set_speech(speech[CONF_TEXT].async_render(slots),
speech[CONF_TYPE])
if card is not None:
response.async_set_card(
card[CONF_TITLE].async_render(slots),
card[CONF_CONTENT].async_render(slots),
card[CONF_TYPE])
return response
|
apache-2.0
| 6,707,569,390,141,613,000 | 4,305,157,905,033,366,000 | 29.360825 | 76 | 0.591171 | false |
Jollytown/Garuda
|
server/garuda/lib/python2.7/site-packages/pip/download.py
|
61
|
30557
|
from __future__ import absolute_import
import cgi
import email.utils
import hashlib
import getpass
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys
import tempfile
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
import pip
from pip.exceptions import InstallationError, HashMismatch
from pip.models import PyPI
from pip.utils import (splitext, rmtree, format_size, display_path,
backup_dir, ask_path_exists, unpack_file)
from pip.utils.filesystem import check_path_owner
from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner
from pip.locations import write_delete_marker_file
from pip.vcs import vcs
from pip._vendor import requests, six
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.requests.packages import urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.six.moves import xmlrpc_client
__all__ = ['get_file_content',
'is_url', 'url_to_path', 'path_to_url',
'is_archive_file', 'unpack_vcs_link',
'unpack_file_url', 'is_vcs_url', 'is_file_url',
'unpack_http_url', 'unpack_url']
logger = logging.getLogger(__name__)
def user_agent():
"""
Return a string representing the user agent.
"""
data = {
"installer": {"name": "pip", "version": pip.__version__},
"python": platform.python_version(),
"implementation": {
"name": platform.python_implementation(),
},
}
if data["implementation"]["name"] == 'CPython':
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'PyPy':
if sys.pypy_version_info.releaselevel == 'final':
pypy_version_info = sys.pypy_version_info[:3]
else:
pypy_version_info = sys.pypy_version_info
data["implementation"]["version"] = ".".join(
[str(x) for x in pypy_version_info]
)
elif data["implementation"]["name"] == 'Jython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'IronPython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
if sys.platform.startswith("linux"):
distro = dict(filter(
lambda x: x[1],
zip(["name", "version", "id"], platform.linux_distribution()),
))
libc = dict(filter(
lambda x: x[1],
zip(["lib", "version"], platform.libc_ver()),
))
if libc:
distro["libc"] = libc
if distro:
data["distro"] = distro
if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
data["distro"] = {"name": "OS X", "version": platform.mac_ver()[0]}
if platform.system():
data.setdefault("system", {})["name"] = platform.system()
if platform.release():
data.setdefault("system", {})["release"] = platform.release()
if platform.machine():
data["cpu"] = platform.machine()
return "{data[installer][name]}/{data[installer][version]} {json}".format(
data=data,
json=json.dumps(data, separators=(",", ":"), sort_keys=True),
)
class MultiDomainBasicAuth(AuthBase):
def __init__(self, prompting=True):
self.prompting = prompting
self.passwords = {}
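        # credentials are stored per netloc, so a username/password entered
        # for one index host is never sent to a different host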
def __call__(self, req):
parsed = urllib_parse.urlparse(req.url)
# Get the netloc without any embedded credentials
netloc = parsed.netloc.rsplit("@", 1)[-1]
# Set the url of the request to the url without any credentials
req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])
# Use any stored credentials that we have for this netloc
username, password = self.passwords.get(netloc, (None, None))
# Extract credentials embedded in the url if we have none stored
if username is None:
username, password = self.parse_credentials(parsed.netloc)
if username or password:
# Store the username and password
self.passwords[netloc] = (username, password)
# Send the basic auth with this request
req = HTTPBasicAuth(username or "", password or "")(req)
# Attach a hook to handle 401 responses
req.register_hook("response", self.handle_401)
return req
def handle_401(self, resp, **kwargs):
# We only care about 401 responses, anything else we want to just
# pass through the actual response
if resp.status_code != 401:
return resp
        # We are not able to prompt the user, so simply return the response
if not self.prompting:
return resp
parsed = urllib_parse.urlparse(resp.url)
# Prompt the user for a new username and password
username = six.moves.input("User for %s: " % parsed.netloc)
password = getpass.getpass("Password: ")
# Store the new username and password to use for future requests
if username or password:
self.passwords[parsed.netloc] = (username, password)
# Consume content and release the original connection to allow our new
# request to reuse the same one.
resp.content
resp.raw.release_conn()
# Add our new username and password to the request
req = HTTPBasicAuth(username or "", password or "")(resp.request)
# Send our new request
new_resp = resp.connection.send(req, **kwargs)
new_resp.history.append(resp)
return new_resp
def parse_credentials(self, netloc):
if "@" in netloc:
userinfo = netloc.rsplit("@", 1)[0]
if ":" in userinfo:
return userinfo.split(":", 1)
return userinfo, None
return None, None
class LocalFSAdapter(BaseAdapter):
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
proxies=None):
pathname = url_to_path(request.url)
resp = Response()
resp.status_code = 200
resp.url = request.url
try:
stats = os.stat(pathname)
except OSError as exc:
resp.status_code = 404
resp.raw = exc
else:
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
resp.headers = CaseInsensitiveDict({
"Content-Type": content_type,
"Content-Length": stats.st_size,
"Last-Modified": modified,
})
resp.raw = open(pathname, "rb")
resp.close = resp.raw.close
return resp
def close(self):
pass
class SafeFileCache(FileCache):
"""
A file based cache which is safe to use even when the target directory may
not be accessible or writable.
"""
def __init__(self, *args, **kwargs):
super(SafeFileCache, self).__init__(*args, **kwargs)
# Check to ensure that the directory containing our cache directory
# is owned by the user current executing pip. If it does not exist
# we will check the parent directory until we find one that does exist.
# If it is not owned by the user executing pip then we will disable
# the cache and log a warning.
if not check_path_owner(self.directory):
logger.warning(
"The directory '%s' or its parent directory is not owned by "
"the current user and the cache has been disabled. Please "
"check the permissions and owner of that directory. If "
"executing pip with sudo, you may want the -H flag.",
self.directory,
)
# Set our directory to None to disable the Cache
self.directory = None
def get(self, *args, **kwargs):
# If we don't have a directory, then the cache should be a no-op.
if self.directory is None:
return
try:
return super(SafeFileCache, self).get(*args, **kwargs)
except (LockError, OSError, IOError):
# We intentionally silence this error, if we can't access the cache
# then we can just skip caching and process the request as if
# caching wasn't enabled.
pass
def set(self, *args, **kwargs):
# If we don't have a directory, then the cache should be a no-op.
if self.directory is None:
return
try:
return super(SafeFileCache, self).set(*args, **kwargs)
except (LockError, OSError, IOError):
# We intentionally silence this error, if we can't access the cache
# then we can just skip caching and process the request as if
# caching wasn't enabled.
pass
def delete(self, *args, **kwargs):
# If we don't have a directory, then the cache should be a no-op.
if self.directory is None:
return
try:
return super(SafeFileCache, self).delete(*args, **kwargs)
except (LockError, OSError, IOError):
# We intentionally silence this error, if we can't access the cache
# then we can just skip caching and process the request as if
# caching wasn't enabled.
pass
class InsecureHTTPAdapter(HTTPAdapter):
def cert_verify(self, conn, url, verify, cert):
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
class PipSession(requests.Session):
timeout = None
def __init__(self, *args, **kwargs):
retries = kwargs.pop("retries", 0)
cache = kwargs.pop("cache", None)
insecure_hosts = kwargs.pop("insecure_hosts", [])
super(PipSession, self).__init__(*args, **kwargs)
# Attach our User Agent to the request
self.headers["User-Agent"] = user_agent()
# Attach our Authentication handler to the session
self.auth = MultiDomainBasicAuth()
# Create our urllib3.Retry instance which will allow us to customize
# how we handle retries.
retries = urllib3.Retry(
# Set the total number of retries that a particular request can
# have.
total=retries,
# A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
# is typically considered a transient error so we'll go ahead and
# retry it.
status_forcelist=[503],
# Add a small amount of back off between failed requests in
# order to prevent hammering the service.
backoff_factor=0.25,
)
# We want to _only_ cache responses on securely fetched origins. We do
# this because we can't validate the response of an insecurely fetched
# origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
if cache:
secure_adapter = CacheControlAdapter(
cache=SafeFileCache(cache),
max_retries=retries,
)
else:
secure_adapter = HTTPAdapter(max_retries=retries)
# Our Insecure HTTPAdapter disables HTTPS validation. It does not
# support caching (see above) so we'll use it for all http:// URLs as
# well as any https:// host that we've marked as ignoring TLS errors
# for.
insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
self.mount("https://", secure_adapter)
self.mount("http://", insecure_adapter)
# Enable file:// urls
self.mount("file://", LocalFSAdapter())
# We want to use a non-validating adapter for any requests which are
# deemed insecure.
for host in insecure_hosts:
self.mount("https://{0}/".format(host), insecure_adapter)
def request(self, method, url, *args, **kwargs):
# Allow setting a default timeout on a session
kwargs.setdefault("timeout", self.timeout)
# Dispatch the actual request
return super(PipSession, self).request(method, url, *args, **kwargs)
def get_file_content(url, comes_from=None, session=None):
"""Gets the content of a file; it may be a filename, file: URL, or
http: URL. Returns (location, content). Content is unicode."""
if session is None:
raise TypeError(
"get_file_content() missing 1 required keyword argument: 'session'"
)
match = _scheme_re.search(url)
if match:
scheme = match.group(1).lower()
if (scheme == 'file' and comes_from
and comes_from.startswith('http')):
raise InstallationError(
'Requirements file %s references URL %s, which is local'
% (comes_from, url))
if scheme == 'file':
path = url.split(':', 1)[1]
path = path.replace('\\', '/')
match = _url_slash_drive_re.match(path)
if match:
path = match.group(1) + ':' + path.split('|', 1)[1]
path = urllib_parse.unquote(path)
if path.startswith('/'):
path = '/' + path.lstrip('/')
url = path
else:
# FIXME: catch some errors
resp = session.get(url)
resp.raise_for_status()
if six.PY3:
return resp.url, resp.text
else:
return resp.url, resp.content
try:
with open(url) as f:
content = f.read()
except IOError as exc:
raise InstallationError(
'Could not open requirements file: %s' % str(exc)
)
return url, content
_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
def is_url(name):
"""Returns true if the name looks like a URL"""
if ':' not in name:
return False
scheme = name.split(':', 1)[0].lower()
return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
def url_to_path(url):
"""
Convert a file: URL to a path.
"""
assert url.startswith('file:'), (
"You can only turn file: urls into filenames (not %r)" % url)
_, netloc, path, _, _ = urllib_parse.urlsplit(url)
# if we have a UNC path, prepend UNC share notation
if netloc:
netloc = '\\\\' + netloc
path = urllib_request.url2pathname(netloc + path)
return path
def path_to_url(path):
"""
Convert a path to a file: URL. The path will be made absolute and have
quoted path parts.
"""
path = os.path.normpath(os.path.abspath(path))
url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
return url
def is_archive_file(name):
"""Return True if `name` is a considered as an archive file."""
archives = (
'.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.whl'
)
ext = splitext(name)[1].lower()
if ext in archives:
return True
return False
def unpack_vcs_link(link, location, only_download=False):
vcs_backend = _get_used_vcs_backend(link)
if only_download:
vcs_backend.export(location)
else:
vcs_backend.unpack(location)
def _get_used_vcs_backend(link):
for backend in vcs.backends:
if link.scheme in backend.schemes:
vcs_backend = backend(link.url)
return vcs_backend
def is_vcs_url(link):
return bool(_get_used_vcs_backend(link))
def is_file_url(link):
return link.url.lower().startswith('file:')
def _check_hash(download_hash, link):
if download_hash.digest_size != hashlib.new(link.hash_name).digest_size:
logger.critical(
"Hash digest size of the package %d (%s) doesn't match the "
"expected hash name %s!",
download_hash.digest_size, link, link.hash_name,
)
raise HashMismatch('Hash name mismatch for package %s' % link)
if download_hash.hexdigest() != link.hash:
logger.critical(
"Hash of the package %s (%s) doesn't match the expected hash %s!",
link, download_hash.hexdigest(), link.hash,
)
raise HashMismatch(
'Bad %s hash for package %s' % (link.hash_name, link)
)
def _get_hash_from_file(target_file, link):
try:
download_hash = hashlib.new(link.hash_name)
except (ValueError, TypeError):
logger.warning(
"Unsupported hash name %s for package %s", link.hash_name, link,
)
return None
with open(target_file, 'rb') as fp:
while True:
chunk = fp.read(4096)
if not chunk:
break
download_hash.update(chunk)
return download_hash
def _download_url(resp, link, content_file):
download_hash = None
if link.hash and link.hash_name:
try:
download_hash = hashlib.new(link.hash_name)
except ValueError:
logger.warning(
"Unsupported hash name %s for package %s",
link.hash_name, link,
)
try:
total_length = int(resp.headers['content-length'])
except (ValueError, KeyError, TypeError):
total_length = 0
cached_resp = getattr(resp, "from_cache", False)
if logger.getEffectiveLevel() > logging.INFO:
show_progress = False
elif cached_resp:
show_progress = False
elif total_length > (40 * 1000):
show_progress = True
elif not total_length:
show_progress = True
else:
show_progress = False
show_url = link.show_url
def resp_read(chunk_size):
try:
# Special case for urllib3.
for chunk in resp.raw.stream(
chunk_size,
# We use decode_content=False here because we do
# want urllib3 to mess with the raw bytes we get
# from the server. If we decompress inside of
# urllib3 then we cannot verify the checksum
# because the checksum will be of the compressed
# file. This breakage will only occur if the
# server adds a Content-Encoding header, which
# depends on how the server was configured:
# - Some servers will notice that the file isn't a
# compressible file and will leave the file alone
# and with an empty Content-Encoding
# - Some servers will notice that the file is
# already compressed and will leave the file
# alone and will add a Content-Encoding: gzip
# header
# - Some servers won't notice anything at all and
# will take a file that's already been compressed
# and compress it again and set the
# Content-Encoding: gzip header
#
# By setting this not to decode automatically we
# hope to eliminate problems with the second case.
decode_content=False):
yield chunk
except AttributeError:
# Standard file-like object.
while True:
chunk = resp.raw.read(chunk_size)
if not chunk:
break
yield chunk
progress_indicator = lambda x, *a, **k: x
if link.netloc == PyPI.netloc:
url = show_url
else:
url = link.url_without_fragment
if show_progress: # We don't show progress on cached responses
if total_length:
logger.info(
"Downloading %s (%s)", url, format_size(total_length),
)
progress_indicator = DownloadProgressBar(
max=total_length,
).iter
else:
logger.info("Downloading %s", url)
progress_indicator = DownloadProgressSpinner().iter
elif cached_resp:
logger.info("Using cached %s", url)
else:
logger.info("Downloading %s", url)
logger.debug('Downloading from URL %s', link)
for chunk in progress_indicator(resp_read(4096), 4096):
if download_hash is not None:
download_hash.update(chunk)
content_file.write(chunk)
if link.hash and link.hash_name:
_check_hash(download_hash, link)
return download_hash
def _copy_file(filename, location, content_type, link):
copy = True
download_location = os.path.join(location, link.filename)
if os.path.exists(download_location):
response = ask_path_exists(
'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
display_path(download_location), ('i', 'w', 'b'))
if response == 'i':
copy = False
elif response == 'w':
logger.warning('Deleting %s', display_path(download_location))
os.remove(download_location)
elif response == 'b':
dest_file = backup_dir(download_location)
logger.warning(
'Backing up %s to %s',
display_path(download_location),
display_path(dest_file),
)
shutil.move(download_location, dest_file)
if copy:
shutil.copy(filename, download_location)
logger.info('Saved %s', display_path(download_location))
def unpack_http_url(link, location, download_dir=None, session=None):
if session is None:
raise TypeError(
"unpack_http_url() missing 1 required keyword argument: 'session'"
)
temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
# If a download dir is specified, is the file already downloaded there?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(link, download_dir)
if already_downloaded_path:
from_path = already_downloaded_path
content_type = mimetypes.guess_type(from_path)[0]
else:
# let's download to a tmp dir
from_path, content_type = _download_http_url(link, session, temp_dir)
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type, link)
# a download dir is specified; let's copy the archive there
if download_dir and not already_downloaded_path:
_copy_file(from_path, download_dir, content_type, link)
if not already_downloaded_path:
os.unlink(from_path)
rmtree(temp_dir)
def unpack_file_url(link, location, download_dir=None):
"""Unpack link into location.
If download_dir is provided and link points to a file, make a copy
of the link file inside download_dir."""
link_path = url_to_path(link.url_without_fragment)
# If it's a url to a local directory
if os.path.isdir(link_path):
if os.path.isdir(location):
rmtree(location)
shutil.copytree(link_path, location, symlinks=True)
if download_dir:
logger.info('Link is a directory, ignoring download_dir')
return
# if link has a hash, let's confirm it matches
if link.hash:
link_path_hash = _get_hash_from_file(link_path, link)
_check_hash(link_path_hash, link)
# If a download dir is specified, is the file already there and valid?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(link, download_dir)
if already_downloaded_path:
from_path = already_downloaded_path
else:
from_path = link_path
content_type = mimetypes.guess_type(from_path)[0]
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type, link)
# a download dir is specified and not already downloaded
if download_dir and not already_downloaded_path:
_copy_file(from_path, download_dir, content_type, link)
class PipXmlrpcTransport(xmlrpc_client.Transport):
"""Provide a `xmlrpclib.Transport` implementation via a `PipSession`
object.
"""
def __init__(self, index_url, session, use_datetime=False):
xmlrpc_client.Transport.__init__(self, use_datetime)
index_parts = urllib_parse.urlparse(index_url)
self._scheme = index_parts.scheme
self._session = session
def request(self, host, handler, request_body, verbose=False):
parts = (self._scheme, host, handler, None, None, None)
url = urllib_parse.urlunparse(parts)
try:
headers = {'Content-Type': 'text/xml'}
response = self._session.post(url, data=request_body,
headers=headers, stream=True)
response.raise_for_status()
self.verbose = verbose
return self.parse_response(response.raw)
except requests.HTTPError as exc:
logger.critical(
"HTTP error %s while getting %s",
exc.response.status_code, url,
)
raise
def unpack_url(link, location, download_dir=None,
only_download=False, session=None):
"""Unpack link.
If link is a VCS link:
if only_download, export into download_dir and ignore location
else unpack into location
for other types of link:
- unpack into location
- if download_dir, copy the file into download_dir
- if only_download, mark location for deletion
"""
# non-editable vcs urls
if is_vcs_url(link):
unpack_vcs_link(link, location, only_download)
# file urls
elif is_file_url(link):
unpack_file_url(link, location, download_dir)
if only_download:
write_delete_marker_file(location)
# http urls
else:
if session is None:
session = PipSession()
unpack_http_url(
link,
location,
download_dir,
session,
)
if only_download:
write_delete_marker_file(location)
def _download_http_url(link, session, temp_dir):
"""Download link url into temp_dir using provided session"""
target_url = link.url.split('#', 1)[0]
try:
resp = session.get(
target_url,
# We use Accept-Encoding: identity here because requests
# defaults to accepting compressed responses. This breaks in
# a variety of ways depending on how the server is configured.
# - Some servers will notice that the file isn't a compressible
# file and will leave the file alone and with an empty
# Content-Encoding
# - Some servers will notice that the file is already
# compressed and will leave the file alone and will add a
# Content-Encoding: gzip header
# - Some servers won't notice anything at all and will take
# a file that's already been compressed and compress it again
# and set the Content-Encoding: gzip header
# By setting this to request only the identity encoding We're
# hoping to eliminate the third case. Hopefully there does not
# exist a server which when given a file will notice it is
# already compressed and that you're not asking for a
# compressed file and will then decompress it before sending
# because if that's the case I don't think it'll ever be
# possible to make this work.
headers={"Accept-Encoding": "identity"},
stream=True,
)
resp.raise_for_status()
except requests.HTTPError as exc:
logger.critical(
"HTTP error %s while getting %s", exc.response.status_code, link,
)
raise
content_type = resp.headers.get('content-type', '')
filename = link.filename # fallback
# Have a look at the Content-Disposition header for a better guess
content_disposition = resp.headers.get('content-disposition')
if content_disposition:
type, params = cgi.parse_header(content_disposition)
# We use ``or`` here because we don't want to use an "empty" value
# from the filename param.
filename = params.get('filename') or filename
ext = splitext(filename)[1]
if not ext:
ext = mimetypes.guess_extension(content_type)
if ext:
filename += ext
if not ext and link.url != resp.url:
ext = os.path.splitext(resp.url)[1]
if ext:
filename += ext
file_path = os.path.join(temp_dir, filename)
with open(file_path, 'wb') as content_file:
_download_url(resp, link, content_file)
return file_path, content_type
def _check_download_dir(link, download_dir):
""" Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
"""
download_path = os.path.join(download_dir, link.filename)
if os.path.exists(download_path):
# If already downloaded, does its hash match?
logger.info('File was already downloaded %s', download_path)
if link.hash:
download_hash = _get_hash_from_file(download_path, link)
try:
_check_hash(download_hash, link)
except HashMismatch:
logger.warning(
'Previously-downloaded file %s has bad hash, '
're-downloading.',
download_path
)
os.unlink(download_path)
return None
return download_path
return None
|
mit
| -9,040,276,209,840,403,000 | 5,067,595,743,050,437,000 | 34.203917 | 79 | 0.597146 | false |
MwanzanFelipe/rockletonfortune
|
lib/django/conf/urls/__init__.py
|
264
|
4592
|
import warnings
from importlib import import_module
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import (
LocaleRegexURLResolver, RegexURLPattern, RegexURLResolver,
)
from django.utils import six
from django.utils.deprecation import (
RemovedInDjango20Warning, RemovedInDjango110Warning,
)
__all__ = ['handler400', 'handler403', 'handler404', 'handler500', 'include', 'patterns', 'url']
handler400 = 'django.views.defaults.bad_request'
handler403 = 'django.views.defaults.permission_denied'
handler404 = 'django.views.defaults.page_not_found'
handler500 = 'django.views.defaults.server_error'
def include(arg, namespace=None, app_name=None):
if app_name and not namespace:
raise ValueError('Must specify a namespace if specifying app_name.')
if app_name:
warnings.warn(
'The app_name argument to django.conf.urls.include() is deprecated. '
'Set the app_name in the included URLconf instead.',
RemovedInDjango20Warning, stacklevel=2
)
if isinstance(arg, tuple):
# callable returning a namespace hint
try:
urlconf_module, app_name = arg
except ValueError:
if namespace:
raise ImproperlyConfigured(
'Cannot override the namespace for a dynamic module that provides a namespace'
)
warnings.warn(
'Passing a 3-tuple to django.conf.urls.include() is deprecated. '
'Pass a 2-tuple containing the list of patterns and app_name, '
'and provide the namespace argument to include() instead.',
RemovedInDjango20Warning, stacklevel=2
)
urlconf_module, app_name, namespace = arg
else:
# No namespace hint - use manually provided namespace
urlconf_module = arg
if isinstance(urlconf_module, six.string_types):
urlconf_module = import_module(urlconf_module)
patterns = getattr(urlconf_module, 'urlpatterns', urlconf_module)
app_name = getattr(urlconf_module, 'app_name', app_name)
if namespace and not app_name:
warnings.warn(
'Specifying a namespace in django.conf.urls.include() without '
'providing an app_name is deprecated. Set the app_name attribute '
'in the included module, or pass a 2-tuple containing the list of '
'patterns and app_name instead.',
RemovedInDjango20Warning, stacklevel=2
)
namespace = namespace or app_name
# Make sure we can iterate through the patterns (without this, some
# testcases will break).
if isinstance(patterns, (list, tuple)):
for url_pattern in patterns:
# Test if the LocaleRegexURLResolver is used within the include;
# this should throw an error since this is not allowed!
if isinstance(url_pattern, LocaleRegexURLResolver):
raise ImproperlyConfigured(
'Using i18n_patterns in an included URLconf is not allowed.')
return (urlconf_module, app_name, namespace)
def patterns(prefix, *args):
warnings.warn(
'django.conf.urls.patterns() is deprecated and will be removed in '
'Django 1.10. Update your urlpatterns to be a list of '
'django.conf.urls.url() instances instead.',
RemovedInDjango110Warning, stacklevel=2
)
pattern_list = []
for t in args:
if isinstance(t, (list, tuple)):
t = url(prefix=prefix, *t)
elif isinstance(t, RegexURLPattern):
t.add_prefix(prefix)
pattern_list.append(t)
return pattern_list
def url(regex, view, kwargs=None, name=None, prefix=''):
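    # Typical usage (illustrative pattern and view name):
    #   url(r'^articles/(?P<year>[0-9]{4})/$', views.year_archive, name='year-archive')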
if isinstance(view, (list, tuple)):
# For include(...) processing.
urlconf_module, app_name, namespace = view
return RegexURLResolver(regex, urlconf_module, kwargs, app_name=app_name, namespace=namespace)
else:
if isinstance(view, six.string_types):
warnings.warn(
'Support for string view arguments to url() is deprecated and '
'will be removed in Django 1.10 (got %s). Pass the callable '
'instead.' % view,
RemovedInDjango110Warning, stacklevel=2
)
if not view:
raise ImproperlyConfigured('Empty URL pattern view name not permitted (for pattern %r)' % regex)
if prefix:
view = prefix + '.' + view
return RegexURLPattern(regex, view, kwargs, name)
|
bsd-3-clause
| 7,735,792,876,150,269,000 | -1,509,157,517,079,146,200 | 39.637168 | 112 | 0.639155 | false |
Mariaanisimova/pythonintask
|
INBa/2015/Shemenev_A_V/task_100_30.py
|
1
|
3036
|
#Task No. 10, Variant 30
#Write a "Character Generator" program for a game. The user gets 30 points
#to distribute among four attributes: Strength, Health, Wisdom and Agility.
#The user must be able not only to take points from the common "pool", but
#also to return points to it from attributes he decided to reassign.
#Shemenev A.V
#28.04.2016
print ("""
Welcome to the "Character Generator".
You can distribute 30 points among 4 attributes:
Strength, Health, Wisdom and Agility. You can both take points from the
common pool and return them to it. Distribute the points wisely. Good luck!
""")
STR=0
HP=0
INT=0
AGL=0
point=30
number=0
print("To change Strength, type 'Strength'. For Health, type 'Health'. For Wisdom, type 'Wisdom'. For Agility, type 'Agility'.")
while True:
    if STR<0 or HP<0 or INT<0 or AGL<0 or point>30:
        print("Error")
        break
        #number=int(input("Type it again"))
    elif point==0:
        print("You have distributed all points. The result:\nStrength:",STR,"\nHealth:",HP,"\nWisdom:",INT,"\nAgility:",AGL)
        break
    print("Your points:\nStrength:",STR,"\nHealth:",HP,"\nWisdom:",INT,"\nAgility:",AGL,"\nUndistributed points:",point)
    user_input=input("")
    if user_input=="Strength" :
        number=int(input("How many do you want to add (or take back)?"))
        if number <= point :
            STR+=number
            point-=number
        else :
            print('Too many')
    elif user_input=="Health":
        number=int(input("How many do you want to add (or take back)?"))
        if number <= point :
            HP+=number
            point-=number
        else :
            print('Too many')
    elif user_input=="Wisdom":
        number=int(input("How many do you want to add (or take back)?"))
        if number <= point :
            INT+=number
            point-=number
        else :
            print('Too many')
    elif user_input=="Agility":
        number=int(input("How many do you want to add (or take back)?"))
        if number <= point :
            AGL+=number
            point-=number
        else :
            print('Too many')
input("Press Enter to exit.")
|
apache-2.0
| -2,034,392,913,455,707,000 | -1,487,323,271,306,495,700 | 32.721311 | 148 | 0.707827 | false |
SangramChavan/Ubuntu-16.04-new-installation
|
GreenHat.py
|
5
|
1211
|
# Copyright (c) 2015 Angus H. (4148)
# Distributed under the GNU General Public License v3.0 (GPLv3).
from datetime import date, timedelta
from random import randint
from time import sleep
import sys
import subprocess
import os
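# Usage (inferred from the argument handling in main() below, not documented
# in the original):
#   python GreenHat.py N [YYYY-MM-DD]
# makes 1-10 fake commits for each day from the start date back N days.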
# returns a date string for the date that is N days before STARTDATE
def get_date_string(n, startdate):
d = startdate - timedelta(days=n)
	rtn = d.strftime("%a %b %d %X %Y -0400")  # a plain date has no tz, so %z expanded to nothing; keep the literal offset
return rtn
# main app
def main(argv):
if len(argv) < 1 or len(argv) > 2:
print "Error: Bad input."
sys.exit(1)
n = int(argv[0])
if len(argv) == 1:
startdate = date.today()
if len(argv) == 2:
startdate = date(int(argv[1][0:4]), int(argv[1][5:7]), int(argv[1][8:10]))
i = 0
while i <= n:
curdate = get_date_string(i, startdate)
num_commits = randint(1, 10)
for commit in range(0, num_commits):
subprocess.call("echo '" + curdate + str(randint(0, 1000000)) +"' > realwork.txt; git add realwork.txt; GIT_AUTHOR_DATE='" + curdate + "' GIT_COMMITTER_DATE='" + curdate + "' git commit -m 'update'; git push;", shell=True)
sleep(.5)
i += 1
subprocess.call("git rm realwork.txt; git commit -m 'delete'; git push;", shell=True)
if __name__ == "__main__":
main(sys.argv[1:])
|
mit
| 6,014,841,997,278,207,000 | 9,128,369,945,450,903,000 | 30.868421 | 225 | 0.655656 | false |
jereze/scikit-learn
|
examples/cluster/plot_feature_agglomeration_vs_univariate_selection.py
|
218
|
3893
|
"""
==============================================
Feature agglomeration vs. univariate selection
==============================================
This example compares 2 dimensionality reduction strategies:
- univariate feature selection with Anova
- feature agglomeration with Ward hierarchical clustering
Both methods are compared in a regression problem using
a BayesianRidge as supervised estimator.
"""
# Author: Alexandre Gramfort <[email protected]>
# License: BSD 3 clause
print(__doc__)
import shutil
import tempfile
import numpy as np
import matplotlib.pyplot as plt
from scipy import linalg, ndimage
from sklearn.feature_extraction.image import grid_to_graph
from sklearn import feature_selection
from sklearn.cluster import FeatureAgglomeration
from sklearn.linear_model import BayesianRidge
from sklearn.pipeline import Pipeline
from sklearn.grid_search import GridSearchCV
from sklearn.externals.joblib import Memory
from sklearn.cross_validation import KFold
###############################################################################
# Generate data
n_samples = 200
size = 40 # image size
roi_size = 15
snr = 5.
np.random.seed(0)
mask = np.ones([size, size], dtype=np.bool)
coef = np.zeros((size, size))
coef[0:roi_size, 0:roi_size] = -1.
coef[-roi_size:, -roi_size:] = 1.
X = np.random.randn(n_samples, size ** 2)
for x in X: # smooth data
x[:] = ndimage.gaussian_filter(x.reshape(size, size), sigma=1.0).ravel()
X -= X.mean(axis=0)
X /= X.std(axis=0)
y = np.dot(X, coef.ravel())
noise = np.random.randn(y.shape[0])
noise_coef = (linalg.norm(y, 2) / np.exp(snr / 20.)) / linalg.norm(noise, 2)
y += noise_coef * noise # add noise
###############################################################################
# Compute the coefs of a Bayesian Ridge with GridSearch
cv = KFold(len(y), 2) # cross-validation generator for model selection
ridge = BayesianRidge()
cachedir = tempfile.mkdtemp()
mem = Memory(cachedir=cachedir, verbose=1)
# Ward agglomeration followed by BayesianRidge
connectivity = grid_to_graph(n_x=size, n_y=size)
ward = FeatureAgglomeration(n_clusters=10, connectivity=connectivity,
memory=mem)
clf = Pipeline([('ward', ward), ('ridge', ridge)])
# Select the optimal number of parcels with grid search
clf = GridSearchCV(clf, {'ward__n_clusters': [10, 20, 30]}, n_jobs=1, cv=cv)
clf.fit(X, y) # set the best parameters
coef_ = clf.best_estimator_.steps[-1][1].coef_
coef_ = clf.best_estimator_.steps[0][1].inverse_transform(coef_)
coef_agglomeration_ = coef_.reshape(size, size)
# Anova univariate feature selection followed by BayesianRidge
f_regression = mem.cache(feature_selection.f_regression) # caching function
anova = feature_selection.SelectPercentile(f_regression)
clf = Pipeline([('anova', anova), ('ridge', ridge)])
# Select the optimal percentage of features with grid search
clf = GridSearchCV(clf, {'anova__percentile': [5, 10, 20]}, cv=cv)
clf.fit(X, y) # set the best parameters
coef_ = clf.best_estimator_.steps[-1][1].coef_
coef_ = clf.best_estimator_.steps[0][1].inverse_transform(coef_)
coef_selection_ = coef_.reshape(size, size)
###############################################################################
# Inverse the transformation to plot the results on an image
plt.close('all')
plt.figure(figsize=(7.3, 2.7))
plt.subplot(1, 3, 1)
plt.imshow(coef, interpolation="nearest", cmap=plt.cm.RdBu_r)
plt.title("True weights")
plt.subplot(1, 3, 2)
plt.imshow(coef_selection_, interpolation="nearest", cmap=plt.cm.RdBu_r)
plt.title("Feature Selection")
plt.subplot(1, 3, 3)
plt.imshow(coef_agglomeration_, interpolation="nearest", cmap=plt.cm.RdBu_r)
plt.title("Feature Agglomeration")
plt.subplots_adjust(0.04, 0.0, 0.98, 0.94, 0.16, 0.26)
plt.show()
# Attempt to remove the temporary cachedir, but don't worry if it fails
shutil.rmtree(cachedir, ignore_errors=True)
|
bsd-3-clause
| -63,375,413,048,758,840 | -719,697,906,575,825,900 | 35.046296 | 79 | 0.670434 | false |
kanpol/eli
|
2012/plugins_python/htmlize/core.py
|
13
|
2324
|
#-------------------------------------------------------------------------------
# htmlize: htmlize/core.py
#
# The core functionality of htmlize.
#
# Eli Bendersky ([email protected])
# This code is in the public domain
#-------------------------------------------------------------------------------
from collections import namedtuple
import re
# Regex for matching/capturing role text.
# E.g. :name:`text` - first capture group is "name", second group is "text"
#
ROLE_REGEX = re.compile(r':(\w+):`([^`]*)`')
RoleMatch = namedtuple('RoleMatch', 'name contents')
def htmlize(post, db, plugins=[]):
    """ Render a post's raw contents as HTML: split the text into <p>
        paragraphs and let the given plugin classes expand :role:`text`
        markers and post-process the final contents.
    """
contents = post.contents
# Plugins are classes - we need to instantiate them to get objects.
plugins = [P(post, db) for P in plugins]
# Split the contents to paragraphs
paragraphs = re.split(r'\n\n+', contents)
for i, p in enumerate(paragraphs):
paragraphs[i] = '<p>' + p.replace('\n', ' ') + '</p>'
contents = '\n\n'.join(paragraphs)
# Find roles in the contents. Create a list of parts, where each
# part is either text that has no roles in it, or a RoleMatch
# object.
pos = 0
parts = []
while True:
match = ROLE_REGEX.search(contents, pos)
if match is None:
parts.append(contents[pos:])
break
parts.append(contents[pos:match.start()])
parts.append(RoleMatch(match.group(1), match.group(2)))
pos = match.end()
# Ask plugins to act on roles
for i, part in enumerate(parts):
if isinstance(part, RoleMatch):
parts[i] = _plugin_replace_role(
part.name, part.contents, plugins)
# Build full contents back again, and ask plugins to act on
# contents.
contents = ''.join(parts)
for p in plugins:
contents_hook = p.get_contents_hook()
if contents_hook:
contents = contents_hook(contents)
return contents
def _plugin_replace_role(name, contents, plugins):
""" The first plugin that handles this role is used.
"""
for p in plugins:
role_hook = p.get_role_hook(name)
if role_hook:
return role_hook(contents)
# If no plugin handling this role is found, return its original form
return ':{0}:`{1}`'.format(name, contents)
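# A minimal usage sketch (not part of the original module): Post and EmphRole
# below are hypothetical stand-ins for the real blog post object and plugins.
if __name__ == '__main__':
    class Post(object):
        def __init__(self, contents):
            self.contents = contents
    class EmphRole(object):
        # Toy plugin: renders :emph:`text` as <em>text</em>.
        def __init__(self, post, db):
            pass
        def get_role_hook(self, name):
            if name == 'emph':
                return lambda contents: '<em>%s</em>' % contents
            return None
        def get_contents_hook(self):
            return None
    post = Post('Hello :emph:`world`.\n\nSecond paragraph.')
    print(htmlize(post, db=None, plugins=[EmphRole]))
    # -> <p>Hello <em>world</em>.</p> ... <p>Second paragraph.</p>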
|
unlicense
| -4,954,257,668,341,913,000 | 4,383,446,216,892,196,400 | 29.181818 | 80 | 0.571859 | false |
jeremiedecock/snippets
|
python/matplotlib/hist_logscale_x.py
|
1
|
1804
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Make a histogram using a logarithmic scale on X axis
See:
- http://stackoverflow.com/questions/6855710/how-to-have-logarithmic-bins-in-a-python-histogram
"""
import numpy as np
import matplotlib.pyplot as plt
# SETUP #######################################################################
# histtype : [‘bar’ | ‘barstacked’ | ‘step’ | ‘stepfilled’]
HIST_TYPE='bar'
ALPHA=0.5
# MAKE DATA ###################################################################
data = np.random.exponential(size=1000000)
#data = np.abs(np.random.normal(size=1000000) * 10000.)
#data = np.random.chisquare(10, size=1000000)
# INIT FIGURE #################################################################
fig = plt.figure(figsize=(8.0, 6.0))
# AX1 #########################################################################
ax1 = fig.add_subplot(211)
res_tuple = ax1.hist(data,
bins=50,
histtype=HIST_TYPE,
alpha=ALPHA)
ax1.set_title("Normal scale")
ax1.set_xlabel("Value")
ax1.set_ylabel("Count")
# AX2 #########################################################################
ax2 = fig.add_subplot(212)
vmin = np.log10(data.min())
vmax = np.log10(data.max())
bins = np.logspace(vmin, vmax, 50) # <- make a range from 10**vmin to 10**vmax
print(bins)
res_tuple = ax2.hist(data,
bins=bins,
histtype=HIST_TYPE,
alpha=ALPHA)
ax2.set_xscale("log") # <- Activate log scale on X axis
ax2.set_title("Log scale")
ax2.set_xlabel("Value")
ax2.set_ylabel("Count")
# SHOW AND SAVE FILE ##########################################################
plt.tight_layout()
plt.savefig("hist_logscale_x.png")
plt.show()
|
mit
| -7,545,507,465,875,019,000 | 6,029,511,480,272,762,000 | 24.542857 | 95 | 0.47651 | false |
kekivelez/DjangoSkeleton
|
tcshealth/users/views.py
|
1
|
7243
|
from django.shortcuts import render
# Create your views here.
import uuid
from rest_framework import status
from rest_framework.response import Response
from rest_framework import generics
from rest_framework import serializers as DRFserializers
from rest_framework import permissions
from rest_framework import renderers
from rest_framework_jwt.views import ObtainJSONWebToken
from django.shortcuts import get_object_or_404
from . import serializers
from .models import User
from ..utils.views import PutUpdateAPIView
class AccessTokenView(ObtainJSONWebToken):
renderer_classes = (renderers.JSONRenderer, renderers.BrowsableAPIRenderer)
def post(self, request):
"""
Returns a Access Token that can be used for authenticated requests.
---
type:
email:
required: true
type: string
password:
required: true
type: string
omit_serializer: true
parameters:
- name: body
              description: Credentials to get an API access token.
required: true
type: AccessTokenPostResponse
paramType: body
"""
return super(AccessTokenView, self).post(request)
class RegisterView(generics.CreateAPIView):
authentication_classes = ()
permission_classes = ()
serializer_class = serializers.RegisterUserSerializer
def perform_create(self, serializer):
user = serializer.save(token_version=str(uuid.uuid4()))
user.send_account_activation_email()
def post(self, request, *args, **kwargs):
"""
Creates a new User and sends an email with the activation url.
"""
return super(RegisterView, self).post(request, *args, **kwargs)
class ChangePasswordView(PutUpdateAPIView):
model = User
permission_classes = (permissions.IsAuthenticated,)
serializer_class = serializers.ChangePasswordSerializer
def get_object(self):
return self.request.user
def put(self, request, *args, **kwargs):
"""
Changes the password of the current user.
---
omit_serializer: true
parameters:
- name: Authorization
description: The authorization token. The format for the header value is 'Bearer (token)'.
required: true
type: string
paramType: header
"""
super(ChangePasswordView, self).put(request, *args, **kwargs)
return Response(
data={"change_password": True}, status=status.HTTP_200_OK)
class UsersView(generics.ListAPIView):
model = User
serializer_class = serializers.UserSerializer
queryset = User.objects.all()
authentication_classes = ()
permission_classes = ()
filter_fields = ('email', 'gender', 'height')
search_fields = ('email', 'first_name', 'last_name')
ordering_fields = ('email', 'first_name', 'last_name')
def get(self, request, *args, **kwargs):
"""
Returns a list of all users.
---
parameters:
- name: search
description: Text to search for.
required: false
type: string
paramType: query
- name: email
description: Value to filter by. Example, [email protected]
required: false
type: string
paramType: query
- name: ordering
description: Values to order by. Example, order_by=email,country
required: false
type: string
paramType: query
"""
return super(UsersView, self).get(request, *args, **kwargs)
class CurrentUserView(generics.RetrieveAPIView):
model = User
serializer_class = serializers.UserSerializer
permission_classes = (permissions.IsAuthenticated,)
def get_object(self):
return self.request.user
def get(self, request, *args, **kwargs):
"""
Returns the information of the current user.
---
parameters:
- name: Authorization
description: The authorization token. The format for the header value is 'Bearer (token)'.
required: true
type: string
paramType: header
"""
return super(CurrentUserView, self).get(request, *args, **kwargs)
class ForgotPasswordView(generics.CreateAPIView):
authentication_classes = ()
permission_classes = ()
serializer_class = serializers.ForgotPasswordSerializer
def post(self, request, *args, **kwargs):
"""
Sends the forgot password email for the user with the given email.
"""
serializer = self.get_serializer(data=request.DATA)
serializer.is_valid(raise_exception=True)
serializer.send_password_reset_email()
return Response(serializer.validated_data)
class ResetPasswordView(generics.CreateAPIView):
authentication_classes = ()
permission_classes = ()
serializer_class = serializers.ResetPasswordSerializer
def post(self, request, *args, **kwargs):
"""
Resets the password for the current user.
---
omit_serializer: true
parameters:
- name: token
description: Password Reset token.
required: true
type: string
paramType: form
"""
serializer = self.get_serializer(data=request.DATA)
serializer.is_valid(raise_exception=True)
return Response(serializer.validated_data)
class UserSettingsView(generics.UpdateAPIView):
model = User
permission_classes = (permissions.IsAuthenticated,)
serializer_class = serializers.UserSerializer
def get_object(self):
return self.request.user
def put(self, request, *args, **kwargs):
"""
Updates the information of the current user.
---
parameters:
- name: Authorization
description: The authorization token. The format for the header value is 'Bearer (token)'.
required: true
type: string
paramType: header
"""
return super(UserSettingsView, self).put(request, *args, **kwargs)
def patch(self, request, *args, **kwargs):
"""
Updates the information of the current user partially.
---
parameters:
- name: Authorization
description: The authorization token. The format for the header value is 'Bearer (token)'.
required: true
type: string
paramType: header
"""
return super(UserSettingsView, self).patch(request, *args, **kwargs)
class SpecificUserView(generics.RetrieveAPIView):
model = User
serializer_class = serializers.UserSerializer
authentication_classes = ()
permission_classes = ()
def get_object(self):
return get_object_or_404(User, pk=self.kwargs['pk'])
def get(self, request, *args, **kwargs):
"""
Returns the public information of a user with the given id.
"""
return super(SpecificUserView, self).get(request, *args, **kwargs)
|
gpl-2.0
| 3,191,727,393,384,588,000 | 7,149,149,459,034,704,000 | 30.77193 | 104 | 0.620047 | false |
terryyin/linkchecker
|
third_party/dnspython/dns/rdtypes/ANY/NSEC.py
|
100
|
4812
|
# Copyright (C) 2004-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import cStringIO
import dns.exception
import dns.rdata
import dns.rdatatype
import dns.name
class NSEC(dns.rdata.Rdata):
"""NSEC record
@ivar next: the next name
@type next: dns.name.Name object
@ivar windows: the windowed bitmap list
@type windows: list of (window number, string) tuples"""
__slots__ = ['next', 'windows']
def __init__(self, rdclass, rdtype, next, windows):
super(NSEC, self).__init__(rdclass, rdtype)
self.next = next
self.windows = windows
def to_text(self, origin=None, relativize=True, **kw):
next = self.next.choose_relativity(origin, relativize)
text = ''
for (window, bitmap) in self.windows:
bits = []
for i in xrange(0, len(bitmap)):
byte = ord(bitmap[i])
for j in xrange(0, 8):
if byte & (0x80 >> j):
bits.append(dns.rdatatype.to_text(window * 256 + \
i * 8 + j))
text += (' ' + ' '.join(bits))
return '%s%s' % (next, text)
def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
next = tok.get_name()
next = next.choose_relativity(origin, relativize)
rdtypes = []
while 1:
token = tok.get().unescape()
if token.is_eol_or_eof():
break
nrdtype = dns.rdatatype.from_text(token.value)
if nrdtype == 0:
raise dns.exception.SyntaxError("NSEC with bit 0")
if nrdtype > 65535:
raise dns.exception.SyntaxError("NSEC with bit > 65535")
rdtypes.append(nrdtype)
rdtypes.sort()
window = 0
octets = 0
prior_rdtype = 0
bitmap = ['\0'] * 32
windows = []
for nrdtype in rdtypes:
if nrdtype == prior_rdtype:
continue
prior_rdtype = nrdtype
new_window = nrdtype // 256
if new_window != window:
windows.append((window, ''.join(bitmap[0:octets])))
bitmap = ['\0'] * 32
window = new_window
offset = nrdtype % 256
byte = offset // 8
bit = offset % 8
octets = byte + 1
bitmap[byte] = chr(ord(bitmap[byte]) | (0x80 >> bit))
windows.append((window, ''.join(bitmap[0:octets])))
return cls(rdclass, rdtype, next, windows)
from_text = classmethod(from_text)
def to_wire(self, file, compress = None, origin = None):
self.next.to_wire(file, None, origin)
for (window, bitmap) in self.windows:
file.write(chr(window))
file.write(chr(len(bitmap)))
file.write(bitmap)
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
(next, cused) = dns.name.from_wire(wire[: current + rdlen], current)
current += cused
rdlen -= cused
windows = []
while rdlen > 0:
if rdlen < 3:
raise dns.exception.FormError("NSEC too short")
window = ord(wire[current])
octets = ord(wire[current + 1])
if octets == 0 or octets > 32:
raise dns.exception.FormError("bad NSEC octets")
current += 2
rdlen -= 2
if rdlen < octets:
raise dns.exception.FormError("bad NSEC bitmap length")
bitmap = wire[current : current + octets].unwrap()
current += octets
rdlen -= octets
windows.append((window, bitmap))
if not origin is None:
next = next.relativize(origin)
return cls(rdclass, rdtype, next, windows)
from_wire = classmethod(from_wire)
def choose_relativity(self, origin = None, relativize = True):
self.next = self.next.choose_relativity(origin, relativize)
def _cmp(self, other):
return self._wire_cmp(other)
|
gpl-2.0
| -2,329,689,913,387,617,000 | 2,852,646,907,940,208,000 | 36.59375 | 79 | 0.570449 | false |
purushothamc/myibitsolutions
|
strings/justified_text.py
|
1
|
1674
|
def fullJustify(strings_list, number):
result = []
if not strings_list or number <= 0:
return result
current_length, idx, firstWord = 0, 0, True
for word in strings_list:
if firstWord:
result.append(word)
current_length += len(result[-1])
firstWord = False
else:
next_word = " " + word
current_length += len(next_word)
if current_length <= number:
result[-1] += next_word
else:
current_length = len(word)
result.append(word)
result_len = len(result)
for idx in xrange(result_len):
string = result[idx]
space_count = string.count(" ")
string_len = len(string)
difference = number - string_len
if (difference > 0 and space_count == 0) or idx == result_len - 1:
string += " "*difference
result[idx] = string
else:
extra_left = difference % space_count
to_pad = difference / space_count
temp_list = []
for char in string:
if char != " ":
temp_list.append(char)
else:
spaced_char = ""
if extra_left:
spaced_char = " "
extra_left -= 1
spaced_char += " " + to_pad*" "
temp_list.append(spaced_char)
result[idx] = "".join(temp_list)
print result
A = ["This", "is", "an", "example", "of", "text", "justification."]
A = [ "What", "must", "be", "shall", "be." ]
B = 12
fullJustify(A, B)
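# For the active sample above (A = ["What", "must", "be", "shall", "be."],
# B = 12) the call prints: ['What must be', 'shall be.   ']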
|
gpl-3.0
| -5,414,480,106,533,465,000 | -2,142,990,050,360,889,000 | 31.843137 | 74 | 0.470729 | false |
lesh1k/beatport-verifier
|
venv/lib/python2.7/site-packages/wheel/pep425tags.py
|
220
|
2861
|
"""Generate and work with PEP 425 Compatibility Tags."""
import sys
try:
import sysconfig
except ImportError: # pragma nocover
# Python < 2.7
import distutils.sysconfig as sysconfig
import distutils.util
def get_abbr_impl():
"""Return abbreviated implementation name."""
if hasattr(sys, 'pypy_version_info'):
pyimpl = 'pp'
elif sys.platform.startswith('java'):
pyimpl = 'jy'
elif sys.platform == 'cli':
pyimpl = 'ip'
else:
pyimpl = 'cp'
return pyimpl
def get_impl_ver():
"""Return implementation version."""
impl_ver = sysconfig.get_config_var("py_version_nodot")
if not impl_ver:
impl_ver = ''.join(map(str, sys.version_info[:2]))
return impl_ver
def get_platform():
"""Return our platform name 'win32', 'linux_x86_64'"""
# XXX remove distutils dependency
return distutils.util.get_platform().replace('.', '_').replace('-', '_')
def get_supported(versions=None):
"""Return a list of supported tags for each version specified in
`versions`.
:param versions: a list of string versions, of the form ["33", "32"],
or None. The first version will be assumed to support our ABI.
"""
supported = []
# Versions must be given with respect to the preference
if versions is None:
versions = []
major = sys.version_info[0]
# Support all previous minor Python versions.
for minor in range(sys.version_info[1], -1, -1):
versions.append(''.join(map(str, (major, minor))))
impl = get_abbr_impl()
abis = []
soabi = sysconfig.get_config_var('SOABI')
if soabi and soabi.startswith('cpython-'):
abis[0:0] = ['cp' + soabi.split('-', 1)[-1]]
abi3s = set()
import imp
for suffix in imp.get_suffixes():
if suffix[0].startswith('.abi'):
abi3s.add(suffix[0].split('.', 2)[1])
abis.extend(sorted(list(abi3s)))
abis.append('none')
arch = get_platform()
# Current version, current API (built specifically for our Python):
for abi in abis:
supported.append(('%s%s' % (impl, versions[0]), abi, arch))
# No abi / arch, but requires our implementation:
for i, version in enumerate(versions):
supported.append(('%s%s' % (impl, version), 'none', 'any'))
if i == 0:
# Tagged specifically as being cross-version compatible
# (with just the major version specified)
supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
# No abi / arch, generic Python
for i, version in enumerate(versions):
supported.append(('py%s' % (version,), 'none', 'any'))
if i == 0:
supported.append(('py%s' % (version[0]), 'none', 'any'))
return supported
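# A minimal usage sketch (not part of the original module): print the tags the
# running interpreter supports, most-preferred first.
if __name__ == '__main__':
    for tag in get_supported():
        print('-'.join(tag))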
|
cc0-1.0
| -4,670,959,992,773,885,000 | -8,431,738,424,183,075,000 | 28.494845 | 79 | 0.586858 | false |
cjerdonek/pip
|
pip/exceptions.py
|
123
|
1125
|
"""Exceptions used throughout package"""
from __future__ import absolute_import
class PipError(Exception):
"""Base pip exception"""
class InstallationError(PipError):
"""General exception during installation"""
class UninstallationError(PipError):
"""General exception during uninstallation"""
class DistributionNotFound(InstallationError):
"""Raised when a distribution cannot be found to satisfy a requirement"""
class BestVersionAlreadyInstalled(PipError):
"""Raised when the most up-to-date version of a package is already
installed. """
class BadCommand(PipError):
"""Raised when virtualenv or a command is not found"""
class CommandError(PipError):
"""Raised when there is an error in command-line arguments"""
class PreviousBuildDirError(PipError):
"""Raised when there's a previous conflicting build directory"""
class HashMismatch(InstallationError):
"""Distribution file hash values don't match."""
class InvalidWheelFilename(InstallationError):
"""Invalid wheel filename."""
class UnsupportedWheel(InstallationError):
"""Unsupported wheel."""
|
mit
| -667,631,408,790,138,000 | 3,926,074,747,111,128,000 | 22.93617 | 77 | 0.740444 | false |
daodaoliang/bokeh
|
bokeh/charts/builder/tests/test_line_builder.py
|
33
|
2376
|
""" This is the Bokeh charts testing interface.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2014, Continuum Analytics, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import absolute_import
from collections import OrderedDict
import unittest
import numpy as np
from numpy.testing import assert_array_equal
import pandas as pd
from bokeh.charts import Line
from bokeh.charts.builder.tests._utils import create_chart
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
class TestLine(unittest.TestCase):
def test_supported_input(self):
xyvalues = OrderedDict()
y_python = xyvalues['python'] = [2, 3, 7, 5, 26]
y_pypy = xyvalues['pypy'] = [12, 33, 47, 15, 126]
y_jython = xyvalues['jython'] = [22, 43, 10, 25, 26]
xyvaluesdf = pd.DataFrame(xyvalues)
for i, _xy in enumerate([xyvalues, xyvaluesdf]):
hm = create_chart(Line, _xy)
builder = hm._builders[0]
self.assertEqual(sorted(builder._groups), sorted(list(xyvalues.keys())))
assert_array_equal(builder._data['x'], [0, 1, 2, 3, 4])
assert_array_equal(builder._data['y_python'], y_python)
assert_array_equal(builder._data['y_pypy'], y_pypy)
assert_array_equal(builder._data['y_jython'], y_jython)
lvalues = [[2, 3, 7, 5, 26], [12, 33, 47, 15, 126], [22, 43, 10, 25, 26]]
for _xy in [lvalues, np.array(lvalues)]:
hm = create_chart(Line, _xy)
builder = hm._builders[0]
self.assertEqual(builder._groups, ['0', '1', '2'])
assert_array_equal(builder._data['x'], [0, 1, 2, 3, 4])
assert_array_equal(builder._data['y_0'], y_python)
assert_array_equal(builder._data['y_1'], y_pypy)
assert_array_equal(builder._data['y_2'], y_jython)
|
bsd-3-clause
| 8,661,524,628,610,371,000 | -984,541,885,210,037,800 | 39.965517 | 84 | 0.491582 | false |
opencord/voltha
|
voltha/adapters/adtran_olt/test/codec/test_olt_state.py
|
1
|
4156
|
# Copyright 2017-present Adtran, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from resources.sample_json import olt_state_json
from voltha.adapters.adtran_olt.codec.olt_state import OltState
import pytest
@pytest.fixture()
def olt_state_object():
return OltState(olt_state_json)
@pytest.fixture()
def pon_object():
return OltState.Pon(olt_state_json["pon"][0])
@pytest.fixture()
def onu_object():
return OltState.Pon.Onu(olt_state_json["pon"][0]["onu"][0])
@pytest.fixture()
def gem_object():
return OltState.Pon.Gem(olt_state_json["pon"][0]["gem"][0])
def test_olt_to_string(olt_state_object):
assert str(olt_state_object) == "OltState: ngpon2_agent-13.0.32-1.657.815547"
def test_olt_state_software_version(olt_state_object):
assert olt_state_object.software_version == "ngpon2_agent-13.0.32-1.657.815547"
def test_olt_state_pons(olt_state_object):
assert str(olt_state_object.pons[0]) == "OltState.Pon: pon-id: 0"
def test_olt_state_len(olt_state_object):
assert len(olt_state_object) == 16
def test_olt_state_get_item(olt_state_object):
assert str(olt_state_object[1]) == "OltState.Pon: pon-id: 1"
def test_olt_state_get_item_not_int(olt_state_object):
with pytest.raises(TypeError):
olt_state_object["something"]
def test_olt_state_get_item_out_of_bounds(olt_state_object):
with pytest.raises(KeyError):
olt_state_object[16]
def test_olt_state_iter(olt_state_object):
with pytest.raises(NotImplementedError):
for _ in olt_state_object:
pass
def test_olt_state_contains(olt_state_object):
assert 5 in olt_state_object
def test_olt_state_contains_does_not_contain(olt_state_object):
    assert 16 not in olt_state_object
def test_olt_state_contains_not_int(olt_state_object):
with pytest.raises(TypeError):
"something" in olt_state_object
def test_pon_to_string(pon_object):
assert str(pon_object) == "OltState.Pon: pon-id: 0"
def test_pon_properties(pon_object):
assert pon_object.pon_id == 0
assert pon_object.downstream_wavelength == 0
assert pon_object.upstream_wavelength == 0
assert pon_object.downstream_channel_id == 15
assert pon_object.rx_packets == 1625773517
assert pon_object.tx_packets == 761098346
assert pon_object.rx_bytes == 145149613233620
assert pon_object.tx_bytes == 141303797318481
assert pon_object.tx_bip_errors == 0
assert pon_object.ont_los == []
assert pon_object.discovered_onu == frozenset()
assert pon_object.wm_tuned_out_onus == "AAAAAAAAAAAAAAAAAAAAAA=="
def test_pon_gems(pon_object):
assert str(pon_object.gems[2176]) == "OltState.Pon.Gem: onu-id: 0, gem-id: 2176"
def test_pon_gems_existing(pon_object):
pon_object._gems = "existing"
assert pon_object.gems == "existing"
def test_pon_onus(pon_object):
assert str(pon_object.onus[0]) == "OltState.Pon.Onu: onu-id: 0"
def test_pon_onus_existing(pon_object):
pon_object._onus = "existing"
assert pon_object.onus == "existing"
def test_onu_properties(onu_object):
assert onu_object.onu_id == 0
assert onu_object.oper_status == "unknown"
assert onu_object.reported_password == "redacted"
assert onu_object.rssi == -207
assert onu_object.equalization_delay == 620952
assert onu_object.fiber_length == 47
def test_gem_properties(gem_object):
assert gem_object.onu_id == 0
assert gem_object.alloc_id == 1024
assert gem_object.gem_id == 2176
assert gem_object.tx_packets == 65405
assert gem_object.tx_bytes == 5420931
assert gem_object.rx_packets == 13859
assert gem_object.rx_bytes == 3242784
|
apache-2.0
| 813,308,735,875,654,500 | -8,197,186,897,937,566,000 | 27.272109 | 84 | 0.703321 | false |
MRigal/django
|
django/contrib/contenttypes/migrations/0001_initial.py
|
585
|
1227
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.contrib.contenttypes.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='ContentType',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100)),
('app_label', models.CharField(max_length=100)),
('model', models.CharField(max_length=100, verbose_name='python model class name')),
],
options={
'ordering': ('name',),
'db_table': 'django_content_type',
'verbose_name': 'content type',
'verbose_name_plural': 'content types',
},
bases=(models.Model,),
managers=[
('objects', django.contrib.contenttypes.models.ContentTypeManager()),
],
),
migrations.AlterUniqueTogether(
name='contenttype',
unique_together=set([('app_label', 'model')]),
),
]
|
bsd-3-clause
| 8,316,610,144,604,032,000 | 7,180,504,763,275,857,000 | 32.162162 | 114 | 0.537082 | false |
marinho/geraldo
|
site/newsite/site-geraldo/django/contrib/localflavor/is_/is_postalcodes.py
|
438
|
4913
|
# -*- coding: utf-8 -*-
IS_POSTALCODES = (
('101', u'101 Reykjavík'),
('103', u'103 Reykjavík'),
('104', u'104 Reykjavík'),
('105', u'105 Reykjavík'),
('107', u'107 Reykjavík'),
('108', u'108 Reykjavík'),
('109', u'109 Reykjavík'),
('110', u'110 Reykjavík'),
('111', u'111 Reykjavík'),
('112', u'112 Reykjavík'),
('113', u'113 Reykjavík'),
('116', u'116 Kjalarnes'),
('121', u'121 Reykjavík'),
('123', u'123 Reykjavík'),
('124', u'124 Reykjavík'),
('125', u'125 Reykjavík'),
('127', u'127 Reykjavík'),
('128', u'128 Reykjavík'),
('129', u'129 Reykjavík'),
('130', u'130 Reykjavík'),
('132', u'132 Reykjavík'),
('150', u'150 Reykjavík'),
('155', u'155 Reykjavík'),
('170', u'170 Seltjarnarnes'),
('172', u'172 Seltjarnarnes'),
('190', u'190 Vogar'),
('200', u'200 Kópavogur'),
('201', u'201 Kópavogur'),
('202', u'202 Kópavogur'),
('203', u'203 Kópavogur'),
('210', u'210 Garðabær'),
('212', u'212 Garðabær'),
('220', u'220 Hafnarfjörður'),
('221', u'221 Hafnarfjörður'),
('222', u'222 Hafnarfjörður'),
('225', u'225 Álftanes'),
('230', u'230 Reykjanesbær'),
('232', u'232 Reykjanesbær'),
('233', u'233 Reykjanesbær'),
('235', u'235 Keflavíkurflugvöllur'),
('240', u'240 Grindavík'),
('245', u'245 Sandgerði'),
('250', u'250 Garður'),
('260', u'260 Reykjanesbær'),
('270', u'270 Mosfellsbær'),
('300', u'300 Akranes'),
('301', u'301 Akranes'),
('302', u'302 Akranes'),
('310', u'310 Borgarnes'),
('311', u'311 Borgarnes'),
('320', u'320 Reykholt í Borgarfirði'),
('340', u'340 Stykkishólmur'),
('345', u'345 Flatey á Breiðafirði'),
('350', u'350 Grundarfjörður'),
('355', u'355 Ólafsvík'),
('356', u'356 Snæfellsbær'),
('360', u'360 Hellissandur'),
('370', u'370 Búðardalur'),
('371', u'371 Búðardalur'),
('380', u'380 Reykhólahreppur'),
('400', u'400 Ísafjörður'),
('401', u'401 Ísafjörður'),
('410', u'410 Hnífsdalur'),
('415', u'415 Bolungarvík'),
('420', u'420 Súðavík'),
('425', u'425 Flateyri'),
('430', u'430 Suðureyri'),
('450', u'450 Patreksfjörður'),
('451', u'451 Patreksfjörður'),
('460', u'460 Tálknafjörður'),
('465', u'465 Bíldudalur'),
('470', u'470 Þingeyri'),
('471', u'471 Þingeyri'),
('500', u'500 Staður'),
('510', u'510 Hólmavík'),
('512', u'512 Hólmavík'),
('520', u'520 Drangsnes'),
('522', u'522 Kjörvogur'),
('523', u'523 Bær'),
('524', u'524 Norðurfjörður'),
('530', u'530 Hvammstangi'),
('531', u'531 Hvammstangi'),
('540', u'540 Blönduós'),
('541', u'541 Blönduós'),
('545', u'545 Skagaströnd'),
('550', u'550 Sauðárkrókur'),
('551', u'551 Sauðárkrókur'),
('560', u'560 Varmahlíð'),
('565', u'565 Hofsós'),
('566', u'566 Hofsós'),
('570', u'570 Fljót'),
('580', u'580 Siglufjörður'),
('600', u'600 Akureyri'),
('601', u'601 Akureyri'),
('602', u'602 Akureyri'),
('603', u'603 Akureyri'),
('610', u'610 Grenivík'),
('611', u'611 Grímsey'),
('620', u'620 Dalvík'),
('621', u'621 Dalvík'),
('625', u'625 Ólafsfjörður'),
('630', u'630 Hrísey'),
('640', u'640 Húsavík'),
('641', u'641 Húsavík'),
('645', u'645 Fosshóll'),
('650', u'650 Laugar'),
('660', u'660 Mývatn'),
('670', u'670 Kópasker'),
('671', u'671 Kópasker'),
('675', u'675 Raufarhöfn'),
('680', u'680 Þórshöfn'),
('681', u'681 Þórshöfn'),
('685', u'685 Bakkafjörður'),
('690', u'690 Vopnafjörður'),
('700', u'700 Egilsstaðir'),
('701', u'701 Egilsstaðir'),
('710', u'710 Seyðisfjörður'),
('715', u'715 Mjóifjörður'),
('720', u'720 Borgarfjörður eystri'),
('730', u'730 Reyðarfjörður'),
('735', u'735 Eskifjörður'),
('740', u'740 Neskaupstaður'),
('750', u'750 Fáskrúðsfjörður'),
('755', u'755 Stöðvarfjörður'),
('760', u'760 Breiðdalsvík'),
('765', u'765 Djúpivogur'),
('780', u'780 Höfn í Hornafirði'),
('781', u'781 Höfn í Hornafirði'),
('785', u'785 Öræfi'),
('800', u'800 Selfoss'),
('801', u'801 Selfoss'),
('802', u'802 Selfoss'),
('810', u'810 Hveragerði'),
('815', u'815 Þorlákshöfn'),
('820', u'820 Eyrarbakki'),
('825', u'825 Stokkseyri'),
('840', u'840 Laugarvatn'),
('845', u'845 Flúðir'),
('850', u'850 Hella'),
('851', u'851 Hella'),
('860', u'860 Hvolsvöllur'),
('861', u'861 Hvolsvöllur'),
('870', u'870 Vík'),
('871', u'871 Vík'),
('880', u'880 Kirkjubæjarklaustur'),
('900', u'900 Vestmannaeyjar'),
('902', u'902 Vestmannaeyjar')
)
|
lgpl-3.0
| -6,250,891,911,346,344,000 | -4,632,572,714,510,268,000 | 30.291391 | 43 | 0.526772 | false |
pypot/scikit-learn
|
examples/decomposition/plot_pca_vs_lda.py
|
182
|
1743
|
"""
=======================================================
Comparison of LDA and PCA 2D projection of Iris dataset
=======================================================
The Iris dataset represents 3 kind of Iris flowers (Setosa, Versicolour
and Virginica) with 4 attributes: sepal length, sepal width, petal length
and petal width.
Principal Component Analysis (PCA) applied to this data identifies the
combination of attributes (principal components, or directions in the
feature space) that account for the most variance in the data. Here we
plot the different samples on the 2 first principal components.
Linear Discriminant Analysis (LDA) tries to identify attributes that
account for the most variance *between classes*. In particular,
LDA, in contrast to PCA, is a supervised method, using known class labels.
"""
print(__doc__)
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.decomposition import PCA
from sklearn.lda import LDA
iris = datasets.load_iris()
X = iris.data
y = iris.target
target_names = iris.target_names
pca = PCA(n_components=2)
X_r = pca.fit(X).transform(X)
lda = LDA(n_components=2)
X_r2 = lda.fit(X, y).transform(X)
# Percentage of variance explained for each components
print('explained variance ratio (first two components): %s'
% str(pca.explained_variance_ratio_))
plt.figure()
for c, i, target_name in zip("rgb", [0, 1, 2], target_names):
plt.scatter(X_r[y == i, 0], X_r[y == i, 1], c=c, label=target_name)
plt.legend()
plt.title('PCA of IRIS dataset')
plt.figure()
for c, i, target_name in zip("rgb", [0, 1, 2], target_names):
plt.scatter(X_r2[y == i, 0], X_r2[y == i, 1], c=c, label=target_name)
plt.legend()
plt.title('LDA of IRIS dataset')
plt.show()
|
bsd-3-clause
| 1,219,178,373,249,394,400 | 7,041,068,529,750,220,000 | 30.690909 | 74 | 0.682731 | false |
phlax/translate
|
translate/convert/factory.py
|
3
|
7615
|
# -*- coding: utf-8 -*-
#
# Copyright 2010 Zuza Software Foundation
#
# This file is part of the Translate Toolkit.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Factory methods to convert supported input files to supported translatable files."""
import os
import six
#from translate.convert import prop2po, po2prop, odf2xliff, xliff2odf
__all__ = ('converters', 'UnknownExtensionError', 'UnsupportedConversionError')
# Turn into property to support lazy loading of things?
converters = {}
#for module in (prop2po, po2prop, odf2xliff, xliff2odf):
# if not hasattr(module, 'formats'):
# continue
# for extension in module.formats:
# if extension not in converters:
# converters[extension] = []
# converters[extension].append(module.formats[extension])
@six.python_2_unicode_compatible
class UnknownExtensionError(Exception):
def __init__(self, afile):
self.file = afile
def __str__(self):
return 'Unable to find extension for file: %s' % (self.file)
@six.python_2_unicode_compatible
class UnsupportedConversionError(Exception):
def __init__(self, in_ext=None, out_ext=None, templ_ext=None):
self.in_ext = in_ext
self.out_ext = out_ext
self.templ_ext = templ_ext
def __str__(self):
msg = "Unsupported conversion from %s to %s" % (self.in_ext, self.out_ext)
if self.templ_ext:
msg += ' with template %s' % (self.templ_ext)
return msg
def get_extension(filename):
path, fname = os.path.split(filename)
ext = fname.split(os.extsep)[-1]
if ext == fname:
return None
return ext
def get_converter(in_ext, out_ext=None, templ_ext=None):
convert_candidates = None
if templ_ext:
if (in_ext, templ_ext) in converters:
convert_candidates = converters[(in_ext, templ_ext)]
else:
raise UnsupportedConversionError(in_ext, out_ext, templ_ext)
else:
if in_ext in converters:
convert_candidates = converters[in_ext]
elif (in_ext,) in converters:
convert_candidates = converters[(in_ext,)]
else:
raise UnsupportedConversionError(in_ext, out_ext)
convert_fn = None
if not out_ext:
out_ext, convert_fn = convert_candidates[0]
else:
for ext, func in convert_candidates:
if ext == out_ext:
convert_fn = func
break
if not convert_fn:
raise UnsupportedConversionError(in_ext, out_ext, templ_ext)
return convert_fn
def get_output_extensions(ext):
"""Compiles a list of possible output extensions for the given input extension."""
out_exts = []
for key in converters:
in_ext = key
if isinstance(key, tuple):
in_ext = key[0]
if in_ext == ext:
for out_ext, convert_fn in converters[key]:
out_exts.append(out_ext)
return out_exts
def convert(inputfile, template=None, options=None, convert_options=None):
"""Convert the given input file to an appropriate output format, optionally
using the given template file and further options.
If the output extension (format) cannot be inferred the first converter
that can handle the input file (and the format/extension it gives as
output) is used.
:type inputfile: file
:param inputfile: The input file to be converted
:type template: file
:param template: Template file to use during conversion
:type options: dict (default: None)
:param options: Valid options are:
- in_ext: The extension (format) of the input file.
- out_ext: The extension (format) to use for the output file.
- templ_ext: The extension (format) of the template file.
- in_fname: File name of the input file; used only to determine
the input file extension (format).
- templ_fname: File name of the template file; used only to
determine the template file extension (format).
:returns: a 2-tuple: The new output file (in a temporary directory) and
the extension (format) of the output file. The caller is
responsible for deleting the (temporary) output file.
"""
in_ext, out_ext, templ_ext = None, None, None
# Get extensions from options
if options is None:
options = {}
else:
if 'in_ext' in options:
in_ext = options['in_ext']
if 'out_ext' in options:
out_ext = options['out_ext']
if template and 'templ_ext' in options:
templ_ext = options['templ_ext']
# If we still do not have extensions, try and get it from the *_fname options
if not in_ext and 'in_fname' in options:
in_ext = get_extension(options['in_fname'])
if template and not templ_ext and 'templ_fname' in options:
        templ_ext = get_extension(options['templ_fname'])
# If we still do not have extensions, get it from the file names
if not in_ext and hasattr(inputfile, 'name'):
in_ext = get_extension(inputfile.name)
if template and not templ_ext and hasattr(template, 'name'):
templ_ext = get_extension(template.name)
if not in_ext:
raise UnknownExtensionError(inputfile)
if template and not templ_ext:
raise UnknownExtensionError(template)
out_ext_candidates = get_output_extensions(in_ext)
if not out_ext_candidates:
# No converters registered for the in_ext we have
raise UnsupportedConversionError(in_ext=in_ext, templ_ext=templ_ext)
if out_ext and out_ext not in out_ext_candidates:
# If out_ext has a value at this point, it was given in options, so
# we just take a second to make sure that the conversion is supported.
raise UnsupportedConversionError(in_ext, out_ext, templ_ext)
if not out_ext and templ_ext in out_ext_candidates:
# If we're using a template, chances are (pretty damn) good that the
# output file will be of the same type
out_ext = templ_ext
else:
# As a last resort, we'll just use the first possible output format
out_ext = out_ext_candidates[0]
# XXX: We are abusing tempfile.mkstemp() below: we are only using it to
# obtain a temporary file name to use the normal open() with. This is
# done because a tempfile.NamedTemporaryFile simply gave too many
# issues when being closed (and deleted) by the rest of the toolkit
# (eg. TranslationStore.savefile()). Therefore none of mkstemp()'s
# security features are being utilised.
import tempfile
tempfd, tempfname = tempfile.mkstemp(prefix='ttk_convert', suffix=os.extsep + out_ext)
os.close(tempfd)
if convert_options is None:
convert_options = {}
with open(tempfname, 'w') as output_file:
get_converter(in_ext, out_ext, templ_ext)(inputfile, output_file, template, **convert_options)
return output_file, out_ext
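# A minimal, hypothetical usage sketch (not part of the module): the real
# registry is filled in by the commented-out converter imports above, so a
# fake 'txt' -> 'upper' converter is registered here just to show the flow.
if __name__ == '__main__':
    import tempfile
    def _fake_convert(inputfile, outputfile, template, **kwargs):
        outputfile.write(inputfile.read().upper())
    converters['txt'] = [('upper', _fake_convert)]
    with tempfile.NamedTemporaryFile(mode='w+', suffix=os.extsep + 'txt') as infile:
        infile.write('hello')
        infile.seek(0)
        output_file, out_ext = convert(infile)
    print('converted to %s: %s' % (out_ext, output_file.name))
    os.remove(output_file.name)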
|
gpl-2.0
| -5,523,099,488,572,223,000 | 8,690,251,204,402,550,000 | 36.328431 | 102 | 0.656861 | false |
Thraxis/pymedusa
|
tests/issue_submitter_tests.py
|
1
|
2025
|
# coding=UTF-8
# Author: Dennis Lutter <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
"""
Test exception logging
"""
from __future__ import print_function
import os.path
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from sickbeard import logger, ex
def exception_generator():
"""
Dummy function to raise a fake exception and log it
"""
try:
raise Exception('FAKE EXCEPTION')
except Exception as error:
logger.log(u"FAKE ERROR: " + ex(error), logger.ERROR) # pylint: disable=no-member
logger.submit_errors() # pylint: disable=no-member
raise
class IssueSubmitterBasicTests(unittest.TestCase):
"""
Tests logging of exceptions
"""
def test_submitter(self):
"""
Test that an exception is raised
"""
self.assertRaises(Exception, exception_generator)
if __name__ == "__main__":
print("""
==================
STARTING - ISSUE SUBMITTER TESTS
==================
######################################################################
""")
SUITE = unittest.TestLoader().loadTestsFromTestCase(IssueSubmitterBasicTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
|
gpl-3.0
| 3,575,185,474,991,662,000 | -596,993,662,280,087,400 | 28.779412 | 90 | 0.65284 | false |
jimdial/azure-quickstart-templates
|
splunk-on-ubuntu/scripts/config.py
|
119
|
1408
|
# The MIT License (MIT)
#
# Copyright (c) 2016 Microsoft. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
STORAGE_ACCOUNT_NAME = 'YOUR_STORAGE_ACCOUNT_NAME'
STORAGE_ACCOUNT_KEY = 'YOUR_STORAGE_ACCOUNT_KEY'
|
mit
| 3,337,463,651,410,082,300 | -6,105,677,790,461,407,000 | 54.125 | 84 | 0.725624 | false |
jonashaag/django-nonrel-nohistory
|
django/contrib/sessions/models.py
|
231
|
2034
|
import base64
import cPickle as pickle
from django.db import models
from django.utils.translation import ugettext_lazy as _
class SessionManager(models.Manager):
def encode(self, session_dict):
"""
Returns the given session dictionary pickled and encoded as a string.
"""
return SessionStore().encode(session_dict)
def save(self, session_key, session_dict, expire_date):
s = self.model(session_key, self.encode(session_dict), expire_date)
if session_dict:
s.save()
else:
s.delete() # Clear sessions with no data.
return s
class Session(models.Model):
"""
Django provides full support for anonymous sessions. The session
framework lets you store and retrieve arbitrary data on a
per-site-visitor basis. It stores data on the server side and
abstracts the sending and receiving of cookies. Cookies contain a
session ID -- not the data itself.
The Django sessions framework is entirely cookie-based. It does
not fall back to putting session IDs in URLs. This is an intentional
design decision. Not only does that behavior make URLs ugly, it makes
your site vulnerable to session-ID theft via the "Referer" header.
For complete documentation on using Sessions in your code, consult
the sessions documentation that is shipped with Django (also available
on the Django Web site).
"""
session_key = models.CharField(_('session key'), max_length=40,
primary_key=True)
session_data = models.TextField(_('session data'))
expire_date = models.DateTimeField(_('expire date'), db_index=True)
objects = SessionManager()
class Meta:
db_table = 'django_session'
verbose_name = _('session')
verbose_name_plural = _('sessions')
def get_decoded(self):
return SessionStore().decode(self.session_data)
# At bottom to avoid circular import
from django.contrib.sessions.backends.db import SessionStore
|
bsd-3-clause
| 5,698,844,288,618,021,000 | -5,460,127,056,673,502,000 | 34.684211 | 77 | 0.682399 | false |
sparkslabs/kamaelia_
|
Sketches/MPS/BugReports/FixTests/Kamaelia/Kamaelia/Apps/JMB/WSGI/_WSGIHandler.py
|
3
|
15264
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
"""
WSGI Handler
=============
NOTE: This is experimental software.
This is the WSGI handler for ServerCore. It will wait on the
HTTPParser to transmit the body in full before proceeding. Thus, it is probably
not a good idea to use any WSGI apps requiring a lot of large file uploads (although
it could theoretically function fairly well for that purpose as long as the concurrency
level is relatively low).
For more information on WSGI, what it is, and to get a general overview of what
this component is intended to adapt the ServerCore to do, see one of the following
links:
* http://www.python.org/dev/peps/pep-0333/ (PEP 333)
* http://www.wsgi.org/wsgi/ (WsgiStart wiki)
* http://en.wikipedia.org/wiki/Web_Server_Gateway_Interface (Wikipedia article on WSGI)
-------------
Dependencies
-------------
This component depends on the wsgiref module, which is included with python 2.5.
Thus if you're using an older version, you will need to install it before using
this component.
The easiest way to install wsgiref is to use easy_install, which may be downloaded
from http://peak.telecommunity.com/DevCenter/EasyInstall . You may then install
wsgiref using the command "sudo easy_install wsgiref" (without the quotes of course).
Please note that Kamaelia Publish includes wsgiref.
-----------------------------
How do I use this component?
-----------------------------
The easiest way to use this component is to use the WsgiHandler factory function
that is included in Factory.py in this package. That method has URL handling that
will route a URL to the proper place. There is also a SimpleWsgiHandler that may
be used if you only want to support one application object. For more information
on how to use these functions, please see Factory.py. Also please note that both
of these factory functions are made to work with ServerCore/SimpleServer. Here is
an example of how to create a simple WSGI server:
from Kamaelia.Protocol.HTTP import HTTPProtocol
from Kamaelia.Experimental.Wsgi.Factory import WsgiFactory # FIXME: Docs are broken :-(
WsgiConfig = {
'wsgi_ver' : (1, 0),
'server_admin' : 'Jason Baker',
'server_software' : 'Kamaelia Publish'
}
url_list = [ #Note that this is a list of dictionaries. Order is important.
{
'kp.regex' : 'simple',
'kp.import_path' : 'Kamaelia.Apps.Wsgi.Apps.Simple',
'kp.app_obj' : 'simple_app',
    },
{
'kp.regex' : '.*', #The .* means that this is a 404 handler
'kp.import_path' : 'Kamaelia.Apps.Wsgi.Apps.ErrorHandler',
'kp.app_obj' : 'application',
}
]
routing = [['/', WsgiFactory(WsgiConfig, url_list)]]
ServerCore(
protocol=HTTPProtocol(routing),
port=8080,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)).run()
------------------
Internal overview
------------------
request object
~~~~~~~~~~~~~~~
Note that certain WSGI applications will require configuration
data from the urls file. If you use the WsgiFactory to run this
handler, all options specified in the urls file will be put into
the environment variable with a kp. in front of them.
For example, the 'regex' entry in a urls file would go into the
environ dictionary like this if it was set to 'simple':
{
...
'kp.regex' : 'simple',
...
}
wsgi.input
~~~~~~~~~~~
PEP 333 requires that the WSGI environ dictionary also contain a file-like object
that holds the body of the request. Currently, the WsgiHandler will wait for the
full request before starting the application (which is not optimal behavior). If
the method is not PUT or POST, the handler will use a pre-made null-file object that
will always return empty data. This is an optimization to lower peak memory usage
and to speed things up.
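For illustration, a WSGI application reads the body through this object in the
usual PEP 333 way (a sketch; the application and variable names are arbitrary):
    def simple_app(environ, start_response):
        body = environ['wsgi.input'].read()
        start_response('200 OK', [('Content-type', 'text/plain')])
        return [body]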
WsgiConfig
~~~~~~~~~~~
The WsgiHandler requires a WsgiConfig dictionary for general configuration info. The
following items are required to be defined:
* wsgi_ver: the WSGI version as a Tuple. You want to use (1, 0)
* server_admin: the name and/or email address of the server's administrator
* server_software: The software and/or software version that runs the server
FIXME: It would be nice if the WsgiConfig were made into an object rather than a
dictionary.
"""
from pprint import pprint, pformat
import sys, os, cStringIO, cgitb, traceback, logging, copy
from datetime import datetime
from wsgiref.util import is_hop_by_hop
import Axon
from Axon.ThreadedComponent import threadedcomponent
from Axon.Ipc import producerFinished
import Kamaelia.Protocol.HTTP.ErrorPages as ErrorPages
from xml.sax.saxutils import unescape
class NullFileLike (object):
"""
This is a file-like object that is meant to represent an empty file.
"""
def read(self, number=0):
return ''
    def readlines(self, number=0):
        return []
def readline(self):
return ''
def close(self):
pass
    def next(self):
        raise StopIteration()
class ErrorLogger(object):
"""This is the file-like object intended to be used for wsgi.errors."""
def __init__(self, logger):
self.logger = logger
def write(self, data):
self.logger.error(data)
def writelines(self, seq):
data = '\n'.join(seq)
self.logger.error(data)
def flush(self):
pass
_null_fl = NullFileLike()
class _WsgiHandler(threadedcomponent):
"""
This is a WSGI handler that is used to serve WSGI applications. Typically,
URL routing is to be done in the factory method that creates this. Thus,
the handler must be passed the application object. You probably don't need
to instantiate this class directly.
It will respond to the following signals:
producerFinished - This is used by the HTTPServer to indicate that the full
body has been transmitted. This will not shut down this component, and in
fact will make it BEGIN processing the request. If the request is not a
POST or PUT request, the Handler will ignore this signal.
Any other signals that this component may receive may result in undefined
behavior, but this component will most likely ignore them.
"""
Inboxes = {
'inbox' : 'Used to receive the body of requests from the HTTPParser',
        'control' : 'Used to receive producerFinished signals once the full body has arrived',
}
Outboxes = {
'outbox' : 'used to send page fragments',
'signal' : 'send producerFinished messages',
}
Debug = False
def __init__(self, app, request, WsgiConfig, **argd):
"""
app - The WSGI application to run
request - the request object that is generated by HTTPParser
WsgiConfig - General configuration about the WSGI server.
"""
super(_WsgiHandler, self).__init__(**argd)
self.environ = request
batch_str = self.environ.get('batch', '')
if batch_str:
batch_str = 'batch ' + batch_str
print 'request received for [%s] %s' % \
(self.environ['REQUEST_URI'], batch_str)
self.app = app
self.response_dict = {}
self.wsgi_config = WsgiConfig
self.write_called = False
self.pf_received = False #Have we received a producerFinished signal?
self.logger = logging.getLogger('kp')
self.log = ErrorLogger(self.logger)
def main(self):
if self.environ['REQUEST_METHOD'] == 'POST' or self.environ['REQUEST_METHOD'] == 'PUT':
try:
body = self.waitForBody()
except:
self._error(503, sys.exc_info())
self.send(producerFinished(self), 'signal')
return
self.memfile = cStringIO.StringIO(body)
else:
self.memfile = _null_fl
self.initWSGIVars(self.wsgi_config)
#pprint(self.environ)
try:
#PEP 333 specifies that we're not supposed to buffer output here,
#so pulling the iterator out of the app object
app_return = self.app(self.environ, self.start_response)
            if isinstance(app_return, list):
response = app_return.pop(0)
self.write(response)
[self.sendFragment(x) for x in app_return]
else:
app_iter = iter(app_return)
                response = app_iter.next()
while not response:
response = app_iter.next()
self.write(response)
[self.sendFragment(x) for x in app_iter if x]
                if hasattr(app_iter, 'close'):
                    app_iter.close()
except:
self._error(503, sys.exc_info()) #Catch any errors and log them and print
#either an error message or a stack
#trace (depending if debug is set)
self.memfile.close()
#The Kamaelia Publish Peer depends on the producerFinished signal being sent
#AFTER this handler has received a producerFinished signal. Thus, we wait
#until we get a signal before finishing up.
if not self.pf_received:
while not self.dataReady('control'):
self.pause()
self.send(Axon.Ipc.producerFinished(self), "signal")
def start_response(self, status, response_headers, exc_info=None):
"""
Method to be passed to WSGI application object to start the response.
"""
if exc_info:
try:
raise exc_info[0], exc_info[1], exc_info[2]
finally:
exc_info = None
elif self.response_dict:
#Will be caught by _error
raise WsgiAppError('start_response called a second time without exc_info! See PEP 333.')
#PEP 333 requires that an application NOT send any hop-by-hop headers.
#Therefore, we check for any of them in the headers the application
#returns. If so, an exception is raised to be caught by _error.
for key,value in response_headers:
if is_hop_by_hop(key):
raise WsgiAppError('Hop by hop header specified')
self.response_dict['headers'] = copy.copy(response_headers)
self.response_dict['statuscode'] = status
return self.write
def write(self, body_data):
"""
Write method to be passed to WSGI application object. Used to write
unbuffered output to the page. You probably don't want to use this
unless you have good reason to.
"""
if self.response_dict and not self.write_called:
self.response_dict['data'] = body_data
self.send(self.response_dict, 'outbox')
self.write_called = True
elif self.write_called:
self.sendFragment(body_data)
#the following errors will be caught and sent to _error
elif not self.response_dict and not self.write_called:
raise WsgiError("write() called before start_response()!")
else:
            raise WsgiError('Unknown error in write.')
def _error(self, status=500, body_data=('', '', '')):
"""
This is an internal method used to print an error to the browser and log
it in the wsgi log.
"""
if self.Debug:
resource = {
'statuscode' : status,
'type' : 'text/html',
'data' : cgitb.html(body_data),
}
self.send(resource, 'outbox')
else:
self.send(ErrorPages.getErrorPage(status, 'An internal error has occurred.'), 'outbox')
        self.log.write(''.join(traceback.format_exception(body_data[0], body_data[1], body_data[2])))
def waitForBody(self):
"""
This internal method is used to make the WSGI Handler wait for the body
of an HTTP request before proceeding.
FIXME: We should really begin executing the Application and pull the
body as needed rather than pulling it all up front.
"""
buffer = []
not_done = True
while not_done:
for msg in self.Inbox('control'):
#print msg
if isinstance(msg, producerFinished):
not_done = False
self.pf_received = True
for msg in self.Inbox('inbox'):
if isinstance(msg, str):
text = msg
elif isinstance(msg, dict):
text = msg.get('body', '')
text = unescape(text)
else:
text = ''
if not isinstance(text, str):
text = str(text)
buffer.append(text)
if not_done and not self.anyReady():
self.pause()
return ''.join(buffer)
def sendFragment(self, fragment):
"""
This is a pretty simple method. It's used to send a fragment if an app
yields a value beyond the first.
"""
page = {
'data' : fragment,
}
#print 'FRAGMENT'
#pprint(page)
self.send(page, 'outbox')
def initWSGIVars(self, wsgi_config):
"""
This method initializes all variables that are required to be present
(including ones that could possibly be empty).
"""
#==================================
#WSGI variables
#==================================
self.environ["wsgi.version"] = wsgi_config['wsgi_ver']
self.environ["wsgi.errors"] = self.log
self.environ['wsgi.input'] = self.memfile
self.environ["wsgi.multithread"] = True
self.environ["wsgi.multiprocess"] = False
self.environ["wsgi.run_once"] = False
class WsgiError(Exception):
"""
This is used to indicate an internal error of some kind. It is thrown if the
write() callable is called without start_response being called.
"""
pass
class WsgiAppError(Exception):
"""
    This is an exception that is used if a Wsgi application does something it shouldn't.
"""
pass
|
apache-2.0
| 8,323,502,713,929,386,000 | 2,012,422,192,661,253,000 | 35.342857 | 107 | 0.617859 | false |
riklaunim/django-custom-multisite
|
setup.py
|
1
|
4043
|
from distutils.core import setup
from distutils.command.install_data import install_data
from distutils.command.install import INSTALL_SCHEMES
import os
import sys
class osx_install_data(install_data):
# On MacOS, the platform-specific lib dir is /System/Library/Framework/Python/.../
# which is wrong. Python 2.5 supplied with MacOS 10.5 has an Apple-specific fix
# for this in distutils.command.install_data#306. It fixes install_lib but not
# install_data, which is why we roll our own install_data class.
def finalize_options(self):
# By the time finalize_options is called, install.install_lib is set to the
# fixed directory, so we set the installdir to install_lib. The
# install_data class uses ('install_data', 'install_dir') instead.
self.set_undefined_options('install', ('install_lib', 'install_dir'))
install_data.finalize_options(self)
if sys.platform == "darwin":
cmdclasses = {'install_data': osx_install_data}
else:
cmdclasses = {'install_data': install_data}
def fullsplit(path, result=None):
"""
Split a pathname into components (the opposite of os.path.join) in a
platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return fullsplit(head, [tail] + result)
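# For example (a sketch; result shown for a POSIX-style path):
#   fullsplit('django/contrib/admin') == ['django', 'contrib', 'admin']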
# Tell distutils not to put the data_files in platform-specific installation
# locations. See here for an explanation:
# http://groups.google.com/group/comp.lang.python/browse_thread/thread/35ec7b2fed36eaec/2105ee4d9e8042cb
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
os.chdir(root_dir)
django_dir = 'django'
for dirpath, dirnames, filenames in os.walk(django_dir):
    # Prune dirnames in place so os.walk skips hidden directories; deleting
    # entries while enumerating can skip the element after each deletion.
    dirnames[:] = [d for d in dirnames if not d.startswith('.')]
if '__init__.py' in filenames:
packages.append('.'.join(fullsplit(dirpath)))
elif filenames:
data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames]])
# Small hack for working with bdist_wininst.
# See http://mail.python.org/pipermail/distutils-sig/2004-August/004134.html
if len(sys.argv) > 1 and sys.argv[1] == 'bdist_wininst':
for file_info in data_files:
file_info[0] = '\\PURELIB\\%s' % file_info[0]
# Dynamically calculate the version based on django.VERSION.
version = __import__('django').get_version()
setup(
name = "Django",
version = '1.4.18-multisite-pozytywnie2',
url = 'http://www.djangoproject.com/',
author = 'Django Software Foundation',
author_email = '[email protected]',
description = 'A high-level Python Web framework that encourages rapid development and clean, pragmatic design.',
download_url = 'https://www.djangoproject.com/m/releases/1.4/Django-1.4.18.tar.gz',
packages = packages,
cmdclass = cmdclasses,
data_files = data_files,
scripts = ['django/bin/django-admin.py'],
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
bsd-3-clause
| -4,764,345,656,766,090,000 | -6,449,328,383,725,169,000 | 39.43 | 117 | 0.666337 | false |
wakatime/wakadump
|
setup.py
|
1
|
1515
|
from setuptools import setup
about = {}
with open('wakadump/__about__.py') as f:
exec(f.read(), about)
packages = [
about['__title__'],
]
install_requires = [x.strip() for x in open('requirements.txt').readlines()]
setup(
name=about['__title__'],
version=about['__version__'],
license=about['__license__'],
description=about['__description__'],
long_description=open('README.rst').read(),
author=about['__author__'],
author_email=about['__author_email__'],
url=about['__url__'],
packages=packages,
package_dir={about['__title__']: about['__title__']},
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=install_requires,
entry_points={
'console_scripts': ['wakadump = wakadump.cli:main'],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Text Editors',
],
)
|
bsd-3-clause
| -6,277,887,325,265,154,000 | 43,079,159,892,815,016 | 29.3 | 76 | 0.579538 | false |
Autodesk/molecular-design-toolkit
|
moldesign/integrators/verlet.py
|
1
|
2603
|
from __future__ import print_function, absolute_import, division
from future.builtins import *
from future import standard_library
standard_library.install_aliases()
# Copyright 2017 Autodesk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .. import units as u
from ..molecules import Trajectory
from ..utils import exports
from .base import IntegratorBase
@exports
class VelocityVerlet(IntegratorBase):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# TODO: raise exception if any constraints are requested ...
def run(self, run_for):
"""
Users won't call this directly - instead, use mol.run
Propagate position, momentum by a single timestep using velocity verlet
:param run_for: number of timesteps OR amount of time to run for
"""
if not self._prepped:
self.prep()
nsteps = self.time_to_steps(run_for, self.params.timestep)
# Set up trajectory and record the first frame
self.mol.time = 0.0 * u.default.time
self.traj = Trajectory(self.mol)
self.mol.calculate()
self.traj.new_frame()
next_trajectory_frame = self.params.frame_interval
# Dynamics loop
for istep in range(nsteps):
self.step()
if istep + 1 >= next_trajectory_frame:
self.traj.new_frame()
next_trajectory_frame += self.params.frame_interval
return self.traj
def prep(self):
self.time = 0.0 * self.params.timestep
self._prepped = True
def step(self):
# Move momenta from t-dt to t-dt/2
phalf = self.mol.momenta + 0.5 * self.params.timestep * self.mol.calc_forces(wait=True)
# Move positions from t-dt to t
self.mol.positions += phalf * self.params.timestep / self.mol.dim_masses
# Move momenta from t-dt/2 to t - triggers recomputed forces
self.mol.momenta = phalf + 0.5 * self.params.timestep * self.mol.calc_forces(wait=True)
self.time += self.params.timestep
self.mol.time = self.time
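# Example usage (a sketch; per the ``run`` docstring above, the integrator is
# driven through the molecule rather than by calling ``run`` directly -- the
# exact attachment call is an assumption):
#   mol.set_integrator(VelocityVerlet, timestep=0.5*u.fs, frame_interval=10)
#   traj = mol.run(500)  # 500 timesteps, or pass a time quantity instead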
|
apache-2.0
| 6,789,907,448,620,320,000 | -7,718,102,507,289,506,000 | 34.657534 | 95 | 0.661544 | false |
sajeeshcs/nested_projects_keystone
|
keystone/tests/mapping_fixtures.py
|
6
|
12218
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Fixtures for Federation Mapping."""
EMPLOYEE_GROUP_ID = "0cd5e9"
CONTRACTOR_GROUP_ID = "85a868"
TESTER_GROUP_ID = "123"
DEVELOPER_GROUP_ID = "xyz"
# Mapping summary:
# LastName Bo & not Contractor or SubContractor -> group 0cd5e9
# FirstName Jill & Contractor or SubContractor -> group 85a868
MAPPING_SMALL = {
"rules": [
{
"local": [
{
"group": {
"id": EMPLOYEE_GROUP_ID
}
},
{
"user": {
"name": "{0}"
}
}
],
"remote": [
{
"type": "UserName"
},
{
"type": "orgPersonType",
"not_any_of": [
"Contractor",
"SubContractor"
]
},
{
"type": "LastName",
"any_one_of": [
"Bo"
]
}
]
},
{
"local": [
{
"group": {
"id": CONTRACTOR_GROUP_ID
}
},
{
"user": {
"name": "{0}"
}
}
],
"remote": [
{
"type": "UserName"
},
{
"type": "orgPersonType",
"any_one_of": [
"Contractor",
"SubContractor"
]
},
{
"type": "FirstName",
"any_one_of": [
"Jill"
]
}
]
}
]
}
# Mapping summary:
# orgPersonType Admin or Big Cheese -> name {0} {1} email {2} and group 0cd5e9
# orgPersonType Customer -> user name {0} email {1}
# orgPersonType Tester and email .*@example.com$ -> groups 123 and xyz
MAPPING_LARGE = {
"rules": [
{
"local": [
{
"user": {
"name": "{0} {1}",
"email": "{2}"
},
"group": {
"id": EMPLOYEE_GROUP_ID
}
}
],
"remote": [
{
"type": "FirstName"
},
{
"type": "LastName"
},
{
"type": "Email"
},
{
"type": "orgPersonType",
"any_one_of": [
"Admin",
"Big Cheese"
]
}
]
},
{
"local": [
{
"user": {
"name": "{0}",
"email": "{1}"
}
}
],
"remote": [
{
"type": "UserName"
},
{
"type": "Email"
},
{
"type": "orgPersonType",
"not_any_of": [
"Admin",
"Employee",
"Contractor",
"Tester"
]
}
]
},
{
"local": [
{
"group": {
"id": TESTER_GROUP_ID
}
},
{
"group": {
"id": DEVELOPER_GROUP_ID
}
},
{
"user": {
"name": "{0}"
}
}
],
"remote": [
{
"type": "UserName"
},
{
"type": "orgPersonType",
"any_one_of": [
"Tester"
]
},
{
"type": "Email",
"any_one_of": [
".*@example.com$"
],
"regex": True
}
]
}
]
}
MAPPING_BAD_REQ = {
"rules": [
{
"local": [
{
"user": "name"
}
],
"remote": [
{
"type": "UserName",
"bad_requirement": [
"Young"
]
}
]
}
]
}
MAPPING_BAD_VALUE = {
"rules": [
{
"local": [
{
"user": "name"
}
],
"remote": [
{
"type": "UserName",
"any_one_of": "should_be_list"
}
]
}
]
}
MAPPING_NO_RULES = {
'rules': []
}
MAPPING_NO_REMOTE = {
"rules": [
{
"local": [
{
"user": "name"
}
],
"remote": []
}
]
}
MAPPING_MISSING_LOCAL = {
"rules": [
{
"remote": [
{
"type": "UserName",
"any_one_of": "should_be_list"
}
]
}
]
}
MAPPING_WRONG_TYPE = {
"rules": [
{
"local": [
{
"user": "{1}"
}
],
"remote": [
{
"not_type": "UserName"
}
]
}
]
}
MAPPING_MISSING_TYPE = {
"rules": [
{
"local": [
{
"user": "{1}"
}
],
"remote": [
{}
]
}
]
}
MAPPING_EXTRA_REMOTE_PROPS_NOT_ANY_OF = {
"rules": [
{
"local": [
{
"group": {
"id": "0cd5e9"
}
},
{
"user": {
"name": "{0}"
}
}
],
"remote": [
{
"type": "UserName"
},
{
"type": "orgPersonType",
"not_any_of": [
"SubContractor"
],
"invalid_type": "xyz"
}
]
}
]
}
MAPPING_EXTRA_REMOTE_PROPS_ANY_ONE_OF = {
"rules": [
{
"local": [
{
"group": {
"id": "0cd5e9"
}
},
{
"user": {
"name": "{0}"
}
}
],
"remote": [
{
"type": "UserName"
},
{
"type": "orgPersonType",
"any_one_of": [
"SubContractor"
],
"invalid_type": "xyz"
}
]
}
]
}
MAPPING_EXTRA_REMOTE_PROPS_JUST_TYPE = {
"rules": [
{
"local": [
{
"group": {
"id": "0cd5e9"
}
},
{
"user": {
"name": "{0}"
}
}
],
"remote": [
{
"type": "UserName"
},
{
"type": "orgPersonType",
"invalid_type": "xyz"
}
]
}
]
}
MAPPING_EXTRA_RULES_PROPS = {
"rules": [
{
"local": [
{
"group": {
"id": "0cd5e9"
}
},
{
"user": {
"name": "{0}"
}
}
],
"invalid_type": {
"id": "xyz",
},
"remote": [
{
"type": "UserName"
},
{
"type": "orgPersonType",
"not_any_of": [
"SubContractor"
]
}
]
}
]
}
MAPPING_TESTER_REGEX = {
"rules": [
{
"local": [
{
"user": {
"name": "{0}",
}
}
],
"remote": [
{
"type": "UserName"
}
]
},
{
"local": [
{
"group": {
"id": TESTER_GROUP_ID
}
}
],
"remote": [
{
"type": "orgPersonType",
"any_one_of": [
".*Tester*"
],
"regex": True
}
]
}
]
}
EMPLOYEE_ASSERTION = {
'Email': '[email protected]',
'UserName': 'tbo',
'FirstName': 'Tim',
'LastName': 'Bo',
'orgPersonType': 'Employee;BuildingX;'
}
EMPLOYEE_ASSERTION_PREFIXED = {
'PREFIX_Email': '[email protected]',
'PREFIX_UserName': 'tbo',
'PREFIX_FirstName': 'Tim',
'PREFIX_LastName': 'Bo',
'PREFIX_orgPersonType': 'SuperEmployee;BuildingX;'
}
CONTRACTOR_ASSERTION = {
'Email': '[email protected]',
'UserName': 'jsmith',
'FirstName': 'Jill',
'LastName': 'Smith',
'orgPersonType': 'Contractor;Non-Dev;'
}
ADMIN_ASSERTION = {
'Email': '[email protected]',
'UserName': 'bob',
'FirstName': 'Bob',
'LastName': 'Thompson',
'orgPersonType': 'Admin;Chief;'
}
CUSTOMER_ASSERTION = {
'Email': '[email protected]',
'UserName': 'bwilliams',
'FirstName': 'Beth',
'LastName': 'Williams',
'orgPersonType': 'Customer;'
}
TESTER_ASSERTION = {
'Email': '[email protected]',
'UserName': 'testacct',
'FirstName': 'Test',
'LastName': 'Account',
'orgPersonType': 'MadeupGroup;Tester;GroupX'
}
BAD_TESTER_ASSERTION = {
'Email': '[email protected]',
'UserName': 'Evil',
'FirstName': 'Test',
'LastName': 'Account',
'orgPersonType': 'Tester;'
}
MALFORMED_TESTER_ASSERTION = {
'Email': '[email protected]',
'UserName': 'testacct',
'FirstName': 'Test',
'LastName': 'Account',
'orgPersonType': 'Tester;',
'object': object(),
'dictionary': dict(zip('teststring', xrange(10))),
'tuple': tuple(xrange(5))
}
CONTRACTOR_MALFORMED_ASSERTION = {
'UserName': 'user',
'FirstName': object(),
'orgPersonType': 'Contractor'
}
|
apache-2.0
| 3,309,394,706,407,175,000 | -4,277,818,100,729,942,500 | 22.184061 | 78 | 0.281634 | false |
sktjdgns1189/android_kernel_pantech_ef63l
|
scripts/build-all.py
|
24
|
10334
|
#! /usr/bin/env python
# Copyright (c) 2009-2013, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Linux Foundation nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Build the kernel for all targets using the Android build environment.
#
# TODO: Accept arguments to indicate what to build.
import errno
import glob
from optparse import OptionParser
import subprocess
import os
import os.path
import re
import shutil
import sys
version = 'build-all.py, version 0.01'
build_dir = '../all-kernels'
make_command = ["vmlinux", "modules", "dtbs"]
make_env = os.environ
make_env.update({
'ARCH': 'arm',
'KCONFIG_NOTIMESTAMP': 'true' })
make_env.setdefault('CROSS_COMPILE', 'arm-none-linux-gnueabi-')
all_options = {}
def error(msg):
sys.stderr.write("error: %s\n" % msg)
def fail(msg):
"""Fail with a user-printed message"""
error(msg)
sys.exit(1)
def check_kernel():
"""Ensure that PWD is a kernel directory"""
if (not os.path.isfile('MAINTAINERS') or
not os.path.isfile('arch/arm/mach-msm/Kconfig')):
fail("This doesn't seem to be an MSM kernel dir")
def check_build():
"""Ensure that the build directory is present."""
if not os.path.isdir(build_dir):
try:
os.makedirs(build_dir)
except OSError as exc:
if exc.errno == errno.EEXIST:
pass
else:
raise
def update_config(file, str):
print 'Updating %s with \'%s\'\n' % (file, str)
defconfig = open(file, 'a')
defconfig.write(str + '\n')
defconfig.close()
def scan_configs():
"""Get the full list of defconfigs appropriate for this tree."""
names = {}
arch_pats = (
r'[fm]sm[0-9]*_defconfig',
r'apq*_defconfig',
r'qsd*_defconfig',
r'msmkrypton*_defconfig',
)
for p in arch_pats:
for n in glob.glob('arch/arm/configs/' + p):
names[os.path.basename(n)[:-10]] = n
return names
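# For example, 'arch/arm/configs/msm8974_defconfig' produces the entry
# names['msm8974'] = 'arch/arm/configs/msm8974_defconfig' (the [:-10] slice
# strips the 10-character '_defconfig' suffix from the key).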
class Builder:
def __init__(self, logname):
self.logname = logname
self.fd = open(logname, 'w')
def run(self, args):
devnull = open('/dev/null', 'r')
proc = subprocess.Popen(args, stdin=devnull,
env=make_env,
bufsize=0,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
count = 0
# for line in proc.stdout:
rawfd = proc.stdout.fileno()
while True:
line = os.read(rawfd, 1024)
if not line:
break
self.fd.write(line)
self.fd.flush()
if all_options.verbose:
sys.stdout.write(line)
sys.stdout.flush()
else:
for i in range(line.count('\n')):
count += 1
if count == 64:
count = 0
print
sys.stdout.write('.')
sys.stdout.flush()
print
result = proc.wait()
self.fd.close()
return result
failed_targets = []
def build(target):
dest_dir = os.path.join(build_dir, target)
log_name = '%s/log-%s.log' % (build_dir, target)
print 'Building %s in %s log %s' % (target, dest_dir, log_name)
if not os.path.isdir(dest_dir):
os.mkdir(dest_dir)
    perf_defconfig = 'msm8974_pantech_perf_defconfig'
print 'Perf defconfig : %s' % perf_defconfig
defconfig = 'arch/arm/configs/%s_defconfig' % target
dotconfig = '%s/.config' % dest_dir
savedefconfig = '%s/defconfig' % dest_dir
shutil.copyfile(defconfig, dotconfig)
staging_dir = 'install_staging'
modi_dir = '%s' % staging_dir
hdri_dir = '%s/usr' % staging_dir
shutil.rmtree(os.path.join(dest_dir, staging_dir), ignore_errors=True)
devnull = open('/dev/null', 'r')
subprocess.check_call(['make', 'O=%s' % dest_dir,
'PERF_DEFCONFIG=%s' % perf_defconfig,
'%s_defconfig' % target], env=make_env, stdin=devnull)
devnull.close()
if not all_options.updateconfigs:
# Build targets can be dependent upon the completion of previous
# build targets, so build them one at a time.
cmd_line = ['make',
'INSTALL_HDR_PATH=%s' % hdri_dir,
'INSTALL_MOD_PATH=%s' % modi_dir,
'O=%s' % dest_dir]
build_targets = []
for c in make_command:
if re.match(r'^-{1,2}\w', c):
cmd_line.append(c)
else:
build_targets.append(c)
for t in build_targets:
build = Builder(log_name)
result = build.run(cmd_line + [t])
if result != 0:
if all_options.keep_going:
failed_targets.append(target)
fail_or_error = error
else:
fail_or_error = fail
fail_or_error("Failed to build %s, see %s" %
(target, build.logname))
# Copy the defconfig back.
if all_options.configs or all_options.updateconfigs:
devnull = open('/dev/null', 'r')
subprocess.check_call(['make', 'O=%s' % dest_dir,
'savedefconfig'], env=make_env, stdin=devnull)
devnull.close()
shutil.copyfile(savedefconfig, defconfig)
def build_many(allconf, targets):
print "Building %d target(s)" % len(targets)
for target in targets:
if all_options.updateconfigs:
update_config(allconf[target], all_options.updateconfigs)
build(target)
if failed_targets:
fail('\n '.join(["Failed targets:"] +
[target for target in failed_targets]))
def main():
global make_command
check_kernel()
check_build()
configs = scan_configs()
usage = ("""
%prog [options] all -- Build all targets
%prog [options] target target ... -- List specific targets
%prog [options] perf -- Build all perf targets
%prog [options] noperf -- Build all non-perf targets""")
parser = OptionParser(usage=usage, version=version)
parser.add_option('--configs', action='store_true',
dest='configs',
help="Copy configs back into tree")
parser.add_option('--list', action='store_true',
dest='list',
help='List available targets')
parser.add_option('-v', '--verbose', action='store_true',
dest='verbose',
help='Output to stdout in addition to log file')
parser.add_option('--oldconfig', action='store_true',
dest='oldconfig',
help='Only process "make oldconfig"')
parser.add_option('--updateconfigs',
dest='updateconfigs',
help="Update defconfigs with provided option setting, "
"e.g. --updateconfigs=\'CONFIG_USE_THING=y\'")
parser.add_option('-j', '--jobs', type='int', dest="jobs",
help="Number of simultaneous jobs")
parser.add_option('-l', '--load-average', type='int',
dest='load_average',
help="Don't start multiple jobs unless load is below LOAD_AVERAGE")
parser.add_option('-k', '--keep-going', action='store_true',
dest='keep_going', default=False,
help="Keep building other targets if a target fails")
parser.add_option('-m', '--make-target', action='append',
help='Build the indicated make target (default: %s)' %
' '.join(make_command))
(options, args) = parser.parse_args()
global all_options
all_options = options
if options.list:
print "Available targets:"
for target in configs.keys():
print " %s" % target
sys.exit(0)
if options.oldconfig:
make_command = ["oldconfig"]
elif options.make_target:
make_command = options.make_target
if options.jobs:
make_command.append("-j%d" % options.jobs)
if options.load_average:
make_command.append("-l%d" % options.load_average)
if args == ['all']:
build_many(configs, configs.keys())
elif args == ['perf']:
targets = []
for t in configs.keys():
if "perf" in t:
targets.append(t)
build_many(configs, targets)
elif args == ['noperf']:
targets = []
for t in configs.keys():
if "perf" not in t:
targets.append(t)
build_many(configs, targets)
elif len(args) > 0:
targets = []
for t in args:
if t not in configs.keys():
parser.error("Target '%s' not one of %s" % (t, configs.keys()))
targets.append(t)
build_many(configs, targets)
else:
parser.error("Must specify a target to build, or 'all'")
if __name__ == "__main__":
main()
|
gpl-2.0
| -4,091,213,838,769,988,000 | 8,448,014,877,735,981,000 | 34.030508 | 80 | 0.586027 | false |
mgohde/nudge-tools
|
tools/dbtool.py
|
1
|
4655
|
#!/usr/bin/python
import os
import sys
# Because of course Python has XML parsing built in.
import xml.etree.ElementTree as ET
# dbtool.py -- Quick and dirty tool to insert XML formatted storylines into the database.
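# A minimal story document this tool understands (a sketch inferred from the
# parsing code below; tag names other than text/answers/dest are arbitrary):
#   <story title="Example">
#     <node id="start">
#       <text>You wake up.</text>
#       <answers>
#         <answer>
#           <text>Go back to sleep</text>
#           <dest p="100">END</dest>
#         </answer>
#       </answers>
#     </node>
#   </story>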
def nodesearch(nodename, node):
for n in node:
if n.tag==nodename:
return n
return None
def manynodesearch(nodename, node):
nlist=[]
for n in node:
if n.tag==nodename:
nlist.append(n)
return nlist
def sanitize(text):
"""Replaces all special characters in text with escaped characters."""
newstr=''
for c in text:
if c=='"' or c=="'" or c=='%' or c=='_':
newstr=newstr+'\\'
newstr=newstr+c
return newstr
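# For example (a sketch): sanitize('50% of "users"') returns the string
# 50\% of \"users\" -- each occurrence of " ' % _ gains a preceding
# backslash before being embedded in the generated SQL statements.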
def readstory(infile):
contents=infile.read()
curid=1
    position=0
resid=1
root=ET.fromstring(contents)
storytitle=root.attrib['title']
for subnode in root:
answerchoice='A'
snid=subnode.attrib['id']
textnode=nodesearch("text", subnode)
answernode=nodesearch("answers", subnode)
print "-- Generated statements for node: %s" % snid
# Check all destinations to see if there is an END node, and if there is, insert it into the rewards table.
for a in answernode:
destsearch=manynodesearch("dest", a)
for d in destsearch:
# The way nudge is currently implemented, rewards cannot be given out per user action, however
# this may change in the future, so it is beneficial to be able to include per-decision
# rewards now:
try:
numpoints=int(d.attrib['points'])
rewardname=d.attrib['reward']
rewardtext=d.attrib['rewardtext']
endname="e%d" % curid
print "INSERT INTO rewardss (reward, statement, points, end_id, end, storytitle) VALUES ('%s', '%s', %d, %d, '%s', '%s');" % (rewardname, rewardtext, numpoints, curid, endname, storytitle)
except:
pass
if(d.text=="END"):
position=1
print "INSERT INTO storytable VALUES (%d,'%s','%s','%s',%d);" % (curid, storytitle, snid, sanitize(textnode.text), position)
# This ensures that the story will have valid entry and exit points.
position=2
curid+=1
for a in answernode:
optiontextnode=nodesearch("text", a)
destsearch=manynodesearch("dest", a)
minprob=0
print "INSERT INTO answers VALUES ('%s','%s','%s','%s');" % (storytitle, snid, answerchoice, sanitize(optiontextnode.text))
for d in destsearch:
maxprob=minprob+int(d.attrib['p'])
print "INSERT INTO results VALUES (%d,'%s','%s','%s',%d,%d,'%s');" % (resid, storytitle, snid, answerchoice, minprob, maxprob, d.text)
minprob=minprob+int(d.attrib['p'])
resid+=1
answerchoice=chr(ord(answerchoice)+1)
def delstory(infile):
contents=infile.read()
root=ET.fromstring(contents)
storytitle=root.attrib['title']
print "-- Generated statements for story: %s" % storytitle
print "DELETE FROM storytable WHERE storytitle='%s';" % storytitle
print "DELETE FROM answers WHERE storytitle='%s';" % storytitle
print "DELETE FROM results WHERE storytitle='%s';" % storytitle
print "DELETE FROM rewardss WHERE storytitle='%s';" % storytitle
def printusage(progname):
print "Usage: %s [-d] [input filename]" % progname
print "Generate SQL statements to install or delete a storyline from a Nudge SQL database"
print "Generates statements to install if -d is not specified."
print ""
print "Arguments:"
print " -d\tGenerate statements to delete a storyline."
print "[input filename] may be blank. In this case, %s attempts to read a story from standard input." % progname
def main(args):
infile=sys.stdin
delete=False
# Insert arg handling fanciness here
if len(args)!=1:
if args[1]=='-d':
delete=True
            if len(args)>2:
infile=open(args[2], 'r')
        elif args[1]=='-h' or args[1]=='--help':
printusage(args[0])
return
else:
infile=open(args[1], 'r')
if not delete:
readstory(infile)
else:
delstory(infile)
if __name__=="__main__":
main(sys.argv)
|
gpl-2.0
| 45,234,308,240,368,740 | -7,378,030,594,347,416,000 | 30.241611 | 208 | 0.570569 | false |
elgambitero/FreeCAD_sf_master
|
src/Mod/Path/InitGui.py
|
16
|
6342
|
#***************************************************************************
#* (c) Yorik van Havre ([email protected]) 2014 *
#* *
#* This file is part of the FreeCAD CAx development system. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* FreeCAD is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Lesser General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with FreeCAD; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************/
class PathWorkbench ( Workbench ):
"Path workbench"
Icon = """
/* XPM */
static char * Path_xpm[] = {
"16 16 9 1",
" c None",
". c #262623",
"+ c #452F16",
"@ c #525451",
"# c #7E5629",
"$ c #838582",
"% c #BE823B",
"& c #989A97",
"* c #CFD1CE",
" .@@@@@@@@@@. ",
" $**********$ ",
" @$$$&&&&$$$@ ",
" .$&&&&$. ",
" @******. ",
" @******. ",
" ...@@... ",
" .&&@. ",
" .@. . ",
" .&&. ",
" .$*$. ",
" .$. . ",
"+###+ .@&.+###+",
"+%%%+ .$$. +%%%+",
"+%%%%#.. .#%%%%+",
".++++++..++++++."};
"""
MenuText = "Path"
ToolTip = "Path workbench"
def Initialize(self):
# load the builtin modules
import Path
import PathGui
# load python modules
from PathScripts import PathProfile
from PathScripts import PathPocket
from PathScripts import PathDrilling
from PathScripts import PathDressup
from PathScripts import PathHop
from PathScripts import PathCopy
from PathScripts import PathFixture
from PathScripts import PathCompoundExtended
from PathScripts import PathProject
from PathScripts import PathToolTableEdit
from PathScripts import PathStock
from PathScripts import PathPlane
from PathScripts import PathPost
from PathScripts import PathToolLenOffset
from PathScripts import PathLoadTool
from PathScripts import PathComment
from PathScripts import PathStop
from PathScripts import PathMachine
from PathScripts import PathFromShape
from PathScripts import PathKurve
# build commands list
commands =["Path_Stock","Path_Plane","Path_Fixture","Path_ToolTableEdit","Path_Profile","Path_Kurve","Path_Pocket","Path_Drilling",\
"Path_Dressup","Path_Hop","Path_Shape","Path_Copy","Path_CompoundExtended","Path_Project"]
projcmdlist = ["Path_Project", "Path_ToolTableEdit","Path_Post"]
prepcmdlist = ["Path_Plane","Path_Fixture","Path_LoadTool","Path_ToolLenOffset","Path_Comment","Path_Stop"]
opcmdlist = ["Path_Profile","Path_Kurve","Path_Pocket","Path_Drilling","Path_FromShape"]
modcmdlist = ["Path_Copy","Path_CompoundExtended","Path_Dressup","Path_Hop"]
# Add commands to menu and toolbar
def QT_TRANSLATE_NOOP(scope, text): return text
# self.appendToolbar(QT_TRANSLATE_NOOP("PathWorkbench","Path"),commands)
self.appendToolbar(QT_TRANSLATE_NOOP("PathWorkbench","Commands for setting up Project"),projcmdlist)
        self.appendToolbar(QT_TRANSLATE_NOOP("PathWorkbench","Preparatory Commands"),prepcmdlist)
self.appendToolbar(QT_TRANSLATE_NOOP("PathWorkbench","Operations"),opcmdlist)
        self.appendToolbar(QT_TRANSLATE_NOOP("PathWorkbench","Commands for grouping, copying, and organizing operations"),modcmdlist)
# self.appendMenu(QT_TRANSLATE_NOOP("PathWorkbench","Path"),commands)
self.appendMenu([QT_TRANSLATE_NOOP("PathWorkbench","Path"),QT_TRANSLATE_NOOP("Path","Project Setup")],projcmdlist)
        self.appendMenu([QT_TRANSLATE_NOOP("PathWorkbench","Path"),QT_TRANSLATE_NOOP("Path","Preparatory Commands")],prepcmdlist)
self.appendMenu([QT_TRANSLATE_NOOP("PathWorkbench","Path"),QT_TRANSLATE_NOOP("Path","New Operation")],opcmdlist)
self.appendMenu([QT_TRANSLATE_NOOP("PathWorkbench","Path"),QT_TRANSLATE_NOOP("Path","Path Modification")],modcmdlist)
# Add preferences pages
import os
FreeCADGui.addPreferencePage(FreeCAD.getHomePath()+os.sep+"Mod"+os.sep+"Path"+os.sep+"PathScripts"+os.sep+"DlgSettingsPath.ui","Path")
Log ('Loading Path workbench... done\n')
def GetClassName(self):
return "Gui::PythonWorkbench"
def Activated(self):
Msg("Path workbench activated\n")
def Deactivated(self):
Msg("Path workbench deactivated\n")
Gui.addWorkbench(PathWorkbench())
FreeCAD.addImportType("GCode (*.nc *.gc *.ncc *.ngc *.cnc *.tap)","PathGui")
FreeCAD.addExportType("GCode (*.nc *.gc *.ncc *.ngc *.cnc *.tap)","PathGui")
|
lgpl-2.1
| 2,760,602,811,446,002,000 | -987,324,220,682,529,500 | 47.546875 | 142 | 0.511826 | false |
percy-g2/Novathor_xperia_u8500
|
6.2.A.1.100/external/webkit/Tools/Scripts/webkitpy/common/system/ospath_unittest.py
|
15
|
2518
|
# Copyright (C) 2010 Chris Jerdonek ([email protected])
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit tests for ospath.py."""
import os
import unittest
from webkitpy.common.system.ospath import relpath
# Make sure the tests in this class are platform independent.
class RelPathTest(unittest.TestCase):
"""Tests relpath()."""
os_path_abspath = lambda self, path: path
def _rel_path(self, path, abs_start_path):
return relpath(path, abs_start_path, self.os_path_abspath)
def test_same_path(self):
rel_path = self._rel_path("WebKit", "WebKit")
self.assertEquals(rel_path, "")
def test_long_rel_path(self):
start_path = "WebKit"
expected_rel_path = os.path.join("test", "Foo.txt")
path = os.path.join(start_path, expected_rel_path)
rel_path = self._rel_path(path, start_path)
self.assertEquals(expected_rel_path, rel_path)
def test_none_rel_path(self):
"""Test _rel_path() with None return value."""
start_path = "WebKit"
path = os.path.join("other_dir", "foo.txt")
rel_path = self._rel_path(path, start_path)
self.assertTrue(rel_path is None)
rel_path = self._rel_path("Tools", "WebKit")
self.assertTrue(rel_path is None)
|
gpl-2.0
| -2,642,380,627,061,280,000 | 5,985,105,022,641,499,000 | 39.612903 | 79 | 0.70969 | false |
haxwithaxe/qutebrowser
|
qutebrowser/browser/history.py
|
2
|
7443
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015-2016 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Simple history which gets written to disk."""
import time
import collections
from PyQt5.QtCore import pyqtSignal, QUrl
from PyQt5.QtWebKit import QWebHistoryInterface
from qutebrowser.utils import utils, objreg, standarddir, log
from qutebrowser.config import config
from qutebrowser.misc import lineparser
class HistoryEntry:
"""A single entry in the web history.
Attributes:
atime: The time the page was accessed.
url: The URL which was accessed as QUrl.
url_string: The URL which was accessed as string.
"""
def __init__(self, atime, url):
self.atime = float(atime)
self.url = QUrl(url)
self.url_string = url
def __repr__(self):
return utils.get_repr(self, constructor=True, atime=self.atime,
url=self.url.toDisplayString())
def __str__(self):
return '{} {}'.format(int(self.atime), self.url_string)
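# On disk, each history line is the string form of a HistoryEntry, i.e.
# "<atime> <url>" (a sketch with an arbitrary timestamp):
#   1455392054 https://www.example.com/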
class WebHistory(QWebHistoryInterface):
"""A QWebHistoryInterface which supports being written to disk.
Attributes:
_lineparser: The AppendLineParser used to save the history.
_history_dict: An OrderedDict of URLs read from the on-disk history.
_new_history: A list of HistoryEntry items of the current session.
_saved_count: How many HistoryEntries have been written to disk.
_initial_read_started: Whether async_read was called.
_initial_read_done: Whether async_read has completed.
_temp_history: OrderedDict of temporary history entries before
async_read was called.
Signals:
add_completion_item: Emitted before a new HistoryEntry is added.
arg: The new HistoryEntry.
item_added: Emitted after a new HistoryEntry is added.
arg: The new HistoryEntry.
"""
add_completion_item = pyqtSignal(HistoryEntry)
item_added = pyqtSignal(HistoryEntry)
async_read_done = pyqtSignal()
def __init__(self, parent=None):
super().__init__(parent)
self._initial_read_started = False
self._initial_read_done = False
self._lineparser = lineparser.AppendLineParser(
standarddir.data(), 'history', parent=self)
self._history_dict = collections.OrderedDict()
self._temp_history = collections.OrderedDict()
self._new_history = []
self._saved_count = 0
objreg.get('save-manager').add_saveable(
'history', self.save, self.item_added)
def __repr__(self):
return utils.get_repr(self, length=len(self))
def __getitem__(self, key):
return self._new_history[key]
def __iter__(self):
return iter(self._history_dict.values())
def __len__(self):
return len(self._history_dict)
def async_read(self):
"""Read the initial history."""
if self._initial_read_started:
            log.init.debug("Ignoring async_read() because reading has already started.")
return
self._initial_read_started = True
if standarddir.data() is None:
self._initial_read_done = True
self.async_read_done.emit()
return
with self._lineparser.open():
for line in self._lineparser:
yield
data = line.rstrip().split(maxsplit=1)
if not data:
# empty line
continue
elif len(data) != 2:
# other malformed line
log.init.warning("Invalid history entry {!r}!".format(
line))
continue
atime, url = data
if atime.startswith('\0'):
log.init.warning(
"Removing NUL bytes from entry {!r} - see "
"https://github.com/The-Compiler/qutebrowser/issues/"
"670".format(data))
atime = atime.lstrip('\0')
# This de-duplicates history entries; only the latest
# entry for each URL is kept. If you want to keep
# information about previous hits change the items in
# old_urls to be lists or change HistoryEntry to have a
# list of atimes.
entry = HistoryEntry(atime, url)
self._add_entry(entry)
self._initial_read_done = True
self.async_read_done.emit()
for url, entry in self._temp_history.items():
self._new_history.append(entry)
self._add_entry(entry)
self.add_completion_item.emit(entry)
def _add_entry(self, entry, target=None):
"""Add an entry to self._history_dict or another given OrderedDict."""
if target is None:
target = self._history_dict
target[entry.url_string] = entry
target.move_to_end(entry.url_string)
def get_recent(self):
"""Get the most recent history entries."""
old = self._lineparser.get_recent()
return old + [str(e) for e in self._new_history]
def save(self):
"""Save the history to disk."""
new = (str(e) for e in self._new_history[self._saved_count:])
self._lineparser.new_data = new
self._lineparser.save()
self._saved_count = len(self._new_history)
def addHistoryEntry(self, url_string):
"""Called by WebKit when an URL should be added to the history.
Args:
url_string: An url as string to add to the history.
"""
if not url_string:
return
if config.get('general', 'private-browsing'):
return
entry = HistoryEntry(time.time(), url_string)
if self._initial_read_done:
self.add_completion_item.emit(entry)
self._new_history.append(entry)
self._add_entry(entry)
self.item_added.emit(entry)
else:
self._add_entry(entry, target=self._temp_history)
def historyContains(self, url_string):
"""Called by WebKit to determine if an URL is contained in the history.
Args:
url_string: The URL (as string) to check for.
Return:
True if the url is in the history, False otherwise.
"""
return url_string in self._history_dict
def init(parent=None):
"""Initialize the web history.
Args:
parent: The parent to use for WebHistory.
"""
history = WebHistory(parent)
objreg.register('web-history', history)
QWebHistoryInterface.setDefaultInterface(history)
|
gpl-3.0
| 6,513,701,982,349,146,000 | -4,790,356,006,871,766,000 | 34.274882 | 79 | 0.604058 | false |
parinporecha/backend_gtgonline
|
GTG/tests/test_urlregex.py
|
1
|
1375
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2012 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
""" Tests for URL regex """
import unittest
from GTG.tools.urlregex import match
class TestURLRegex(unittest.TestCase):
""" Test extractor of URL from text """
def test_anchor_amperstand(self):
""" Reproducer for bug #1023555 """
url = "http://test.com/#hi&there"
self.assertEqual(match(url).group(0), url)
def test_suite():
return unittest.TestLoader().loadTestsFromName(__name__)
|
gpl-3.0
| 1,248,809,982,406,911,500 | -4,177,062,843,174,527,000 | 37.194444 | 79 | 0.637091 | false |
Nolski/airmozilla
|
airmozilla/manage/forms.py
|
1
|
29803
|
import re
import datetime
from collections import defaultdict
import dateutil.parser
import pytz
from django import forms
from django.db.models import Count
from django.conf import settings
from django.contrib.auth.models import User, Group
from django.utils.timezone import utc
from django.utils.safestring import mark_safe
from funfactory.urlresolvers import reverse
from slugify import slugify
from airmozilla.base.forms import BaseModelForm, BaseForm
from airmozilla.manage import url_transformer
from airmozilla.main.models import (
Approval,
Event,
EventTweet,
Location,
Region,
Tag,
Template,
Channel,
SuggestedEvent,
SuggestedEventComment,
URLMatch,
EventAssignment,
LocationDefaultEnvironment,
RecruitmentMessage,
Picture,
Topic,
)
from airmozilla.comments.models import Discussion, Comment
from airmozilla.surveys.models import Question, Survey
from airmozilla.staticpages.models import StaticPage
from .widgets import PictureWidget
TIMEZONE_CHOICES = [(tz, tz.replace('_', ' ')) for tz in pytz.common_timezones]
ONE_HOUR = 60 * 60
class UserEditForm(BaseModelForm):
class Meta:
model = User
fields = ('is_active', 'is_staff', 'is_superuser', 'groups')
def clean(self):
cleaned_data = super(UserEditForm, self).clean()
is_active = cleaned_data.get('is_active')
is_staff = cleaned_data.get('is_staff')
is_superuser = cleaned_data.get('is_superuser')
groups = cleaned_data.get('groups')
if is_superuser and not is_staff:
raise forms.ValidationError('Superusers must be staff.')
if is_staff and not is_active:
raise forms.ValidationError('Staff must be active.')
if is_staff and not is_superuser and not groups:
raise forms.ValidationError(
'Non-superuser staff must belong to a group.'
)
return cleaned_data
class GroupEditForm(BaseModelForm):
def __init__(self, *args, **kwargs):
super(GroupEditForm, self).__init__(*args, **kwargs)
self.fields['name'].required = True
choices = self.fields['permissions'].choices
self.fields['permissions'] = forms.MultipleChoiceField(
choices=choices,
widget=forms.CheckboxSelectMultiple,
required=False
)
class Meta:
model = Group
class EventRequestForm(BaseModelForm):
tags = forms.CharField(required=False)
class Meta:
model = Event
widgets = {
'description': forms.Textarea(attrs={'rows': 4}),
'short_description': forms.Textarea(attrs={'rows': 2}),
'call_info': forms.Textarea(attrs={'rows': 3}),
'additional_links': forms.Textarea(attrs={'rows': 3}),
'template_environment': forms.Textarea(attrs={'rows': 3}),
'remote_presenters': forms.Textarea(attrs={'rows': 3}),
'start_time': forms.DateTimeInput(format='%Y-%m-%d %H:%M'),
'estimated_duration': forms.widgets.Select(
choices=Event.ESTIMATED_DURATION_CHOICES
),
}
exclude = ('featured', 'status', 'archive_time', 'slug')
# Fields specified to enforce order
fields = (
'title', 'placeholder_img', 'picture',
'description',
'short_description', 'location', 'start_time',
'estimated_duration',
'channels', 'tags', 'call_info',
'remote_presenters',
'additional_links', 'privacy', 'popcorn_url'
)
def __init__(self, *args, **kwargs):
super(EventRequestForm, self).__init__(*args, **kwargs)
self.fields['channels'].help_text = (
'<a href="%s" class="btn btn-default" target="_blank">'
'<i class="glyphicon glyphicon-plus-sign"></i>'
'New channel'
'</a>' % reverse('manage:channel_new'))
self.fields['placeholder_img'].label = 'Placeholder image'
if 'instance' in kwargs:
event = kwargs['instance']
approvals = event.approval_set.all()
self.initial['approvals'] = [app.group for app in approvals]
if event.location:
self.fields['start_time'].help_text = (
'Time zone of this date is that of {0}.'.format(
event.location.timezone
)
)
# when the django forms present the start_time form field,
# it's going to first change it to UTC, then strftime it
self.initial['start_time'] = (
event.location_time.replace(tzinfo=utc)
)
else:
self.fields['start_time'].help_text = (
'Since there is no location, time zone of this date '
' is UTC.'
)
if event.pk:
tags_formatted = ','.join(x.name for x in event.tags.all())
self.initial['tags'] = tags_formatted
def clean_tags(self):
tags = self.cleaned_data['tags']
split_tags = [t.strip() for t in tags.split(',') if t.strip()]
final_tags = []
for tag_name in split_tags:
try:
t = Tag.objects.get(name=tag_name)
except Tag.DoesNotExist:
try:
t = Tag.objects.get(name__iexact=tag_name)
except Tag.DoesNotExist:
t = Tag.objects.create(name=tag_name)
final_tags.append(t)
return final_tags
def clean_slug(self):
"""Enforce unique slug across current slugs and old slugs."""
slug = self.cleaned_data['slug']
if Event.objects.filter(slug=slug).exclude(pk=self.instance.id):
raise forms.ValidationError('This slug is already in use.')
return slug
@staticmethod
def _check_staticpage_slug(slug):
if StaticPage.objects.filter(url__startswith='/%s' % slug).count():
raise forms.ValidationError(
"The default slug for event would clash with an existing "
"static page with the same URL. It might destroy existing "
"URLs that people depend on."
)
def clean(self):
data = super(EventRequestForm, self).clean()
if data.get('title') and not data.get('slug'):
# this means you have submitted a form without being explicit
# about what the slug will be
self._check_staticpage_slug(slugify(data.get('title')).lower())
elif data.get('slug'):
# are you trying to change it?
if self.instance.slug != data['slug']:
# apparently, you want to change to a new slug
self._check_staticpage_slug(data['slug'])
return data
class EventEditForm(EventRequestForm):
approvals = forms.ModelMultipleChoiceField(
queryset=Group.objects.filter(permissions__codename='change_approval'),
required=False,
widget=forms.CheckboxSelectMultiple()
)
curated_groups = forms.CharField(
required=False,
help_text='Curated groups only matter if the event is open to'
' "%s".' % [x[1] for x in Event.PRIVACY_CHOICES
if x[0] == Event.PRIVACY_CONTRIBUTORS][0]
)
class Meta(EventRequestForm.Meta):
exclude = ('archive_time',)
# Fields specified to enforce order
fields = (
'title', 'slug', 'status', 'privacy', 'featured', 'template',
'template_environment', 'placeholder_img', 'picture',
'location',
'description', 'short_description', 'start_time',
'estimated_duration',
'archive_time',
'channels', 'tags',
'call_info', 'additional_links', 'remote_presenters',
'approvals',
'popcorn_url',
'pin',
'recruitmentmessage',
)
def __init__(self, *args, **kwargs):
super(EventEditForm, self).__init__(*args, **kwargs)
if 'pin' in self.fields:
self.fields['pin'].help_text = (
"Use of pins is deprecated. Use Curated groups instead."
)
self.fields['popcorn_url'].label = 'Popcorn URL'
if 'recruitmentmessage' in self.fields:
self.fields['recruitmentmessage'].required = False
self.fields['recruitmentmessage'].label = 'Recruitment message'
self.fields.keyOrder.pop(
self.fields.keyOrder.index('curated_groups')
)
self.fields.keyOrder.insert(
self.fields.keyOrder.index('privacy') + 1,
'curated_groups'
)
self.fields['location'].queryset = (
Location.objects.filter(is_active=True).order_by('name')
)
if self.instance and self.instance.id:
            # Checking for id because it might be an instance that has
            # never been saved before.
self.fields['picture'].widget = PictureWidget(self.instance)
# make the list of approval objects depend on requested approvals
# print Group.approval_set.filter(event=self.instance)
group_ids = [
x[0] for x in
Approval.objects
.filter(event=self.instance).values_list('group')
]
self.fields['approvals'].queryset = Group.objects.filter(
id__in=group_ids
)
# If the event has a duration, it doesn't make sense to
# show the estimated_duration widget.
if self.instance.duration:
del self.fields['estimated_duration']
elif self.initial.get('picture'):
self.fields['picture'].widget = PictureWidget(
Picture.objects.get(id=self.initial['picture']),
editable=False
)
else:
# too early to associate with a picture
del self.fields['picture']
def clean_pin(self):
value = self.cleaned_data['pin']
if value and len(value) < 4:
raise forms.ValidationError("Pin too short to be safe")
return value
def clean(self):
cleaned_data = super(EventEditForm, self).clean()
if not (
cleaned_data.get('placeholder_img') or cleaned_data.get('picture')
):
raise forms.ValidationError("Must have a placeholder or a Picture")
return cleaned_data
class EventExperiencedRequestForm(EventEditForm):
class Meta(EventEditForm.Meta):
exclude = ('featured', 'archive_time', 'slug')
# Fields specified to enforce order
fields = (
'title', 'status', 'privacy', 'template',
'template_environment', 'placeholder_img', 'picture',
'description',
'short_description', 'location', 'start_time',
'estimated_duration',
'channels', 'tags', 'call_info',
'additional_links', 'remote_presenters',
'approvals', 'pin', 'popcorn_url', 'recruitmentmessage'
)
class EventArchiveForm(BaseModelForm):
class Meta(EventRequestForm.Meta):
exclude = ()
fields = ('template', 'template_environment')
class EventArchiveTimeForm(BaseModelForm):
class Meta(EventRequestForm.Meta):
exclude = ()
fields = ('archive_time',)
def __init__(self, *args, **kwargs):
super(EventArchiveTimeForm, self).__init__(*args, **kwargs)
self.fields['archive_time'].help_text = (
"Input timezone is <b>UTC</b>"
)
if self.initial['archive_time']:
# Force it to a UTC string so Django doesn't convert it
# to a timezone-less string in the settings.TIME_ZONE timezone.
self.initial['archive_time'] = (
self.initial['archive_time'].strftime('%Y-%m-%d %H:%M:%S')
)
def clean_archive_time(self):
value = self.cleaned_data['archive_time']
# force it back to UTC
if value:
value = value.replace(tzinfo=utc)
return value
class EventTweetForm(BaseModelForm):
class Meta:
model = EventTweet
fields = (
'text',
'include_placeholder',
'send_date',
)
widgets = {
'text': forms.Textarea(attrs={
'autocomplete': 'off',
'data-maxlength': 140,
'rows': 2,
})
}
def __init__(self, event, *args, **kwargs):
super(EventTweetForm, self).__init__(*args, **kwargs)
self.fields['text'].help_text = (
'<b class="char-counter">140</b> characters left. '
'<span class="char-counter-warning"><b>Note!</b> Sometimes '
'Twitter can count it as longer than it appears if you '
'include a URL. '
'It\'s usually best to leave a little room.</span>'
)
# it's a NOT NULL field but it defaults to NOW()
# in the views code
self.fields['send_date'].required = False
if event.tags.all():
def pack_tags(tags):
return '[%s]' % (','.join('"%s"' % x for x in tags))
self.fields['text'].help_text += (
'<br><a href="#" class="include-event-tags" '
'data-tags=\'%s\'>include all event tags</a>'
% pack_tags([x.name for x in event.tags.all()])
)
if event.placeholder_img or event.picture:
from airmozilla.main.helpers import thumbnail
if event.picture:
pic = event.picture.file
else:
pic = event.placeholder_img
thumb = thumbnail(pic, '160x90', crop='center')
self.fields['include_placeholder'].help_text = (
'<img src="%(url)s" alt="placeholder" class="thumbnail" '
'width="%(width)s" width="%(height)s">' %
{
'url': thumb.url,
'width': thumb.width,
'height': thumb.height
}
)
else:
del self.fields['include_placeholder']
if event.location:
self.fields['send_date'].help_text = (
'Timezone is %s' % event.location.timezone
)
class ChannelForm(BaseModelForm):
class Meta:
model = Channel
exclude = ('created',)
def __init__(self, *args, **kwargs):
super(ChannelForm, self).__init__(*args, **kwargs)
self.fields['parent'].required = False
if kwargs.get('instance'):
self.fields['parent'].choices = [
(x, y) for (x, y)
in self.fields['parent'].choices
if x != kwargs['instance'].pk
]
def clean(self):
cleaned_data = super(ChannelForm, self).clean()
if 'always_show' in cleaned_data and 'never_show' in cleaned_data:
# if one is true, the other one can't be
if cleaned_data['always_show'] and cleaned_data['never_show']:
raise forms.ValidationError(
"Can't both be on always and never shown"
)
return cleaned_data
class TemplateEditForm(BaseModelForm):
class Meta:
model = Template
widgets = {
'content': forms.Textarea(attrs={'rows': 20})
}
class TemplateMigrateForm(BaseForm):
template = forms.ModelChoiceField(
widget=forms.widgets.RadioSelect(),
queryset=Template.objects.all()
)
def __init__(self, *args, **kwargs):
self.instance = kwargs.pop('instance')
super(TemplateMigrateForm, self).__init__(*args, **kwargs)
scheduled = defaultdict(int)
removed = defaultdict(int)
events = Event.objects.all()
for each in events.values('template').annotate(Count('template')):
scheduled[each['template']] = each['template__count']
events = events.filter(status=Event.STATUS_REMOVED)
for each in events.values('template').annotate(Count('template')):
removed[each['template']] = each['template__count']
choices = [('', '---------')]
other_templates = Template.objects.exclude(id=self.instance.id)
for template in other_templates.order_by('name'):
choices.append((
template.id,
'{0} ({1} events, {2} removed)'.format(
template.name,
scheduled[template.id],
removed[template.id],
)
))
self.fields['template'].choices = choices
class RecruitmentMessageEditForm(BaseModelForm):
class Meta:
model = RecruitmentMessage
widgets = {
'notes': forms.Textarea(attrs={'rows': 3})
}
exclude = ('modified_user', 'created')
class SurveyEditForm(BaseModelForm):
class Meta:
model = Survey
exclude = ('created', 'modified')
def __init__(self, *args, **kwargs):
super(SurveyEditForm, self).__init__(*args, **kwargs)
self.fields['active'].validators.append(self.validate_active)
self.fields['events'].required = False
self.fields['events'].queryset = (
self.fields['events'].queryset.order_by('title')
)
def validate_active(self, value):
if value and not self.instance.question_set.count():
raise forms.ValidationError(
"Survey must have at least one question in order to be active"
)
class SurveyNewForm(BaseModelForm):
class Meta:
model = Survey
fields = ('name', )
class LocationEditForm(BaseModelForm):
timezone = forms.ChoiceField(choices=TIMEZONE_CHOICES)
def __init__(self, *args, **kwargs):
super(LocationEditForm, self).__init__(*args, **kwargs)
if 'instance' in kwargs:
initial = kwargs['instance'].timezone
else:
initial = settings.TIME_ZONE
self.initial['timezone'] = initial
class Meta:
model = Location
class LocationDefaultEnvironmentForm(BaseModelForm):
class Meta:
model = LocationDefaultEnvironment
fields = ('privacy', 'template', 'template_environment')
class RegionEditForm(BaseModelForm):
class Meta:
model = Region
class TopicEditForm(BaseModelForm):
class Meta:
model = Topic
def __init__(self, *args, **kwargs):
super(TopicEditForm, self).__init__(*args, **kwargs)
self.fields['topic'].widget = forms.widgets.TextInput(attrs={
'placeholder': 'for example Partners for Firefox OS'
})
class ApprovalForm(BaseModelForm):
class Meta:
model = Approval
fields = ('comment',)
widgets = {
'comment': forms.Textarea(attrs={'rows': 3})
}
class HeadersField(forms.CharField):
widget = forms.widgets.Textarea
def __init__(self, *args, **kwargs):
super(HeadersField, self).__init__(*args, **kwargs)
self.help_text = self.help_text or mark_safe(
"For example <code>Content-Type: text/xml</code>"
)
def to_python(self, value):
if not value:
return {}
headers = {}
for line in [x.strip() for x in value.splitlines() if x.strip()]:
try:
key, value = line.split(':', 1)
except ValueError:
raise forms.ValidationError(line)
headers[key.strip()] = value.strip()
return headers
def prepare_value(self, value):
if isinstance(value, basestring):
# already prepared
return value
elif value is None:
return ''
out = []
for key in sorted(value):
out.append('%s: %s' % (key, value[key]))
return '\n'.join(out)
def widget_attrs(self, widget):
attrs = super(HeadersField, self).widget_attrs(widget)
if 'rows' not in attrs:
attrs['rows'] = 3
return attrs
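# Round-trip sketch for HeadersField (illustrative only, not used by the
# forms; dict key order in the repr may vary on Python 2):
#
#   >>> f = HeadersField(required=False)
#   >>> f.to_python('Content-Type: text/xml\nCache-Control: no-cache')
#   {'Content-Type': 'text/xml', 'Cache-Control': 'no-cache'}
#   >>> print f.prepare_value({'Content-Type': 'text/xml'})
#   Content-Type: text/xml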
class StaticPageEditForm(BaseModelForm):
headers = HeadersField(required=False)
class Meta:
model = StaticPage
fields = (
'url',
'title',
'content',
'privacy',
'template_name',
'allow_querystring_variables',
'headers',
)
def __init__(self, *args, **kwargs):
super(StaticPageEditForm, self).__init__(*args, **kwargs)
self.fields['url'].label = 'URL'
self.fields['template_name'].label = 'Template'
choices = (
('', 'Default'),
('staticpages/nosidebar.html', 'Default (but no sidebar)'),
('staticpages/blank.html', 'Blank (no template wrapping)'),
)
self.fields['template_name'].widget = forms.widgets.Select(
choices=choices
)
def clean_url(self):
value = self.cleaned_data['url']
if value.startswith('sidebar'):
# expect it to be something like
# 'sidebar_bottom_how-tos'
try:
__, __, channel_slug = value.split('_', 2)
except ValueError:
raise forms.ValidationError(
"Must be format like `sidebar_bottom_channel-slug`"
)
try:
Channel.objects.get(slug=channel_slug)
except Channel.DoesNotExist:
raise forms.ValidationError(
"No channel slug found called `%s`" % channel_slug
)
return value
def clean(self):
cleaned_data = super(StaticPageEditForm, self).clean()
if 'url' in cleaned_data and 'privacy' in cleaned_data:
if cleaned_data['url'].startswith('sidebar_'):
if cleaned_data['privacy'] != Event.PRIVACY_PUBLIC:
raise forms.ValidationError(
"If a sidebar the privacy must be public"
)
return cleaned_data
class VidlyURLForm(forms.Form):
url = forms.CharField(
required=True,
label='URL',
widget=forms.widgets.TextInput(attrs={
'placeholder': 'E.g. http://videos.mozilla.org/.../file.flv',
'class': 'input-xxlarge',
})
)
token_protection = forms.BooleanField(required=False)
hd = forms.BooleanField(required=False, label='HD')
def __init__(self, *args, **kwargs):
disable_token_protection = kwargs.pop(
'disable_token_protection',
False
)
super(VidlyURLForm, self).__init__(*args, **kwargs)
if disable_token_protection:
self.fields['token_protection'].widget.attrs['disabled'] = (
'disabled'
)
self.fields['token_protection'].required = True
self.fields['token_protection'].help_text = (
'Required for non-public events'
)
def clean_url(self):
        # Annoyingly, we can't use forms.URLField since it barfs on
        # Basic Auth URLs. Instead, just do some basic validation here
value = self.cleaned_data['url']
if ' ' in value or '://' not in value:
raise forms.ValidationError('Not a valid URL')
value, error = url_transformer.run(value)
if error:
raise forms.ValidationError(error)
return value
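    # Sketch of what clean_url accepts (assuming url_transformer passes
    # well-formed URLs through unchanged):
    #   'http://videos.mozilla.org/f.flv'  -> accepted
    #   'videos.mozilla.org/f.flv'         -> rejected (no '://')
    #   'http://example.com/my file.flv'   -> rejected (contains a space)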
class EventsAutocompleteForm(BaseForm):
q = forms.CharField(required=True, max_length=200)
max = forms.IntegerField(required=False, min_value=1, max_value=20)
class AcceptSuggestedEventForm(BaseModelForm):
class Meta:
model = SuggestedEvent
fields = ('review_comments',)
widgets = {
'review_comments': forms.Textarea(attrs={'rows': 3})
}
class TagEditForm(BaseModelForm):
class Meta:
model = Tag
class TagMergeForm(BaseForm):
name = forms.ChoiceField(
label='Name to keep',
widget=forms.widgets.RadioSelect()
)
def __init__(self, name, *args, **kwargs):
super(TagMergeForm, self).__init__(*args, **kwargs)
def describe_tag(tag):
count = Event.objects.filter(tags=tag).count()
if count == 1:
tmpl = '%s (%d time)'
else:
tmpl = '%s (%d times)'
return tmpl % (tag.name, count)
self.fields['name'].choices = [
(x.name, describe_tag(x))
for x in Tag.objects.filter(name__iexact=name)
]
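        # Illustrative rendering (values invented): with three events
        # tagged "Fire" and one tagged "fire", the radio choices would be
        #   ('Fire', 'Fire (3 times)') and ('fire', 'fire (1 time)')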
class VidlyResubmitForm(VidlyURLForm):
id = forms.IntegerField(widget=forms.widgets.HiddenInput())
class URLMatchForm(BaseModelForm):
class Meta:
model = URLMatch
exclude = ('use_count',)
def clean_name(self):
name = self.cleaned_data['name'].strip()
if URLMatch.objects.filter(name__iexact=name):
raise forms.ValidationError("URL matcher name already in use")
return name
def clean_string(self):
string = self.cleaned_data['string']
try:
re.compile(string)
except Exception as e:
raise forms.ValidationError(e)
return string
class SuggestedEventCommentForm(BaseModelForm):
class Meta:
model = SuggestedEventComment
fields = ('comment',)
widgets = {
'comment': forms.Textarea(attrs={'rows': 3})
}
class DiscussionForm(BaseModelForm):
class Meta:
model = Discussion
fields = ('enabled', 'closed', 'moderate_all', 'notify_all',
'moderators')
class CommentEditForm(BaseModelForm):
class Meta:
model = Comment
fields = ('status', 'comment', 'flagged')
class CommentsFilterForm(BaseForm):
user = forms.CharField(required=False)
comment = forms.CharField(required=False)
status = forms.ChoiceField(
required=False,
choices=(
(('', 'ALL'),) + Comment.STATUS_CHOICES + (('flagged', 'Flagged'),)
)
)
# Note: this rebinds CommentsFilterForm; the new class subclasses the
# definition above, adding an extra 'event' filter field.
class CommentsFilterForm(CommentsFilterForm):
event = forms.CharField(required=False)
class EventAssignmentForm(BaseModelForm):
class Meta:
model = EventAssignment
fields = ('locations', 'users')
def __init__(self, *args, **kwargs):
super(EventAssignmentForm, self).__init__(*args, **kwargs)
users = (
User.objects
.extra(select={
'email_lower': 'LOWER(email)'
})
.filter(is_active=True, is_staff=True)
.order_by('email_lower')
)
def describe_user(user):
ret = user.email
if user.first_name or user.last_name:
name = (user.first_name + ' ' + user.last_name).strip()
ret += ' (%s)' % name
return ret
self.fields['users'].choices = [
(x.pk, describe_user(x)) for x in users
]
self.fields['users'].required = False
self.fields['users'].help_text = 'Start typing to find users.'
locations = (
Location.objects.filter(is_active=True)
.order_by('name')
)
if self.instance.event.location:
locations = locations.exclude(pk=self.instance.event.location.pk)
self.fields['locations'].choices = [
(x.pk, x.name) for x in locations
]
self.fields['locations'].required = False
self.fields['locations'].help_text = 'Start typing to find locations.'
class EventTranscriptForm(BaseModelForm):
class Meta:
model = Event
fields = ('transcript', )
class QuestionForm(BaseModelForm):
class Meta:
model = Question
fields = ('question',)
class EventSurveyForm(BaseForm):
survey = forms.ChoiceField(
widget=forms.widgets.RadioSelect()
)
def __init__(self, *args, **kwargs):
super(EventSurveyForm, self).__init__(*args, **kwargs)
def describe_survey(survey):
output = survey.name
if not survey.active:
output += ' (not active)'
count_questions = Question.objects.filter(survey=survey).count()
if count_questions == 1:
output += ' (1 question)'
else:
output += ' (%d questions)' % count_questions
return output
self.fields['survey'].choices = [
('0', 'none')
] + [
(x.id, describe_survey(x)) for x in Survey.objects.all()
]
class PictureForm(BaseModelForm):
class Meta:
model = Picture
fields = ('file', 'notes', 'default_placeholder')
class AutocompeterUpdateForm(BaseForm):
verbose = forms.BooleanField(required=False)
max_ = forms.IntegerField(required=False)
all = forms.BooleanField(required=False)
flush_first = forms.BooleanField(required=False)
since = forms.IntegerField(
required=False,
help_text="Minutes since last modified"
)
def clean_since(self):
value = self.cleaned_data['since']
        if value:
            # e.g. since=90 becomes timedelta(minutes=90), which callers
            # can subtract from "now" to build a modified-since cut-off
            value = datetime.timedelta(minutes=int(value))
return value
class ISODateTimeField(forms.DateTimeField):
def strptime(self, value, __):
return dateutil.parser.parse(value)
class EventsDataForm(BaseForm):
since = ISODateTimeField(required=False)
|
bsd-3-clause
| -5,985,730,503,310,507,000 | 4,461,817,154,431,308,300 | 31.115302 | 79 | 0.562863 | false |
catapult-project/catapult
|
third_party/google-endpoints/future/types/newlist.py
|
82
|
2284
|
"""
A list subclass for Python 2 that behaves like Python 3's list.
The primary difference is that lists have a .copy() method in Py3.
Example use:
>>> from builtins import list
    >>> l1 = list()    # instead of [] for an empty list
>>> l1.append('hello')
>>> l2 = l1.copy()
"""
import sys
import copy
from future.utils import with_metaclass
from future.types.newobject import newobject
_builtin_list = list
ver = sys.version_info[:2]
class BaseNewList(type):
def __instancecheck__(cls, instance):
if cls == newlist:
return isinstance(instance, _builtin_list)
else:
return issubclass(instance.__class__, cls)
class newlist(with_metaclass(BaseNewList, _builtin_list)):
"""
A backport of the Python 3 list object to Py2
"""
def copy(self):
"""
L.copy() -> list -- a shallow copy of L
"""
return copy.copy(self)
def clear(self):
"""L.clear() -> None -- remove all items from L"""
for i in range(len(self)):
self.pop()
def __new__(cls, *args, **kwargs):
"""
list() -> new empty list
list(iterable) -> new list initialized from iterable's items
"""
if len(args) == 0:
return super(newlist, cls).__new__(cls)
elif type(args[0]) == newlist:
value = args[0]
else:
value = args[0]
return super(newlist, cls).__new__(cls, value)
def __add__(self, value):
return newlist(super(newlist, self).__add__(value))
def __radd__(self, left):
" left + self "
try:
return newlist(left) + self
        except Exception:
return NotImplemented
def __getitem__(self, y):
"""
x.__getitem__(y) <==> x[y]
Warning: a bug in Python 2.x prevents indexing via a slice from
returning a newlist object.
"""
if isinstance(y, slice):
return newlist(super(newlist, self).__getitem__(y))
else:
return super(newlist, self).__getitem__(y)
def __native__(self):
"""
Hook for the future.utils.native() function
"""
return list(self)
def __nonzero__(self):
return len(self) > 0
__all__ = ['newlist']
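# Minimal usage sketch (Python 2, doctest-style):
#
#   >>> from builtins import list
#   >>> l = list([1, 2, 3])
#   >>> l2 = l.copy()        # Py3-style shallow copy
#   >>> l.clear()            # Py3-style clear
#   >>> l, l2
#   ([], [1, 2, 3])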
|
bsd-3-clause
| 4,490,771,478,490,172,400 | 962,936,182,255,706,400 | 23.042105 | 71 | 0.546848 | false |
michaelhowden/eden
|
controllers/vulnerability.py
|
6
|
86050
|
# -*- coding: utf-8 -*-
"""
Sahana Eden Vulnerability Controller
"""
module = request.controller
resourcename = request.function
if not settings.has_module(module):
raise HTTP(404, body="Module disabled: %s" % module)
# @ToDo: deployment_setting
#countries = ["TL", "VN"]
countries = ["VN"]
#DEBUG = True
#s3_debug = s3base.s3_debug
# -----------------------------------------------------------------------------
def init():
"""
Create the static GeoJSONs that the app needs
"""
gis.export_admin_areas(countries)
return "complete"
# -----------------------------------------------------------------------------
def index():
""" Module Home Page: Map """
    # This module uses its own Theme
settings.base.theme = "Vulnerability"
# Additional scripts
append = s3.scripts.append
append("/%s/static/scripts/yepnope.1.5.4-min.js" % appname)
append("/%s/static/scripts/jit/jit-yc.js" % appname)
append("/%s/static/scripts/S3/s3.gis.loader.js" % appname)
if s3.debug:
append("/%s/static/themes/Vulnerability/js/jquery.ui.fnselectmenu.js" % appname)
append("/%s/static/themes/Vulnerability/js/TypeHelpers.js" % appname)
#append("/%s/static/scripts/ui/progressbar.js" % appname)
append("/%s/static/themes/Vulnerability/js/s3.vulnerability.js" % appname)
append("/%s/static/themes/Vulnerability/js/s3.reports.js" % appname)
append("/%s/static/themes/Vulnerability/js/s3.analysis.js" % appname)
append("/%s/static/themes/Vulnerability/js/s3.treemap.js" % appname)
append("/%s/static/scripts/jquery.dataTables.js" % appname)
append("/%s/static/scripts/jquery.dataTables.fnSetFilteringDelay.js" % appname)
append("/%s/static/scripts/S3/s3.dataTables.js" % appname)
append("/%s/static/scripts/flot/jquery.flot.js" % appname)
append("/%s/static/scripts/flot/jquery.flot.fillbetween.js" % appname)
append("/%s/static/scripts/flot/jquery.flot.crosshair.js" % appname)
else:
append("/%s/static/themes/Vulnerability/js/s3.vulnerability.min.js" % appname)
append("/%s/static/scripts/S3/s3.dataTables.min.js" % appname)
append("/%s/static/scripts/flot/jquery.flot.min.js" % appname)
append("/%s/static/scripts/flot/jquery.flot.fillbetween.min.js" % appname)
append("/%s/static/scripts/flot/jquery.flot.crosshair.min.js" % appname)
js_global = []
append = js_global.append
# i18n
i18n = "\n".join((
"i18n.all='%s'" % T("All"),
"i18n.gis_requires_login='%s'" % T("Requires Login"),
"i18n.no_matching_result='%s'" % T("No matching result"),
"i18n.no_entries_found='%s'" % T("No Entries Found"),
"i18n.loading_report_details='%s'" % T("Loading report details"),
"i18n.choose='%s'" % T("Choose"),
"i18n.population='%s'" % T("Population"),
"i18n.reported='%s'" % T("Reported"),
"i18n.country='%s'" % COUNTRY,
"i18n.country_in='%s'" % T("Country in"),
"i18n.select_country='%s'" % T("Select a Country"),
"i18n.show_more='%s'" % T("Show more"),
"i18n.show_less='%s'" % T("Show less"),
"i18n.submit_data='%s'" % T("Submit Data"),
"i18n.analysis='%s'" % T("Analysis"),
"i18n.reports='%s'" % T("Reports"),
"i18n.all_reports='%s'" % T("All reports"),
"i18n.my_reports='%s'" % T("My reports"),
"i18n.approval_request_submitted='%s'" % T("Approval request submitted"),
"i18n.thankyou_for_your_approval='%s'" % T("Thank you for your approval"),
"i18n.reject_request_submitted='%s'" % T("Reject request submitted"),
"i18n.submission_has_been_declined='%s'" % T("Thank you, the submission%(br)shas been declined") % dict(br="<br />"),
"i18n.last_data_collected_on='%s'" % T("Last Data Collected on"),
"i18n.by='%s'" % T("by"),
"i18n.in_='%s'" % T("in"),
"i18n.in_this='%s'" % T("in this"),
"i18n.of='%s'" % T("of"),
"i18n.out_of='%s'" % T("out of"),
"i18n.review='%s'" % T("Review"),
"i18n.submitted_by='%s'" % T("submitted by"),
"i18n.go_to_the='%s'" % T("Go to the"),
"i18n.select_data_type='%s'" % T("Select data type"),
"i18n.about_to_submit_indicator_ratings='%s'" % T("You are about to submit indicator ratings for"),
"i18n.poor='%s'" % T("poor"),
"i18n.fair='%s'" % T("fair"),
"i18n.moderate='%s'" % T("moderate"),
"i18n.strong='%s'" % T("strong"),
"i18n.data_quality='%s'" % T("Data Quality"),
"i18n.of_total_data_reported='%s'" % T("of total data reported"),
"i18n.uploading_report_details='%s'" % T("Uploading report details"),
"i18n.upload_successful='%s'" % T("Upload successful"),
"i18n.no_data='%s'" % T("No Data"),
"i18n.extrapolated='%s'" % T("Extrapolated"),
"\n",
))
append(i18n)
append(s3base.S3DataTable.i18n())
# Save data in the session for later
table = s3db.vulnerability_aggregated_indicator
query = (table.uuid == "Resilience")
result = db(query).select(table.parameter_id, limitby=(0, 1))
if result:
session.s3.resilience_id = result.first().parameter_id
dtable = s3db.stats_demographic
query = (dtable.name == "Population")
result = db(query).select(dtable.parameter_id, limitby=(0, 1))
if result:
session.s3.population_id = result.first().parameter_id
# Get the list of indicators
itable = db.vulnerability_indicator
rows = db(itable.deleted == False).select(itable.name,
itable.description,
itable.parameter_id,
orderby=itable.posn)
pids = []
pappend = pids.append
indicators = OrderedDict()
count = 1
for row in rows:
pappend(row.parameter_id)
indicators[count] = dict(i=row.parameter_id,
n=row.name,
d=row.description)
count += 1
append('''\nidata=%s''' % json.dumps(indicators))
session.s3.indicator_pids = pids
# Get the L0 hdata & summary vdata
hdata, vdata = l0()
# Get the default location to open the map
bounds = None
root_org = auth.root_org()
start = False
if root_org:
otable = s3db.org_organisation
ttable = s3db.gis_location_tag
gtable = db.gis_location
query = (otable.id == root_org) & \
(ttable.tag == "ISO2") & \
(ttable.value == otable.country)
r = db(query).select(ttable.location_id,
limitby=(0, 1)).first()
if r and r.location_id in countries:
start = True
append('''\nstart=%s''' % r.location_id)
# Add the child L1 summary vdata
l1(r.location_id, vdata)
if not start:
append('''\nstart=""''')
dumps = json.dumps
script = '''
hdata=%s
vdata=%s''' % (dumps(hdata), dumps(vdata))
append(script)
s3.js_global.append("".join(js_global))
# Reports
# These get pulled-in via AJAX
# from s3.s3data import S3DataTable
# resource = s3db.resource("vulnerability_document")
# list_fields = ["id",
# "date",
# "location_id",
# "location_id$L2",
# "source_id"
# "document_type",
# "created_by",
# "approved_by",
# ]
# rfields = resource.resolve_selectors(list_fields)[0]
# filteredrows = resource.count()
# dt = S3DataTable(rfields, [], orderby=~s3db.vulnerability_document.date)
# level_1_titles = [["Approval pending", T("Approval pending")],
# ["VCA Report", T("VCA Report")],
# ["Report", T("Report")],
# ]
# report = dt.html(filteredrows,
# filteredrows,
# "report",
# dt_pagination = "false",
# dt_searching = "false",
# dt_dom = "t",
# dt_group = [4, 3],
# dt_group_totals = [level_1_titles],
# dt_ajax_url = URL(c="vulnerability",
# f="report",
# extension="aadata",
# vars={"id": "report"},
# ),
# dt_action_col = -1,
# dt_group_space = "true",
# dt_shrink_groups = "accordion",
# dt_group_types = ["text", "none"],
# )
# s3.report = report
# TreeMap
s3.stylesheets.append("jit/base.css")
user = auth.user
if user:
user_name = "%s %s" % (user.first_name, user.last_name)
else:
user_name = ""
today = request.utcnow.strftime("%d-%b-%y")
response.view = "vulnerability/map.html"
return dict(indicators=indicators,
user_name = user_name,
today = today,
COUNTRY = COUNTRY.upper(),
CHOOSE_COUNTRY = T("Choose Country"))
# -----------------------------------------------------------------------------
def l0():
"""
Return hdata (Hierarchy Labels) & summary vdata (Resilience) for all Countries
- used only by the initial map load
"""
gtable = db.gis_location
ttable = s3db.gis_location_tag
htable = s3db.gis_hierarchy
query = (gtable.id == ttable.location_id) & \
(ttable.tag == "ISO2") & \
(ttable.value.belongs(countries)) & \
(gtable.id == htable.location_id)
atable = s3db.vulnerability_aggregate
lquery = (atable.parameter_id == session.s3.resilience_id) & \
(atable.agg_type == 4) & \
(atable.location_id == gtable.id)
left = atable.on(lquery)
hdata = {}
vdata = {}
ids = []
append = ids.append
rows = db(query).select(gtable.id,
gtable.name,
htable.L1,
htable.L2,
htable.L3,
#htable.L4,
#atable.date,
atable.median,
orderby=~atable.date,
left=left)
for row in rows:
id = row[gtable].id
if id in ids:
# We're only interested in the most recent data per location
continue
append(id)
_grow = row[gtable]
_hrow = row[htable]
hdata[id] = dict(l1 = _hrow.L1,
l2 = _hrow.L2,
l3 = _hrow.L3,
#l4 = _hrow.L4,
)
median = row[atable].median
if median is None:
resilience = 0
else:
resilience = int(round(median, 0))
vdata[id] = dict(r = resilience,
n = _grow.name,
l = 0,
)
return hdata, vdata
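# Illustrative shapes of the returned structures (values invented):
#   hdata = {123: {"l1": "Province", "l2": "District", "l3": "Commune"}}
#   vdata = {123: {"r": 3, "n": "Vietnam", "l": 0}}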
# -----------------------------------------------------------------------------
def l1(id, vdata):
"""
Update summary vdata (Resilience) for all child L1s of the start country
- used only by the initial map load
"""
gtable = db.gis_location
# @ToDo: Filter by Date not just filter-out old locations
query = (gtable.parent == id) & \
(gtable.level == "L1") & \
(gtable.end_date == None)
aitable = db.vulnerability_aggregated_indicator
atable = db.vulnerability_aggregate
rquery = (aitable.name == "Resilience") & \
(atable.parameter_id == aitable.parameter_id) & \
(atable.agg_type == 4)
rows = db(query).select(gtable.id,
gtable.name,
)
for row in rows:
query = rquery & (atable.location_id == row.id)
_row = db(query).select(#atable.date,
atable.median,
orderby=~atable.date).first()
resilience = 0
if _row and _row.median is not None:
resilience = int(round(_row.median, 0))
vdata[row.id] = dict(r = resilience,
n = row.name,
l = 1,
f = id,
)
return
# -----------------------------------------------------------------------------
def vdata():
"""
Return JSON of the Vulnerability data for a location
- for display in Map Popups and the Drawer
vdata = { id : {
'n' : name,
'l' : level,
'f' : parent,
'r' : resilience,
'i' : indicator data,
'c' : count (how many L3s reported in this region),
't' : count (how many L3s total in this region),
'q' : quality,
'p' : population,
's' : source (for population),
'b' : population breakdown (for L3s),
'd' : date last collected (for L3s),
'w' : collected by (for L3s),
'm' : images (for L3s),
}
}
"""
try:
id = request.args[0]
except:
raise HTTP(400)
#if DEBUG:
# start = datetime.datetime.now()
gtable = s3db.gis_location
#if DEBUG:
# end = datetime.datetime.now()
# duration = end - start
# duration = "{:.2f}".format(duration.total_seconds())
# s3_debug("LocationModel load completed in %s seconds" % duration)
# start = datetime.datetime.now()
query = (gtable.id == id)
location = db(query).select(gtable.name,
gtable.level,
gtable.parent,
gtable.L0,
gtable.L1,
gtable.L2,
#gtable.L3,
limitby=(0, 1)).first()
if not location or not location.level:
return ""
script = ""
level = location.level
data = dict(n = location.name,
l = int(level[1]),
f = location.parent,
)
#if DEBUG:
# end = datetime.datetime.now()
# duration = end - start
# duration = "{:.2f}".format(duration.total_seconds())
# s3_debug("Query 1 (location lookup) completed in %s seconds" % duration)
# start = datetime.datetime.now()
# Represent numbers in the correct format
nrepresent = IS_INT_AMOUNT().represent
vdata = {}
atable = s3db.vulnerability_aggregate
resilience_id = session.s3.resilience_id
#if DEBUG:
# end = datetime.datetime.now()
# duration = end - start
# duration = "{:.2f}".format(duration.total_seconds())
# s3_debug("StatsModel load completed in %s seconds" % duration)
# start = datetime.datetime.now()
if level != "L3":
# We need to read the ids, names & resiliences of the next level down for the selectmenu styling of the dropdown
_level = int(level[1]) + 1
# @ToDo: Filter by Date not just filter-out old locations
query = (gtable.parent == id) & \
(gtable.level == "L%s" % _level) & \
(gtable.deleted == False) & \
(gtable.end_date == None)
lquery = (atable.parameter_id == resilience_id) & \
(atable.agg_type == 4) & \
(atable.end_date == None) & \
(atable.location_id == gtable.id)
left = atable.on(lquery)
rows = db(query).select(gtable.id,
gtable.name,
#atable.date,
atable.median,
#atable.ward_count,
#atable.reported_count,
left=left)
#if DEBUG:
# end = datetime.datetime.now()
# duration = end - start
# duration = "{:.2f}".format(duration.total_seconds())
# s3_debug("Query 2 (next level down) completed in %s seconds" % duration)
# start = datetime.datetime.now()
for row in rows:
grow = row[gtable]
median = row[atable].median
if median is None:
resilience = 0
else:
resilience = int(round(median, 0))
vdata[grow.id] = dict(r = resilience,
n = grow.name,
l = _level,
f = id,
)
#if DEBUG:
# end = datetime.datetime.now()
# duration = end - start
# duration = "{:.2f}".format(duration.total_seconds())
# s3_debug("Query 2 (row in rows) completed in %s seconds" % duration)
# start = datetime.datetime.now()
else:
# We are an L3 already
# Last Data Collected on d by w
utable = auth.settings.table_user
vtable = s3db.vulnerability_data
query = (vtable.location_id == id)
left = utable.on(utable.id == vtable.created_by)
row = db(query).select(vtable.date,
utable.first_name,
utable.last_name,
orderby=~vtable.date,
left=left,
limitby=(0, 1)).first()
if row:
data["d"] = row[vtable].date.isoformat()
user = row[utable]
data["w"] = "%s %s" % (user.first_name, user.last_name)
else:
data["d"] = ""
data["w"] = ""
#if DEBUG:
# end = datetime.datetime.now()
# duration = end - start
# duration = "{:.2f}".format(duration.total_seconds())
# s3_debug("Query 2 (last data collection) completed in %s seconds" % duration)
# start = datetime.datetime.now()
# Get the Resilience
query = (atable.parameter_id == resilience_id) & \
(atable.agg_type == 4) & \
(atable.end_date == None) & \
(atable.location_id == id)
r = db(query).select(atable.date,
atable.median,
atable.ward_count,
atable.reported_count,
# Should be only one with end_date == None
#orderby=~atable.date,
limitby=(0, 1)).first()
if not r or r.median is None:
data["r"] = 0
if level != "L3":
data["c"] = 0
data["q"] = "p"
# Total number of L3s in this region
data["t"] = nrepresent(len(gis.get_children(id, level="L3")))
else:
data["r"] = int(round(r.median, 0))
# How many L3s have reported?
reported_count = r.reported_count
data["c"] = nrepresent(reported_count)
# Total number of L3s in this region
ward_count = r.ward_count
data["t"] = nrepresent(ward_count)
if level != "L3":
# Calculate Quality
if reported_count == 0 or ward_count == 0:
q = "p"
else:
                q = float(reported_count) / ward_count
if q < 0.25:
q = "p"
elif q < 0.50:
q = "f"
elif q < 0.75:
q = "m"
else:
q = "s"
data["q"] = q
#if DEBUG:
# end = datetime.datetime.now()
# duration = end - start
# duration = "{:.2f}".format(duration.total_seconds())
# s3_debug("Query 3 (resilience) completed in %s seconds" % duration)
# start = datetime.datetime.now()
# Get the aggregated data for this location for all indicators
query = (atable.location_id == id) & \
(atable.parameter_id.belongs(session.s3.indicator_pids))
rows = db(query).select(atable.parameter_id,
atable.min,
atable.max,
atable.median,
orderby=~atable.date,
)
indicator_data = {}
pids = []
pappend = pids.append
for row in rows:
pid = row.parameter_id
if pid in pids:
# We're only interested in the most recent data per indicator
continue
pappend(pid)
indicator_data[pid] = dict(min = row.min,
max = row.max,
med = row.median,
)
data["i"] = indicator_data
#if DEBUG:
# end = datetime.datetime.now()
# duration = end - start
# duration = "{:.2f}".format(duration.total_seconds())
# s3_debug("Query 4 (indicators) completed in %s seconds" % duration)
# start = datetime.datetime.now()
# Get the Demographic data for the location
ddtable = s3db.stats_demographic_data
if level != "L3":
# Just Population
p = None
if level != "L2":
# Lookup direct
query = (ddtable.location_id == id) & \
(ddtable.parameter_id == session.s3.population_id)
row = db(query).select(ddtable.value,
orderby=~ddtable.date,
limitby=(0, 1)).first()
if row:
p = row.value
if not p:
# Fallback to an aggregate
# @ToDo: mark this in some way - either '> p' or else '~p' by averaging from the data that we do have
atable = s3db.stats_demographic_aggregate
query = (atable.agg_type == 2) & \
(atable.location_id == id) & \
(atable.parameter_id == session.s3.population_id) & \
(atable.end_date == None)
row = db(query).select(atable.sum,
# Should be only one with end_date == None
#orderby=~atable.date,
limitby=(0, 1)).first()
if row:
p = row.sum
data["p"] = nrepresent(p) if p else ""
else:
# L3: Population, Breakdowns & Source
# Add all available breakdowns to the output
b = {}
dtable = s3db.stats_demographic
query = (dtable.deleted != True) & \
(dtable.name != "Population")
demos = db(query).select(dtable.id,
dtable.name)
for d in demos:
b[d.id] = dict(n = s3_unicode(T(d.name)),
v = "",
s = "")
srctable = s3db.stats_source
query = (ddtable.location_id == id) & \
(ddtable.parameter_id == dtable.parameter_id) & \
(ddtable.source_id == srctable.id)
rows = db(query).select(dtable.id,
dtable.name,
ddtable.value,
srctable.name,
#ddtable.date,
orderby=~ddtable.date
)
ids = []
append = ids.append
for row in rows:
_id = row[dtable].id
if _id in ids:
# We're only interested in the most recent data per demographic
continue
append(_id)
d = row[dtable]
if d.name == "Population":
data["p"] = nrepresent(row[ddtable].value)
data["s"] = row[srctable].name
else:
# Breakdown
b[_id]["v"] = nrepresent(row[ddtable].value)
b[_id]["s"] = row[srctable].name
data["b"] = b
# Images
itable = s3db.doc_image
ttable = s3db.pr_image_library
vdoc_table = s3db.vulnerability_document
query = (vdoc_table.location_id == id) & \
(vdoc_table.approved_by != None) & \
(vdoc_table.document_type.belongs(("image", "map"))) & \
(vdoc_table.doc_id == itable.doc_id) & \
(ttable.original_name == itable.file)
left = utable.on(utable.id == itable.created_by)
images = db(query).select(itable.file,
itable.comments,
ttable.new_name,
utable.first_name,
utable.last_name,
left=left,
orderby=~itable.date)
m = []
mappend = m.append
for image in images:
i = image[itable]
user = image[utable]
mappend([image[ttable].new_name, i.file, i.comments,
"%s %s" % (user.first_name, user.last_name)])
data["m"] = m
#if DEBUG:
# end = datetime.datetime.now()
# duration = end - start
# duration = "{:.2f}".format(duration.total_seconds())
# s3_debug("Query 5 (demographics) completed in %s seconds" % duration)
# start = datetime.datetime.now()
vdata[id] = data
script = '''n=%s\n''' % json.dumps(vdata)
response.headers["Content-Type"] = "application/json"
return script
# -----------------------------------------------------------------------------
def rdata():
"""
Controller to extract data for resilience analysis line graph
returns a JavaScript like:
r={"location_id":
{"year":
{"indicator_index": [value, deviation]}
}
}
where indicator_index is 0 for the overall resilience (median), or
1-10 for the individual indicators (=index in the list + 1).
Any data which are not available from the db will be omitted (to
save bandwidth) - the client-side script must detect any missing
keys itself.
@todo: this controller must make sure that there is always a median
(overall resilience) in each set => calculate if not present.
"""
response.headers["Content-Type"] = "application/json"
if not len(request.args):
return '''n={}'''
else:
locations = list(set([a for a in request.args if a.isdigit()]))
fyear = None
lyear = None
if "after" in get_vars:
try:
fyear = int(get_vars["after"])
except ValueError:
pass
if "before" in get_vars:
try:
lyear = int(get_vars["before"])
except ValueError:
pass
    if lyear and fyear and fyear > lyear:
        # swap if the range was supplied in reverse
        lyear, fyear = fyear, lyear
if fyear:
fdate = datetime.datetime(fyear, 1, 1)
else:
fdate = None
if lyear:
ldate = datetime.datetime(lyear + 1, 1, 1)
else:
ldate = request.utcnow
resilience_id = session.s3.resilience_id
indicator_pids = session.s3.indicator_pids
pos = Storage([(indicator_pids[i], i + 1)
for i in xrange(len(indicator_pids))])
pos[resilience_id] = 0
atable = s3db.vulnerability_aggregate
query = ((atable.parameter_id == resilience_id) & \
(atable.agg_type == 4)) | \
(atable.parameter_id.belongs(indicator_pids))
if len(locations) == 1:
query &= (atable.location_id == locations[0])
else:
query &= (atable.location_id.belongs(locations))
if fyear:
query &= (atable.date >= fdate)
if lyear is None or lyear == request.utcnow.year:
query &= ((atable.end_date < ldate) | (atable.end_date == None))
else:
query &= (atable.end_date < ldate)
rows = db(query).select(atable.location_id,
atable.parameter_id,
atable.date,
atable.mean,
atable.median,
atable.mad,
orderby=~atable.date)
keys = []
seen = keys.append
data = dict()
for row in rows:
l = row.location_id
y = row.date.year
p = pos[row.parameter_id]
if (l, y, p) in keys:
continue
seen((l, y, p))
if p == pos[resilience_id]:
val = int(round(row.median, 0))
else:
val = row.median
dev = row.mad
if l not in data:
ldata = data[l] = dict()
else:
ldata = data[l]
if y not in ldata:
ydata = ldata[y] = dict()
else:
ydata = ldata[y]
ydata[p] = (val, dev)
script = '''r=%s\n''' % json.dumps(data)
return script
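# Example of the emitted script for one location/year (values invented;
# note that json.dumps turns the integer keys into strings):
#   r={"42": {"2012": {"0": [3, 0.5], "1": [2.8, 0.7]}}}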
# -----------------------------------------------------------------------------
def tmdata():
""" Controller to extract tree map data """
MAX_LEVEL = 3 # the lowest level for child lookups
# Requested locations
if not len(request.args):
response.headers["Content-Type"] = "application/json"
return '''sdata={}'''
else:
locations = list(set([int(a) for a in request.args if a.isdigit()]))
sdata = Storage()
# Vulnerability Indicators
indicator_pids = session.s3.indicator_pids
idefaults = [(i, 0) for i in indicator_pids]
# Locations Hierarchy
ltable = s3db.gis_location
parents = list(locations)
children = list(locations)
while parents or children:
query = None
if children:
query = (ltable.id.belongs(children))
if parents:
q = (ltable.parent.belongs(parents))
if query is None:
query = q
else:
query |= q
if query is None:
break
rows = db(query).select(ltable.id,
ltable.name,
ltable.level,
ltable.parent)
next_parents = []
next_children = []
for row in rows:
this = row.id
level = int(row.level[1])
parent = row.parent
if this not in sdata:
sdata[this] = {}
data = sdata[this]
data["n"] = row.name
data["l"] = level
data["f"] = parent
data["p"] = 0
data["i"] = dict(idefaults)
data["x"] = this not in locations
if level > 0 and parent:
if parent in parents and \
level < MAX_LEVEL and \
parent in locations:
pass
#next_parents.append(this)
elif this in children and parent not in sdata:
next_children.append(parent)
parents = next_parents
children = next_children
# Population
if level in ("L0", "L1"):
# Lookup direct
ddtable = s3db.stats_demographic_data
query = (ddtable.location_id.belongs(sdata.keys())) & \
(ddtable.parameter_id == session.s3.population_id)
rows = db(query).select(ddtable.location_id,
ddtable.value,
orderby=~ddtable.date)
location_ids = []
seen = location_ids.append
for row in rows:
location_id = row.location_id
if location_id not in location_ids:
seen(location_id)
sdata[location_id]["p"] = row.value
# Look up aggregates
atable = s3db.vulnerability_aggregate
query = (atable.location_id.belongs(sdata.keys())) & \
(atable.parameter_id == session.s3.population_id)
rows = db(query).select(atable.location_id,
atable.sum,
atable.ward_count,
atable.reported_count,
orderby=~atable.date)
location_ids = []
seen = location_ids.append
for row in rows:
location_id = row.location_id
if location_id not in location_ids:
seen(location_id)
data = sdata[location_id]
if not data["p"]:
data["p"] = row.sum
data["t"] = row.ward_count
data["r"] = row.reported_count
# Calculate ward_count manually for Lx without aggregates
#commune_level = "L%s" % MAX_LEVEL
#for location_id in sdata.keys():
# data = sdata[location_id]
# if "t" not in data:
# data["r"] = 0
# # @ToDo: optimise this to do in-bulk rather than per-record
# data["t"] = len(gis.get_children(location_id, level=commune_level))
# Indicators
query = (atable.location_id.belongs(sdata.keys())) & \
(atable.parameter_id.belongs(indicator_pids))
rows = db(query).select(atable.location_id,
atable.parameter_id,
atable.median)
for row in rows:
location_id = row.location_id
location_data = sdata[location_id]
if "i" not in location_data:
location_data["i"] = dict(idefaults)
location_data["i"][row.parameter_id] = row.median
# Return as script
script = '''sdata=%s\n''' % json.dumps(sdata)
response.headers["Content-Type"] = "application/json"
return script
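# Illustrative shape of the emitted sdata (values invented; "i" maps
# indicator parameter ids to medians, "x" flags locations that were not
# explicitly requested):
#   sdata={"7": {"n": "Hanoi", "l": 1, "f": 5, "p": 6500000,
#                "t": 29, "r": 12, "x": false,
#                "i": {"11": 3.0, "12": 2.5}}}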
# -----------------------------------------------------------------------------
def filter_report(filter_request, loc_id, loc_level):
"""
Helper function to extract the selections from the side panel
and generate a resource filter
"""
vdoc_table = db.vulnerability_document
gtable = db.gis_location
query = (vdoc_table.deleted != True) & \
(vdoc_table.location_id == gtable.id)
if loc_id != -1:
# Don't filter to just next level
#next_loc_level = "L%s" % (int(loc_level[1:]) + 1)
#child_locations = gis.get_children(loc_id, next_loc_level)
child_locations = gis.get_children(loc_id)
if len(child_locations) == 0:
query &= (vdoc_table.location_id == loc_id)
else:
child_ids = [row.id for row in child_locations]
child_ids.append(loc_id) # include the selected location
query &= (vdoc_table.location_id.belongs(child_ids))
else:
# Show the country-level
query &= (gtable.level == "L0")
if filter_request["from_date"]:
query &= (vdoc_table.date >= filter_request["from_date"])
if filter_request["to_date"]:
query &= (vdoc_table.date <= filter_request["to_date"])
document_types = ["vca"]
indicator = (vdoc_table.document_type == "vca")
if "indicator" in filter_request:
document_types.append("indicator")
if "demographics" in filter_request:
document_types.append("demographic")
if "map" in filter_request:
document_types.append("map")
if "images" in filter_request:
document_types.append("image")
if "reports" in filter_request:
document_types.append("other")
if len(document_types) == 1:
query &= (vdoc_table.document_type == "vca")
else:
query &= (vdoc_table.document_type.belongs(document_types))
if "myReports" in filter_request:
user_id = auth.user.id
query &= ((vdoc_table.approved_by == user_id)
| (vdoc_table.created_by == user_id))
if "text" in filter_request and filter_request["text"] != "":
utable = auth.settings.table_user
text = "%%%s%%" % filter_request["text"].lower()
query &= (vdoc_table.location_id == gtable.id)
query &= (vdoc_table.created_by == utable.id)
query &= ((gtable.name.lower().like(text))
| (utable.first_name.lower().like(text))
| (utable.last_name.lower().like(text)))
# Now ensure that all unapproved records are added to the return list
query = ((vdoc_table.deleted != True) & \
(vdoc_table.approved_by == None) & \
(vdoc_table.location_id == gtable.id)
) | (query)
return query
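# Illustrative behaviour (values invented): for loc_id=42 at level "L1"
# with {"from_date": "2013-01-01", "indicator": "on"}, the query keeps
# vulnerability_document rows for 42 and its children, dated on or after
# 2013-01-01, with document_type in ("vca", "indicator"), and then ORs in
# every unapproved record so reviewers always see pending submissions.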
# -------------------------------------------------------------------------
def report_group(row):
"""
Virtual field to show the group that the report belongs to
used by vulnerability/report
"""
if "vulnerability_document" in row:
row = row["vulnerability_document"]
# These get i18n later
if row.approved_by is None:
return "Approval pending"
elif row.document_type == "vca":
return "VCA Report"
else:
return "Report"
# -----------------------------------------------------------------------------
def reportDataTable():
"""
Return a dataTable using the selected filter options
"""
from s3.s3data import S3DataTable
vdoc_table = s3db.vulnerability_document
vdoc_table.group = Field.Method("group", report_group)
gtable = db.gis_location
# -------------------------------------------------------------------------
# Set up custom represents
# -------------------------------------------------------------------------
def location_repr(id):
"""
Return the location name (level) wrapped in a span
"""
if not id:
repr_text = messages["NONE"]
else:
row = locations.get(id, None)
if not row:
repr_text = messages.UNKNOWN_OPT
else:
level = loc_labels[row["level"]]
repr_text = "%s (%s)" % (row["name"], level)
return SPAN(repr_text, _class="communeCell")
# -------------------------------------------------------------------------
def submitted_repr(id):
"""
Return the initial of the first name and the complete last name
"""
if not id:
repr_text = T("Imported data")
else:
row = users.get(id, None)
if row:
repr_text = "%s. %s" % (row["first_name"][0], row["last_name"])
else:
repr_text = messages.UNKNOWN_OPT
return repr_text
# -------------------------------------------------------------------------
def approved_repr(id):
"""
Return the initials of the first and the last name
"""
if id is None:
repr_text = APPROVAL_PENDING
elif id == 0:
repr_text = APPROVED
else:
row = users.get(id, None)
if row:
repr_text = T("Approved by %(first_name)s.%(last_name)s") % \
dict(first_name = row["first_name"][0],
last_name = row["last_name"][0])
else:
repr_text = messages.UNKNOWN_OPT
return repr_text
# -------------------------------------------------------------------------
def action_repr(id):
"""
Return the action button for this row
"""
approved = approvals.get(id, None)
if approved != None:
repr_text = A(VIEW,
_id = id,
_class = "viewButton",
_href = "javascript:viewReportDetails(%s)" % id
)
else:
repr_text = A(REVIEW,
_id = id,
_class = "reviewButton",
_href = "javascript:showReportDetails(%s)" % id
)
repr_text.append(A(CLOSE,
_class = "closeReviewButton",
_href = "javascript:hideReportDetails(%s)" % id
))
return repr_text
filter_request = request.post_vars
loc_level = -1
if filter_request:
loc_id = filter_request.get("location_id", -1)
if loc_id == "-1":
loc_id = -1
if loc_id:
row = db(gtable.id == loc_id).select(gtable.level,
gtable.path,
limitby=(0, 1)
).first()
try:
loc_level = row.level
except:
# Invalid location ID
loc_id = -1
else:
if loc_level == "L0":
L0 = loc_id
else:
L0 = row.path.split("/")[0]
filter = filter_report(filter_request, loc_id, loc_level)
if loc_id == -1:
loc_labels = gis.get_location_hierarchy()
else:
loc_labels = gis.get_location_hierarchy(location=L0)
#############################################################
# Note if list_fields are changed here then they also need
# to be changed in index, where the table is initialised
#############################################################
if loc_level == -1:
loc_list_field = "location_id$L0"
loc_group_field = "gis_location.L0"
elif loc_level == "L0":
loc_list_field = "location_id$L1"
loc_group_field = "gis_location.L1"
elif loc_level == "L1":
loc_list_field = "location_id$L2"
loc_group_field = "gis_location.L2"
elif loc_level == "L2":
loc_list_field = "location_id$L3"
loc_group_field = "gis_location.L3"
elif loc_level == "L3":
loc_list_field = "location_id$L3"
loc_group_field = "gis_location.L3"
# @ToDo: Support countries with L4s/L5s
#elif loc_level == "L4":
# loc_list_field = "location_id$L4"
# loc_group_field = "gis_location.L4"
list_fields = [(T("Action"), "id"),
(T("Date"), "date"),
(T("Location"), "location_id"),
# Field.Method
"group",
loc_list_field,
"document_type",
(T("Submitted by"), "created_by"),
(T("Status"), "approved_by"),
]
    # Ensure that we also get the records awaiting approval
resource = s3db.resource("vulnerability_document", unapproved=True)
if filter_request:
resource.add_filter(filter)
totalrows = resource.count()
data = resource.select(list_fields,
orderby=~vdoc_table.date,
limit=None,
count=True,
represent=False,
#raw_data=True
)
filteredrows = data["numrows"]
if filteredrows > 0:
# Do represents in-bulk
# @ToDo: Replace with S3Represents & define before select
approvals = {}
locations = []
lappend = locations.append
users = []
uappend = users.append
rows = data["rows"]
for row in rows:
#raw = row["_row"]
location_id = row["vulnerability_document.location_id"]
if location_id and location_id not in locations:
lappend(location_id)
user_id = row["vulnerability_document.created_by"]
if user_id and user_id not in users:
uappend(user_id)
user_id = row["vulnerability_document.approved_by"]
if user_id:
approvals[row["vulnerability_document.id"]] = user_id
if user_id not in users:
uappend(user_id)
lrows = db(gtable.id.belongs(locations)).select(gtable.id,
gtable.name,
gtable.level,
gtable.L1,
gtable.L2)
locations = lrows.as_dict()
utable = auth.settings.table_user
urows = db(utable.id.belongs(users)).select(utable.id,
utable.first_name,
utable.last_name)
users = urows.as_dict()
APPROVED = T("Approved")
APPROVAL_PENDING = T("Approval pending")
CLOSE = T("Close")
REVIEW = T("Review")
VIEW = T("View")
# Apply represents
date_repr = vdoc_table.date.represent
doc_type_repr = vdoc_table.document_type.represent
for row in rows:
v = row["vulnerability_document.id"]
row["vulnerability_document.id"] = action_repr(v)
v = row["vulnerability_document.date"]
row["vulnerability_document.date"] = date_repr(v)
v = row["vulnerability_document.location_id"]
row["vulnerability_document.location_id"] = location_repr(v)
v = row["vulnerability_document.document_type"]
row["vulnerability_document.document_type"] = doc_type_repr(v)
v = row["vulnerability_document.created_by"]
row["vulnerability_document.created_by"] = submitted_repr(v)
v = row["vulnerability_document.approved_by"]
row["vulnerability_document.approved_by"] = approved_repr(v)
# The types are fixed and will always be displayed (even if empty)
type_totals = {"Approval pending" : 0,
"VCA Report" : 0,
"Report" : 0
}
# Calculate the report group totals
location_totals = {}
if loc_level != -1:
loc_level = int(loc_level[1:])
if loc_level < 3:
loc_label = loc_labels["L%s" % (loc_level + 1)]
else:
loc_label = ""
for row in rows:
# Collect the type totals
group = row["vulnerability_document.group"]
if not group:
group = "Report"
type_totals[group] += 1
# Collect the Location sub totals
if row[loc_group_field] == "None":
# If the group field is none then use the location for the group
# This will happen for any report for the selected location
#location = row["vulnerability_document.location_id"].components[0]
# This gives invalid Unicode conversion & anyway doesn't seem useful
continue
else:
if loc_level != -1:
location = "%s (%s)" % (row[loc_group_field], loc_label)
else:
location = row[loc_group_field]
# Represent the field
row[loc_group_field] = location
# Populate the groupTotals to be read by dataTables
loc_code = "%s_%s" % (group, s3_unicode(location))
if loc_code in location_totals:
location_totals[loc_code] += 1
else:
location_totals[loc_code] = 1
group_totals = {
unicode(T("Approval pending")) : type_totals["Approval pending"],
unicode(T("VCA Reports")) : type_totals["VCA Report"],
unicode(T("Reports")) : type_totals["Report"]
}
rfields = data["rfields"]
dt = S3DataTable(rfields,
rows,
orderby=~vdoc_table.date
)
# No need as hidden when used for Grouping
#if loc_level != -1:
# # Amend the column label
# dt.heading[loc_group_field] = loc_label
dt.defaultActionButtons(resource)
if request.extension == "html":
level_1_titles = [["Approval pending", T("Approval pending")],
["VCA Report", T("VCA Reports")],
["Report", T("Reports")],
]
report = dt.html(totalrows,
filteredrows,
"report",
dt_action_col = -1,
# Pagination done client-side currently!
dt_ajax_url = None,
#dt_ajax_url = URL(c="vulnerability",
# f="report",
# extension="aadata",
# vars={"id": "report"},
# ),
dt_dom = "t",
# No server-side pagination
dt_pagination = "false",
dt_pageLength = filteredrows,
dt_searching = "false",
dt_group = [3, 4],
dt_group_totals = [group_totals, location_totals],
dt_group_titles = [level_1_titles],
dt_group_types = ["text", "none"],
dt_group_space = "true",
dt_shrink_groups = "accordion",
)
reportCount = T("%(count)s Entries Found") % dict(count=filteredrows)
report.append(INPUT(_type="hidden",
_id="reportCount",
_name="config",
_value=reportCount))
return str(report)
elif request.extension == "aadata":
# Unsupported
        raise NotImplementedError("aadata pagination is not supported")
else:
return ""
# -----------------------------------------------------------------------------
def getReportDetails():
"""
        Get the details of a report from the vulnerability_document id.
        Builds the custom display, which is essentially a form wrapped
        around a table; if buttons are required then they will be added,
        allowing the report to be approved or rejected.
"""
_id = get_vars.id
vdoc_table = s3db.vulnerability_document
vdoc = db(vdoc_table.id == _id).select(vdoc_table.name,
vdoc_table.document_type,
vdoc_table.doc_id,
vdoc_table.source_id,
limitby=(0, 1)).first()
document_type = vdoc.document_type
valid = True
if document_type == "indicator":
# Get the data for this report
vdtable = db.vulnerability_data
vitable = db.vulnerability_indicator
query = (vdtable.deleted == False) & \
(vdtable.source_id == vdoc.source_id) & \
(vitable.parameter_id == vdtable.parameter_id)
rows = db(query).select(vdtable.value,
vitable.name,
orderby=vitable.posn)
# Build the custom table
table = TABLE(TR(TH(_class="indicatorLabels"),
TH(DIV(1), _class="indicator1"),
TH(DIV(2), _class="indicator2"),
TH(DIV(3), _class="indicator3"),
TH(DIV(4), _class="indicator4"),
TH(DIV(5), _class="indicator5"),
),
TR(TH(),
TH(SPAN(XML("←"), _class="arrow"),
" %s" % T("LOW RESILIENCE"),
_colspan=2),
TH(" %s" % T("HIGH RESILIENCE"),
SPAN(XML("→"), _class="arrow"),
_class="highResilienceLabel",
_colspan=3)
),
_class="indicatorsTable")
mark = XML("<mark>*</mark>")
tr_class = "white"
for row in rows:
tr_class = "gray" if tr_class == "white" else "white"
tr = TR(_class=tr_class)
name = row.vulnerability_indicator.name
td = TD(mark, _class="indicatorLabels")
td.append(name)
tr.append(td)
value = int(row.vulnerability_data.value)
for i in range(5):
option = INPUT(_type = "radio",
_name = name,
_value = i + 1,
value = value,
_disabled = "disabled",
)
tr.append(option)
table.append(tr)
elif document_type == "demographic":
# Get the data for this report
ddtable = s3db.stats_demographic_data
sdtable = db.stats_demographic
query = (ddtable.deleted == False) & \
(ddtable.source_id == vdoc.source_id) & \
(sdtable.parameter_id == ddtable.parameter_id)
rows = db(query).select(ddtable.value,
ddtable.location_id,
sdtable.name,
orderby = sdtable.name)
# Build the custom table
table = TABLE(_class = "demographicsTable")
table.append(TR(TD(vdoc.name, _colspan=3)))
tr_class = "grey"
location_represent = s3db.gis_LocationRepresent()
for row in rows:
tr_class = "grey" if tr_class == "white" else "white"
tr = TR(_class = tr_class)
name = row.stats_demographic.name
tr.append(TD(name, _class = "demoLabel"))
value = IS_INT_AMOUNT().represent(row.stats_demographic_data.value)
tr.append(TD(value, _class = "demoStatistic"))
location = location_represent(row.stats_demographic_data.location_id)
tr.append(TD(location, _class = "demoSource"))
table.append(tr)
elif document_type in ("map", "image"):
ditable = s3db.doc_image
record = db(ditable.doc_id == vdoc.doc_id).select(ditable.id,
ditable.name,
ditable.file,
ditable.comments,
limitby=(0, 1)
).first()
if record:
size = (250, 250)
image = s3db.pr_image_represent(record.file, size=size)
size = s3db.pr_image_size(image, size)
desc = DIV(record.comments, _class="imageDesc")
filename = record.name
url_small = URL(c="default", f="download", args=image)
alt = record.comments if record.comments else filename
thumb = IMG(_src=url_small,
_alt=alt,
_width=size[0],
_height=size[1]
)
url_full = URL(c="default", f="download", args=record.file)
download = A(T("Download"), _class="download", _href=url_full)
view = A(T("View full size"),
_class="download",
_href=URL(c="vulnerability", f="view_image",
args=record.id),
_target="blank")
table = TABLE(_class = "imageTable")
table.append(TR(TD(thumb, _colspan=4)))
table.append(TR(TD(desc),
TD(download),
TD(DIV(" | ", _class="divider")),
TD(view),
_class="mapRow"))
else:
valid = False
elif document_type in ("other", "vca"):
doctable = s3db.doc_document
record = db(doctable.doc_id == vdoc.doc_id).select(doctable.id,
doctable.file,
doctable.name,
limitby=(0, 1)
).first()
if record:
desc = DIV(record.name, _class="imageDesc")
url = URL(c="default", f="download", args=record.file)
download = A(T("Download"), _class="download", _href=url)
table = TABLE(_class="imageTable")
table.append(TR(TD(desc),
TD(download),
_class="mapRow"))
else:
valid = False
else:
valid = False
# Place the table in a form and attach the buttons (if required)
form = FORM(_id="form%s" % _id)
if valid:
form.append(table)
else:
form.append(DIV(T("No data available"), _class="mapRow"))
if request.args(0) == "review":
if valid:
form.append(INPUT(_type="button", _name="Approve%s" % _id,
_value=T("Approve"), _class="approveButton"))
form.append(INPUT(_type="button", _name="Decline%s" % _id,
_value=T("Decline"), _class="declineButton"))
return str(form)
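# Illustrative usage (editor's sketch, not part of the original source):
# getReportDetails() is reached through the report() controller below, e.g.
#   GET /vulnerability/report/view?id=5
# where "5" is a hypothetical vulnerability_document id; the serialized FORM
# built above is returned as a string.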
# -----------------------------------------------------------------------------
def view_image():
"""
View a Fullscreen version of an Image - called from Reports
"""
try:
_id = request.args[0]
except:
return "Need to provide the id of the Image"
table = s3db.doc_image
record = db(table.id == _id).select(table.name,
table.file,
table.comments,
limitby=(0, 1)).first()
desc = DIV(record.comments, _class="imageDesc")
filename = record.name
url = URL(c="default", f="download", args=record.file)
alt = record.comments if record.comments else filename
image = IMG(_src=url, _alt=alt)
output = Storage(image = image,
desc = desc,
)
return output
# -----------------------------------------------------------------------------
def approve_report(id):
"""
Function to approve a report
"""
# Approve the vulnerability_document record
resource = s3db.resource("vulnerability_document", id=id, unapproved=True)
resource.approve()
# Read the record details
vdoc_table = db.vulnerability_document
record = db(vdoc_table.id == id).select(vdoc_table.document_type,
vdoc_table.doc_id,
vdoc_table.source_id,
limitby=(0, 1)).first()
# Approve the linked records
document_type = record.document_type
if document_type == "indicator":
tablename = "vulnerability_data"
table = s3db[tablename]
query = (table.source_id == record.source_id)
agg_function = "vulnerability_update_aggregates"
elif document_type == "demographic":
tablename = "stats_demographic_data"
table = s3db[tablename]
query = (table.source_id == record.source_id)
agg_function = "stats_demographic_update_aggregates"
elif document_type in ("map", "image"):
tablename = "doc_image"
query = (s3db[tablename].doc_id == record.doc_id)
elif document_type in ("vca", "other"):
tablename = "doc_document"
query = (s3db[tablename].doc_id == record.doc_id)
else:
current.log.error("Report not Approved as unknown type", document_type)
return False
resource = s3db.resource(tablename, filter=query, unapproved=True)
resource.approve()
if document_type in ("indicator", "demographic"):
# Rebuild the relevant aggregates
rows = resource.select(fields=["data_id",
"parameter_id",
"date",
"location_id",
"value"],
as_rows=True)
s3task.async(agg_function,
vars=dict(records=rows.json()))
return True
# -----------------------------------------------------------------------------
def decline_report(id):
"""
Function to Decline a report
"""
# Find the type of report that we have
vdoc_table = s3db.vulnerability_document
record = db(vdoc_table.id == id).select(vdoc_table.document_type,
vdoc_table.doc_id,
vdoc_table.source_id,
limitby=(0, 1)).first()
document_type = record.document_type
# Now that we have the necessary data, reject the report
resource = s3db.resource("vulnerability_document", id=id, unapproved=True)
resource.reject()
# Reject the linked data
if document_type in ("indicator", "demographic"):
source_id = record.source_id
# Reject the stats_data records
query = (db.stats_data.source_id == source_id)
resource = s3db.resource("stats_data", filter=query, unapproved=True)
resource.reject()
# Reject the instance records
if document_type == "indicator":
query = (s3db.vulnerability_data.source_id == source_id)
resource = s3db.resource("vulnerability_data", filter=query,
unapproved=True)
resource.reject()
elif document_type == "demographic":
query = (s3db.stats_demographic_data.source_id == source_id)
resource = s3db.resource("stats_demographic_data", filter=query,
unapproved=True)
resource.reject()
elif document_type in ("image", "map"):
query = (s3db.doc_image.doc_id == record.doc_id)
resource = s3db.resource("doc_image", filter=query, unapproved=True)
resource.reject()
elif document_type in ("other", "vca"):
query = (s3db.doc_document.doc_id == record.doc_id)
resource = s3db.resource("doc_document", filter=query, unapproved=True)
resource.reject()
else:
return False
return True
# -----------------------------------------------------------------------------
def report():
"""
Controller to list/view/approve/reject Reports.
- list uses a suitably-filtered dataTable
"""
s3.no_formats = True
arg = request.args(0)
if arg == "filter":
data = reportDataTable()
elif arg == "review" or arg == "view":
data = getReportDetails()
elif arg == "approve":
# Check authorization
permitted = auth.s3_has_permission("approve", "vulnerability_document")
if not permitted:
data = s3_unicode(T("You are not permitted to approve documents"))
else:
id = request.post_vars.id
if approve_report(id):
data = reportDataTable()
else:
data = s3_unicode(T("Failed to approve"))
elif arg == "decline":
# Check authorization
permitted = auth.s3_has_permission("approve", "vulnerability_document")
if not permitted:
data = s3_unicode(T("You are not permitted to approve documents"))
else:
id = request.post_vars.id
if decline_report(id):
data = reportDataTable()
else:
data = s3_unicode(T("Decline failed"))
else:
date_widget = S3DateWidget(format="yy-mm-dd", future=0)
to_date = Field("to_date")
to_date.tablename = to_date._tablename = ""
from_date = Field("from_date")
from_date.tablename = from_date._tablename = ""
report = reportDataTable()
data = {"filter" : {"to_date" : str(date_widget(to_date, None)),
"from_date" : str(date_widget(from_date, None)),
},
"report" : report
}
response.headers["Content-Type"] = "application/json"
return json.dumps(data)
# -----------------------------------------------------------------------------
def submitData():
""" Controller to manage the AJAX import of vulnerability data """
# Get the action to be performed
action = request.post_vars.action
if action == "vulnerability":
return import_vul_ui()
elif action == "vulnerability_part1":
return import_vul_csv_part1()
elif action == "vulnerability_part2":
return import_vul_csv_part2()
elif action in ("map", "image", "other", "vca"):
return import_document(action)
elif action == "demographics":
return import_demo_ui()
elif action == "demographics_part1":
return import_demo_csv_part1()
elif action == "demographics_part2":
return import_demo_csv_part2()
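# Illustrative request shapes (editor's sketch; field names taken from the
# handlers above, values hypothetical):
#   POST /vulnerability/submitData
#       action=map, file=<uploaded file>, location=<location id>, desc=...
#   POST /vulnerability/submitData
#       action=vulnerability_part2, job=<upload job id>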
# -----------------------------------------------------------------------------
def import_vul_ui():
"""
Controller to add a new set of vulnerability indicators
        which have been input directly into the GUI
"""
date = request.utcnow
post_vars = request.post_vars
location_id = post_vars.location
update_super = s3db.update_super
# First create the stats_source
# NB This is direct to SE, no E here!
ss_table = s3db.stats_source
source_id = ss_table.insert(name = "Vulnerability indicators submitted through UI")
# Next create the vulnerability_document
vdoc_table = s3db.vulnerability_document
id = vdoc_table.insert(document_type = "indicator",
date = date,
location_id = location_id,
source_id = source_id,
)
update_super(vdoc_table, dict(id=id))
# Get the list of indicators
itable = s3db.vulnerability_indicator
rows = db(itable.deleted == False).select(itable.posn,
itable.parameter_id,
orderby=itable.posn)
vd_table = db.vulnerability_data
for row in rows:
id = vd_table.insert(parameter_id = row.parameter_id,
location_id = location_id,
value = post_vars[str(row.posn)],
date = date,
source_id = source_id,
)
update_super(vd_table, dict(id=id))
# -----------------------------------------------------------------------------
def import_vul_csv_part1():
"""
Controller to manage the first phase of the import of vulnerability
indicators from CSV
"""
from gluon.serializers import json as jsons
try:
file = request.post_vars.file.file
except:
response.headers["Content-Type"] = "application/json"
return jsons({"Error": s3_unicode(T("File missing"))})
# Check authorization
authorised = auth.s3_has_permission("create", "vulnerability_data")
if not authorised:
response.headers["Content-Type"] = "application/json"
return jsons({"Error": s3_unicode(T("You are not permitted to upload files"))})
# Do a normal CSV import
output = s3_rest_controller("vulnerability", "data",
csv_stylesheet="data.xsl")
if "Error" in output:
response.headers["Content-Type"] = "application/json"
return jsons({"Error": s3_unicode(output["Error"])})
upload_id = output[0]
item_ids = output[1]
data = output[2]
# Loop through all the vulnerability_data & group by source_id
from lxml import etree
loc_labels = {}
ele_dict = {}
for value in data:
if value["s3_import_item.error"]:
response.headers["Content-Type"] = "application/json"
return jsons({"Error": value["s3_import_item.error"]})
ele = value["s3_import_item.element"]
ele = s3xml.xml_decode(ele)
try:
element = etree.fromstring(ele)
except:
return T("No valid data in the file")
data_dict = {}
data = element.findall("data")
for item in data:
f = item.get("field", None)
v = item.get("value", None)
data_dict[f] = v
references = element.findall("reference")
for reference in references:
f = reference.get("field", None)
if f == "source_id":
source_tuid = reference.get("tuid", None)
# tuid: stats_source//Level/Country/L1/L2/L3//Date
try:
# Extract the Location
loc_parts = source_tuid.split("//")[1].split("/")
data_dict["location"] = loc_parts[-1]
level = loc_parts[0]
country_code = loc_parts[1]
if country_code not in loc_labels:
country_name = gis.get_country(country_code, key_type="code")
table = s3db.gis_location
country_id = db(table.name == country_name).select(table.id,
limitby=(0, 1)).first().id
lx_labels = gis.get_location_hierarchy(location=country_id)
loc_labels[country_code] = lx_labels
else:
lx_labels = loc_labels[country_code]
data_dict["loc_label"] = lx_labels[level]
except:
# Invalid source_tuid
continue
elif f == "parameter_id":
t = reference.get("tuid", None)
try:
indicator = t.split("/")[1]
data_dict[f] = indicator
except:
# We can't do anything with a data element not linked to an Indicator
continue
if source_tuid in ele_dict:
ele_dict[source_tuid].append(data_dict)
else:
ele_dict[source_tuid] = [data_dict]
# Now prepare the data for display in the UI
from datetime import datetime
data_list = []
for (key, group) in ele_dict.items():
row = group[0]
group_dict = dict(
group = key,
date = datetime.strptime(row["date"], "%Y-%m-%d").strftime("%d-%b-%y"),
location = "%s %s" % (row["location"], row["loc_label"])
)
indicator_dict = {}
param_len = len(row["parameter_id"][0]) + 1 # include the separator
for row in group:
param = row["parameter_id"]
indicator_dict[param] = row["value"]
group_dict["data"] = indicator_dict
data_list.append(group_dict)
# Return the output
response.headers["Content-Type"] = "application/json"
return jsons({"upload_id" : upload_id,
"items" : item_ids,
"data" : data_list
})
# -----------------------------------------------------------------------------
def import_vul_csv_part2():
"""
Controller to manage the second phase of the import of vulnerability
indicators from CSV
"""
job_id = request.post_vars.job
if not job_id:
return "Error No Job ID's provided"
output = s3_rest_controller("vulnerability", "data",
csv_stylesheet="data.xsl")
totalRecords = output[0]
totalErrors = output[1]
totalIgnored = output[2]
from gluon.serializers import json as jsons
response.headers["Content-Type"] = "application/json"
return jsons({"totalRecords" : totalRecords,
"totalErrors" : totalErrors,
"totalIgnored" : totalIgnored
})
# -----------------------------------------------------------------------------
def import_document(document_type):
"""
Controller to store a document
"""
if document_type in ("map", "image"):
image = True
doc_table = s3db.doc_image
else:
image = False
doc_table = s3db.doc_document
post_vars = request.post_vars
file = post_vars.file
real_filename = file.filename
new_filename = doc_table.file.store(file, real_filename)
date = request.utcnow
location_id = post_vars.location
# Create the vulnerability_document
vdoc_table = s3db.vulnerability_document
id = vdoc_table.insert(document_type = document_type,
date = date,
location_id = location_id,
)
record = dict(id=id)
s3db.update_super(vdoc_table, record)
# Create the doc_document or doc_image
doc_table.insert(doc_id = record["doc_id"],
file = new_filename,
name = real_filename,
date = date,
comments = post_vars.desc,
location_id = location_id,
)
if image:
# Create a thumbnail of the image
s3db.pr_image_resize(file.file,
new_filename,
real_filename,
(250, 250),
)
# -----------------------------------------------------------------------------
def import_demo_ui():
"""
Controller to store a new set of demographic data which has been input
        directly into the GUI
"""
vdoc_table = s3db.vulnerability_document
ss_table = db.stats_source
update_super = s3db.update_super
post_vars = request.post_vars
location_id = post_vars.location
date_submitted = post_vars.reportDate
# First create the demographic_documents (one per source)
last_source = ""
source_list = {} # the sources
seen_source = [] # the sources that have already been seen
data = []
for x in range(7):
value = post_vars["demoField%s" % x]
source = post_vars["sourceField%s" % x]
if source == "":
# Allow user to enter the source in just 1 field to use for all subsequent
source = last_source
else:
last_source = source
date = post_vars["reportDate%s" % x]
data.append((value, source, date))
if source != "" and value != "":
# Add the source if we have a value
if source not in seen_source:
seen_source.append(source)
# Create the stats_source
# - note that this means we'll get multiple copies of the same sources
# - however approval is done by vulnerability_document, so each vulnerability_document needs a unique source :/
source_id = ss_table.insert(name = source)
# Now create the vulnerability_document
id = vdoc_table.insert(name = source,
date = date_submitted,
location_id = location_id,
document_type = "demographic",
source_id = source_id,
)
update_super(vdoc_table, dict(id=id))
source_list[source] = source_id
# Now get the Demographic parameter_ids
demo_string_list = ["Population",
"Male",
"Female",
"Over 60",
"Under 5",
"Households",
"Households below poverty line"
]
sd_table = s3db.stats_demographic
    rows = db(sd_table.name.belongs(demo_string_list)).select(sd_table.name,
                                                              sd_table.parameter_id)
# Sort these into the order of the UI
demo_recs = {}
for record in rows:
demo_recs[record.name] = record.parameter_id
demographics_list = []
for demo_string in demo_string_list:
if demo_string in demo_recs:
demographics_list.append(demo_recs[demo_string])
else:
            demographics_list.append(None) # Should never happen
# Create the demographic_data records
sdd_table = db.stats_demographic_data
for x in range(7):
_data = data[x]
if _data[0] != "":
id = sdd_table.insert(parameter_id = demographics_list[x],
location_id = location_id,
value = _data[0],
date = _data[2],
source_id = source_list[_data[1]],
)
update_super(sdd_table, dict(id=id))
# -----------------------------------------------------------------------------
def import_demo_csv_part1():
"""
Controller to manage the first phase of the import of demographic data
from CSV
"""
from gluon.serializers import json as jsons
try:
file = request.post_vars.file.file
except:
response.headers["Content-Type"] = "application/json"
return jsons({"Error": s3_unicode(T("File missing"))})
# Check authorization
permitted = auth.s3_has_permission
authorised = permitted("create", "stats_demographic_data")
if not authorised:
response.headers["Content-Type"] = "application/json"
return jsons({"Error": s3_unicode(T("You are not permitted to upload files"))})
request.controller = "stats" # Need to set the controller to stats
output = s3_rest_controller("stats", "demographic_data",
csv_stylesheet="demographic_data.xsl")
if "Error" in output:
response.headers["Content-Type"] = "application/json"
return jsons({"Error": s3_unicode(output["Error"])})
upload_id = output[0]
item_ids = output[1]
data = output[2]
# Loop through all the stats_demographic_data & group by source_id
from lxml import etree
loc_labels = {}
ele_dict = {}
for value in data:
if value["s3_import_item.error"]:
response.headers["Content-Type"] = "application/json"
return jsons({"Error": value["s3_import_item.error"]})
ele = value["s3_import_item.element"]
ele = s3xml.xml_decode(ele)
try:
element = etree.fromstring(ele)
except:
return T("No valid data in the file")
data_dict = {}
data = element.findall("data")
for item in data:
f = item.get("field", None)
v = item.get("value", None)
data_dict[f] = v
references = element.findall("reference")
for reference in references:
f = reference.get("field", None)
if f == "source_id":
source_tuid = reference.get("tuid", None)
elif f == "location_id":
# tuid: Level/Country/L1/L2/L3
tuid = reference.get("tuid", None)
if tuid:
try:
# Extract the Location
loc_parts = tuid.split("/")
data_dict["location"] = loc_parts[-1]
level = loc_parts[0]
country_code = loc_parts[1]
if country_code not in loc_labels:
country_name = gis.get_country(country_code, key_type="code")
table = s3db.gis_location
country_id = db(table.name == country_name).select(table.id,
limitby=(0, 1)).first().id
lx_labels = gis.get_location_hierarchy(location=country_id)
loc_labels[country_code] = lx_labels
else:
lx_labels = loc_labels[country_code]
data_dict["loc_label"] = lx_labels[level]
except:
# Invalid location_tuid
continue
else:
uuid = reference.get("uuid", None)
if uuid:
data_dict["loc_label"] = COUNTRY
country_code = uuid.split(":")[-1]
data_dict["location"] = gis.get_country(country_code, key_type="code")
elif f == "parameter_id":
t = reference.get("tuid", None)
try:
demographic = t.split("/")[1]
data_dict[f] = demographic
except:
# We can't do anything with a data element not linked to a Demographic
continue
if source_tuid in ele_dict:
ele_dict[source_tuid].append(data_dict)
else:
ele_dict[source_tuid] = [data_dict]
# Now prepare the data for display in the UI
from datetime import datetime
data_list = []
for (key, group) in ele_dict.items():
row = group[0]
group_dict = dict(
group = key,
date = datetime.strptime(row["date"], "%Y-%m-%d").strftime("%d-%b-%y"),
location = "%s %s" % (row["location"], row["loc_label"])
)
indicator_dict = {}
param_len = len(row["parameter_id"][0]) + 1 # include the separator
for row in group:
param = row["parameter_id"]
indicator_dict[param] = row["value"]
group_dict["data"] = indicator_dict
data_list.append(group_dict)
# Return the output
response.headers["Content-Type"] = "application/json"
return jsons({"upload_id" : upload_id,
"items" : item_ids,
"data" : data_list
})
# -----------------------------------------------------------------------------
def import_demo_csv_part2():
"""
Controller to manage the second phase of the import of demographic data
from CSV
"""
job_id = request.post_vars.job
if not job_id:
return "Error No Job ID's provided"
# Fake the controller for the import
request.controller = "stats"
output = s3_rest_controller("stats", "demographic_data",
csv_stylesheet="demographic_data.xsl")
totalRecords = output[0]
totalErrors = output[1]
totalIgnored = output[2]
from gluon.serializers import json as jsons
response.headers["Content-Type"] = "application/json"
return jsons({"totalRecords" : totalRecords,
"totalErrors" : totalErrors,
"totalIgnored" : totalIgnored
})
# -----------------------------------------------------------------------------
def indicator():
""" REST Controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def aggregated_indicator():
""" REST Controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def data():
""" REST Controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def document():
""" REST Controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def aggregate():
""" REST Controller """
def clear_aggregates(r, **attr):
if not s3_has_role(ADMIN):
auth.permission.fail()
s3db.stats_demographic_rebuild_all_aggregates()
redirect(URL(c="vulnerability",
f="aggregate",
args="",
))
s3db.set_method("vulnerability", "aggregate",
method="clear",
                    action=clear_aggregates)
output = s3_rest_controller()
return output
# -----------------------------------------------------------------------------
def handdrawn():
""" REST Controller for Hand-drawn Maps """
table = s3db.vulnerability_document
s3.filter = (s3db.doc_image.doc_id == table.doc_id) & \
(table.document_type == "map")
return s3_rest_controller("doc", "image")
# -----------------------------------------------------------------------------
def hazard():
""" REST Controller """
return s3_rest_controller()
# -----------------------------------------------------------------------------
def risk():
""" REST Controller """
return s3_rest_controller(rheader=s3db.vulnerability_rheader)
# -----------------------------------------------------------------------------
def evac_route():
""" REST Controller """
return s3_rest_controller()
# END =========================================================================
|
mit
| 125,762,405,724,610,190 | 1,883,646,339,871,495,700 | 37.535602 | 127 | 0.478129 | false |
uruz/django-rest-framework
|
runtests.py
|
63
|
3080
|
#! /usr/bin/env python
from __future__ import print_function
import os
import subprocess
import sys
import pytest
PYTEST_ARGS = {
'default': ['tests', '--tb=short', '-s'],
'fast': ['tests', '--tb=short', '-q', '-s'],
}
FLAKE8_ARGS = ['rest_framework', 'tests', '--ignore=E501']
ISORT_ARGS = ['--recursive', '--check-only', 'rest_framework', 'tests']
sys.path.append(os.path.dirname(__file__))
def exit_on_failure(ret, message=None):
if ret:
sys.exit(ret)
def flake8_main(args):
print('Running flake8 code linting')
ret = subprocess.call(['flake8'] + args)
print('flake8 failed' if ret else 'flake8 passed')
return ret
def isort_main(args):
print('Running isort code checking')
ret = subprocess.call(['isort'] + args)
if ret:
print('isort failed: Some modules have incorrectly ordered imports. Fix by running `isort --recursive .`')
else:
print('isort passed')
return ret
def split_class_and_function(string):
class_string, function_string = string.split('.', 1)
return "%s and %s" % (class_string, function_string)
def is_function(string):
# `True` if it looks like a test function is included in the string.
return string.startswith('test_') or '.test_' in string
def is_class(string):
# `True` if first character is uppercase - assume it's a class name.
return string[0] == string[0].upper()
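# Rough mapping of selector arguments to pytest expressions (editor's note;
# the test names are hypothetical):
#   runtests.py MyTestCase.test_thing -> pytest tests -k "MyTestCase and test_thing"
#   runtests.py test_thing            -> pytest tests -k test_thing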
if __name__ == "__main__":
try:
sys.argv.remove('--nolint')
except ValueError:
run_flake8 = True
run_isort = True
else:
run_flake8 = False
run_isort = False
try:
sys.argv.remove('--lintonly')
except ValueError:
run_tests = True
else:
run_tests = False
try:
sys.argv.remove('--fast')
except ValueError:
style = 'default'
else:
style = 'fast'
run_flake8 = False
run_isort = False
if len(sys.argv) > 1:
pytest_args = sys.argv[1:]
first_arg = pytest_args[0]
try:
pytest_args.remove('--coverage')
except ValueError:
pass
else:
pytest_args = ['--cov', 'rest_framework'] + pytest_args
if first_arg.startswith('-'):
# `runtests.py [flags]`
pytest_args = ['tests'] + pytest_args
elif is_class(first_arg) and is_function(first_arg):
# `runtests.py TestCase.test_function [flags]`
expression = split_class_and_function(first_arg)
pytest_args = ['tests', '-k', expression] + pytest_args[1:]
elif is_class(first_arg) or is_function(first_arg):
# `runtests.py TestCase [flags]`
# `runtests.py test_function [flags]`
pytest_args = ['tests', '-k', pytest_args[0]] + pytest_args[1:]
else:
pytest_args = PYTEST_ARGS[style]
if run_tests:
exit_on_failure(pytest.main(pytest_args))
if run_flake8:
exit_on_failure(flake8_main(FLAKE8_ARGS))
if run_isort:
exit_on_failure(isort_main(ISORT_ARGS))
|
bsd-2-clause
| 7,002,230,375,393,922,000 | 1,129,569,600,937,561,600 | 24.882353 | 114 | 0.584091 | false |
hyperized/ansible
|
lib/ansible/module_utils/network/frr/providers/module.py
|
20
|
2106
|
#
# (c) 2019, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.frr.providers import providers
from ansible.module_utils._text import to_text
class NetworkModule(AnsibleModule):
fail_on_missing_provider = True
def __init__(self, connection=None, *args, **kwargs):
super(NetworkModule, self).__init__(*args, **kwargs)
if connection is None:
connection = Connection(self._socket_path)
self.connection = connection
@property
def provider(self):
if not hasattr(self, '_provider'):
capabilities = self.from_json(self.connection.get_capabilities())
network_os = capabilities['device_info']['network_os']
network_api = capabilities['network_api']
if network_api == 'cliconf':
connection_type = 'network_cli'
cls = providers.get(network_os, self._name.split('.')[-1], connection_type)
if not cls:
msg = 'unable to find suitable provider for network os %s' % network_os
if self.fail_on_missing_provider:
self.fail_json(msg=msg)
else:
self.warn(msg)
obj = cls(self.params, self.connection, self.check_mode)
setattr(self, '_provider', obj)
return getattr(self, '_provider')
def get_facts(self, subset=None):
try:
self.provider.get_facts(subset)
except Exception as exc:
self.fail_json(msg=to_text(exc))
def edit_config(self, config_filter=None):
current_config = self.connection.get_config(flags=config_filter)
try:
commands = self.provider.edit_config(current_config)
changed = bool(commands)
return {'commands': commands, 'changed': changed}
except Exception as exc:
self.fail_json(msg=to_text(exc))
|
gpl-3.0
| -7,049,161,898,118,851,000 | -630,117,665,281,693,000 | 32.967742 | 92 | 0.612061 | false |
CydarLtd/ansible
|
lib/ansible/modules/network/panos/panos_interface.py
|
78
|
5736
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Ansible module to manage PaloAltoNetworks Firewall
# (c) 2016, techbizdev <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: panos_interface
short_description: configure data-port network interface for DHCP
description:
- Configure data-port (DP) network interface for DHCP. By default DP interfaces are static.
author: "Luigi Mori (@jtschichold), Ivan Bojer (@ivanbojer)"
version_added: "2.3"
requirements:
- pan-python can be obtained from PyPi U(https://pypi.python.org/pypi/pan-python)
notes:
- Checkmode is not supported.
options:
ip_address:
description:
- IP address (or hostname) of PAN-OS device being configured.
required: true
username:
description:
- Username credentials to use for auth.
default: "admin"
password:
description:
- Password credentials to use for auth.
required: true
if_name:
description:
- Name of the interface to configure.
required: true
zone_name:
description: >
Name of the zone for the interface. If the zone does not exist it is created but if the zone exists and
it is not of the layer3 type the operation will fail.
required: true
create_default_route:
description:
- Whether or not to add default route with router learned via DHCP.
default: "false"
commit:
description:
- Commit if changed
default: true
'''
EXAMPLES = '''
- name: enable DHCP client on ethernet1/1 in zone public
interface:
password: "admin"
ip_address: "192.168.1.1"
if_name: "ethernet1/1"
zone_name: "public"
create_default_route: "yes"
'''
RETURN = '''
# Default return values
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import get_exception
try:
import pan.xapi
from pan.xapi import PanXapiError
HAS_LIB = True
except ImportError:
HAS_LIB = False
_IF_XPATH = "/config/devices/entry[@name='localhost.localdomain']" +\
"/network/interface/ethernet/entry[@name='%s']"
_ZONE_XPATH = "/config/devices/entry[@name='localhost.localdomain']" +\
"/vsys/entry/zone/entry"
_ZONE_XPATH_QUERY = _ZONE_XPATH+"[network/layer3/member/text()='%s']"
_ZONE_XPATH_IF = _ZONE_XPATH+"[@name='%s']/network/layer3/member[text()='%s']"
_VR_XPATH = "/config/devices/entry[@name='localhost.localdomain']" +\
"/network/virtual-router/entry"
def add_dhcp_if(xapi, if_name, zone_name, create_default_route):
    if_xml = [
        '<entry name="%s">',
        '<layer3>',
        '<dhcp-client>',
        '<create-default-route>%s</create-default-route>',
        '</dhcp-client>',
        '</layer3>',
        '</entry>'
    ]
cdr = 'yes'
if not create_default_route:
cdr = 'no'
if_xml = (''.join(if_xml)) % (if_name, cdr)
xapi.edit(xpath=_IF_XPATH % if_name, element=if_xml)
xapi.set(xpath=_ZONE_XPATH+"[@name='%s']/network/layer3" % zone_name,
element='<member>%s</member>' % if_name)
xapi.set(xpath=_VR_XPATH+"[@name='default']/interface",
element='<member>%s</member>' % if_name)
return True
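# For reference (editor's sketch): with if_name="ethernet1/1" and
# create_default_route=True, the element edited at _IF_XPATH above joins to:
#   <entry name="ethernet1/1"><layer3><dhcp-client>
#   <create-default-route>yes</create-default-route>
#   </dhcp-client></layer3></entry>
# (emitted as a single unbroken string; wrapped here for comment width)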
def if_exists(xapi, if_name):
xpath = _IF_XPATH % if_name
xapi.get(xpath=xpath)
network = xapi.element_root.find('.//layer3')
return (network is not None)
def main():
argument_spec = dict(
ip_address=dict(required=True),
password=dict(required=True, no_log=True),
username=dict(default='admin'),
if_name=dict(required=True),
zone_name=dict(required=True),
create_default_route=dict(type='bool', default=False),
commit=dict(type='bool', default=True)
)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
if not HAS_LIB:
module.fail_json(msg='pan-python is required for this module')
ip_address = module.params["ip_address"]
password = module.params["password"]
username = module.params['username']
xapi = pan.xapi.PanXapi(
hostname=ip_address,
api_username=username,
api_password=password
)
if_name = module.params['if_name']
zone_name = module.params['zone_name']
create_default_route = module.params['create_default_route']
commit = module.params['commit']
ifexists = if_exists(xapi, if_name)
if ifexists:
module.exit_json(changed=False, msg="interface exists, not changed")
try:
changed = add_dhcp_if(xapi, if_name, zone_name, create_default_route)
except PanXapiError:
exc = get_exception()
module.fail_json(msg=exc.message)
if changed and commit:
xapi.commit(cmd="<commit></commit>", sync=True, interval=1)
module.exit_json(changed=changed, msg="okey dokey")
if __name__ == '__main__':
main()
|
gpl-3.0
| -988,795,381,806,506,100 | 5,393,173,411,323,869,000 | 30.173913 | 115 | 0.639121 | false |
MonicaHsu/truvaluation
|
venv/lib/python2.7/site-packages/gunicorn/util.py
|
24
|
15402
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import fcntl
import io
import os
import pkg_resources
import random
import resource
import socket
import sys
import textwrap
import time
import traceback
import inspect
import errno
import warnings
from gunicorn.errors import AppImportError
from gunicorn.six import text_type, string_types
MAXFD = 1024
REDIRECT_TO = getattr(os, 'devnull', '/dev/null')
timeout_default = object()
CHUNK_SIZE = (16 * 1024)
MAX_BODY = 1024 * 132
weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
monthname = [None,
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
# Server and Date aren't technically hop-by-hop
# headers, but they are in the purview of the
# origin server which the WSGI spec says we should
# act like. So we drop them and add our own.
#
# In the future, concatenating server header values
# might be better, but nothing else does it and
# dropping them is easier.
hop_headers = set("""
connection keep-alive proxy-authenticate proxy-authorization
te trailers transfer-encoding upgrade
server date
""".split())
try:
from setproctitle import setproctitle
def _setproctitle(title):
setproctitle("gunicorn: %s" % title)
except ImportError:
def _setproctitle(title):
return
try:
from importlib import import_module
except ImportError:
def _resolve_name(name, package, level):
"""Return the absolute name of the module to be imported."""
if not hasattr(package, 'rindex'):
raise ValueError("'package' not set to a string")
dot = len(package)
for x in range(level, 1, -1):
try:
dot = package.rindex('.', 0, dot)
except ValueError:
raise ValueError("attempted relative import beyond top-level "
"package")
return "%s.%s" % (package[:dot], name)
def import_module(name, package=None):
"""Import a module.
The 'package' argument is required when performing a relative import. It
specifies the package to use as the anchor point from which to resolve the
relative import to an absolute import.
"""
if name.startswith('.'):
if not package:
raise TypeError("relative imports require the 'package' argument")
level = 0
for character in name:
if character != '.':
break
level += 1
name = _resolve_name(name[level:], package, level)
__import__(name)
return sys.modules[name]
def load_class(uri, default="sync", section="gunicorn.workers"):
if inspect.isclass(uri):
return uri
if uri.startswith("egg:"):
# uses entry points
entry_str = uri.split("egg:")[1]
try:
dist, name = entry_str.rsplit("#", 1)
except ValueError:
dist = entry_str
name = default
try:
return pkg_resources.load_entry_point(dist, section, name)
except:
exc = traceback.format_exc()
raise RuntimeError("class uri %r invalid or not found: \n\n[%s]" % (uri,
exc))
else:
components = uri.split('.')
if len(components) == 1:
try:
if uri.startswith("#"):
uri = uri[1:]
return pkg_resources.load_entry_point("gunicorn",
section, uri)
except:
exc = traceback.format_exc()
raise RuntimeError("class uri %r invalid or not found: \n\n[%s]" % (uri,
exc))
klass = components.pop(-1)
try:
mod = __import__('.'.join(components))
except:
exc = traceback.format_exc()
raise RuntimeError("class uri %r invalid or not found: \n\n[%s]" % (uri,
exc))
for comp in components[1:]:
mod = getattr(mod, comp)
return getattr(mod, klass)
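# Accepted URI forms (editor's summary of the branches above; "sync" is
# gunicorn's default worker entry point):
#   load_class("egg:gunicorn#sync")                 # entry point in a named dist
#   load_class("sync")                              # bare entry-point name
#   load_class("gunicorn.workers.sync.SyncWorker")  # dotted class path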
def set_owner_process(uid, gid):
""" set user and group of workers processes """
if gid:
# versions of python < 2.6.2 don't manage unsigned int for
# groups like on osx or fedora
gid = abs(gid) & 0x7FFFFFFF
os.setgid(gid)
if uid:
os.setuid(uid)
def chown(path, uid, gid):
gid = abs(gid) & 0x7FFFFFFF # see note above.
os.chown(path, uid, gid)
if sys.platform.startswith("win"):
def _waitfor(func, pathname, waitall=False):
        # Perform the operation
func(pathname)
# Now setup the wait loop
if waitall:
dirname = pathname
else:
dirname, name = os.path.split(pathname)
dirname = dirname or '.'
# Check for `pathname` to be removed from the filesystem.
# The exponential backoff of the timeout amounts to a total
# of ~1 second after which the deletion is probably an error
# anyway.
        # Testing on a typical desktop CPU shows that usually only 1
        # iteration is required when contention occurs.
timeout = 0.001
while timeout < 1.0:
            # Note we are only testing for the existence of the file(s) in
# the contents of the directory regardless of any security or
# access rights. If we have made it this far, we have sufficient
# permissions to do that much using Python's equivalent of the
# Windows API FindFirstFile.
# Other Windows APIs can fail or give incorrect results when
# dealing with files that are pending deletion.
L = os.listdir(dirname)
if not (L if waitall else name in L):
return
# Increase the timeout and try again
time.sleep(timeout)
timeout *= 2
warnings.warn('tests may fail, delete still pending for ' + pathname,
RuntimeWarning, stacklevel=4)
def _unlink(filename):
_waitfor(os.unlink, filename)
else:
_unlink = os.unlink
def unlink(filename):
try:
_unlink(filename)
except OSError as error:
# The filename need not exist.
if error.errno not in (errno.ENOENT, errno.ENOTDIR):
raise
def is_ipv6(addr):
try:
socket.inet_pton(socket.AF_INET6, addr)
except socket.error: # not a valid address
return False
except ValueError: # ipv6 not supported on this platform
return False
return True
def parse_address(netloc, default_port=8000):
if netloc.startswith("unix://"):
return netloc.split("unix://")[1]
if netloc.startswith("unix:"):
return netloc.split("unix:")[1]
if netloc.startswith("tcp://"):
netloc = netloc.split("tcp://")[1]
# get host
if '[' in netloc and ']' in netloc:
host = netloc.split(']')[0][1:].lower()
elif ':' in netloc:
host = netloc.split(':')[0].lower()
elif netloc == "":
host = "0.0.0.0"
else:
host = netloc.lower()
    # get port
netloc = netloc.split(']')[-1]
if ":" in netloc:
port = netloc.split(':', 1)[1]
if not port.isdigit():
raise RuntimeError("%r is not a valid port number." % port)
port = int(port)
else:
port = default_port
return (host, port)
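# Expected results (editor's sketch based on the branches above):
#   parse_address("unix:/tmp/gunicorn.sock")  -> "/tmp/gunicorn.sock"
#   parse_address("localhost:9000")           -> ("localhost", 9000)
#   parse_address("[::1]:9000")               -> ("::1", 9000)
#   parse_address("")                         -> ("0.0.0.0", 8000)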
def get_maxfd():
maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
if (maxfd == resource.RLIM_INFINITY):
maxfd = MAXFD
return maxfd
def close_on_exec(fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
flags |= fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, flags)
def set_non_blocking(fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def close(sock):
try:
sock.close()
except socket.error:
pass
try:
from os import closerange
except ImportError:
def closerange(fd_low, fd_high):
# Iterate through and close all file descriptors.
for fd in range(fd_low, fd_high):
try:
os.close(fd)
except OSError: # ERROR, fd wasn't open to begin with (ignored)
pass
def write_chunk(sock, data):
if isinstance(data, text_type):
data = data.encode('utf-8')
chunk_size = "%X\r\n" % len(data)
chunk = b"".join([chunk_size.encode('utf-8'), data, b"\r\n"])
sock.sendall(chunk)
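# e.g. write_chunk(sock, "hello") sends b"5\r\nhello\r\n" - the hex length,
# CRLF, payload, CRLF (editor's note).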
def write(sock, data, chunked=False):
if chunked:
return write_chunk(sock, data)
sock.sendall(data)
def write_nonblock(sock, data, chunked=False):
timeout = sock.gettimeout()
if timeout != 0.0:
try:
sock.setblocking(0)
return write(sock, data, chunked)
finally:
sock.setblocking(1)
else:
return write(sock, data, chunked)
def writelines(sock, lines, chunked=False):
for line in list(lines):
write(sock, line, chunked)
def write_error(sock, status_int, reason, mesg):
html = textwrap.dedent("""\
<html>
<head>
<title>%(reason)s</title>
</head>
<body>
<h1>%(reason)s</h1>
%(mesg)s
</body>
</html>
""") % {"reason": reason, "mesg": mesg}
http = textwrap.dedent("""\
HTTP/1.1 %s %s\r
Connection: close\r
Content-Type: text/html\r
Content-Length: %d\r
\r
%s
""") % (str(status_int), reason, len(html), html)
write_nonblock(sock, http.encode('latin1'))
def normalize_name(name):
return "-".join([w.lower().capitalize() for w in name.split("-")])
def import_app(module):
parts = module.split(":", 1)
if len(parts) == 1:
module, obj = module, "application"
else:
module, obj = parts[0], parts[1]
try:
__import__(module)
except ImportError:
if module.endswith(".py") and os.path.exists(module):
raise ImportError("Failed to find application, did "
"you mean '%s:%s'?" % (module.rsplit(".", 1)[0], obj))
else:
raise
mod = sys.modules[module]
try:
app = eval(obj, mod.__dict__)
except NameError:
raise AppImportError("Failed to find application: %r" % module)
if app is None:
raise AppImportError("Failed to find application object: %r" % obj)
if not callable(app):
raise AppImportError("Application object must be callable.")
return app
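# Typical inputs (editor's sketch; module/attribute names hypothetical):
#   import_app("myproject.wsgi")      # loads myproject.wsgi.application
#   import_app("myproject.wsgi:app")  # loads myproject.wsgi.app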
def getcwd():
# get current path, try to use PWD env first
try:
a = os.stat(os.environ['PWD'])
b = os.stat(os.getcwd())
if a.st_ino == b.st_ino and a.st_dev == b.st_dev:
cwd = os.environ['PWD']
else:
cwd = os.getcwd()
except:
cwd = os.getcwd()
return cwd
def http_date(timestamp=None):
"""Return the current date and time formatted for a message header."""
if timestamp is None:
timestamp = time.time()
year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp)
s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
weekdayname[wd],
day, monthname[month], year,
hh, mm, ss)
return s
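# e.g. http_date(0) -> "Thu, 01 Jan 1970 00:00:00 GMT" (editor's note).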
def is_hoppish(header):
return header.lower().strip() in hop_headers
def daemonize(enable_stdio_inheritance=False):
"""\
Standard daemonization of a process.
http://www.svbug.com/documentation/comp.unix.programmer-FAQ/faq_2.html#SEC16
"""
if not 'GUNICORN_FD' in os.environ:
if os.fork():
os._exit(0)
os.setsid()
if os.fork():
os._exit(0)
os.umask(0)
# In both the following any file descriptors above stdin
        # stdout and stderr are left untouched. The inheritance
# option simply allows one to have output go to a file
# specified by way of shell redirection when not wanting
# to use --error-log option.
if not enable_stdio_inheritance:
# Remap all of stdin, stdout and stderr on to
# /dev/null. The expectation is that users have
# specified the --error-log option.
closerange(0, 3)
fd_null = os.open(REDIRECT_TO, os.O_RDWR)
if fd_null != 0:
os.dup2(fd_null, 0)
os.dup2(fd_null, 1)
os.dup2(fd_null, 2)
else:
fd_null = os.open(REDIRECT_TO, os.O_RDWR)
# Always redirect stdin to /dev/null as we would
# never expect to need to read interactive input.
if fd_null != 0:
os.close(0)
os.dup2(fd_null, 0)
# If stdout and stderr are still connected to
# their original file descriptors we check to see
# if they are associated with terminal devices.
# When they are we map them to /dev/null so that
# are still detached from any controlling terminal
# properly. If not we preserve them as they are.
#
# If stdin and stdout were not hooked up to the
# original file descriptors, then all bets are
# off and all we can really do is leave them as
# they were.
#
# This will allow 'gunicorn ... > output.log 2>&1'
# to work with stdout/stderr going to the file
# as expected.
#
# Note that if using --error-log option, the log
# file specified through shell redirection will
# only be used up until the log file specified
# by the option takes over. As it replaces stdout
# and stderr at the file descriptor level, then
# anything using stdout or stderr, including having
# cached a reference to them, will still work.
def redirect(stream, fd_expect):
try:
fd = stream.fileno()
if fd == fd_expect and stream.isatty():
os.close(fd)
os.dup2(fd_null, fd)
except AttributeError:
pass
redirect(sys.stdout, 1)
redirect(sys.stderr, 2)
def seed():
try:
random.seed(os.urandom(64))
except NotImplementedError:
random.seed('%s.%s' % (time.time(), os.getpid()))
def check_is_writeable(path):
try:
f = open(path, 'a')
except IOError as e:
raise RuntimeError("Error: '%s' isn't writable [%r]" % (path, e))
f.close()
def to_bytestring(value):
"""Converts a string argument to a byte string"""
if isinstance(value, bytes):
return value
assert isinstance(value, text_type)
return value.encode("utf-8")
def is_fileobject(obj):
if not hasattr(obj, "tell") or not hasattr(obj, "fileno"):
return False
# check BytesIO case and maybe others
try:
obj.fileno()
except io.UnsupportedOperation:
return False
return True
def warn(msg):
sys.stderr.write("!!!\n")
lines = msg.splitlines()
for i, line in enumerate(lines):
if i == 0:
line = "WARNING: %s" % line
sys.stderr.write("!!! %s\n" % line)
sys.stderr.write("!!!\n\n")
sys.stderr.flush()
|
mit
| -8,003,306,553,882,952,000 | -6,534,499,294,662,614,000 | 27.735075 | 88 | 0.569861 | false |
marissazhou/django
|
django/template/backends/dummy.py
|
480
|
2037
|
# Since this package contains a "django" module, this is required on Python 2.
from __future__ import absolute_import
import errno
import io
import string
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.template import Origin, TemplateDoesNotExist
from django.utils.html import conditional_escape
from .base import BaseEngine
from .utils import csrf_input_lazy, csrf_token_lazy
class TemplateStrings(BaseEngine):
app_dirname = 'template_strings'
def __init__(self, params):
params = params.copy()
options = params.pop('OPTIONS').copy()
if options:
raise ImproperlyConfigured(
"Unknown options: {}".format(", ".join(options)))
super(TemplateStrings, self).__init__(params)
def from_string(self, template_code):
return Template(template_code)
def get_template(self, template_name):
tried = []
for template_file in self.iter_template_filenames(template_name):
try:
with io.open(template_file, encoding=settings.FILE_CHARSET) as fp:
template_code = fp.read()
except IOError as e:
if e.errno == errno.ENOENT:
tried.append((
Origin(template_file, template_name, self),
'Source does not exist',
))
continue
raise
return Template(template_code)
else:
raise TemplateDoesNotExist(template_name, tried=tried, backend=self)
class Template(string.Template):
def render(self, context=None, request=None):
if context is None:
context = {}
else:
context = {k: conditional_escape(v) for k, v in context.items()}
if request is not None:
context['csrf_input'] = csrf_input_lazy(request)
context['csrf_token'] = csrf_token_lazy(request)
return self.safe_substitute(context)
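# Minimal illustration (editor's sketch, not part of the original module):
#   Template("Hello $name").render({"name": "<b>World</b>"})
#   -> "Hello &lt;b&gt;World&lt;/b&gt;"   (values are conditionally escaped)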
|
bsd-3-clause
| -9,121,187,049,299,889,000 | -9,173,786,010,512,450,000 | 31.333333 | 82 | 0.609229 | false |
izhukov/ansible
|
v2/ansible/plugins/inventory/__init__.py
|
8
|
2702
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from abc import ABCMeta, abstractmethod
from six import add_metaclass
@add_metaclass(ABCMeta)
class InventoryParser:
'''Abstract Base Class for retrieving inventory information
    Any InventoryParser works by taking an inven_source. The caller then
    calls the parse() method. Once parse() has been called, the caller can
    access InventoryParser.hosts for a mapping of Host objects and
    InventoryParser.groups for a mapping of Group objects.
'''
def __init__(self, inven_source):
'''
        InventoryParser constructors take a source of inventory information
that they will parse the host and group information from.
'''
self.inven_source = inven_source
self.reset_parser()
@abstractmethod
def reset_parser(self):
'''
InventoryParsers generally cache their data once parser() is
called. This method initializes any parser state before calling parser
again.
'''
self.hosts = dict()
self.groups = dict()
self.parsed = False
def _merge(self, target, addition):
'''
This method is provided to InventoryParsers to merge host or group
dicts since it may take several passes to get all of the data
Example usage:
self.hosts = self.from_ini(filename)
new_hosts = self.from_script(scriptname)
self._merge(self.hosts, new_hosts)
'''
for i in addition:
if i in target:
target[i].merge(addition[i])
else:
target[i] = addition[i]
@abstractmethod
def parse(self, refresh=False):
if refresh:
self.reset_parser()
if self.parsed:
return self.parsed
# Parse self.inven_sources here
pass
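# Sketch of a concrete parser (editor's illustration; the class name and file
# format are hypothetical):
#
#     class IniInventoryParser(InventoryParser):
#         def reset_parser(self):
#             super(IniInventoryParser, self).reset_parser()
#
#         def parse(self, refresh=False):
#             if refresh:
#                 self.reset_parser()
#             if self.parsed:
#                 return self.parsed
#             # ... read self.inven_source, fill self.hosts / self.groups ...
#             self.parsed = True
#             return self.parsed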
|
gpl-3.0
| 2,109,185,882,520,526,300 | -8,665,200,297,950,180,000 | 31.95122 | 79 | 0.650999 | false |
mdg/pygrate
|
test/migration_test.py
|
1
|
5291
|
import unittest
import os.path
import types
from pygration.migration import VersionNumber, Loader
import pygration
@pygration.step_class
class TestStep(object):
ADD_FILE = 'add.sql'
class StepTest(unittest.TestCase):
def test_class_decorator(self):
self.assertEqual("test.migration_test", TestStep.version)
self.assertEqual("TestStep", TestStep.step_name)
self.assertEqual("TestStep", TestStep.step_id)
class VersionComponentCompare(unittest.TestCase):
"""Test results for the component comparison function in Version."""
def test_numeric_comparison(self):
v = VersionNumber("v0")
self.assertTrue( v._component_compare("1","2") < 0 )
def test_numeric_comparison_double_digits(self):
"""Test that double digit numbers compare later than single digits."""
v = VersionNumber("v0")
self.assertTrue( v._component_compare("2","12") < 0 )
class VersionNumberTest(unittest.TestCase):
"""Tests for the pygration Version class."""
def test_underscore_is_pygration(self):
"""Check that v0_0_0 is reported as a pygration version."""
v = VersionNumber("v1_2_13")
self.assertTrue( v.is_pygration() )
self.assertEqual(v._component(0), "1")
self.assertEqual(v._component(1), "2")
self.assertEqual(v._component(2), "13")
def test_dash_is_pygration(self):
"""Check that v0-0-0 is reported as a pygration version."""
v = VersionNumber("v1-2-3")
self.assertTrue( v.is_pygration() )
self.assertEqual(v._component(0), "1")
self.assertEqual(v._component(1), "2")
self.assertEqual(v._component(2), "3")
def test_dot_is_pygration(self):
"""Check that v0.0.0 is reported as a pygration version."""
v = VersionNumber("v1.2.3")
self.assertTrue( v.is_pygration() )
self.assertEqual(v._component(0), "1")
self.assertEqual(v._component(1), "2")
self.assertEqual(v._component(2), "3")
def test_asdf_is_not_pygration(self):
"""Assert that asdf is reported as not a pygration version."""
v = VersionNumber("asdf")
self.assertFalse( v.is_pygration() )
def test_extended_version(self):
"""Test that a version with a sub-build number is compared later"""
v1 = VersionNumber("v1")
v2 = VersionNumber("v1-2")
self.assertTrue( cmp(v1, v2) < 0 )
self.assertTrue( cmp(v2, v1) > 0 )
def test_numeric_compare(self):
"""Test that a numeric version is compared as a number."""
v1 = VersionNumber("v1-2")
v2 = VersionNumber("v1-12")
self.assertTrue( cmp(v1, v2) < 0 )
self.assertTrue( cmp(v2, v1) > 0 )
def test_underscore_comparison(self):
v1 = VersionNumber("v0_1_2")
v2 = VersionNumber("v0_2_2")
self.assertTrue( cmp(v1, v2) < 0 )
self.assertTrue( cmp(v2, v1) > 0 )
def test_dash_comparison(self):
v1 = VersionNumber("v0-1-2")
v2 = VersionNumber("v0-2-2")
self.assertTrue( cmp(v1, v2) < 0 )
self.assertTrue( cmp(v2, v1) > 0 )
def test_dot_comparison(self):
v1 = VersionNumber("v0.1.2")
v2 = VersionNumber("v0.2.2")
self.assertTrue( cmp(v1, v2) < 0 )
self.assertTrue( cmp(v2, v1) > 0 )
def test_self_comparison(self):
v = VersionNumber("v0.1.2")
self.assertTrue( cmp(v, v) == 0 )
def test_equality_comparison(self):
vA = VersionNumber("v001")
vB = VersionNumber("v001")
self.assertTrue(vA == vB)
class MigrationSetTest(unittest.TestCase):
pass
class LoaderTest(unittest.TestCase):
def setUp( self ):
test_dir = os.path.join( os.path.dirname( __file__ ), "test1" )
self._loader = Loader(test_dir)
def test_find_versions(self):
v001 = VersionNumber('v001')
v002 = VersionNumber('v002')
v07 = VersionNumber('v0-7')
self._loader._find_files()
self.assertEqual([v07, v001, v002], self._loader._find_versions())
def test_load_migration_module(self):
self._loader._load_migration_module('v001')
m = self._loader._modules
self.assertEqual( 1, len(m) )
self.assertEqual( types.ModuleType, type(m[0]) )
class MigrationLoadTest(unittest.TestCase):
def setUp( self ):
self._test_dir = os.path.join( os.path.dirname( __file__ ), "test1" )
def test_load(self):
"""Test that the migration loader loads correctly."""
migset = pygration.migration.load(self._test_dir)
migs = migset.migrations()
self.assertEqual(3, len(migs))
self.assertEqual("v0-7", migs[0].version())
self.assertEqual("v001", migs[1].version())
self.assertEqual("v002", migs[2].version())
v07 = migs[0]
self.assertEqual(2, len(v07.steps()))
self.assertEqual("EmployeeTable", v07.step(0).step_name)
v001 = migs[1]
self.assertEqual(2, len(v001.steps()))
self.assertEqual("SalaryTable", v001.step(0).step_name)
self.assertEqual("EmployeeTable", v001.step(1).step_name)
v002 = migs[2]
self.assertEqual(1, len(v002.steps()))
self.assertEqual("AccountTable", v002.step(0).step_name)
|
apache-2.0
| 3,078,465,142,659,288,600 | -5,809,662,514,704,698,000 | 32.916667 | 78 | 0.612361 | false |
iuliat/nova
|
nova/objects/host_mapping.py
|
29
|
5690
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects import base as ovo
from nova.db.sqlalchemy import api as db_api
from nova.db.sqlalchemy import api_models
from nova import exception
from nova.objects import base
from nova.objects import cell_mapping
from nova.objects import fields
def _cell_id_in_updates(updates):
cell_mapping_obj = updates.pop("cell_mapping", None)
if cell_mapping_obj:
updates["cell_id"] = cell_mapping_obj.id
# NOTE(danms): Maintain Dict compatibility because of ovo bug 1474952
@base.NovaObjectRegistry.register
class HostMapping(base.NovaTimestampObject, base.NovaObject,
ovo.VersionedObjectDictCompat):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'id': fields.IntegerField(read_only=True),
'host': fields.StringField(),
'cell_mapping': fields.ObjectField('CellMapping'),
}
obj_relationships = {
'cell_mapping': [('1.0', '1.0')]
}
def _get_cell_mapping(self):
session = db_api.get_api_session()
with session.begin():
cell_map = (session.query(api_models.CellMapping)
.join(api_models.HostMapping)
.filter(api_models.HostMapping.host == self.host)
.first())
if cell_map is not None:
return cell_mapping.CellMapping._from_db_object(
self._context, cell_mapping.CellMapping(), cell_map)
def _load_cell_mapping(self):
self.cell_mapping = self._get_cell_mapping()
def obj_load_attr(self, attrname):
if attrname == 'cell_mapping':
self._load_cell_mapping()
@staticmethod
def _from_db_object(context, host_mapping, db_host_mapping):
for key in host_mapping.fields:
db_value = db_host_mapping.get(key)
if key == "cell_mapping":
# NOTE(dheeraj): If cell_mapping is stashed in db object
# we load it here. Otherwise, lazy loading will happen
                # when .cell_mapping is accessed later
if not db_value:
continue
db_value = cell_mapping.CellMapping._from_db_object(
host_mapping._context, cell_mapping.CellMapping(),
db_value)
setattr(host_mapping, key, db_value)
host_mapping.obj_reset_changes()
host_mapping._context = context
return host_mapping
@staticmethod
def _get_by_host_from_db(context, host):
session = db_api.get_api_session()
with session.begin():
db_mapping = (session.query(api_models.HostMapping)
.join(api_models.CellMapping)
.with_entities(api_models.HostMapping,
api_models.CellMapping)
.filter(api_models.HostMapping.host == host)).first()
if not db_mapping:
raise exception.HostMappingNotFound(name=host)
host_mapping = db_mapping[0]
host_mapping["cell_mapping"] = db_mapping[1]
return host_mapping
@base.remotable_classmethod
def get_by_host(cls, context, host):
db_mapping = cls._get_by_host_from_db(context, host)
return cls._from_db_object(context, cls(), db_mapping)
@staticmethod
def _create_in_db(context, updates):
session = db_api.get_api_session()
db_mapping = api_models.HostMapping()
db_mapping.update(updates)
db_mapping.save(session)
return db_mapping
@base.remotable
def create(self):
changes = self.obj_get_changes()
# cell_mapping must be mapped to cell_id for create
_cell_id_in_updates(changes)
db_mapping = self._create_in_db(self._context, changes)
self._from_db_object(self._context, self, db_mapping)
@staticmethod
def _save_in_db(context, obj, updates):
session = db_api.get_api_session()
with session.begin():
db_mapping = session.query(
api_models.HostMapping).filter_by(
id=obj.id).first()
if not db_mapping:
raise exception.HostMappingNotFound(name=obj.host)
db_mapping.update(updates)
return db_mapping
@base.remotable
def save(self):
changes = self.obj_get_changes()
# cell_mapping must be mapped to cell_id for updates
_cell_id_in_updates(changes)
        db_mapping = self._save_in_db(self._context, self, changes)
self._from_db_object(self._context, self, db_mapping)
self.obj_reset_changes()
@staticmethod
def _destroy_in_db(context, host):
session = db_api.get_api_session()
with session.begin():
result = session.query(api_models.HostMapping).filter_by(
host=host).delete()
if not result:
raise exception.HostMappingNotFound(name=host)
@base.remotable
def destroy(self):
self._destroy_in_db(self._context, self.host)
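# --- Hedged usage sketch (added for illustration; not part of the original
# nova module). `ctxt` and `cell` are hypothetical stand-ins for a
# RequestContext and an existing CellMapping; the function is never called
# here, it only shows the create/lookup/destroy round-trip.
def _example_host_mapping_round_trip(ctxt, cell):
    hm = HostMapping(context=ctxt, host='compute1', cell_mapping=cell)
    hm.create()                        # cell_mapping is folded into cell_id
    looked_up = HostMapping.get_by_host(ctxt, 'compute1')
    looked_up.cell_mapping             # lazy-loaded via obj_load_attr
    looked_up.destroy()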
|
apache-2.0
| 4,650,309,465,604,054,000 | -3,195,838,229,142,851,600 | 36.189542 | 79 | 0.604394 | false |
ThePirateWhoSmellsOfSunflowers/Empire
|
lib/modules/powershell/situational_awareness/network/smbscanner.py
|
10
|
5259
|
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-SMBScanner',
'Author': ['@obscuresec', '@harmj0y'],
'Description': ('Tests a username/password combination across a number of machines.'),
'Background' : True,
'OutputExtension' : None,
'NeedsAdmin' : False,
'OpsecSafe' : False,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
'Comments': [
'https://gist.github.com/obscuresec/df5f652c7e7088e2412c'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'CredID' : {
'Description' : 'CredID from the store to use.',
'Required' : False,
'Value' : ''
},
'ComputerName' : {
'Description' : 'Comma-separated hostnames to try username/password combinations against. Otherwise enumerate the domain for machines.',
'Required' : False,
'Value' : ''
},
'Password' : {
'Description' : 'Password to test.',
'Required' : True,
'Value' : ''
},
'UserName' : {
'Description' : '[domain\]username to test.',
'Required' : True,
'Value' : ''
},
'NoPing' : {
'Description' : 'Switch. Don\'t ping hosts before enumeration.',
'Required' : False,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self, obfuscate=False, obfuscationCommand=""):
# read in the common module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/situational_awareness/network/Invoke-SmbScanner.ps1"
if obfuscate:
helpers.obfuscate_module(moduleSource=moduleSource, obfuscationCommand=obfuscationCommand)
moduleSource = moduleSource.replace("module_source", "obfuscated_module_source")
try:
f = open(moduleSource, 'r')
except:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
script = moduleCode + "\n"
scriptEnd = ""
# if a credential ID is specified, try to parse
credID = self.options["CredID"]['Value']
if credID != "":
if not self.mainMenu.credentials.is_credential_valid(credID):
print helpers.color("[!] CredID is invalid!")
return ""
(credID, credType, domainName, userName, password, host, os, sid, notes) = self.mainMenu.credentials.get_credentials(credID)[0]
if domainName != "":
self.options["UserName"]['Value'] = str(domainName) + "\\" + str(userName)
else:
self.options["UserName"]['Value'] = str(userName)
if password != "":
self.options["Password"]['Value'] = password
if self.options["UserName"]['Value'] == "" or self.options["Password"]['Value'] == "":
print helpers.color("[!] Username and password must be specified.")
if (self.options['ComputerName']['Value'] != ''):
usernames = "\"" + "\",\"".join(self.options['ComputerName']['Value'].split(",")) + "\""
scriptEnd += usernames + " | "
scriptEnd += "Invoke-SMBScanner "
for option,values in self.options.iteritems():
if option.lower() != "agent" and option.lower() != "computername" and option.lower() != "credid":
if values['Value'] and values['Value'] != '':
if values['Value'].lower() == "true":
# if we're just adding a switch
scriptEnd += " -" + str(option)
else:
scriptEnd += " -" + str(option) + " '" + str(values['Value']) + "'"
scriptEnd += "| Out-String | %{$_ + \"`n\"};"
scriptEnd += "'Invoke-SMBScanner completed'"
if obfuscate:
scriptEnd = helpers.obfuscate(self.mainMenu.installPath, psScript=scriptEnd, obfuscationCommand=obfuscationCommand)
script += scriptEnd
return script
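# --- Hedged usage sketch (added for illustration; not part of the original
# Empire module). `main_menu` is a hypothetical mainMenu instance; params are
# the [Name, Value] pairs consumed by __init__ above.
def _example_build_script(main_menu):
    mod = Module(main_menu, params=[('Agent', 'K2KFJ3XW'),
                                    ('UserName', 'CORP\\alice'),
                                    ('Password', 'Winter2016!')])
    return mod.generate()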
|
bsd-3-clause
| -2,108,252,006,123,905,500 | -8,435,094,038,456,297,000 | 37.108696 | 156 | 0.4942 | false |
gram526/VTK
|
Rendering/Annotation/Testing/Python/bore.py
|
20
|
2855
|
#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Create arc plots
# get the interactor ui
camera = vtk.vtkCamera()
# read the bore
bore = vtk.vtkPolyDataReader()
bore.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/bore.vtk")
tuber = vtk.vtkTubeFilter()
tuber.SetInputConnection(bore.GetOutputPort())
tuber.SetNumberOfSides(6)
tuber.SetRadius(15)
mapBore = vtk.vtkPolyDataMapper()
mapBore.SetInputConnection(tuber.GetOutputPort())
mapBore.ScalarVisibilityOff()
boreActor = vtk.vtkActor()
boreActor.SetMapper(mapBore)
boreActor.GetProperty().SetColor(0,0,0)
# create the arc plots
#
track1 = vtk.vtkPolyDataReader()
track1.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/track1.binary.vtk")
ap = vtk.vtkArcPlotter()
ap.SetInputConnection(track1.GetOutputPort())
ap.SetCamera(camera)
ap.SetRadius(250.0)
ap.SetHeight(200.0)
ap.UseDefaultNormalOn()
ap.SetDefaultNormal(1,1,0)
mapArc = vtk.vtkPolyDataMapper()
mapArc.SetInputConnection(ap.GetOutputPort())
arcActor = vtk.vtkActor()
arcActor.SetMapper(mapArc)
arcActor.GetProperty().SetColor(0,1,0)
track2 = vtk.vtkPolyDataReader()
track2.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/track2.binary.vtk")
ap2 = vtk.vtkArcPlotter()
ap2.SetInputConnection(track2.GetOutputPort())
ap2.SetCamera(camera)
ap2.SetRadius(450.0)
ap2.SetHeight(200.0)
ap2.UseDefaultNormalOn()
ap2.SetDefaultNormal(1,1,0)
mapArc2 = vtk.vtkPolyDataMapper()
mapArc2.SetInputConnection(ap2.GetOutputPort())
arcActor2 = vtk.vtkActor()
arcActor2.SetMapper(mapArc2)
arcActor2.GetProperty().SetColor(0,0,1)
track3 = vtk.vtkPolyDataReader()
track3.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/track3.binary.vtk")
ap3 = vtk.vtkArcPlotter()
ap3.SetInputConnection(track3.GetOutputPort())
ap3.SetCamera(camera)
ap3.SetRadius(250.0)
ap3.SetHeight(50.0)
ap3.SetDefaultNormal(1,1,0)
mapArc3 = vtk.vtkPolyDataMapper()
mapArc3.SetInputConnection(ap3.GetOutputPort())
arcActor3 = vtk.vtkActor()
arcActor3.SetMapper(mapArc3)
arcActor3.GetProperty().SetColor(1,0,1)
# Create graphics objects
# Create the rendering window renderer and interactive renderer
ren1 = vtk.vtkRenderer()
ren1.SetActiveCamera(camera)
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# Add the actors to the renderer set the background and size
ren1.AddActor(boreActor)
ren1.AddActor(arcActor)
ren1.AddActor(arcActor2)
ren1.AddActor(arcActor3)
ren1.SetBackground(1,1,1)
renWin.SetSize(235,500)
camera.SetClippingRange(14144,32817)
camera.SetFocalPoint(-1023,680,5812)
camera.SetPosition(15551,-2426,19820)
camera.SetViewUp(-0.651889,-0.07576,0.754521)
camera.SetViewAngle(20)
renWin.Render()
# render the image
#
iren.Initialize()
# prevent the tk window from showing up then start the event loop
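# Hedged note (added for illustration): outside the test harness one would
# typically hand control to the interactor at this point, e.g.:
#
#     iren.Start()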
# --- end of script --
|
bsd-3-clause
| -1,015,780,383,098,805,200 | 2,382,965,464,807,200,300 | 30.373626 | 71 | 0.785289 | false |
40223136/w17test2
|
static/Brython3.1.3-20150514-095342/Lib/unittest/case.py
|
743
|
48873
|
"""Test case implementation"""
import sys
import functools
import difflib
import pprint
import re
import warnings
import collections
from . import result
from .util import (strclass, safe_repr, _count_diff_all_purpose,
_count_diff_hashable)
__unittest = True
DIFF_OMITTED = ('\nDiff is %s characters long. '
'Set self.maxDiff to None to see it.')
class SkipTest(Exception):
"""
Raise this exception in a test to skip it.
Usually you can use TestCase.skipTest() or one of the skipping decorators
instead of raising this directly.
"""
class _ExpectedFailure(Exception):
"""
Raise this when a test is expected to fail.
This is an implementation detail.
"""
def __init__(self, exc_info):
super(_ExpectedFailure, self).__init__()
self.exc_info = exc_info
class _UnexpectedSuccess(Exception):
"""
The test was supposed to fail, but it didn't!
"""
class _Outcome(object):
def __init__(self):
self.success = True
self.skipped = None
self.unexpectedSuccess = None
self.expectedFailure = None
self.errors = []
self.failures = []
def _id(obj):
return obj
def skip(reason):
"""
Unconditionally skip a test.
"""
def decorator(test_item):
if not isinstance(test_item, type):
@functools.wraps(test_item)
def skip_wrapper(*args, **kwargs):
raise SkipTest(reason)
test_item = skip_wrapper
test_item.__unittest_skip__ = True
test_item.__unittest_skip_why__ = reason
return test_item
return decorator
def skipIf(condition, reason):
"""
Skip a test if the condition is true.
"""
if condition:
return skip(reason)
return _id
def skipUnless(condition, reason):
"""
Skip a test unless the condition is true.
"""
if not condition:
return skip(reason)
return _id
def expectedFailure(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception:
raise _ExpectedFailure(sys.exc_info())
raise _UnexpectedSuccess
return wrapper
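# --- Hedged usage sketch (added for illustration; not part of the original
# module). Shows how the decorators above compose with a TestCase; `MyTests`
# and its methods are hypothetical, and the function is never called here.
def _example_skip_decorators():
    class MyTests(TestCase):
        @skipIf(sys.platform == 'win32', 'POSIX-only behaviour')
        def test_symlinks(self):
            pass
        @expectedFailure
        def test_known_bug(self):
            raise AssertionError('tracked upstream')
    return MyTests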
class _AssertRaisesBaseContext(object):
def __init__(self, expected, test_case, callable_obj=None,
expected_regex=None):
self.expected = expected
self.test_case = test_case
if callable_obj is not None:
try:
self.obj_name = callable_obj.__name__
except AttributeError:
self.obj_name = str(callable_obj)
else:
self.obj_name = None
if isinstance(expected_regex, (bytes, str)):
expected_regex = re.compile(expected_regex)
self.expected_regex = expected_regex
self.msg = None
def _raiseFailure(self, standardMsg):
msg = self.test_case._formatMessage(self.msg, standardMsg)
raise self.test_case.failureException(msg)
def handle(self, name, callable_obj, args, kwargs):
"""
If callable_obj is None, assertRaises/Warns is being used as a
context manager, so check for a 'msg' kwarg and return self.
If callable_obj is not None, call it passing args and kwargs.
"""
if callable_obj is None:
self.msg = kwargs.pop('msg', None)
return self
with self:
callable_obj(*args, **kwargs)
class _AssertRaisesContext(_AssertRaisesBaseContext):
"""A context manager used to implement TestCase.assertRaises* methods."""
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
if self.obj_name:
self._raiseFailure("{} not raised by {}".format(exc_name,
self.obj_name))
else:
self._raiseFailure("{} not raised".format(exc_name))
if not issubclass(exc_type, self.expected):
# let unexpected exceptions pass through
return False
# store exception, without traceback, for later retrieval
self.exception = exc_value.with_traceback(None)
if self.expected_regex is None:
return True
expected_regex = self.expected_regex
if not expected_regex.search(str(exc_value)):
self._raiseFailure('"{}" does not match "{}"'.format(
expected_regex.pattern, str(exc_value)))
return True
class _AssertWarnsContext(_AssertRaisesBaseContext):
"""A context manager used to implement TestCase.assertWarns* methods."""
def __enter__(self):
# The __warningregistry__'s need to be in a pristine state for tests
# to work properly.
for v in sys.modules.values():
if getattr(v, '__warningregistry__', None):
v.__warningregistry__ = {}
self.warnings_manager = warnings.catch_warnings(record=True)
self.warnings = self.warnings_manager.__enter__()
warnings.simplefilter("always", self.expected)
return self
def __exit__(self, exc_type, exc_value, tb):
self.warnings_manager.__exit__(exc_type, exc_value, tb)
if exc_type is not None:
# let unexpected exceptions pass through
return
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
first_matching = None
for m in self.warnings:
w = m.message
if not isinstance(w, self.expected):
continue
if first_matching is None:
first_matching = w
if (self.expected_regex is not None and
not self.expected_regex.search(str(w))):
continue
# store warning for later retrieval
self.warning = w
self.filename = m.filename
self.lineno = m.lineno
return
# Now we simply try to choose a helpful failure message
if first_matching is not None:
self._raiseFailure('"{}" does not match "{}"'.format(
self.expected_regex.pattern, str(first_matching)))
if self.obj_name:
self._raiseFailure("{} not triggered by {}".format(exc_name,
self.obj_name))
else:
self._raiseFailure("{} not triggered".format(exc_name))
class TestCase(object):
"""A class whose instances are single test cases.
By default, the test code itself should be placed in a method named
'runTest'.
If the fixture may be used for many test cases, create as
many test methods as are needed. When instantiating such a TestCase
subclass, specify in the constructor arguments the name of the test method
that the instance is to execute.
Test authors should subclass TestCase for their own tests. Construction
and deconstruction of the test's environment ('fixture') can be
implemented by overriding the 'setUp' and 'tearDown' methods respectively.
If it is necessary to override the __init__ method, the base class
__init__ method must always be called. It is important that subclasses
should not change the signature of their __init__ method, since instances
of the classes are instantiated automatically by parts of the framework
in order to be run.
When subclassing TestCase, you can set these attributes:
* failureException: determines which exception will be raised when
the instance's assertion methods fail; test methods raising this
exception will be deemed to have 'failed' rather than 'errored'.
* longMessage: determines whether long messages (including repr of
objects used in assert methods) will be printed on failure in *addition*
to any explicit message passed.
* maxDiff: sets the maximum length of a diff in failure messages
by assert methods using difflib. It is looked up as an instance
attribute so can be configured by individual tests if required.
"""
failureException = AssertionError
longMessage = True
maxDiff = 80*8
# If a string is longer than _diffThreshold, use normal comparison instead
# of difflib. See #11763.
_diffThreshold = 2**16
# Attribute used by TestSuite for classSetUp
_classSetupFailed = False
def __init__(self, methodName='runTest'):
"""Create an instance of the class that will use the named test
method when executed. Raises a ValueError if the instance does
not have a method with the specified name.
"""
self._testMethodName = methodName
self._outcomeForDoCleanups = None
self._testMethodDoc = 'No test'
try:
testMethod = getattr(self, methodName)
except AttributeError:
if methodName != 'runTest':
# we allow instantiation with no explicit method name
# but not an *incorrect* or missing method name
raise ValueError("no such test method in %s: %s" %
(self.__class__, methodName))
else:
self._testMethodDoc = testMethod.__doc__
self._cleanups = []
# Map types to custom assertEqual functions that will compare
# instances of said type in more detail to generate a more useful
# error message.
self._type_equality_funcs = {}
self.addTypeEqualityFunc(dict, 'assertDictEqual')
self.addTypeEqualityFunc(list, 'assertListEqual')
self.addTypeEqualityFunc(tuple, 'assertTupleEqual')
self.addTypeEqualityFunc(set, 'assertSetEqual')
self.addTypeEqualityFunc(frozenset, 'assertSetEqual')
self.addTypeEqualityFunc(str, 'assertMultiLineEqual')
def addTypeEqualityFunc(self, typeobj, function):
"""Add a type specific assertEqual style function to compare a type.
This method is for use by TestCase subclasses that need to register
their own type equality functions to provide nicer error messages.
Args:
typeobj: The data type to call this function on when both values
are of the same type in assertEqual().
function: The callable taking two arguments and an optional
msg= argument that raises self.failureException with a
useful error message when the two arguments are not equal.
"""
self._type_equality_funcs[typeobj] = function
def addCleanup(self, function, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
Cleanup items are called even if setUp fails (unlike tearDown)."""
self._cleanups.append((function, args, kwargs))
def setUp(self):
"Hook method for setting up the test fixture before exercising it."
pass
def tearDown(self):
"Hook method for deconstructing the test fixture after testing it."
pass
@classmethod
def setUpClass(cls):
"Hook method for setting up class fixture before running tests in the class."
@classmethod
def tearDownClass(cls):
"Hook method for deconstructing the class fixture after running all tests in the class."
def countTestCases(self):
return 1
def defaultTestResult(self):
return result.TestResult()
def shortDescription(self):
"""Returns a one-line description of the test, or None if no
description has been provided.
The default implementation of this method returns the first line of
the specified test method's docstring.
"""
doc = self._testMethodDoc
return doc and doc.split("\n")[0].strip() or None
def id(self):
return "%s.%s" % (strclass(self.__class__), self._testMethodName)
def __eq__(self, other):
if type(self) is not type(other):
return NotImplemented
return self._testMethodName == other._testMethodName
def __hash__(self):
return hash((type(self), self._testMethodName))
def __str__(self):
return "%s (%s)" % (self._testMethodName, strclass(self.__class__))
def __repr__(self):
return "<%s testMethod=%s>" % \
(strclass(self.__class__), self._testMethodName)
def _addSkip(self, result, reason):
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None:
addSkip(self, reason)
else:
warnings.warn("TestResult has no addSkip method, skips not reported",
RuntimeWarning, 2)
result.addSuccess(self)
def _executeTestPart(self, function, outcome, isTest=False):
try:
function()
except KeyboardInterrupt:
raise
except SkipTest as e:
outcome.success = False
outcome.skipped = str(e)
except _UnexpectedSuccess:
exc_info = sys.exc_info()
outcome.success = False
if isTest:
outcome.unexpectedSuccess = exc_info
else:
outcome.errors.append(exc_info)
except _ExpectedFailure:
outcome.success = False
exc_info = sys.exc_info()
if isTest:
outcome.expectedFailure = exc_info
else:
outcome.errors.append(exc_info)
except self.failureException:
outcome.success = False
outcome.failures.append(sys.exc_info())
exc_info = sys.exc_info()
except:
outcome.success = False
outcome.errors.append(sys.exc_info())
def run(self, result=None):
orig_result = result
if result is None:
result = self.defaultTestResult()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
result.startTest(self)
testMethod = getattr(self, self._testMethodName)
if (getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False)):
# If the class or method was skipped.
try:
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
self._addSkip(result, skip_why)
finally:
result.stopTest(self)
return
try:
outcome = _Outcome()
self._outcomeForDoCleanups = outcome
self._executeTestPart(self.setUp, outcome)
if outcome.success:
self._executeTestPart(testMethod, outcome, isTest=True)
self._executeTestPart(self.tearDown, outcome)
self.doCleanups()
if outcome.success:
result.addSuccess(self)
else:
if outcome.skipped is not None:
self._addSkip(result, outcome.skipped)
for exc_info in outcome.errors:
result.addError(self, exc_info)
for exc_info in outcome.failures:
result.addFailure(self, exc_info)
if outcome.unexpectedSuccess is not None:
addUnexpectedSuccess = getattr(result, 'addUnexpectedSuccess', None)
if addUnexpectedSuccess is not None:
addUnexpectedSuccess(self)
else:
warnings.warn("TestResult has no addUnexpectedSuccess method, reporting as failures",
RuntimeWarning)
result.addFailure(self, outcome.unexpectedSuccess)
if outcome.expectedFailure is not None:
addExpectedFailure = getattr(result, 'addExpectedFailure', None)
if addExpectedFailure is not None:
addExpectedFailure(self, outcome.expectedFailure)
else:
warnings.warn("TestResult has no addExpectedFailure method, reporting as passes",
RuntimeWarning)
result.addSuccess(self)
return result
finally:
result.stopTest(self)
if orig_result is None:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun()
def doCleanups(self):
"""Execute all cleanup functions. Normally called for you after
tearDown."""
outcome = self._outcomeForDoCleanups or _Outcome()
while self._cleanups:
function, args, kwargs = self._cleanups.pop()
part = lambda: function(*args, **kwargs)
self._executeTestPart(part, outcome)
# return this for backwards compatibility
        # even though we no longer use it internally
return outcome.success
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the test without collecting errors in a TestResult"""
self.setUp()
getattr(self, self._testMethodName)()
self.tearDown()
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
function(*args, **kwargs)
def skipTest(self, reason):
"""Skip this test."""
raise SkipTest(reason)
def fail(self, msg=None):
"""Fail immediately, with the given message."""
raise self.failureException(msg)
def assertFalse(self, expr, msg=None):
"""Check that the expression is false."""
if expr:
msg = self._formatMessage(msg, "%s is not false" % safe_repr(expr))
raise self.failureException(msg)
def assertTrue(self, expr, msg=None):
"""Check that the expression is true."""
if not expr:
msg = self._formatMessage(msg, "%s is not true" % safe_repr(expr))
raise self.failureException(msg)
def _formatMessage(self, msg, standardMsg):
"""Honour the longMessage attribute when generating failure messages.
If longMessage is False this means:
* Use only an explicit message if it is provided
* Otherwise use the standard message for the assert
If longMessage is True:
* Use the standard message
        * If an explicit message is provided, append ' : ' and the explicit message
"""
if not self.longMessage:
return msg or standardMsg
if msg is None:
return standardMsg
try:
# don't switch to '{}' formatting in Python 2.X
# it changes the way unicode input is handled
return '%s : %s' % (standardMsg, msg)
except UnicodeDecodeError:
return '%s : %s' % (safe_repr(standardMsg), safe_repr(msg))
def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
"""Fail unless an exception of class excClass is raised
by callableObj when invoked with arguments args and keyword
arguments kwargs. If a different type of exception is
raised, it will not be caught, and the test case will be
deemed to have suffered an error, exactly as for an
unexpected exception.
If called with callableObj omitted or None, will return a
context object used like this::
with self.assertRaises(SomeException):
do_something()
An optional keyword argument 'msg' can be provided when assertRaises
is used as a context object.
The context manager keeps a reference to the exception as
the 'exception' attribute. This allows you to inspect the
exception after the assertion::
with self.assertRaises(SomeException) as cm:
do_something()
the_exception = cm.exception
self.assertEqual(the_exception.error_code, 3)
"""
context = _AssertRaisesContext(excClass, self, callableObj)
return context.handle('assertRaises', callableObj, args, kwargs)
def assertWarns(self, expected_warning, callable_obj=None, *args, **kwargs):
"""Fail unless a warning of class warnClass is triggered
by callable_obj when invoked with arguments args and keyword
arguments kwargs. If a different type of warning is
triggered, it will not be handled: depending on the other
warning filtering rules in effect, it might be silenced, printed
out, or raised as an exception.
If called with callable_obj omitted or None, will return a
context object used like this::
with self.assertWarns(SomeWarning):
do_something()
An optional keyword argument 'msg' can be provided when assertWarns
is used as a context object.
The context manager keeps a reference to the first matching
warning as the 'warning' attribute; similarly, the 'filename'
and 'lineno' attributes give you information about the line
of Python code from which the warning was triggered.
This allows you to inspect the warning after the assertion::
with self.assertWarns(SomeWarning) as cm:
do_something()
the_warning = cm.warning
self.assertEqual(the_warning.some_attribute, 147)
"""
context = _AssertWarnsContext(expected_warning, self, callable_obj)
return context.handle('assertWarns', callable_obj, args, kwargs)
def _getAssertEqualityFunc(self, first, second):
"""Get a detailed comparison function for the types of the two args.
Returns: A callable accepting (first, second, msg=None) that will
raise a failure exception if first != second with a useful human
readable error message for those types.
"""
#
# NOTE(gregory.p.smith): I considered isinstance(first, type(second))
# and vice versa. I opted for the conservative approach in case
# subclasses are not intended to be compared in detail to their super
# class instances using a type equality func. This means testing
# subtypes won't automagically use the detailed comparison. Callers
# should use their type specific assertSpamEqual method to compare
# subclasses if the detailed comparison is desired and appropriate.
# See the discussion in http://bugs.python.org/issue2578.
#
if type(first) is type(second):
asserter = self._type_equality_funcs.get(type(first))
if asserter is not None:
if isinstance(asserter, str):
asserter = getattr(self, asserter)
return asserter
return self._baseAssertEqual
def _baseAssertEqual(self, first, second, msg=None):
"""The default assertEqual implementation, not type specific."""
if not first == second:
standardMsg = '%s != %s' % (safe_repr(first), safe_repr(second))
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertEqual(self, first, second, msg=None):
"""Fail if the two objects are unequal as determined by the '=='
operator.
"""
assertion_func = self._getAssertEqualityFunc(first, second)
assertion_func(first, second, msg=msg)
def assertNotEqual(self, first, second, msg=None):
"""Fail if the two objects are equal as determined by the '!='
operator.
"""
if not first != second:
msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first),
safe_repr(second)))
raise self.failureException(msg)
def assertAlmostEqual(self, first, second, places=None, msg=None,
delta=None):
"""Fail if the two objects are unequal as determined by their
difference rounded to the given number of decimal places
           (default 7) and comparing to zero, or by comparing that the
           difference between the two objects is more than the given delta.
           Note that decimal places (from zero) are usually not the same
           as significant digits (measured from the most significant digit).
If the two objects compare equal then they will automatically
compare almost equal.
"""
if first == second:
# shortcut
return
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if abs(first - second) <= delta:
return
standardMsg = '%s != %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if round(abs(second-first), places) == 0:
return
standardMsg = '%s != %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertNotAlmostEqual(self, first, second, places=None, msg=None,
delta=None):
"""Fail if the two objects are equal as determined by their
difference rounded to the given number of decimal places
           (default 7) and comparing to zero, or by comparing that the
           difference between the two objects is less than the given delta.
           Note that decimal places (from zero) are usually not the same
           as significant digits (measured from the most significant digit).
Objects that are equal automatically fail.
"""
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if not (first == second) and abs(first - second) > delta:
return
standardMsg = '%s == %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if not (first == second) and round(abs(second-first), places) != 0:
return
standardMsg = '%s == %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertSequenceEqual(self, seq1, seq2, msg=None, seq_type=None):
"""An equality assertion for ordered sequences (like lists and tuples).
For the purposes of this function, a valid ordered sequence type is one
which can be indexed, has a length, and has an equality operator.
Args:
seq1: The first sequence to compare.
seq2: The second sequence to compare.
seq_type: The expected datatype of the sequences, or None if no
datatype should be enforced.
msg: Optional message to use on failure instead of a list of
differences.
"""
if seq_type is not None:
seq_type_name = seq_type.__name__
if not isinstance(seq1, seq_type):
raise self.failureException('First sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq1)))
if not isinstance(seq2, seq_type):
raise self.failureException('Second sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq2)))
else:
seq_type_name = "sequence"
differing = None
try:
len1 = len(seq1)
except (TypeError, NotImplementedError):
differing = 'First %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
try:
len2 = len(seq2)
except (TypeError, NotImplementedError):
differing = 'Second %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
if seq1 == seq2:
return
seq1_repr = safe_repr(seq1)
seq2_repr = safe_repr(seq2)
if len(seq1_repr) > 30:
seq1_repr = seq1_repr[:30] + '...'
if len(seq2_repr) > 30:
seq2_repr = seq2_repr[:30] + '...'
elements = (seq_type_name.capitalize(), seq1_repr, seq2_repr)
differing = '%ss differ: %s != %s\n' % elements
for i in range(min(len1, len2)):
try:
item1 = seq1[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of first %s\n' %
(i, seq_type_name))
break
try:
item2 = seq2[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of second %s\n' %
(i, seq_type_name))
break
if item1 != item2:
differing += ('\nFirst differing element %d:\n%s\n%s\n' %
(i, item1, item2))
break
else:
if (len1 == len2 and seq_type is None and
type(seq1) != type(seq2)):
# The sequences are the same, but have differing types.
return
if len1 > len2:
differing += ('\nFirst %s contains %d additional '
'elements.\n' % (seq_type_name, len1 - len2))
try:
differing += ('First extra element %d:\n%s\n' %
(len2, seq1[len2]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of first %s\n' % (len2, seq_type_name))
elif len1 < len2:
differing += ('\nSecond %s contains %d additional '
'elements.\n' % (seq_type_name, len2 - len1))
try:
differing += ('First extra element %d:\n%s\n' %
(len1, seq2[len1]))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of second %s\n' % (len1, seq_type_name))
standardMsg = differing
diffMsg = '\n' + '\n'.join(
difflib.ndiff(pprint.pformat(seq1).splitlines(),
pprint.pformat(seq2).splitlines()))
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def _truncateMessage(self, message, diff):
max_diff = self.maxDiff
if max_diff is None or len(diff) <= max_diff:
return message + diff
return message + (DIFF_OMITTED % len(diff))
def assertListEqual(self, list1, list2, msg=None):
"""A list-specific equality assertion.
Args:
list1: The first list to compare.
list2: The second list to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(list1, list2, msg, seq_type=list)
def assertTupleEqual(self, tuple1, tuple2, msg=None):
"""A tuple-specific equality assertion.
Args:
tuple1: The first tuple to compare.
tuple2: The second tuple to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(tuple1, tuple2, msg, seq_type=tuple)
def assertSetEqual(self, set1, set2, msg=None):
"""A set-specific equality assertion.
Args:
set1: The first set to compare.
set2: The second set to compare.
msg: Optional message to use on failure instead of a list of
differences.
assertSetEqual uses ducktyping to support different types of sets, and
is optimized for sets specifically (parameters must support a
difference method).
"""
try:
difference1 = set1.difference(set2)
except TypeError as e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError as e:
self.fail('first argument does not support set difference: %s' % e)
try:
difference2 = set2.difference(set1)
except TypeError as e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError as e:
self.fail('second argument does not support set difference: %s' % e)
if not (difference1 or difference2):
return
lines = []
if difference1:
lines.append('Items in the first set but not the second:')
for item in difference1:
lines.append(repr(item))
if difference2:
lines.append('Items in the second set but not the first:')
for item in difference2:
lines.append(repr(item))
standardMsg = '\n'.join(lines)
self.fail(self._formatMessage(msg, standardMsg))
def assertIn(self, member, container, msg=None):
"""Just like self.assertTrue(a in b), but with a nicer default message."""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIn(self, member, container, msg=None):
"""Just like self.assertTrue(a not in b), but with a nicer default message."""
if member in container:
standardMsg = '%s unexpectedly found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertIs(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is b), but with a nicer default message."""
if expr1 is not expr2:
standardMsg = '%s is not %s' % (safe_repr(expr1),
safe_repr(expr2))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNot(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is not b), but with a nicer default message."""
if expr1 is expr2:
standardMsg = 'unexpectedly identical: %s' % (safe_repr(expr1),)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictEqual(self, d1, d2, msg=None):
self.assertIsInstance(d1, dict, 'First argument is not a dictionary')
self.assertIsInstance(d2, dict, 'Second argument is not a dictionary')
if d1 != d2:
standardMsg = '%s != %s' % (safe_repr(d1, True), safe_repr(d2, True))
diff = ('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(d1).splitlines(),
pprint.pformat(d2).splitlines())))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictContainsSubset(self, subset, dictionary, msg=None):
"""Checks whether dictionary is a superset of subset."""
warnings.warn('assertDictContainsSubset is deprecated',
DeprecationWarning)
missing = []
mismatched = []
for key, value in subset.items():
if key not in dictionary:
missing.append(key)
elif value != dictionary[key]:
mismatched.append('%s, expected: %s, actual: %s' %
(safe_repr(key), safe_repr(value),
safe_repr(dictionary[key])))
if not (missing or mismatched):
return
standardMsg = ''
if missing:
standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
missing)
if mismatched:
if standardMsg:
standardMsg += '; '
standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
self.fail(self._formatMessage(msg, standardMsg))
def assertCountEqual(self, first, second, msg=None):
"""An unordered sequence comparison asserting that the same elements,
regardless of order. If the same element occurs more than once,
it verifies that the elements occur the same number of times.
self.assertEqual(Counter(list(first)),
Counter(list(second)))
Example:
- [0, 1, 1] and [1, 0, 1] compare equal.
- [0, 0, 1] and [0, 1] compare unequal.
"""
first_seq, second_seq = list(first), list(second)
try:
first = collections.Counter(first_seq)
second = collections.Counter(second_seq)
except TypeError:
# Handle case with unhashable elements
differences = _count_diff_all_purpose(first_seq, second_seq)
else:
if first == second:
return
differences = _count_diff_hashable(first_seq, second_seq)
if differences:
standardMsg = 'Element counts were not equal:\n'
lines = ['First has %d, Second has %d: %r' % diff for diff in differences]
diffMsg = '\n'.join(lines)
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def assertMultiLineEqual(self, first, second, msg=None):
"""Assert that two multi-line strings are equal."""
self.assertIsInstance(first, str, 'First argument is not a string')
self.assertIsInstance(second, str, 'Second argument is not a string')
if first != second:
# don't use difflib if the strings are too long
if (len(first) > self._diffThreshold or
len(second) > self._diffThreshold):
self._baseAssertEqual(first, second, msg)
firstlines = first.splitlines(keepends=True)
secondlines = second.splitlines(keepends=True)
if len(firstlines) == 1 and first.strip('\r\n') == first:
firstlines = [first + '\n']
secondlines = [second + '\n']
standardMsg = '%s != %s' % (safe_repr(first, True),
safe_repr(second, True))
diff = '\n' + ''.join(difflib.ndiff(firstlines, secondlines))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertLess(self, a, b, msg=None):
"""Just like self.assertTrue(a < b), but with a nicer default message."""
if not a < b:
standardMsg = '%s not less than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertLessEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a <= b), but with a nicer default message."""
if not a <= b:
standardMsg = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreater(self, a, b, msg=None):
"""Just like self.assertTrue(a > b), but with a nicer default message."""
if not a > b:
standardMsg = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreaterEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a >= b), but with a nicer default message."""
if not a >= b:
standardMsg = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNone(self, obj, msg=None):
"""Same as self.assertTrue(obj is None), with a nicer default message."""
if obj is not None:
standardMsg = '%s is not None' % (safe_repr(obj),)
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNotNone(self, obj, msg=None):
"""Included for symmetry with assertIsNone."""
if obj is None:
standardMsg = 'unexpectedly None'
self.fail(self._formatMessage(msg, standardMsg))
def assertIsInstance(self, obj, cls, msg=None):
"""Same as self.assertTrue(isinstance(obj, cls)), with a nicer
default message."""
if not isinstance(obj, cls):
standardMsg = '%s is not an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIsInstance(self, obj, cls, msg=None):
"""Included for symmetry with assertIsInstance."""
if isinstance(obj, cls):
standardMsg = '%s is an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertRaisesRegex(self, expected_exception, expected_regex,
callable_obj=None, *args, **kwargs):
"""Asserts that the message in a raised exception matches a regex.
Args:
expected_exception: Exception class expected to be raised.
expected_regex: Regex (re pattern object or string) expected
to be found in error message.
callable_obj: Function to be called.
msg: Optional message used in case of failure. Can only be used
when assertRaisesRegex is used as a context manager.
args: Extra args.
kwargs: Extra kwargs.
"""
context = _AssertRaisesContext(expected_exception, self, callable_obj,
expected_regex)
return context.handle('assertRaisesRegex', callable_obj, args, kwargs)
def assertWarnsRegex(self, expected_warning, expected_regex,
callable_obj=None, *args, **kwargs):
"""Asserts that the message in a triggered warning matches a regexp.
Basic functioning is similar to assertWarns() with the addition
that only warnings whose messages also match the regular expression
are considered successful matches.
Args:
expected_warning: Warning class expected to be triggered.
expected_regex: Regex (re pattern object or string) expected
to be found in error message.
callable_obj: Function to be called.
msg: Optional message used in case of failure. Can only be used
when assertWarnsRegex is used as a context manager.
args: Extra args.
kwargs: Extra kwargs.
"""
context = _AssertWarnsContext(expected_warning, self, callable_obj,
expected_regex)
return context.handle('assertWarnsRegex', callable_obj, args, kwargs)
def assertRegex(self, text, expected_regex, msg=None):
"""Fail the test unless the text matches the regular expression."""
if isinstance(expected_regex, (str, bytes)):
assert expected_regex, "expected_regex must not be empty."
expected_regex = re.compile(expected_regex)
if not expected_regex.search(text):
msg = msg or "Regex didn't match"
msg = '%s: %r not found in %r' % (msg, expected_regex.pattern, text)
raise self.failureException(msg)
def assertNotRegex(self, text, unexpected_regex, msg=None):
"""Fail the test if the text matches the regular expression."""
if isinstance(unexpected_regex, (str, bytes)):
unexpected_regex = re.compile(unexpected_regex)
match = unexpected_regex.search(text)
if match:
msg = msg or "Regex matched"
msg = '%s: %r matches %r in %r' % (msg,
text[match.start():match.end()],
unexpected_regex.pattern,
text)
raise self.failureException(msg)
def _deprecate(original_func):
def deprecated_func(*args, **kwargs):
warnings.warn(
'Please use {0} instead.'.format(original_func.__name__),
DeprecationWarning, 2)
return original_func(*args, **kwargs)
return deprecated_func
# see #9424
failUnlessEqual = assertEquals = _deprecate(assertEqual)
failIfEqual = assertNotEquals = _deprecate(assertNotEqual)
failUnlessAlmostEqual = assertAlmostEquals = _deprecate(assertAlmostEqual)
failIfAlmostEqual = assertNotAlmostEquals = _deprecate(assertNotAlmostEqual)
failUnless = assert_ = _deprecate(assertTrue)
failUnlessRaises = _deprecate(assertRaises)
failIf = _deprecate(assertFalse)
assertRaisesRegexp = _deprecate(assertRaisesRegex)
assertRegexpMatches = _deprecate(assertRegex)
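# --- Hedged usage sketch (added for illustration; not part of the original
# module). A minimal TestCase exercising a few of the assertions defined
# above; `ArithmeticTests` is hypothetical and the function is never called.
def _example_test_case():
    class ArithmeticTests(TestCase):
        def test_division(self):
            self.assertAlmostEqual(1.0 / 3.0, 0.333333, places=5)
            with self.assertRaises(ZeroDivisionError):
                1 / 0
    return ArithmeticTests('test_division').run()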
class FunctionTestCase(TestCase):
"""A test case that wraps a test function.
This is useful for slipping pre-existing test functions into the
unittest framework. Optionally, set-up and tidy-up functions can be
supplied. As with TestCase, the tidy-up ('tearDown') function will
always be called if the set-up ('setUp') function ran successfully.
"""
def __init__(self, testFunc, setUp=None, tearDown=None, description=None):
super(FunctionTestCase, self).__init__()
self._setUpFunc = setUp
self._tearDownFunc = tearDown
self._testFunc = testFunc
self._description = description
def setUp(self):
if self._setUpFunc is not None:
self._setUpFunc()
def tearDown(self):
if self._tearDownFunc is not None:
self._tearDownFunc()
def runTest(self):
self._testFunc()
def id(self):
return self._testFunc.__name__
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return self._setUpFunc == other._setUpFunc and \
self._tearDownFunc == other._tearDownFunc and \
self._testFunc == other._testFunc and \
self._description == other._description
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash((type(self), self._setUpFunc, self._tearDownFunc,
self._testFunc, self._description))
def __str__(self):
return "%s (%s)" % (strclass(self.__class__),
self._testFunc.__name__)
def __repr__(self):
return "<%s tec=%s>" % (strclass(self.__class__),
self._testFunc)
def shortDescription(self):
if self._description is not None:
return self._description
doc = self._testFunc.__doc__
return doc and doc.split("\n")[0].strip() or None
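# --- Hedged usage sketch (added for illustration; not part of the original
# module). Wrapping a pre-existing plain test function; every name passed in
# below is a hypothetical callable supplied by the caller.
def _example_function_test_case(check_invariants, open_db, close_db):
    case = FunctionTestCase(check_invariants,
                            setUp=open_db, tearDown=close_db,
                            description='invariant check')
    return case.run()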
|
gpl-3.0
| -2,843,029,245,278,232,000 | 9,193,020,653,665,455,000 | 39.257825 | 109 | 0.575164 | false |
jtyuan/racetrack
|
src/arch/x86/isa/insts/general_purpose/flags/set_and_clear.py
|
91
|
2816
|
# Copyright (c) 2007-2008 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop CLD {
ruflags t1
limm t2, "~((uint64_t)DFBit)", dataSize=8
and t1, t1, t2
wruflags t1, t0
};
def macroop STD {
ruflags t1
limm t2, "DFBit", dataSize=8
or t1, t1, t2
wruflags t1, t0
};
def macroop CLC {
ruflags t1
andi t2, t1, "CFBit"
wruflags t1, t2
};
def macroop STC {
ruflags t1
ori t1, t1, "CFBit"
wruflags t1, t0
};
def macroop CMC {
ruflags t1
wruflagsi t1, "CFBit"
};
def macroop STI {
rflags t1
limm t2, "IFBit", dataSize=8
or t1, t1, t2
wrflags t1, t0
};
def macroop CLI {
rflags t1
limm t2, "~IFBit", dataSize=8
and t1, t1, t2
wrflags t1, t0
};
'''
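# --- Hedged sketch (added for illustration; not part of the original gem5
# file). Every macroop above is a read-modify-write on the flag register:
# read with ruflags/rflags, mask or merge the relevant bit, write back with
# wruflags/wrflags. A hypothetical macroop clearing AF would follow the same
# pattern as CLD:
#
#     def macroop CLRAF {
#         ruflags t1
#         limm t2, "~((uint64_t)AFBit)", dataSize=8
#         and t1, t1, t2
#         wruflags t1, t0
#     };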
|
bsd-3-clause
| 27,415,377,803,394,724 | -1,214,816,795,912,178,200 | 32.927711 | 72 | 0.742188 | false |
ianmiell/OLD-shutitdist
|
inetutils/inetutils.py
|
1
|
1466
|
"""ShutIt module. See http://shutit.tk
"""
from shutit_module import ShutItModule
class inetutils(ShutItModule):
def is_installed(self, shutit):
return shutit.file_exists('/root/shutit_build/module_record/' + self.module_id + '/built')
def build(self, shutit):
shutit.send('mkdir -p /tmp/build/inetutils')
shutit.send('cd /tmp/build/inetutils')
shutit.send('curl -L http://ftp.gnu.org/gnu/inetutils/inetutils-1.9.2.tar.gz | tar -zxf -')
shutit.send('cd inetutils*')
shutit.send('''echo '#define PATH_PROCNET_DEV "/proc/net/dev"' >> ifconfig/system/linux.h''')
shutit.send('./configure --prefix=/usr --localstatedir=/var --disable-logger --disable-whois --disable-servers')
shutit.send('make')
shutit.send('make install')
shutit.send('mv -v /usr/bin/{hostname,ping,ping6,traceroute} /bin')
shutit.send('mv -v /usr/bin/ifconfig /sbin')
shutit.send('./configure --prefix=/usr')
return True
#def get_config(self, shutit):
# shutit.get_config(self.module_id,'item','default')
# return True
#def check_ready(self, shutit):
# return True
#def start(self, shutit):
# return True
#def stop(self, shutit):
# return True
#def finalize(self, shutit):
# return True
#def remove(self, shutit):
# return True
#def test(self, shutit):
# return True
def module():
return inetutils(
'shutit.tk.sd.inetutils.inetutils', 158844782.0047,
description='',
maintainer='',
depends=['shutit.tk.sd.pkg_config.pkg_config']
)
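# --- Hedged usage sketch (added for illustration; not part of the original
# module). ShutIt discovers this file through module(); the framework then
# drives the instance roughly as below (`shutit` is supplied by ShutIt).
def _example_drive(shutit):
    m = module()
    if not m.is_installed(shutit):
        m.build(shutit)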
|
gpl-2.0
| -7,156,356,372,825,553,000 | 8,237,483,710,203,182,000 | 25.178571 | 114 | 0.686903 | false |
JackKelly/neuralnilm_prototype
|
scripts/experiment029.py
|
2
|
3262
|
from __future__ import division
import matplotlib.pyplot as plt
import numpy as np
import theano
import theano.tensor as T
import lasagne
from gen_data_029 import gen_data, N_BATCH, LENGTH
theano.config.compute_test_value = 'raise'
# Number of units in the hidden (recurrent) layer
N_HIDDEN = 5
# SGD learning rate
LEARNING_RATE = 1e-1
# Number of iterations to train the net
N_ITERATIONS = 200
# Generate a "validation" sequence whose cost we will periodically compute
X_val, y_val = gen_data()
n_features = X_val.shape[-1]
n_output = y_val.shape[-1]
assert X_val.shape == (N_BATCH, LENGTH, n_features)
assert y_val.shape == (N_BATCH, LENGTH, n_output)
# Construct LSTM RNN: One LSTM layer and one dense output layer
l_in = lasagne.layers.InputLayer(shape=(N_BATCH, LENGTH, n_features))
# setup fwd and bck LSTM layer.
l_fwd = lasagne.layers.LSTMLayer(
l_in, N_HIDDEN, backwards=False, learn_init=True, peepholes=True)
l_bck = lasagne.layers.LSTMLayer(
l_in, N_HIDDEN, backwards=True, learn_init=True, peepholes=True)
# concatenate forward and backward LSTM layers
l_fwd_reshape = lasagne.layers.ReshapeLayer(l_fwd, (N_BATCH*LENGTH, N_HIDDEN))
l_bck_reshape = lasagne.layers.ReshapeLayer(l_bck, (N_BATCH*LENGTH, N_HIDDEN))
l_concat = lasagne.layers.ConcatLayer([l_fwd_reshape, l_bck_reshape], axis=1)
l_recurrent_out = lasagne.layers.DenseLayer(
l_concat, num_units=n_output, nonlinearity=None)
l_out = lasagne.layers.ReshapeLayer(
l_recurrent_out, (N_BATCH, LENGTH, n_output))
input = T.tensor3('input')
target_output = T.tensor3('target_output')
# add test values
input.tag.test_value = np.random.rand(
*X_val.shape).astype(theano.config.floatX)
target_output.tag.test_value = np.random.rand(
*y_val.shape).astype(theano.config.floatX)
# Cost = mean squared error
cost = T.mean((l_out.get_output(input) - target_output)**2)
# Use NAG for training
all_params = lasagne.layers.get_all_params(l_out)
updates = lasagne.updates.nesterov_momentum(cost, all_params, LEARNING_RATE)
# Theano functions for training, getting output, and computing cost
train = theano.function([input, target_output],
cost, updates=updates, on_unused_input='warn',
allow_input_downcast=True)
y_pred = theano.function(
[input], l_out.get_output(input), on_unused_input='warn',
allow_input_downcast=True)
compute_cost = theano.function(
[input, target_output], cost, on_unused_input='warn',
allow_input_downcast=True)
# Train the net
def run_training():
costs = np.zeros(N_ITERATIONS)
for n in range(N_ITERATIONS):
X, y = gen_data()
        # if your data requires a mask, substitute your own training mask here
costs[n] = train(X, y)
if not n % 10:
cost_val = compute_cost(X_val, y_val)
print "Iteration {} validation cost = {}".format(n, cost_val)
plt.plot(costs)
plt.xlabel('Iteration')
plt.ylabel('Cost')
plt.show()
def plot_estimates():
X, y = gen_data()
y_predictions = y_pred(X)
ax = plt.gca()
ax.plot(y_predictions[0,:,0], label='estimate')
ax.plot(y[0,:,0], label='ground truth')
# ax.plot(X[0,:,0], label='aggregate')
ax.legend()
plt.show()
run_training()
plot_estimates()
|
mit
| 1,701,497,126,155,149,000 | 1,032,227,877,129,775,900 | 30.980392 | 78 | 0.686082 | false |
Unow/edx-platform
|
common/lib/capa/capa/tests/test_correctmap.py
|
61
|
5833
|
"""
Tests to verify that CorrectMap behaves correctly
"""
import unittest
from capa.correctmap import CorrectMap
import datetime
class CorrectMapTest(unittest.TestCase):
"""
Tests to verify that CorrectMap behaves correctly
"""
def setUp(self):
self.cmap = CorrectMap()
def test_set_input_properties(self):
# Set the correctmap properties for two inputs
self.cmap.set(
answer_id='1_2_1',
correctness='correct',
npoints=5,
msg='Test message',
hint='Test hint',
hintmode='always',
queuestate={
'key': 'secretstring',
'time': '20130228100026'
}
)
self.cmap.set(
answer_id='2_2_1',
correctness='incorrect',
npoints=None,
msg=None,
hint=None,
hintmode=None,
queuestate=None
)
# Assert that each input has the expected properties
self.assertTrue(self.cmap.is_correct('1_2_1'))
self.assertFalse(self.cmap.is_correct('2_2_1'))
self.assertEqual(self.cmap.get_correctness('1_2_1'), 'correct')
self.assertEqual(self.cmap.get_correctness('2_2_1'), 'incorrect')
self.assertEqual(self.cmap.get_npoints('1_2_1'), 5)
self.assertEqual(self.cmap.get_npoints('2_2_1'), 0)
self.assertEqual(self.cmap.get_msg('1_2_1'), 'Test message')
self.assertEqual(self.cmap.get_msg('2_2_1'), None)
self.assertEqual(self.cmap.get_hint('1_2_1'), 'Test hint')
self.assertEqual(self.cmap.get_hint('2_2_1'), None)
self.assertEqual(self.cmap.get_hintmode('1_2_1'), 'always')
self.assertEqual(self.cmap.get_hintmode('2_2_1'), None)
self.assertTrue(self.cmap.is_queued('1_2_1'))
self.assertFalse(self.cmap.is_queued('2_2_1'))
self.assertEqual(self.cmap.get_queuetime_str('1_2_1'), '20130228100026')
self.assertEqual(self.cmap.get_queuetime_str('2_2_1'), None)
self.assertTrue(self.cmap.is_right_queuekey('1_2_1', 'secretstring'))
self.assertFalse(self.cmap.is_right_queuekey('1_2_1', 'invalidstr'))
self.assertFalse(self.cmap.is_right_queuekey('1_2_1', ''))
self.assertFalse(self.cmap.is_right_queuekey('1_2_1', None))
self.assertFalse(self.cmap.is_right_queuekey('2_2_1', 'secretstring'))
self.assertFalse(self.cmap.is_right_queuekey('2_2_1', 'invalidstr'))
self.assertFalse(self.cmap.is_right_queuekey('2_2_1', ''))
self.assertFalse(self.cmap.is_right_queuekey('2_2_1', None))
def test_get_npoints(self):
# Set the correctmap properties for 4 inputs
# 1) correct, 5 points
# 2) correct, None points
# 3) incorrect, 5 points
# 4) incorrect, None points
# 5) correct, 0 points
self.cmap.set(
answer_id='1_2_1',
correctness='correct',
npoints=5
)
self.cmap.set(
answer_id='2_2_1',
correctness='correct',
npoints=None
)
self.cmap.set(
answer_id='3_2_1',
correctness='incorrect',
npoints=5
)
self.cmap.set(
answer_id='4_2_1',
correctness='incorrect',
npoints=None
)
self.cmap.set(
answer_id='5_2_1',
correctness='correct',
npoints=0
)
# Assert that we get the expected points
# If points assigned --> npoints
# If no points assigned and correct --> 1 point
# If no points assigned and incorrect --> 0 points
self.assertEqual(self.cmap.get_npoints('1_2_1'), 5)
self.assertEqual(self.cmap.get_npoints('2_2_1'), 1)
self.assertEqual(self.cmap.get_npoints('3_2_1'), 5)
self.assertEqual(self.cmap.get_npoints('4_2_1'), 0)
self.assertEqual(self.cmap.get_npoints('5_2_1'), 0)
def test_set_overall_message(self):
        # Default is an empty string
self.assertEqual(self.cmap.get_overall_message(), "")
# Set a message that applies to the whole question
self.cmap.set_overall_message("Test message")
# Retrieve the message
self.assertEqual(self.cmap.get_overall_message(), "Test message")
# Setting the message to None --> empty string
self.cmap.set_overall_message(None)
self.assertEqual(self.cmap.get_overall_message(), "")
def test_update_from_correctmap(self):
# Initialize a CorrectMap with some properties
self.cmap.set(
answer_id='1_2_1',
correctness='correct',
npoints=5,
msg='Test message',
hint='Test hint',
hintmode='always',
queuestate={
'key': 'secretstring',
'time': '20130228100026'
}
)
self.cmap.set_overall_message("Test message")
# Create a second cmap, then update it to have the same properties
# as the first cmap
other_cmap = CorrectMap()
other_cmap.update(self.cmap)
# Assert that it has all the same properties
self.assertEqual(
other_cmap.get_overall_message(),
self.cmap.get_overall_message()
)
self.assertEqual(
other_cmap.get_dict(),
self.cmap.get_dict()
)
def test_update_from_invalid(self):
# Should get an exception if we try to update() a CorrectMap
# with a non-CorrectMap value
invalid_list = [None, "string", 5, datetime.datetime.today()]
for invalid in invalid_list:
with self.assertRaises(Exception):
self.cmap.update(invalid)
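if __name__ == '__main__':
    # Hedged addendum (not in the original file): a conventional entry point,
    # preceded by a one-line demonstration of the npoints default rules
    # exercised in test_get_npoints above.
    demo = CorrectMap()
    demo.set(answer_id='demo_1_1', correctness='correct', npoints=None)
    assert demo.get_npoints('demo_1_1') == 1  # correct, no points --> 1 point
    unittest.main()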
|
agpl-3.0
| 4,716,075,139,380,063,000 | 4,685,071,168,089,246,000 | 31.405556 | 80 | 0.570204 | false |
zmr/namsel
|
accuracy_test.py
|
1
|
2139
|
#encoding: utf-8
import cPickle as pickle
from classify import load_cls, label_chars
from cv2 import GaussianBlur
from feature_extraction import get_zernike_moments, get_hu_moments, \
extract_features, normalize_and_extract_features
from functools import partial
import glob
from multiprocessing.pool import Pool
import numpy as np
import os
from sklearn.externals import joblib
from sobel_features import sobel_features
from transitions import transition_features
from fast_utils import fnormalize, ftrim
cls = load_cls('logistic-cls')
# Load testing sets
print 'Loading test data'
tsets = pickle.load(open('datasets/testing/training_sets.pkl', 'rb'))
scaler = joblib.load('zernike_scaler-latest')
print 'importing classifier'
print cls.get_params()
print 'scoring ...'
keys = tsets.keys()
keys.sort()
all_samples = []
## Baseline accuracies for the data in tsets (consumed by compare_to_baseline below)
baseline = [0.608, 0.5785123966942148, 0.4782608695652174, 0.7522123893805309,
0.6884057971014492, 0.5447154471544715, 0.9752066115702479,
0.9830508474576272]
def test_accuracy(t, clsf=None):
'''Get accuracy score for a testset t'''
    if not clsf:
        clsf = cls  # fall back to the module-level classifier
y = tsets[t][:,0]
x = tsets[t][:,1:]
x3 = []
for j in x:
j = ftrim(j.reshape((32,16)).astype(np.uint8))
x3.append(normalize_and_extract_features(j))
    pred = clsf.predict(x3)
s = 0
for i, p in enumerate(pred):
if float(p) == y[i]:
s += 1.0
else:
                print 'incorrect', label_chars[y[i]], '||', label_chars[p], t #, max(clsf.predict_proba(x3[i])[0])
score = s / len(y)
return score
def test_all(clsf=None):
'''Run accuracy tests for all testsets'''
print 'starting tests. this will take a moment'
test_accuracy(keys[0], clsf)
    # use a distinct name so the pooled helper doesn't shadow this function
    test_one = partial(test_accuracy, clsf=clsf)
    p = Pool()
    all_samples = p.map(test_one, keys)
for t, s in zip(keys, all_samples):
print t, s
return np.mean(all_samples)
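# Hedged addition (not in the original script): `baseline` above is defined but
# never consulted. This sketch shows one plausible use, assuming `keys` and
# `baseline` are index-aligned.
def compare_to_baseline(scores):
    '''Print any testset whose accuracy fell below its recorded baseline.'''
    for tset, score, base in zip(keys, scores, baseline):
        if score < base:
            print 'regression on', tset, ':', score, '<', base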
if __name__ == '__main__':
print test_all()
|
mit
| -4,550,017,685,354,198,500 | -6,342,829,419,377,071,000 | 23.872093 | 107 | 0.640019 | false |
cdrooom/odoo
|
addons/account_payment/wizard/account_payment_order.py
|
8
|
5838
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from lxml import etree
from openerp.osv import fields, osv
from openerp.tools.translate import _
class payment_order_create(osv.osv_memory):
"""
Create a payment object with lines corresponding to the account move line
to pay according to the date and the mode provided by the user.
Hypothesis:
    - Small number of non-reconciled move lines, payment modes and bank account types,
    - Large number of partners and bank accounts.
If a type is given, unsuitable account Entry lines are ignored.
"""
_name = 'payment.order.create'
_description = 'payment.order.create'
_columns = {
'duedate': fields.date('Due Date', required=True),
'entries': fields.many2many('account.move.line', 'line_pay_rel', 'pay_id', 'line_id', 'Entries')
}
_defaults = {
'duedate': lambda *a: time.strftime('%Y-%m-%d'),
}
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
if not context: context = {}
res = super(payment_order_create, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=False)
if context and 'line_ids' in context:
doc = etree.XML(res['arch'])
nodes = doc.xpath("//field[@name='entries']")
for node in nodes:
node.set('domain', '[("id", "in", '+ str(context['line_ids'])+')]')
res['arch'] = etree.tostring(doc)
return res
def create_payment(self, cr, uid, ids, context=None):
order_obj = self.pool.get('payment.order')
line_obj = self.pool.get('account.move.line')
payment_obj = self.pool.get('payment.line')
if context is None:
context = {}
data = self.browse(cr, uid, ids, context=context)[0]
line_ids = [entry.id for entry in data.entries]
if not line_ids:
return {'type': 'ir.actions.act_window_close'}
payment = order_obj.browse(cr, uid, context['active_id'], context=context)
t = None
line2bank = line_obj.line2bank(cr, uid, line_ids, t, context)
## Finally populate the current payment with new lines:
for line in line_obj.browse(cr, uid, line_ids, context=context):
if payment.date_prefered == "now":
                # no payment date => immediate payment (this rule is sketched
                # as a standalone function at the end of the file)
date_to_pay = False
elif payment.date_prefered == 'due':
date_to_pay = line.date_maturity
elif payment.date_prefered == 'fixed':
date_to_pay = payment.date_scheduled
payment_obj.create(cr, uid,{
'move_line_id': line.id,
'amount_currency': line.amount_residual_currency,
'bank_id': line2bank.get(line.id),
'order_id': payment.id,
'partner_id': line.partner_id and line.partner_id.id or False,
'communication': line.ref or '/',
'state': line.invoice and line.invoice.reference_type != 'none' and 'structured' or 'normal',
'date': date_to_pay,
'currency': (line.invoice and line.invoice.currency_id.id) or line.journal_id.currency.id or line.journal_id.company_id.currency_id.id,
}, context=context)
return {'type': 'ir.actions.act_window_close'}
def search_entries(self, cr, uid, ids, context=None):
line_obj = self.pool.get('account.move.line')
mod_obj = self.pool.get('ir.model.data')
if context is None:
context = {}
data = self.browse(cr, uid, ids, context=context)[0]
search_due_date = data.duedate
# payment = self.pool.get('payment.order').browse(cr, uid, context['active_id'], context=context)
# Search for move line to pay:
domain = [('reconcile_id', '=', False), ('account_id.type', '=', 'payable'), ('credit', '>', 0), ('account_id.reconcile', '=', True)]
domain = domain + ['|', ('date_maturity', '<=', search_due_date), ('date_maturity', '=', False)]
line_ids = line_obj.search(cr, uid, domain, context=context)
context = dict(context, line_ids=line_ids)
model_data_ids = mod_obj.search(cr, uid,[('model', '=', 'ir.ui.view'), ('name', '=', 'view_create_payment_order_lines')], context=context)
resource_id = mod_obj.read(cr, uid, model_data_ids, fields=['res_id'], context=context)[0]['res_id']
return {'name': _('Entry Lines'),
'context': context,
'view_type': 'form',
'view_mode': 'form',
'res_model': 'payment.order.create',
'views': [(resource_id,'form')],
'type': 'ir.actions.act_window',
'target': 'new',
}
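# Hedged illustration (not part of the original module): the due-date rule that
# create_payment applies to each move line, extracted as a pure function.
# e.g. _date_to_pay('due', '2014-01-31', None) == '2014-01-31'
def _date_to_pay(date_prefered, date_maturity, date_scheduled):
    if date_prefered == "now":
        return False  # no payment date => immediate payment
    elif date_prefered == 'due':
        return date_maturity
    elif date_prefered == 'fixed':
        return date_scheduled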
|
agpl-3.0
| -8,419,340,071,826,836,000 | -7,138,747,198,877,889,000 | 47.247934 | 159 | 0.578452 | false |
jseabold/scipy
|
scipy/sparse/csc.py
|
58
|
6330
|
"""Compressed Sparse Column matrix format"""
from __future__ import division, print_function, absolute_import
__docformat__ = "restructuredtext en"
__all__ = ['csc_matrix', 'isspmatrix_csc']
import numpy as np
from scipy._lib.six import xrange
from ._sparsetools import csc_tocsr
from . import _sparsetools
from .sputils import upcast, isintlike, IndexMixin, get_index_dtype
from .compressed import _cs_matrix
class csc_matrix(_cs_matrix, IndexMixin):
"""
Compressed Sparse Column matrix
This can be instantiated in several ways:
csc_matrix(D)
with a dense matrix or rank-2 ndarray D
csc_matrix(S)
with another sparse matrix S (equivalent to S.tocsc())
csc_matrix((M, N), [dtype])
to construct an empty matrix with shape (M, N)
dtype is optional, defaulting to dtype='d'.
csc_matrix((data, (row_ind, col_ind)), [shape=(M, N)])
where ``data``, ``row_ind`` and ``col_ind`` satisfy the
relationship ``a[row_ind[k], col_ind[k]] = data[k]``.
csc_matrix((data, indices, indptr), [shape=(M, N)])
is the standard CSC representation where the row indices for
column i are stored in ``indices[indptr[i]:indptr[i+1]]``
and their corresponding values are stored in
``data[indptr[i]:indptr[i+1]]``. If the shape parameter is
not supplied, the matrix dimensions are inferred from
the index arrays.
Attributes
----------
dtype : dtype
Data type of the matrix
shape : 2-tuple
Shape of the matrix
ndim : int
Number of dimensions (this is always 2)
nnz
Number of nonzero elements
data
Data array of the matrix
indices
CSC format index array
indptr
CSC format index pointer array
has_sorted_indices
Whether indices are sorted
Notes
-----
Sparse matrices can be used in arithmetic operations: they support
addition, subtraction, multiplication, division, and matrix power.
Advantages of the CSC format
- efficient arithmetic operations CSC + CSC, CSC * CSC, etc.
- efficient column slicing
- fast matrix vector products (CSR, BSR may be faster)
Disadvantages of the CSC format
- slow row slicing operations (consider CSR)
- changes to the sparsity structure are expensive (consider LIL or DOK)
Examples
--------
>>> import numpy as np
>>> from scipy.sparse import csc_matrix
>>> csc_matrix((3, 4), dtype=np.int8).toarray()
array([[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]], dtype=int8)
>>> row = np.array([0, 2, 2, 0, 1, 2])
>>> col = np.array([0, 0, 1, 2, 2, 2])
>>> data = np.array([1, 2, 3, 4, 5, 6])
>>> csc_matrix((data, (row, col)), shape=(3, 3)).toarray()
array([[1, 0, 4],
[0, 0, 5],
[2, 3, 6]])
>>> indptr = np.array([0, 2, 3, 6])
>>> indices = np.array([0, 2, 2, 0, 1, 2])
>>> data = np.array([1, 2, 3, 4, 5, 6])
>>> csc_matrix((data, indices, indptr), shape=(3, 3)).toarray()
array([[1, 0, 4],
[0, 0, 5],
[2, 3, 6]])
"""
def transpose(self, copy=False):
from .csr import csr_matrix
M,N = self.shape
        return csr_matrix((self.data, self.indices, self.indptr), (N, M), copy=copy)
def __iter__(self):
csr = self.tocsr()
for r in xrange(self.shape[0]):
yield csr[r,:]
def tocsc(self, copy=False):
if copy:
return self.copy()
else:
return self
def tocsr(self):
M,N = self.shape
idx_dtype = get_index_dtype((self.indptr, self.indices),
maxval=max(self.nnz, N))
indptr = np.empty(M + 1, dtype=idx_dtype)
indices = np.empty(self.nnz, dtype=idx_dtype)
data = np.empty(self.nnz, dtype=upcast(self.dtype))
csc_tocsr(M, N,
self.indptr.astype(idx_dtype),
self.indices.astype(idx_dtype),
self.data,
indptr,
indices,
data)
from .csr import csr_matrix
A = csr_matrix((data, indices, indptr), shape=self.shape)
A.has_sorted_indices = True
return A
def __getitem__(self, key):
# Use CSR to implement fancy indexing.
row, col = self._unpack_index(key)
        # Things that return submatrices. row or col is an int or slice.
if (isinstance(row, slice) or isinstance(col, slice) or
isintlike(row) or isintlike(col)):
return self.T[col, row].T
# Things that return a sequence of values.
else:
return self.T[col, row]
def nonzero(self):
# CSC can't use _cs_matrix's .nonzero method because it
# returns the indices sorted for self transposed.
# Get row and col indices, from _cs_matrix.tocoo
major_dim, minor_dim = self._swap(self.shape)
minor_indices = self.indices
major_indices = np.empty(len(minor_indices), dtype=self.indptr.dtype)
_sparsetools.expandptr(major_dim, self.indptr, major_indices)
row, col = self._swap((major_indices, minor_indices))
# Sort them to be in C-style order
ind = np.lexsort((col, row))
row = row[ind]
col = col[ind]
return row, col
nonzero.__doc__ = _cs_matrix.nonzero.__doc__
def getrow(self, i):
"""Returns a copy of row i of the matrix, as a (1 x n)
CSR matrix (row vector).
"""
# we convert to CSR to maintain compatibility with old impl.
# in spmatrix.getrow()
return self._get_submatrix(i, slice(None)).tocsr()
def getcol(self, i):
"""Returns a copy of column i of the matrix, as a (m x 1)
CSC matrix (column vector).
"""
return self._get_submatrix(slice(None), i)
# these functions are used by the parent class (_cs_matrix)
    # to remove redundancy between csc_matrix and csr_matrix
def _swap(self,x):
"""swap the members of x if this is a column-oriented matrix
"""
return (x[1],x[0])
def isspmatrix_csc(x):
return isinstance(x, csc_matrix)
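# Usage sketch (not part of scipy): per the Notes above, convert a CSC matrix
# to CSR before repeated row slicing, since row operations are slow on CSC.
def _csc_row_slice_demo():
    A = csc_matrix(np.eye(3))
    assert isspmatrix_csc(A)
    B = A.tocsr()  # row slicing and matrix-vector products are faster here
    return B[0, :].toarray()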
|
bsd-3-clause
| 3,948,736,279,085,598,700 | 1,242,376,458,238,591,500 | 30.492537 | 79 | 0.570616 | false |
ktan2020/legacy-automation
|
win/Lib/test/test_os.py
|
7
|
32181
|
# As a test suite for the os module, this is woefully inadequate, but this
# does add tests for a few functions which have been determined to be more
# portable than they had been thought to be.
import os
import errno
import unittest
import warnings
import sys
import signal
import subprocess
import time
from test import test_support
import mmap
import uuid
warnings.filterwarnings("ignore", "tempnam", RuntimeWarning, __name__)
warnings.filterwarnings("ignore", "tmpnam", RuntimeWarning, __name__)
# Tests creating TESTFN
class FileTests(unittest.TestCase):
def setUp(self):
if os.path.exists(test_support.TESTFN):
os.unlink(test_support.TESTFN)
tearDown = setUp
def test_access(self):
f = os.open(test_support.TESTFN, os.O_CREAT|os.O_RDWR)
os.close(f)
self.assertTrue(os.access(test_support.TESTFN, os.W_OK))
def test_closerange(self):
first = os.open(test_support.TESTFN, os.O_CREAT|os.O_RDWR)
# We must allocate two consecutive file descriptors, otherwise
# it will mess up other file descriptors (perhaps even the three
# standard ones).
second = os.dup(first)
try:
retries = 0
while second != first + 1:
os.close(first)
retries += 1
if retries > 10:
# XXX test skipped
self.skipTest("couldn't allocate two consecutive fds")
first, second = second, os.dup(second)
finally:
os.close(second)
# close a fd that is open, and one that isn't
os.closerange(first, first + 2)
self.assertRaises(OSError, os.write, first, "a")
@test_support.cpython_only
def test_rename(self):
path = unicode(test_support.TESTFN)
old = sys.getrefcount(path)
self.assertRaises(TypeError, os.rename, path, 0)
new = sys.getrefcount(path)
self.assertEqual(old, new)
class TemporaryFileTests(unittest.TestCase):
def setUp(self):
self.files = []
os.mkdir(test_support.TESTFN)
def tearDown(self):
for name in self.files:
os.unlink(name)
os.rmdir(test_support.TESTFN)
def check_tempfile(self, name):
# make sure it doesn't already exist:
self.assertFalse(os.path.exists(name),
"file already exists for temporary file")
# make sure we can create the file
open(name, "w")
self.files.append(name)
def test_tempnam(self):
if not hasattr(os, "tempnam"):
return
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "tempnam", RuntimeWarning,
r"test_os$")
warnings.filterwarnings("ignore", "tempnam", DeprecationWarning)
self.check_tempfile(os.tempnam())
name = os.tempnam(test_support.TESTFN)
self.check_tempfile(name)
name = os.tempnam(test_support.TESTFN, "pfx")
self.assertTrue(os.path.basename(name)[:3] == "pfx")
self.check_tempfile(name)
def test_tmpfile(self):
if not hasattr(os, "tmpfile"):
return
# As with test_tmpnam() below, the Windows implementation of tmpfile()
# attempts to create a file in the root directory of the current drive.
# On Vista and Server 2008, this test will always fail for normal users
# as writing to the root directory requires elevated privileges. With
# XP and below, the semantics of tmpfile() are the same, but the user
# running the test is more likely to have administrative privileges on
# their account already. If that's the case, then os.tmpfile() should
# work. In order to make this test as useful as possible, rather than
# trying to detect Windows versions or whether or not the user has the
# right permissions, just try and create a file in the root directory
# and see if it raises a 'Permission denied' OSError. If it does, then
# test that a subsequent call to os.tmpfile() raises the same error. If
# it doesn't, assume we're on XP or below and the user running the test
# has administrative privileges, and proceed with the test as normal.
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "tmpfile", DeprecationWarning)
if sys.platform == 'win32':
name = '\\python_test_os_test_tmpfile.txt'
if os.path.exists(name):
os.remove(name)
try:
fp = open(name, 'w')
except IOError, first:
# open() failed, assert tmpfile() fails in the same way.
# Although open() raises an IOError and os.tmpfile() raises an
# OSError(), 'args' will be (13, 'Permission denied') in both
# cases.
try:
fp = os.tmpfile()
except OSError, second:
self.assertEqual(first.args, second.args)
else:
self.fail("expected os.tmpfile() to raise OSError")
return
else:
# open() worked, therefore, tmpfile() should work. Close our
# dummy file and proceed with the test as normal.
fp.close()
os.remove(name)
fp = os.tmpfile()
fp.write("foobar")
fp.seek(0,0)
s = fp.read()
fp.close()
self.assertTrue(s == "foobar")
def test_tmpnam(self):
if not hasattr(os, "tmpnam"):
return
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "tmpnam", RuntimeWarning,
r"test_os$")
warnings.filterwarnings("ignore", "tmpnam", DeprecationWarning)
name = os.tmpnam()
if sys.platform in ("win32",):
# The Windows tmpnam() seems useless. From the MS docs:
#
# The character string that tmpnam creates consists of
# the path prefix, defined by the entry P_tmpdir in the
# file STDIO.H, followed by a sequence consisting of the
# digit characters '0' through '9'; the numerical value
# of this string is in the range 1 - 65,535. Changing the
# definitions of L_tmpnam or P_tmpdir in STDIO.H does not
# change the operation of tmpnam.
#
# The really bizarre part is that, at least under MSVC6,
# P_tmpdir is "\\". That is, the path returned refers to
# the root of the current drive. That's a terrible place to
# put temp files, and, depending on privileges, the user
# may not even be able to open a file in the root directory.
self.assertFalse(os.path.exists(name),
"file already exists for temporary file")
else:
self.check_tempfile(name)
# Test attributes on return values from os.*stat* family.
class StatAttributeTests(unittest.TestCase):
def setUp(self):
os.mkdir(test_support.TESTFN)
self.fname = os.path.join(test_support.TESTFN, "f1")
f = open(self.fname, 'wb')
f.write("ABC")
f.close()
def tearDown(self):
os.unlink(self.fname)
os.rmdir(test_support.TESTFN)
def test_stat_attributes(self):
if not hasattr(os, "stat"):
return
import stat
result = os.stat(self.fname)
# Make sure direct access works
self.assertEqual(result[stat.ST_SIZE], 3)
self.assertEqual(result.st_size, 3)
# Make sure all the attributes are there
members = dir(result)
for name in dir(stat):
if name[:3] == 'ST_':
attr = name.lower()
if name.endswith("TIME"):
def trunc(x): return int(x)
else:
def trunc(x): return x
self.assertEqual(trunc(getattr(result, attr)),
result[getattr(stat, name)])
self.assertIn(attr, members)
try:
result[200]
self.fail("No exception thrown")
except IndexError:
pass
# Make sure that assignment fails
try:
result.st_mode = 1
self.fail("No exception thrown")
except (AttributeError, TypeError):
pass
try:
result.st_rdev = 1
self.fail("No exception thrown")
except (AttributeError, TypeError):
pass
try:
result.parrot = 1
self.fail("No exception thrown")
except AttributeError:
pass
# Use the stat_result constructor with a too-short tuple.
try:
result2 = os.stat_result((10,))
self.fail("No exception thrown")
except TypeError:
pass
# Use the constructor with a too-long tuple.
try:
result2 = os.stat_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14))
except TypeError:
pass
def test_statvfs_attributes(self):
if not hasattr(os, "statvfs"):
return
try:
result = os.statvfs(self.fname)
except OSError, e:
# On AtheOS, glibc always returns ENOSYS
if e.errno == errno.ENOSYS:
return
# Make sure direct access works
self.assertEqual(result.f_bfree, result[3])
# Make sure all the attributes are there.
members = ('bsize', 'frsize', 'blocks', 'bfree', 'bavail', 'files',
'ffree', 'favail', 'flag', 'namemax')
for value, member in enumerate(members):
self.assertEqual(getattr(result, 'f_' + member), result[value])
# Make sure that assignment really fails
try:
result.f_bfree = 1
self.fail("No exception thrown")
except TypeError:
pass
try:
result.parrot = 1
self.fail("No exception thrown")
except AttributeError:
pass
# Use the constructor with a too-short tuple.
try:
result2 = os.statvfs_result((10,))
self.fail("No exception thrown")
except TypeError:
pass
# Use the constructor with a too-long tuple.
try:
result2 = os.statvfs_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14))
except TypeError:
pass
def test_utime_dir(self):
delta = 1000000
st = os.stat(test_support.TESTFN)
# round to int, because some systems may support sub-second
# time stamps in stat, but not in utime.
os.utime(test_support.TESTFN, (st.st_atime, int(st.st_mtime-delta)))
st2 = os.stat(test_support.TESTFN)
self.assertEqual(st2.st_mtime, int(st.st_mtime-delta))
# Restrict test to Win32, since there is no guarantee other
# systems support centiseconds
if sys.platform == 'win32':
def get_file_system(path):
root = os.path.splitdrive(os.path.abspath(path))[0] + '\\'
import ctypes
kernel32 = ctypes.windll.kernel32
buf = ctypes.create_string_buffer("", 100)
if kernel32.GetVolumeInformationA(root, None, 0, None, None, None, buf, len(buf)):
return buf.value
if get_file_system(test_support.TESTFN) == "NTFS":
def test_1565150(self):
t1 = 1159195039.25
os.utime(self.fname, (t1, t1))
self.assertEqual(os.stat(self.fname).st_mtime, t1)
def test_large_time(self):
t1 = 5000000000 # some day in 2128
os.utime(self.fname, (t1, t1))
self.assertEqual(os.stat(self.fname).st_mtime, t1)
def test_1686475(self):
# Verify that an open file can be stat'ed
try:
os.stat(r"c:\pagefile.sys")
except WindowsError, e:
if e.errno == 2: # file does not exist; cannot run test
return
self.fail("Could not stat pagefile.sys")
from test import mapping_tests
class EnvironTests(mapping_tests.BasicTestMappingProtocol):
"""check that os.environ object conform to mapping protocol"""
type2test = None
def _reference(self):
return {"KEY1":"VALUE1", "KEY2":"VALUE2", "KEY3":"VALUE3"}
def _empty_mapping(self):
os.environ.clear()
return os.environ
def setUp(self):
self.__save = dict(os.environ)
os.environ.clear()
def tearDown(self):
os.environ.clear()
os.environ.update(self.__save)
# Bug 1110478
def test_update2(self):
if os.path.exists("/bin/sh"):
os.environ.update(HELLO="World")
with os.popen("/bin/sh -c 'echo $HELLO'") as popen:
value = popen.read().strip()
self.assertEqual(value, "World")
class WalkTests(unittest.TestCase):
"""Tests for os.walk()."""
def test_traversal(self):
import os
from os.path import join
# Build:
# TESTFN/
# TEST1/ a file kid and two directory kids
# tmp1
# SUB1/ a file kid and a directory kid
# tmp2
# SUB11/ no kids
# SUB2/ a file kid and a dirsymlink kid
# tmp3
# link/ a symlink to TESTFN.2
# TEST2/
# tmp4 a lone file
walk_path = join(test_support.TESTFN, "TEST1")
sub1_path = join(walk_path, "SUB1")
sub11_path = join(sub1_path, "SUB11")
sub2_path = join(walk_path, "SUB2")
tmp1_path = join(walk_path, "tmp1")
tmp2_path = join(sub1_path, "tmp2")
tmp3_path = join(sub2_path, "tmp3")
link_path = join(sub2_path, "link")
t2_path = join(test_support.TESTFN, "TEST2")
tmp4_path = join(test_support.TESTFN, "TEST2", "tmp4")
# Create stuff.
os.makedirs(sub11_path)
os.makedirs(sub2_path)
os.makedirs(t2_path)
for path in tmp1_path, tmp2_path, tmp3_path, tmp4_path:
f = file(path, "w")
f.write("I'm " + path + " and proud of it. Blame test_os.\n")
f.close()
if hasattr(os, "symlink"):
os.symlink(os.path.abspath(t2_path), link_path)
sub2_tree = (sub2_path, ["link"], ["tmp3"])
else:
sub2_tree = (sub2_path, [], ["tmp3"])
# Walk top-down.
all = list(os.walk(walk_path))
self.assertEqual(len(all), 4)
# We can't know which order SUB1 and SUB2 will appear in.
# Not flipped: TESTFN, SUB1, SUB11, SUB2
# flipped: TESTFN, SUB2, SUB1, SUB11
flipped = all[0][1][0] != "SUB1"
all[0][1].sort()
self.assertEqual(all[0], (walk_path, ["SUB1", "SUB2"], ["tmp1"]))
self.assertEqual(all[1 + flipped], (sub1_path, ["SUB11"], ["tmp2"]))
self.assertEqual(all[2 + flipped], (sub11_path, [], []))
self.assertEqual(all[3 - 2 * flipped], sub2_tree)
# Prune the search.
all = []
for root, dirs, files in os.walk(walk_path):
all.append((root, dirs, files))
# Don't descend into SUB1.
if 'SUB1' in dirs:
# Note that this also mutates the dirs we appended to all!
dirs.remove('SUB1')
self.assertEqual(len(all), 2)
self.assertEqual(all[0], (walk_path, ["SUB2"], ["tmp1"]))
self.assertEqual(all[1], sub2_tree)
# Walk bottom-up.
all = list(os.walk(walk_path, topdown=False))
self.assertEqual(len(all), 4)
# We can't know which order SUB1 and SUB2 will appear in.
# Not flipped: SUB11, SUB1, SUB2, TESTFN
# flipped: SUB2, SUB11, SUB1, TESTFN
flipped = all[3][1][0] != "SUB1"
all[3][1].sort()
self.assertEqual(all[3], (walk_path, ["SUB1", "SUB2"], ["tmp1"]))
self.assertEqual(all[flipped], (sub11_path, [], []))
self.assertEqual(all[flipped + 1], (sub1_path, ["SUB11"], ["tmp2"]))
self.assertEqual(all[2 - 2 * flipped], sub2_tree)
if hasattr(os, "symlink"):
# Walk, following symlinks.
for root, dirs, files in os.walk(walk_path, followlinks=True):
if root == link_path:
self.assertEqual(dirs, [])
self.assertEqual(files, ["tmp4"])
break
else:
self.fail("Didn't follow symlink with followlinks=True")
def tearDown(self):
# Tear everything down. This is a decent use for bottom-up on
# Windows, which doesn't have a recursive delete command. The
# (not so) subtlety is that rmdir will fail unless the dir's
        # kids are removed first, so bottom up is essential. (A standalone
        # sketch of this pattern follows this class.)
for root, dirs, files in os.walk(test_support.TESTFN, topdown=False):
for name in files:
os.remove(os.path.join(root, name))
for name in dirs:
dirname = os.path.join(root, name)
if not os.path.islink(dirname):
os.rmdir(dirname)
else:
os.remove(dirname)
os.rmdir(test_support.TESTFN)
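# Aside (hedged, not part of the original suite): the bottom-up removal in
# WalkTests.tearDown is essentially a minimal recursive delete; this bare
# sketch ignores the symlink handling done above.
def _rmtree_by_walk(top):
    for root, dirs, files in os.walk(top, topdown=False):
        for name in files:
            os.remove(os.path.join(root, name))
        for name in dirs:
            os.rmdir(os.path.join(root, name))
    os.rmdir(top)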
class MakedirTests (unittest.TestCase):
def setUp(self):
os.mkdir(test_support.TESTFN)
def test_makedir(self):
base = test_support.TESTFN
path = os.path.join(base, 'dir1', 'dir2', 'dir3')
os.makedirs(path) # Should work
path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4')
os.makedirs(path)
# Try paths with a '.' in them
self.assertRaises(OSError, os.makedirs, os.curdir)
path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4', 'dir5', os.curdir)
os.makedirs(path)
path = os.path.join(base, 'dir1', os.curdir, 'dir2', 'dir3', 'dir4',
'dir5', 'dir6')
os.makedirs(path)
def tearDown(self):
path = os.path.join(test_support.TESTFN, 'dir1', 'dir2', 'dir3',
'dir4', 'dir5', 'dir6')
# If the tests failed, the bottom-most directory ('../dir6')
# may not have been created, so we look for the outermost directory
# that exists.
while not os.path.exists(path) and path != test_support.TESTFN:
path = os.path.dirname(path)
os.removedirs(path)
class DevNullTests (unittest.TestCase):
def test_devnull(self):
f = file(os.devnull, 'w')
f.write('hello')
f.close()
f = file(os.devnull, 'r')
self.assertEqual(f.read(), '')
f.close()
class URandomTests (unittest.TestCase):
def test_urandom(self):
try:
self.assertEqual(len(os.urandom(1)), 1)
self.assertEqual(len(os.urandom(10)), 10)
self.assertEqual(len(os.urandom(100)), 100)
self.assertEqual(len(os.urandom(1000)), 1000)
# see http://bugs.python.org/issue3708
self.assertRaises(TypeError, os.urandom, 0.9)
self.assertRaises(TypeError, os.urandom, 1.1)
self.assertRaises(TypeError, os.urandom, 2.0)
except NotImplementedError:
pass
def test_execvpe_with_bad_arglist(self):
self.assertRaises(ValueError, os.execvpe, 'notepad', [], None)
class Win32ErrorTests(unittest.TestCase):
def test_rename(self):
self.assertRaises(WindowsError, os.rename, test_support.TESTFN, test_support.TESTFN+".bak")
def test_remove(self):
self.assertRaises(WindowsError, os.remove, test_support.TESTFN)
def test_chdir(self):
self.assertRaises(WindowsError, os.chdir, test_support.TESTFN)
def test_mkdir(self):
f = open(test_support.TESTFN, "w")
try:
self.assertRaises(WindowsError, os.mkdir, test_support.TESTFN)
finally:
f.close()
os.unlink(test_support.TESTFN)
def test_utime(self):
self.assertRaises(WindowsError, os.utime, test_support.TESTFN, None)
def test_chmod(self):
self.assertRaises(WindowsError, os.chmod, test_support.TESTFN, 0)
class TestInvalidFD(unittest.TestCase):
singles = ["fchdir", "fdopen", "dup", "fdatasync", "fstat",
"fstatvfs", "fsync", "tcgetpgrp", "ttyname"]
#singles.append("close")
    # We omit close because it doesn't raise an exception on some platforms
def get_single(f):
def helper(self):
if hasattr(os, f):
self.check(getattr(os, f))
return helper
for f in singles:
locals()["test_"+f] = get_single(f)
def check(self, f, *args):
try:
f(test_support.make_bad_fd(), *args)
except OSError as e:
self.assertEqual(e.errno, errno.EBADF)
else:
self.fail("%r didn't raise a OSError with a bad file descriptor"
% f)
def test_isatty(self):
if hasattr(os, "isatty"):
self.assertEqual(os.isatty(test_support.make_bad_fd()), False)
def test_closerange(self):
if hasattr(os, "closerange"):
fd = test_support.make_bad_fd()
# Make sure none of the descriptors we are about to close are
# currently valid (issue 6542).
for i in range(10):
try: os.fstat(fd+i)
except OSError:
pass
else:
break
if i < 2:
raise unittest.SkipTest(
"Unable to acquire a range of invalid file descriptors")
self.assertEqual(os.closerange(fd, fd + i-1), None)
def test_dup2(self):
if hasattr(os, "dup2"):
self.check(os.dup2, 20)
def test_fchmod(self):
if hasattr(os, "fchmod"):
self.check(os.fchmod, 0)
def test_fchown(self):
if hasattr(os, "fchown"):
self.check(os.fchown, -1, -1)
def test_fpathconf(self):
if hasattr(os, "fpathconf"):
self.check(os.fpathconf, "PC_NAME_MAX")
def test_ftruncate(self):
if hasattr(os, "ftruncate"):
self.check(os.ftruncate, 0)
def test_lseek(self):
if hasattr(os, "lseek"):
self.check(os.lseek, 0, 0)
def test_read(self):
if hasattr(os, "read"):
self.check(os.read, 1)
def test_tcsetpgrpt(self):
if hasattr(os, "tcsetpgrp"):
self.check(os.tcsetpgrp, 0)
def test_write(self):
if hasattr(os, "write"):
self.check(os.write, " ")
if sys.platform != 'win32':
class Win32ErrorTests(unittest.TestCase):
pass
class PosixUidGidTests(unittest.TestCase):
if hasattr(os, 'setuid'):
def test_setuid(self):
if os.getuid() != 0:
self.assertRaises(os.error, os.setuid, 0)
self.assertRaises(OverflowError, os.setuid, 1<<32)
if hasattr(os, 'setgid'):
def test_setgid(self):
if os.getuid() != 0:
self.assertRaises(os.error, os.setgid, 0)
self.assertRaises(OverflowError, os.setgid, 1<<32)
if hasattr(os, 'seteuid'):
def test_seteuid(self):
if os.getuid() != 0:
self.assertRaises(os.error, os.seteuid, 0)
self.assertRaises(OverflowError, os.seteuid, 1<<32)
if hasattr(os, 'setegid'):
def test_setegid(self):
if os.getuid() != 0:
self.assertRaises(os.error, os.setegid, 0)
self.assertRaises(OverflowError, os.setegid, 1<<32)
if hasattr(os, 'setreuid'):
def test_setreuid(self):
if os.getuid() != 0:
self.assertRaises(os.error, os.setreuid, 0, 0)
self.assertRaises(OverflowError, os.setreuid, 1<<32, 0)
self.assertRaises(OverflowError, os.setreuid, 0, 1<<32)
def test_setreuid_neg1(self):
# Needs to accept -1. We run this in a subprocess to avoid
# altering the test runner's process state (issue8045).
subprocess.check_call([
sys.executable, '-c',
'import os,sys;os.setreuid(-1,-1);sys.exit(0)'])
if hasattr(os, 'setregid'):
def test_setregid(self):
if os.getuid() != 0:
self.assertRaises(os.error, os.setregid, 0, 0)
self.assertRaises(OverflowError, os.setregid, 1<<32, 0)
self.assertRaises(OverflowError, os.setregid, 0, 1<<32)
def test_setregid_neg1(self):
# Needs to accept -1. We run this in a subprocess to avoid
# altering the test runner's process state (issue8045).
subprocess.check_call([
sys.executable, '-c',
'import os,sys;os.setregid(-1,-1);sys.exit(0)'])
else:
class PosixUidGidTests(unittest.TestCase):
pass
@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
class Win32KillTests(unittest.TestCase):
def _kill(self, sig):
# Start sys.executable as a subprocess and communicate from the
# subprocess to the parent that the interpreter is ready. When it
# becomes ready, send *sig* via os.kill to the subprocess and check
# that the return code is equal to *sig*.
import ctypes
from ctypes import wintypes
import msvcrt
# Since we can't access the contents of the process' stdout until the
# process has exited, use PeekNamedPipe to see what's inside stdout
# without waiting. This is done so we can tell that the interpreter
# is started and running at a point where it could handle a signal.
PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
PeekNamedPipe.restype = wintypes.BOOL
PeekNamedPipe.argtypes = (wintypes.HANDLE, # Pipe handle
ctypes.POINTER(ctypes.c_char), # stdout buf
wintypes.DWORD, # Buffer size
ctypes.POINTER(wintypes.DWORD), # bytes read
ctypes.POINTER(wintypes.DWORD), # bytes avail
ctypes.POINTER(wintypes.DWORD)) # bytes left
msg = "running"
proc = subprocess.Popen([sys.executable, "-c",
"import sys;"
"sys.stdout.write('{}');"
"sys.stdout.flush();"
"input()".format(msg)],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE)
self.addCleanup(proc.stdout.close)
self.addCleanup(proc.stderr.close)
self.addCleanup(proc.stdin.close)
count, max = 0, 100
while count < max and proc.poll() is None:
# Create a string buffer to store the result of stdout from the pipe
buf = ctypes.create_string_buffer(len(msg))
# Obtain the text currently in proc.stdout
# Bytes read/avail/left are left as NULL and unused
rslt = PeekNamedPipe(msvcrt.get_osfhandle(proc.stdout.fileno()),
buf, ctypes.sizeof(buf), None, None, None)
self.assertNotEqual(rslt, 0, "PeekNamedPipe failed")
if buf.value:
self.assertEqual(msg, buf.value)
break
time.sleep(0.1)
count += 1
else:
self.fail("Did not receive communication from the subprocess")
os.kill(proc.pid, sig)
self.assertEqual(proc.wait(), sig)
def test_kill_sigterm(self):
# SIGTERM doesn't mean anything special, but make sure it works
self._kill(signal.SIGTERM)
def test_kill_int(self):
# os.kill on Windows can take an int which gets set as the exit code
self._kill(100)
def _kill_with_event(self, event, name):
tagname = "test_os_%s" % uuid.uuid1()
m = mmap.mmap(-1, 1, tagname)
m[0] = '0'
# Run a script which has console control handling enabled.
proc = subprocess.Popen([sys.executable,
os.path.join(os.path.dirname(__file__),
"win_console_handler.py"), tagname],
creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
# Let the interpreter startup before we send signals. See #3137.
count, max = 0, 20
while count < max and proc.poll() is None:
if m[0] == '1':
break
time.sleep(0.5)
count += 1
else:
self.fail("Subprocess didn't finish initialization")
os.kill(proc.pid, event)
# proc.send_signal(event) could also be done here.
# Allow time for the signal to be passed and the process to exit.
time.sleep(0.5)
if not proc.poll():
# Forcefully kill the process if we weren't able to signal it.
os.kill(proc.pid, signal.SIGINT)
self.fail("subprocess did not stop on {}".format(name))
@unittest.skip("subprocesses aren't inheriting CTRL+C property")
def test_CTRL_C_EVENT(self):
from ctypes import wintypes
import ctypes
# Make a NULL value by creating a pointer with no argument.
NULL = ctypes.POINTER(ctypes.c_int)()
SetConsoleCtrlHandler = ctypes.windll.kernel32.SetConsoleCtrlHandler
SetConsoleCtrlHandler.argtypes = (ctypes.POINTER(ctypes.c_int),
wintypes.BOOL)
SetConsoleCtrlHandler.restype = wintypes.BOOL
# Calling this with NULL and FALSE causes the calling process to
# handle CTRL+C, rather than ignore it. This property is inherited
# by subprocesses.
SetConsoleCtrlHandler(NULL, 0)
self._kill_with_event(signal.CTRL_C_EVENT, "CTRL_C_EVENT")
def test_CTRL_BREAK_EVENT(self):
self._kill_with_event(signal.CTRL_BREAK_EVENT, "CTRL_BREAK_EVENT")
def test_main():
test_support.run_unittest(
FileTests,
TemporaryFileTests,
StatAttributeTests,
EnvironTests,
WalkTests,
MakedirTests,
DevNullTests,
URandomTests,
Win32ErrorTests,
TestInvalidFD,
PosixUidGidTests,
Win32KillTests
)
if __name__ == "__main__":
test_main()
|
mit
| 1,971,982,842,538,172,700 | 4,887,564,786,879,425,000 | 37.054612 | 99 | 0.538361 | false |
bruderstein/PythonScript
|
PythonLib/full/unittest/test/testmock/testmock.py
|
1
|
71837
|
import copy
import re
import sys
import tempfile
from test.support import ALWAYS_EQ
import unittest
from unittest.test.testmock.support import is_instance
from unittest import mock
from unittest.mock import (
call, DEFAULT, patch, sentinel,
MagicMock, Mock, NonCallableMock,
NonCallableMagicMock, AsyncMock, _Call, _CallList,
create_autospec
)
class Iter(object):
def __init__(self):
self.thing = iter(['this', 'is', 'an', 'iter'])
def __iter__(self):
return self
def next(self):
return next(self.thing)
__next__ = next
class Something(object):
def meth(self, a, b, c, d=None): pass
@classmethod
def cmeth(cls, a, b, c, d=None): pass
@staticmethod
def smeth(a, b, c, d=None): pass
def something(a): pass
class MockTest(unittest.TestCase):
def test_all(self):
# if __all__ is badly defined then import * will raise an error
# We have to exec it because you can't import * inside a method
# in Python 3
exec("from unittest.mock import *")
def test_constructor(self):
mock = Mock()
self.assertFalse(mock.called, "called not initialised correctly")
self.assertEqual(mock.call_count, 0,
"call_count not initialised correctly")
self.assertTrue(is_instance(mock.return_value, Mock),
"return_value not initialised correctly")
self.assertEqual(mock.call_args, None,
"call_args not initialised correctly")
self.assertEqual(mock.call_args_list, [],
"call_args_list not initialised correctly")
self.assertEqual(mock.method_calls, [],
"method_calls not initialised correctly")
# Can't use hasattr for this test as it always returns True on a mock
self.assertNotIn('_items', mock.__dict__,
"default mock should not have '_items' attribute")
self.assertIsNone(mock._mock_parent,
"parent not initialised correctly")
self.assertIsNone(mock._mock_methods,
"methods not initialised correctly")
self.assertEqual(mock._mock_children, {},
"children not initialised incorrectly")
def test_return_value_in_constructor(self):
mock = Mock(return_value=None)
self.assertIsNone(mock.return_value,
"return value in constructor not honoured")
def test_change_return_value_via_delegate(self):
def f(): pass
mock = create_autospec(f)
mock.mock.return_value = 1
self.assertEqual(mock(), 1)
def test_change_side_effect_via_delegate(self):
def f(): pass
mock = create_autospec(f)
mock.mock.side_effect = TypeError()
with self.assertRaises(TypeError):
mock()
def test_repr(self):
mock = Mock(name='foo')
self.assertIn('foo', repr(mock))
self.assertIn("'%s'" % id(mock), repr(mock))
mocks = [(Mock(), 'mock'), (Mock(name='bar'), 'bar')]
for mock, name in mocks:
self.assertIn('%s.bar' % name, repr(mock.bar))
self.assertIn('%s.foo()' % name, repr(mock.foo()))
self.assertIn('%s.foo().bing' % name, repr(mock.foo().bing))
self.assertIn('%s()' % name, repr(mock()))
self.assertIn('%s()()' % name, repr(mock()()))
self.assertIn('%s()().foo.bar.baz().bing' % name,
repr(mock()().foo.bar.baz().bing))
def test_repr_with_spec(self):
class X(object):
pass
mock = Mock(spec=X)
self.assertIn(" spec='X' ", repr(mock))
mock = Mock(spec=X())
self.assertIn(" spec='X' ", repr(mock))
mock = Mock(spec_set=X)
self.assertIn(" spec_set='X' ", repr(mock))
mock = Mock(spec_set=X())
self.assertIn(" spec_set='X' ", repr(mock))
mock = Mock(spec=X, name='foo')
self.assertIn(" spec='X' ", repr(mock))
self.assertIn(" name='foo' ", repr(mock))
mock = Mock(name='foo')
self.assertNotIn("spec", repr(mock))
mock = Mock()
self.assertNotIn("spec", repr(mock))
mock = Mock(spec=['foo'])
self.assertNotIn("spec", repr(mock))
def test_side_effect(self):
mock = Mock()
def effect(*args, **kwargs):
raise SystemError('kablooie')
mock.side_effect = effect
self.assertRaises(SystemError, mock, 1, 2, fish=3)
mock.assert_called_with(1, 2, fish=3)
results = [1, 2, 3]
def effect():
return results.pop()
mock.side_effect = effect
self.assertEqual([mock(), mock(), mock()], [3, 2, 1],
"side effect not used correctly")
mock = Mock(side_effect=sentinel.SideEffect)
self.assertEqual(mock.side_effect, sentinel.SideEffect,
"side effect in constructor not used")
def side_effect():
return DEFAULT
mock = Mock(side_effect=side_effect, return_value=sentinel.RETURN)
self.assertEqual(mock(), sentinel.RETURN)
def test_autospec_side_effect(self):
# Test for issue17826
results = [1, 2, 3]
def effect():
return results.pop()
def f(): pass
mock = create_autospec(f)
mock.side_effect = [1, 2, 3]
self.assertEqual([mock(), mock(), mock()], [1, 2, 3],
"side effect not used correctly in create_autospec")
# Test where side effect is a callable
results = [1, 2, 3]
mock = create_autospec(f)
mock.side_effect = effect
self.assertEqual([mock(), mock(), mock()], [3, 2, 1],
"callable side effect not used correctly")
def test_autospec_side_effect_exception(self):
# Test for issue 23661
def f(): pass
mock = create_autospec(f)
mock.side_effect = ValueError('Bazinga!')
self.assertRaisesRegex(ValueError, 'Bazinga!', mock)
def test_reset_mock(self):
parent = Mock()
spec = ["something"]
mock = Mock(name="child", parent=parent, spec=spec)
mock(sentinel.Something, something=sentinel.SomethingElse)
something = mock.something
mock.something()
mock.side_effect = sentinel.SideEffect
return_value = mock.return_value
return_value()
mock.reset_mock()
self.assertEqual(mock._mock_name, "child",
"name incorrectly reset")
self.assertEqual(mock._mock_parent, parent,
"parent incorrectly reset")
self.assertEqual(mock._mock_methods, spec,
"methods incorrectly reset")
self.assertFalse(mock.called, "called not reset")
self.assertEqual(mock.call_count, 0, "call_count not reset")
self.assertEqual(mock.call_args, None, "call_args not reset")
self.assertEqual(mock.call_args_list, [], "call_args_list not reset")
self.assertEqual(mock.method_calls, [],
"method_calls not initialised correctly: %r != %r" %
(mock.method_calls, []))
self.assertEqual(mock.mock_calls, [])
self.assertEqual(mock.side_effect, sentinel.SideEffect,
"side_effect incorrectly reset")
self.assertEqual(mock.return_value, return_value,
"return_value incorrectly reset")
self.assertFalse(return_value.called, "return value mock not reset")
self.assertEqual(mock._mock_children, {'something': something},
"children reset incorrectly")
self.assertEqual(mock.something, something,
"children incorrectly cleared")
self.assertFalse(mock.something.called, "child not reset")
def test_reset_mock_recursion(self):
mock = Mock()
mock.return_value = mock
# used to cause recursion
mock.reset_mock()
def test_reset_mock_on_mock_open_issue_18622(self):
a = mock.mock_open()
a.reset_mock()
def test_call(self):
mock = Mock()
self.assertTrue(is_instance(mock.return_value, Mock),
"Default return_value should be a Mock")
result = mock()
self.assertEqual(mock(), result,
"different result from consecutive calls")
mock.reset_mock()
ret_val = mock(sentinel.Arg)
self.assertTrue(mock.called, "called not set")
self.assertEqual(mock.call_count, 1, "call_count incorrect")
self.assertEqual(mock.call_args, ((sentinel.Arg,), {}),
"call_args not set")
self.assertEqual(mock.call_args.args, (sentinel.Arg,),
"call_args not set")
self.assertEqual(mock.call_args.kwargs, {},
"call_args not set")
self.assertEqual(mock.call_args_list, [((sentinel.Arg,), {})],
"call_args_list not initialised correctly")
mock.return_value = sentinel.ReturnValue
ret_val = mock(sentinel.Arg, key=sentinel.KeyArg)
self.assertEqual(ret_val, sentinel.ReturnValue,
"incorrect return value")
self.assertEqual(mock.call_count, 2, "call_count incorrect")
self.assertEqual(mock.call_args,
((sentinel.Arg,), {'key': sentinel.KeyArg}),
"call_args not set")
self.assertEqual(mock.call_args_list, [
((sentinel.Arg,), {}),
((sentinel.Arg,), {'key': sentinel.KeyArg})
],
"call_args_list not set")
def test_call_args_comparison(self):
mock = Mock()
mock()
mock(sentinel.Arg)
mock(kw=sentinel.Kwarg)
mock(sentinel.Arg, kw=sentinel.Kwarg)
self.assertEqual(mock.call_args_list, [
(),
((sentinel.Arg,),),
({"kw": sentinel.Kwarg},),
((sentinel.Arg,), {"kw": sentinel.Kwarg})
])
self.assertEqual(mock.call_args,
((sentinel.Arg,), {"kw": sentinel.Kwarg}))
self.assertEqual(mock.call_args.args, (sentinel.Arg,))
self.assertEqual(mock.call_args.kwargs, {"kw": sentinel.Kwarg})
# Comparing call_args to a long sequence should not raise
# an exception. See issue 24857.
self.assertFalse(mock.call_args == "a long sequence")
def test_calls_equal_with_any(self):
# Check that equality and non-equality is consistent even when
# comparing with mock.ANY
mm = mock.MagicMock()
self.assertTrue(mm == mm)
self.assertFalse(mm != mm)
self.assertFalse(mm == mock.MagicMock())
self.assertTrue(mm != mock.MagicMock())
self.assertTrue(mm == mock.ANY)
self.assertFalse(mm != mock.ANY)
self.assertTrue(mock.ANY == mm)
self.assertFalse(mock.ANY != mm)
self.assertTrue(mm == ALWAYS_EQ)
self.assertFalse(mm != ALWAYS_EQ)
call1 = mock.call(mock.MagicMock())
call2 = mock.call(mock.ANY)
self.assertTrue(call1 == call2)
self.assertFalse(call1 != call2)
self.assertTrue(call2 == call1)
self.assertFalse(call2 != call1)
self.assertTrue(call1 == ALWAYS_EQ)
self.assertFalse(call1 != ALWAYS_EQ)
self.assertFalse(call1 == 1)
self.assertTrue(call1 != 1)
def test_assert_called_with(self):
mock = Mock()
mock()
# Will raise an exception if it fails
mock.assert_called_with()
self.assertRaises(AssertionError, mock.assert_called_with, 1)
mock.reset_mock()
self.assertRaises(AssertionError, mock.assert_called_with)
mock(1, 2, 3, a='fish', b='nothing')
mock.assert_called_with(1, 2, 3, a='fish', b='nothing')
def test_assert_called_with_any(self):
m = MagicMock()
m(MagicMock())
m.assert_called_with(mock.ANY)
def test_assert_called_with_function_spec(self):
def f(a, b, c, d=None): pass
mock = Mock(spec=f)
mock(1, b=2, c=3)
mock.assert_called_with(1, 2, 3)
mock.assert_called_with(a=1, b=2, c=3)
self.assertRaises(AssertionError, mock.assert_called_with,
1, b=3, c=2)
# Expected call doesn't match the spec's signature
with self.assertRaises(AssertionError) as cm:
mock.assert_called_with(e=8)
self.assertIsInstance(cm.exception.__cause__, TypeError)
def test_assert_called_with_method_spec(self):
def _check(mock):
mock(1, b=2, c=3)
mock.assert_called_with(1, 2, 3)
mock.assert_called_with(a=1, b=2, c=3)
self.assertRaises(AssertionError, mock.assert_called_with,
1, b=3, c=2)
mock = Mock(spec=Something().meth)
_check(mock)
mock = Mock(spec=Something.cmeth)
_check(mock)
mock = Mock(spec=Something().cmeth)
_check(mock)
mock = Mock(spec=Something.smeth)
_check(mock)
mock = Mock(spec=Something().smeth)
_check(mock)
def test_assert_called_exception_message(self):
msg = "Expected '{0}' to have been called"
with self.assertRaisesRegex(AssertionError, msg.format('mock')):
Mock().assert_called()
with self.assertRaisesRegex(AssertionError, msg.format('test_name')):
Mock(name="test_name").assert_called()
def test_assert_called_once_with(self):
mock = Mock()
mock()
# Will raise an exception if it fails
mock.assert_called_once_with()
mock()
self.assertRaises(AssertionError, mock.assert_called_once_with)
mock.reset_mock()
self.assertRaises(AssertionError, mock.assert_called_once_with)
mock('foo', 'bar', baz=2)
mock.assert_called_once_with('foo', 'bar', baz=2)
mock.reset_mock()
mock('foo', 'bar', baz=2)
self.assertRaises(
AssertionError,
lambda: mock.assert_called_once_with('bob', 'bar', baz=2)
)
def test_assert_called_once_with_call_list(self):
m = Mock()
m(1)
m(2)
self.assertRaisesRegex(AssertionError,
re.escape("Calls: [call(1), call(2)]"),
lambda: m.assert_called_once_with(2))
def test_assert_called_once_with_function_spec(self):
def f(a, b, c, d=None): pass
mock = Mock(spec=f)
mock(1, b=2, c=3)
mock.assert_called_once_with(1, 2, 3)
mock.assert_called_once_with(a=1, b=2, c=3)
self.assertRaises(AssertionError, mock.assert_called_once_with,
1, b=3, c=2)
# Expected call doesn't match the spec's signature
with self.assertRaises(AssertionError) as cm:
mock.assert_called_once_with(e=8)
self.assertIsInstance(cm.exception.__cause__, TypeError)
# Mock called more than once => always fails
mock(4, 5, 6)
self.assertRaises(AssertionError, mock.assert_called_once_with,
1, 2, 3)
self.assertRaises(AssertionError, mock.assert_called_once_with,
4, 5, 6)
def test_attribute_access_returns_mocks(self):
mock = Mock()
something = mock.something
self.assertTrue(is_instance(something, Mock), "attribute isn't a mock")
self.assertEqual(mock.something, something,
"different attributes returned for same name")
# Usage example
mock = Mock()
mock.something.return_value = 3
self.assertEqual(mock.something(), 3, "method returned wrong value")
self.assertTrue(mock.something.called,
"method didn't record being called")
def test_attributes_have_name_and_parent_set(self):
mock = Mock()
something = mock.something
self.assertEqual(something._mock_name, "something",
"attribute name not set correctly")
self.assertEqual(something._mock_parent, mock,
"attribute parent not set correctly")
def test_method_calls_recorded(self):
mock = Mock()
mock.something(3, fish=None)
mock.something_else.something(6, cake=sentinel.Cake)
self.assertEqual(mock.something_else.method_calls,
[("something", (6,), {'cake': sentinel.Cake})],
"method calls not recorded correctly")
self.assertEqual(mock.method_calls, [
("something", (3,), {'fish': None}),
("something_else.something", (6,), {'cake': sentinel.Cake})
],
"method calls not recorded correctly")
def test_method_calls_compare_easily(self):
mock = Mock()
mock.something()
self.assertEqual(mock.method_calls, [('something',)])
self.assertEqual(mock.method_calls, [('something', (), {})])
mock = Mock()
mock.something('different')
self.assertEqual(mock.method_calls, [('something', ('different',))])
self.assertEqual(mock.method_calls,
[('something', ('different',), {})])
mock = Mock()
mock.something(x=1)
self.assertEqual(mock.method_calls, [('something', {'x': 1})])
self.assertEqual(mock.method_calls, [('something', (), {'x': 1})])
mock = Mock()
mock.something('different', some='more')
self.assertEqual(mock.method_calls, [
('something', ('different',), {'some': 'more'})
])
def test_only_allowed_methods_exist(self):
for spec in ['something'], ('something',):
for arg in 'spec', 'spec_set':
mock = Mock(**{arg: spec})
# this should be allowed
mock.something
self.assertRaisesRegex(
AttributeError,
"Mock object has no attribute 'something_else'",
getattr, mock, 'something_else'
)
def test_from_spec(self):
class Something(object):
x = 3
__something__ = None
def y(self): pass
def test_attributes(mock):
# should work
mock.x
mock.y
mock.__something__
self.assertRaisesRegex(
AttributeError,
"Mock object has no attribute 'z'",
getattr, mock, 'z'
)
self.assertRaisesRegex(
AttributeError,
"Mock object has no attribute '__foobar__'",
getattr, mock, '__foobar__'
)
test_attributes(Mock(spec=Something))
test_attributes(Mock(spec=Something()))
def test_wraps_calls(self):
real = Mock()
mock = Mock(wraps=real)
self.assertEqual(mock(), real())
real.reset_mock()
mock(1, 2, fish=3)
real.assert_called_with(1, 2, fish=3)
def test_wraps_prevents_automatic_creation_of_mocks(self):
class Real(object):
pass
real = Real()
mock = Mock(wraps=real)
self.assertRaises(AttributeError, lambda: mock.new_attr())
def test_wraps_call_with_nondefault_return_value(self):
real = Mock()
mock = Mock(wraps=real)
mock.return_value = 3
self.assertEqual(mock(), 3)
self.assertFalse(real.called)
def test_wraps_attributes(self):
class Real(object):
attribute = Mock()
real = Real()
mock = Mock(wraps=real)
self.assertEqual(mock.attribute(), real.attribute())
self.assertRaises(AttributeError, lambda: mock.fish)
self.assertNotEqual(mock.attribute, real.attribute)
result = mock.attribute.frog(1, 2, fish=3)
Real.attribute.frog.assert_called_with(1, 2, fish=3)
self.assertEqual(result, Real.attribute.frog())
def test_customize_wrapped_object_with_side_effect_iterable_with_default(self):
class Real(object):
def method(self):
return sentinel.ORIGINAL_VALUE
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = [sentinel.VALUE1, DEFAULT]
self.assertEqual(mock.method(), sentinel.VALUE1)
self.assertEqual(mock.method(), sentinel.ORIGINAL_VALUE)
self.assertRaises(StopIteration, mock.method)
def test_customize_wrapped_object_with_side_effect_iterable(self):
class Real(object):
def method(self): pass
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = [sentinel.VALUE1, sentinel.VALUE2]
self.assertEqual(mock.method(), sentinel.VALUE1)
self.assertEqual(mock.method(), sentinel.VALUE2)
self.assertRaises(StopIteration, mock.method)
def test_customize_wrapped_object_with_side_effect_exception(self):
class Real(object):
def method(self): pass
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = RuntimeError
self.assertRaises(RuntimeError, mock.method)
def test_customize_wrapped_object_with_side_effect_function(self):
class Real(object):
def method(self): pass
def side_effect():
return sentinel.VALUE
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = side_effect
self.assertEqual(mock.method(), sentinel.VALUE)
def test_customize_wrapped_object_with_return_value(self):
class Real(object):
def method(self): pass
real = Real()
mock = Mock(wraps=real)
mock.method.return_value = sentinel.VALUE
self.assertEqual(mock.method(), sentinel.VALUE)
def test_customize_wrapped_object_with_return_value_and_side_effect(self):
# side_effect should always take precedence over return_value.
class Real(object):
def method(self): pass
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = [sentinel.VALUE1, sentinel.VALUE2]
mock.method.return_value = sentinel.WRONG_VALUE
self.assertEqual(mock.method(), sentinel.VALUE1)
self.assertEqual(mock.method(), sentinel.VALUE2)
self.assertRaises(StopIteration, mock.method)
def test_customize_wrapped_object_with_return_value_and_side_effect2(self):
# side_effect can return DEFAULT to default to return_value
class Real(object):
def method(self): pass
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = lambda: DEFAULT
mock.method.return_value = sentinel.VALUE
self.assertEqual(mock.method(), sentinel.VALUE)
def test_customize_wrapped_object_with_return_value_and_side_effect_default(self):
class Real(object):
def method(self): pass
real = Real()
mock = Mock(wraps=real)
mock.method.side_effect = [sentinel.VALUE1, DEFAULT]
mock.method.return_value = sentinel.RETURN
self.assertEqual(mock.method(), sentinel.VALUE1)
self.assertEqual(mock.method(), sentinel.RETURN)
self.assertRaises(StopIteration, mock.method)
def test_magic_method_wraps_dict(self):
# bpo-25597: MagicMock with wrap doesn't call wrapped object's
# method for magic methods with default values.
data = {'foo': 'bar'}
wrapped_dict = MagicMock(wraps=data)
self.assertEqual(wrapped_dict.get('foo'), 'bar')
# Accessing key gives a MagicMock
self.assertIsInstance(wrapped_dict['foo'], MagicMock)
# __contains__ method has a default value of False
self.assertFalse('foo' in wrapped_dict)
# return_value is non-sentinel and takes precedence over wrapped value.
wrapped_dict.get.return_value = 'return_value'
self.assertEqual(wrapped_dict.get('foo'), 'return_value')
# return_value is sentinel and hence wrapped value is returned.
wrapped_dict.get.return_value = sentinel.DEFAULT
self.assertEqual(wrapped_dict.get('foo'), 'bar')
self.assertEqual(wrapped_dict.get('baz'), None)
self.assertIsInstance(wrapped_dict['baz'], MagicMock)
self.assertFalse('bar' in wrapped_dict)
data['baz'] = 'spam'
self.assertEqual(wrapped_dict.get('baz'), 'spam')
self.assertIsInstance(wrapped_dict['baz'], MagicMock)
self.assertFalse('bar' in wrapped_dict)
del data['baz']
self.assertEqual(wrapped_dict.get('baz'), None)
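    # Sketch of the behaviour checked above (values are hypothetical): for
    # a mock created with wraps=..., a child's return_value only delegates
    # to the wrapped object while it is still the DEFAULT sentinel.
    #
    #   wrapped = MagicMock(wraps={'k': 'v'})
    #   wrapped.get('k')                       # -> 'v' (delegates)
    #   wrapped.get.return_value = 'override'
    #   wrapped.get('k')                       # -> 'override'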
def test_magic_method_wraps_class(self):
class Foo:
def __getitem__(self, index):
return index
def __custom_method__(self):
return "foo"
klass = MagicMock(wraps=Foo)
obj = klass()
self.assertEqual(obj.__getitem__(2), 2)
self.assertEqual(obj[2], 2)
self.assertEqual(obj.__custom_method__(), "foo")
def test_exceptional_side_effect(self):
mock = Mock(side_effect=AttributeError)
self.assertRaises(AttributeError, mock)
mock = Mock(side_effect=AttributeError('foo'))
self.assertRaises(AttributeError, mock)
def test_baseexceptional_side_effect(self):
mock = Mock(side_effect=KeyboardInterrupt)
self.assertRaises(KeyboardInterrupt, mock)
mock = Mock(side_effect=KeyboardInterrupt('foo'))
self.assertRaises(KeyboardInterrupt, mock)
def test_assert_called_with_message(self):
mock = Mock()
self.assertRaisesRegex(AssertionError, 'not called',
mock.assert_called_with)
def test_assert_called_once_with_message(self):
mock = Mock(name='geoffrey')
self.assertRaisesRegex(AssertionError,
r"Expected 'geoffrey' to be called once\.",
mock.assert_called_once_with)
def test__name__(self):
mock = Mock()
self.assertRaises(AttributeError, lambda: mock.__name__)
mock.__name__ = 'foo'
self.assertEqual(mock.__name__, 'foo')
def test_spec_list_subclass(self):
class Sub(list):
pass
mock = Mock(spec=Sub(['foo']))
mock.append(3)
mock.append.assert_called_with(3)
self.assertRaises(AttributeError, getattr, mock, 'foo')
def test_spec_class(self):
class X(object):
pass
mock = Mock(spec=X)
self.assertIsInstance(mock, X)
mock = Mock(spec=X())
self.assertIsInstance(mock, X)
self.assertIs(mock.__class__, X)
self.assertEqual(Mock().__class__.__name__, 'Mock')
mock = Mock(spec_set=X)
self.assertIsInstance(mock, X)
mock = Mock(spec_set=X())
self.assertIsInstance(mock, X)
def test_spec_class_no_object_base(self):
class X:
pass
mock = Mock(spec=X)
self.assertIsInstance(mock, X)
mock = Mock(spec=X())
self.assertIsInstance(mock, X)
self.assertIs(mock.__class__, X)
self.assertEqual(Mock().__class__.__name__, 'Mock')
mock = Mock(spec_set=X)
self.assertIsInstance(mock, X)
mock = Mock(spec_set=X())
self.assertIsInstance(mock, X)
def test_setting_attribute_with_spec_set(self):
class X(object):
y = 3
mock = Mock(spec=X)
mock.x = 'foo'
mock = Mock(spec_set=X)
def set_attr():
mock.x = 'foo'
mock.y = 'foo'
self.assertRaises(AttributeError, set_attr)
def test_copy(self):
current = sys.getrecursionlimit()
self.addCleanup(sys.setrecursionlimit, current)
# can't use sys.maxint as this doesn't exist in Python 3
sys.setrecursionlimit(int(10e8))
# this segfaults without the fix in place
copy.copy(Mock())
def test_subclass_with_properties(self):
class SubClass(Mock):
def _get(self):
return 3
def _set(self, value):
raise NameError('strange error')
some_attribute = property(_get, _set)
s = SubClass(spec_set=SubClass)
self.assertEqual(s.some_attribute, 3)
def test():
s.some_attribute = 3
self.assertRaises(NameError, test)
def test():
s.foo = 'bar'
self.assertRaises(AttributeError, test)
def test_setting_call(self):
mock = Mock()
def __call__(self, a):
self._increment_mock_call(a)
return self._mock_call(a)
type(mock).__call__ = __call__
mock('one')
mock.assert_called_with('one')
self.assertRaises(TypeError, mock, 'one', 'two')
def test_dir(self):
mock = Mock()
attrs = set(dir(mock))
type_attrs = set([m for m in dir(Mock) if not m.startswith('_')])
# all public attributes from the type are included
self.assertEqual(set(), type_attrs - attrs)
# creates these attributes
mock.a, mock.b
self.assertIn('a', dir(mock))
self.assertIn('b', dir(mock))
# instance attributes
mock.c = mock.d = None
self.assertIn('c', dir(mock))
self.assertIn('d', dir(mock))
# magic methods
mock.__iter__ = lambda s: iter([])
self.assertIn('__iter__', dir(mock))
def test_dir_from_spec(self):
mock = Mock(spec=unittest.TestCase)
testcase_attrs = set(dir(unittest.TestCase))
attrs = set(dir(mock))
# all attributes from the spec are included
self.assertEqual(set(), testcase_attrs - attrs)
        # an attribute set on the instance appears only once in dir()
mock.version = 3
self.assertEqual(dir(mock).count('version'), 1)
def test_filter_dir(self):
patcher = patch.object(mock, 'FILTER_DIR', False)
patcher.start()
try:
attrs = set(dir(Mock()))
type_attrs = set(dir(Mock))
# ALL attributes from the type are included
self.assertEqual(set(), type_attrs - attrs)
finally:
patcher.stop()
def test_dir_does_not_include_deleted_attributes(self):
mock = Mock()
mock.child.return_value = 1
self.assertIn('child', dir(mock))
del mock.child
self.assertNotIn('child', dir(mock))
def test_configure_mock(self):
mock = Mock(foo='bar')
self.assertEqual(mock.foo, 'bar')
mock = MagicMock(foo='bar')
self.assertEqual(mock.foo, 'bar')
kwargs = {'side_effect': KeyError, 'foo.bar.return_value': 33,
'foo': MagicMock()}
mock = Mock(**kwargs)
self.assertRaises(KeyError, mock)
self.assertEqual(mock.foo.bar(), 33)
self.assertIsInstance(mock.foo, MagicMock)
mock = Mock()
mock.configure_mock(**kwargs)
self.assertRaises(KeyError, mock)
self.assertEqual(mock.foo.bar(), 33)
self.assertIsInstance(mock.foo, MagicMock)
def assertRaisesWithMsg(self, exception, message, func, *args, **kwargs):
# needed because assertRaisesRegex doesn't work easily with newlines
with self.assertRaises(exception) as context:
func(*args, **kwargs)
msg = str(context.exception)
self.assertEqual(msg, message)
def test_assert_called_with_failure_message(self):
mock = NonCallableMock()
actual = 'not called.'
expected = "mock(1, '2', 3, bar='foo')"
message = 'expected call not found.\nExpected: %s\nActual: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual),
mock.assert_called_with, 1, '2', 3, bar='foo'
)
mock.foo(1, '2', 3, foo='foo')
asserters = [
mock.foo.assert_called_with, mock.foo.assert_called_once_with
]
for meth in asserters:
actual = "foo(1, '2', 3, foo='foo')"
expected = "foo(1, '2', 3, bar='foo')"
message = 'expected call not found.\nExpected: %s\nActual: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual),
meth, 1, '2', 3, bar='foo'
)
# just kwargs
for meth in asserters:
actual = "foo(1, '2', 3, foo='foo')"
expected = "foo(bar='foo')"
message = 'expected call not found.\nExpected: %s\nActual: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual),
meth, bar='foo'
)
# just args
for meth in asserters:
actual = "foo(1, '2', 3, foo='foo')"
expected = "foo(1, 2, 3)"
message = 'expected call not found.\nExpected: %s\nActual: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual),
meth, 1, 2, 3
)
# empty
for meth in asserters:
actual = "foo(1, '2', 3, foo='foo')"
expected = "foo()"
message = 'expected call not found.\nExpected: %s\nActual: %s'
self.assertRaisesWithMsg(
AssertionError, message % (expected, actual), meth
)
def test_mock_calls(self):
mock = MagicMock()
# need to do this because MagicMock.mock_calls used to just return
# a MagicMock which also returned a MagicMock when __eq__ was called
self.assertIs(mock.mock_calls == [], True)
mock = MagicMock()
mock()
expected = [('', (), {})]
self.assertEqual(mock.mock_calls, expected)
mock.foo()
expected.append(call.foo())
self.assertEqual(mock.mock_calls, expected)
# intermediate mock_calls work too
self.assertEqual(mock.foo.mock_calls, [('', (), {})])
mock = MagicMock()
mock().foo(1, 2, 3, a=4, b=5)
expected = [
('', (), {}), ('().foo', (1, 2, 3), dict(a=4, b=5))
]
self.assertEqual(mock.mock_calls, expected)
self.assertEqual(mock.return_value.foo.mock_calls,
[('', (1, 2, 3), dict(a=4, b=5))])
self.assertEqual(mock.return_value.mock_calls,
[('foo', (1, 2, 3), dict(a=4, b=5))])
mock = MagicMock()
mock().foo.bar().baz()
expected = [
('', (), {}), ('().foo.bar', (), {}),
('().foo.bar().baz', (), {})
]
self.assertEqual(mock.mock_calls, expected)
self.assertEqual(mock().mock_calls,
call.foo.bar().baz().call_list())
for kwargs in dict(), dict(name='bar'):
mock = MagicMock(**kwargs)
int(mock.foo)
expected = [('foo.__int__', (), {})]
self.assertEqual(mock.mock_calls, expected)
mock = MagicMock(**kwargs)
mock.a()()
expected = [('a', (), {}), ('a()', (), {})]
self.assertEqual(mock.mock_calls, expected)
self.assertEqual(mock.a().mock_calls, [call()])
mock = MagicMock(**kwargs)
mock(1)(2)(3)
self.assertEqual(mock.mock_calls, call(1)(2)(3).call_list())
self.assertEqual(mock().mock_calls, call(2)(3).call_list())
self.assertEqual(mock()().mock_calls, call(3).call_list())
mock = MagicMock(**kwargs)
mock(1)(2)(3).a.b.c(4)
self.assertEqual(mock.mock_calls,
call(1)(2)(3).a.b.c(4).call_list())
self.assertEqual(mock().mock_calls,
call(2)(3).a.b.c(4).call_list())
self.assertEqual(mock()().mock_calls,
call(3).a.b.c(4).call_list())
mock = MagicMock(**kwargs)
int(mock().foo.bar().baz())
last_call = ('().foo.bar().baz().__int__', (), {})
self.assertEqual(mock.mock_calls[-1], last_call)
self.assertEqual(mock().mock_calls,
call.foo.bar().baz().__int__().call_list())
self.assertEqual(mock().foo.bar().mock_calls,
call.baz().__int__().call_list())
self.assertEqual(mock().foo.bar().baz.mock_calls,
call().__int__().call_list())
def test_child_mock_call_equal(self):
m = Mock()
result = m()
result.wibble()
# parent looks like this:
self.assertEqual(m.mock_calls, [call(), call().wibble()])
# but child should look like this:
self.assertEqual(result.mock_calls, [call.wibble()])
def test_mock_call_not_equal_leaf(self):
m = Mock()
m.foo().something()
self.assertNotEqual(m.mock_calls[1], call.foo().different())
self.assertEqual(m.mock_calls[0], call.foo())
def test_mock_call_not_equal_non_leaf(self):
m = Mock()
m.foo().bar()
self.assertNotEqual(m.mock_calls[1], call.baz().bar())
self.assertNotEqual(m.mock_calls[0], call.baz())
def test_mock_call_not_equal_non_leaf_params_different(self):
m = Mock()
m.foo(x=1).bar()
        # This isn't ideal, but there's no way to fix it without breaking
        # backwards compatibility:
self.assertEqual(m.mock_calls[1], call.foo(x=2).bar())
def test_mock_call_not_equal_non_leaf_attr(self):
m = Mock()
m.foo.bar()
self.assertNotEqual(m.mock_calls[0], call.baz.bar())
def test_mock_call_not_equal_non_leaf_call_versus_attr(self):
m = Mock()
m.foo.bar()
self.assertNotEqual(m.mock_calls[0], call.foo().bar())
def test_mock_call_repr(self):
m = Mock()
m.foo().bar().baz.bob()
self.assertEqual(repr(m.mock_calls[0]), 'call.foo()')
self.assertEqual(repr(m.mock_calls[1]), 'call.foo().bar()')
self.assertEqual(repr(m.mock_calls[2]), 'call.foo().bar().baz.bob()')
def test_mock_call_repr_loop(self):
m = Mock()
m.foo = m
repr(m.foo())
self.assertRegex(repr(m.foo()), r"<Mock name='mock\(\)' id='\d+'>")
def test_mock_calls_contains(self):
m = Mock()
self.assertFalse([call()] in m.mock_calls)
def test_subclassing(self):
class Subclass(Mock):
pass
mock = Subclass()
self.assertIsInstance(mock.foo, Subclass)
self.assertIsInstance(mock(), Subclass)
class Subclass(Mock):
def _get_child_mock(self, **kwargs):
return Mock(**kwargs)
mock = Subclass()
self.assertNotIsInstance(mock.foo, Subclass)
self.assertNotIsInstance(mock(), Subclass)
def test_arg_lists(self):
mocks = [
Mock(),
MagicMock(),
NonCallableMock(),
NonCallableMagicMock()
]
def assert_attrs(mock):
names = 'call_args_list', 'method_calls', 'mock_calls'
for name in names:
attr = getattr(mock, name)
self.assertIsInstance(attr, _CallList)
self.assertIsInstance(attr, list)
self.assertEqual(attr, [])
for mock in mocks:
assert_attrs(mock)
if callable(mock):
mock()
mock(1, 2)
mock(a=3)
mock.reset_mock()
assert_attrs(mock)
mock.foo()
mock.foo.bar(1, a=3)
mock.foo(1).bar().baz(3)
mock.reset_mock()
assert_attrs(mock)
def test_call_args_two_tuple(self):
mock = Mock()
mock(1, a=3)
mock(2, b=4)
self.assertEqual(len(mock.call_args), 2)
self.assertEqual(mock.call_args.args, (2,))
self.assertEqual(mock.call_args.kwargs, dict(b=4))
expected_list = [((1,), dict(a=3)), ((2,), dict(b=4))]
for expected, call_args in zip(expected_list, mock.call_args_list):
self.assertEqual(len(call_args), 2)
self.assertEqual(expected[0], call_args[0])
self.assertEqual(expected[1], call_args[1])
def test_side_effect_iterator(self):
mock = Mock(side_effect=iter([1, 2, 3]))
self.assertEqual([mock(), mock(), mock()], [1, 2, 3])
self.assertRaises(StopIteration, mock)
mock = MagicMock(side_effect=['a', 'b', 'c'])
self.assertEqual([mock(), mock(), mock()], ['a', 'b', 'c'])
self.assertRaises(StopIteration, mock)
mock = Mock(side_effect='ghi')
self.assertEqual([mock(), mock(), mock()], ['g', 'h', 'i'])
self.assertRaises(StopIteration, mock)
class Foo(object):
pass
mock = MagicMock(side_effect=Foo)
self.assertIsInstance(mock(), Foo)
mock = Mock(side_effect=Iter())
self.assertEqual([mock(), mock(), mock(), mock()],
['this', 'is', 'an', 'iter'])
self.assertRaises(StopIteration, mock)
def test_side_effect_iterator_exceptions(self):
for Klass in Mock, MagicMock:
iterable = (ValueError, 3, KeyError, 6)
m = Klass(side_effect=iterable)
self.assertRaises(ValueError, m)
self.assertEqual(m(), 3)
self.assertRaises(KeyError, m)
self.assertEqual(m(), 6)
def test_side_effect_setting_iterator(self):
mock = Mock()
mock.side_effect = iter([1, 2, 3])
self.assertEqual([mock(), mock(), mock()], [1, 2, 3])
self.assertRaises(StopIteration, mock)
side_effect = mock.side_effect
self.assertIsInstance(side_effect, type(iter([])))
mock.side_effect = ['a', 'b', 'c']
self.assertEqual([mock(), mock(), mock()], ['a', 'b', 'c'])
self.assertRaises(StopIteration, mock)
side_effect = mock.side_effect
self.assertIsInstance(side_effect, type(iter([])))
this_iter = Iter()
mock.side_effect = this_iter
self.assertEqual([mock(), mock(), mock(), mock()],
['this', 'is', 'an', 'iter'])
self.assertRaises(StopIteration, mock)
self.assertIs(mock.side_effect, this_iter)
def test_side_effect_iterator_default(self):
mock = Mock(return_value=2)
mock.side_effect = iter([1, DEFAULT])
self.assertEqual([mock(), mock()], [1, 2])
def test_assert_has_calls_any_order(self):
mock = Mock()
mock(1, 2)
mock(a=3)
mock(3, 4)
mock(b=6)
mock(b=6)
kalls = [
call(1, 2), ({'a': 3},),
((3, 4),), ((), {'a': 3}),
('', (1, 2)), ('', {'a': 3}),
('', (1, 2), {}), ('', (), {'a': 3})
]
for kall in kalls:
mock.assert_has_calls([kall], any_order=True)
for kall in call(1, '2'), call(b=3), call(), 3, None, 'foo':
self.assertRaises(
AssertionError, mock.assert_has_calls,
[kall], any_order=True
)
kall_lists = [
[call(1, 2), call(b=6)],
[call(3, 4), call(1, 2)],
[call(b=6), call(b=6)],
]
for kall_list in kall_lists:
mock.assert_has_calls(kall_list, any_order=True)
kall_lists = [
[call(b=6), call(b=6), call(b=6)],
[call(1, 2), call(1, 2)],
[call(3, 4), call(1, 2), call(5, 7)],
[call(b=6), call(3, 4), call(b=6), call(1, 2), call(b=6)],
]
for kall_list in kall_lists:
self.assertRaises(
AssertionError, mock.assert_has_calls,
kall_list, any_order=True
)
def test_assert_has_calls(self):
kalls1 = [
call(1, 2), ({'a': 3},),
((3, 4),), call(b=6),
('', (1,), {'b': 6}),
]
kalls2 = [call.foo(), call.bar(1)]
kalls2.extend(call.spam().baz(a=3).call_list())
kalls2.extend(call.bam(set(), foo={}).fish([1]).call_list())
mocks = []
for mock in Mock(), MagicMock():
mock(1, 2)
mock(a=3)
mock(3, 4)
mock(b=6)
mock(1, b=6)
mocks.append((mock, kalls1))
mock = Mock()
mock.foo()
mock.bar(1)
mock.spam().baz(a=3)
mock.bam(set(), foo={}).fish([1])
mocks.append((mock, kalls2))
for mock, kalls in mocks:
for i in range(len(kalls)):
for step in 1, 2, 3:
these = kalls[i:i+step]
mock.assert_has_calls(these)
if len(these) > 1:
self.assertRaises(
AssertionError,
mock.assert_has_calls,
list(reversed(these))
)
def test_assert_has_calls_nested_spec(self):
class Something:
def __init__(self): pass
def meth(self, a, b, c, d=None): pass
class Foo:
def __init__(self, a): pass
def meth1(self, a, b): pass
mock_class = create_autospec(Something)
for m in [mock_class, mock_class()]:
m.meth(1, 2, 3, d=1)
m.assert_has_calls([call.meth(1, 2, 3, d=1)])
m.assert_has_calls([call.meth(1, 2, 3, 1)])
mock_class.reset_mock()
for m in [mock_class, mock_class()]:
self.assertRaises(AssertionError, m.assert_has_calls, [call.Foo()])
m.Foo(1).meth1(1, 2)
m.assert_has_calls([call.Foo(1), call.Foo(1).meth1(1, 2)])
m.Foo.assert_has_calls([call(1), call().meth1(1, 2)])
mock_class.reset_mock()
invalid_calls = [call.meth(1),
call.non_existent(1),
call.Foo().non_existent(1),
call.Foo().meth(1, 2, 3, 4)]
for kall in invalid_calls:
self.assertRaises(AssertionError,
mock_class.assert_has_calls,
[kall]
)
def test_assert_has_calls_nested_without_spec(self):
m = MagicMock()
m().foo().bar().baz()
m.one().two().three()
calls = call.one().two().three().call_list()
m.assert_has_calls(calls)
def test_assert_has_calls_with_function_spec(self):
def f(a, b, c, d=None): pass
mock = Mock(spec=f)
mock(1, b=2, c=3)
mock(4, 5, c=6, d=7)
mock(10, 11, c=12)
calls = [
('', (1, 2, 3), {}),
('', (4, 5, 6), {'d': 7}),
((10, 11, 12), {}),
]
mock.assert_has_calls(calls)
mock.assert_has_calls(calls, any_order=True)
mock.assert_has_calls(calls[1:])
mock.assert_has_calls(calls[1:], any_order=True)
mock.assert_has_calls(calls[:-1])
mock.assert_has_calls(calls[:-1], any_order=True)
# Reversed order
calls = list(reversed(calls))
with self.assertRaises(AssertionError):
mock.assert_has_calls(calls)
mock.assert_has_calls(calls, any_order=True)
with self.assertRaises(AssertionError):
mock.assert_has_calls(calls[1:])
mock.assert_has_calls(calls[1:], any_order=True)
with self.assertRaises(AssertionError):
mock.assert_has_calls(calls[:-1])
mock.assert_has_calls(calls[:-1], any_order=True)
def test_assert_has_calls_not_matching_spec_error(self):
def f(x=None): pass
mock = Mock(spec=f)
mock(1)
with self.assertRaisesRegex(
AssertionError,
'^{}$'.format(
re.escape('Calls not found.\n'
'Expected: [call()]\n'
'Actual: [call(1)]'))) as cm:
mock.assert_has_calls([call()])
self.assertIsNone(cm.exception.__cause__)
with self.assertRaisesRegex(
AssertionError,
'^{}$'.format(
re.escape(
'Error processing expected calls.\n'
"Errors: [None, TypeError('too many positional arguments')]\n"
"Expected: [call(), call(1, 2)]\n"
'Actual: [call(1)]'))) as cm:
mock.assert_has_calls([call(), call(1, 2)])
self.assertIsInstance(cm.exception.__cause__, TypeError)
def test_assert_any_call(self):
mock = Mock()
mock(1, 2)
mock(a=3)
mock(1, b=6)
mock.assert_any_call(1, 2)
mock.assert_any_call(a=3)
mock.assert_any_call(1, b=6)
self.assertRaises(
AssertionError,
mock.assert_any_call
)
self.assertRaises(
AssertionError,
mock.assert_any_call,
1, 3
)
self.assertRaises(
AssertionError,
mock.assert_any_call,
a=4
)
def test_assert_any_call_with_function_spec(self):
def f(a, b, c, d=None): pass
mock = Mock(spec=f)
mock(1, b=2, c=3)
mock(4, 5, c=6, d=7)
mock.assert_any_call(1, 2, 3)
mock.assert_any_call(a=1, b=2, c=3)
mock.assert_any_call(4, 5, 6, 7)
mock.assert_any_call(a=4, b=5, c=6, d=7)
self.assertRaises(AssertionError, mock.assert_any_call,
1, b=3, c=2)
# Expected call doesn't match the spec's signature
with self.assertRaises(AssertionError) as cm:
mock.assert_any_call(e=8)
self.assertIsInstance(cm.exception.__cause__, TypeError)
def test_mock_calls_create_autospec(self):
def f(a, b): pass
obj = Iter()
obj.f = f
funcs = [
create_autospec(f),
create_autospec(obj).f
]
for func in funcs:
func(1, 2)
func(3, 4)
self.assertEqual(
func.mock_calls, [call(1, 2), call(3, 4)]
)
    # Issue 21222
def test_create_autospec_with_name(self):
m = mock.create_autospec(object(), name='sweet_func')
self.assertIn('sweet_func', repr(m))
    # Issue 23078
def test_create_autospec_classmethod_and_staticmethod(self):
class TestClass:
@classmethod
def class_method(cls): pass
@staticmethod
def static_method(): pass
for method in ('class_method', 'static_method'):
with self.subTest(method=method):
mock_method = mock.create_autospec(getattr(TestClass, method))
mock_method()
mock_method.assert_called_once_with()
self.assertRaises(TypeError, mock_method, 'extra_arg')
    # Issue 21238
def test_mock_unsafe(self):
m = Mock()
msg = "Attributes cannot start with 'assert' or 'assret'"
with self.assertRaisesRegex(AttributeError, msg):
m.assert_foo_call()
with self.assertRaisesRegex(AttributeError, msg):
m.assret_foo_call()
m = Mock(unsafe=True)
m.assert_foo_call()
m.assret_foo_call()
    # Issue 21262
def test_assert_not_called(self):
m = Mock()
m.hello.assert_not_called()
m.hello()
with self.assertRaises(AssertionError):
m.hello.assert_not_called()
def test_assert_not_called_message(self):
m = Mock()
m(1, 2)
self.assertRaisesRegex(AssertionError,
re.escape("Calls: [call(1, 2)]"),
m.assert_not_called)
def test_assert_called(self):
m = Mock()
with self.assertRaises(AssertionError):
m.hello.assert_called()
m.hello()
m.hello.assert_called()
m.hello()
m.hello.assert_called()
def test_assert_called_once(self):
m = Mock()
with self.assertRaises(AssertionError):
m.hello.assert_called_once()
m.hello()
m.hello.assert_called_once()
m.hello()
with self.assertRaises(AssertionError):
m.hello.assert_called_once()
def test_assert_called_once_message(self):
m = Mock()
m(1, 2)
m(3)
self.assertRaisesRegex(AssertionError,
re.escape("Calls: [call(1, 2), call(3)]"),
m.assert_called_once)
def test_assert_called_once_message_not_called(self):
m = Mock()
with self.assertRaises(AssertionError) as e:
m.assert_called_once()
self.assertNotIn("Calls:", str(e.exception))
    # Issue 37212: the printout of keyword args now preserves the original order
def test_ordered_call_signature(self):
m = Mock()
m.hello(name='hello', daddy='hero')
text = "call(name='hello', daddy='hero')"
self.assertEqual(repr(m.hello.call_args), text)
    # Issue 21270: overrides tuple methods for mock.call objects
def test_override_tuple_methods(self):
c = call.count()
        i = call.index(132, 'hello')
m = Mock()
m.count()
        m.index(132, 'hello')
self.assertEqual(m.method_calls[0], c)
self.assertEqual(m.method_calls[1], i)
def test_reset_return_sideeffect(self):
        m = Mock(return_value=10, side_effect=[2, 3])
m.reset_mock(return_value=True, side_effect=True)
self.assertIsInstance(m.return_value, Mock)
self.assertEqual(m.side_effect, None)
def test_reset_return(self):
        m = Mock(return_value=10, side_effect=[2, 3])
m.reset_mock(return_value=True)
self.assertIsInstance(m.return_value, Mock)
self.assertNotEqual(m.side_effect, None)
def test_reset_sideeffect(self):
m = Mock(return_value=10, side_effect=[2, 3])
m.reset_mock(side_effect=True)
self.assertEqual(m.return_value, 10)
self.assertEqual(m.side_effect, None)
def test_reset_return_with_children(self):
m = MagicMock(f=MagicMock(return_value=1))
self.assertEqual(m.f(), 1)
m.reset_mock(return_value=True)
self.assertNotEqual(m.f(), 1)
def test_reset_return_with_children_side_effect(self):
m = MagicMock(f=MagicMock(side_effect=[2, 3]))
self.assertNotEqual(m.f.side_effect, None)
m.reset_mock(side_effect=True)
self.assertEqual(m.f.side_effect, None)
def test_mock_add_spec(self):
class _One(object):
one = 1
class _Two(object):
two = 2
class Anything(object):
one = two = three = 'four'
klasses = [
Mock, MagicMock, NonCallableMock, NonCallableMagicMock
]
for Klass in list(klasses):
klasses.append(lambda K=Klass: K(spec=Anything))
klasses.append(lambda K=Klass: K(spec_set=Anything))
for Klass in klasses:
for kwargs in dict(), dict(spec_set=True):
mock = Klass()
                # no error
mock.one, mock.two, mock.three
for One, Two in [(_One, _Two), (['one'], ['two'])]:
for kwargs in dict(), dict(spec_set=True):
mock.mock_add_spec(One, **kwargs)
mock.one
self.assertRaises(
AttributeError, getattr, mock, 'two'
)
self.assertRaises(
AttributeError, getattr, mock, 'three'
)
if 'spec_set' in kwargs:
self.assertRaises(
AttributeError, setattr, mock, 'three', None
)
mock.mock_add_spec(Two, **kwargs)
self.assertRaises(
AttributeError, getattr, mock, 'one'
)
mock.two
self.assertRaises(
AttributeError, getattr, mock, 'three'
)
if 'spec_set' in kwargs:
self.assertRaises(
AttributeError, setattr, mock, 'three', None
)
# note that creating a mock, setting an instance attribute, and
# *then* setting a spec doesn't work. Not the intended use case
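    # Illustrative sketch of the caveat above (attribute names are
    # hypothetical); a pre-existing instance attribute typically survives
    # the new spec rather than being restricted by it:
    #
    #   m = Mock()
    #   m.extra = 3
    #   m.mock_add_spec(['one'])
    #   m.extra      # still 3; the spec does not remove it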
def test_mock_add_spec_magic_methods(self):
for Klass in MagicMock, NonCallableMagicMock:
mock = Klass()
int(mock)
mock.mock_add_spec(object)
self.assertRaises(TypeError, int, mock)
mock = Klass()
mock['foo']
            mock.__int__.return_value = 4
mock.mock_add_spec(int)
self.assertEqual(int(mock), 4)
self.assertRaises(TypeError, lambda: mock['foo'])
def test_adding_child_mock(self):
for Klass in (NonCallableMock, Mock, MagicMock, NonCallableMagicMock,
AsyncMock):
mock = Klass()
mock.foo = Mock()
mock.foo()
self.assertEqual(mock.method_calls, [call.foo()])
self.assertEqual(mock.mock_calls, [call.foo()])
mock = Klass()
mock.bar = Mock(name='name')
mock.bar()
self.assertEqual(mock.method_calls, [])
self.assertEqual(mock.mock_calls, [])
# mock with an existing _new_parent but no name
mock = Klass()
mock.baz = MagicMock()()
mock.baz()
self.assertEqual(mock.method_calls, [])
self.assertEqual(mock.mock_calls, [])
def test_adding_return_value_mock(self):
for Klass in Mock, MagicMock:
mock = Klass()
mock.return_value = MagicMock()
mock()()
self.assertEqual(mock.mock_calls, [call(), call()()])
def test_manager_mock(self):
class Foo(object):
one = 'one'
two = 'two'
manager = Mock()
p1 = patch.object(Foo, 'one')
p2 = patch.object(Foo, 'two')
mock_one = p1.start()
self.addCleanup(p1.stop)
mock_two = p2.start()
self.addCleanup(p2.stop)
manager.attach_mock(mock_one, 'one')
manager.attach_mock(mock_two, 'two')
Foo.two()
Foo.one()
self.assertEqual(manager.mock_calls, [call.two(), call.one()])
def test_magic_methods_mock_calls(self):
for Klass in Mock, MagicMock:
m = Klass()
m.__int__ = Mock(return_value=3)
m.__float__ = MagicMock(return_value=3.0)
int(m)
float(m)
self.assertEqual(m.mock_calls, [call.__int__(), call.__float__()])
self.assertEqual(m.method_calls, [])
def test_mock_open_reuse_issue_21750(self):
mocked_open = mock.mock_open(read_data='data')
f1 = mocked_open('a-name')
f1_data = f1.read()
f2 = mocked_open('another-name')
f2_data = f2.read()
self.assertEqual(f1_data, f2_data)
def test_mock_open_dunder_iter_issue(self):
        # Test that the __iter__ method generates the expected result and
        # consumes the iterator.
mocked_open = mock.mock_open(read_data='Remarkable\nNorwegian Blue')
f1 = mocked_open('a-name')
lines = [line for line in f1]
self.assertEqual(lines[0], 'Remarkable\n')
self.assertEqual(lines[1], 'Norwegian Blue')
self.assertEqual(list(f1), [])
def test_mock_open_using_next(self):
mocked_open = mock.mock_open(read_data='1st line\n2nd line\n3rd line')
f1 = mocked_open('a-name')
line1 = next(f1)
line2 = f1.__next__()
lines = [line for line in f1]
self.assertEqual(line1, '1st line\n')
self.assertEqual(line2, '2nd line\n')
self.assertEqual(lines[0], '3rd line')
self.assertEqual(list(f1), [])
with self.assertRaises(StopIteration):
next(f1)
def test_mock_open_next_with_readline_with_return_value(self):
mopen = mock.mock_open(read_data='foo\nbarn')
mopen.return_value.readline.return_value = 'abc'
self.assertEqual('abc', next(mopen()))
def test_mock_open_write(self):
        # Test that an exception raised by the file handle's write() propagates
mock_namedtemp = mock.mock_open(mock.MagicMock(name='JLV'))
with mock.patch('tempfile.NamedTemporaryFile', mock_namedtemp):
mock_filehandle = mock_namedtemp.return_value
mock_write = mock_filehandle.write
mock_write.side_effect = OSError('Test 2 Error')
def attempt():
tempfile.NamedTemporaryFile().write('asd')
self.assertRaises(OSError, attempt)
def test_mock_open_alter_readline(self):
mopen = mock.mock_open(read_data='foo\nbarn')
        mopen.return_value.readline.side_effect = lambda *args: 'abc'
first = mopen().readline()
second = mopen().readline()
self.assertEqual('abc', first)
self.assertEqual('abc', second)
def test_mock_open_after_eof(self):
# read, readline and readlines should work after end of file.
_open = mock.mock_open(read_data='foo')
h = _open('bar')
h.read()
self.assertEqual('', h.read())
self.assertEqual('', h.read())
self.assertEqual('', h.readline())
self.assertEqual('', h.readline())
self.assertEqual([], h.readlines())
self.assertEqual([], h.readlines())
def test_mock_parents(self):
for Klass in Mock, MagicMock:
m = Klass()
original_repr = repr(m)
m.return_value = m
self.assertIs(m(), m)
self.assertEqual(repr(m), original_repr)
m.reset_mock()
self.assertIs(m(), m)
self.assertEqual(repr(m), original_repr)
m = Klass()
m.b = m.a
self.assertIn("name='mock.a'", repr(m.b))
self.assertIn("name='mock.a'", repr(m.a))
m.reset_mock()
self.assertIn("name='mock.a'", repr(m.b))
self.assertIn("name='mock.a'", repr(m.a))
m = Klass()
original_repr = repr(m)
m.a = m()
m.a.return_value = m
self.assertEqual(repr(m), original_repr)
self.assertEqual(repr(m.a()), original_repr)
def test_attach_mock(self):
classes = Mock, MagicMock, NonCallableMagicMock, NonCallableMock
for Klass in classes:
for Klass2 in classes:
m = Klass()
m2 = Klass2(name='foo')
m.attach_mock(m2, 'bar')
self.assertIs(m.bar, m2)
self.assertIn("name='mock.bar'", repr(m2))
m.bar.baz(1)
self.assertEqual(m.mock_calls, [call.bar.baz(1)])
self.assertEqual(m.method_calls, [call.bar.baz(1)])
def test_attach_mock_return_value(self):
classes = Mock, MagicMock, NonCallableMagicMock, NonCallableMock
for Klass in Mock, MagicMock:
for Klass2 in classes:
m = Klass()
m2 = Klass2(name='foo')
m.attach_mock(m2, 'return_value')
self.assertIs(m(), m2)
self.assertIn("name='mock()'", repr(m2))
m2.foo()
self.assertEqual(m.mock_calls, call().foo().call_list())
def test_attach_mock_patch_autospec(self):
parent = Mock()
with mock.patch(f'{__name__}.something', autospec=True) as mock_func:
self.assertEqual(mock_func.mock._extract_mock_name(), 'something')
parent.attach_mock(mock_func, 'child')
parent.child(1)
something(2)
mock_func(3)
parent_calls = [call.child(1), call.child(2), call.child(3)]
child_calls = [call(1), call(2), call(3)]
self.assertEqual(parent.mock_calls, parent_calls)
self.assertEqual(parent.child.mock_calls, child_calls)
self.assertEqual(something.mock_calls, child_calls)
self.assertEqual(mock_func.mock_calls, child_calls)
self.assertIn('mock.child', repr(parent.child.mock))
self.assertEqual(mock_func.mock._extract_mock_name(), 'mock.child')
def test_attach_mock_patch_autospec_signature(self):
with mock.patch(f'{__name__}.Something.meth', autospec=True) as mocked:
manager = Mock()
manager.attach_mock(mocked, 'attach_meth')
obj = Something()
obj.meth(1, 2, 3, d=4)
manager.assert_has_calls([call.attach_meth(mock.ANY, 1, 2, 3, d=4)])
obj.meth.assert_has_calls([call(mock.ANY, 1, 2, 3, d=4)])
mocked.assert_has_calls([call(mock.ANY, 1, 2, 3, d=4)])
with mock.patch(f'{__name__}.something', autospec=True) as mocked:
manager = Mock()
manager.attach_mock(mocked, 'attach_func')
something(1)
manager.assert_has_calls([call.attach_func(1)])
something.assert_has_calls([call(1)])
mocked.assert_has_calls([call(1)])
with mock.patch(f'{__name__}.Something', autospec=True) as mocked:
manager = Mock()
manager.attach_mock(mocked, 'attach_obj')
obj = Something()
obj.meth(1, 2, 3, d=4)
manager.assert_has_calls([call.attach_obj(),
call.attach_obj().meth(1, 2, 3, d=4)])
obj.meth.assert_has_calls([call(1, 2, 3, d=4)])
mocked.assert_has_calls([call(), call().meth(1, 2, 3, d=4)])
def test_attribute_deletion(self):
for mock in (Mock(), MagicMock(), NonCallableMagicMock(),
NonCallableMock()):
self.assertTrue(hasattr(mock, 'm'))
del mock.m
self.assertFalse(hasattr(mock, 'm'))
del mock.f
self.assertFalse(hasattr(mock, 'f'))
self.assertRaises(AttributeError, getattr, mock, 'f')
def test_mock_does_not_raise_on_repeated_attribute_deletion(self):
        # bpo-20239: assigning and then deleting an attribute twice used to raise.
for mock in (Mock(), MagicMock(), NonCallableMagicMock(),
NonCallableMock()):
mock.foo = 3
self.assertTrue(hasattr(mock, 'foo'))
self.assertEqual(mock.foo, 3)
del mock.foo
self.assertFalse(hasattr(mock, 'foo'))
mock.foo = 4
self.assertTrue(hasattr(mock, 'foo'))
self.assertEqual(mock.foo, 4)
del mock.foo
self.assertFalse(hasattr(mock, 'foo'))
def test_mock_raises_when_deleting_nonexistent_attribute(self):
for mock in (Mock(), MagicMock(), NonCallableMagicMock(),
NonCallableMock()):
del mock.foo
with self.assertRaises(AttributeError):
del mock.foo
def test_reset_mock_does_not_raise_on_attr_deletion(self):
# bpo-31177: reset_mock should not raise AttributeError when attributes
# were deleted in a mock instance
mock = Mock()
mock.child = True
del mock.child
mock.reset_mock()
self.assertFalse(hasattr(mock, 'child'))
def test_class_assignable(self):
for mock in Mock(), MagicMock():
self.assertNotIsInstance(mock, int)
mock.__class__ = int
self.assertIsInstance(mock, int)
mock.foo
def test_name_attribute_of_call(self):
# bpo-35357: _Call should not disclose any attributes whose names
# may clash with popular ones (such as ".name")
self.assertIsNotNone(call.name)
self.assertEqual(type(call.name), _Call)
self.assertEqual(type(call.name().name), _Call)
def test_parent_attribute_of_call(self):
# bpo-35357: _Call should not disclose any attributes whose names
# may clash with popular ones (such as ".parent")
self.assertIsNotNone(call.parent)
self.assertEqual(type(call.parent), _Call)
self.assertEqual(type(call.parent().parent), _Call)
def test_parent_propagation_with_create_autospec(self):
def foo(a, b): pass
mock = Mock()
mock.child = create_autospec(foo)
mock.child(1, 2)
self.assertRaises(TypeError, mock.child, 1)
self.assertEqual(mock.mock_calls, [call.child(1, 2)])
self.assertIn('mock.child', repr(mock.child.mock))
def test_parent_propagation_with_autospec_attach_mock(self):
def foo(a, b): pass
parent = Mock()
parent.attach_mock(create_autospec(foo, name='bar'), 'child')
parent.child(1, 2)
self.assertRaises(TypeError, parent.child, 1)
self.assertEqual(parent.child.mock_calls, [call.child(1, 2)])
self.assertIn('mock.child', repr(parent.child.mock))
def test_isinstance_under_settrace(self):
        # bpo-36593: __class__ is not set for a class that has a __class__
        # property defined when it is used with sys.settrace(trace) set.
        # Delete the module to force a reimport with the tracing function
        # set, and restore the old reference later, since other tests depend
        # on unittest.mock.patch. In testpatch.PatchTest,
        # test_patch_dict_test_prefix and test_patch_test_prefix go out of
        # sync with the patched objects if the reference is not restored.
old_patch = unittest.mock.patch
# Directly using __setattr__ on unittest.mock causes current imported
# reference to be updated. Use a lambda so that during cleanup the
# re-imported new reference is updated.
self.addCleanup(lambda patch: setattr(unittest.mock, 'patch', patch),
old_patch)
with patch.dict('sys.modules'):
del sys.modules['unittest.mock']
# This trace will stop coverage being measured ;-)
def trace(frame, event, arg): # pragma: no cover
return trace
self.addCleanup(sys.settrace, sys.gettrace())
sys.settrace(trace)
from unittest.mock import (
Mock, MagicMock, NonCallableMock, NonCallableMagicMock
)
mocks = [
Mock, MagicMock, NonCallableMock, NonCallableMagicMock, AsyncMock
]
for mock in mocks:
obj = mock(spec=Something)
self.assertIsInstance(obj, Something)
def test_bool_not_called_when_passing_spec_arg(self):
class Something:
def __init__(self):
self.obj_with_bool_func = unittest.mock.MagicMock()
obj = Something()
        with unittest.mock.patch.object(obj, 'obj_with_bool_func', autospec=True):
            pass
self.assertEqual(obj.obj_with_bool_func.__bool__.call_count, 0)
if __name__ == '__main__':
unittest.main()
|
gpl-2.0
| -6,160,722,252,872,933,000 | 6,807,238,169,524,725,000 | 32.08936 | 98 | 0.547573 | false |
h3biomed/ansible
|
test/units/modules/network/fortimanager/test_fmgr_ha.py
|
38
|
7774
|
# Copyright 2018 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
from ansible.module_utils.network.fortimanager.fortimanager import FortiManagerHandler
import pytest
try:
from ansible.modules.network.fortimanager import fmgr_ha
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
def load_fixtures():
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures') + "/{filename}.json".format(
filename=os.path.splitext(os.path.basename(__file__))[0])
try:
with open(fixture_path, "r") as fixture_file:
fixture_data = json.load(fixture_file)
except IOError:
return []
return [fixture_data]
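# The fixture JSON consumed above is keyed by test name; a minimal sketch
# of the expected shape (field values are illustrative):
#
#   {
#     "fmgr_set_ha_mode": [
#       {"paramgram_used": {...}, "raw_response": {"status": {"code": 0}}}
#     ]
#   }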
@pytest.fixture(autouse=True)
def module_mock(mocker):
connection_class_mock = mocker.patch('ansible.module_utils.basic.AnsibleModule')
return connection_class_mock
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortimanager.fmgr_ha.Connection')
return connection_class_mock
@pytest.fixture(scope="function", params=load_fixtures())
def fixture_data(request):
func_name = request.function.__name__.replace("test_", "")
return request.param.get(func_name, None)
fmg_instance = FortiManagerHandler(connection_mock, module_mock)
def test_fmgr_set_ha_mode(fixture_data, mocker):
mocker.patch("ansible.module_utils.network.fortimanager.fortimanager.FortiManagerHandler.process_request",
side_effect=fixture_data)
# Fixture sets used:###########################
##################################################
# fmgr_ha_peer_sn: None
# fmgr_ha_hb_threshold: 10
# fmgr_ha_cluster_pw: fortinet
# fmgr_ha_peer_ipv6: None
# fmgr_ha_peer_status: None
# fmgr_ha_file_quota: 2048
# fmgr_ha_cluster_id: 2
# fmgr_ha_peer_ipv4: None
# fmgr_ha_hb_interval: 15
# fmgr_ha_mode: master
# mode: set
##################################################
##################################################
# fmgr_ha_peer_sn: None
# fmgr_ha_hb_threshold: 3
# fmgr_ha_cluster_pw: fortinet
# fmgr_ha_hb_interval: 5
# fmgr_ha_cluster_id: 2
# fmgr_ha_file_quota: 4096
# fmgr_ha_peer_status: None
# fmgr_ha_peer_ipv4: None
# fmgr_ha_peer_ipv6: None
# fmgr_ha_mode: slave
# mode: set
##################################################
##################################################
# fmgr_ha_peer_sn: FMG-VMTM18001881
# fmgr_ha_hb_threshold: 3
# fmgr_ha_cluster_pw: fortinet
# fmgr_ha_peer_ipv6: None
# fmgr_ha_peer_status: enable
# fmgr_ha_file_quota: 4096
# fmgr_ha_cluster_id: 2
# fmgr_ha_peer_ipv4: 10.7.220.35
# fmgr_ha_hb_interval: 5
# fmgr_ha_mode: slave
# mode: set
##################################################
##################################################
# fmgr_ha_file_quota: 4096
# fmgr_ha_cluster_pw: None
# fmgr_ha_peer_sn: None
# fmgr_ha_hb_interval: 5
# fmgr_ha_cluster_id: 1
# fmgr_ha_mode: standalone
# fmgr_ha_peer_status: None
# fmgr_ha_hb_threshold: 3
# fmgr_ha_peer_ipv4: None
# fmgr_ha_peer_ipv6: None
# mode: set
##################################################
# Test using fixture 1 #
output = fmgr_ha.fmgr_set_ha_mode(fmg_instance, fixture_data[0]['paramgram_used'])
assert output['raw_response']['status']['code'] == 0
# Test using fixture 2 #
output = fmgr_ha.fmgr_set_ha_mode(fmg_instance, fixture_data[1]['paramgram_used'])
assert output['raw_response']['status']['code'] == 0
# Test using fixture 3 #
output = fmgr_ha.fmgr_set_ha_mode(fmg_instance, fixture_data[2]['paramgram_used'])
assert output['raw_response']['status']['code'] == 0
# Test using fixture 4 #
output = fmgr_ha.fmgr_set_ha_mode(fmg_instance, fixture_data[3]['paramgram_used'])
assert output['raw_response']['status']['code'] == 0
def test_fmgr_get_ha_peer_list(fixture_data, mocker):
mocker.patch("ansible.module_utils.network.fortimanager.fortimanager.FortiManagerHandler.process_request",
side_effect=fixture_data)
# Fixture sets used:###########################
##################################################
# fmgr_ha_peer_sn: FMG-VMTM18001882
# fmgr_ha_hb_threshold: 3
# fmgr_ha_cluster_pw: None
# fmgr_ha_peer_ipv6: None
# fmgr_ha_peer_status: enable
# fmgr_ha_file_quota: 4096
# fmgr_ha_cluster_id: 1
# fmgr_ha_peer_ipv4: 10.7.220.36
# fmgr_ha_hb_interval: 5
# fmgr_ha_mode: None
# mode: get
##################################################
##################################################
# fmgr_ha_peer_sn: FMG-VMTM18001881
# fmgr_ha_hb_threshold: 3
# fmgr_ha_cluster_pw: fortinet
# fmgr_ha_hb_interval: 5
# fmgr_ha_cluster_id: 2
# fmgr_ha_file_quota: 4096
# fmgr_ha_peer_status: enable
# fmgr_ha_peer_ipv4: 10.7.220.35
# fmgr_ha_peer_ipv6: None
# fmgr_ha_mode: slave
# mode: get
##################################################
# Test using fixture 1 #
output = fmgr_ha.fmgr_get_ha_peer_list(fmg_instance)
assert isinstance(output['raw_response'], list) is True
# Test using fixture 2 #
output = fmgr_ha.fmgr_get_ha_peer_list(fmg_instance)
assert isinstance(output['raw_response'], list) is True
def test_fmgr_set_ha_peer(fixture_data, mocker):
mocker.patch("ansible.module_utils.network.fortimanager.fortimanager.FortiManagerHandler.process_request",
side_effect=fixture_data)
# Fixture sets used:###########################
##################################################
# fmgr_ha_peer_sn: FMG-VMTM18001882
# next_peer_id: 2
# fmgr_ha_hb_threshold: 3
# fmgr_ha_cluster_pw: None
# fmgr_ha_peer_ipv6: None
# fmgr_ha_peer_status: enable
# fmgr_ha_file_quota: 4096
# fmgr_ha_cluster_id: 1
# peer_id: 1
# fmgr_ha_peer_ipv4: 10.7.220.36
# fmgr_ha_hb_interval: 5
# fmgr_ha_mode: None
# mode: set
##################################################
##################################################
# fmgr_ha_peer_sn: FMG-VMTM18001881
# next_peer_id: 1
# fmgr_ha_hb_threshold: 3
# fmgr_ha_cluster_pw: fortinet
# fmgr_ha_hb_interval: 5
# fmgr_ha_cluster_id: 2
# fmgr_ha_file_quota: 4096
# fmgr_ha_peer_status: enable
# peer_id: 1
# fmgr_ha_peer_ipv4: 10.7.220.35
# fmgr_ha_peer_ipv6: None
# fmgr_ha_mode: slave
# mode: set
##################################################
# Test using fixture 1 #
output = fmgr_ha.fmgr_set_ha_peer(fmg_instance, fixture_data[0]['paramgram_used'])
assert output['raw_response']['status']['code'] == 0
# Test using fixture 2 #
output = fmgr_ha.fmgr_set_ha_peer(fmg_instance, fixture_data[1]['paramgram_used'])
assert output['raw_response']['status']['code'] == 0
|
gpl-3.0
| 5,003,958,709,121,388,000 | -8,746,073,451,678,404,000 | 34.990741 | 110 | 0.581554 | false |
GuilhermeGSousa/ardupilot
|
Tools/scripts/build_examples.py
|
74
|
1075
|
#!/usr/bin/env python
# useful script to test the build of all example code
# This helps when doing large merges
# Peter Barker, June 2016
# based on build_examples.sh, Andrew Tridgell, November 2012
import os
import sys
import optparse
sys.path.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), '../autotest/pysim'))
import util
class BuildExamples():
    def __init__(self, targets=None, clean=False):
        # avoid a shared mutable default argument
        self.targets = targets if targets is not None else []
        self.clean = clean
def run(self):
for target in self.targets:
util.build_examples(target, clean=self.clean)
if __name__ == '__main__':
parser = optparse.OptionParser("build_examples.py")
parser.add_option("--target", type='string', default=['navio','px4-v2'], help='list of targets for which to build examples', action='append')
parser.add_option("--clean", action='store_true', default=False, help='clean build')
opts, args = parser.parse_args()
    # optparse appends to a list default instead of replacing it, so the
    # default target list is applied here
    buildexamples = BuildExamples(targets=opts.target or ['navio', 'px4-v2'], clean=opts.clean)
buildexamples.run()
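# Example invocation (targets are illustrative):
#   Tools/scripts/build_examples.py --target px4-v2 --clean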
|
gpl-3.0
| -3,215,875,452,155,622,000 | 3,106,705,800,612,782,600 | 30.617647 | 145 | 0.675349 | false |
TheTacoScott/GoAtThrottleUp
|
ServerRelay/cherrypy/lib/gctools.py
|
40
|
7396
|
import gc
import inspect
import os
import sys
import time
try:
import objgraph
except ImportError:
objgraph = None
import cherrypy
from cherrypy import _cprequest, _cpwsgi
from cherrypy.process.plugins import SimplePlugin
class ReferrerTree(object):
"""An object which gathers all referrers of an object to a given depth."""
peek_length = 40
def __init__(self, ignore=None, maxdepth=2, maxparents=10):
self.ignore = ignore or []
self.ignore.append(inspect.currentframe().f_back)
self.maxdepth = maxdepth
self.maxparents = maxparents
def ascend(self, obj, depth=1):
"""Return a nested list containing referrers of the given object."""
depth += 1
parents = []
# Gather all referrers in one step to minimize
# cascading references due to repr() logic.
refs = gc.get_referrers(obj)
self.ignore.append(refs)
if len(refs) > self.maxparents:
return [("[%s referrers]" % len(refs), [])]
try:
ascendcode = self.ascend.__code__
except AttributeError:
ascendcode = self.ascend.im_func.func_code
for parent in refs:
if inspect.isframe(parent) and parent.f_code is ascendcode:
continue
if parent in self.ignore:
continue
if depth <= self.maxdepth:
parents.append((parent, self.ascend(parent, depth)))
else:
parents.append((parent, []))
return parents
def peek(self, s):
"""Return s, restricted to a sane length."""
if len(s) > (self.peek_length + 3):
half = self.peek_length // 2
return s[:half] + '...' + s[-half:]
else:
return s
def _format(self, obj, descend=True):
"""Return a string representation of a single object."""
if inspect.isframe(obj):
filename, lineno, func, context, index = inspect.getframeinfo(obj)
return "<frame of function '%s'>" % func
if not descend:
return self.peek(repr(obj))
if isinstance(obj, dict):
return "{" + ", ".join(["%s: %s" % (self._format(k, descend=False),
self._format(v, descend=False))
for k, v in obj.items()]) + "}"
elif isinstance(obj, list):
return "[" + ", ".join([self._format(item, descend=False)
for item in obj]) + "]"
elif isinstance(obj, tuple):
return "(" + ", ".join([self._format(item, descend=False)
for item in obj]) + ")"
r = self.peek(repr(obj))
if isinstance(obj, (str, int, float)):
return r
return "%s: %s" % (type(obj), r)
def format(self, tree):
"""Return a list of string reprs from a nested list of referrers."""
output = []
def ascend(branch, depth=1):
for parent, grandparents in branch:
output.append((" " * depth) + self._format(parent))
if grandparents:
ascend(grandparents, depth + 1)
ascend(tree)
return output
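# Illustrative use of ReferrerTree, mirroring how GCRoot.stats() below
# consumes it:
#
#   t = ReferrerTree(ignore=[objs], maxdepth=3)
#   tree = t.ascend(obj)
#   print("\n".join(t.format(tree)))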
def get_instances(cls):
return [x for x in gc.get_objects() if isinstance(x, cls)]
class RequestCounter(SimplePlugin):
def start(self):
self.count = 0
def before_request(self):
self.count += 1
def after_request(self):
        self.count -= 1
request_counter = RequestCounter(cherrypy.engine)
request_counter.subscribe()
def get_context(obj):
if isinstance(obj, _cprequest.Request):
return "path=%s;stage=%s" % (obj.path_info, obj.stage)
elif isinstance(obj, _cprequest.Response):
return "status=%s" % obj.status
elif isinstance(obj, _cpwsgi.AppResponse):
return "PATH_INFO=%s" % obj.environ.get('PATH_INFO', '')
elif hasattr(obj, "tb_lineno"):
return "tb_lineno=%s" % obj.tb_lineno
return ""
class GCRoot(object):
"""A CherryPy page handler for testing reference leaks."""
classes = [(_cprequest.Request, 2, 2,
"Should be 1 in this request thread and 1 in the main thread."),
(_cprequest.Response, 2, 2,
"Should be 1 in this request thread and 1 in the main thread."),
(_cpwsgi.AppResponse, 1, 1,
"Should be 1 in this request thread only."),
]
def index(self):
return "Hello, world!"
index.exposed = True
def stats(self):
output = ["Statistics:"]
for trial in range(10):
if request_counter.count > 0:
break
time.sleep(0.5)
else:
output.append("\nNot all requests closed properly.")
# gc_collect isn't perfectly synchronous, because it may
# break reference cycles that then take time to fully
# finalize. Call it thrice and hope for the best.
gc.collect()
gc.collect()
unreachable = gc.collect()
if unreachable:
if objgraph is not None:
final = objgraph.by_type('Nondestructible')
if final:
objgraph.show_backrefs(final, filename='finalizers.png')
trash = {}
for x in gc.garbage:
trash[type(x)] = trash.get(type(x), 0) + 1
if trash:
output.insert(0, "\n%s unreachable objects:" % unreachable)
trash = [(v, k) for k, v in trash.items()]
trash.sort()
for pair in trash:
output.append(" " + repr(pair))
# Check declared classes to verify uncollected instances.
# These don't have to be part of a cycle; they can be
# any objects that have unanticipated referrers that keep
# them from being collected.
allobjs = {}
for cls, minobj, maxobj, msg in self.classes:
allobjs[cls] = get_instances(cls)
for cls, minobj, maxobj, msg in self.classes:
objs = allobjs[cls]
lenobj = len(objs)
if lenobj < minobj or lenobj > maxobj:
if minobj == maxobj:
output.append(
"\nExpected %s %r references, got %s." %
(minobj, cls, lenobj))
else:
output.append(
"\nExpected %s to %s %r references, got %s." %
(minobj, maxobj, cls, lenobj))
for obj in objs:
if objgraph is not None:
ig = [id(objs), id(inspect.currentframe())]
fname = "graph_%s_%s.png" % (cls.__name__, id(obj))
objgraph.show_backrefs(
obj, extra_ignore=ig, max_depth=4, too_many=20,
filename=fname, extra_info=get_context)
output.append("\nReferrers for %s (refcount=%s):" %
(repr(obj), sys.getrefcount(obj)))
t = ReferrerTree(ignore=[objs], maxdepth=3)
tree = t.ascend(obj)
output.extend(t.format(tree))
return "\n".join(output)
stats.exposed = True
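# Sketch of how GCRoot might be mounted in a CherryPy app (the mount
# point is hypothetical):
#
#   cherrypy.tree.mount(GCRoot(), '/gcref')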
|
mit
| -8,023,968,950,152,924,000 | 8,249,369,389,234,425,000 | 33.560748 | 80 | 0.527312 | false |
moijes12/oh-mainline
|
vendor/packages/kombu/kombu/transport/django/__init__.py
|
20
|
2175
|
"""Kombu transport using the Django database as a message store."""
from __future__ import absolute_import
from anyjson import loads, dumps
from django.conf import settings
from django.core import exceptions as errors
from kombu.five import Empty
from kombu.transport import virtual
from kombu.utils.encoding import bytes_to_str
from .models import Queue
try:
from django.apps import AppConfig
except ImportError: # pragma: no cover
pass
else:
class KombuAppConfig(AppConfig):
name = 'kombu.transport.django'
label = name.replace('.', '_')
verbose_name = 'Message queue'
default_app_config = 'kombu.transport.django.KombuAppConfig'
VERSION = (1, 0, 0)
__version__ = '.'.join(map(str, VERSION))
POLLING_INTERVAL = getattr(settings, 'KOMBU_POLLING_INTERVAL',
getattr(settings, 'DJKOMBU_POLLING_INTERVAL', 5.0))
class Channel(virtual.Channel):
def _new_queue(self, queue, **kwargs):
Queue.objects.get_or_create(name=queue)
def _put(self, queue, message, **kwargs):
Queue.objects.publish(queue, dumps(message))
def basic_consume(self, queue, *args, **kwargs):
qinfo = self.state.bindings[queue]
exchange = qinfo[0]
if self.typeof(exchange).type == 'fanout':
return
super(Channel, self).basic_consume(queue, *args, **kwargs)
def _get(self, queue):
m = Queue.objects.fetch(queue)
if m:
return loads(bytes_to_str(m))
raise Empty()
def _size(self, queue):
return Queue.objects.size(queue)
def _purge(self, queue):
return Queue.objects.purge(queue)
def refresh_connection(self):
from django import db
db.close_connection()
class Transport(virtual.Transport):
Channel = Channel
default_port = 0
polling_interval = POLLING_INTERVAL
channel_errors = (
virtual.Transport.channel_errors + (
errors.ObjectDoesNotExist, errors.MultipleObjectsReturned)
)
driver_type = 'sql'
driver_name = 'django'
def driver_version(self):
import django
return '.'.join(map(str, django.VERSION))
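# Illustrative selection of this transport through a broker URL
# (assuming kombu's 'django' transport alias):
#
#   from kombu import Connection
#   conn = Connection('django://')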
|
agpl-3.0
| -393,470,442,847,165,250 | 121,167,190,930,621,390 | 26.531646 | 78 | 0.650575 | false |
fangxingli/hue
|
apps/oozie/src/oozie/migrations/0023_auto__add_field_node_data__add_field_job_data.py
|
37
|
26876
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Node.data'
db.add_column('oozie_node', 'data',
self.gf('django.db.models.fields.TextField')(default='{}', blank=True),
keep_default=False)
# Adding field 'Job.data'
db.add_column('oozie_job', 'data',
self.gf('django.db.models.fields.TextField')(default='{}', blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Node.data'
db.delete_column('oozie_node', 'data')
# Deleting field 'Job.data'
db.delete_column('oozie_job', 'data')
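    # Illustrative South commands for applying or reverting this
    # migration (app label assumed to be 'oozie'):
    #
    #   ./manage.py migrate oozie 0023
    #   ./manage.py migrate oozie 0022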
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'desktop.document': {
'Meta': {'object_name': 'Document'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''"}),
'extra': ('django.db.models.fields.TextField', [], {'default': "''"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.TextField', [], {'default': "''"}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'doc_owner'", 'to': "orm['auth.User']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['desktop.DocumentTag']", 'db_index': 'True', 'symmetrical': 'False'}),
'version': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'})
},
'desktop.documenttag': {
'Meta': {'object_name': 'DocumentTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'tag': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
'oozie.bundle': {
'Meta': {'object_name': 'Bundle', '_ormbases': ['oozie.Job']},
'coordinators': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['oozie.Coordinator']", 'through': "orm['oozie.BundledCoordinator']", 'symmetrical': 'False'}),
'job_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Job']", 'unique': 'True', 'primary_key': 'True'}),
'kick_off_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 12, 17, 0, 0)'})
},
'oozie.bundledcoordinator': {
'Meta': {'object_name': 'BundledCoordinator'},
'bundle': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Bundle']"}),
'coordinator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Coordinator']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parameters': ('django.db.models.fields.TextField', [], {'default': '\'[{"name":"oozie.use.system.libpath","value":"true"}]\''})
},
'oozie.coordinator': {
'Meta': {'object_name': 'Coordinator', '_ormbases': ['oozie.Job']},
'concurrency': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 12, 20, 0, 0)'}),
'execution': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'frequency_number': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'frequency_unit': ('django.db.models.fields.CharField', [], {'default': "'days'", 'max_length': '20'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Job']", 'unique': 'True', 'primary_key': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 12, 17, 0, 0)'}),
'throttle': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'timeout': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'timezone': ('django.db.models.fields.CharField', [], {'default': "'America/Los_Angeles'", 'max_length': '24'}),
'workflow': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Workflow']", 'null': 'True'})
},
'oozie.datainput': {
'Meta': {'object_name': 'DataInput'},
'coordinator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Coordinator']"}),
'dataset': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Dataset']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'oozie.dataoutput': {
'Meta': {'object_name': 'DataOutput'},
'coordinator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Coordinator']"}),
'dataset': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Dataset']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'oozie.dataset': {
'Meta': {'object_name': 'Dataset'},
'advanced_end_instance': ('django.db.models.fields.CharField', [], {'default': "'0'", 'max_length': '128', 'blank': 'True'}),
'advanced_start_instance': ('django.db.models.fields.CharField', [], {'default': "'0'", 'max_length': '128'}),
'coordinator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Coordinator']"}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'done_flag': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'blank': 'True'}),
'frequency_number': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'frequency_unit': ('django.db.models.fields.CharField', [], {'default': "'days'", 'max_length': '20'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_choice': ('django.db.models.fields.CharField', [], {'default': "'default'", 'max_length': '10'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'start': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 12, 17, 0, 0)'}),
'timezone': ('django.db.models.fields.CharField', [], {'default': "'America/Los_Angeles'", 'max_length': '24'}),
'uri': ('django.db.models.fields.CharField', [], {'default': "'/data/${YEAR}${MONTH}${DAY}'", 'max_length': '1024'})
},
'oozie.decision': {
'Meta': {'object_name': 'Decision'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.decisionend': {
'Meta': {'object_name': 'DecisionEnd'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.distcp': {
'Meta': {'object_name': 'DistCp'},
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"})
},
'oozie.email': {
'Meta': {'object_name': 'Email'},
'body': ('django.db.models.fields.TextField', [], {'default': "''"}),
'cc': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'subject': ('django.db.models.fields.TextField', [], {'default': "''"}),
'to': ('django.db.models.fields.TextField', [], {'default': "''"})
},
'oozie.end': {
'Meta': {'object_name': 'End'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.fork': {
'Meta': {'object_name': 'Fork'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.fs': {
'Meta': {'object_name': 'Fs'},
'chmods': ('django.db.models.fields.TextField', [], {'default': "'[]'", 'blank': 'True'}),
'deletes': ('django.db.models.fields.TextField', [], {'default': "'[]'", 'blank': 'True'}),
'mkdirs': ('django.db.models.fields.TextField', [], {'default': "'[]'", 'blank': 'True'}),
'moves': ('django.db.models.fields.TextField', [], {'default': "'[]'", 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'touchzs': ('django.db.models.fields.TextField', [], {'default': "'[]'", 'blank': 'True'})
},
'oozie.generic': {
'Meta': {'object_name': 'Generic'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'xml': ('django.db.models.fields.TextField', [], {'default': "''"})
},
'oozie.history': {
'Meta': {'object_name': 'History'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Job']"}),
'oozie_job_id': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'properties': ('django.db.models.fields.TextField', [], {}),
'submission_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'submitter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'oozie.hive': {
'Meta': {'object_name': 'Hive'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': '\'[{"name":"oozie.hive.defaults","value":"hive-site.xml"}]\''}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'script_path': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'oozie.java': {
'Meta': {'object_name': 'Java'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'args': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'capture_output': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'jar_path': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'java_opts': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'main_class': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"})
},
'oozie.job': {
'Meta': {'object_name': 'Job'},
'data': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'blank': 'True'}),
'deployment_dir': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_shared': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_trashed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'parameters': ('django.db.models.fields.TextField', [], {'default': '\'[{"name":"oozie.use.system.libpath","value":"true"}]\''}),
'schema_version': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'oozie.join': {
'Meta': {'object_name': 'Join'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.kill': {
'Meta': {'object_name': 'Kill'},
'message': ('django.db.models.fields.CharField', [], {'default': "'Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]'", 'max_length': '256'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.link': {
'Meta': {'object_name': 'Link'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parent_node'", 'to': "orm['oozie.Node']"}),
'comment': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'child_node'", 'to': "orm['oozie.Node']"})
},
'oozie.mapreduce': {
'Meta': {'object_name': 'Mapreduce'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'jar_path': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"})
},
'oozie.node': {
'Meta': {'object_name': 'Node'},
'children': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'parents'", 'symmetrical': 'False', 'through': "orm['oozie.Link']", 'to': "orm['oozie.Node']"}),
'data': ('django.db.models.fields.TextField', [], {'default': "'{}'", 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1024', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'node_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'workflow': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Workflow']"})
},
'oozie.pig': {
'Meta': {'object_name': 'Pig'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'script_path': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'oozie.shell': {
'Meta': {'object_name': 'Shell'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'capture_output': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'command': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"})
},
'oozie.sqoop': {
'Meta': {'object_name': 'Sqoop'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'prepares': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'script_path': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'})
},
'oozie.ssh': {
'Meta': {'object_name': 'Ssh'},
'capture_output': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'command': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'host': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'params': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'oozie.start': {
'Meta': {'object_name': 'Start'},
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'})
},
'oozie.streaming': {
'Meta': {'object_name': 'Streaming'},
'archives': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'files': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'mapper': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'reducer': ('django.db.models.fields.CharField', [], {'max_length': '512'})
},
'oozie.subworkflow': {
'Meta': {'object_name': 'SubWorkflow'},
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'node_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Node']", 'unique': 'True', 'primary_key': 'True'}),
'propagate_configuration': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'sub_workflow': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['oozie.Workflow']"})
},
'oozie.workflow': {
'Meta': {'object_name': 'Workflow', '_ormbases': ['oozie.Job']},
'end': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'end_workflow'", 'null': 'True', 'to': "orm['oozie.End']"}),
'is_single': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'job_properties': ('django.db.models.fields.TextField', [], {'default': "'[]'"}),
'job_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['oozie.Job']", 'unique': 'True', 'primary_key': 'True'}),
'job_xml': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'blank': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'start': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'start_workflow'", 'null': 'True', 'to': "orm['oozie.Start']"})
}
}
complete_apps = ['oozie']
|
apache-2.0
| -1,758,208,195,669,710,600 | 8,146,214,433,485,933,000 | 75.791429 | 194 | 0.529506 | false |
jbassen/edx-platform
|
lms/djangoapps/ccx/tests/test_models.py
|
45
|
8622
|
"""
tests for the models
"""
from datetime import datetime, timedelta
from django.utils.timezone import UTC
from mock import patch
from nose.plugins.attrib import attr
from student.roles import CourseCcxCoachRole # pylint: disable=import-error
from student.tests.factories import ( # pylint: disable=import-error
AdminFactory,
)
from util.tests.test_date_utils import fake_ugettext
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import (
CourseFactory,
check_mongo_calls
)
from .factories import (
CcxFactory,
)
from ..overrides import override_field_for_ccx
@attr('shard_1')
class TestCCX(ModuleStoreTestCase):
"""Unit tests for the CustomCourseForEdX model
"""
def setUp(self):
"""common setup for all tests"""
super(TestCCX, self).setUp()
self.course = course = CourseFactory.create()
coach = AdminFactory.create()
role = CourseCcxCoachRole(course.id)
role.add_users(coach)
self.ccx = CcxFactory(course_id=course.id, coach=coach)
def set_ccx_override(self, field, value):
"""Create a field override for the test CCX on <field> with <value>"""
override_field_for_ccx(self.ccx, self.course, field, value)
def test_ccx_course_is_correct_course(self):
"""verify that the course property of a ccx returns the right course"""
expected = self.course
actual = self.ccx.course
self.assertEqual(expected, actual)
def test_ccx_course_caching(self):
"""verify that caching the propery works to limit queries"""
with check_mongo_calls(1):
# these statements are used entirely to demonstrate the
# instance-level caching of these values on CCX objects. The
# check_mongo_calls context is the point here.
self.ccx.course # pylint: disable=pointless-statement
with check_mongo_calls(0):
self.ccx.course # pylint: disable=pointless-statement
def test_ccx_start_is_correct(self):
"""verify that the start datetime for a ccx is correctly retrieved
Note that after setting the start field override microseconds are
truncated, so we can't do a direct comparison between before and after.
        For this reason we test the difference between the two and make sure it is less
than one second.
"""
expected = datetime.now(UTC())
self.set_ccx_override('start', expected)
actual = self.ccx.start # pylint: disable=no-member
diff = expected - actual
self.assertTrue(abs(diff.total_seconds()) < 1)
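        # Illustrative note (hypothetical values, not part of the original
        # test): the override store truncates microseconds, so e.g.
        # datetime(2015, 1, 1, 12, 0, 0, 123456) is read back as
        # datetime(2015, 1, 1, 12, 0, 0); expected - actual is therefore a
        # sub-second timedelta rather than exactly zero.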
def test_ccx_start_caching(self):
"""verify that caching the start property works to limit queries"""
now = datetime.now(UTC())
self.set_ccx_override('start', now)
with check_mongo_calls(1):
# these statements are used entirely to demonstrate the
# instance-level caching of these values on CCX objects. The
# check_mongo_calls context is the point here.
self.ccx.start # pylint: disable=pointless-statement, no-member
with check_mongo_calls(0):
self.ccx.start # pylint: disable=pointless-statement, no-member
def test_ccx_due_without_override(self):
"""verify that due returns None when the field has not been set"""
actual = self.ccx.due # pylint: disable=no-member
self.assertIsNone(actual)
def test_ccx_due_is_correct(self):
"""verify that the due datetime for a ccx is correctly retrieved"""
expected = datetime.now(UTC())
self.set_ccx_override('due', expected)
actual = self.ccx.due # pylint: disable=no-member
diff = expected - actual
self.assertTrue(abs(diff.total_seconds()) < 1)
def test_ccx_due_caching(self):
"""verify that caching the due property works to limit queries"""
expected = datetime.now(UTC())
self.set_ccx_override('due', expected)
with check_mongo_calls(1):
# these statements are used entirely to demonstrate the
# instance-level caching of these values on CCX objects. The
# check_mongo_calls context is the point here.
self.ccx.due # pylint: disable=pointless-statement, no-member
with check_mongo_calls(0):
self.ccx.due # pylint: disable=pointless-statement, no-member
def test_ccx_has_started(self):
"""verify that a ccx marked as starting yesterday has started"""
now = datetime.now(UTC())
delta = timedelta(1)
then = now - delta
self.set_ccx_override('start', then)
self.assertTrue(self.ccx.has_started()) # pylint: disable=no-member
def test_ccx_has_not_started(self):
"""verify that a ccx marked as starting tomorrow has not started"""
now = datetime.now(UTC())
delta = timedelta(1)
then = now + delta
self.set_ccx_override('start', then)
self.assertFalse(self.ccx.has_started()) # pylint: disable=no-member
def test_ccx_has_ended(self):
"""verify that a ccx that has a due date in the past has ended"""
now = datetime.now(UTC())
delta = timedelta(1)
then = now - delta
self.set_ccx_override('due', then)
self.assertTrue(self.ccx.has_ended()) # pylint: disable=no-member
def test_ccx_has_not_ended(self):
"""verify that a ccx that has a due date in the future has not eneded
"""
now = datetime.now(UTC())
delta = timedelta(1)
then = now + delta
self.set_ccx_override('due', then)
self.assertFalse(self.ccx.has_ended()) # pylint: disable=no-member
def test_ccx_without_due_date_has_not_ended(self):
"""verify that a ccx without a due date has not ended"""
self.assertFalse(self.ccx.has_ended()) # pylint: disable=no-member
# ensure that the expected localized format will be found by the i18n
# service
@patch('util.date_utils.ugettext', fake_ugettext(translations={
"SHORT_DATE_FORMAT": "%b %d, %Y",
}))
def test_start_datetime_short_date(self):
"""verify that the start date for a ccx formats properly by default"""
start = datetime(2015, 1, 1, 12, 0, 0, tzinfo=UTC())
expected = "Jan 01, 2015"
self.set_ccx_override('start', start)
actual = self.ccx.start_datetime_text() # pylint: disable=no-member
self.assertEqual(expected, actual)
@patch('util.date_utils.ugettext', fake_ugettext(translations={
"DATE_TIME_FORMAT": "%b %d, %Y at %H:%M",
}))
def test_start_datetime_date_time_format(self):
"""verify that the DATE_TIME format also works as expected"""
start = datetime(2015, 1, 1, 12, 0, 0, tzinfo=UTC())
expected = "Jan 01, 2015 at 12:00 UTC"
self.set_ccx_override('start', start)
actual = self.ccx.start_datetime_text('DATE_TIME') # pylint: disable=no-member
self.assertEqual(expected, actual)
@patch('util.date_utils.ugettext', fake_ugettext(translations={
"SHORT_DATE_FORMAT": "%b %d, %Y",
}))
def test_end_datetime_short_date(self):
"""verify that the end date for a ccx formats properly by default"""
end = datetime(2015, 1, 1, 12, 0, 0, tzinfo=UTC())
expected = "Jan 01, 2015"
self.set_ccx_override('due', end)
actual = self.ccx.end_datetime_text() # pylint: disable=no-member
self.assertEqual(expected, actual)
@patch('util.date_utils.ugettext', fake_ugettext(translations={
"DATE_TIME_FORMAT": "%b %d, %Y at %H:%M",
}))
def test_end_datetime_date_time_format(self):
"""verify that the DATE_TIME format also works as expected"""
end = datetime(2015, 1, 1, 12, 0, 0, tzinfo=UTC())
expected = "Jan 01, 2015 at 12:00 UTC"
self.set_ccx_override('due', end)
actual = self.ccx.end_datetime_text('DATE_TIME') # pylint: disable=no-member
self.assertEqual(expected, actual)
@patch('util.date_utils.ugettext', fake_ugettext(translations={
"DATE_TIME_FORMAT": "%b %d, %Y at %H:%M",
}))
def test_end_datetime_no_due_date(self):
"""verify that without a due date, the end date is an empty string"""
expected = ''
actual = self.ccx.end_datetime_text() # pylint: disable=no-member
self.assertEqual(expected, actual)
actual = self.ccx.end_datetime_text('DATE_TIME') # pylint: disable=no-member
self.assertEqual(expected, actual)
|
agpl-3.0
| -4,003,900,349,608,983,000 | -3,591,448,300,492,997,600 | 41.683168 | 87 | 0.639295 | false |
poulpito/Flexget
|
flexget/utils/log.py
|
9
|
2557
|
"""Logging utilities"""
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
import logging
import hashlib
from datetime import datetime, timedelta
from sqlalchemy import Column, Integer, String, DateTime, Index
from flexget.utils.database import with_session
from flexget import db_schema
from flexget import logger as f_logger
from flexget.utils.sqlalchemy_utils import table_schema
from flexget.event import event
log = logging.getLogger('util.log')
Base = db_schema.versioned_base('log_once', 0)
@db_schema.upgrade('log_once')
def upgrade(ver, session):
if ver is None:
log.info('Adding index to md5sum column of log_once table.')
table = table_schema('log_once', session)
Index('log_once_md5sum', table.c.md5sum, unique=True).create()
ver = 0
return ver
class LogMessage(Base):
"""Declarative"""
__tablename__ = 'log_once'
id = Column(Integer, primary_key=True)
md5sum = Column(String, unique=True)
    added = Column(DateTime, default=datetime.now)  # pass the callable so each row gets its own timestamp
def __init__(self, md5sum):
self.md5sum = md5sum
def __repr__(self):
return "<LogMessage('%s')>" % self.md5sum
@event('manager.db_cleanup')
def purge(manager, session):
"""Purge old messages from database"""
old = datetime.now() - timedelta(days=365)
result = session.query(LogMessage).filter(LogMessage.added < old).delete()
if result:
log.verbose('Purged %s entries from log_once table.' % result)
@with_session
def log_once(message, logger=logging.getLogger('log_once'), once_level=logging.INFO, suppressed_level=f_logger.VERBOSE,
session=None):
"""
    Log a message only once using the given logger. Returns False if the
    message was suppressed; a suppressed message is still logged at
    `suppressed_level`.
"""
# If there is no active manager, don't access the db
from flexget.manager import manager
if not manager:
log.warning('DB not initialized. log_once will not work properly.')
logger.log(once_level, message)
return
digest = hashlib.md5()
digest.update(message.encode('latin1', 'replace')) # ticket:250
md5sum = digest.hexdigest()
# abort if this has already been logged
if session.query(LogMessage).filter_by(md5sum=md5sum).first():
logger.log(suppressed_level, message)
return False
row = LogMessage(md5sum)
session.add(row)
logger.log(once_level, message)
return True
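# Illustrative usage sketch (hypothetical; not part of the original module,
# and it assumes a configured FlexGet manager so the log_once table exists):
#
#     if log_once('Site appears to be down, skipping', logger=log):
#         pass  # first occurrence: logged at once_level and remembered by md5
#
# A repeated identical message is logged at suppressed_level and the call
# returns False; the md5 digest of the message text is the deduplication key.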
|
mit
| 3,490,665,101,175,033,300 | 2,322,345,624,444,797,400 | 29.440476 | 119 | 0.686742 | false |
rosswhitfield/mantid
|
scripts/BilbyCustomFunctions_Reduction.py
|
3
|
17128
|
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
import csv
import math
from itertools import product
import sys
from mantid.simpleapi import MoveInstrumentComponent, CropWorkspace
# values for att_pos 2 and 4 deliberately make no sense; those attenuators were not in use at that time
attenuation_correction_pre_2016 = {1.0: 0.007655, 2.0: -1.0, 3.0: 1.0, 4.0: -1.0, 5.0: 0.005886}
attenuation_correction_post_2016 = {1.0: 1.0, 2.0: 0.00955, 3.0: 0.005886, 4.0: 0.00290, 5.0: 0.00062}
##############################################################################
def string_boolean(line):
""" Convert string to boolean; needed to read "true" and "false" from the csv Data reduction settings table """
if line == 'false':
bool_string = False
elif line == 'true':
bool_string = True
else:
print("Check value of {}".format(line))
print("It must be either True or False")
sys.exit()
return bool_string
##############################################################################
def read_convert_to_float(array_strings):
""" Needed to convert binning parameters from the csv file into the float numbers """
array = [x.strip() for x in array_strings.split(',')]
array = [float(x) for x in array]
if (len(array) != 3):
print("Check input parameters; binning parameters shall be given in a format left_value, step, right_value.")
sys.exit()
return array
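# Illustrative (hypothetical input, not part of the original script):
#   read_convert_to_float("2.0, 0.05, 18.0") -> [2.0, 0.05, 18.0]
# Any string that does not yield exactly three comma-separated numbers
# stops the reduction.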
##############################################################################
def files_list_reduce(filename):
""" Creat array of input reduction settings """
parameters = []
with open(filename) as csv_file:
reader = csv.DictReader(csv_file)
for row in reader:
if row['index'] == '':
continue
if row['index'] == 'END':
break
parameters.append(row)
return parameters
##############################################################################
def evaluate_files_list(numbers):
""" Needed for FilesToReduce, see below """
expanded = []
for number in numbers.split(","):
if "-" in number:
start, end = number.split("-")
nrs = range(int(start), int(end) + 1)
expanded.extend(nrs)
else:
expanded.append(int(number))
return expanded
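# Illustrative check (hypothetical input, not part of the original script):
#   evaluate_files_list("1,3-5,8") -> [1, 3, 4, 5, 8]
# Each "a-b" token expands to the inclusive range a..b; plain numbers pass
# through unchanged.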
##############################################################################
def files_to_reduce(parameters, evaluate_files):
""" Create list of the files to reduce """
files_to_reduce = []
if len(evaluate_files) == 0:
files_to_reduce.extend(parameters)
else:
        evaluate_files_l = evaluate_files_list(evaluate_files)  # call function to retrieve the IDs list
for parameter in parameters:
if int(parameter['index']) in evaluate_files_l:
files_to_reduce.append(parameter)
return files_to_reduce
##############################################################################
def strip_NaNs(output_workspace, base_output_name):
""" Strip NaNs from the 1D OutputWorkspace """ # add isinf
data = output_workspace.readY(0)
start_index = next((index for index in range(len(data)) if not math.isnan(data[index])), None)
end_index = next((index for index in range(len(data)-1, -1, -1) if not math.isnan(data[index])), None)
q_values = output_workspace.readX(0)
start_q = q_values[start_index]
end_q = q_values[end_index]
CropWorkspace(InputWorkspace=output_workspace, XMin=start_q, XMax=end_q, OutputWorkspace=base_output_name)
return base_output_name
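# Illustrative sketch of the trimming above (hypothetical data; the real
# function reads a Mantid workspace):
#   data = [nan, nan, 0.42, 0.37, 0.31, nan] -> start_index = 2, end_index = 4
# CropWorkspace then keeps only the Q range spanned by those two points.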
##############################################################################
def output_header(external_mode, used_wl_range, ws_sample, sample_thickness,
sample_transmission, empty_beam_transmission, blocked_beam, sample_mask, transmission_mask):
""" Creates header to be recorded into the output file """
header = []
wl_row = 'Velocity selector set wavelength: ' + str(format(float(ws_sample.run().getProperty("wavelength").value), '.3f')) + ' Angstrom'
header.append(wl_row)
if (external_mode):
choppers = 'Double choppers pair: ' + str(int(ws_sample.run().getProperty("master1_chopper_id").value)) + ' and ' \
+ str(int(ws_sample.run().getProperty("master2_chopper_id").value))
header.append(choppers)
frequency = 'Data defining pulse frequency (equal or slower than the Double pair frequency): ' \
+ str(format(1e6/float(ws_sample.run().getProperty("period").value), '.2f')) + ' Hz'
header.append(frequency)
wavelength_range = 'Wavelength range used for the data reduction: ' + str(format(float(used_wl_range[0]), '.2f')) + ' to ' \
+ str(format(float(used_wl_range[2]), '.2f')) + ' Angstrom'
header.append(wavelength_range)
resolution_value = float(used_wl_range[1])
if resolution_value < 0:
resolution = 'Resolution used for calculation of dQ: ' + str(format((-100 * resolution_value), '.2f')) + '%'
else:
resolution = 'Resolution taken as wavelength binning;' + '\n' + 'the value is set to ' + \
str(format(resolution_value, '.2f')) + '%' # on linear scale, hence the dQ calculation is meaningless'
header.append(resolution)
else:
resolution = "Nominal resolution: 10%"
header.append(resolution)
l1 = 'L1: ' + str(format(float(ws_sample.run().getProperty("L1").value), '.3f')) + ' m'
header.append(l1)
rear_l2_row = 'L2 to rear detector: ' + str(format(float(ws_sample.run().getProperty("L2_det_value").value), '.3f')) + ' m'
header.append(rear_l2_row)
curtain_ud_l2_row = 'L2 to horizontal curtains: ' \
+ str(format(float(ws_sample.run().getProperty("L2_curtainu_value").value), '.3f')) + ' m'
header.append(curtain_ud_l2_row)
curtain_lr_l2_row = 'L2 to vertical curtains: ' \
+ str(format(float(ws_sample.run().getProperty("L2_curtainr_value").value), '.3f')) + ' m'
header.append(curtain_lr_l2_row)
curtain_l_separation_row = 'Left curtain separation: ' \
+ str(format(float(ws_sample.run().getProperty("D_curtainl_value").value), '.3f')) + ' m'
header.append(curtain_l_separation_row)
curtain_r_separation_row = 'Right curtain separation: ' \
+ str(format(float(ws_sample.run().getProperty("D_curtainr_value").value), '.3f')) + ' m'
header.append(curtain_r_separation_row)
curtain_u_separation_row = 'Top curtain separation: ' \
+ str(format(float(ws_sample.run().getProperty("D_curtainu_value").value), '.3f')) + ' m'
header.append(curtain_u_separation_row)
curtain_d_separation_row = 'Bottom curtain separation: ' \
+ str(format(float(ws_sample.run().getProperty("D_curtaind_value").value), '.3f')) + ' m'
header.append(curtain_d_separation_row)
apertures = 'Source and sample apertures diameters: ' \
+ str(format(float(ws_sample.run().getProperty("source_aperture").value), '.1f')) + ' mm and ' \
+ str(format(float(ws_sample.run().getProperty("sample_aperture").value), '.1f')) + ' mm'
header.append(apertures)
sample_related_details = 'Sample thickness and transmission: ' \
+ format(float(sample_thickness), '.2f') + ' cm and ' + sample_transmission
header.append(sample_related_details)
corrections_related_details = 'Empty beam transmission and blocked beam scattering: ' \
+ empty_beam_transmission + ' and ' + blocked_beam
header.append(corrections_related_details)
masks = 'Sample and trasmission masks: ' + sample_mask + ' and ' + transmission_mask + '\n'
header.append(masks)
return header
##############################################################################
def get_pixel_size(): # reads current IDF and get pixelsize from there
""" To get pixel size for Bilby detectors from the Bilby_Definition.xml file """
from mantid.api import ExperimentInfo
import xml.etree.cElementTree as ET
currentIDF = ExperimentInfo.getInstrumentFilename("Bilby")
# print currentIDF
tree = ET.parse(currentIDF)
for node in tree.iter():
if node.tag == "{http://www.mantidproject.org/IDF/1.0}height":
name = node.attrib.get('val')
break
pixelsize = float(name)
return pixelsize
##############################################################################
def read_csv(filename):
""" Read cvs... """
parameters = []
with open(filename) as csvfile:
reader = csv.reader(csvfile)
for row in reader:
parameters.append(row)
return parameters
##############################################################################
def attenuation_correction(att_pos, data_before_May_2016):
""" Bilby has four attenuators; before May 2016 there were only two.
Value of the attenuators are hard coded here and being used for the I(Q) scaling in Q1D """
if (data_before_May_2016):
print("You stated data have been collected before May, 2016, i.e. using old attenuators. Please double check.")
if (att_pos == 2.0 or att_pos == 4.0):
print(
"Wrong attenuators value; Either data have been collected after May, 2016, or something is wrong with hdf file")
sys.exit()
scale = attenuation_correction_pre_2016[att_pos]
else:
scale = attenuation_correction_post_2016[att_pos]
return scale
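# Illustrative (values taken from the dictionaries above):
#   attenuation_correction(3.0, data_before_May_2016=False) -> 0.005886
#   attenuation_correction(3.0, data_before_May_2016=True)  -> 1.0
# i.e. the same attenuator position maps to different scale factors
# depending on the measurement date.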
##############################################################################
def wavelengh_slices(wavelength_intervals, binning_wavelength_ini, wav_delta):
""" This function defined number of wavelenth slices and creates array of the binning parameters for each slice """
binning_wavelength = []
if not wavelength_intervals:
binning_wavelength.append(binning_wavelength_ini)
        n = 1  # in this case, the number of wavelength range intervals will always be 1
else: # reducing data on a several intervals of wavelengths
wav1 = float(binning_wavelength_ini[0])
wv_ini_step = float(binning_wavelength_ini[1])
wav2 = float(binning_wavelength_ini[2])
        # check if the chosen wavelength interval is feasible
if (wav1 + wav_delta) > wav2:
raise ValueError("wav_delta is too large for the upper range of wavelength")
        if math.fmod((wav2 - wav1), wav_delta) == 0.0:  # if the remainder is 0
n = (wav2 - wav1)/wav_delta
        else:  # if the remainder is greater than 0, truncate the maximum wavelength in the range
n = math.floor((wav2 - wav1)/wav_delta)
max_wave_length = wav1 + n*wav_delta
print ('\n WARNING: because of your set-up, maximum wavelength to consider \
for partial reduction is only %4.2f \n' % max_wave_length)
# number of wavelength range intervals
n = int(n)
binning_wavelength_interm = []
binning_wavelength_interm_1 = wv_ini_step # binning step is always the same
for i in range(n):
binning_wavelength_interm_0 = wav1 + wav_delta * i # left range
binning_wavelength_interm_2 = binning_wavelength_interm_0 + wav_delta # right range
binning_wavelength_interm = [binning_wavelength_interm_0, binning_wavelength_interm_1, binning_wavelength_interm_2]
binning_wavelength.append(binning_wavelength_interm)
binning_wavelength.append(binning_wavelength_ini) # reduce data on the full range
n = n + 1 # to include full range
return binning_wavelength, n
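# Illustrative worked example (hypothetical numbers, not part of the original
# script): with binning_wavelength_ini = [2.0, 0.05, 11.0] and wav_delta = 3.0,
# (11.0 - 2.0) divides evenly into three slices,
#   [2.0, 0.05, 5.0], [5.0, 0.05, 8.0], [8.0, 0.05, 11.0],
# and the full range [2.0, 0.05, 11.0] is appended last, so n = 4.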
##############################################################################
def correction_tubes_shift(ws_to_correct, path_to_shifts_file):
""" This function moves each tube and then rear panels as a whole as per numbers recorded in the path_to_shifts_file csv file.
The values in the file are obtained from fitting of a few data sets collected using different masks.
    It is a very good idea not to change the file. """
    shifts = read_csv(path_to_shifts_file)
    # shall be precisely seven lines; shifts for rear left, rear right, left, right, top, bottom curtains
# [calculated from 296_Cd_lines_setup1 file] + value for symmetrical shift for entire rear panels
pixelsize = get_pixel_size()
correct_element_one_stripe("BackDetectorLeft", pixelsize, shifts[0], ws_to_correct)
correct_element_one_stripe("BackDetectorRight", pixelsize, shifts[1], ws_to_correct)
correct_element_one_stripe("CurtainLeft", pixelsize, shifts[2], ws_to_correct)
correct_element_one_stripe("CurtainRight", pixelsize, shifts[3], ws_to_correct)
correct_element_one_stripe("CurtainTop", pixelsize, shifts[4], ws_to_correct)
correct_element_one_stripe("CurtainBottom", pixelsize, shifts[5], ws_to_correct)
move_rear_panels(shifts[6][0], pixelsize, ws_to_correct)
correction_based_on_experiment(ws_to_correct)
return
##############################################################################
def correct_element_one_stripe(panel, pixelsize, shift, ws):
""" Technical for CorrectionTubesShift.
Sutable for one Cd stripe correction and for the stripes on BorAl mask on left curtain """
eightpack = ['eight_pack1', 'eight_pack2', 'eight_pack3', 'eight_pack4', 'eight_pack5']
tube = ['tube1', 'tube2', 'tube3', 'tube4', 'tube5', 'tube6', 'tube7', 'tube8']
i = 0
for ei_pack, t_tube in product(eightpack, tube):
if (panel == "BackDetectorLeft" or panel == "CurtainLeft"):
direction = 1.0
MoveInstrumentComponent(ws, panel + '/' + ei_pack + '/' + t_tube, X=0, Y=-float(shift[i])*pixelsize*direction, Z=0)
if (panel == "BackDetectorRight" or panel == "CurtainRight"):
direction = -1.0
MoveInstrumentComponent(ws, panel + '/' + ei_pack + '/' + t_tube, X=0, Y=-float(shift[i])*pixelsize*direction, Z=0)
if (panel == "CurtainBottom"):
direction = 1.0
MoveInstrumentComponent(ws, panel + '/' + ei_pack + '/' + t_tube, X=-float(shift[i])*pixelsize*direction, Y=0, Z=0)
if (panel == "CurtainTop"):
direction = -1.0
MoveInstrumentComponent(ws, panel + '/' + ei_pack + '/' + t_tube, X=-float(shift[i])*pixelsize*direction, Y=0, Z=0)
i = i + 1
return ws
##############################################################################
def move_rear_panels(shift, pixelsize, ws):
""" Technical for CorrectionTubesShift """
panel = "BackDetectorLeft"
direction = 1.0
MoveInstrumentComponent(ws, panel, X=0, Y=-float(shift)*pixelsize*direction, Z=0)
panel = "BackDetectorRight"
direction = -1.0
MoveInstrumentComponent(ws, panel, X=0, Y=-float(shift)*pixelsize*direction, Z=0)
return ws
##############################################################################
def correction_based_on_experiment(ws_to_correct):
""" The function to move curtains, based on fits/analysis of a massive set of AgBeh and liquid crystals data
    collected on 6 Oct 2016. Previous laser tracker data had not picked up these imperfections."""
MoveInstrumentComponent(ws_to_correct, 'CurtainLeft', X=-5.3/1000, Y=0, Z=13.0/1000)
MoveInstrumentComponent(ws_to_correct, 'CurtainRight', X=5.5/1000, Y=0, Z=17.0/1000)
MoveInstrumentComponent(ws_to_correct, 'CurtainTop', X=0, Y=-4.0/1000, Z=0)
MoveInstrumentComponent(ws_to_correct, 'CurtainBottom', X=0, Y=6.0/1000, Z=0)
MoveInstrumentComponent(ws_to_correct, 'BackDetectorRight', X=0, Y=-2.0/1000, Z=0)
MoveInstrumentComponent(ws_to_correct, 'BackDetectorLeft', X=0, Y=-2.0/1000, Z=0)
return
##############################################################################
""" Final detectors' alignement has been done using laser tracker in January, 2016.
To correct data collected before that, some extra shift hardcoded here, shall be applied """
def det_shift_before_2016(ws_to_correct):
    shift_curtainl = 0.74/1000
    shift_curtainr = 6.92/1000
    shift_curtainu = -7.50/1000
    shift_curtaind = -1.59/1000
    MoveInstrumentComponent(ws_to_correct, 'CurtainLeft', X=shift_curtainl, Y=0, Z=0)
    MoveInstrumentComponent(ws_to_correct, 'CurtainRight', X=shift_curtainr, Y=0, Z=0)
    MoveInstrumentComponent(ws_to_correct, 'CurtainTop', X=0, Y=shift_curtainu, Z=0)
    MoveInstrumentComponent(ws_to_correct, 'CurtainBottom', X=0, Y=shift_curtaind, Z=0)
correction_based_on_experiment(ws_to_correct)
return ws_to_correct
|
gpl-3.0
| 5,492,286,485,411,810,000 | 1,293,535,747,168,455,000 | 42.917949 | 140 | 0.595983 | false |