repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
---|---|---|---|---|---|---|---|---|---|---|
kernsuite-debian/lofar | CEP/Pipeline/recipes/sip/nodes/setupsourcedb.py | 1 | 3286 | # LOFAR IMAGING PIPELINE
#
# setupsourcedb nodes recipe
# Marcel Loose, 2012
# [email protected]
# ------------------------------------------------------------------------------
from subprocess import CalledProcessError
import errno
import os
import tempfile
import shutil
import sys
from lofarpipe.support.lofarnode import LOFARnodeTCP
from lofarpipe.support.utilities import log_time
from lofarpipe.support.pipelinelogging import CatchLog4CPlus
from lofarpipe.support.utilities import catch_segfaults
class setupsourcedb(LOFARnodeTCP):
"""
Create the sourcedb at the supplied location
1. Create output directory if it does not yet exist.
2. Create sourcedb
3. validate performance, cleanup
"""
def run(self, executable, catalogue, skydb, dbtype):
"""
Contains all functionality
"""
with log_time(self.logger):
# ****************************************************************
# 1. Create output directory if it does not yet exist.
skydb_dir = os.path.dirname(skydb)
try:
os.makedirs(skydb_dir)
self.logger.debug("Created output directory %s" % skydb_dir)
except FileExistsError:
pass
# ****************************************************************
# 2. Remove any old sky database
# Create the sourcedb
shutil.rmtree(skydb, ignore_errors=True)
self.logger.info("Creating skymodel: %s" % (skydb))
scratch_dir = tempfile.mkdtemp(suffix=".%s" % (os.path.basename(__file__),))
try:
cmd = [executable,
"in=%s" % catalogue,
"out=%s" % skydb,
"outtype=%s" % dbtype,
"format=<",
"append=false"
]
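# Clarifying note (added; based on common makesourcedb conventions, not
# verified against this LOFAR release): "format=<" asks the tool to read the
# column format from the catalogue file itself, and "append=false" overwrites
# any existing sourcedb instead of extending it.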
with CatchLog4CPlus(
scratch_dir,
self.logger.name + "." + os.path.basename(skydb),
os.path.basename(executable)
) as logger:
catch_segfaults(cmd, scratch_dir, None, logger)
# *****************************************************************
# 3. Validate performance and cleanup temp files
except CalledProcessError as err:
# CalledProcessError isn't properly propagated by IPython.
# Temporary workaround...
self.logger.error(str(err))
return 1
finally:
shutil.rmtree(scratch_dir)
return 0
if __name__ == "__main__":
# If invoked directly, parse command line arguments for logger information
# and pass the rest to the run() method defined above
# --------------------------------------------------------------------------
jobid, jobhost, jobport = sys.argv[1:4]
sys.exit(setupsourcedb(jobid, jobhost, jobport).run_with_stored_arguments())
| gpl-3.0 | -1,931,678,355,029,964,500 | 38.590361 | 88 | 0.460743 | false |
IronLanguages/ironpython3 | Src/StdLib/Lib/test/test_deque.py | 1 | 26044 | from collections import deque
import unittest
from test import support, seq_tests
import gc
import weakref
import copy
import pickle
from io import StringIO
import random
import struct
import sys
BIG = 100000
def fail():
raise SyntaxError
yield 1
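# (added comment) the unreachable yield makes fail() a generator function, so
# calling it returns a generator whose first next() raises SyntaxError; it is
# consumed by d.extendleft in test_extendleft below.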
class BadCmp:
def __eq__(self, other):
raise RuntimeError
class MutateCmp:
def __init__(self, deque, result):
self.deque = deque
self.result = result
def __eq__(self, other):
self.deque.clear()
return self.result
class TestBasic(unittest.TestCase):
def test_basics(self):
d = deque(range(-5125, -5000))
d.__init__(range(200))
for i in range(200, 400):
d.append(i)
for i in reversed(range(-200, 0)):
d.appendleft(i)
self.assertEqual(list(d), list(range(-200, 400)))
self.assertEqual(len(d), 600)
left = [d.popleft() for i in range(250)]
self.assertEqual(left, list(range(-200, 50)))
self.assertEqual(list(d), list(range(50, 400)))
right = [d.pop() for i in range(250)]
right.reverse()
self.assertEqual(right, list(range(150, 400)))
self.assertEqual(list(d), list(range(50, 150)))
def test_maxlen(self):
self.assertRaises(ValueError, deque, 'abc', -1)
self.assertRaises(ValueError, deque, 'abc', -2)
it = iter(range(10))
d = deque(it, maxlen=3)
self.assertEqual(list(it), [])
self.assertEqual(repr(d), 'deque([7, 8, 9], maxlen=3)')
self.assertEqual(list(d), [7, 8, 9])
self.assertEqual(d, deque(range(10), 3))
d.append(10)
self.assertEqual(list(d), [8, 9, 10])
d.appendleft(7)
self.assertEqual(list(d), [7, 8, 9])
d.extend([10, 11])
self.assertEqual(list(d), [9, 10, 11])
d.extendleft([8, 7])
self.assertEqual(list(d), [7, 8, 9])
d = deque(range(200), maxlen=10)
d.append(d)
support.unlink(support.TESTFN)
fo = open(support.TESTFN, "w")
try:
fo.write(str(d))
fo.close()
fo = open(support.TESTFN, "r")
self.assertEqual(fo.read(), repr(d))
finally:
fo.close()
support.unlink(support.TESTFN)
d = deque(range(10), maxlen=None)
self.assertEqual(repr(d), 'deque([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])')
fo = open(support.TESTFN, "w")
try:
fo.write(str(d))
fo.close()
fo = open(support.TESTFN, "r")
self.assertEqual(fo.read(), repr(d))
finally:
fo.close()
support.unlink(support.TESTFN)
def test_maxlen_zero(self):
it = iter(range(100))
deque(it, maxlen=0)
self.assertEqual(list(it), [])
it = iter(range(100))
d = deque(maxlen=0)
d.extend(it)
self.assertEqual(list(it), [])
it = iter(range(100))
d = deque(maxlen=0)
d.extendleft(it)
self.assertEqual(list(it), [])
def test_maxlen_attribute(self):
self.assertEqual(deque().maxlen, None)
self.assertEqual(deque('abc').maxlen, None)
self.assertEqual(deque('abc', maxlen=4).maxlen, 4)
self.assertEqual(deque('abc', maxlen=2).maxlen, 2)
self.assertEqual(deque('abc', maxlen=0).maxlen, 0)
with self.assertRaises(AttributeError):
d = deque('abc')
d.maxlen = 10
def test_count(self):
for s in ('', 'abracadabra', 'simsalabim'*500+'abc'):
s = list(s)
d = deque(s)
for letter in 'abcdefghijklmnopqrstuvwxyz':
self.assertEqual(s.count(letter), d.count(letter), (s, d, letter))
self.assertRaises(TypeError, d.count) # too few args
self.assertRaises(TypeError, d.count, 1, 2) # too many args
class BadCompare:
def __eq__(self, other):
raise ArithmeticError
d = deque([1, 2, BadCompare(), 3])
self.assertRaises(ArithmeticError, d.count, 2)
d = deque([1, 2, 3])
self.assertRaises(ArithmeticError, d.count, BadCompare())
class MutatingCompare:
def __eq__(self, other):
self.d.pop()
return True
m = MutatingCompare()
d = deque([1, 2, 3, m, 4, 5])
m.d = d
self.assertRaises(RuntimeError, d.count, 3)
# test issue11004
# block advance failed after rotation aligned elements on right side of block
d = deque([None]*16)
for i in range(len(d)):
d.rotate(-1)
d.rotate(1)
self.assertEqual(d.count(1), 0)
self.assertEqual(d.count(None), 16)
def test_comparisons(self):
d = deque('xabc'); d.popleft()
for e in [d, deque('abc'), deque('ab'), deque(), list(d)]:
self.assertEqual(d==e, type(d)==type(e) and list(d)==list(e))
self.assertEqual(d!=e, not(type(d)==type(e) and list(d)==list(e)))
args = map(deque, ('', 'a', 'b', 'ab', 'ba', 'abc', 'xba', 'xabc', 'cba'))
for x in args:
for y in args:
self.assertEqual(x == y, list(x) == list(y), (x,y))
self.assertEqual(x != y, list(x) != list(y), (x,y))
self.assertEqual(x < y, list(x) < list(y), (x,y))
self.assertEqual(x <= y, list(x) <= list(y), (x,y))
self.assertEqual(x > y, list(x) > list(y), (x,y))
self.assertEqual(x >= y, list(x) >= list(y), (x,y))
def test_extend(self):
d = deque('a')
self.assertRaises(TypeError, d.extend, 1)
d.extend('bcd')
self.assertEqual(list(d), list('abcd'))
d.extend(d)
self.assertEqual(list(d), list('abcdabcd'))
def test_iadd(self):
d = deque('a')
d += 'bcd'
self.assertEqual(list(d), list('abcd'))
d += d
self.assertEqual(list(d), list('abcdabcd'))
def test_extendleft(self):
d = deque('a')
self.assertRaises(TypeError, d.extendleft, 1)
d.extendleft('bcd')
self.assertEqual(list(d), list(reversed('abcd')))
d.extendleft(d)
self.assertEqual(list(d), list('abcddcba'))
d = deque()
d.extendleft(range(1000))
self.assertEqual(list(d), list(reversed(range(1000))))
self.assertRaises(SyntaxError, d.extendleft, fail())
def test_getitem(self):
n = 200
d = deque(range(n))
l = list(range(n))
for i in range(n):
d.popleft()
l.pop(0)
if random.random() < 0.5:
d.append(i)
l.append(i)
for j in range(1-len(l), len(l)):
assert d[j] == l[j]
d = deque('superman')
self.assertEqual(d[0], 's')
self.assertEqual(d[-1], 'n')
d = deque()
self.assertRaises(IndexError, d.__getitem__, 0)
self.assertRaises(IndexError, d.__getitem__, -1)
def test_setitem(self):
n = 200
d = deque(range(n))
for i in range(n):
d[i] = 10 * i
self.assertEqual(list(d), [10*i for i in range(n)])
l = list(d)
for i in range(1-n, 0, -1):
d[i] = 7*i
l[i] = 7*i
self.assertEqual(list(d), l)
def test_delitem(self):
n = 500 # O(n**2) test, don't make this too big
d = deque(range(n))
self.assertRaises(IndexError, d.__delitem__, -n-1)
self.assertRaises(IndexError, d.__delitem__, n)
for i in range(n):
self.assertEqual(len(d), n-i)
j = random.randrange(-len(d), len(d))
val = d[j]
self.assertIn(val, d)
del d[j]
self.assertNotIn(val, d)
self.assertEqual(len(d), 0)
def test_reverse(self):
n = 500 # O(n**2) test, don't make this too big
data = [random.random() for i in range(n)]
for i in range(n):
d = deque(data[:i])
r = d.reverse()
self.assertEqual(list(d), list(reversed(data[:i])))
self.assertIs(r, None)
d.reverse()
self.assertEqual(list(d), data[:i])
self.assertRaises(TypeError, d.reverse, 1) # Arity is zero
def test_rotate(self):
s = tuple('abcde')
n = len(s)
d = deque(s)
d.rotate(1) # verify rot(1)
self.assertEqual(''.join(d), 'eabcd')
d = deque(s)
d.rotate(-1) # verify rot(-1)
self.assertEqual(''.join(d), 'bcdea')
d.rotate() # check default to 1
self.assertEqual(tuple(d), s)
for i in range(n*3):
d = deque(s)
e = deque(d)
d.rotate(i) # check vs. rot(1) n times
for j in range(i):
e.rotate(1)
self.assertEqual(tuple(d), tuple(e))
d.rotate(-i) # check that it works in reverse
self.assertEqual(tuple(d), s)
e.rotate(n-i) # check that it wraps forward
self.assertEqual(tuple(e), s)
for i in range(n*3):
d = deque(s)
e = deque(d)
d.rotate(-i)
for j in range(i):
e.rotate(-1) # check vs. rot(-1) n times
self.assertEqual(tuple(d), tuple(e))
d.rotate(i) # check that it works in reverse
self.assertEqual(tuple(d), s)
e.rotate(i-n) # check that it wraps back around
self.assertEqual(tuple(e), s)
d = deque(s)
e = deque(s)
e.rotate(BIG+17) # verify on long series of rotates
dr = d.rotate
for i in range(BIG+17):
dr()
self.assertEqual(tuple(d), tuple(e))
self.assertRaises(TypeError, d.rotate, 'x') # Wrong arg type
self.assertRaises(TypeError, d.rotate, 1, 10) # Too many args
d = deque()
d.rotate() # rotate an empty deque
self.assertEqual(d, deque())
def test_len(self):
d = deque('ab')
self.assertEqual(len(d), 2)
d.popleft()
self.assertEqual(len(d), 1)
d.pop()
self.assertEqual(len(d), 0)
self.assertRaises(IndexError, d.pop)
self.assertEqual(len(d), 0)
d.append('c')
self.assertEqual(len(d), 1)
d.appendleft('d')
self.assertEqual(len(d), 2)
d.clear()
self.assertEqual(len(d), 0)
def test_underflow(self):
d = deque()
self.assertRaises(IndexError, d.pop)
self.assertRaises(IndexError, d.popleft)
def test_clear(self):
d = deque(range(100))
self.assertEqual(len(d), 100)
d.clear()
self.assertEqual(len(d), 0)
self.assertEqual(list(d), [])
d.clear() # clear an empty deque
self.assertEqual(list(d), [])
def test_remove(self):
d = deque('abcdefghcij')
d.remove('c')
self.assertEqual(d, deque('abdefghcij'))
d.remove('c')
self.assertEqual(d, deque('abdefghij'))
self.assertRaises(ValueError, d.remove, 'c')
self.assertEqual(d, deque('abdefghij'))
# Handle comparison errors
d = deque(['a', 'b', BadCmp(), 'c'])
e = deque(d)
self.assertRaises(RuntimeError, d.remove, 'c')
for x, y in zip(d, e):
# verify that original order and values are retained.
self.assertTrue(x is y)
# Handle evil mutator
for match in (True, False):
d = deque(['ab'])
d.extend([MutateCmp(d, match), 'c'])
self.assertRaises(IndexError, d.remove, 'c')
self.assertEqual(d, deque())
def test_repr(self):
d = deque(range(200))
e = eval(repr(d))
self.assertEqual(list(d), list(e))
d.append(d)
self.assertIn('...', repr(d))
def test_print(self):
d = deque(range(200))
d.append(d)
try:
support.unlink(support.TESTFN)
fo = open(support.TESTFN, "w")
print(d, file=fo, end='')
fo.close()
fo = open(support.TESTFN, "r")
self.assertEqual(fo.read(), repr(d))
finally:
fo.close()
support.unlink(support.TESTFN)
def test_init(self):
self.assertRaises(TypeError, deque, 'abc', 2, 3)
self.assertRaises(TypeError, deque, 1)
def test_hash(self):
self.assertRaises(TypeError, hash, deque('abc'))
def test_long_steadystate_queue_popleft(self):
for size in (0, 1, 2, 100, 1000):
d = deque(range(size))
append, pop = d.append, d.popleft
for i in range(size, BIG):
append(i)
x = pop()
if x != i - size:
self.assertEqual(x, i-size)
self.assertEqual(list(d), list(range(BIG-size, BIG)))
def test_long_steadystate_queue_popright(self):
for size in (0, 1, 2, 100, 1000):
d = deque(reversed(range(size)))
append, pop = d.appendleft, d.pop
for i in range(size, BIG):
append(i)
x = pop()
if x != i - size:
self.assertEqual(x, i-size)
self.assertEqual(list(reversed(list(d))),
list(range(BIG-size, BIG)))
def test_big_queue_popleft(self):
d = deque()
append, pop = d.append, d.popleft
for i in range(BIG):
append(i)
for i in range(BIG):
x = pop()
if x != i:
self.assertEqual(x, i)
def test_big_queue_popright(self):
d = deque()
append, pop = d.appendleft, d.pop
for i in range(BIG):
append(i)
for i in range(BIG):
x = pop()
if x != i:
self.assertEqual(x, i)
def test_big_stack_right(self):
d = deque()
append, pop = d.append, d.pop
for i in range(BIG):
append(i)
for i in reversed(range(BIG)):
x = pop()
if x != i:
self.assertEqual(x, i)
self.assertEqual(len(d), 0)
def test_big_stack_left(self):
d = deque()
append, pop = d.appendleft, d.popleft
for i in range(BIG):
append(i)
for i in reversed(range(BIG)):
x = pop()
if x != i:
self.assertEqual(x, i)
self.assertEqual(len(d), 0)
def test_roundtrip_iter_init(self):
d = deque(range(200))
e = deque(d)
self.assertNotEqual(id(d), id(e))
self.assertEqual(list(d), list(e))
def test_pickle(self):
d = deque(range(200))
for i in range(pickle.HIGHEST_PROTOCOL + 1):
s = pickle.dumps(d, i)
e = pickle.loads(s)
self.assertNotEqual(id(d), id(e))
self.assertEqual(list(d), list(e))
## def test_pickle_recursive(self):
## d = deque('abc')
## d.append(d)
## for i in range(pickle.HIGHEST_PROTOCOL + 1):
## e = pickle.loads(pickle.dumps(d, i))
## self.assertNotEqual(id(d), id(e))
## self.assertEqual(id(e), id(e[-1]))
def test_iterator_pickle(self):
data = deque(range(200))
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
it = itorg = iter(data)
d = pickle.dumps(it, proto)
it = pickle.loads(d)
self.assertEqual(type(itorg), type(it))
self.assertEqual(list(it), list(data))
it = pickle.loads(d)
next(it)
d = pickle.dumps(it, proto)
self.assertEqual(list(it), list(data)[1:])
def test_deepcopy(self):
mut = [10]
d = deque([mut])
e = copy.deepcopy(d)
self.assertEqual(list(d), list(e))
mut[0] = 11
self.assertNotEqual(id(d), id(e))
self.assertNotEqual(list(d), list(e))
def test_copy(self):
mut = [10]
d = deque([mut])
e = copy.copy(d)
self.assertEqual(list(d), list(e))
mut[0] = 11
self.assertNotEqual(id(d), id(e))
self.assertEqual(list(d), list(e))
def test_reversed(self):
for s in ('abcd', range(2000)):
self.assertEqual(list(reversed(deque(s))), list(reversed(s)))
def test_gc_doesnt_blowup(self):
import gc
# This used to assert-fail in deque_traverse() under a debug
# build, or run wild with a NULL pointer in a release build.
d = deque()
for i in range(100):
d.append(1)
gc.collect()
@unittest.skipIf(sys.implementation.name == "ironpython", "https://github.com/IronLanguages/ironpython3/issues/544")
def test_container_iterator(self):
# Bug #3680: tp_traverse was not implemented for deque iterator objects
class C(object):
pass
for i in range(2):
obj = C()
ref = weakref.ref(obj)
if i == 0:
container = deque([obj, 1])
else:
container = reversed(deque([obj, 1]))
obj.x = iter(container)
del obj, container
gc.collect()
self.assertTrue(ref() is None, "Cycle was not collected")
check_sizeof = support.check_sizeof
@support.cpython_only
def test_sizeof(self):
BLOCKLEN = 62
basesize = support.calcobjsize('2P4nlP')
blocksize = struct.calcsize('2P%dP' % BLOCKLEN)
self.assertEqual(object.__sizeof__(deque()), basesize)
check = self.check_sizeof
check(deque(), basesize + blocksize)
check(deque('a'), basesize + blocksize)
check(deque('a' * (BLOCKLEN - 1)), basesize + blocksize)
check(deque('a' * BLOCKLEN), basesize + 2 * blocksize)
check(deque('a' * (42 * BLOCKLEN)), basesize + 43 * blocksize)
class TestVariousIteratorArgs(unittest.TestCase):
def test_constructor(self):
for s in ("123", "", range(1000), ('do', 1.2), range(2000,2200,5)):
for g in (seq_tests.Sequence, seq_tests.IterFunc,
seq_tests.IterGen, seq_tests.IterFuncStop,
seq_tests.itermulti, seq_tests.iterfunc):
self.assertEqual(list(deque(g(s))), list(g(s)))
self.assertRaises(TypeError, deque, seq_tests.IterNextOnly(s))
self.assertRaises(TypeError, deque, seq_tests.IterNoNext(s))
self.assertRaises(ZeroDivisionError, deque, seq_tests.IterGenExc(s))
def test_iter_with_altered_data(self):
d = deque('abcdefg')
it = iter(d)
d.pop()
self.assertRaises(RuntimeError, next, it)
def test_runtime_error_on_empty_deque(self):
d = deque()
it = iter(d)
d.append(10)
self.assertRaises(RuntimeError, next, it)
class Deque(deque):
pass
class DequeWithBadIter(deque):
def __iter__(self):
raise TypeError
class TestSubclass(unittest.TestCase):
def test_basics(self):
d = Deque(range(25))
d.__init__(range(200))
for i in range(200, 400):
d.append(i)
for i in reversed(range(-200, 0)):
d.appendleft(i)
self.assertEqual(list(d), list(range(-200, 400)))
self.assertEqual(len(d), 600)
left = [d.popleft() for i in range(250)]
self.assertEqual(left, list(range(-200, 50)))
self.assertEqual(list(d), list(range(50, 400)))
right = [d.pop() for i in range(250)]
right.reverse()
self.assertEqual(right, list(range(150, 400)))
self.assertEqual(list(d), list(range(50, 150)))
d.clear()
self.assertEqual(len(d), 0)
def test_copy_pickle(self):
d = Deque('abc')
e = d.__copy__()
self.assertEqual(type(d), type(e))
self.assertEqual(list(d), list(e))
e = Deque(d)
self.assertEqual(type(d), type(e))
self.assertEqual(list(d), list(e))
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
s = pickle.dumps(d, proto)
e = pickle.loads(s)
self.assertNotEqual(id(d), id(e))
self.assertEqual(type(d), type(e))
self.assertEqual(list(d), list(e))
d = Deque('abcde', maxlen=4)
e = d.__copy__()
self.assertEqual(type(d), type(e))
self.assertEqual(list(d), list(e))
e = Deque(d)
self.assertEqual(type(d), type(e))
self.assertEqual(list(d), list(e))
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
s = pickle.dumps(d, proto)
e = pickle.loads(s)
self.assertNotEqual(id(d), id(e))
self.assertEqual(type(d), type(e))
self.assertEqual(list(d), list(e))
## def test_pickle(self):
## d = Deque('abc')
## d.append(d)
##
## e = pickle.loads(pickle.dumps(d))
## self.assertNotEqual(id(d), id(e))
## self.assertEqual(type(d), type(e))
## dd = d.pop()
## ee = e.pop()
## self.assertEqual(id(e), id(ee))
## self.assertEqual(d, e)
##
## d.x = d
## e = pickle.loads(pickle.dumps(d))
## self.assertEqual(id(e), id(e.x))
##
## d = DequeWithBadIter('abc')
## self.assertRaises(TypeError, pickle.dumps, d)
def test_weakref(self):
d = deque('gallahad')
p = weakref.proxy(d)
self.assertEqual(str(p), str(d))
d = None
gc.collect() # required by IronPython
self.assertRaises(ReferenceError, str, p)
def test_strange_subclass(self):
class X(deque):
def __iter__(self):
return iter([])
d1 = X([1,2,3])
d2 = X([4,5,6])
d1 == d2 # not clear if this is supposed to be True or False,
# but it used to give a SystemError
class SubclassWithKwargs(deque):
def __init__(self, newarg=1):
deque.__init__(self)
class TestSubclassWithKwargs(unittest.TestCase):
def test_subclass_with_kwargs(self):
# SF bug #1486663 -- this used to erroneously raise a TypeError
SubclassWithKwargs(newarg=1)
#==============================================================================
libreftest = """
Example from the Library Reference: Doc/lib/libcollections.tex
>>> from collections import deque
>>> d = deque('ghi') # make a new deque with three items
>>> for elem in d: # iterate over the deque's elements
... print(elem.upper())
G
H
I
>>> d.append('j') # add a new entry to the right side
>>> d.appendleft('f') # add a new entry to the left side
>>> d # show the representation of the deque
deque(['f', 'g', 'h', 'i', 'j'])
>>> d.pop() # return and remove the rightmost item
'j'
>>> d.popleft() # return and remove the leftmost item
'f'
>>> list(d) # list the contents of the deque
['g', 'h', 'i']
>>> d[0] # peek at leftmost item
'g'
>>> d[-1] # peek at rightmost item
'i'
>>> list(reversed(d)) # list the contents of a deque in reverse
['i', 'h', 'g']
>>> 'h' in d # search the deque
True
>>> d.extend('jkl') # add multiple elements at once
>>> d
deque(['g', 'h', 'i', 'j', 'k', 'l'])
>>> d.rotate(1) # right rotation
>>> d
deque(['l', 'g', 'h', 'i', 'j', 'k'])
>>> d.rotate(-1) # left rotation
>>> d
deque(['g', 'h', 'i', 'j', 'k', 'l'])
>>> deque(reversed(d)) # make a new deque in reverse order
deque(['l', 'k', 'j', 'i', 'h', 'g'])
>>> d.clear() # empty the deque
>>> d.pop() # cannot pop from an empty deque
Traceback (most recent call last):
File "<pyshell#6>", line 1, in -toplevel-
d.pop()
IndexError: pop from an empty deque
>>> d.extendleft('abc') # extendleft() reverses the input order
>>> d
deque(['c', 'b', 'a'])
>>> def delete_nth(d, n):
... d.rotate(-n)
... d.popleft()
... d.rotate(n)
...
>>> d = deque('abcdef')
>>> delete_nth(d, 2) # remove the entry at d[2]
>>> d
deque(['a', 'b', 'd', 'e', 'f'])
>>> def roundrobin(*iterables):
... pending = deque(iter(i) for i in iterables)
... while pending:
... task = pending.popleft()
... try:
... yield next(task)
... except StopIteration:
... continue
... pending.append(task)
...
>>> for value in roundrobin('abc', 'd', 'efgh'):
... print(value)
...
a
d
e
b
f
c
g
h
>>> def maketree(iterable):
... d = deque(iterable)
... while len(d) > 1:
... pair = [d.popleft(), d.popleft()]
... d.append(pair)
... return list(d)
...
>>> print(maketree('abcdefgh'))
[[[['a', 'b'], ['c', 'd']], [['e', 'f'], ['g', 'h']]]]
"""
#==============================================================================
__test__ = {'libreftest' : libreftest}
def test_main(verbose=None):
import sys
test_classes = (
TestBasic,
TestVariousIteratorArgs,
TestSubclass,
TestSubclassWithKwargs,
)
support.run_unittest(*test_classes)
# verify reference counting
if verbose and hasattr(sys, "gettotalrefcount"):
import gc
counts = [None] * 5
for i in range(len(counts)):
support.run_unittest(*test_classes)
gc.collect()
counts[i] = sys.gettotalrefcount()
print(counts)
# doctests
from test import test_deque
support.run_doctest(test_deque, verbose)
if __name__ == "__main__":
test_main(verbose=True)
| apache-2.0 | -6,558,432,362,852,502,000 | 30.6452 | 120 | 0.513669 | false |
Princessgladys/googleresourcefinder | lib/feedlib/geo.py | 1 | 2777 | # Copyright 2009-2010 by Ka-Ping Yee
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Geographical functions. All measurements are in metres."""
from math import asin, cos, pi, sin, sqrt
EARTH_RADIUS = 6371009
def hav(theta):
"""Computes the haversine of an angle given in radians."""
return sin(theta/2)**2
def central_angle((phi_s, lam_s), (phi_f, lam_f)):
"""Returns the central angle between two points on a sphere, whose
locations are given as (latitude, longitude) pairs in radians."""
d_phi = phi_s - phi_f
d_lam = lam_s - lam_f
return 2*asin(sqrt(hav(d_phi) + cos(phi_s)*cos(phi_f)*hav(d_lam)))
def distance(start, finish):
"""Approximates the distance in metres between two points on the Earth,
which are given as {'lat':y, 'lon':x} objects in degrees."""
start_rad = (start['lat']*pi/180, start['lon']*pi/180)
finish_rad = (finish['lat']*pi/180, finish['lon']*pi/180)
return central_angle(start_rad, finish_rad)*EARTH_RADIUS
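# Illustrative usage (added example, not part of the original module): two
# points one degree of longitude apart on the equator are separated by
# EARTH_RADIUS * pi / 180, roughly 111195 metres:
#
# distance({'lat': 0, 'lon': 0}, {'lat': 0, 'lon': 1}) # ~111195.1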
def point_inside_polygon(point, poly):
"""Returns true if the given point is inside the given polygon.
point is given as an {'lat':y, 'lon':x} object in degrees
poly is given as a list of (longitude, latitude) tuples. The last vertex
is assumed to be the same as the first vertex.
TODO(shakusa): poly should probably be expressed in a less-confusing way"""
lat = point['lat']
lon = point['lon']
n = len(poly)
inside = False
# Count the parity of intersections of a horizontal eastward ray starting
# at (lon, lat). If even, point is outside, odd, point is inside
lon1, lat1 = poly[0]
for i in range(n + 1):
lon2, lat2 = poly[i % n]
# if our ray falls within the vertical coords of the edge
if min(lat1, lat2) < lat <= max(lat1, lat2):
# if our (eastward) ray starts before the edge and the edge is not
# horizontal
if lon <= max(lon1, lon2) and lat1 != lat2:
lon_inters = lon1 + (lat - lat1) * (lon2 - lon1) / (lat2 - lat1)
# if the intersection is beyond the start of the ray,
# we've crossed it
if lon <= lon_inters:
inside = not inside
lon1, lat1 = lon2, lat2
return inside
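# Illustrative usage (added example; vertices are (lon, lat) pairs as the
# docstring requires): the unit square contains its centre but not a point
# east of it.
#
# square = [(0, 0), (1, 0), (1, 1), (0, 1)]
# point_inside_polygon({'lat': 0.5, 'lon': 0.5}, square) # True
# point_inside_polygon({'lat': 0.5, 'lon': 1.5}, square) # False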
| apache-2.0 | 188,185,627,319,149,760 | 41.075758 | 80 | 0.649982 | false |
voxpupuli/puppetboard | puppetboard/utils.py | 1 | 3597 | from __future__ import absolute_import
from __future__ import unicode_literals
import json
import logging
import os.path
from distutils.util import strtobool
from flask import abort, request, url_for
from jinja2.utils import contextfunction
from pypuppetdb.errors import EmptyResponseError
from requests.exceptions import ConnectionError, HTTPError
log = logging.getLogger(__name__)
@contextfunction
def url_static_offline(context, value):
request_parts = os.path.split(os.path.dirname(context.name))
static_path = '/'.join(request_parts[1:])
return url_for('static', filename="%s/%s" % (static_path, value))
def url_for_field(field, value):
args = request.view_args.copy()
args.update(request.args.copy())
args[field] = value
return url_for(request.endpoint, **args)
def jsonprint(value):
return json.dumps(value, indent=2, separators=(',', ': '))
def get_db_version(puppetdb):
'''
Get the version of PuppetDB. From version 3.2 the query
interface for mbeans is slightly different.
'''
ver = ()
try:
version = puppetdb.current_version()
(major, minor, build) = [int(x) for x in version.split('.')]
ver = (major, minor, build)
log.info("PuppetDB Version %d.%d.%d" % (major, minor, build))
except ValueError as e:
log.error("Unable to determine version from string: '%s'" % version)
ver = (4, 2, 0)
except HTTPError as e:
log.error(str(e))
except ConnectionError as e:
log.error(str(e))
except EmptyResponseError as e:
log.error(str(e))
return ver
def formatvalue(value):
if isinstance(value, str):
return value
elif isinstance(value, list):
return ", ".join(map(formatvalue, value))
elif isinstance(value, dict):
ret = ""
for k in value:
ret += k + " => " + formatvalue(value[k]) + ",<br/>"
return ret
else:
return str(value)
def prettyprint(value):
html = '<table class="ui basic fixed sortable table"><thead><tr>'
# Get keys
for k in value[0]:
html += "<th>" + k + "</th>"
html += "</tr></thead><tbody>"
for e in value:
html += "<tr>"
for k in e:
html += "<td>" + formatvalue(e[k]) + "</td>"
html += "</tr>"
html += "</tbody></table>"
return html
def get_or_abort(func, *args, **kwargs):
"""Execute the function with its arguments and handle the possible
errors that might occur.
In this case, if we get an exception we simply abort the request.
"""
try:
return func(*args, **kwargs)
except HTTPError as e:
log.error(str(e))
abort(e.response.status_code)
except ConnectionError as e:
log.error(str(e))
abort(500)
except EmptyResponseError as e:
log.error(str(e))
abort(204)
except Exception as e:
log.error(str(e))
abort(500)
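# Illustrative call shape (added; the wrapped function and its keyword
# argument are only an example of how callers typically use this helper):
#
# nodes = get_or_abort(puppetdb.nodes, with_status=True)
# # -> aborts the Flask request with a matching status on PuppetDB errors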
def yield_or_stop(generator):
"""Similar in intent to get_or_abort this helper will iterate over our
generators and handle certain errors.
Since this is also used in streaming responses where we can't just abort
a request we raise StopIteration.
"""
while True:
try:
yield next(generator)
except (EmptyResponseError, ConnectionError, HTTPError, StopIteration):
return
def is_bool(b):
try:
bool(strtobool(b))
return True
except ValueError:
return False
except TypeError:
return False
except AttributeError:
return False
| apache-2.0 | -6,206,864,817,678,152,000 | 25.065217 | 79 | 0.615791 | false |
CrazyBBer/Python-Learn-Sample | Modules/modules_BuiltIn.py | 1 | 1445 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'eamon'
'Modules Built-In'
from datetime import datetime
now = datetime.now()
print(now)
print(type(now))
dt=datetime(2015,10,5,20,1,20)
print(dt)
print(dt.timestamp())
t=1444046480.0
print(datetime.fromtimestamp(t))
print(datetime.utcfromtimestamp(t))
cday=datetime.strptime('2015-10-05 20:07:59','%Y-%m-%d %H:%M:%S')
print(cday)
now=datetime.now()
print(now.strftime('%a,%b,%d %H:%M'))
from datetime import timedelta
now = datetime.now()
print(now)
# datetime.datetime(2015,10,05,20,12,58,10054)
print(now+timedelta(hours=10))
from datetime import timezone
tz_utc_8 = timezone(timedelta(hours=8))
now= datetime.now()
print(now)
dt=now.replace(tzinfo=tz_utc_8)
print(dt)
print('------------------------')
utc_dt=datetime.utcnow().replace(tzinfo=timezone.utc)
print(utc_dt)
bjtm=utc_dt.astimezone(timezone(timedelta(hours=8)))
print(bjtm)
tokyo_tm=bjtm.astimezone(timezone(timedelta(hours=9)))
print('------------------------')
print(tokyo_tm)
import re
def to_timestamp(dt_str,tz_str):
tz_fmt_str='^UTC([+-]\d{1,2})\:\d{2}$'
tm_fmt=re.match(tz_fmt_str,tz_str)
if tm_fmt:
tz_hours=int(tm_fmt.group(1))
cur_datetime=datetime.strptime(dt_str,'%Y-%m-%d %H:%M:%S')
return cur_datetime.replace(tzinfo=timezone(timedelta(hours=tz_hours))).timestamp()
t1 = to_timestamp('2015-6-1 08:10:30', 'UTC+7:00')
assert t1 == 1433121030.0, t1
print('Pass')
| mit | -5,883,119,671,976,176,000 | 15.802326 | 85 | 0.666436 | false |
nansencenter/sea_ice_drift | sea_ice_drift/seaicedrift.py | 1 | 3709 | # Name: seaicedrift.py
# Purpose: Container of SeaIceDrift class
# Authors: Anton Korosov, Stefan Muckenhuber
# Created: 21.09.2016
# Copyright: (c) NERSC 2016
# Licence:
# This file is part of SeaIceDrift.
# SeaIceDrift is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
# http://www.gnu.org/licenses/gpl-3.0.html
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
from __future__ import absolute_import
import numpy as np
from sea_ice_drift.lib import get_n, get_drift_vectors
from sea_ice_drift.ftlib import feature_tracking
from sea_ice_drift.pmlib import pattern_matching
class SeaIceDrift(object):
''' Retrieve Sea Ice Drift using Feature Tracking and Pattern Matching'''
def __init__(self, filename1, filename2, **kwargs):
''' Initialize from two file names:
Open files with Nansat
Read data from sigma0_HV or other band and convert to UInt8
Parameters
----------
filename1 : str, file name of the first Sentinel-1 image
filename2 : str, file name of the second Sentinel-1 image
'''
self.filename1 = filename1
self.filename2 = filename2
# get Nansat
self.n1 = get_n(self.filename1, **kwargs)
self.n2 = get_n(self.filename2, **kwargs)
def get_drift_FT(self, **kwargs):
''' Get sea ice drift using Feature Tracking
Parameters
----------
**kwargs : parameters for
feature_tracking
get_drift_vectors
Returns
-------
u : 1D vector - eastward ice drift speed
v : 1D vector - northward ice drift speed
lon1 : 1D vector - longitudes of source points
lat1 : 1D vector - latitudes of source points
lon2 : 1D vector - longitudes of destination points
lat2 : 1D vector - latitudes of destination points
'''
x1, y1, x2, y2 = feature_tracking(self.n1, self.n2, **kwargs)
return get_drift_vectors(self.n1, x1, y1,
self.n2, x2, y2, **kwargs)
def get_drift_PM(self, lons, lats, lon1, lat1, lon2, lat2, **kwargs):
''' Get sea ice drift using Pattern Matching
Parameters
----------
lons : 1D vector, longitude of result vectors on image 1
lats : 1D vector, latitude of result vectors on image 1
lon1 : 1D vector, longitude of keypoints on image1
lat1 : 1D vector, latitude of keypoints on image1
lon2 : 1D vector, longitude of keypoints on image2
lat2 : 1D vector, latitude of keypoints on image2
**kwargs : parameters for
feature_tracking
get_drift_vectors
Returns
-------
u : 1D vector, eastward ice drift speed, m/s
v : 1D vector, eastward ice drift speed, m/s
a : 1D vector, angle that gives the highes MCC
r : 1D vector, MCC
h : 1D vector, Hessian of CC matrix and MCC point
lon2_dst : 1D vector, longitude of results on image 2
lat2_dst : 1D vector, latitude of results on image 2
'''
x1, y1 = self.n1.transform_points(lon1, lat1, 1)
x2, y2 = self.n2.transform_points(lon2, lat2, 1)
return pattern_matching(lons, lats, self.n1, x1, y1,
self.n2, x2, y2, **kwargs)
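# Minimal usage sketch (added; the file names are placeholders):
#
# sid = SeaIceDrift('sentinel1_image_1.zip', 'sentinel1_image_2.zip')
# u, v, lon1, lat1, lon2, lat2 = sid.get_drift_FT()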
| gpl-3.0 | -6,663,547,883,015,826,000 | 41.147727 | 77 | 0.605284 | false |
revcoin/revcoin | contrib/pyminer/pyminer.py | 1 | 6434 | #!/usr/bin/python
#
# Copyright (c) 2011 The Bitcoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
pp = pprint.PrettyPrinter(indent=4)
class BitcoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
self.authhdr = "Basic %s" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print "JSON-RPC: no response"
return None
body = resp.read()
resp_obj = json.loads(body)
if resp_obj is None:
print "JSON-RPC: cannot JSON-decode body"
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print "JSON-RPC: no result in object"
return None
return resp_obj['result']
def getblockcount(self):
return self.rpc('getblockcount')
def getwork(self, data=None):
return self.rpc('getwork', data)
def uint32(x):
return x & 0xffffffffL
def bytereverse(x):
return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
(((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))
def bufreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
word = struct.unpack('@I', in_buf[i:i+4])[0]
out_words.append(struct.pack('@I', bytereverse(word)))
return ''.join(out_words)
def wordreverse(in_buf):
out_words = []
for i in range(0, len(in_buf), 4):
out_words.append(in_buf[i:i+4])
out_words.reverse()
return ''.join(out_words)
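# Clarifying note (added): bufreverse swaps the byte order inside each 32-bit
# word (b'\x01\x02\x03\x04' -> b'\x04\x03\x02\x01'), while wordreverse keeps
# each 4-byte word intact and reverses the order of the words themselves.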
class Miner:
def __init__(self, id):
self.id = id
self.max_nonce = MAX_NONCE
def work(self, datastr, targetstr):
# decode work data hex string to binary
static_data = datastr.decode('hex')
static_data = bufreverse(static_data)
# the first 76b of 80b do not change
blk_hdr = static_data[:76]
# decode 256-bit target value
targetbin = targetstr.decode('hex')
targetbin = targetbin[::-1] # byte-swap and dword-swap
targetbin_str = targetbin.encode('hex')
target = long(targetbin_str, 16)
# pre-hash first 76b of block header
static_hash = hashlib.sha256()
static_hash.update(blk_hdr)
for nonce in xrange(self.max_nonce):
# encode 32-bit nonce value
nonce_bin = struct.pack("<I", nonce)
# hash final 4b, the nonce value
hash1_o = static_hash.copy()
hash1_o.update(nonce_bin)
hash1 = hash1_o.digest()
# sha256 hash of sha256 hash
hash_o = hashlib.sha256()
hash_o.update(hash1)
hash = hash_o.digest()
# quick test for winning solution: high 32 bits zero?
if hash[-4:] != '\0\0\0\0':
continue
# convert binary hash to 256-bit Python long
hash = bufreverse(hash)
hash = wordreverse(hash)
hash_str = hash.encode('hex')
l = long(hash_str, 16)
# proof-of-work test: hash < target
if l < target:
print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
return (nonce + 1, nonce_bin)
else:
print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
# return (nonce + 1, nonce_bin)
return (nonce + 1, None)
def submit_work(self, rpc, original_data, nonce_bin):
nonce_bin = bufreverse(nonce_bin)
nonce = nonce_bin.encode('hex')
solution = original_data[:152] + nonce + original_data[160:256]
param_arr = [ solution ]
result = rpc.getwork(param_arr)
print time.asctime(), "--> Upstream RPC result:", result
def iterate(self, rpc):
work = rpc.getwork()
if work is None:
time.sleep(ERR_SLEEP)
return
if 'data' not in work or 'target' not in work:
time.sleep(ERR_SLEEP)
return
time_start = time.time()
(hashes_done, nonce_bin) = self.work(work['data'],
work['target'])
time_end = time.time()
time_diff = time_end - time_start
self.max_nonce = long(
(hashes_done * settings['scantime']) / time_diff)
if self.max_nonce > 0xfffffffaL:
self.max_nonce = 0xfffffffaL
if settings['hashmeter']:
print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
self.id, hashes_done,
(hashes_done / 1000.0) / time_diff)
if nonce_bin is not None:
self.submit_work(rpc, work['data'], nonce_bin)
def loop(self):
rpc = BitcoinRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpass'])
if rpc is None:
return
while True:
self.iterate(rpc)
def miner_thread(id):
miner = Miner(id)
miner.loop()
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: pyminer.py CONFIG-FILE"
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 7572
if 'threads' not in settings:
settings['threads'] = 1
if 'hashmeter' not in settings:
settings['hashmeter'] = 0
if 'scantime' not in settings:
settings['scantime'] = 30L
if 'rpcuser' not in settings or 'rpcpass' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['port'] = int(settings['port'])
settings['threads'] = int(settings['threads'])
settings['hashmeter'] = int(settings['hashmeter'])
settings['scantime'] = long(settings['scantime'])
thr_list = []
for thr_id in range(settings['threads']):
p = Process(target=miner_thread, args=(thr_id,))
p.start()
thr_list.append(p)
time.sleep(1) # stagger threads
print settings['threads'], "mining threads started"
print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
try:
for thr_proc in thr_list:
thr_proc.join()
except KeyboardInterrupt:
pass
print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])
| mit | -6,053,348,634,752,949,000 | 24.531746 | 84 | 0.648896 | false |
google-research/google-research | summae/tokenizer_test.py | 1 | 4540 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding=utf-8
"""Tests for tokenizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import random
import six
from six.moves import range # pylint: disable=redefined-builtin
import tensorflow.compat.v1 as tf
from summae import tokenizer
FLAGS = tf.flags.FLAGS
_TESTDATA = os.path.join(os.path.dirname(__file__), "testdata")
class TokenizerTest(tf.test.TestCase):
def test_encode(self):
self.assertListEqual(
[u"Dude", u" - ", u"that", u"'", u"s", u"so", u"cool", u"."],
tokenizer.encode(u"Dude - that's so cool."))
self.assertListEqual([u"Łukasz", u"est", u"né", u"en", u"1981", u"."],
tokenizer.encode(u"Łukasz est né en 1981."))
self.assertListEqual([u" ", u"Spaces", u"at", u"the", u"ends", u" "],
tokenizer.encode(u" Spaces at the ends "))
self.assertListEqual([u"802", u".", u"11b"], tokenizer.encode(u"802.11b"))
self.assertListEqual([u"two", u". \n", u"lines"],
tokenizer.encode(u"two. \nlines"))
def test_decode(self):
self.assertEqual(
u"Dude - that's so cool.",
tokenizer.decode(
[u"Dude", u" - ", u"that", u"'", u"s", u"so", u"cool", u"."]))
def test_invertibility_on_random_strings(self):
for _ in range(1000):
s = u"".join(six.unichr(random.randint(0, 65535)) for _ in range(10))
self.assertEqual(s, tokenizer.decode(tokenizer.encode(s)))
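# Added note: invertibility also covers the mixed alphanumeric cases above,
# e.g. decode(encode(u"802.11b")) == u"802.11b", because decode only re-inserts
# a space between two adjacent alphanumeric tokens.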
class TestTokenCounts(tf.test.TestCase):
def setUp(self):
super(TestTokenCounts, self).setUp()
self.corpus_path = os.path.join(_TESTDATA, "corpus-*.txt")
self.vocab_path = os.path.join(_TESTDATA, "vocab-*.txt")
def test_corpus_token_counts_split_on_newlines(self):
token_counts = tokenizer.corpus_token_counts(
self.corpus_path, corpus_max_lines=0, split_on_newlines=True)
expected = {
u"'": 2,
u".": 2,
u". ": 1,
u"... ": 1,
u"Groucho": 1,
u"Marx": 1,
u"Mitch": 1,
u"Hedberg": 1,
u"I": 3,
u"in": 2,
u"my": 2,
u"pajamas": 2,
}
self.assertDictContainsSubset(expected, token_counts)
self.assertNotIn(u".\n\n", token_counts)
self.assertNotIn(u"\n", token_counts)
def test_corpus_token_counts_no_split_on_newlines(self):
token_counts = tokenizer.corpus_token_counts(
self.corpus_path, corpus_max_lines=0, split_on_newlines=False)
self.assertDictContainsSubset({u".\n\n": 2, u"\n": 3}, token_counts)
def test_corpus_token_counts_split_with_max_lines(self):
token_counts = tokenizer.corpus_token_counts(
self.corpus_path, corpus_max_lines=5, split_on_newlines=True)
self.assertIn(u"slept", token_counts)
self.assertNotIn(u"Mitch", token_counts)
def test_corpus_token_counts_no_split_with_max_lines(self):
token_counts = tokenizer.corpus_token_counts(
self.corpus_path, corpus_max_lines=5, split_on_newlines=False)
self.assertIn(u"slept", token_counts)
self.assertNotIn(u"Mitch", token_counts)
self.assertDictContainsSubset({
u".\n\n": 1,
u"\n": 2,
u".\n": 1
}, token_counts)
def test_vocab_token_counts(self):
token_counts = tokenizer.vocab_token_counts(self.vocab_path, 0)
expected = {
u"lollipop": 8,
u"reverberated": 12,
u"kattywampus": 11,
u"balderdash": 10,
u"jiggery-pokery": 14,
}
self.assertDictEqual(expected, token_counts)
def test_vocab_token_counts_with_max_lines(self):
# vocab-1 has 2 lines, vocab-2 has 3
token_counts = tokenizer.vocab_token_counts(self.vocab_path, 5)
expected = {
u"lollipop": 8,
u"reverberated": 12,
u"kattywampus": 11,
u"balderdash": 10,
}
self.assertDictEqual(expected, token_counts)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 | -218,404,520,536,151,600 | 31.170213 | 78 | 0.630732 | false |
wschwa/Mr-Orange-Sick-Beard | sickbeard/databases/mainDB.py | 1 | 30360 | # Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import datetime
import sickbeard
import os.path
from sickbeard import db, common, helpers, logger
from sickbeard.providers.generic import GenericProvider
from sickbeard import encodingKludge as ek
from sickbeard.name_parser.parser import NameParser, InvalidNameException
class MainSanityCheck(db.DBSanityCheck):
def check(self):
self.fix_duplicate_shows()
self.fix_duplicate_episodes()
self.fix_orphan_episodes()
def fix_duplicate_shows(self):
sqlResults = self.connection.select("SELECT show_id, tvdb_id, COUNT(tvdb_id) as count FROM tv_shows GROUP BY tvdb_id HAVING count > 1")
for cur_duplicate in sqlResults:
logger.log(u"Duplicate show detected! tvdb_id: " + str(cur_duplicate["tvdb_id"]) + u" count: " + str(cur_duplicate["count"]), logger.DEBUG)
cur_dupe_results = self.connection.select("SELECT show_id, tvdb_id FROM tv_shows WHERE tvdb_id = ? LIMIT ?",
[cur_duplicate["tvdb_id"], int(cur_duplicate["count"])-1]
)
for cur_dupe_id in cur_dupe_results:
logger.log(u"Deleting duplicate show with tvdb_id: " + str(cur_dupe_id["tvdb_id"]) + u" show_id: " + str(cur_dupe_id["show_id"]))
self.connection.action("DELETE FROM tv_shows WHERE show_id = ?", [cur_dupe_id["show_id"]])
else:
logger.log(u"No duplicate show, check passed")
def fix_duplicate_episodes(self):
sqlResults = self.connection.select("SELECT showid, season, episode, COUNT(showid) as count FROM tv_episodes GROUP BY showid, season, episode HAVING count > 1")
for cur_duplicate in sqlResults:
logger.log(u"Duplicate episode detected! showid: " + str(cur_duplicate["showid"]) + u" season: "+str(cur_duplicate["season"]) + u" episode: "+str(cur_duplicate["episode"]) + u" count: " + str(cur_duplicate["count"]), logger.DEBUG)
cur_dupe_results = self.connection.select("SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? and episode = ? ORDER BY episode_id DESC LIMIT ?",
[cur_duplicate["showid"], cur_duplicate["season"], cur_duplicate["episode"], int(cur_duplicate["count"])-1]
)
for cur_dupe_id in cur_dupe_results:
logger.log(u"Deleting duplicate episode with episode_id: " + str(cur_dupe_id["episode_id"]))
self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_dupe_id["episode_id"]])
else:
logger.log(u"No duplicate episode, check passed")
def fix_orphan_episodes(self):
sqlResults = self.connection.select("SELECT episode_id, showid, tv_shows.tvdb_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.tvdb_id WHERE tv_shows.tvdb_id is NULL")
for cur_orphan in sqlResults:
logger.log(u"Orphan episode detected! episode_id: " + str(cur_orphan["episode_id"]) + " showid: " + str(cur_orphan["showid"]), logger.DEBUG)
logger.log(u"Deleting orphan episode with episode_id: "+str(cur_orphan["episode_id"]))
self.connection.action("DELETE FROM tv_episodes WHERE episode_id = ?", [cur_orphan["episode_id"]])
else:
logger.log(u"No orphan episode, check passed")
def backupDatabase(version):
helpers.backupVersionedFile(db.dbFilename(), version)
# ======================
# = Main DB Migrations =
# ======================
# Add new migrations at the bottom of the list; subclass the previous migration.
class InitialSchema (db.SchemaUpgrade):
def test(self):
return self.hasTable("tv_shows")
def execute(self):
queries = [
"CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, seasonfolders NUMERIC, paused NUMERIC, startyear NUMERIC);",
"CREATE TABLE tv_episodes (episode_id INTEGER PRIMARY KEY, showid NUMERIC, tvdbid NUMERIC, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT);",
"CREATE TABLE info (last_backlog NUMERIC, last_tvdb NUMERIC);",
"CREATE TABLE history (action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider NUMERIC);"
]
for query in queries:
self.connection.action(query)
class AddTvrId (InitialSchema):
def test(self):
return self.hasColumn("tv_shows", "tvr_id")
def execute(self):
self.addColumn("tv_shows", "tvr_id")
class AddTvrName (AddTvrId):
def test(self):
return self.hasColumn("tv_shows", "tvr_name")
def execute(self):
self.addColumn("tv_shows", "tvr_name", "TEXT", "")
class AddImdbId (InitialSchema):
def test(self):
return self.hasColumn("tv_shows", "imdb_id")
def execute(self):
self.addColumn("tv_shows", "imdb_id", "TEXT", "")
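# Note on the migration pattern (added; the column below is hypothetical):
# migrations subclass earlier ones, and each test()/execute() pair marks one
# schema revision. A new migration would follow the same shape:
#
# class AddExampleFlag(AddImdbId):
# def test(self):
# return self.hasColumn("tv_shows", "example_flag")
# def execute(self):
# self.addColumn("tv_shows", "example_flag", "NUMERIC", 0)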
class AddAirdateIndex (AddTvrName):
def test(self):
return self.hasTable("idx_tv_episodes_showid_airdate")
def execute(self):
self.connection.action("CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid,airdate);")
class NumericProviders (AddAirdateIndex):
def test(self):
return self.connection.tableInfo("history")['provider']['type'] == 'TEXT'
histMap = {-1: 'unknown',
1: 'newzbin',
2: 'tvbinz',
3: 'nzbs',
4: 'eztv',
5: 'nzbmatrix',
6: 'tvnzb',
7: 'ezrss',
8: 'thepiratebay',
9: 'dtt',
10: 'torrentleech' }
def execute(self):
self.connection.action("ALTER TABLE history RENAME TO history_old")
self.connection.action("CREATE TABLE history (action NUMERIC, date NUMERIC, showid NUMERIC, season NUMERIC, episode NUMERIC, quality NUMERIC, resource TEXT, provider TEXT);")
for x in self.histMap.keys():
self.upgradeHistory(x, self.histMap[x])
def upgradeHistory(self, number, name):
oldHistory = self.connection.action("SELECT * FROM history_old").fetchall()
for curResult in oldHistory:
sql = "INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)"
provider = 'unknown'
try:
provider = self.histMap[int(curResult["provider"])]
except ValueError:
provider = curResult["provider"]
args = [curResult["action"], curResult["date"], curResult["showid"], curResult["season"], curResult["episode"], curResult["quality"], curResult["resource"], provider]
self.connection.action(sql, args)
class NewQualitySettings (NumericProviders):
def test(self):
return self.hasTable("db_version")
def execute(self):
backupDatabase(0)
# old stuff that's been removed from common but we need it to upgrade
HD = 1
SD = 3
ANY = 2
BEST = 4
ACTION_SNATCHED = 1
ACTION_PRESNATCHED = 2
ACTION_DOWNLOADED = 3
PREDOWNLOADED = 3
MISSED = 6
BACKLOG = 7
DISCBACKLOG = 8
SNATCHED_BACKLOG = 10
### Update default quality
if sickbeard.QUALITY_DEFAULT == HD:
sickbeard.QUALITY_DEFAULT = common.HD
elif sickbeard.QUALITY_DEFAULT == SD:
sickbeard.QUALITY_DEFAULT = common.SD
elif sickbeard.QUALITY_DEFAULT == ANY:
sickbeard.QUALITY_DEFAULT = common.ANY
elif sickbeard.QUALITY_DEFAULT == BEST:
sickbeard.QUALITY_DEFAULT = common.BEST
### Update episode statuses
toUpdate = self.connection.select("SELECT episode_id, location, status FROM tv_episodes WHERE status IN (?, ?, ?, ?, ?, ?, ?)", [common.DOWNLOADED, common.SNATCHED, PREDOWNLOADED, MISSED, BACKLOG, DISCBACKLOG, SNATCHED_BACKLOG])
didUpdate = False
for curUpdate in toUpdate:
# remember that we changed something
didUpdate = True
newStatus = None
oldStatus = int(curUpdate["status"])
if oldStatus == common.SNATCHED:
newStatus = common.Quality.compositeStatus(common.SNATCHED, common.Quality.UNKNOWN)
elif oldStatus == PREDOWNLOADED:
newStatus = common.Quality.compositeStatus(common.DOWNLOADED, common.Quality.SDTV)
elif oldStatus in (MISSED, BACKLOG, DISCBACKLOG):
newStatus = common.WANTED
elif oldStatus == SNATCHED_BACKLOG:
newStatus = common.Quality.compositeStatus(common.SNATCHED, common.Quality.UNKNOWN)
if newStatus != None:
self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ? ", [newStatus, curUpdate["episode_id"]])
continue
# if we get here status should be == DOWNLOADED
if not curUpdate["location"]:
continue
newQuality = common.Quality.nameQuality(curUpdate["location"])
if newQuality == common.Quality.UNKNOWN:
newQuality = common.Quality.assumeQuality(curUpdate["location"])
self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [common.Quality.compositeStatus(common.DOWNLOADED, newQuality), curUpdate["episode_id"]])
# if no updates were done then the backup is useless
if didUpdate:
os.remove(db.dbFilename(suffix='v0'))
### Update show qualities
toUpdate = self.connection.select("SELECT * FROM tv_shows")
for curUpdate in toUpdate:
if not curUpdate["quality"]:
continue
if int(curUpdate["quality"]) == HD:
newQuality = common.HD
elif int(curUpdate["quality"]) == SD:
newQuality = common.SD
elif int(curUpdate["quality"]) == ANY:
newQuality = common.ANY
elif int(curUpdate["quality"]) == BEST:
newQuality = common.BEST
else:
logger.log(u"Unknown show quality: "+str(curUpdate["quality"]), logger.WARNING)
newQuality = None
if newQuality:
self.connection.action("UPDATE tv_shows SET quality = ? WHERE show_id = ?", [newQuality, curUpdate["show_id"]])
### Update history
toUpdate = self.connection.select("SELECT * FROM history")
for curUpdate in toUpdate:
newAction = None
newStatus = None
if int(curUpdate["action"] == ACTION_SNATCHED):
newStatus = common.SNATCHED
elif int(curUpdate["action"] == ACTION_DOWNLOADED):
newStatus = common.DOWNLOADED
elif int(curUpdate["action"] == ACTION_PRESNATCHED):
newAction = common.Quality.compositeStatus(common.SNATCHED, common.Quality.SDTV)
if newAction == None and newStatus == None:
continue
if not newAction:
if int(curUpdate["quality"] == HD):
newAction = common.Quality.compositeStatus(newStatus, common.Quality.HDTV)
elif int(curUpdate["quality"] == SD):
newAction = common.Quality.compositeStatus(newStatus, common.Quality.SDTV)
else:
newAction = common.Quality.compositeStatus(newStatus, common.Quality.UNKNOWN)
self.connection.action("UPDATE history SET action = ? WHERE date = ? AND showid = ?", [newAction, curUpdate["date"], curUpdate["showid"]])
self.connection.action("CREATE TABLE db_version (db_version INTEGER);")
self.connection.action("INSERT INTO db_version (db_version) VALUES (?)", [1])
class DropOldHistoryTable(NewQualitySettings):
def test(self):
return self.checkDBVersion() >= 2
def execute(self):
self.connection.action("DROP TABLE history_old")
self.incDBVersion()
class UpgradeHistoryForGenericProviders(DropOldHistoryTable):
def test(self):
return self.checkDBVersion() >= 3
def execute(self):
providerMap = {'NZBs': 'NZBs.org',
'BinReq': 'Bin-Req',
'NZBsRUS': '''NZBs'R'US''',
'EZTV': 'EZTV@BT-Chat'}
for oldProvider in providerMap:
self.connection.action("UPDATE history SET provider = ? WHERE provider = ?", [providerMap[oldProvider], oldProvider])
self.incDBVersion()
class AddAirByDateOption(UpgradeHistoryForGenericProviders):
def test(self):
return self.checkDBVersion() >= 4
def execute(self):
self.connection.action("ALTER TABLE tv_shows ADD air_by_date NUMERIC")
self.incDBVersion()
class ChangeSabConfigFromIpToHost(AddAirByDateOption):
def test(self):
return self.checkDBVersion() >= 5
def execute(self):
sickbeard.SAB_HOST = 'http://' + sickbeard.SAB_HOST + '/sabnzbd/'
self.incDBVersion()
class FixSabHostURL(ChangeSabConfigFromIpToHost):
def test(self):
return self.checkDBVersion() >= 6
def execute(self):
if sickbeard.SAB_HOST.endswith('/sabnzbd/'):
sickbeard.SAB_HOST = sickbeard.SAB_HOST.replace('/sabnzbd/','/')
sickbeard.save_config()
self.incDBVersion()
class AddLang (FixSabHostURL):
def test(self):
return self.hasColumn("tv_shows", "lang")
def execute(self):
self.addColumn("tv_shows", "lang", "TEXT", "en")
class PopulateRootDirs (AddLang):
def test(self):
return self.checkDBVersion() >= 7
def execute(self):
dir_results = self.connection.select("SELECT location FROM tv_shows")
dir_counts = {}
for cur_dir in dir_results:
cur_root_dir = ek.ek(os.path.dirname, ek.ek(os.path.normpath, cur_dir["location"]))
if cur_root_dir not in dir_counts:
dir_counts[cur_root_dir] = 1
else:
dir_counts[cur_root_dir] += 1
logger.log(u"Dir counts: "+str(dir_counts), logger.DEBUG)
if not dir_counts:
self.incDBVersion()
return
default_root_dir = dir_counts.values().index(max(dir_counts.values()))
new_root_dirs = str(default_root_dir)+'|'+'|'.join(dir_counts.keys())
logger.log(u"Setting ROOT_DIRS to: "+new_root_dirs, logger.DEBUG)
sickbeard.ROOT_DIRS = new_root_dirs
sickbeard.save_config()
self.incDBVersion()
class SetNzbTorrentSettings(PopulateRootDirs):
def test(self):
return self.checkDBVersion() >= 8
def execute(self):
use_torrents = False
use_nzbs = False
for cur_provider in sickbeard.providers.sortedProviderList():
if cur_provider.isEnabled():
if cur_provider.providerType == GenericProvider.NZB:
use_nzbs = True
logger.log(u"Provider "+cur_provider.name+" is enabled, enabling NZBs in the upgrade")
break
elif cur_provider.providerType == GenericProvider.TORRENT:
use_torrents = True
logger.log(u"Provider "+cur_provider.name+" is enabled, enabling Torrents in the upgrade")
break
sickbeard.USE_TORRENTS = use_torrents
sickbeard.USE_NZBS = use_nzbs
sickbeard.save_config()
self.incDBVersion()
class FixAirByDateSetting(SetNzbTorrentSettings):
def test(self):
return self.checkDBVersion() >= 9
def execute(self):
shows = self.connection.select("SELECT * FROM tv_shows")
for cur_show in shows:
if cur_show["genre"] and "talk show" in cur_show["genre"].lower():
self.connection.action("UPDATE tv_shows SET air_by_date = ? WHERE tvdb_id = ?", [1, cur_show["tvdb_id"]])
self.incDBVersion()
class AddSizeAndSceneNameFields(FixAirByDateSetting):
def test(self):
return self.checkDBVersion() >= 10
def execute(self):
backupDatabase(11)
if not self.hasColumn("tv_episodes", "file_size"):
self.addColumn("tv_episodes", "file_size")
if not self.hasColumn("tv_episodes", "release_name"):
self.addColumn("tv_episodes", "release_name", "TEXT", "")
ep_results = self.connection.select("SELECT episode_id, location, file_size FROM tv_episodes")
logger.log(u"Adding file size to all episodes in DB, please be patient")
for cur_ep in ep_results:
if not cur_ep["location"]:
continue
# if there is no size yet then populate it for us
if (not cur_ep["file_size"] or not int(cur_ep["file_size"])) and ek.ek(os.path.isfile, cur_ep["location"]):
cur_size = ek.ek(os.path.getsize, cur_ep["location"])
self.connection.action("UPDATE tv_episodes SET file_size = ? WHERE episode_id = ?", [cur_size, int(cur_ep["episode_id"])])
# check each snatch to see if we can use it to get a release name from
history_results = self.connection.select("SELECT * FROM history WHERE provider != -1 ORDER BY date ASC")
logger.log(u"Adding release name to all episodes still in history")
for cur_result in history_results:
# find the associated download, if there isn't one then ignore it
download_results = self.connection.select("SELECT resource FROM history WHERE provider = -1 AND showid = ? AND season = ? AND episode = ? AND date > ?",
[cur_result["showid"], cur_result["season"], cur_result["episode"], cur_result["date"]])
if not download_results:
logger.log(u"Found a snatch in the history for "+cur_result["resource"]+" but couldn't find the associated download, skipping it", logger.DEBUG)
continue
nzb_name = cur_result["resource"]
file_name = ek.ek(os.path.basename, download_results[0]["resource"])
# take the extension off the filename, it's not needed
if '.' in file_name:
file_name = file_name.rpartition('.')[0]
# find the associated episode on disk
ep_results = self.connection.select("SELECT episode_id, status FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ? AND location != ''",
[cur_result["showid"], cur_result["season"], cur_result["episode"]])
if not ep_results:
logger.log(u"The episode "+nzb_name+" was found in history but doesn't exist on disk anymore, skipping", logger.DEBUG)
continue
# get the status/quality of the existing ep and make sure it's what we expect
ep_status, ep_quality = common.Quality.splitCompositeStatus(int(ep_results[0]["status"]))
if ep_status != common.DOWNLOADED:
continue
if ep_quality != int(cur_result["quality"]):
continue
# make sure this is actually a real release name and not a season pack or something
for cur_name in (nzb_name, file_name):
logger.log(u"Checking if "+cur_name+" is actually a good release name", logger.DEBUG)
try:
np = NameParser(False)
parse_result = np.parse(cur_name)
except InvalidNameException:
continue
if parse_result.series_name and parse_result.season_number is not None and parse_result.episode_numbers and parse_result.release_group:
# if all is well by this point we'll just put the release name into the database
self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?", [cur_name, ep_results[0]["episode_id"]])
break
# check episodes with obvious scene filenames and pull release names from them
empty_results = self.connection.select("SELECT episode_id, location FROM tv_episodes WHERE release_name = ''")
logger.log(u"Adding release name to all episodes with obvious scene filenames")
for cur_result in empty_results:
ep_file_name = ek.ek(os.path.basename, cur_result["location"])
ep_file_name = os.path.splitext(ep_file_name)[0]
# I only want to find real scene names here so anything with a space in it is out
if ' ' in ep_file_name:
continue
try:
np = NameParser(False)
parse_result = np.parse(ep_file_name)
except InvalidNameException:
continue
if not parse_result.release_group:
continue
logger.log(u"Name "+ep_file_name+" gave release group of "+parse_result.release_group+", seems valid", logger.DEBUG)
self.connection.action("UPDATE tv_episodes SET release_name = ? WHERE episode_id = ?", [ep_file_name, cur_result["episode_id"]])
self.incDBVersion()
class RenameSeasonFolders(AddSizeAndSceneNameFields):
def test(self):
return self.checkDBVersion() >= 11
def execute(self):
# rename the column
self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows")
self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, location TEXT, show_name TEXT, tvdb_id NUMERIC, network TEXT, genre TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, tvr_id NUMERIC, tvr_name TEXT, air_by_date NUMERIC, lang TEXT)")
sql = "INSERT INTO tv_shows(show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, flatten_folders, paused, startyear, tvr_id, tvr_name, air_by_date, lang) SELECT show_id, location, show_name, tvdb_id, network, genre, runtime, quality, airs, status, seasonfolders, paused, startyear, tvr_id, tvr_name, air_by_date, lang FROM tmp_tv_shows"
self.connection.action(sql)
# flip the values to be opposite of what they were before
self.connection.action("UPDATE tv_shows SET flatten_folders = 2 WHERE flatten_folders = 1")
self.connection.action("UPDATE tv_shows SET flatten_folders = 1 WHERE flatten_folders = 0")
self.connection.action("UPDATE tv_shows SET flatten_folders = 0 WHERE flatten_folders = 2")
self.connection.action("DROP TABLE tmp_tv_shows")
self.incDBVersion()
class AddSubtitlesSupport(RenameSeasonFolders):
def test(self):
return self.checkDBVersion() >= 12
def execute(self):
self.addColumn("tv_shows", "subtitles")
self.addColumn("tv_episodes", "subtitles", "TEXT", "")
self.addColumn("tv_episodes", "subtitles_searchcount")
self.addColumn("tv_episodes", "subtitles_lastsearch", "TIMESTAMP", str(datetime.datetime.min))
self.incDBVersion()
class AddIMDbInfo(RenameSeasonFolders):
def test(self):
return self.checkDBVersion() >= 13
def execute(self):
self.connection.action("CREATE TABLE imdb_info (tvdb_id INTEGER PRIMARY KEY, imdb_id TEXT, title TEXT, year NUMERIC, akas TEXT, runtimes NUMERIC, genres TEXT, countries TEXT, country_codes TEXT, certificates TEXT, rating TEXT, votes INTEGER, last_update NUMERIC)")
self.incDBVersion()
class Add1080pAndRawHDQualities(AddIMDbInfo):
"""Add support for 1080p related qualities along with RawHD
Quick overview of what the upgrade needs to do:
quality | old | new
--------------------------
hdwebdl | 1<<3 | 1<<5
hdbluray | 1<<4 | 1<<7
fullhdbluray | 1<<5 | 1<<8
--------------------------
rawhdtv | | 1<<3
fullhdtv | | 1<<4
fullhdwebdl | | 1<<6
"""
def test(self):
return self.checkDBVersion() >= 14
def _update_status(self, old_status):
(status, quality) = common.Quality.splitCompositeStatus(old_status)
return common.Quality.compositeStatus(status, self._update_quality(quality))
def _update_quality(self, old_quality):
"""Update bitwise flags to reflect new quality values
Check flag bits (clear old then set their new locations) starting
with the highest bits so we don't overwrite data we need later on
"""
result = old_quality
# move fullhdbluray from 1<<5 to 1<<8 if set
if(result & (1<<5)):
result = result & ~(1<<5)
result = result | (1<<8)
# move hdbluray from 1<<4 to 1<<7 if set
if(result & (1<<4)):
result = result & ~(1<<4)
result = result | (1<<7)
# move hdwebdl from 1<<3 to 1<<5 if set
if(result & (1<<3)):
result = result & ~(1<<3)
result = result | (1<<5)
return result
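# Worked example (illustrative values, not taken from any real show):
# an old composite of hdwebdl|hdbluray is (1<<3)|(1<<4) == 24;
# _update_quality(24) clears bits 4 and 3 and sets bits 7 and 5,
# returning (1<<5)|(1<<7) == 160 -- the same pair in the new layout.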
def _update_composite_qualities(self, status):
"""Unpack, Update, Return new quality values
Unpack the composite archive/initial values.
Update either quality if needed.
Then return the new composite quality value.
"""
best = (status & (0xffff << 16)) >> 16
initial = status & (0xffff)
best = self._update_quality(best)
initial = self._update_quality(initial)
result = ((best << 16) | initial)
return result
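# Illustrative: a composite whose best quality is the old hdbluray and whose
# initial quality is the old hdwebdl, i.e. ((1<<4) << 16) | (1<<3),
# maps to ((1<<7) << 16) | (1<<5) after the upgrade.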
def execute(self):
backupDatabase(self.checkDBVersion())
# update the default quality so we don't grab the wrong qualities after migration
sickbeard.QUALITY_DEFAULT = self._update_composite_qualities(sickbeard.QUALITY_DEFAULT)
sickbeard.save_config()
# upgrade previous HD to HD720p -- shift previous qualities to their new place values
old_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3], [])
new_hd = common.Quality.combineQualities([common.Quality.HDTV, common.Quality.HDWEBDL, common.Quality.HDBLURAY], [])
# update ANY -- shift existing qualities and add new 1080p qualities, note that rawHD was not added to the ANY template
old_any = common.Quality.combineQualities([common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.HDWEBDL >> 2, common.Quality.HDBLURAY >> 3, common.Quality.UNKNOWN], [])
new_any = common.Quality.combineQualities([common.Quality.SDTV, common.Quality.SDDVD, common.Quality.HDTV, common.Quality.FULLHDTV, common.Quality.HDWEBDL, common.Quality.FULLHDWEBDL, common.Quality.HDBLURAY, common.Quality.FULLHDBLURAY, common.Quality.UNKNOWN], [])
# update qualities (including templates)
shows = self.connection.select("SELECT * FROM tv_shows")
for cur_show in shows:
if cur_show["quality"] == old_hd:
new_quality = new_hd
elif cur_show["quality"] == old_any:
new_quality = new_any
else:
new_quality = self._update_composite_qualities(cur_show["quality"])
self.connection.action("UPDATE tv_shows SET quality = ? WHERE tvdb_id = ?", [new_quality, cur_show["tvdb_id"]])
# update statuses that are within the old hdwebdl (1<<3, which is 8) and better -- exclude unknown (1<<15, which is 32768)
episodes = self.connection.select("SELECT * FROM tv_episodes WHERE status/100 < 32768 AND status/100 >= 8")
for cur_episode in episodes:
self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [self._update_status(cur_episode["status"]), cur_episode["episode_id"]])
# make two separate passes through the history since snatched and downloaded (action & quality) may not always coordinate together
# update previous history so it shows the correct action
historyAction = self.connection.select("SELECT * FROM history WHERE action/100 < 32768 AND action/100 >= 8")
for cur_entry in historyAction:
self.connection.action("UPDATE history SET action = ? WHERE showid = ? AND date = ?", [self._update_status(cur_entry["action"]), cur_entry["showid"], cur_entry["date"]])
# update previous history so it shows the correct quality
historyQuality = self.connection.select("SELECT * FROM history WHERE quality < 32768 AND quality >= 8")
for cur_entry in historyQuality:
self.connection.action("UPDATE history SET quality = ? WHERE showid = ? AND date = ?", [self._update_quality(cur_entry["quality"]), cur_entry["showid"], cur_entry["date"]])
self.incDBVersion()
class AddProperNamingSupport(AddIMDbInfo):
def test(self):
return self.checkDBVersion() >= 15
def execute(self):
self.addColumn("tv_episodes", "is_proper")
self.incDBVersion()
| gpl-3.0 | -1,343,770,489,762,128,600 | 43.19214 | 378 | 0.613439 | false |
DamnWidget/txorm | txorm/_compat/python2_.py | 1 | 1170 |
# Copyright (c) 2014 Oscar Campos <[email protected]>
# See LICENSE for details
import re
import sys
import urlparse
try:
import cPickle as pickle
except ImportError:
import pickle
try:
import cStringIO as StringIO
except ImportError:
from StringIO import StringIO
_queryprog = None
def __splitquery(url):
"""splitquery('/path?query') --> '/path', 'query'."""
global _queryprog
if _queryprog is None:
_queryprog = re.compile(r'^(.*)\?([^?]*)$')
match = _queryprog.match(url)
if match:
return match.group(1, 2)
return url, None
urlparse.splitquery = __splitquery
# urlparse prior to Python 2.7.6 have a bug in parsing the port, fix it
if sys.version_info < (2, 7, 6):
def port(self):
netloc = self.netloc.split('@')[-1].split(']')[-1]
if ':' in netloc:
port = netloc.split(':')[1]
if port:
port = int(port, 10)
# verify legal port
if (0 <= port <= 65535):
return port
return None
urlparse.ResultMixin.port = property(port)
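# Illustrative behaviour of the patched property (example URLs assumed):
# urlparse.urlparse('http://user@host:8042/path').port == 8042
# urlparse.urlparse('http://host/path').port is None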
__all__ = ['pickle', 'StringIO', 'urlparse']
| lgpl-3.0 | -8,705,973,143,589,217,000 | 21.5 | 71 | 0.582051 | false |
imclab/confer | scripts/chi2013/prepare_paper_paper_graph_data.py | 1 | 4179 | #!/usr/bin/python
import sys, os, operator, numpy, MySQLdb, json
import matplotlib.pyplot as plt
from db import entity
from db import session
from collections import defaultdict
'''
@author: anant bhardwaj
@date: Feb 12, 2013
script for preparing the paper-paper graph data (d3-style nodes/links JSON)
'''
entities = entity.Entity().entities
sessions = session.Session().sessions
connection = MySQLdb.connect(host="mysql.csail.mit.edu",
user="cobi",
passwd="su4Biha",
db="cobi")
nodes = {}
edges = defaultdict(dict)
likes = defaultdict(set)
scon = {}
papers_count = []
def load_data():
for p in entities:
nodes[p] = {'title': entities[p]['title'], 'session': entities[p]['session'], 'award': entities[p]['award'], 'hm': entities[p]['hm']}
cursor = connection.cursor()
cursor.execute("SELECT auth_no, likes, email1 FROM pcs_authors where likes!= 'NULL' and likes !='[]';")
data = cursor.fetchall()
for row in data:
papers = json.loads(row[1])
papers_count.append(len(papers))
for p in papers:
likes[p].add(row[0])
for p1 in entities:
for p2 in entities:
edges[p1][p2] = -1
if(p1 != p2):
common_likes = likes[p1].intersection(likes[p2])
edges[p1][p2] = len(common_likes)
for s in sessions:
num_edges = 0
papers = sessions[s]['submissions']
for p1 in papers:
for p2 in papers:
try:
if(p1 != p2 and edges[p1][p2] > 0):
num_edges += 1
except:
pass
if(len(sessions[s]['submissions']) > 0):
scon[s] = float(num_edges)/len(sessions[s]['submissions'])
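# scon is a per-session connectivity score: the number of directed paper
# pairs that share at least one liker, divided by the session's paper count.
# E.g. (illustrative) a 4-paper session where every ordered pair shares a
# liker has 4*3 = 12 such edges, giving scon = 12/4 = 3.0.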
def main():
load_data()
nodesArray = []
linksArray = []
print numpy.mean(scon.values()), numpy.std(scon.values()), numpy.median(scon.values()), numpy.min(scon.values()), numpy.max(scon.values())
'''
awards_count = []
non_awards_count = []
likes_count = [len(v) for k,v in likes.iteritems()]
for k,v in likes.iteritems():
if k in nodes and (nodes[k]['award'] or nodes[k]['hm']):
awards_count.append(len(v))
else:
non_awards_count.append(len(v))
#print numpy.mean(papers_count), numpy.std(papers_count), numpy.median(papers_count), numpy.min(papers_count), numpy.max(papers_count)
print numpy.mean(likes_count), numpy.std(likes_count), numpy.median(likes_count), numpy.min(likes_count), numpy.max(likes_count)
print numpy.mean(awards_count), numpy.std(awards_count), numpy.median(awards_count), numpy.min(awards_count), numpy.max(awards_count)
plt.hist([awards_count, non_awards_count], bins=100, histtype='bar', stacked=True, color=['yellow', 'green'],
label=['Award Papers', 'Non-Award Papers'])
plt.title('Number of People Starred vs. Number of Papers')
plt.xlabel('Number of People Starred')
plt.ylabel('Number of Papers')
plt.legend()
plt.show()
plt.hist(papers_count, bins=20, color="cyan")
plt.title('Number of Papers vs. Number of People')
plt.xlabel('Number of Likes')
plt.ylabel('Number of People')
plt.show()
'''
k = 0
for node in nodes:
nodes[node]['id']= k
nodesArray.append({'title' : nodes[node]['title'], 'session': nodes[node]['session'], 'weight': len(likes[node])})
k = k+1
edgesToRemove = set()
'''
for edge in edges:
links = edges[edge]
for l in links:
weight = edges[edge][l]
if(weight > 14):
edgesToRemove.add(nodes[edge]['id'])
edgesToRemove.add(nodes[l]['id'])
'''
for edge in edges:
links = edges[edge]
for l in links:
weight = edges[edge][l]
if(weight > 0 and (nodes[edge]['id'] not in edgesToRemove) and (nodes[l]['id'] not in edgesToRemove)):
linksArray.append({'source' : nodes[edge]['id'], 'target' : nodes[l]['id'], 'weight': weight})
with open('/Volumes/Workspace/www/data.json', 'w') as p:
p.write(json.dumps({"nodes": nodesArray, "links": linksArray}))
if __name__ == '__main__':
main()
| mit | -679,769,721,634,642,000 | 26.675497 | 139 | 0.639627 | false |
richardliaw/ray | dashboard/tests/test_memory_utils.py | 1 | 8729 | import ray
from ray.new_dashboard.memory_utils import (
ReferenceType, decode_object_ref_if_needed, MemoryTableEntry, MemoryTable,
SortingType)
"""Memory Table Unit Test"""
NODE_ADDRESS = "127.0.0.1"
IS_DRIVER = True
PID = 1
OBJECT_ID = "7wpsIhgZiBz/////AQAAyAEAAAA="
ACTOR_ID = "fffffffffffffffff66d17ba010000c801000000"
DECODED_ID = decode_object_ref_if_needed(OBJECT_ID)
OBJECT_SIZE = 100
def build_memory_entry(*,
local_ref_count,
pinned_in_memory,
submitted_task_reference_count,
contained_in_owned,
object_size,
pid,
object_id=OBJECT_ID,
node_address=NODE_ADDRESS):
object_ref = {
"objectId": object_id,
"callSite": "(task call) /Users:458",
"objectSize": object_size,
"localRefCount": local_ref_count,
"pinnedInMemory": pinned_in_memory,
"submittedTaskRefCount": submitted_task_reference_count,
"containedInOwned": contained_in_owned
}
return MemoryTableEntry(
object_ref=object_ref,
node_address=node_address,
is_driver=IS_DRIVER,
pid=pid)
def build_local_reference_entry(object_size=OBJECT_SIZE,
pid=PID,
node_address=NODE_ADDRESS):
return build_memory_entry(
local_ref_count=1,
pinned_in_memory=False,
submitted_task_reference_count=0,
contained_in_owned=[],
object_size=object_size,
pid=pid,
node_address=node_address)
def build_used_by_pending_task_entry(object_size=OBJECT_SIZE,
pid=PID,
node_address=NODE_ADDRESS):
return build_memory_entry(
local_ref_count=0,
pinned_in_memory=False,
submitted_task_reference_count=2,
contained_in_owned=[],
object_size=object_size,
pid=pid,
node_address=node_address)
def build_captured_in_object_entry(object_size=OBJECT_SIZE,
pid=PID,
node_address=NODE_ADDRESS):
return build_memory_entry(
local_ref_count=0,
pinned_in_memory=False,
submitted_task_reference_count=0,
contained_in_owned=[OBJECT_ID],
object_size=object_size,
pid=pid,
node_address=node_address)
def build_actor_handle_entry(object_size=OBJECT_SIZE,
pid=PID,
node_address=NODE_ADDRESS):
return build_memory_entry(
local_ref_count=1,
pinned_in_memory=False,
submitted_task_reference_count=0,
contained_in_owned=[],
object_size=object_size,
pid=pid,
node_address=node_address,
object_id=ACTOR_ID)
def build_pinned_in_memory_entry(object_size=OBJECT_SIZE,
pid=PID,
node_address=NODE_ADDRESS):
return build_memory_entry(
local_ref_count=0,
pinned_in_memory=True,
submitted_task_reference_count=0,
contained_in_owned=[],
object_size=object_size,
pid=pid,
node_address=node_address)
def build_entry(object_size=OBJECT_SIZE,
pid=PID,
node_address=NODE_ADDRESS,
reference_type=ReferenceType.PINNED_IN_MEMORY):
if reference_type == ReferenceType.USED_BY_PENDING_TASK:
return build_used_by_pending_task_entry(
pid=pid, object_size=object_size, node_address=node_address)
elif reference_type == ReferenceType.LOCAL_REFERENCE:
return build_local_reference_entry(
pid=pid, object_size=object_size, node_address=node_address)
elif reference_type == ReferenceType.PINNED_IN_MEMORY:
return build_pinned_in_memory_entry(
pid=pid, object_size=object_size, node_address=node_address)
elif reference_type == ReferenceType.ACTOR_HANDLE:
return build_actor_handle_entry(
pid=pid, object_size=object_size, node_address=node_address)
elif reference_type == ReferenceType.CAPTURED_IN_OBJECT:
return build_captured_in_object_entry(
pid=pid, object_size=object_size, node_address=node_address)
def test_invalid_memory_entry():
memory_entry = build_memory_entry(
local_ref_count=0,
pinned_in_memory=False,
submitted_task_reference_count=0,
contained_in_owned=[],
object_size=OBJECT_SIZE,
pid=PID)
assert memory_entry.is_valid() is False
memory_entry = build_memory_entry(
local_ref_count=0,
pinned_in_memory=False,
submitted_task_reference_count=0,
contained_in_owned=[],
object_size=-1,
pid=PID)
assert memory_entry.is_valid() is False
def test_valid_reference_memory_entry():
memory_entry = build_local_reference_entry()
assert memory_entry.reference_type == ReferenceType.LOCAL_REFERENCE
assert memory_entry.object_ref == ray.ObjectRef(
decode_object_ref_if_needed(OBJECT_ID))
assert memory_entry.is_valid() is True
def test_reference_type():
# pinned in memory
memory_entry = build_pinned_in_memory_entry()
assert memory_entry.reference_type == ReferenceType.PINNED_IN_MEMORY
# used by pending task
memory_entry = build_used_by_pending_task_entry()
assert memory_entry.reference_type == ReferenceType.USED_BY_PENDING_TASK
# captured in object
memory_entry = build_captured_in_object_entry()
assert memory_entry.reference_type == ReferenceType.CAPTURED_IN_OBJECT
# actor handle
memory_entry = build_actor_handle_entry()
assert memory_entry.reference_type == ReferenceType.ACTOR_HANDLE
def test_memory_table_summary():
entries = [
build_pinned_in_memory_entry(),
build_used_by_pending_task_entry(),
build_captured_in_object_entry(),
build_actor_handle_entry(),
build_local_reference_entry(),
build_local_reference_entry()
]
memory_table = MemoryTable(entries)
assert len(memory_table.group) == 1
assert memory_table.summary["total_actor_handles"] == 1
assert memory_table.summary["total_captured_in_objects"] == 1
assert memory_table.summary["total_local_ref_count"] == 2
assert memory_table.summary[
"total_object_size"] == len(entries) * OBJECT_SIZE
assert memory_table.summary["total_pinned_in_memory"] == 1
assert memory_table.summary["total_used_by_pending_task"] == 1
def test_memory_table_sort_by_pid():
unsort = [1, 3, 2]
entries = [build_entry(pid=pid) for pid in unsort]
memory_table = MemoryTable(entries, sort_by_type=SortingType.PID)
sort = sorted(unsort)
for pid, entry in zip(sort, memory_table.table):
assert pid == entry.pid
def test_memory_table_sort_by_reference_type():
unsort = [
ReferenceType.USED_BY_PENDING_TASK, ReferenceType.LOCAL_REFERENCE,
ReferenceType.LOCAL_REFERENCE, ReferenceType.PINNED_IN_MEMORY
]
entries = [
build_entry(reference_type=reference_type) for reference_type in unsort
]
memory_table = MemoryTable(
entries, sort_by_type=SortingType.REFERENCE_TYPE)
sort = sorted(unsort)
for reference_type, entry in zip(sort, memory_table.table):
assert reference_type == entry.reference_type
def test_memory_table_sort_by_object_size():
unsort = [312, 214, -1, 1244, 642]
entries = [build_entry(object_size=object_size) for object_size in unsort]
memory_table = MemoryTable(entries, sort_by_type=SortingType.OBJECT_SIZE)
sort = sorted(unsort)
for object_size, entry in zip(sort, memory_table.table):
assert object_size == entry.object_size
def test_group_by():
node_second = "127.0.0.2"
node_first = "127.0.0.1"
entries = [
build_entry(node_address=node_second, pid=2),
build_entry(node_address=node_second, pid=1),
build_entry(node_address=node_first, pid=2),
build_entry(node_address=node_first, pid=1)
]
memory_table = MemoryTable(entries)
# Make sure it is correctly grouped
assert node_first in memory_table.group
assert node_second in memory_table.group
# make sure pid is sorted in the right order.
for group_key, group_memory_table in memory_table.group.items():
pid = 1
for entry in group_memory_table.table:
assert pid == entry.pid
pid += 1
if __name__ == "__main__":
import sys
import pytest
sys.exit(pytest.main(["-v", __file__]))
| apache-2.0 | -6,298,155,832,914,063,000 | 33.638889 | 79 | 0.624012 | false |
leiferikb/bitpop | src/tools/perf/benchmarks/sunspider.py | 1 | 2047 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import json
import os
from metrics import power
from telemetry import test
from telemetry.page import page_measurement
from telemetry.page import page_set
_URL = 'http://www.webkit.org/perf/sunspider-1.0.2/sunspider-1.0.2/driver.html'
class _SunspiderMeasurement(page_measurement.PageMeasurement):
def __init__(self):
super(_SunspiderMeasurement, self).__init__()
self._power_metric = power.PowerMetric()
def CustomizeBrowserOptions(self, options):
power.PowerMetric.CustomizeBrowserOptions(options)
def DidNavigateToPage(self, page, tab):
self._power_metric.Start(page, tab)
def MeasurePage(self, page, tab, results):
tab.WaitForJavaScriptExpression(
'window.location.pathname.indexOf("results.html") >= 0'
'&& typeof(output) != "undefined"', 300)
self._power_metric.Stop(page, tab)
self._power_metric.AddResults(tab, results)
js_get_results = 'JSON.stringify(output);'
js_results = json.loads(tab.EvaluateJavaScript(js_get_results))
r = collections.defaultdict(list)
totals = []
# js_results is: [{'foo': v1, 'bar': v2},
# {'foo': v3, 'bar': v4},
# ...]
for result in js_results:
total = 0
for key, value in result.iteritems():
r[key].append(value)
total += value
totals.append(total)
for key, values in r.iteritems():
results.Add(key, 'ms', values, data_type='unimportant')
results.Add('Total', 'ms', totals)
class Sunspider(test.Test):
"""Apple's SunSpider JavaScript benchmark."""
test = _SunspiderMeasurement
def CreatePageSet(self, options):
ps = page_set.PageSet(
archive_data_file='../page_sets/data/sunspider.json',
make_javascript_deterministic=False,
file_path=os.path.abspath(__file__))
ps.AddPageWithDefaultRunNavigate(_URL)
return ps
| gpl-3.0 | -1,330,169,115,319,398,100 | 30.984375 | 79 | 0.674646 | false |
ilendl2/django-wagtail-feeds | setup.py | 1 | 1560 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
"Django>=1.7,<1.10",
"Wagtail>=1.4",
]
test_requirements = [
"Django>=1.8,<1.10",
"Wagtail>=1.4",
]
setup(
name='django-wagtail-feeds',
version='0.0.2',
description="Support RSS Feeds, Facebook Instant Articles and Apple News",
long_description=readme + '\n\n' + history,
author="Christopher Clarke",
author_email='[email protected]',
url='https://github.com/chrisdev/django-wagtail-feeds',
packages=[
'wagtail_feeds',
],
package_dir={'wagtail_feeds':
'wagtail_feeds'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='wagtail_feeds',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='runtests.runtests',
tests_require=test_requirements
)
| mit | -4,203,972,471,546,435,000 | 27.363636 | 78 | 0.607692 | false |
arnaudcoquelet/myAvstServer | node_modules/soap/node_modules/node-expat/build/c4che/Release.cache.py | 1 | 1498 | AR = '/usr/bin/ar'
ARFLAGS = 'rcs'
CCFLAGS = ['-g']
CCFLAGS_MACBUNDLE = ['-fPIC']
CCFLAGS_NODE = ['-D_LARGEFILE_SOURCE', '-D_FILE_OFFSET_BITS=64']
CC_VERSION = ('4', '4', '5')
COMPILER_CXX = 'g++'
CPP = '/usr/bin/cpp'
CPPFLAGS_NODE = ['-D_GNU_SOURCE']
CPPPATH_EXPAT.H = ['/usr/include', '/usr/local/include']
CPPPATH_NODE = '/usr/local/include/node'
CPPPATH_ST = '-I%s'
CXX = ['/usr/bin/g++']
CXXDEFINES_ST = '-D%s'
CXXFLAGS = ['-g']
CXXFLAGS_DEBUG = ['-g']
CXXFLAGS_NODE = ['-D_LARGEFILE_SOURCE', '-D_FILE_OFFSET_BITS=64']
CXXFLAGS_RELEASE = ['-O2']
CXXLNK_SRC_F = ''
CXXLNK_TGT_F = ['-o', '']
CXX_NAME = 'gcc'
CXX_SRC_F = ''
CXX_TGT_F = ['-c', '-o', '']
DEST_BINFMT = 'elf'
DEST_CPU = 'x86'
DEST_OS = 'linux'
FULLSTATIC_MARKER = '-static'
HAVE_EXPAT_H = 1
LIBDIR = '/root/.node_libraries'
LIBPATH_NODE = '/usr/local/lib'
LIBPATH_ST = '-L%s'
LIB_EXPAT = ['expat']
LIB_ST = '-l%s'
LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
LINK_CXX = ['/usr/bin/g++']
NODE_PATH = '/root/.node_libraries'
PREFIX = '/usr/local'
PREFIX_NODE = '/usr/local'
RANLIB = '/usr/bin/ranlib'
RPATH_ST = '-Wl,-rpath,%s'
SHLIB_MARKER = '-Wl,-Bdynamic'
SONAME_ST = '-Wl,-h,%s'
STATICLIBPATH_ST = '-L%s'
STATICLIB_MARKER = '-Wl,-Bstatic'
STATICLIB_ST = '-l%s'
defines = {'HAVE_EXPAT_H': 1}
macbundle_PATTERN = '%s.bundle'
program_PATTERN = '%s'
shlib_CXXFLAGS = ['-fPIC', '-DPIC']
shlib_LINKFLAGS = ['-shared']
shlib_PATTERN = 'lib%s.so'
staticlib_LINKFLAGS = ['-Wl,-Bstatic']
staticlib_PATTERN = 'lib%s.a'
| gpl-2.0 | -36,677,115,917,842,780 | 27.264151 | 65 | 0.619493 | false |
abalkin/tz | tests/test_zoneinfo.py | 1 | 9069 | import unittest
import sys
from datetime import timedelta, timezone, datetime
import pickle
import pytest
import tz
from tz.tools import pairs
from tz.zoneinfo import ZoneInfo, enfold, parse_std_dst, parse_mnd_time, \
dth_day_of_week_n, PosixRules, ZERO, parse_time, julian_day, \
parse_name_offset
def tz_shifts(z):
for (_, prev_info), (time, info) in pairs(zip(z.ut, z.ti)):
shift = info[0] - prev_info[0]
yield time, shift
def tz_folds(z):
for time, shift in tz_shifts(z):
if shift < ZERO:
yield time, -shift
def tz_gaps(z):
for time, shift in tz_shifts(z):
if shift > ZERO:
yield time, shift
def tz_zeros(z):
for time, shift in tz_shifts(z):
if not shift:
yield time
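# Illustrative semantics (assuming a US-style zone such as America/New_York):
# tz_gaps(z) yields the spring-forward instants with a timedelta(hours=1) gap,
# tz_folds(z) yields the fall-back instants with a timedelta(hours=1) fold,
# and tz_zeros(z) yields transitions that change tzname/dst but not the offset.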
class ZoneInfoTest(unittest.TestCase):
zonename = 'America/New_York'
version = None
def setUp(self):
if sys.platform == "win32":
self.skipTest("Skipping zoneinfo tests on Windows")
data = tz.tzdata.get(self.zonename)
self.tz = ZoneInfo.fromdata(data.types, data.times, data.rules)
def assertEquivDatetimes(self, a, b):
self.assertEqual((a.replace(tzinfo=None), getattr(a, 'fold', 0),
id(a.tzinfo)),
(b.replace(tzinfo=None), getattr(b, 'fold', 0),
id(b.tzinfo)))
def test_folds(self):
tz = self.tz
for dt, shift in tz_folds(tz):
for x in [0 * shift, 0.5 * shift, shift - timedelta.resolution]:
udt = dt + x
ldt = tz.fromutc(udt.replace(tzinfo=tz))
self.assertEqual(getattr(ldt, 'fold', 0), 1)
adt = udt.replace(tzinfo=timezone.utc).astimezone(tz)
self.assertEquivDatetimes(adt, ldt)
utcoffset = ldt.utcoffset()
self.assertEqual(ldt.replace(tzinfo=None), udt + utcoffset)
# Round trip
self.assertEquivDatetimes(ldt.astimezone(timezone.utc),
udt.replace(tzinfo=timezone.utc))
for x in [-timedelta.resolution, shift]:
udt = dt + x
udt = udt.replace(tzinfo=tz)
ldt = tz.fromutc(udt)
self.assertEqual(getattr(ldt, 'fold', 0), 0)
def test_gaps(self):
tz = self.tz
for dt, shift in tz_gaps(tz):
for x in [0 * shift, 0.5 * shift, shift - timedelta.resolution]:
udt = dt + x
udt = udt.replace(tzinfo=tz)
ldt = tz.fromutc(udt)
self.assertEqual(getattr(ldt, 'fold', 0), 0)
adt = udt.replace(tzinfo=timezone.utc).astimezone(tz)
self.assertEquivDatetimes(adt, ldt)
utcoffset = ldt.utcoffset()
self.assertEqual(ldt.replace(tzinfo=None),
udt.replace(tzinfo=None) + utcoffset)
# Create a local time inside the gap
ldt = tz.fromutc(dt.replace(tzinfo=tz)) - shift + x
self.assertLess(enfold(ldt, 1).utcoffset(),
enfold(ldt, 0).utcoffset(),
"At %s." % ldt)
for x in [-timedelta.resolution, shift]:
udt = dt + x
ldt = tz.fromutc(udt.replace(tzinfo=tz))
self.assertEqual(getattr(ldt, 'fold', 0), 0)
def test_zeros(self):
tz = self.tz
shifts = list(tz_shifts(tz))
folds = list(tz_folds(tz))
gaps = list(tz_gaps(tz))
zeros = list(tz_zeros(tz))
self.assertEqual(len(shifts), len(folds) + len(gaps) + len(zeros))
def test_fromutc_errors(self):
tz = self.tz
with self.assertRaises(TypeError):
tz.fromutc(None)
with self.assertRaises(ValueError):
dt = datetime(1, 1, 1)
tz.fromutc(dt)
def test_pickle():
ut = [datetime.min]
ti = [(timedelta(0), timedelta(0), 'UTC'), ]
z = ZoneInfo(ut, ti)
s = pickle.dumps(z)
r = pickle.loads(s)
assert z.ut == r.ut
def test_posix_rules_pickle():
z = PosixRules('EST+05EDT')
s = pickle.dumps(z)
r = pickle.loads(s)
assert r.tzstr == z.tzstr
@pytest.mark.parametrize('data, parsed', [
('EST5EDT', ('EST', timedelta(hours=-5), 'EDT')),
('EST+5EDT+4', ('EST', timedelta(hours=-5), 'EDT+4')),
('CHAST-12:45CHADT',
('CHAST', timedelta(hours=12, minutes=45), 'CHADT')),
('LHST-10:30LHDT-11',
('LHST', timedelta(hours=10, minutes=30), 'LHDT-11')),
('<+06>-6', ('+06', timedelta(hours=6), '')),
])
def test_parse_name_offset(data, parsed):
assert parsed == parse_name_offset(data)
@pytest.mark.parametrize('std_dst, parsed', [
('EST5EDT', (timedelta(hours=-5), ('EST', 'EDT'), None)),
('CET-1CEST', (timedelta(hours=1), ('CET', 'CEST'), None)),
('MSK-3', (timedelta(hours=3), ('MSK', ''), None)),
('CHAST-12:45CHADT',
(timedelta(hours=12, minutes=45), ('CHAST', 'CHADT'), None)),
('LHST-10:30LHDT-11',
(timedelta(hours=10, minutes=30), ('LHST', 'LHDT'), timedelta(hours=11))),
])
def test_parse_std_dst(std_dst, parsed):
assert parsed == parse_std_dst(std_dst)
@pytest.mark.parametrize('time_str, delta', [
('3', timedelta(hours=3)),
('3:45', timedelta(hours=3, minutes=45)),
('123:12:10', timedelta(hours=123, minutes=12, seconds=10)),
('-12:45', -timedelta(hours=12, minutes=45)),
])
def test_parse_time(time_str, delta):
assert delta == parse_time(time_str)
@pytest.mark.parametrize('mnd_time, dt', [
('M10.5.0', (2016, 10, 30, 2)),
('M10.5.0/3', (2016, 10, 30, 3)),
('J59/0', (2016, 2, 28, 0)),
])
def test_parse_mnd_time(mnd_time, dt):
dt = datetime(*dt)
f = parse_mnd_time(mnd_time)
assert dt == f(dt.year)
@pytest.mark.parametrize('y, m, n, d, day', [
(2016, 8, 1, 0, 7),
(2016, 8, 5, 0, 28),
])
def test_dth_day_of_week_n(y, m, n, d, day):
dt = dth_day_of_week_n(y, m, n, d)
assert dt.weekday() == (d - 1) % 7
assert dt.timetuple()[:3] == (y, m, day)
@pytest.mark.parametrize('year, n, month, day', [
(2015, 59, 2, 28),
(2015, 60, 3, 1),
(2016, 59, 2, 28),
(2016, 60, 3, 1),
])
def test_julian_day(year, n, month, day):
assert datetime(year, month, day) == julian_day(year, n)
@pytest.mark.parametrize('tz, year, dst_start, dst_end', [
# New York
('EST5EDT,M3.2.0,M11.1.0', 2016,
datetime(2016, 3, 13, 2), datetime(2016, 11, 6, 2)),
# Sydney, Australia (Southern hemisphere)
('AEST-10AEDT,M10.1.0,M4.1.0/3', 2016,
datetime(2016, 10, 2, 2), datetime(2016, 4, 3, 3)),
# Exotic: Chatham, Pacific
('CHAST-12:45CHADT,M9.5.0/2:45,M4.1.0/3:45', 2016,
datetime(2016, 9, 25, 2, 45), datetime(2016, 4, 3, 3, 45)),
# Exotic: Tehran, Iran
('IRST-3:30IRDT,J80/0,J264/0', 2016,
datetime(2016, 3, 21, 0), datetime(2016, 9, 21)),
# Exotic: Lord Howe, Australia (1/2 hour DST offset)
('LHST-10:30LHDT-11,M10.1.0,M4.1.0', 2016,
datetime(2016, 10, 2, 2), datetime(2016, 4, 3, 2))
])
def test_posix_rules_transitions(tz, year, dst_start, dst_end):
info = PosixRules(tz)
assert (dst_start, dst_end) == info.transitions(year)
dst_time = (dst_start + timedelta(1)).replace(tzinfo=info)
assert dst_time.dst() == info.dst_save
std_time = (dst_end + timedelta(1)).replace(tzinfo=info)
assert std_time.dst() == ZERO
# Ambiguous hour: the next dst_save interval after DST end.
fold_time_0 = (dst_end + info.dst_save / 2).replace(tzinfo=info)
fold_time_1 = enfold(fold_time_0, 1)
assert fold_time_0.dst() == info.dst_save
assert fold_time_1.dst() == ZERO
# Skipped hour: the next dst_save interval after DST start
gap_time_0 = (dst_start + info.dst_save / 2).replace(tzinfo=info)
gap_time_1 = enfold(gap_time_0, 1)
assert gap_time_0.dst() == ZERO
assert gap_time_1.dst() == info.dst_save
# Check that DST is dst_save ahead of STD
delta = dst_time.utcoffset() - std_time.utcoffset()
assert delta == info.dst_save
# Check that STD/DST abbreviations are correctly encoded in the TZ string
std_dst = tz.split(',', 2)[0]
std = std_time.tzname()
dst = dst_time.tzname()
assert std_dst.startswith(std)
assert dst in std_dst
def test_repr():
z = ZoneInfo.fromdata([], [])
z.tzid = 'America/New_York'
assert repr(z) == "tz.zoneinfo.ZoneInfo('America/New_York')"
def test_far_future():
z = ZoneInfo.fromdata([], [], 'EST5EDT,M3.2.0,M11.1.0')
far_summer = datetime(9999, 6, 1, tzinfo=z)
far_winter = datetime(9999, 12, 1, tzinfo=z)
assert far_summer.dst()
assert not far_winter.dst()
def test_lord_howe_rules():
lh = PosixRules('LHST-10:30LHDT-11,M10.1.0,M4.1.0')
u = datetime(2016, 10, 1, 15, 30, tzinfo=timezone.utc) # DST start
t = u.astimezone(lh)
assert t.strftime('%a %b %d %T %Y %Z') == 'Sun Oct 02 02:30:00 2016 LHDT'
assert t.dst() == timedelta(minutes=30)
| mit | 2,958,533,872,526,860,000 | 33.352273 | 79 | 0.568199 | false |
sipb/homeworld | platform/spire/src/command.py | 1 | 11993 | import argparse
import contextlib
import functools
import inspect
import time
ANSI_ESCAPE_CODE_RED = "\x1b[1;31m"
ANSI_ESCAPE_CODE_YELLOW = "\x1b[1;33m"
ANSI_ESCAPE_CODE_RESET = "\x1b[1;0m"
class CommandFailedException(Exception):
def __init__(self, message, hint):
super().__init__(message)
self.hint = hint
def __str__(self):
return '{}command failed: {}{}{}'.format(
ANSI_ESCAPE_CODE_RED,
super().__str__(),
'\n{}{}'.format(ANSI_ESCAPE_CODE_YELLOW, self.hint)
if self.hint is not None else '',
ANSI_ESCAPE_CODE_RESET)
def fail(message: str, hint: str = None) -> None:
raise CommandFailedException(message, hint)
class MultipleExceptions(Exception):
def __init__(self, message, errs):
messages = [message] + [str(e) for e in errs]
super().__init__('\n'.join(messages))
class Mux:
def __init__(self, description, mapping):
self.__doc__ = description
self.mapping = mapping
def configure(self, command: list, parser: argparse.ArgumentParser):
parser.set_defaults(argparse_parser=parser)
subparsers = parser.add_subparsers()
for component, subcommand in self.mapping.items():
subparser = subparsers.add_parser(
component,
description=inspect.getdoc(subcommand),
help=subcommand.short_doc(),
formatter_class=argparse.RawDescriptionHelpFormatter)
try:
subcommand.configure(command + [component], subparser)
except AttributeError as e:
raise Exception("error configuring subcommand {!r}".format(subcommand)) from e
def short_doc(self):
return self.__doc__
def add_dry_run_argument(parser: argparse.ArgumentParser, dest: str):
parser.add_argument("--dry-run", dest=dest, action="store_true", help="show operations performed by command without actually running them")
def add_show_commands_argument(parser: argparse.ArgumentParser, dest: str):
parser.add_argument("--show-commands", dest=dest, action="store_true", help="show the equivalent sequence of commands without running them")
class SeqMux(Mux):
def configure(self, command: list, parser: argparse.ArgumentParser):
super().configure(command, parser)
add_dry_run_argument(parser, 'dry_run_outer')
add_show_commands_argument(parser, 'show_commands_outer')
class Command:
def __init__(self, func):
self.func = func
self.sig = inspect.signature(self.func)
self._remove_ops_from_sig()
self._command = None
def _remove_ops_from_sig(self):
parameters = list(self.sig.parameters.values())
if parameters[0].name != 'ops':
raise ValueError('first argument to command must be ops')
parameters = parameters[1:]
self.sig = self.sig.replace(parameters=parameters)
# so that this can still be called as the original function
def __call__(self, *args, **kwargs):
return self.func(*args, **kwargs)
def operate(self, op, *args, **kwargs):
"Schedule this command to be run by Operations"
self.func(op, *args, **kwargs)
def process_args(self, argparse_args):
"Process command-line arguments into function arguments"
cli_args = vars(argparse_args)
posargs = []
kwargs = {}
for name, param in self.sig.parameters.items():
if param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD:
if param.default == inspect.Parameter.empty:
posargs.append(cli_args[name])
continue
kwargs[name] = cli_args[name]
continue
if param.kind == inspect.Parameter.VAR_POSITIONAL:
posargs.extend(cli_args[name])
continue
raise Exception("python argument type not recognized")
# fail early if arguments do not match function signature
self.sig.bind(*posargs, **kwargs)
return posargs, kwargs
def invoke(self, aargs):
ops = Operations()
args, kwargs = self.process_args(aargs)
self.operate(ops, *args, **kwargs)
ops()
def configure(self, command: list, parser: argparse.ArgumentParser):
parser.set_defaults(argparse_invoke=self.invoke,
argparse_parser=parser)
# convert function signature into argparse configuration
for name, param in self.sig.parameters.items():
try:
if param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD:
if param.annotation == bool:
if param.default == inspect.Parameter.empty or param.default:
raise ValueError("boolean argument must specify default value of false")
parser.add_argument('--' + name, action='store_true')
continue
if param.default == inspect.Parameter.empty:
parser.add_argument(name)
continue
if not (isinstance(param.default, str) or param.default is None):
raise ValueError("default for string argument must be string or None")
parser.add_argument('--' + name, default=param.default)
continue
if param.kind == inspect.Parameter.VAR_POSITIONAL:
parser.add_argument(name, nargs=argparse.REMAINDER)
continue
raise ValueError("python argument kind {} not recognized".format(param.kind))
except Exception as e:
raise Exception("command {}: failed to configure argument {}".format(command, name)) from e
self._command = command
def command(self, *args, **kwargs):
"Produce a string representation of this command with the specified arguments"
if self._command is None:
return None
bound = self.sig.bind(*args, **kwargs)
cl = self._command[:]
for k, v in bound.arguments.items():
param = self.sig.parameters[k]
if param.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD:
if param.default == inspect.Parameter.empty:
cl.append(str(v))
continue
if param.annotation == bool:
if v:
cl.append('--{}'.format(k))
continue
cl.append('--{}={}'.format(k, v))
continue
if param.kind == inspect.Parameter.VAR_POSITIONAL:
cl.extend(str(x) for x in v)
continue
raise Exception("python argument type not recognized")
return ' '.join(cl)
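# Illustrative only (names assumed): for a command configured under
# ["spire", "foo"] whose wrapped function is f(ops, name, force: bool = False),
# cmd.command("bar", force=True) returns "spire foo bar --force".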
def short_doc(self):
doc = inspect.getdoc(self)
return doc.split('\n')[0] if doc else None
def wrapop(f):
return functools.update_wrapper(Command(f), f, updated=[])
class Seq(Command):
def configure(self, command: list, parser: argparse.ArgumentParser):
super().configure(command, parser)
add_dry_run_argument(parser, 'dry_run')
add_show_commands_argument(parser, 'show_commands')
def invoke(self, aargs):
op = Operations()
args, kwargs = self.process_args(aargs)
self.operate(op, *args, **kwargs)
if aargs.show_commands or aargs.show_commands_outer:
return op.print_commands()
op(dry_run=aargs.dry_run or aargs.dry_run_outer)
def wrapseq(f):
return functools.update_wrapper(Seq(f), f, updated=[])
class Simple(Command):
def _remove_ops_from_sig(self):
pass
def operate(self, op, *args, **kwargs):
op.add_operation(self.short_doc(), lambda: self.func(*args, **kwargs),
self.command(*args, **kwargs))
def invoke(self, aargs):
args, kwargs = self.process_args(aargs)
self.func(*args, **kwargs)
def wrap(f):
return functools.update_wrapper(Simple(f), f, updated=[])
# Decorator for delegating a function call to self._context if it exists,
# for use with Operations.context
def _delegate_to_context(f):
@functools.wraps(f)
def g(self, *args, **kwargs):
if self._context is None:
return f(self, *args, **kwargs)
return g(self._context, *args, **kwargs)
return g
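# Delegation sketch: while an ops.context(...) block is active, ops._context
# is the inner OperationsContext, so g(ops, ...) recurses as
# g(ops._context, ...) and finally runs f on that inner object -- i.e. the
# operation is recorded on the context rather than on ops itself.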
class Operations:
def __init__(self):
self._ops = []
self._context = None
@_delegate_to_context
def add_operation(self, name: str, callback, command=None):
self._ops.append((name, callback, command))
def add_command(self, cmd, *args, **kwargs):
cmd.operate(self, *args, **kwargs)
def add_subcommand(self, cmd, *args, **kwargs):
op = Operations()
cmd.operate(op, *args, **kwargs)
self.add_operation(cmd.short_doc(), op, cmd.command(*args, **kwargs))
@_delegate_to_context
@contextlib.contextmanager
def context(self, name, ctx):
"""Context manager wrapper.
Any command scheduled within context(ctx) will be run within ctx.
For instance:
>>> with ops.context("some name", ctx) as c:
... ops.add_operation(some_function)
In this example, some_function will be run within ctx at runtime,
as in
>>> with ctx:
... some_function()
were written.
For convenience,
the original context ctx is made available to the context body as c.
"""
opc = OperationsContext(ctx)
# delegate adding commands within the context to opc
self._context = opc
yield ctx # send ctx back to the context body for convenience
self._context = None # deactivate delegation
self.add_operation(name, opc)
return ctx
def print_commands(self):
for name, _, command in self._ops:
if command is None:
print(">> {}".format(name))
else:
print("$ {}".format(command))
def __call__(self, depth=0, dry_run=False) -> None:
if depth == 0 and not dry_run:
print("== executing %d operations ==" % len(self._ops))
print()
startat = time.time()
for i, (name, operation, _) in enumerate(self._ops, 1):
if not name:
name = str(operation)
print('{}-- {} {}--'.format(
' ' * depth, name,
'({}/{}) '.format(i, len(self._ops)) if depth == 0 else ''))
if isinstance(operation, Operations):
operation(depth=depth + 1, dry_run=dry_run)
continue
if dry_run:
continue
operation()
if depth == 0 and not dry_run:
print()
print("== all operations executed in %.2f seconds! ==" % (time.time() - startat))
class OperationsContext(Operations):
def __init__(self, ctx):
super().__init__()
self.ctx = ctx
def __call__(self, depth=0, dry_run=False):
if dry_run:
super().__call__(depth=depth, dry_run=dry_run)
else:
with self.ctx:
super().__call__(depth=depth, dry_run=dry_run)
def main_invoke(command):
# we have to hardcode the name "spire" here, because using bazel's py_binary tool for packaging our program into a
# zip file means we'll get the name of our __main__.py, rather than the name of the original command invoked.
parser = argparse.ArgumentParser(
prog="spire",
description="Administrative toolkit for deploying and maintaining Hyades clusters",
)
command.configure(["spire"], parser)
args = parser.parse_args()
if "argparse_invoke" in args:
args.argparse_invoke(args)
else:
args.argparse_parser.print_help()
return 0
| mit | 2,203,870,510,626,277,000 | 34.8 | 144 | 0.58309 | false |
eile/ITK | Modules/ThirdParty/pygccxml/src/pygccxml/declarations/mdecl_wrapper.py | 1 | 3186 | # Copyright 2004-2008 Roman Yakovenko.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
"""
defines class L{mdecl_wrapper_t} that allows working on a set of declarations
as if it were a single declaration.
The L{class<mdecl_wrapper_t>} saves the user from writing "for" loops within the code.
"""
import os
class call_redirector_t( object ):
"""Internal class used to call some function of objects"""
def __init__( self, name, decls ):
"""creates call_redirector_t instance.
@param name: name of method, to be called on every object in C{decls} list
@param decls: list of objects
"""
object.__init__( self )
self.name = name
self.decls = decls
def __call__( self, *arguments, **keywords ):
"""calls method C{self.name} on every object within C{self.decls} list"""
for d in self.decls:
callable_ = getattr(d, self.name)
callable_( *arguments, **keywords )
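# e.g. (illustrative) call_redirector_t('exclude', decls)() invokes
# d.exclude() on every d in decls; return values are discarded.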
class mdecl_wrapper_t( object ):
"""Multiple declarations wrapper.
The main purpose of this class is to allow a user to work on many
declarations as if they were a single declaration.
Example:
mb = module_builder_t( ... )
#lets say we want to exclude all member functions that return a reference to int:
mb.member_functions( return_type='int &' ).exclude()
"exclude" function will be called on every function that match the criteria.
"""
def __init__( self, decls ):
"""@param decls: list of declarations to operate on.
@type decls: list of L{declaration wrappers<decl_wrapper_t>}
"""
object.__init__( self )
self.__dict__['declarations'] = decls
def __nonzero__( self ):
return bool( self.declarations )
def __len__( self ):
"""returns the number of declarations"""
return len( self.declarations )
def __getitem__( self, index ):
"""provides access to declaration"""
return self.declarations[index]
def __iter__( self ):
return iter(self.declarations)
def __ensure_attribute( self, name ):
invalid_decls = filter( lambda d: not hasattr( d, name ), self.declarations )
sep = os.linesep + ' '
if invalid_decls:
raise RuntimeError( "Next declarations don't have '%s' attribute: %s"
% ( name, sep.join( map( str, invalid_decls ) ) ) )
def __setattr__( self, name, value ):
"""Updates the value of attribute on all declarations.
@param name: name of attribute
@param value: new value of attribute
"""
self.__ensure_attribute( name )
for d in self.declarations:
setattr( d, name, value )
def __getattr__( self, name ):
"""@param name: name of method
"""
return call_redirector_t( name, self.declarations )
def __contains__( self, item ):
return item in self.declarations
def to_list(self):
l = []
for d in self.declarations:
l.append( d )
return l | apache-2.0 | 5,379,909,319,253,769,000 | 32.197917 | 85 | 0.603264 | false |
Azure/azure-sdk-for-python | sdk/maintenance/azure-mgmt-maintenance/azure/mgmt/maintenance/aio/operations/_apply_updates_operations.py | 1 | 19789 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ApplyUpdatesOperations:
"""ApplyUpdatesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.maintenance.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def get_parent(
self,
resource_group_name: str,
resource_parent_type: str,
resource_parent_name: str,
provider_name: str,
resource_type: str,
resource_name: str,
apply_update_name: str,
**kwargs
) -> "_models.ApplyUpdate":
"""Track Updates to resource with parent.
Track maintenance updates to resource with parent.
:param resource_group_name: Resource group name.
:type resource_group_name: str
:param resource_parent_type: Resource parent type.
:type resource_parent_type: str
:param resource_parent_name: Resource parent identifier.
:type resource_parent_name: str
:param provider_name: Resource provider name.
:type provider_name: str
:param resource_type: Resource type.
:type resource_type: str
:param resource_name: Resource identifier.
:type resource_name: str
:param apply_update_name: applyUpdate Id.
:type apply_update_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplyUpdate, or the result of cls(response)
:rtype: ~azure.mgmt.maintenance.models.ApplyUpdate
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplyUpdate"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
accept = "application/json"
# Construct URL
url = self.get_parent.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'resourceParentType': self._serialize.url("resource_parent_type", resource_parent_type, 'str'),
'resourceParentName': self._serialize.url("resource_parent_name", resource_parent_name, 'str'),
'providerName': self._serialize.url("provider_name", provider_name, 'str'),
'resourceType': self._serialize.url("resource_type", resource_type, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
'applyUpdateName': self._serialize.url("apply_update_name", apply_update_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.MaintenanceError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplyUpdate', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_parent.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{providerName}/{resourceParentType}/{resourceParentName}/{resourceType}/{resourceName}/providers/Microsoft.Maintenance/applyUpdates/{applyUpdateName}'} # type: ignore
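# Usage sketch (not part of the generated code; client and argument values
# are assumed for illustration):
#   client = MaintenanceManagementClient(credential, subscription_id)
#   update = await client.apply_updates.get_parent(
#       "my-rg", "virtualMachineScaleSets", "my-vmss",
#       "Microsoft.Compute", "virtualMachines", "vm-0", "default")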
async def get(
self,
resource_group_name: str,
provider_name: str,
resource_type: str,
resource_name: str,
apply_update_name: str,
**kwargs
) -> "_models.ApplyUpdate":
"""Track Updates to resource.
Track maintenance updates to resource.
:param resource_group_name: Resource group name.
:type resource_group_name: str
:param provider_name: Resource provider name.
:type provider_name: str
:param resource_type: Resource type.
:type resource_type: str
:param resource_name: Resource identifier.
:type resource_name: str
:param apply_update_name: applyUpdate Id.
:type apply_update_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplyUpdate, or the result of cls(response)
:rtype: ~azure.mgmt.maintenance.models.ApplyUpdate
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplyUpdate"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'providerName': self._serialize.url("provider_name", provider_name, 'str'),
'resourceType': self._serialize.url("resource_type", resource_type, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
'applyUpdateName': self._serialize.url("apply_update_name", apply_update_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.MaintenanceError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplyUpdate', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{providerName}/{resourceType}/{resourceName}/providers/Microsoft.Maintenance/applyUpdates/{applyUpdateName}'} # type: ignore
async def create_or_update_parent(
self,
resource_group_name: str,
provider_name: str,
resource_parent_type: str,
resource_parent_name: str,
resource_type: str,
resource_name: str,
**kwargs
) -> "_models.ApplyUpdate":
"""Apply Updates to resource with parent.
Apply maintenance updates to resource with parent.
:param resource_group_name: Resource group name.
:type resource_group_name: str
:param provider_name: Resource provider name.
:type provider_name: str
:param resource_parent_type: Resource parent type.
:type resource_parent_type: str
:param resource_parent_name: Resource parent identifier.
:type resource_parent_name: str
:param resource_type: Resource type.
:type resource_type: str
:param resource_name: Resource identifier.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplyUpdate, or the result of cls(response)
:rtype: ~azure.mgmt.maintenance.models.ApplyUpdate
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplyUpdate"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
accept = "application/json"
# Construct URL
url = self.create_or_update_parent.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'providerName': self._serialize.url("provider_name", provider_name, 'str'),
'resourceParentType': self._serialize.url("resource_parent_type", resource_parent_type, 'str'),
'resourceParentName': self._serialize.url("resource_parent_name", resource_parent_name, 'str'),
'resourceType': self._serialize.url("resource_type", resource_type, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.MaintenanceError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplyUpdate', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_parent.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{providerName}/{resourceParentType}/{resourceParentName}/{resourceType}/{resourceName}/providers/Microsoft.Maintenance/applyUpdates/default'} # type: ignore
async def create_or_update(
self,
resource_group_name: str,
provider_name: str,
resource_type: str,
resource_name: str,
**kwargs
) -> "_models.ApplyUpdate":
"""Apply Updates to resource.
Apply maintenance updates to resource.
:param resource_group_name: Resource group name.
:type resource_group_name: str
:param provider_name: Resource provider name.
:type provider_name: str
:param resource_type: Resource type.
:type resource_type: str
:param resource_name: Resource identifier.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplyUpdate, or the result of cls(response)
:rtype: ~azure.mgmt.maintenance.models.ApplyUpdate
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplyUpdate"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'providerName': self._serialize.url("provider_name", provider_name, 'str'),
'resourceType': self._serialize.url("resource_type", resource_type, 'str'),
'resourceName': self._serialize.url("resource_name", resource_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.put(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.MaintenanceError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplyUpdate', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{providerName}/{resourceType}/{resourceName}/providers/Microsoft.Maintenance/applyUpdates/default'} # type: ignore
def list(
self,
**kwargs
) -> AsyncIterable["_models.ListApplyUpdate"]:
"""Get Configuration records within a subscription.
Get Configuration records within a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ListApplyUpdate or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.maintenance.models.ListApplyUpdate]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ListApplyUpdate"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-05-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ListApplyUpdate', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.MaintenanceError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Maintenance/applyUpdates'} # type: ignore
| mit | -6,968,445,616,262,737,000 | 47.148418 | 282 | 0.648188 | false |
OpenBfS/dokpool-plone | Plone/src/docpool.users/docpool/users/browser/personalpreferences.py | 1 | 1961 | # -*- coding: utf-8 -*-
from docpool.users import DocpoolMessageFactory as _
from docpool.users.interfaces import IDocPoolUsersLayer
from plone.app.users.browser.account import AccountPanelSchemaAdapter
from plone.app.users.browser.personalpreferences import PersonalPreferencesPanel
from plone.autoform import directives
from plone.supermodel import model
from plone.z3cform.fieldsets import extensible
from z3c.form import field
from z3c.form.browser.checkbox import CheckBoxFieldWidget
from zope import schema
from zope.component import adapter
from zope.interface import Interface
class IEnhancedPersonalPreferences(model.Schema):
""" Use all the fields from the default user data schema, and add various
extra fields.
"""
apps = schema.List(
title=_(u'label_user_apps', default=u'Applications'),
description=_(u'description_user_apps', default=u''),
required=False,
value_type=schema.Choice(
source="docpool.base.vocabularies.AvailableApps"),
)
directives.widget(apps=CheckBoxFieldWidget)
class EnhancedPersonalPreferencesAdapter(AccountPanelSchemaAdapter):
schema = IEnhancedPersonalPreferences
def get_apps(self):
return self.context.getProperty('apps', [])
def set_apps(self, value):
return self.context.setMemberProperties({'apps': value})
dp = property(get_apps, set_apps)
@adapter(Interface, IDocPoolUsersLayer, PersonalPreferencesPanel)
class PersonalPreferencesPanelExtender(extensible.FormExtender):
def update(self):
fields = field.Fields(IEnhancedPersonalPreferences)
self.add(fields)
# remove not needed fields
self.remove('wysiwyg_editor')
self.remove('language')
self.remove('timezone')
# little monkey patch
def updateWidgets(self):
super(PersonalPreferencesPanel, self).updateWidgets()
# skip the other fields
PersonalPreferencesPanel.updateWidgets = updateWidgets
| gpl-3.0 | 1,532,422,157,962,041,600 | 31.147541 | 80 | 0.748598 | false |
Kortemme-Lab/klab | klab/bio/pymolmod/scaffold_model_design.py | 1 | 9771 | #!/usr/bin/python
# encoding: utf-8
"""
scaffold_model_design.py
A PSE builder for scaffold/model/design structures.
Created by Shane O'Connor 2014.
The PyMOL commands are adapted from scripts developed and written by Roland A. Pache, Ph.D., Copyright (C) 2012, 2013.
"""
from klab.fs.fsio import write_file
from klab import colortext
from .psebuilder import PyMOLSessionBuilder, create_pymol_selection_from_PDB_residue_ids
# Notes:
#
# The select or cmd.select commands create the selection objects e.g. '(ExpStructure_mutations_s)' in the right pane. These
# are just selection sets so clicking on the name in the pane only results in a selection.
#
# The create or cmd.create commands create an object e.g. ExpStructure_mutations in the right pane. Clicking on this name
# toggles whether this selection is shown or not. To set up a default view, follow the create command with a show command
# e.g. show sticks, Scaffold_mutations.
#
# However, if you want the selection to be hidden when the PSE is loaded, you need to use the disable command, *not the hide command*
# e.g. disable spheres_Scaffold_HETATMs.
#
# There is another subtlety behavior difference between loading a PSE file versus pasting commands into the terminal of a PyMOL window.
# If you write e.g.
# select Scaffold_mutations, [some selection string]
# create Scaffold_mutations, [some selection string]
# into the terminal, two objects are created in the right pane. However, if you save the PSE and reload it, only one of these
# objects works as expected. Therefore, if you need both, use two separately named objects. Below, I instead write the equivalent of:
# select Scaffold_mutations_s, [some selection string]
# create Scaffold_mutations, [some selection string]
# to create two distinct objects. The '_s' is just my arbitrary convention to denote that the object came from a select command.
class ScaffoldModelDesignBuilder(PyMOLSessionBuilder):
def __init__(self, pdb_containers, settings = {}, rootdir = '/tmp'):
super(ScaffoldModelDesignBuilder, self).__init__(pdb_containers, settings, rootdir)
self.Scaffold = pdb_containers.get('Scaffold')
self.Model = pdb_containers['Model']
self.ExpStructure = pdb_containers.get('ExpStructure')
def _create_input_files(self):
#colortext.message('self.outdir: ' + self.outdir)
if self.Scaffold:
write_file(self._filepath('scaffold.pdb'), self.Scaffold.pdb_contents)
write_file(self._filepath('model.pdb'), self.Model.pdb_contents)
if self.ExpStructure:
write_file(self._filepath('design.pdb'), self.ExpStructure.pdb_contents)
def _add_preamble(self):
self.script.append("cd %(outdir)s" % self.__dict__)
def _add_load_section(self):
self.script.append("### Load the structures")
if self.ExpStructure:
self.script.append("load design.pdb, ExpStructure")
self.script.append("load model.pdb, RosettaModel")
self.script.append("load scaffold.pdb, Scaffold")
def _add_view_settings_section(self):
self.script.append('''
# Set general view options and hide waters
viewport 1200,800
hide eve
remove resn hoh
bg_color %(global.background-color)s
''' % self.color_scheme)
def _add_generic_chain_settings_section(self):
self.script.append('''
# Set generic chain and HETATM view options
show cartoon
util.cbc
# Hide selenomethionines and selenocysteines
hide sticks, resn CSE+SEC+MSE
util.cnc
set cartoon_side_chain_helper
set cartoon_rect_length, 0.9
set cartoon_oval_length, 0.9
set stick_radius, 0.2
''')
def _add_specific_chain_settings_section(self):
self.script.append('''
# Scaffold display
color %(Scaffold.bb)s, Scaffold
# RosettaModel display
show car, RosettaModel
color %(RosettaModel.bb)s, RosettaModel
''' % self.color_scheme)
if self.ExpStructure:
self.script.append('''
# ExpStructure display
show car, ExpStructure
color %(ExpStructure.bb)s, ExpStructure
''' % self.color_scheme)
def _add_superimposition_section(self):
self.script.append('''
# Superimpose the structures
super Scaffold, RosettaModel''')
if self.ExpStructure:
self.script.append("super ExpStructure, RosettaModel")
def _add_orient_view_section(self):
pass
def _add_scaffold_view_section(self):
self.script.append('''
# Scaffold view options
hide lines, Scaffold
hide ribbon, Scaffold
show car, Scaffold
util.cbc Scaffold''')
if self.ExpStructure:
# Hide the scaffold if there is an experimental structure
self.script.append('''
disable Scaffold''')
def _add_residue_highlighting_section(self):
if self.Scaffold:
scaffold_selection = 'Scaffold and (%s)' % (create_pymol_selection_from_PDB_residue_ids(self.Scaffold.residues_of_interest))
self.script.append('''
### Scaffold objects ###
# Scaffold mutations
has_mutations = cmd.count_atoms('%(scaffold_selection)s') > 0
if has_mutations: cmd.select('Scaffold_mutations_s', '%(scaffold_selection)s');
if has_mutations: cmd.create('Scaffold_mutations', '%(scaffold_selection)s');
if has_mutations: cmd.show('sticks', 'Scaffold_mutations')
''' % vars())
self.script.append('''
if has_mutations: cmd.color('%(Scaffold.mutations)s', 'Scaffold_mutations')
# Scaffold HETATMs - create
has_hetatms = cmd.count_atoms('Scaffold and het and !(resn CSE+SEC+MSE)') > 0
if has_hetatms: cmd.create('Scaffold_HETATMs', 'Scaffold and het and !(resn CSE+SEC+MSE)');
if has_hetatms: cmd.show('sticks', 'Scaffold_HETATMs')
if has_hetatms: cmd.disable('Scaffold_HETATMs')
if has_hetatms: cmd.create('spheres_Scaffold_HETATMs', 'Scaffold and het and !(resn CSE+SEC+MSE)');
if has_hetatms: cmd.show('spheres', 'spheres_Scaffold_HETATMs')
if has_hetatms: cmd.disable('spheres_Scaffold_HETATMs')
''' % self.color_scheme)
#self.script.append('set label_color, black')
#self.script.append('label n. CA and Scaffold and chain A and i. 122, "A122" ')
model_selection = 'RosettaModel and (%s)' % (create_pymol_selection_from_PDB_residue_ids(self.Model.residues_of_interest))
self.script.append('''
### Rosetta model objects ###
# Rosetta model mutations
has_mutations = cmd.count_atoms('%(model_selection)s') > 0
if has_mutations: cmd.select('RosettaModel_mutations_s', '%(model_selection)s');
if has_mutations: cmd.create('RosettaModel_mutations', '%(model_selection)s');
if has_mutations: cmd.show('sticks', 'RosettaModel_mutations')
''' % vars())
self.script.append('''
if has_mutations: cmd.color('%(RosettaModel.mutations)s', 'RosettaModel_mutations')
# Rosetta model HETATMs - create and display
has_hetatms = cmd.count_atoms('RosettaModel and het and !(resn CSE+SEC+MSE)') > 0
if has_hetatms: cmd.create('RosettaModel_HETATMs', 'RosettaModel and het and !(resn CSE+SEC+MSE)');
if has_hetatms: cmd.show('sticks', 'RosettaModel_HETATMs')
if has_hetatms: cmd.create('spheres_RosettaModel_HETATMs', 'RosettaModel and het and !(resn CSE+SEC+MSE)');
if has_hetatms: cmd.show('spheres', 'spheres_RosettaModel_HETATMs')
if has_hetatms: cmd.disable('spheres_RosettaModel_HETATMs')
''' % self.color_scheme)
if self.ExpStructure:
exp_structure_selection = 'ExpStructure and (%s)' % (create_pymol_selection_from_PDB_residue_ids(self.ExpStructure.residues_of_interest))
self.script.append('''
### ExpStructure objects ###
# ExpStructure mutations
has_mutations = cmd.count_atoms('%(exp_structure_selection)s') > 0
if has_mutations: cmd.select('ExpStructure_mutations_s', '%(exp_structure_selection)s');
if has_mutations: cmd.create('ExpStructure_mutations', '%(exp_structure_selection)s');
if has_mutations: cmd.show('sticks', 'ExpStructure_mutations')
''' % vars())
self.script.append('''if has_mutations: cmd.color('%(ExpStructure.mutations)s', 'ExpStructure_mutations')
# ExpStructure HETATMs - create and display
has_hetatms = cmd.count_atoms('ExpStructure and het and !(resn CSE+SEC+MSE)') > 0
if has_hetatms: cmd.create('ExpStructure_HETATMs', 'ExpStructure and het and !(resn CSE+SEC+MSE)');
if has_hetatms: cmd.show('sticks', 'ExpStructure_HETATMs')
if has_hetatms: cmd.create('spheres_ExpStructure_HETATMs', 'ExpStructure and het and !(resn CSE+SEC+MSE)');
if has_hetatms: cmd.show('spheres', 'spheres_ExpStructure_HETATMs')
if has_hetatms: cmd.disable('spheres_ExpStructure_HETATMs')
#ExpStructure and het and !(resn CSE+SEC+MSE)')
''' % self.color_scheme)
def _add_raytracing_section(self):
self.script.append('''
# Atom coloring
select none
util.cnc
# Set lighting
set two_sided_lighting, on
''')
def _add_postamble(self):
self.script.append('''
# Show only polar hydrogens
hide (hydro)
# Set zoom
zoom
# Re-order the objects in the right pane
order *,yes
order Scaffold_mutations_s, location=bottom
order RosettaModel_mutations_s, location=bottom
order ExpStructure_mutations_s, location=bottom
order spheres_Scaffold_HETATMs, location=bottom
order spheres_RosettaModel_HETATMs, location=bottom
order spheres_ExpStructure_HETATMs, location=bottom
save session.pse
quit
''')
def _create_script(self):
self.script = []
self._add_preamble()
self._add_load_section()
self._add_view_settings_section()
self._add_generic_chain_settings_section()
self._add_specific_chain_settings_section()
self._add_superimposition_section()
self._add_orient_view_section()
self._add_scaffold_view_section()
self._add_residue_highlighting_section()
self._add_raytracing_section()
self._add_postamble()
self.script = '\n'.join(self.script)
| mit | -6,901,978,881,668,865,000 | 37.624506 | 149 | 0.707911 | false |
tchellomello/home-assistant | homeassistant/components/kodi/device_trigger.py | 1 | 2824 | """Provides device automations for Kodi."""
from typing import List
import voluptuous as vol
from homeassistant.components.automation import AutomationActionType
from homeassistant.components.device_automation import TRIGGER_BASE_SCHEMA
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_PLATFORM,
CONF_TYPE,
)
from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, entity_registry
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN, EVENT_TURN_OFF, EVENT_TURN_ON
TRIGGER_TYPES = {"turn_on", "turn_off"}
TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend(
{
vol.Required(CONF_ENTITY_ID): cv.entity_id,
vol.Required(CONF_TYPE): vol.In(TRIGGER_TYPES),
}
)
async def async_get_triggers(hass: HomeAssistant, device_id: str) -> List[dict]:
"""List device triggers for Kodi devices."""
registry = await entity_registry.async_get_registry(hass)
triggers = []
# Get all the integrations entities for this device
for entry in entity_registry.async_entries_for_device(registry, device_id):
if entry.domain == "media_player":
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "turn_on",
}
)
triggers.append(
{
CONF_PLATFORM: "device",
CONF_DEVICE_ID: device_id,
CONF_DOMAIN: DOMAIN,
CONF_ENTITY_ID: entry.entity_id,
CONF_TYPE: "turn_off",
}
)
return triggers
@callback
def _attach_trigger(
hass: HomeAssistant, config: ConfigType, action: AutomationActionType, event_type
):
@callback
def _handle_event(event: Event):
if event.data[ATTR_ENTITY_ID] == config[CONF_ENTITY_ID]:
hass.async_run_job(
action,
{"trigger": {**config, "description": event_type}},
event.context,
)
return hass.bus.async_listen(event_type, _handle_event)
async def async_attach_trigger(
hass: HomeAssistant,
config: ConfigType,
action: AutomationActionType,
automation_info: dict,
) -> CALLBACK_TYPE:
"""Attach a trigger."""
config = TRIGGER_SCHEMA(config)
if config[CONF_TYPE] == "turn_on":
return _attach_trigger(hass, config, action, EVENT_TURN_ON)
if config[CONF_TYPE] == "turn_off":
return _attach_trigger(hass, config, action, EVENT_TURN_OFF)
return lambda: None
| apache-2.0 | 1,542,951,620,193,615,400 | 29.365591 | 85 | 0.61296 | false |
Jonwing/morphling | morphling/renderer.py | 1 | 4057 | # -*- coding: utf-8 -*-
import re
class Renderer(object):
'''
the default renderer for parser
'''
_escape_pattern = re.compile(r'&(?!#?\w+;)')
_not_allowed_schemes = ['javascript:', 'vbscript:']
# HTML tags
_p = 'p'
_tr = 'tr'
def __init__(self, **kwargs):
self._escape = kwargs.get('escape', True)
@property
def p(self):
'''
<p>
'''
return self._p
@property
def close_p(self):
return '</%s>' % self._p
@property
def placeholder(self):
return ''
@property
def hr(self):
return '<hr>\n'
@property
def line_break(self):
return '<br>\n'
def escape(self, content, quote=False, smart_amp=True):
if smart_amp:
content = self._escape_pattern.sub('&', content)
else:
content = content.replace('&', '&')
content = content.replace('<', '<').replace('>', '>')
if quote:
content = content.replace('"', '"').replace("'", ''')
return content
def escape_link(self, link):
lower_url = link.lower().strip('\x00\x1a \n\r\t')
for scheme in self._not_allowed_schemes:
if lower_url.startswith(scheme):
return ''
return self.escape(link, quote=True, smart_amp=False)
def open_tag(self, tag, **kwargs):
extras = ['%s=%s' % (k, v) for k, v in kwargs.items() if v]
tag = getattr(self, ''.join(['_', tag]), tag)
return '<{tag} {attrs}>'.format(tag=tag, attrs=' '.join(extras))
def close_tag(self, tag, breakline=False):
tag = getattr(self, ''.join(['_', tag]), tag)
if breakline:
return '</%s>\n' % tag
return '</%s>' % tag
def block_html(self, tag, content, breakline=True, **kwargs):
fmt = '{open_t}{cnt}{close_t}'
return fmt.format(
open_t=self.open_tag(tag, **kwargs),
cnt=content,
close_t=self.close_tag(tag, breakline=breakline)
)
def tr(self, content, **kwargs):
return self.block_html('tr', content, **kwargs)
def table(self, header, body):
return (
'<table>\n<thead>%s</thead>\n'
'<tbody>\n%s</tbody>\n</table>\n'
) % (header, body)
def code(self, content):
return self.block_html('code', content, False)
def emphasis(self, content):
return self.block_html('em', content, False)
def double_emphasis(self, content):
return self.block_html('strong', content, False)
def strikethrough(self, content):
return self.block_html('del', content, False)
def footnote_ref(self, ref_key, index):
hyperlink = self.block_html(
'a', index, breakline=False, **{'class': 'footnote', 'href': '#fn:%s' % ref_key})
return self.block_html('sup', hyperlink, False)
# return '<sup><a class=footnote href=#fn:%s>%s</a></sup>' % (ref_key, index)
def link(self, addr, text):
return self.block_html('a', text, breakline=False, href=addr)
# return '<a href={addr}>{text}<a>'.format(addr=addr, text=text)
def img(self, src, alt=None, title=None):
seg = '<img src=%s' % (self.escape_link(src) if self._escape else src)
if alt:
seg += 'alt=%s' % (self.escape(alt) if self._escape else alt)
if title:
seg += 'title=%s' % (self.escape(title) if self._escape else title)
return seg + '>'
def fence(self, code, language=None, escape=True):
if escape:
code = self.escape(code, quote=True, smart_amp=False)
lang = 'class=lang-%s' % language if language else ''
return '<pre><code {cls}>{code}\n</code></pre>'.format(cls=lang, code=code)
def link_definition(self, key, link, **kwargs):
fmt = '{open_p}[{key}] : {link}{close_p}'
return fmt.format(open_p=self.open_tag(self.p, **kwargs),
key=key, link=link, close_p=self.close_tag(self.p))
| mit | 8,642,026,942,259,503,000 | 31.198413 | 93 | 0.539808 | false |
knittledan/imageResizer | imageResizer.py | 1 | 6167 | # -*- coding: utf-8 -*-
#-------------------------------------------------------------------------------
# imageUtility.py
#-------------------------------------------------------------------------------
# Software License
# The Python Imaging Library (PIL) is
#
# Copyright © 1997-2011 by Secret Labs AB
# Copyright © 1995-2011 by Fredrik Lundh
#-------------------------------------------------------------------------------
import os
from PIL import Image
class Parameters(object):
# where to save thumnails and images
thumbNailPath = r'C:\Users\codingCobra\Desktop\backgrounds'
imagePath = r'C:\Users\codingCobra\Desktop\backgrounds'
# default parameters
maxImageWidth = 900
thumbHalfRez = 200
thumbWidth = 110
thumbHeight = 90
class ImageResizer(object):
"""
Utilities to Resize and Crop an image based on parameters.
Supply a path to the image that needs processing.
"""
WIDTH = 'width'
HEIGHT = 'height'
RESIZE = 'resize'
THUMB_NAIL = 'thumbNail'
def __init__(self, imagePath, parameters):
self.originalImage = self.__openImage(imagePath)
self.image = self.originalImage
self.mode = self.image.mode
self.format = self.image.format
self.width = self.image.size[0]
self.height = self.image.size[1]
self.name = self.__fileName(self.image)
self.savePrefix = 'resized_'
self.thumbPrefix = 'thumbnail_'
self.parameters = parameters
def __getattr__(self, key):
print 'ImageResizer has no attribute %s' % key
def __delattr__(self, key):
print 'You are not allowed to delete attributes.'
#---------------------------------------------------------------------------
# Methods
#---------------------------------------------------------------------------
def resizeImage(self, scaleBy=None, size=None):
"""
Uniformally Resize an image by height or width.
:param scaleBy: width or height
:param size: pixels count
:return:
"""
sizeDefault = int(self.parameters.maxImageWidth)
scaleBy = self.WIDTH if scaleBy is None else scaleBy
size = sizeDefault if size is None else size
self.__downRezImage(scaleBy, size)
self.__saveImage(self.RESIZE)
def createThumbNail(self):
"""
Resize image to smaller size then crop based on parameters
thumbWidth and thumbHeight
:return:
"""
halfRezWidth = int(self.parameters.thumbHalfRez)
newWidth = int(self.parameters.thumbWidth)
newHeight = int(self.parameters.thumbHeight)
if self.width > halfRezWidth:
self.__downRezImage(self.WIDTH, halfRezWidth)
left = (self.width - newWidth) /2
upper = (self.height - newHeight)/2
right = (self.width + newWidth) /2
lower = (self.height + newHeight)/2
box = (left, upper, right, lower)
self.image = self.image.crop(box)
self.__saveImage(self.THUMB_NAIL)
#---------------------------------------------------------------------------
# Helpers
#---------------------------------------------------------------------------
def __saveImage(self, saveType):
"""
Save processed image as thumbNail or resize.
:param saveType: resize or thumbNail
:return: boolean
"""
if saveType == self.RESIZE:
newName = str(self.savePrefix) + str(self.name)
savePath = self.parameters.imagePath
elif saveType == self.THUMB_NAIL:
newName = str(self.thumbPrefix) + str(self.name)
savePath = self.parameters.thumbNailPath
imagePath = os.path.join(savePath, newName)
try:
self.image.save(imagePath, "JPEG")
return True
except IOError, e:
raise IOError('Unable to save new image: %s' % str(e))
def __downRezImage(self, region, size):
"""
Resize image into memory before cropping.
:param region: width or height
:param size: pixels count
:return:
"""
if region == self.WIDTH:
ratio = float(size)/float(self.width)
newWidth = int(size)
newHeight = int(self.height*ratio)
if region == self.HEIGHT:
ratio = float(size)/float(self.height)
newHeight = int(size)
newWidth = int(self.width*ratio)
self.image = self.image.resize((newWidth, newHeight), Image.ANTIALIAS)
self.width = newWidth
self.height = newHeight
#---------------------------------------------------------------------------
# Statics
#---------------------------------------------------------------------------
@staticmethod
def __openImage(image):
"""
Open image using the PIL.
:param image: path to image
:return: PIL image obj
"""
if os.path.isfile(image):
try:
return Image.open(image)
except IOError:
raise
else:
mssage = 'This is not a file'
raise IOError(mssage)
@staticmethod
def __fileName(image):
"""
Get the name of the image without the path.
:param image: path to image
:return: imageName.ext
"""
return os.path.split(image.filename)[-1]
# example usages
path = r'C:\Users\codingCobra\Desktop\backgrounds\7YMpZvD.jpg'
image = ImageResizer(path, Parameters())
image.savePrefix = 'resized-1_'
image.thumbPrefix = 'thumb-1_'
image.resizeImage(scaleBy='width', size=700)
image.createThumbNail()
image = ImageResizer(path, Parameters())
image.savePrefix = 'resized-2_'
image.thumbPrefix = 'thumb-2_'
image.resizeImage(scaleBy='height', size=600)
image.createThumbNail()
image = ImageResizer(path, Parameters())
image.savePrefix = 'resized-3_'
image.thumbPrefix = 'thumb-3_'
image.resizeImage()
image.createThumbNail()
| mit | -2,618,635,322,029,660,700 | 31.967914 | 80 | 0.525061 | false |
saulshanabrook/pushgp.py | pushgp/push/instructions/base.py | 1 | 1227 | from functools import wraps
from inspect import signature
from . import utils
@utils.optional_arguments
def simple_instruction(f, multiple_return_items=False):
'''
Wraps an instruction funtion ``f`` to have it take its arguments from
Push stacks and add the return value to the ``exec``.
It requires that the function arguments be annotated with the stacks that
they draw from. If the stacks don't have enough values on them, it will
not modify anything.
If ``multiple_return_items`` is True, than will assume the function returns
an iterable of items, instead of only one item. It will push each
indivually to the ``exec``, instead of pushing the iterable as one
item.
'''
@wraps(f)
def wrapper(Push):
kwargs = {}
for parameter in signature(f).parameters.values():
try:
kwargs[parameter.name] = Push[parameter.annotation].pop()
except IndexError:
return
return_item = f(**kwargs)
if return_item is not None:
if multiple_return_items:
Push['exec'].extend(return_item)
else:
Push['exec'].append(return_item)
return wrapper
| bsd-3-clause | 5,302,662,193,434,582,000 | 33.083333 | 79 | 0.638957 | false |
emmanvg/cti-stix-elevator | stix2elevator/stix_stepper.py | 1 | 3114 | import io
import json
import sys
from six import text_type
from stix2.pattern_visitor import create_pattern_object
def step_cyber_observable(obj):
type_name20 = obj["type"]
if type_name20 == "file":
obj.pop("is_encrypted", None)
obj.pop("encryption_algorithm", None)
obj.pop("decryption_key", None)
if "extensions" in obj:
exts = obj["extensions"]
if "archive-ext" in exts:
exts["archive-ext"].pop("version", None)
if "raster-image-ext" in exts:
exts["raster-image-ext"].pop("image_compression_algorithm", None)
elif type_name20 == "network-traffic":
if "extensions" in obj:
exts = obj["extensions"]
if "socket-ext" in exts:
exts["socket-ext"].pop("protocol_family", None)
elif type_name20 == "process":
obj.pop("name", None)
obj.pop("arguments", None)
if "binary_ref" in obj:
obj["image_ref"] = obj["binary_ref"]
obj.pop("binary_ref", None)
elif type_name20 == "user-account":
if "password_last_changed" in obj:
obj["credential_last_changed"] = obj["password_last_changed"]
obj.pop("password_last_changed", None)
def step_observable_data(object):
for key, obj in object["objects"].items():
step_cyber_observable(obj)
def step_pattern(pattern):
pattern_obj = create_pattern_object(pattern, module_suffix="Elevator", module_name="stix2elevator.convert_pattern")
return text_type(pattern_obj.toSTIX21())
def step_object(object):
object["spec_version"] = "2.1"
if (object["type"] == "indicator" or object["type"] == "malware" or
object["type"] == "report" or object["type"] == "threat-actor" or
object["type"] == "tool"):
if "labels" in object:
types_property_name = object["type"].replace("-", "_") + "_types"
object[types_property_name] = object["labels"]
object.pop("labels")
if object["type"] == "indicator":
object["pattern"] = step_pattern(object["pattern"])
elif object["type"] == "observed-data":
step_observable_data(object)
# update "in place"
def step_bundle(bundle):
for o in bundle["objects"]:
step_object(o)
bundle.pop("spec_version", None)
return bundle
def step_file(fn, encoding="utf-8"):
sys.setrecursionlimit(5000)
with io.open(fn, "r", encoding=encoding) as json_data:
json_content = json.load(json_data)
if 'spec_version' in json_content and "type" in json_content and json_content["type"] == "bundle":
json_string = json.dumps(step_bundle(json_content),
ensure_ascii=False,
indent=4,
separators=(',', ': '),
sort_keys=True)
print(json_string)
return json_string
else:
print("stix_stepper only converts STIX 2.0 to STIX 2.1")
return
if __name__ == '__main__':
step_file(sys.argv[1])
| bsd-3-clause | -5,713,456,181,478,497,000 | 33.21978 | 119 | 0.571291 | false |
matejc/searx | searx/engines/bing_images.py | 1 | 3090 | """
Bing (Images)
@website https://www.bing.com/images
@provide-api yes (http://datamarket.azure.com/dataset/bing/search),
max. 5000 query/month
@using-api no (because of query limit)
@results HTML (using search portal)
@stable no (HTML can change)
@parse url, title, img_src
@todo currently there are up to 35 images receive per page,
because bing does not parse count=10.
limited response to 10 images
"""
from urllib import urlencode
from lxml import html
from json import loads
import re
from searx.engines.bing import _fetch_supported_languages, supported_languages_url
# engine dependent config
categories = ['images']
paging = True
safesearch = True
time_range_support = True
# search-url
base_url = 'https://www.bing.com/'
search_string = 'images/search?{query}&count=10&first={offset}'
time_range_string = '&qft=+filterui:age-lt{interval}'
thumb_url = "https://www.bing.com/th?id={ihk}"
time_range_dict = {'day': '1440',
'week': '10080',
'month': '43200',
'year': '525600'}
# safesearch definitions
safesearch_types = {2: 'STRICT',
1: 'DEMOTE',
0: 'OFF'}
_quote_keys_regex = re.compile('({|,)([a-z][a-z0-9]*):(")', re.I | re.U)
# do search-request
def request(query, params):
offset = (params['pageno'] - 1) * 10 + 1
# required for cookie
if params['language'] == 'all':
language = 'en-US'
else:
language = params['language']
search_path = search_string.format(
query=urlencode({'q': query}),
offset=offset)
params['cookies']['SRCHHPGUSR'] = \
'NEWWND=0&NRSLT=-1&SRCHLANG=' + language.split('-')[0] +\
'&ADLT=' + safesearch_types.get(params['safesearch'], 'DEMOTE')
params['url'] = base_url + search_path
if params['time_range'] in time_range_dict:
params['url'] += time_range_string.format(interval=time_range_dict[params['time_range']])
return params
# get response from search-request
def response(resp):
results = []
dom = html.fromstring(resp.text)
# parse results
for result in dom.xpath('//div[@class="dg_u"]/div'):
link = result.xpath('./a')[0]
# parse json-data (it is required to add a space, to make it parsable)
json_data = loads(_quote_keys_regex.sub(r'\1"\2": \3', link.attrib.get('m')))
title = link.attrib.get('t1')
ihk = link.attrib.get('ihk')
# url = 'http://' + link.attrib.get('t3')
url = json_data.get('surl')
img_src = json_data.get('imgurl')
# append result
results.append({'template': 'images.html',
'url': url,
'title': title,
'content': '',
'thumbnail_src': thumb_url.format(ihk=ihk),
'img_src': img_src})
# TODO stop parsing if 10 images are found
if len(results) >= 10:
break
# return results
return results
| agpl-3.0 | 4,278,168,107,756,276,700 | 27.878505 | 97 | 0.572492 | false |
oddskool/varan | varan/server.py | 1 | 1470 | # -*- coding: utf8 -*-
'''
Created on 27 dec. 2012
@author: ediemert
'''
import argparse
import ConfigParser
from twisted.internet import reactor
from varan import logger, VERSION
from varan.ts_store import TSStore
from varan.stream import Stream
from varan.application import Application
parser = argparse.ArgumentParser(description='varan : realtime twitter monitoring')
parser.add_argument('--config','-c', required=True)
parser.add_argument('--user','-u', required=True)
if __name__ == '__main__':
import sys
logger.info('-'*20+' varan v.%s '%VERSION+'-'*20)
args = parser.parse_args()
config = ConfigParser.ConfigParser()
config.read(args.config)
config.add_section('authentication')
config.set('authentication', 'password', args.user)
store = TSStore(config)
store.queries = [ q.strip() for q in config.get('timeseries','queries').split(',') ]
stream = Stream(config, store)
try:
#deferToThread(stream.__call__)
stream.start()
logger.info('stream listen started @main')
reactor.listenTCP(int(config.get('ws', 'port')),
Application(store))
logger.info('reactor will start @main')
reactor.run()
except KeyboardInterrupt:
logger.critical('got ctrl+c, quit')
stream.stop()
logger.critical('got ctrl+c, quit (2)')
reactor.stop()
logger.critical('got ctrl+c, quit (3)')
sys.exit(0)
| mit | -3,295,993,041,132,030,000 | 27.823529 | 88 | 0.638776 | false |
kcleong/rover_challenge | tests/test_grid.py | 1 | 1349 | # -*- coding: utf-8 -*-
from challenge.grid import Grid
from challenge.rover import Rover
import unittest
class GridTestSuite(unittest.TestCase):
"""Advanced test cases."""
def test_instantion(self):
""" Test if we can instantiate a grid object """
grid = Grid(5, 5)
self.assertEqual(type(grid), Grid)
def test_invalid_instantation(self):
""" Test if we can instantiate a grid object with invalid values """
with self.assertRaises(ValueError):
Grid('a', None) # This should give a value error
def test_xy_max(self):
""" Test if a given coords are set in the grid """
max_x = 7
max_y = 9
grid = Grid(max_x, max_y)
self.assertEqual(grid.max_x, max_x)
self.assertEqual(grid.max_y, max_y)
def test_turn(self):
""" Test a turn movement in the grid """
starting_pos = '12N'
turn = 'L'
grid = Grid(5, 5)
output = grid.move(starting_pos, turn)
self.assertEqual(output, '12W')
def test_movement(self):
""" Test a forward movement in the grid """
starting_pos = '12W'
movement = 'M'
grid = Grid(5, 5)
output = grid.move(starting_pos, movement)
self.assertEqual(output, '02W')
if __name__ == '__main__':
unittest.main() | mit | 6,842,073,942,312,527,000 | 24.471698 | 76 | 0.576723 | false |
bixbydev/Bixby | util/sqltocsv.py | 1 | 1559 |
#!/usr/bin/python
#------------------------------------------------------------------------------
# Copyright (C) 2013 Bradley Hilton <[email protected]>
#
# Distributed under the terms of the GNU GENERAL PUBLIC LICENSE V3.
#______________________________________________________________________________
# There is stuff below you may need to change. Specifically in the Oracle, MySQL, And Google Provisioning API Stuff sections.
# Filename: sqltocsv.py
import csv
def csv_from_sql(query, outputfile, dbcursor, supress_header=False):
f = open(outputfile, 'wb')
dbcursor.execute(query)
queryresults = dbcursor.fetchall()
csvwriter = csv.writer(f, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
if not supress_header:
csvwriter.writerow([i[0] for i in queryresults.description])
for row in queryresults:
csvwriter.writerow(row)
print row
f.close()
def csv_to_sql(csvfile, db_table, dbcursor=None):
"""Opens a CSV file. Reads the row headers
and generates an INSERT statement and inserts
rows into file. Row headers must match column names
in the insert table."""
with open(csvfile, 'rU') as f:
reader = csv.reader(f, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
headers = reader.next()
print headers
data = []
insert = 'INSERT INTO %s \n(' %db_table
columns = ', '.join(headers) +') \n'
values = 'VALUES ('+'%s, ' *(len(headers) - 1) +'%s)'
query = insert + columns + values
for row in reader:
if dbcursor:
dbcursor.execute(query, row)
print query %tuple(row)
| gpl-3.0 | -5,794,217,589,997,273,000 | 31.479167 | 125 | 0.627967 | false |
yarikoptic/Fail2Ban-Old-SVNGIT | client/jailsreader.py | 1 | 2336 | # This file is part of Fail2Ban.
#
# Fail2Ban is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Fail2Ban is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Fail2Ban; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Author: Cyril Jaquier
#
# $Revision$
__author__ = "Cyril Jaquier"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2004 Cyril Jaquier"
__license__ = "GPL"
import logging
from configreader import ConfigReader
from jailreader import JailReader
# Gets the instance of the logger.
logSys = logging.getLogger("fail2ban.client.config")
class JailsReader(ConfigReader):
def __init__(self):
ConfigReader.__init__(self)
self.__jails = list()
def read(self):
ConfigReader.read(self, "jail")
def getOptions(self, section = None):
opts = []
self.__opts = ConfigReader.getOptions(self, "Definition", opts)
if section:
# Get the options of a specific jail.
jail = JailReader(section)
jail.read()
ret = jail.getOptions()
if ret:
if jail.isEnabled():
# We only add enabled jails
self.__jails.append(jail)
else:
logSys.error("Errors in jail '%s'. Skipping..." % section)
return False
else:
# Get the options of all jails.
for sec in self.sections():
jail = JailReader(sec)
jail.read()
ret = jail.getOptions()
if ret:
if jail.isEnabled():
# We only add enabled jails
self.__jails.append(jail)
else:
logSys.error("Errors in jail '" + sec + "'. Skipping...")
return False
return True
def convert(self):
stream = list()
for opt in self.__opts:
if opt == "":
stream.append([])
# Convert jails
for jail in self.__jails:
stream.extend(jail.convert())
# Start jails
for jail in self.__jails:
stream.append(["start", jail.getName()])
return stream
| gpl-2.0 | -9,168,791,466,367,704,000 | 25.862069 | 75 | 0.67637 | false |
GitAcrown/Kreybot | Krey/cogs/economy.py | 1 | 27235 | import discord
from discord.ext import commands
from cogs.utils.dataIO import dataIO, fileIO
from collections import namedtuple, defaultdict
from datetime import datetime
from random import randint
from copy import deepcopy
from .utils import checks
from __main__ import send_cmd_help
import os
import time
import logging

# Module-level logger for the audit messages in [p]bank transfer/set below.
# (The logger name is an assumption of this sketch; adjust to the bot's setup.)
logger = logging.getLogger("red.economy")

# Modified defaults for this fork; PAYDAY_TIME and SLOT_TIME are in seconds.
default_settings = {"BOOST" : 1, "PAYDAY_TIME" : 86400, "PAYDAY_CREDITS" : 150, "SLOT_MIN" : 5, "SLOT_MAX" : 500, "SLOT_TIME" : 120}
slot_payouts = """Gains possibles dans la machine:
:two: :two: :six: Offre * 5000
:four_leaf_clover: :four_leaf_clover: :four_leaf_clover: +1000
:cherries: :cherries: :cherries: +800
:two: :six: Offre * 4
:cherries: :cherries: Offre * 3
Trois symboles: +500
Deux symboles: Offre * 2"""
class BankError(Exception):
pass
class AccountAlreadyExists(BankError):
pass
class NoAccount(BankError):
pass
class InsufficientBalance(BankError):
pass
class NegativeValue(BankError):
pass
class SameSenderAndReceiver(BankError):
pass
class Bank:
def __init__(self, bot, file_path):
self.accounts = dataIO.load_json(file_path)
self.bot = bot
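        # Layout of the JSON store loaded above: {server_id: {user_id:
        #     {"name": str, "balance": int, "created_at": "Y-m-d H:M:S"}}}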
def create_account(self, user):
server = user.server
if not self.account_exists(user):
if server.id not in self.accounts:
self.accounts[server.id] = {}
if user.id in self.accounts: # Legacy account
balance = self.accounts[user.id]["balance"]
else:
balance = 0
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
account = {"name" : user.name, "balance" : balance,
"created_at" : timestamp}
self.accounts[server.id][user.id] = account
self._save_bank()
return self.get_account(user)
else:
raise AccountAlreadyExists()
def account_exists(self, user):
try:
self._get_account(user)
except NoAccount:
return False
return True
def withdraw_credits(self, user, amount):
server = user.server
if amount < 0:
raise NegativeValue()
account = self._get_account(user)
if account["balance"] >= amount:
account["balance"] -= amount
self.accounts[server.id][user.id] = account
self._save_bank()
else:
raise InsufficientBalance()
def deposit_credits(self, user, amount):
server = user.server
if amount < 0:
raise NegativeValue()
account = self._get_account(user)
account["balance"] += amount
self.accounts[server.id][user.id] = account
self._save_bank()
def set_credits(self, user, amount):
server = user.server
if amount < 0:
raise NegativeValue()
account = self._get_account(user)
account["balance"] = amount
self.accounts[server.id][user.id] = account
self._save_bank()
def transfer_credits(self, sender, receiver, amount):
server = sender.server
if amount < 0:
raise NegativeValue()
if sender is receiver:
raise SameSenderAndReceiver()
if self.account_exists(sender) and self.account_exists(receiver):
sender_acc = self._get_account(sender)
if sender_acc["balance"] < amount:
raise InsufficientBalance()
self.withdraw_credits(sender, amount)
self.deposit_credits(receiver, amount)
else:
raise NoAccount()
def can_spend(self, user, amount):
account = self._get_account(user)
if account["balance"] >= amount:
return True
else:
return False
def wipe_bank(self, server):
self.accounts[server.id] = {}
self._save_bank()
def get_server_accounts(self, server):
if server.id in self.accounts:
raw_server_accounts = deepcopy(self.accounts[server.id])
accounts = []
for k, v in raw_server_accounts.items():
v["id"] = k
v["server"] = server
acc = self._create_account_obj(v)
accounts.append(acc)
return accounts
else:
return []
def get_all_accounts(self):
accounts = []
for server_id, v in self.accounts.items():
server = self.bot.get_server(server_id)
            if server is None:
                # Servers that have since been left are ignored,
                # as are user ids left over from the old bank format.
                continue
raw_server_accounts = deepcopy(self.accounts[server.id])
for k, v in raw_server_accounts.items():
v["id"] = k
v["server"] = server
acc = self._create_account_obj(v)
accounts.append(acc)
return accounts
def get_balance(self, user):
account = self._get_account(user)
return account["balance"]
def get_account(self, user):
acc = self._get_account(user)
acc["id"] = user.id
acc["server"] = user.server
return self._create_account_obj(acc)
def _create_account_obj(self, account):
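        # Note: get_member() below returns None when the user has since left
        # the server, so Account.member may be None for stale accounts.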
account["member"] = account["server"].get_member(account["id"])
account["created_at"] = datetime.strptime(account["created_at"],
"%Y-%m-%d %H:%M:%S")
Account = namedtuple("Account", "id name balance "
"created_at server member")
return Account(**account)
def _save_bank(self):
dataIO.save_json("data/economy/bank.json", self.accounts)
def _get_account(self, user):
server = user.server
try:
return deepcopy(self.accounts[server.id][user.id])
except KeyError:
raise NoAccount()
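
# Illustrative use of the Bank API above (hypothetical objects, not executed):
#
#     bank = Bank(bot, "data/economy/bank.json")
#     bank.create_account(member)            # raises AccountAlreadyExists if present
#     bank.deposit_credits(member, 100)
#     if bank.can_spend(member, 60):
#         bank.withdraw_credits(member, 60)  # raises InsufficientBalance otherwise
#     print(bank.get_balance(member))        # 40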
class Economy:
"""Soyez riche virtuellement !"""
def __init__(self, bot):
global default_settings
self.bot = bot
self.bank = Bank(bot, "data/economy/bank.json")
self.settings = fileIO("data/economy/settings.json", "load")
if "PAYDAY_TIME" in self.settings: #old format
default_settings = self.settings
self.settings = {}
self.settings = defaultdict(lambda: default_settings, self.settings)
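        # Note: the lambda above hands every unknown server id the *same*
        # default_settings dict, so mutating one server's defaults before a
        # per-server copy is saved affects them all.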
self.payday_register = defaultdict(dict)
self.slot_register = defaultdict(dict)
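        # Payday and slot cooldowns are tracked in memory with
        # time.perf_counter(), so they reset whenever the bot restarts.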
@commands.group(name="bank", pass_context=True)
async def _bank(self, ctx):
"""Opérations bancaires"""
if ctx.invoked_subcommand is None:
await send_cmd_help(ctx)
    @_bank.command(pass_context=True, no_pm=True, hidden=True)  # unnecessary since the "auto_register" update
async def register(self, ctx):
"""Enregistre un compte dans Bank"""
user = ctx.message.author
try:
account = self.bank.create_account(user)
await self.bot.say("{} Compte ouvert. Vous avez: {}§".format(user.mention,
account.balance))
except AccountAlreadyExists:
await self.bot.say("{} Tu as déjà un compte Bank.".format(user.mention))
    async def auto_register(self, message):  # automatically opens accounts
        user = message.author
        server = message.server
        if server is not None:  # private messages have no server
            try:
                self.bank.create_account(user)
            except AccountAlreadyExists:
                pass
@_bank.command(pass_context=True)
async def balance(self, ctx, user : discord.Member=None):
"""Montre l'argent possédé par quelqu'un.
Par défaut, son argent."""
if not user:
user = ctx.message.author
try:
await self.bot.say("{} Vous avez: {}§".format(user.mention, self.bank.get_balance(user)))
except NoAccount:
await self.bot.say("{} Vous n'avez pas de compte chez Bank. Tapez {}bank register pour en ouvrir un.".format(user.mention, ctx.prefix))
else:
try:
await self.bot.say("{} possède {}§".format(user.name, self.bank.get_balance(user)))
except NoAccount:
await self.bot.say("Cet utilisateur ne possède pas de compte Bank.")
@_bank.command(pass_context=True)
async def transfer(self, ctx, user : discord.Member, sum : int):
"""Transfert des crédits d'un utilisateur à un autre. (Taxe de 4%)"""
author = ctx.message.author
        sum = round(sum * 0.96)  # 4% tax: only 96% of the requested amount is moved
try:
self.bank.transfer_credits(author, user, sum)
logger.info("{}({}) transferred {} credits to {}({})".format(
author.name, author.id, sum, user.name, user.id))
await self.bot.say("{} crédits ont été transférés au compte de {}. (Taxe de 4%)".format(sum, user.name))
except NegativeValue:
await self.bot.say("Vous avez besoin de transférer au moins 1 crédit.")
except SameSenderAndReceiver:
await self.bot.say("Vous ne pouvez pas transférer des crédits à vous-même.")
except InsufficientBalance:
await self.bot.say("Vous n'avez pas cette somme dans votre compte.")
except NoAccount:
await self.bot.say("Cet utilisateur ne possède pas de compte.")
@_bank.command(name="set", pass_context=True)
@checks.admin_or_permissions(manage_server=True)
async def _set(self, ctx, user : discord.Member, sum : int):
"""Change la valeur d'un compte
Admin/Proprio seulement."""
author = ctx.message.author
try:
self.bank.set_credits(user, sum)
logger.info("{}({}) set {} credits to {} ({})".format(author.name, author.id, str(sum), user.name, user.id))
await self.bot.say("{} possède maintenant {}".format(user.name, str(sum)))
except NoAccount:
await self.bot.say("Cet utilisateur ne possède pas de compte.")
@commands.command(pass_context=True, no_pm=True)
async def rjd(self, ctx): # TODO
"""Pour avoir quelques crédits"""
author = ctx.message.author
server = author.server
        author_id = author.id  # renamed to avoid shadowing the built-in id()
        sum = self.settings[server.id]["PAYDAY_CREDITS"] * self.settings[server.id]["BOOST"]
        if self.bank.account_exists(author):
            if author_id in self.payday_register[server.id]:
                seconds = abs(self.payday_register[server.id][author_id] - int(time.perf_counter()))
                if seconds >= self.settings[server.id]["PAYDAY_TIME"]:
                    self.bank.deposit_credits(author, sum)
                    self.payday_register[server.id][author_id] = int(time.perf_counter())
                    await self.bot.say("{} Voilà quelques crédits ! (+{}§)".format(author.mention, str(sum)))
                else:
                    await self.bot.say("{} Trop tôt, il faudra attendre {}.".format(author.mention, self.display_time(self.settings[server.id]["PAYDAY_TIME"] - seconds)))
            else:
                self.payday_register[server.id][author_id] = int(time.perf_counter())
                self.bank.deposit_credits(author, sum)
                await self.bot.say("{} Voilà quelques crédits. (+{}§)".format(author.mention, str(sum)))
        else:
            await self.bot.say("{} Vous avez besoin d'un compte. Tapez {}bank register pour en ouvrir un.".format(author.mention, ctx.prefix))
@commands.group(pass_context=True)
async def leaderboard(self, ctx):
"""Top par serveur ou global
Par défaut le serveur"""
if ctx.invoked_subcommand is None:
await ctx.invoke(self._server_leaderboard)
@leaderboard.command(name="server", pass_context=True)
async def _server_leaderboard(self, ctx, top : int=10):
"""Poste un top des personnes les plus riche
par défaut top 10""" #Originally coded by Airenkun - edited by irdumb
server = ctx.message.server
if top < 1:
top = 10
bank_sorted = sorted(self.bank.get_server_accounts(server),
key=lambda x: x.balance, reverse=True)
if len(bank_sorted) < top:
top = len(bank_sorted)
topten = bank_sorted[:top]
highscore = ""
place = 1
for acc in topten:
highscore += str(place).ljust(len(str(top))+1)
highscore += (acc.name+" ").ljust(23-len(str(acc.balance)))
highscore += str(acc.balance) + "\n"
place += 1
if highscore:
if len(highscore) < 1985:
await self.bot.say("```py\n"+highscore+"```")
else:
await self.bot.say("Trop gros pour être affiché.")
else:
await self.bot.say("Aucun compte à afficher.")
@leaderboard.command(name="global")
async def _global_leaderboard(self, top : int=10):
"""Affiche le top global mutli-serveur"""
if top < 1:
top = 10
bank_sorted = sorted(self.bank.get_all_accounts(),
key=lambda x: x.balance, reverse=True)
unique_accounts = []
for acc in bank_sorted:
if not self.already_in_list(unique_accounts, acc):
unique_accounts.append(acc)
if len(unique_accounts) < top:
top = len(unique_accounts)
topten = unique_accounts[:top]
highscore = ""
place = 1
for acc in topten:
highscore += str(place).ljust(len(str(top))+1)
highscore += ("{} |{}| ".format(acc.name, acc.server.name)).ljust(23-len(str(acc.balance)))
highscore += str(acc.balance) + "\n"
place += 1
if highscore:
if len(highscore) < 1985:
await self.bot.say("```py\n"+highscore+"```")
else:
await self.bot.say("Trop gros pour être affiché.")
else:
await self.bot.say("Aucun compte à afficher.")
def already_in_list(self, accounts, user):
for acc in accounts:
if user.id == acc.id:
return True
return False
@commands.command()
async def payouts(self):
"""Montre les gains possibles"""
await self.bot.whisper(slot_payouts)
@commands.command(pass_context=True, no_pm=True)
async def slot(self, ctx, bid : int):
"""Joue à la machine à sous"""
author = ctx.message.author
server = author.server
if not self.bank.account_exists(author):
await self.bot.say("{} Tu as besoin d'un compte pour y jouer. Tape {}bank register pour en ouvrir un.".format(author.mention, ctx.prefix))
return
if self.bank.can_spend(author, bid):
if bid >= self.settings[server.id]["SLOT_MIN"] and bid <= self.settings[server.id]["SLOT_MAX"]:
if author.id in self.slot_register:
if abs(self.slot_register[author.id] - int(time.perf_counter())) >= self.settings[server.id]["SLOT_TIME"]:
self.slot_register[author.id] = int(time.perf_counter())
await self.slot_machine(ctx.message, bid)
else:
await self.bot.say("La machine n'est pas encore disponible ! Attendez {} secondes entre chaque utilisation".format(self.settings[server.id]["SLOT_TIME"]))
else:
self.slot_register[author.id] = int(time.perf_counter())
await self.slot_machine(ctx.message, bid)
else:
await self.bot.say("{0} L'offre doit être entre {1} et {2}.".format(author.mention, self.settings[server.id]["SLOT_MIN"], self.settings[server.id]["SLOT_MAX"]))
else:
await self.bot.say("{0} Tu as besoin d'un compte avec assez de fonds pour y jouer.".format(author.mention))
async def slot_machine(self, message, bid):
reel_pattern = [":cherries:", ":cookie:", ":two:", ":four_leaf_clover:", ":cyclone:", ":sunflower:", ":six:", ":mushroom:", ":heart:", ":snowflake:"]
padding_before = [":mushroom:", ":heart:", ":snowflake:"] # padding prevents index errors
padding_after = [":cherries:", ":cookie:", ":two:"]
reel = padding_before + reel_pattern + padding_after
reels = []
for i in range(0, 3):
n = randint(3,12)
reels.append([reel[n - 1], reel[n], reel[n + 1]])
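        # Note: randint(3, 12) picks an index in the middle reel_pattern section
        # of the padded reel, so reel[n - 1] and reel[n + 1] never go out of bounds.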
line = [reels[0][1], reels[1][1], reels[2][1]]
display_reels = "\n " + reels[0][0] + " " + reels[1][0] + " " + reels[2][0] + "\n"
display_reels += ">" + reels[0][1] + " " + reels[1][1] + " " + reels[2][1] + "\n"
display_reels += " " + reels[0][2] + " " + reels[1][2] + " " + reels[2][2] + "\n"
if line[0] == ":two:" and line[1] == ":two:" and line[2] == ":six:":
bid = bid * 5000
await self.bot.send_message(message.channel, "{}{} 226 ! Offre * 5000! {}! ".format(display_reels, message.author.mention, str(bid)))
elif line[0] == ":four_leaf_clover:" and line[1] == ":four_leaf_clover:" and line[2] == ":four_leaf_clover:":
bid += 1000
await self.bot.send_message(message.channel, "{}{} Trois trèfles ! +1000! ".format(display_reels, message.author.mention))
elif line[0] == ":cherries:" and line[1] == ":cherries:" and line[2] == ":cherries:":
bid += 800
await self.bot.send_message(message.channel, "{}{} Trois cerises ! +800! ".format(display_reels, message.author.mention))
elif line[0] == line[1] == line[2]:
bid += 500
await self.bot.send_message(message.channel, "{}{} Trois symboles ! +500! ".format(display_reels, message.author.mention))
elif line[0] == ":two:" and line[1] == ":six:" or line[1] == ":two:" and line[2] == ":six:":
bid = bid * 4
await self.bot.send_message(message.channel, "{}{} 26 ! Offre * 4! {}! ".format(display_reels, message.author.mention, str(bid)))
elif line[0] == ":cherries:" and line[1] == ":cherries:" or line[1] == ":cherries:" and line[2] == ":cherries:":
bid = bid * 3
await self.bot.send_message(message.channel, "{}{} Deux cerises ! Offre * 3! {}! ".format(display_reels, message.author.mention, str(bid)))
elif line[0] == line[1] or line[1] == line[2]:
bid = bid * 2
await self.bot.send_message(message.channel, "{}{} Deux symvoles ! Offre * 2! {}! ".format(display_reels, message.author.mention, str(bid)))
else:
await self.bot.send_message(message.channel, "{}{} Rien ! Offre perdue. ".format(display_reels, message.author.mention))
self.bank.withdraw_credits(message.author, bid)
await self.bot.send_message(message.channel, "Crédits restant: {}".format(self.bank.get_balance(message.author)))
return True
self.bank.deposit_credits(message.author, bid)
await self.bot.send_message(message.channel, "Crédits restant: {}".format(self.bank.get_balance(message.author)))
@commands.command(name="playrole", pass_context=True)
async def play_role(self, ctx):
"""Vous donne le rôle @Play pour être notifié au début de chaque partie d'un jeu lié à l'économie.
Si le rôle n'existe pas sur le serveur, il sera créé automatiquement."""
server = ctx.message.server
user = ctx.message.author
        # Check whether the role exists
if 'Play' not in [r.name for r in server.roles]:
await self.bot.say("Le rôle n'existe pas. Je vais donc le créer...")
try:
perms = discord.Permissions.none()
                # Enable the desired permissions (if needed)
await self.bot.create_role(server, name="Play", permissions=perms)
await self.bot.say("Rôle crée ! Refaites la commande pour obtenir le rôle !")
try:
for c in server.channels:
if c.type.name == 'text':
perms = discord.PermissionOverwrite()
perms.send_messages = False
r = discord.utils.get(ctx.message.server.roles, name="Play")
await self.bot.edit_channel_permissions(c, r, perms)
await asyncio.sleep(1.5)
except discord.Forbidden:
await self.bot.say("Une erreur est apparue.")
except discord.Forbidden:
await self.bot.say("Je ne peux pas créer le rôle.")
else:
server = ctx.message.server
if user.id == self.bot.user.id:
await self.bot.say("Je ne peux pas obtenir ce rôle...")
r = discord.utils.get(ctx.message.server.roles, name="Play")
if 'Play' not in [r.name for r in user.roles]:
await self.bot.add_roles(user, r)
await self.bot.say("{} Vous avec maintenant le rôle *Play*".format(user.name))
else:
await self.bot.remove_roles(user, r)
await self.bot.say("{} Vous n'avez plus le rôle *Play*".format(user.name))
@commands.group(pass_context=True, no_pm=True)
@checks.admin_or_permissions(manage_server=True)
async def economyset(self, ctx):
"""Change les paramètres du module économie"""
server = ctx.message.server
settings = self.settings[server.id]
if ctx.invoked_subcommand is None:
msg = "```"
for k, v in settings.items():
msg += "{}: {}\n".format(k, v)
msg += "```"
await send_cmd_help(ctx)
await self.bot.say(msg)
@economyset.command(pass_context=True)
async def wipe(self, ctx):
"""Efface entièrement Bank. N'efface pas les données des autres modules."""
server = ctx.message.server
self.bank.wipe_bank(server)
await self.bot.say("Banque effacée.")
@economyset.command(pass_context=True)
    async def boost(self, ctx, multiplicateur : int):
        """Enables the boost and sets the multiplier"""
        if multiplicateur <= 0:
            await self.bot.say("Le boost ne peut pas être inférieur ou égal à 0")
            return
        self.settings["BOOST"] = multiplicateur
        fileIO("data/economy/settings.json", "save", self.settings)
        if multiplicateur < 1:
            await self.bot.say("Le boost est maintenant de " + str(multiplicateur) + ", ce qui retire de l'argent à chaque distribution.")
        else:
            await self.bot.say("Le boost est maintenant de " + str(multiplicateur))
@economyset.command(pass_context=True)
async def slotmin(self, ctx, bid : int):
"""Minimum slot machine bid"""
server = ctx.message.server
self.settings[server.id]["SLOT_MIN"] = bid
await self.bot.say("Minimum bid is now " + str(bid) + " credits.")
fileIO("data/economy/settings.json", "save", self.settings)
@economyset.command(pass_context=True)
async def slotmax(self, ctx, bid : int):
"""Maximum slot machine bid"""
server = ctx.message.server
self.settings[server.id]["SLOT_MAX"] = bid
await self.bot.say("Maximum bid is now " + str(bid) + " credits.")
fileIO("data/economy/settings.json", "save", self.settings)
@economyset.command(pass_context=True)
async def slottime(self, ctx, seconds : int):
"""Seconds between each slots use"""
server = ctx.message.server
self.settings[server.id]["SLOT_TIME"] = seconds
await self.bot.say("Cooldown is now " + str(seconds) + " seconds.")
fileIO("data/economy/settings.json", "save", self.settings)
@economyset.command(pass_context=True)
async def paydaytime(self, ctx, seconds : int):
"""Seconds between each payday"""
server = ctx.message.server
self.settings[server.id]["PAYDAY_TIME"] = seconds
await self.bot.say("Value modified. At least " + str(seconds) + " seconds must pass between each payday.")
fileIO("data/economy/settings.json", "save", self.settings)
@economyset.command(pass_context=True)
async def paydaycredits(self, ctx, credits : int):
"""Credits earned each payday"""
server = ctx.message.server
self.settings[server.id]["PAYDAY_CREDITS"] = credits
await self.bot.say("Every payday will now give " + str(credits) + " credits.")
fileIO("data/economy/settings.json", "save", self.settings)
def display_time(self, seconds, granularity=2): # What would I ever do without stackoverflow?
intervals = ( # Source: http://stackoverflow.com/a/24542445
('weeks', 604800), # 60 * 60 * 24 * 7
('days', 86400), # 60 * 60 * 24
('hours', 3600), # 60 * 60
('minutes', 60),
('seconds', 1),
)
result = []
for name, count in intervals:
value = seconds // count
if value:
seconds -= value * count
if value == 1:
name = name.rstrip('s')
result.append("{} {}".format(value, name))
return ', '.join(result[:granularity])
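    # Example outputs (hypothetical values):
    #   self.display_time(3700)      -> '1 hour, 1 minute'
    #   self.display_time(90061, 3)  -> '1 day, 1 hour, 1 minute'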
def check_folders():
if not os.path.exists("data/economy"):
print("Creating data/economy folder...")
os.makedirs("data/economy")
def check_files():
f = "data/economy/settings.json"
if not fileIO(f, "check"):
print("Creating default economy's settings.json...")
fileIO(f, "save", {})
f = "data/economy/bank.json"
if not fileIO(f, "check"):
print("Creating empty bank.json...")
fileIO(f, "save", {})
def setup(bot):
global logger
check_folders()
check_files()
logger = logging.getLogger("red.economy")
n = Economy(bot)
bot.add_listener(n.auto_register, "on_message")
if logger.level == 0: # Prevents the logger from being loaded again in case of module reload
logger.setLevel(logging.INFO)
handler = logging.FileHandler(filename='data/economy/economy.log', encoding='utf-8', mode='a')
handler.setFormatter(logging.Formatter('%(asctime)s %(message)s', datefmt="[%d/%m/%Y %H:%M]"))
logger.addHandler(handler)
bot.add_cog(n)
| mit | 8,481,619,921,465,153,000 | 42.147967 | 178 | 0.562226 | false |
shundread/pyweek24 | gamelib/main.py | 1 | 3587 | '''Game main module.
Contains the entry point used by the run_game.py script.
Feel free to put all your game code here, or in other modules in this "gamelib"
package.
'''
import json
import pygame
import data
import engine
class Game(object):
'''A class that delegates its engine functionalities to a hot-swappable
module'''
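    # Expected engine module interface (assumed from the calls below):
    #   engine.init()
    #   engine.handle_input(game, data)
    #   engine.simulate(game, data, dt)
    #   engine.render(data)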
FPS = 60.0
def __init__(self):
self.running = False
self.data = { "gamestate": "newtitle" }
# Swapping state
self.swapped = False
# Error states
        self.input_handling_error = None
self.simulate_error = None
self.render_error = None
def run(self):
engine.init()
clock = pygame.time.Clock()
self.running = True
dt = 0
frames = 0
while self.running:
self.handle_input()
if self.swapped:
self.swapped = False
continue
self.simulate(dt)
self.render()
dt = clock.tick(self.FPS)
frames += 1
# Report framerate on exit
ticks = pygame.time.get_ticks()
framerate = frames / (ticks / 1000.0)
print("Framerate was {0}".format(framerate))
def handle_input(self):
try:
engine.handle_input(self, self.data)
self.input_handling_error = None
except Exception as error:
if self.input_handling_error != error.message:
print("Unable to handle input, reason:")
print(error)
self.input_handling_error = error.message
def simulate(self, dt):
try:
engine.simulate(self, self.data, dt)
self.simulate_error = None
except Exception as error:
if self.simulate_error != error.message:
print("Unable to render, reason:")
print(error)
self.simulate_error = error.message
def render(self):
try:
engine.render(self.data)
self.render_error = None
except Exception as error:
if self.render_error != error.message:
print("Unable to render, reason:")
print(error)
self.render_error = error.message
def quit(self):
self.dump_data()
self.running = False
def request_swap(self):
try:
print("Attempting to swap engine...")
reload(engine)
print("Engine swapped. Reinitializing engine...")
engine.init()
print("Engine reinitialized\n")
except Exception as error:
print("Errors were thrown in the engine swap:")
print(error)
def dump_data(self):
print("Saving the gamestate...")
try:
with open("gamestate.json", "wt") as fout:
json_data = json.dumps(self.data, indent=4)
print(json_data)
fout.write(json_data)
print("Gamestate saved\n")
except Exception as error:
print("Unable to dump the data, reason:")
print(error)
def load_data(self):
print("Restoring the gamestate...")
try:
with open("gamestate.json", "rt") as fin:
new_data = json.load(fin)
self.data = new_data
print("Gamestate restored")
except Exception as error:
print("Unable to load the data, reason:")
print(error)
def main():
game = Game()
# game.load_data()
game.run()
| gpl-3.0 | -8,235,789,348,464,003,000 | 27.927419 | 79 | 0.539448 | false |
luftdanmark/fifo.li | config/settings/common.py | 1 | 9040 | # -*- coding: utf-8 -*-
"""
Django settings for fifo project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
import environ
ROOT_DIR = environ.Path(__file__) - 3 # (/a/b/myfile.py - 3 = /)
APPS_DIR = ROOT_DIR.path('fifo')
env = environ.Env()
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'crispy_forms', # Form layouts
'allauth', # registration
'allauth.account', # registration
'allauth.socialaccount', # registration
'rest_framework', # rest framework
)
# Apps specific for this project go here.
LOCAL_APPS = (
'fifo.users', # custom users app
'fifo.queues', # queues app
'fifo.entries', # entries app
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
MIDDLEWARE_CLASSES = (
    # If djangosecure is used, its SecurityMiddleware must be listed first
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
MIGRATION_MODULES = {
'sites': 'fifo.contrib.sites.migrations'
}
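# Redirecting the 'sites' migrations lets the project ship its own migration
# for the initial Site object (a common cookiecutter-django pattern).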
# DEBUG
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool('DJANGO_DEBUG', False)
# FIXTURE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
str(APPS_DIR.path('fixtures')),
)
# EMAIL CONFIGURATION
# ------------------------------------------------------------------------------
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# MANAGER CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
("""Carl-Philip Majgaard""", '[email protected]'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
# DATABASES = {
# # Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
# 'default': env.db('DATABASE_URL', default='postgres:///fifo'),
# }
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'fifodb',
'USER': 'fifouser',
'PASSWORD': '7xkKBfRbkMg9dk',
'HOST': '127.0.0.1',
'PORT': '5432',
}
}
DATABASES['default']['ATOMIC_REQUESTS'] = True
# GENERAL CONFIGURATION
# ------------------------------------------------------------------------------
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'EST5EDT'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
{
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
'DIRS': [
str(APPS_DIR.path('templates')),
],
'OPTIONS': {
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
'debug': DEBUG,
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
# Your stuff: custom template context processors go here
],
},
},
]
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# STATIC FILE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
str(APPS_DIR.path('static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# MEDIA CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR('media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# URL Configuration
# ------------------------------------------------------------------------------
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# AUTHENTICATION CONFIGURATION
# ------------------------------------------------------------------------------
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
# Some really nice defaults
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'none'
ACCOUNT_UNIQUE_EMAIL = True
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True)
ACCOUNT_ADAPTER = 'fifo.users.adapters.AccountAdapter'
SOCIALACCOUNT_ADAPTER = 'fifo.users.adapters.SocialAccountAdapter'
ACCOUNT_SIGNUP_FORM_CLASS = 'fifo.users.forms.SignupForm'
# Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = 'users.User'
LOGIN_REDIRECT_URL = 'home'
LOGIN_URL = 'home'
# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'
# Location of root django.contrib.admin URL, use {% url 'admin:index' %}
ADMIN_URL = r'^admin/'
# Your common stuff: Below this line define 3rd party library settings
# REST_FRAMEWORK = {
# # Use Django's standard `django.contrib.auth` permissions,
# # or allow read-only access for unauthenticated users.
# 'DEFAULT_PERMISSION_CLASSES': [
# 'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
# ]
# }
| mit | 7,447,248,319,303,379,000 | 33.903475 | 98 | 0.610066 | false |
ebolyen/qiime2 | qiime2/sdk/tests/test_plugin_manager.py | 1 | 4202 | # ----------------------------------------------------------------------------
# Copyright (c) 2016-2017, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import unittest
import qiime2.plugin
import qiime2.sdk
from qiime2.plugin.plugin import SemanticTypeRecord, FormatRecord
from qiime2.core.testing.type import (IntSequence1, IntSequence2, Mapping,
FourInts, Kennel, Dog, Cat)
from qiime2.core.testing.format import (IntSequenceDirectoryFormat,
MappingDirectoryFormat,
IntSequenceV2DirectoryFormat,
IntSequenceFormatV2,
FourIntsDirectoryFormat,
IntSequenceFormat)
from qiime2.core.testing.util import get_dummy_plugin
class TestPluginManager(unittest.TestCase):
def setUp(self):
self.plugin = get_dummy_plugin()
# PluginManager is a singleton so there's no issue creating it again.
self.pm = qiime2.sdk.PluginManager()
def test_plugins(self):
plugins = self.pm.plugins
exp = {'dummy-plugin': self.plugin}
self.assertEqual(plugins, exp)
def test_semantic_types(self):
types = self.pm.semantic_types
exp = {
'IntSequence1': SemanticTypeRecord(semantic_type=IntSequence1,
plugin=self.plugin),
'IntSequence2': SemanticTypeRecord(semantic_type=IntSequence2,
plugin=self.plugin),
'Mapping': SemanticTypeRecord(semantic_type=Mapping,
plugin=self.plugin),
'FourInts': SemanticTypeRecord(semantic_type=FourInts,
plugin=self.plugin),
'Kennel': SemanticTypeRecord(semantic_type=Kennel,
plugin=self.plugin),
'Dog': SemanticTypeRecord(semantic_type=Dog,
plugin=self.plugin),
'Cat': SemanticTypeRecord(semantic_type=Cat,
plugin=self.plugin),
}
self.assertEqual(types, exp)
def test_importable_types(self):
types = self.pm.importable_types
exp = {IntSequence1, IntSequence2, FourInts, Mapping, Kennel[Dog],
Kennel[Cat]}
self.assertEqual(types, exp)
# TODO: add tests for type/directory/transformer registrations
def test_importable_formats(self):
obs = self.pm.importable_formats
exp = {
'IntSequenceDirectoryFormat':
FormatRecord(format=IntSequenceDirectoryFormat,
plugin=self.plugin),
'MappingDirectoryFormat':
FormatRecord(format=MappingDirectoryFormat,
plugin=self.plugin),
'IntSequenceV2DirectoryFormat':
FormatRecord(format=IntSequenceV2DirectoryFormat,
plugin=self.plugin),
'IntSequenceFormatV2':
FormatRecord(format=IntSequenceFormatV2,
plugin=self.plugin),
'FourIntsDirectoryFormat':
FormatRecord(format=FourIntsDirectoryFormat,
plugin=self.plugin),
'IntSequenceFormat':
FormatRecord(format=IntSequenceFormat,
plugin=self.plugin)
}
self.assertEqual(obs, exp)
def test_importable_formats_excludes_unimportables(self):
obs = self.pm.importable_formats
self.assertNotIn('UnimportableFormat', obs)
self.assertNotIn('UnimportableDirectoryFormat', obs)
obs = self.pm.formats
self.assertIn('UnimportableFormat', obs)
self.assertIn('UnimportableDirectoryFormat', obs)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | 147,731,705,538,736,350 | 39.403846 | 78 | 0.546168 | false |
zardus/idalink | idalink/server.py | 1 | 1521 | # -*- coding: utf-8 -*-
# Copyright (C) 2013- Yan Shoshitaishvili aka. zardus
# Ruoyu Wang aka. fish
# Audrey Dutcher aka. rhelmot
# Kevin Borgolte aka. cao
from __future__ import print_function
# idc is only available inside IDA, so make pylint stop complaining
import idc # pylint: disable=F0401
import threading
from rpyc.core import SlaveService
from rpyc.utils.server import OneShotServer, ThreadedServer
def main_thread(port):
srv = ThreadedServer(SlaveService, port=port)
srv.start()
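# Client-side sketch (assumed rpyc classic mode, which matches SlaveService):
#   import rpyc
#   conn = rpyc.classic.connect('localhost', 18861)
#   conn.modules.idc  # remote access to IDA's idc module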
def main():
port = int(idc.ARGV[1]) if idc.ARGV[1:] else 18861
thread_mode = idc.ARGV[2] == 'threaded' if idc.ARGV[2:] else False
print('Received arguments: port=%s, thread_mode=%s' % (port, thread_mode))
# :note: For speed, we don't want to idc.Wait() here,
# but you might want to call it in your code
# to make sure that autoanalysis has finished.
if thread_mode:
        thread = threading.Thread(target=main_thread, args=(port,))
thread.daemon = True
thread.start()
else:
srv = OneShotServer(SlaveService, port=port)
# OneShotServer is a LIE so we have to do some shit
# this is copied from https://github.com/tomerfiliba/rpyc/blob/master/rpyc/utils/server.py
# specifically, the start method. if stuff breaks look here!
srv._listen()
srv._register()
srv.accept()
idc.Exit(0)
if __name__ == '__main__':
main()
| bsd-2-clause | 7,068,586,202,755,159,000 | 32.065217 | 98 | 0.627876 | false |
jeremiah-c-leary/vhdl-style-guide | vsg/tests/package_body/test_rule_201.py | 1 | 1173 |
import os
import unittest
from vsg.rules import package_body
from vsg import vhdlFile
from vsg.tests import utils
sTestDir = os.path.dirname(__file__)
lFile, eError = vhdlFile.utils.read_vhdlfile(os.path.join(sTestDir, 'rule_201_test_input.vhd'))
lExpected = []
lExpected.append('')
utils.read_file(os.path.join(sTestDir, 'rule_201_test_input.fixed.vhd'), lExpected)
class test_package_body_rule(unittest.TestCase):
def setUp(self):
self.oFile = vhdlFile.vhdlFile(lFile)
self.assertIsNone(eError)
def test_rule_201(self):
oRule = package_body.rule_201()
self.assertTrue(oRule)
self.assertEqual(oRule.name, 'package_body')
self.assertEqual(oRule.identifier, '201')
lExpected = [9, 16]
oRule.analyze(self.oFile)
self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
def test_fix_rule_201(self):
oRule = package_body.rule_201()
oRule.fix(self.oFile)
lActual = self.oFile.get_lines()
self.assertEqual(lExpected, lActual)
oRule.analyze(self.oFile)
self.assertEqual(oRule.violations, [])
| gpl-3.0 | 1,176,384,906,839,200,500 | 25.066667 | 106 | 0.679454 | false |
egbertbouman/tribler-g | Tribler/Category/TestCategory.py | 1 | 4853 | # Written by Yuan Yuan
# see LICENSE.txt for license information
import sys, os
execpath = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), '..', '..')
sys.path.append(execpath)
#print sys.path
from Utility.utility import getMetainfo
from Tribler.Category.Category import Category
DEBUG = False
def testFilter(catfilename, torrentpath):
readCategorisationFile(catfilename)
#print 'Install_dir is %s' % execpath
c = Category.getInstance(execpath, None)
total = porn = fn = fp = 0
for tfilename,isporn in tdict.items():
torrent = getMetainfo(os.path.join(torrentpath,tfilename))
name = torrent['info']['name']
cat = c.calculateCategory(torrent, name)
fporn = (cat == ['xxx'])
total+= 1
porn += int(isporn)
if isporn == fporn:
if DEBUG:
print (isporn, fporn), 'good', name
elif isporn and not fporn:
fn+=1
print 'FALSE NEGATIVE'
showTorrent(os.path.join(torrentpath,tfilename))
elif not isporn and fporn:
fp +=1
print 'FALSE POSITIVE'
showTorrent(os.path.join(torrentpath,tfilename))
print """
Total torrents: %(total)d
XXX torrents: %(porn)d
Correct filtered: %(good)d
False negatives: %(fn)d
False positives: %(fp)d
""" % {'total':total, 'porn':porn, 'fn':fn,'fp':fp,'good':total-fn-fp}
def readCategorisationFile(filename):
global tdict
tdict = {}
try:
f = file(filename, 'r')
lines = f.read().splitlines()
for line in lines:
if line:
parts = line.split('\t')
tdict[parts[0]] = bool(int(parts[1]))
f.close()
except IOError:
print 'No file %s found, starting with empty file' % filename
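# Each line of the categorisation file is "<torrent filename>\t<0|1>",
# where 1 marks the torrent as porn.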
def getTorrentData(path, max_num=-1):
torrents= []
i = 0
for fname in os.listdir(path):
if fname.endswith('.torrent'):
torrents.append(os.path.join(path,fname))
if i%1000 == 0 and i:
print 'Loaded: %d torrents' % i
if i == int(max_num):
break
i+=1
print 'Loaded %d torrents' % len(torrents)
return torrents
def showTorrent(path):
torrent = getMetainfo(os.path.join(path))
name = torrent['info']['name']
print '------------------------------'
print '\tfiles :'
files_list = []
__size_change = 1024
try:
# the multi-files mode
for ifiles in torrent['info']["files"]:
files_list.append((ifiles['path'][-1], ifiles['length'] / float(__size_change)))
except KeyError:
# single mode
files_list.append((torrent['info']["name"],torrent['info']['length'] / float(__size_change)))
for fname, fsize in files_list:
print'\t\t%s\t%d kb' % (fname, fsize)
print 'Torrent name: %s' % name
print '\ttracker:%s' % torrent['announce']
print '------------------------------'
def createTorrentDataSet(filename, torrentpath):
initSaveFile(filename)
f_out = file(filename, 'a')
torrents = getTorrentData(torrentpath)
for torrent in torrents:
if os.path.split(torrent)[-1] in tset: # already done
continue
showTorrent(torrent)
ans = None
while ans not in ['q', 'y','n']:
print 'Is this torrent porn? (y/n/q)'
ans = sys.stdin.readline()[:-1].lower()
if ans == 'q':
break
else:
saveTorrent(f_out, torrent, (ans=='y'))
f_out.close()
def saveTorrent(f_out, torrent, boolean):
    tfilename = os.path.split(torrent)[-1]
    assert tfilename
    if tfilename in tset:  # tset holds torrent file names, not full paths
        return
    f_out.write('%s\t%d\n' % (tfilename, int(boolean)))
    f_out.flush()
    tset.add(tfilename)
def initSaveFile(filename):
global tset
tset = set()
try:
f = file(filename, 'r')
lines = f.read().splitlines()
for line in lines:
tset.add(line.split('\t')[0])
f.close()
except IOError:
print 'No file %s found, starting with empty file' % filename
def main(args):
if len(args) != 4 or args[1] not in ['categorise', 'test']:
print 'Usage 1: %s categorise [torrent-dir] [torrent-data-file]' % args[0]
print 'Usage 2: %s test [torrent-dir] [torrent-data-file]' % args[0]
sys.exit(1)
if args[1] == 'categorise':
createTorrentDataSet(args[3], args[2])
elif args[1] == 'test':
testFilter(args[3], args[2])
print 'ready'
if __name__ == '__main__':
main(sys.argv)
| lgpl-2.1 | 2,244,882,841,439,192,600 | 30.790541 | 101 | 0.535957 | false |
TargetHolding/pyspark-elastic | python/setup.py | 1 | 1149 | #!/usr/bin/env python
import os
from setuptools import setup, find_packages
basedir = os.path.dirname(os.path.abspath(__file__))
os.chdir(basedir)
def f(*path):
return open(os.path.join(basedir, *path))
setup(
name='pyspark_elastic',
maintainer='Frens Jan Rumph',
maintainer_email='[email protected]',
version='0.3.1',
description='Utilities to asssist in working with Elastic Serach and PySpark.',
long_description=f('../README.md').read(),
url='https://github.com/TargetHolding/pyspark-elastic',
license='Apache License 2.0',
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Database',
'Topic :: Software Development :: Libraries',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Utilities',
]
)
| apache-2.0 | 3,498,440,906,200,967,700 | 26.357143 | 80 | 0.700609 | false |
FinaceInfo/Chinese-box-office-info | test/test_app.py | 1 | 2803 | from . import wsapp,socketio
import json
wsapp.testing = True
class TestBoxOfficeApp(object):
def test_day_cinema(self):
client = socketio.test_client(wsapp, namespace='/boxoffice')
client.emit('cn_day_cinema', {"query":"ok"}, namespace='/boxoffice')
received = client.get_received('/boxoffice')
assert len(received) == 1
assert len(received[0]['args']) == 1
assert received[0]['name'] == 'cn_day_cinema'
assert type(received[0]['args'][0]) == dict
client.emit('cn_box_office_day', {"query":"2016-12-24"}, namespace='/boxoffice')
received = client.get_received('/boxoffice')
assert len(received) == 1
assert len(received[0]['args']) == 1
assert received[0]['name'] == 'cn_box_office_day'
assert type(received[0]['args'][0]) == dict
def test_day_boxoffice(self):
client = socketio.test_client(wsapp, namespace='/boxoffice')
client.emit('cn_box_office_day', {"query":"ok"}, namespace='/boxoffice')
received = client.get_received('/boxoffice')
assert len(received) == 1
assert len(received[0]['args']) == 1
assert received[0]['name'] == 'cn_box_office_day'
assert type(received[0]['args'][0]) == dict
client.emit('cn_box_office_day', {"query":"12-24"}, namespace='/boxoffice')
received = client.get_received('/boxoffice')
assert len(received) == 1
assert len(received[0]['args']) == 1
assert received[0]['name'] == 'cn_box_office_day'
assert type(received[0]['args'][0]) == dict
def test_month_boxoffice(self):
client = socketio.test_client(wsapp, namespace='/boxoffice')
client.emit('cn_box_office_month', {"query":"ok"}, namespace='/boxoffice')
received = client.get_received('/boxoffice')
assert len(received) == 1
assert len(received[0]['args']) == 1
assert received[0]['name'] == 'cn_box_office_month'
assert type(received[0]['args'][0]) == dict
client.emit('cn_box_office_month', {"query":"2016-12"}, namespace='/boxoffice')
received = client.get_received('/boxoffice')
assert len(received) == 1
assert len(received[0]['args']) == 1
assert received[0]['name'] == 'cn_box_office_month'
assert type(received[0]['args'][0]) == dict
def test_realtime_boxoffice(self):
client = socketio.test_client(wsapp, namespace='/boxoffice')
client.emit('cn_box_office_realtime', {"query":"ok"}, namespace='/boxoffice')
received = client.get_received('/boxoffice')
assert len(received) == 1
assert len(received[0]['args']) == 1
assert received[0]['name'] == 'cn_box_office_realtime'
assert type(received[0]['args'][0]) == dict
| mit | 1,285,896,231,012,905,500 | 45.716667 | 88 | 0.599001 | false |
catapult-project/catapult-csm | telemetry/telemetry/internal/results/chart_json_output_formatter_unittest.py | 1 | 8663 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import StringIO
import unittest
from telemetry import benchmark
from telemetry import story
from telemetry.internal.results import chart_json_output_formatter
from telemetry.internal.results import page_test_results
from telemetry import page as page_module
from telemetry.value import improvement_direction
from telemetry.value import list_of_scalar_values
from telemetry.value import scalar
from telemetry.value import trace
from tracing.trace_data import trace_data
def _MakeStorySet():
ps = story.StorySet(base_dir=os.path.dirname(__file__))
ps.AddStory(page_module.Page(
'http://www.foo.com/', ps, ps.base_dir, name='http://www.foo.com/'))
ps.AddStory(page_module.Page(
'http://www.bar.com/', ps, ps.base_dir, name='http://www.bar.com/'))
return ps
class ChartJsonTest(unittest.TestCase):
def setUp(self):
self._output = StringIO.StringIO()
self._story_set = _MakeStorySet()
self._benchmark_metadata = benchmark.BenchmarkMetadata(
'benchmark_name', 'benchmark_description')
self._formatter = chart_json_output_formatter.ChartJsonOutputFormatter(
self._output, self._benchmark_metadata)
def testOutputAndParse(self):
results = page_test_results.PageTestResults()
self._output.truncate(0)
results.WillRunPage(self._story_set[0])
v0 = scalar.ScalarValue(results.current_page, 'foo', 'seconds', 3,
improvement_direction=improvement_direction.DOWN)
results.AddValue(v0)
results.DidRunPage(self._story_set[0])
self._formatter.Format(results)
d = json.loads(self._output.getvalue())
self.assertIn('foo', d['charts'])
def testOutputAndParseDisabled(self):
self._formatter.FormatDisabled(None)
d = json.loads(self._output.getvalue())
self.assertEquals(d['benchmark_name'], 'benchmark_name')
self.assertFalse(d['enabled'])
def testAsChartDictSerializable(self):
v0 = scalar.ScalarValue(self._story_set[0], 'foo', 'seconds', 3,
improvement_direction=improvement_direction.DOWN)
page_specific_values = [v0]
summary_values = []
d = chart_json_output_formatter.ResultsAsChartDict(
self._benchmark_metadata,
page_specific_values,
summary_values)
json.dumps(d)
def testAsChartDictBaseKeys(self):
page_specific_values = []
summary_values = []
d = chart_json_output_formatter.ResultsAsChartDict(
self._benchmark_metadata,
page_specific_values,
summary_values)
self.assertEquals(d['format_version'], '0.1')
self.assertEquals(d['next_version'], '0.2')
self.assertEquals(d['benchmark_metadata']['name'], 'benchmark_name')
self.assertEquals(d['benchmark_metadata']['description'],
'benchmark_description')
self.assertEquals(d['benchmark_metadata']['type'], 'telemetry_benchmark')
self.assertTrue(d['enabled'])
def testAsChartDictNoDescription(self):
page_specific_values = []
summary_values = []
d = chart_json_output_formatter.ResultsAsChartDict(
benchmark.BenchmarkMetadata('benchmark_name', ''),
page_specific_values,
summary_values)
self.assertEquals('', d['benchmark_metadata']['description'])
def testAsChartDictPageSpecificValuesSamePageWithInteractionRecord(self):
v0 = scalar.ScalarValue(self._story_set[0], 'foo', 'seconds', 3,
improvement_direction=improvement_direction.DOWN,
tir_label='MyIR')
v1 = scalar.ScalarValue(self._story_set[0], 'foo', 'seconds', 4,
improvement_direction=improvement_direction.DOWN,
tir_label='MyIR')
page_specific_values = [v0, v1]
summary_values = []
d = chart_json_output_formatter.ResultsAsChartDict(
self._benchmark_metadata,
page_specific_values,
summary_values)
self.assertTrue('MyIR@@foo' in d['charts'])
self.assertTrue('http://www.foo.com/' in d['charts']['MyIR@@foo'])
self.assertTrue(d['enabled'])
def testAsChartDictPageSpecificValuesSamePageWithoutInteractionRecord(self):
v0 = scalar.ScalarValue(self._story_set[0], 'foo', 'seconds', 3,
improvement_direction=improvement_direction.DOWN)
v1 = scalar.ScalarValue(self._story_set[0], 'foo', 'seconds', 4,
improvement_direction=improvement_direction.DOWN)
page_specific_values = [v0, v1]
summary_values = []
d = chart_json_output_formatter.ResultsAsChartDict(
self._benchmark_metadata,
page_specific_values,
summary_values)
self.assertTrue('foo' in d['charts'])
self.assertTrue('http://www.foo.com/' in d['charts']['foo'])
self.assertTrue(d['enabled'])
def testAsChartDictPageSpecificValuesAndComputedSummaryWithTraceName(self):
v0 = scalar.ScalarValue(self._story_set[0], 'foo.bar', 'seconds', 3,
improvement_direction=improvement_direction.DOWN)
v1 = scalar.ScalarValue(self._story_set[1], 'foo.bar', 'seconds', 4,
improvement_direction=improvement_direction.DOWN)
page_specific_values = [v0, v1]
summary_values = []
d = chart_json_output_formatter.ResultsAsChartDict(
self._benchmark_metadata,
page_specific_values,
summary_values)
self.assertTrue('foo' in d['charts'])
self.assertTrue('http://www.foo.com/' in d['charts']['foo'])
self.assertTrue('http://www.bar.com/' in d['charts']['foo'])
self.assertTrue('bar' in d['charts']['foo'])
self.assertTrue(d['enabled'])
def testAsChartDictPageSpecificValuesAndComputedSummaryWithoutTraceName(self):
v0 = scalar.ScalarValue(self._story_set[0], 'foo', 'seconds', 3,
improvement_direction=improvement_direction.DOWN)
v1 = scalar.ScalarValue(self._story_set[1], 'foo', 'seconds', 4,
improvement_direction=improvement_direction.DOWN)
page_specific_values = [v0, v1]
summary_values = []
d = chart_json_output_formatter.ResultsAsChartDict(
self._benchmark_metadata,
page_specific_values,
summary_values)
self.assertTrue('foo' in d['charts'])
self.assertTrue('http://www.foo.com/' in d['charts']['foo'])
self.assertTrue('http://www.bar.com/' in d['charts']['foo'])
self.assertTrue('summary' in d['charts']['foo'])
self.assertTrue(d['enabled'])
def testAsChartDictSummaryValueWithTraceName(self):
v0 = list_of_scalar_values.ListOfScalarValues(
None, 'foo.bar', 'seconds', [3, 4],
improvement_direction=improvement_direction.DOWN)
page_specific_values = []
summary_values = [v0]
d = chart_json_output_formatter.ResultsAsChartDict(
self._benchmark_metadata,
page_specific_values,
summary_values)
self.assertTrue('bar' in d['charts']['foo'])
self.assertTrue(d['enabled'])
def testAsChartDictSummaryValueWithoutTraceName(self):
v0 = list_of_scalar_values.ListOfScalarValues(
None, 'foo', 'seconds', [3, 4],
improvement_direction=improvement_direction.DOWN)
page_specific_values = []
summary_values = [v0]
d = chart_json_output_formatter.ResultsAsChartDict(
self._benchmark_metadata,
page_specific_values,
summary_values)
self.assertTrue('summary' in d['charts']['foo'])
self.assertTrue(d['enabled'])
def testAsChartDictWithTraceValuesThatHasTirLabel(self):
v = trace.TraceValue(self._story_set[0],
trace_data.CreateTraceDataFromRawData([{'test': 1}]))
v.tir_label = 'background'
d = chart_json_output_formatter.ResultsAsChartDict(
self._benchmark_metadata,
page_specific_values=[v],
summary_values=[v])
self.assertTrue('trace' in d['charts'])
self.assertTrue('http://www.foo.com/' in d['charts']['trace'],
msg=d['charts']['trace'])
self.assertTrue(d['enabled'])
def testAsChartDictValueSmokeTest(self):
v0 = list_of_scalar_values.ListOfScalarValues(
None, 'foo.bar', 'seconds', [3, 4],
improvement_direction=improvement_direction.DOWN)
page_specific_values = []
summary_values = [v0]
d = chart_json_output_formatter.ResultsAsChartDict(
self._benchmark_metadata,
page_specific_values,
summary_values)
self.assertEquals(d['charts']['foo']['bar']['values'], [3, 4])
| bsd-3-clause | 1,692,078,530,059,303,200 | 36.502165 | 80 | 0.659818 | false |
Mirantis/pumphouse | pumphouse/_vendor/fuelclient/cli/actions/role.py | 1 | 1906 | # Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from .base import Action
from .. import arguments as Args
from ..formatting import format_table
from ...objects.release import Release
class RoleAction(Action):
"""List all roles for specific release
"""
action_name = "role"
def __init__(self):
super(RoleAction, self).__init__()
self.args = [
Args.get_list_arg("List all roles for specific release"),
Args.get_release_arg("Release id", required=True)
]
self.flag_func_map = (
(None, self.list),
)
def list(self, params):
"""Print all available roles and their
conflicts for some release with id=1:
fuel role --rel 1
"""
release = Release(params.release, params=params)
data = release.get_fresh_data()
acceptable_keys = ("name", "conflicts")
roles = [
{
"name": role_name,
"conflicts": ", ".join(
metadata.get("conflicts", ["-"])
)
} for role_name, metadata in data["roles_metadata"].iteritems()]
self.serializer.print_to_output(
roles,
format_table(
roles,
acceptable_keys=acceptable_keys
)
)
| apache-2.0 | 6,900,483,627,728,180,000 | 31.862069 | 78 | 0.58447 | false |
alexey-ernest/ml-for-trading | cumulative_returns.py | 1 | 1740 |
import os
import pandas as pd
import matplotlib.pyplot as plt
def symbol_to_path(symbol, base_dir="data"):
"""Return CSV file path given ticker symbol."""
return os.path.join(base_dir, "{}.csv".format(str(symbol)))
def get_data(symbols, dates):
"""Read stock data (adjusted close) for given symbols from CSV files."""
df = pd.DataFrame(index=dates)
if 'SPY' not in symbols: # add SPY for reference, if absent
symbols.insert(0, 'SPY')
for symbol in symbols:
df_temp = pd.read_csv(symbol_to_path(symbol), index_col='Date',
parse_dates=True, usecols=['Date', 'Adj Close'],
na_values=['nan'])
df_temp = df_temp.rename(columns={'Adj Close': symbol})
df = df.join(df_temp)
if symbol == 'SPY':
df = df.dropna(subset=['SPY'])
return df
def plot_data(df, title="Stock prices", xlabel="Date", ylabel="Price"):
"""Plot stock prices with a custom title and meaningful axis labels."""
ax = df.plot(title=title, fontsize=12)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
plt.show()
def compute_cumulative_returns(df):
"""Compute and return the cumulative return values."""
cum_returns = (df/df.ix[0]) - 1
return cum_returns
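# Example (hypothetical prices): for a price series 100 -> 110 -> 99,
# compute_cumulative_returns yields [0.0, 0.10, -0.01].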
def test_run():
# Define a date range
dates = pd.date_range('2015-11-23', '2016-11-18')
# Choose stock symbols to read
symbols = ['SPY', 'XOM']
# Get stock data
df = get_data(symbols, dates)
#plot_data(df)
    # Compute cumulative returns
    cum_returns = compute_cumulative_returns(df)
    plot_data(cum_returns, title="Cumulative returns", ylabel="Cumulative returns")
if __name__ == "__main__":
test_run()
| mit | -1,047,409,870,784,264,300 | 29.526316 | 85 | 0.613793 | false |
better-dem/geo_feedback | survey/migrations/0001_initial.py | 1 | 3777 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-10-14 21:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='FeedbackGoal',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30)),
('description', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('polygon_coords', models.CharField(max_length=500)),
('feedback_goals', models.ManyToManyField(to='survey.FeedbackGoal')),
],
),
migrations.CreateModel(
name='ProjectResponse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('creation_time', models.DateTimeField(auto_now_add=True)),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='survey.Project')),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question_text', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='QuestionResponse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.CreateModel(
name='TMCQ',
fields=[
('question_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='survey.Question')),
('option1', models.CharField(max_length=30)),
('option2', models.CharField(max_length=30)),
('option3', models.CharField(max_length=30)),
('option4', models.CharField(max_length=30)),
('option5', models.CharField(max_length=30)),
],
bases=('survey.question',),
),
migrations.CreateModel(
name='TMCQResponse',
fields=[
('questionresponse_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='survey.QuestionResponse')),
('option_index', models.IntegerField()),
],
bases=('survey.questionresponse',),
),
migrations.AddField(
model_name='questionresponse',
name='project_response',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='survey.ProjectResponse'),
),
migrations.AddField(
model_name='questionresponse',
name='question',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='survey.Question'),
),
migrations.AddField(
model_name='question',
name='feedback_goal',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='survey.FeedbackGoal'),
),
]
| agpl-3.0 | -8,674,803,448,750,544,000 | 40.966667 | 210 | 0.568705 | false |
DirkHoffmann/indico | indico/modules/events/requests/base.py | 1 | 7309 | # This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from flask_pluginengine import plugin_context
from wtforms.fields import SubmitField, TextAreaField
from indico.core.config import config
from indico.core.db import db
from indico.modules.events.requests.notifications import (notify_accepted_request, notify_new_modified_request,
notify_rejected_request, notify_withdrawn_request)
from indico.modules.events.requests.views import WPRequestsEventManagement
from indico.util.date_time import now_utc
from indico.util.i18n import _
from indico.web.flask.templating import get_overridable_template_name, get_template_module
from indico.web.forms.base import FormDefaults, IndicoForm
class RequestFormBase(IndicoForm):
def __init__(self, *args, **kwargs):
self.event = kwargs.pop('event')
self.request = kwargs.pop('request')
super().__init__(*args, **kwargs)
class RequestManagerForm(IndicoForm):
action_buttons = {'action_save', 'action_accept', 'action_reject'}
comment = TextAreaField(_('Comment'),
description=_('The comment will be shown only if the request is accepted or rejected.'))
action_save = SubmitField(_('Save'))
action_accept = SubmitField(_('Accept'))
action_reject = SubmitField(_('Reject'))
class RequestDefinitionBase:
"""A service request which can be sent by event managers."""
#: the plugin containing this request definition - assigned automatically
plugin = None
#: the unique internal name of the request type
name = None
#: the title of the request type as shown to users
title = None
#: the :class:`IndicoForm` to use for the request form
form = None
#: the :class:`IndicoForm` to use for the request manager form
manager_form = RequestManagerForm
#: default values to use if there's no existing request
form_defaults = {}
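    # Minimal sketch of a concrete request type (all names below are hypothetical):
    #
    #   class AVRequestDefinition(RequestDefinitionBase):
    #       name = 'av'
    #       title = _('Audiovisual services')
    #       form = AVRequestForm  # an IndicoForm subclass
    #
    #       @classmethod
    #       def can_be_managed(cls, user):
    #           return user.is_admin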
@classmethod
def render_form(cls, event, **kwargs):
"""Render the request form.
:param event: the event the request is for
:param kwargs: arguments passed to the template
"""
tpl = get_overridable_template_name('event_request_details.html', cls.plugin, 'events/requests/')
return WPRequestsEventManagement.render_template(tpl, event, **kwargs)
@classmethod
def create_form(cls, event, existing_request=None):
"""Create the request form.
:param event: the event the request is for
:param existing_request: the :class:`Request` if there's an existing request of this type
:return: an instance of an :class:`IndicoForm` subclass
"""
defaults = FormDefaults(existing_request.data if existing_request else cls.form_defaults)
with plugin_context(cls.plugin):
return cls.form(prefix='request-', obj=defaults, event=event, request=existing_request)
@classmethod
def create_manager_form(cls, req):
"""Create the request management form.
:param req: the :class:`Request` of the request
:return: an instance of an :class:`IndicoForm` subclass
"""
defaults = FormDefaults(req, **req.data)
with plugin_context(cls.plugin):
return cls.manager_form(prefix='request-manage-', obj=defaults)
@classmethod
def get_notification_template(cls, name, **context):
"""Get the template module for a notification email.
:param name: the template name
:param context: data passed to the template
"""
tpl = get_overridable_template_name(name, cls.plugin, 'events/requests/emails/', 'emails/')
return get_template_module(tpl, **context)
@classmethod
def can_be_managed(cls, user):
"""Check whether the user is allowed to manage this request type.
:param user: a :class:`.User`
"""
raise NotImplementedError
@classmethod
def get_manager_notification_emails(cls):
"""Return the email addresses of users who manage requests of this type.
The email addresses are used only for notifications.
It usually makes sense to return the email addresses of the users who
        pass the :meth:`can_be_managed` check.
:return: set of email addresses
"""
return set()
@classmethod
def get_notification_reply_email(cls):
"""Return the *Reply-To* e-mail address for notifications."""
return config.SUPPORT_EMAIL
@classmethod
def send(cls, req, data):
"""Send a new/modified request.
:param req: the :class:`Request` of the request
:param data: the form data from the request form
"""
req.data = dict(req.data or {}, **data)
is_new = req.id is None
if is_new:
db.session.add(req)
db.session.flush() # we need the creation dt for the notification
notify_new_modified_request(req, is_new)
@classmethod
def withdraw(cls, req, notify_event_managers=True):
"""Withdraw the request.
:param req: the :class:`Request` of the request
:param notify_event_managers: if event managers should be notified
"""
from indico.modules.events.requests.models.requests import RequestState
req.state = RequestState.withdrawn
notify_withdrawn_request(req, notify_event_managers)
@classmethod
def accept(cls, req, data, user):
"""Accept the request.
To ensure that additional data is saved, this method should
        call :meth:`manager_save`.
:param req: the :class:`Request` of the request
:param data: the form data from the management form
:param user: the user processing the request
"""
from indico.modules.events.requests.models.requests import RequestState
cls.manager_save(req, data)
req.state = RequestState.accepted
req.processed_by_user = user
req.processed_dt = now_utc()
notify_accepted_request(req)
@classmethod
def reject(cls, req, data, user):
"""Reject the request.
To ensure that additional data is saved, this method should
        call :meth:`manager_save`.
:param req: the :class:`Request` of the request
:param data: the form data from the management form
:param user: the user processing the request
"""
from indico.modules.events.requests.models.requests import RequestState
cls.manager_save(req, data)
req.state = RequestState.rejected
req.processed_by_user = user
req.processed_dt = now_utc()
notify_rejected_request(req)
@classmethod
def manager_save(cls, req, data):
"""Save management-specific data.
This method is called when the management form is submitted without
accepting/rejecting the request (which is guaranteed to be already
accepted or rejected).
:param req: the :class:`Request` of the request
:param data: the form data from the management form
"""
req.comment = data['comment']
| gpl-3.0 | 3,155,499,741,189,574,000 | 36.290816 | 116 | 0.658093 | false |
tholewebgods/jenkins-scripts | jobs/syncgit.py | 1 | 10188 | import json
import re
import datetime
import time
import os
import os.path
import dulwich.repo
import jenkinscli
import xml.etree.ElementTree as ET
import sys
import argparse
import textwrap
# This script will create jobs for each remote branch found in the repository
# in the current directory. It will also remove the jobs whose branches no
# longer exist.
#
# - The branch name pattern can be configured
# - The template job name can be configured
# - A branch is being ignored if the last commit is older than a configurable
# amount of days
#
# Requirements:
# - Python 2.6 (2.7 should work too)
# - dulwich (install it using # pip install dulwich)
# - py-jenkins-cli (https://github.com/tholewebgods/py-jenkins-cli)
#
BINARY_NAME="syncgit"
VERSION="0.1"
# Default for max. commit age of a branch
DEFAULT_MAX_COMMIT_AGE=30
class Jenkins(object):
"""
Jenkins job management.
- job_tpl -- the exact job name used as a template (this job might/should be disabled)
- job_name_tpl -- the resulting job name, has to contain one "%s" placeholder that will be replaced with the sanitized branch name
"""
def __init__(self, host, cli_jar, ssh_key, job_tpl, job_name_tpl):
self._jenkins = jenkinscli.JenkinsCli(host, cli_jar, ssh_key)
self._job_template = job_tpl
self._job_name_tpl = job_name_tpl
"""
Create Job for Git ref name
"""
def create_job(self, ref_name):
# load template and replace placeholder in config
config_template = self._jenkins.get_job(self._job_template)
# deserialize
root = ET.fromstring(config_template)
xpath = ".//scm/branches/hudson.plugins.git.BranchSpec/name"
# get branch name config node
name_element = root.findall(xpath)
# check if a "name" node has been selected
if len(name_element) > 0:
# set branch name config
name_element[0].text = ref_name
else:
raise Exception("Missing Git branch spec config in config template (xpath: %s)" % (xpath))
# serialize DOM
config = ET.tostring(root)
# replace slashes in ref name to get clean job name and build job name
filtered_ref_name = ref_name.replace("origin/", "")
# Python 2.6 does not support flags=..., using (?i)
filtered_ref_name = re.sub("(?i)[^a-z0-9_-]+", "-", filtered_ref_name)
job_name = self._job_name_tpl % filtered_ref_name
print "Creating and enabling job '%s' for branch %s" % (job_name, ref_name)
self._jenkins.create_job(job_name, config)
self._jenkins.enable_job(job_name)
"""
Remove Job by Git ref name
"""
def remove_job(self, ref_name):
# replace slashes in ref name to get clean job name and build job name
filtered_ref_name = ref_name.replace("origin/", "")
# Python 2.6 does not support flags=..., using (?i)
filtered_ref_name = re.sub("(?i)[^a-z0-9_-]+", "-", filtered_ref_name)
job_name = self._job_name_tpl % filtered_ref_name
print "Removing job '%s' for branch '%s'" % (job_name, ref_name)
self._jenkins.delete_job(job_name)
# get branch from one Job's config
def _get_branch_from_config(self, config):
root = ET.fromstring(config)
name_element = root.findall(".//scm/branches/hudson.plugins.git.BranchSpec/name")
if len(name_element) == 1:
return name_element[0].text
else:
return None
"""
Get all branches that are configured by Jobs.
Examines each Job in the list for their branch names
"""
def get_currently_configured_branches(self):
jobs = self._jenkins.get_joblist()
branches = []
for job in jobs:
if re.match("^" + (self._job_name_tpl % ""), job):
config = self._jenkins.get_job(job)
branch_name = self._get_branch_from_config(config)
if not re.match("^refs/remotes/", branch_name):
branch_name = "refs/remotes/" + branch_name
branches.append(branch_name)
return branches
"""
Represents branches in Git
"""
class GitBranches(object):
"""
Git branch management.
repo -- Repository location (relative or absolute paths)
ref_matcher -- A regular expression that matches branch names to create jobs for
max_commit_age -- Max days the last commit was made to a branch
"""
def __init__(self, repo, ref_matcher, max_commit_age):
self._repo = dulwich.repo.Repo(repo)
self._ref_matcher = ref_matcher
self._max_commit_age = max_commit_age
def get_branches(self):
_refs = []
# iterate over branches (refs) and their SHA1
for ref, sha1 in self._repo.get_refs().iteritems():
# ref matches the configured matcher
if re.match(self._ref_matcher, ref):
obj = self._repo.get_object(sha1)
_refs.append([ref, sha1, obj.commit_time])
# filter (ref, SHA1, commit time) tupel for outdated branches
refs = filter(lambda x: self._within_days(x[2], self._max_commit_age), _refs)
# extract ref
refs = set([x[0] for x in refs])
return refs
# Return True if the Unix timestamp is within the timerange now - days
def _within_days(self, timestamp, days):
return datetime.datetime.fromtimestamp(timestamp) >= (datetime.datetime.now() + datetime.timedelta(days=-days))
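# Illustrative usage of GitBranches (the path and regex are examples only,
# matching the sample invocation shown in the help screen below):
#
#     branches = GitBranches("/tmp/sync-checkout",
#                            r"^refs/remotes/origin/dev/ACME-[0-9]+", 30).get_branches()
#     # -> set of ref names such as "refs/remotes/origin/dev/ACME-123",
#     #    limited to branches with a commit in the last 30 days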
class GitJenkinsSync(object):
def __init__(self, host, cli_jar, ssh_key, job_tpl, job_name_tpl, repo, ref_matcher, max_commit_age):
self._jenkins = Jenkins(host, cli_jar, ssh_key, job_tpl, job_name_tpl)
self._git = GitBranches(repo, ref_matcher, max_commit_age)
"""Do the actual sync. Query both sides, do diff/intersection and create/remove jobs"""
def sync(self):
git_branches = self._git.get_branches()
job_branches = set(self._jenkins.get_currently_configured_branches())
print "Found these branches in the repository:\n %s" % "\n ".join(git_branches)
print "Found these branches configured in Jenkins:\n %s" % "\n ".join(job_branches)
to_remove = job_branches - git_branches
if len(to_remove) > 0:
print "Remove these:\n %s" % "\n ".join(to_remove)
for ref in to_remove:
self._jenkins.remove_job(ref.replace("refs/remotes/", ""))
else:
print "No branch jobs to remove."
to_create = git_branches - job_branches
if len(to_create) > 0:
print "Create these:\n %s" % "\n ".join(to_create)
for ref in to_create:
self._jenkins.create_job(ref.replace("refs/remotes/", ""))
else:
print "No branch jobs to create."
class CustomParser(argparse.ArgumentParser):
# extend help screen to print more
def print_help(self):
super(CustomParser, self).print_help()
print "example usage:"
print """
Create a job named "Build Project XYZ TEMPLATE" and set "BBBBB" in the Git
config section for the branch name.
%s --host http://localhost:8080/ --key /home/jenkins/.ssh/id_rsa_local \\
--jar /tmp/jenkins_cli.jar --tpl-job "Build Project XYZ TEMPLATE" \\
--job-name-tpl "Build Project XYZ %%s" --git-repo /tmp/sync-checkout \\
--ref-regex "^refs/remotes/origin/((dev|bugfix)/ACME-[0-9]+|int/[0-9]+)" \\
--max-commit-age 14
This will create jobs named like "Build Project XYZ dev-ACME-123-name"
""" % (BINARY_NAME)
# Validating store action for --max-commit-age
class MaxAgeSwitchAction(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        if values > 1000 or values < 1:
            raise Exception("Max commit age %d is outside the allowed range 1 - 1000" % values)
        setattr(namespace, self.dest, values)
# Internal exception
class ArgumentValidationException(Exception):
def __init__(self, msg):
super(ArgumentValidationException, self).__init__(msg)
def _validate_arguments(parsed):
if not os.path.exists(parsed.ssh_key):
raise ArgumentValidationException("SSH Key does not exist: " + parsed.ssh_key)
if not os.path.exists(parsed.jar):
raise ArgumentValidationException("Jenkins CLI .jar does not exist: " + parsed.jar)
if parsed.jobname_tpl.count("%s") != 1:
raise ArgumentValidationException("Expected one \"%s\" placeholder in the job name template.")
if not os.path.exists(parsed.git_repo_path):
raise ArgumentValidationException("Git directory does not exist: " + parsed.git_repo_path)
try:
re.match(parsed.ref_regex, "")
except Exception as e:
raise ArgumentValidationException("Malformed regular expression '" + parsed.ref_regex + "': " + str(e))
def main(args):
# add_help=False,
parser = CustomParser(
prog=BINARY_NAME,
description="Sync Git branches by branch name pattern with corresponding jobs in Jenkins"
)
parser.add_argument( '-V','--version', action='version', version='%(prog)s ' + VERSION)
parser.add_argument(
'-J', '--host', dest="jenkins_host", action='store', metavar="URL", required=True,
help="URL to Jenkins in form <protocol>://<host>[:port][<path>]/"
)
parser.add_argument(
'-S', '--key', dest="ssh_key", action='store', metavar="PATH", required=True,
help="Path to the SSH key used for authentication"
)
parser.add_argument(
'-j', '--jar', dest="jar", action='store', metavar="PATH", required=True,
help="Path to the Jenkins CLI .jar"
)
parser.add_argument(
'-G', '--git-repo', dest="git_repo_path", action='store', metavar="PATH", required=True,
help="Path to the Git repository"
)
parser.add_argument(
'-T', '--tpl-job', dest="tpl_job", action='store', metavar="JOBNAME", required=True,
help="Name of the job used as template"
)
parser.add_argument(
'-n', '--job-name-tpl', dest="jobname_tpl", action='store', metavar="NAME", required=True,
help="Name template for the jobs being created, should contain \"%%s\" as placeholder for the branch name"
)
parser.add_argument(
'-R', '--ref-regex', dest="ref_regex", action='store', metavar="REGEX", required=True,
help="Regular expression matching the branch names to create jobs for"
)
parser.add_argument(
		'-a', '--max-commit-age', dest="max_commit_age", action=MaxAgeSwitchAction, type=int, metavar="DAYS", required=False,
		default=DEFAULT_MAX_COMMIT_AGE,
		help="Max days since the last commit was made on a branch. Defaults to %d" % DEFAULT_MAX_COMMIT_AGE
)
parsed = parser.parse_args(args)
_validate_arguments(parsed)
sync = GitJenkinsSync(
parsed.jenkins_host, parsed.jar, parsed.ssh_key,
parsed.tpl_job, parsed.jobname_tpl,
parsed.git_repo_path, parsed.ref_regex, parsed.max_commit_age
)
sync.sync()
if __name__ == "__main__":
    try:
        main(sys.argv[1:])
    except Exception as e:
        print "Error occurred: %s" % str(e)
| mit | 2,865,741,185,627,415,000 | 31.037736 | 132 | 0.688457 | false |
agacek/camkes-tool | camkes/runner/NameMangling.py | 1 | 12529 | #
# Copyright 2014, NICTA
#
# This software may be distributed and modified according to the terms of
# the BSD 2-Clause license. Note that NO WARRANTY is provided.
# See "LICENSE_BSD2.txt" for details.
#
# @TAG(NICTA_BSD)
#
'''This code manages the name mangling (and reversal of such) that needs to
happen in the templates and follow-on logic in the runner. E.g. based on the
name of a component instance, we need to construct a name of the control TCB.
The logic for performing that translation and (if necessary) reversing it later
is encapsulated here so it can more easily be modified.
Callers should only import and use the Perspective class. When instantiating
one of these, generally as much information as is known should be provided to
give Perspective the opportunity to spot internal inconsistencies. See the
comments in the class itself for further information.'''
from camkes.internal.dictutils import get_fields
import re
class Deriver(object):
    '''Logic for constructing one symbol from one or more other symbols. This
    class itself is never intended to be directly instantiated and is probably
    best understood by looking at the subclasses that inherit from it.'''
def inputs(self):
raise NotImplementedError
def output(self):
raise NotImplementedError
def derive(self, perspective):
raise NotImplementedError
class ForwardDeriver(Deriver):
'''Logic for deriving one symbol from several other symbols by way of
concatenation, interspersed with other static text.'''
def __init__(self, format, out):
self.format = format
self.out = out
def inputs(self):
return get_fields(self.format)
def output(self):
return self.out
def derive(self, perspective):
return self.format % perspective
class BackwardDeriver(Deriver):
'''Logic for deriving one symbol from one other symbol by pulling out a
substring of the input.'''
def __init__(self, regex, input, out):
self.regex = re.compile(regex)
self.input = input
self.out = out
def inputs(self):
return set([self.input])
def output(self):
return self.out
def derive(self, perspective):
m = self.regex.match(perspective[self.input])
if m is None:
return None
return m.group(1)
# The remaining derivers are for specific symbols (or qualities) that are not
# strings. These each need slightly inflected logic.
class ControlDeriver(Deriver):
def __init__(self, regex, input):
self.regex = re.compile(regex)
self.input = input
def inputs(self):
return set([self.input])
def output(self):
return 'control'
def derive(self, perspective):
return self.regex.match(perspective[self.input]) is not None
class PoolDeriver(Deriver):
def __init__(self, regex, input):
self.regex = re.compile(regex)
self.input = input
def inputs(self):
return set([self.input])
def output(self):
return 'pool'
def derive(self, perspective):
return self.regex.match(perspective[self.input]) is not None
class PoolIndexDeriver(Deriver):
def __init__(self, regex, input):
self.regex = re.compile(regex)
self.input = input
def inputs(self):
return set([self.input])
def output(self):
return 'pool_index'
def derive(self, perspective):
m = self.regex.match(perspective[self.input])
if m is None:
return None
return int(m.group(1))
class FromControlDeriver(ForwardDeriver):
def derive(self, perspective):
if not perspective.get('control', False):
return None
return self.format % perspective
class DMAFrameIndexDeriver(Deriver):
def __init__(self, regex, input):
self.regex = re.compile(regex)
self.input = input
def inputs(self):
return set([self.input])
def output(self):
return 'dma_frame_index'
def derive(self, perspective):
m = self.regex.match(perspective[self.input])
if m is None:
return None
return int(m.group(1))
# Phases.
RUNNER, TEMPLATES, FILTERS = range(3)
# Instantiate the derivers to describe how name mangling happens in CAmkES. If
# you want to modify the name mangling scheme, this is the place to do it.
DERIVATIONS = {
RUNNER:[
ForwardDeriver('pd_%(group)s_group_bin', 'pd'),
ForwardDeriver('pd_%(elf_name)s', 'pd'),
BackwardDeriver(r'^pd_(.+)$', 'pd', 'elf_name'),
BackwardDeriver(r'^pd_(.+)_group_bin$', 'pd', 'group'),
ForwardDeriver('cnode_%(group)s', 'cnode'),
BackwardDeriver(r'^cnode_(.+)$', 'cnode', 'group'),
], TEMPLATES:[
ForwardDeriver('dma_frame_%(dma_frame_index)s', 'dma_frame_symbol'),
DMAFrameIndexDeriver(r'^dma_frame_([0-9]+)$', 'dma_frame_symbol'),
ForwardDeriver('_camkes_ipc_buffer_%(instance)s_%(interface)s', 'ipc_buffer_symbol'),
FromControlDeriver('_camkes_ipc_buffer_%(instance)s__control', 'ipc_buffer_symbol'),
ControlDeriver(r'^_camkes_ipc_buffer_.+__control$', 'ipc_buffer_symbol'),
ForwardDeriver('_camkes_stack_%(instance)s_%(interface)s', 'stack_symbol'),
FromControlDeriver('_camkes_stack_%(instance)s__control', 'stack_symbol'),
ControlDeriver(r'^_camkes_stack_.+__control$', 'stack_symbol'),
ForwardDeriver('%(dataport)s_data', 'dataport_symbol'),
BackwardDeriver(r'^([^ ]+)_data$', 'dataport_symbol', 'dataport'),
ForwardDeriver('%(to_interface)s_attributes', 'hardware_attribute'),
BackwardDeriver(r'^(.+)_attributes', 'hardware_attribute', 'to_interface'),
ForwardDeriver('%(group)s_group_bin', 'elf_name'),
BackwardDeriver(r'^(.+)_group_bin', 'elf_name', 'group'),
ForwardDeriver('%(instance)s_main', 'entry_symbol'),
BackwardDeriver(r'^(.+)_main$', 'entry_symbol', 'instance'),
ForwardDeriver('%(instance)s_tls_setup', 'tls_symbol'),
BackwardDeriver(r'^(.+)_tls_setup$', 'tls_symbol', 'instance'),
ForwardDeriver('camkes_dma_pool', 'dma_pool_symbol'),
], FILTERS:[
ForwardDeriver('%(instance)s_tcb_%(interface)s', 'tcb'),
FromControlDeriver('%(instance)s_tcb__control', 'tcb'),
BackwardDeriver(r'^(.+)_tcb_.+$', 'tcb', 'instance'),
BackwardDeriver(r'^.+_tcb_([^_].*)$', 'tcb', 'interface'),
ControlDeriver(r'^.+_tcb__control$', 'tcb'),
ForwardDeriver('_camkes_ipc_buffer_%(instance)s_%(interface)s', 'ipc_buffer_symbol'),
FromControlDeriver('_camkes_ipc_buffer_%(instance)s__control', 'ipc_buffer_symbol'),
ControlDeriver(r'^_camkes_ipc_buffer_.+__control$', 'ipc_buffer_symbol'),
ForwardDeriver('_camkes_stack_%(instance)s_%(interface)s', 'stack_symbol'),
FromControlDeriver('_camkes_stack_%(instance)s__control', 'stack_symbol'),
ControlDeriver(r'^_camkes_stack_.+__control$', 'stack_symbol'),
ForwardDeriver('camkes %(instance)s_main', 'entry_symbol'),
BackwardDeriver(r'^camkes (.+)_main$', 'entry_symbol', 'instance'),
ForwardDeriver('camkes %(instance)s_tls_setup', 'tls_symbol'),
BackwardDeriver(r'^camkes (.+)_tls_setup$', 'tls_symbol', 'instance'),
ForwardDeriver('%(group)s_group_bin', 'elf_name'),
BackwardDeriver(r'^(.+)_group_bin', 'elf_name', 'group'),
PoolDeriver(r'.+_tcb_pool_[0-9]+$', 'tcb'),
PoolIndexDeriver(r'.+_tcb_pool_([0-9]+)$', 'tcb'),
ForwardDeriver('pd_%(group)s_group_bin', 'pd'),
ForwardDeriver('pd_%(elf_name)s', 'pd'),
BackwardDeriver(r'^pd_(.+)$', 'pd', 'elf_name'),
BackwardDeriver(r'^pd_(.+)_group_bin$', 'pd', 'group'),
ForwardDeriver('camkes %(instance)s %(dataport)s data', 'dataport_symbol'),
BackwardDeriver(r'^camkes ([^ ]+) [^ ]+ data$', 'dataport_symbol', 'instance'),
BackwardDeriver(r'^camkes [^ ]+ ([^ ]+) data$', 'dataport_symbol', 'dataport'),
ForwardDeriver('%(to_interface)s_attributes', 'hardware_attribute'),
BackwardDeriver(r'^(.+)_attributes', 'hardware_attribute', 'to_interface'),
ForwardDeriver('camkes %(instance)s_dma_pool', 'dma_pool_symbol'),
BackwardDeriver(r'^camkes (.+)_dma_pool$', 'dma_pool_symbol', 'instance'),
ForwardDeriver('%(instance)s_dma_frame_%(dma_frame_index)s', 'dma_frame_symbol'),
BackwardDeriver(r'^(.+)_dma_frame_[0-9]+$', 'dma_frame_symbol', 'instance'),
DMAFrameIndexDeriver(r'^.+_dma_frame_([0-9]+)$', 'dma_frame_symbol'),
ControlDeriver(r'^_control_priority$', 'priority_attribute'),
FromControlDeriver('_control_priority', 'priority_attribute'),
ForwardDeriver('%(interface)s_priority', 'priority_attribute'),
BackwardDeriver(r'^([^_].*)_priority$', 'priority_attribute', 'interface'),
ControlDeriver(r'^_control_domain$', 'domain_attribute'),
FromControlDeriver('_control_domain', 'domain_attribute'),
ForwardDeriver('%(interface)s_domain', 'domain_attribute'),
BackwardDeriver(r'^([^_].*)_domain$', 'domain_attribute', 'interface'),
ForwardDeriver('cnode_%(group)s', 'cnode'),
BackwardDeriver(r'^cnode_(.+)$', 'cnode', 'group'),
],
}
class Perspective(object):
'''A partial state from which to mangle symbols. That may make no sense,
but consider this as a collection of *some* of the symbols we need from
which *all* the symbols we need can be derived. You need to pass some
initial symbols in to the constructor. These may not be sufficient to
derive all other known symbols, but they must be sufficient to derive any
you need. The known symbols can be updated at any point via __setitem__. A
more appropriate name for this class would be 'context', but I didn't want
to cause confusion by introducing yet another 'context' into this code
base.'''
def __init__(self, phase=FILTERS, **kwargs):
self.kwargs = kwargs
self.derivations = DERIVATIONS[phase]
if __debug__:
# When optimisations are not enabled, infer everything possible
# upfront (not lazily). This can catch some internal
# inconsistencies though we will probably end up inferring things
# we don't need.
self._infer()
def _infer(self, limit=None):
'''Infer some or all possible unknown symbols. If the limit argument is
given, inference stops when we know that symbol.'''
prev_keys = set(self.kwargs.keys())
while limit is None or limit not in prev_keys:
for d in self.derivations:
if d.inputs() <= set(self.kwargs.keys()):
# We have enough information to use this derivation.
v = d.derive(self.kwargs)
if v is None:
# We could not derive this value.
continue
k = d.output()
if k in self.kwargs:
# We already knew this symbol. It had better have been
# the same as what we just derived for consistency.
assert self.kwargs[k] == v, \
'perspective is internally inconsistent: %s' % self.kwargs
else:
self.kwargs[k] = v
next_keys = set(self.kwargs.keys())
if prev_keys == next_keys:
# We didn't learn anything new this time around.
break
prev_keys = next_keys
def __setitem__(self, key, value):
assert key not in self.kwargs or self.kwargs[key] == value
# The following assertion is conservative. In the future, it may make
# sense to set some 'core' strings that we cannot infer.
assert key in map(lambda x: x.output(), self.derivations), \
'setting \'%s\' that is not inferrable' % key
self.kwargs[key] = value
if __debug__:
self._infer()
def __getitem__(self, key):
# As for the assertion in __setitem__, this is conservative.
assert key in map(lambda x: x.output(), self.derivations), \
'getting \'%s\' that is not inferrable' % key
if key not in self.kwargs:
self._infer(key)
if key not in self.kwargs:
raise Exception('not enough information to infer attribute, %s' % key)
return self.kwargs[key]
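# Illustrative usage of Perspective (not part of the original module); the
# derived values shown follow directly from the TEMPLATES derivations above:
#
#     p = Perspective(phase=TEMPLATES, instance='comp', interface='iface')
#     p['stack_symbol']       # -> '_camkes_stack_comp_iface'
#     p['ipc_buffer_symbol']  # -> '_camkes_ipc_buffer_comp_iface'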
| bsd-2-clause | -1,779,961,257,078,954,200 | 45.576208 | 93 | 0.621758 | false |
JavaCardOS/pyResMan | pyResMan/Dialogs/pyResManCommandDialog_MifareAuthentication.py | 1 | 3904 | # -*- coding: utf-8 -*-
'''
Modified on 2017-03-28
@author: [email protected]
@organization: https://www.javacardos.com/
@copyright: JavaCardOS Technologies. All rights reserved.
'''
from pyResMan.BaseDialogs.pyResManCommandDialogBase_MifareAuthentication import CommandDialogBase_MifareAuthentication
from pyResMan.Util import IDOK, IDCANCEL
from pyResMan.Util import HexValidator, Util
###########################################################################
## Class CommandDialog_MifareAuthentication
###########################################################################
MODE_IDLE = 0
MODE_PARSING = 1
MODE_BUILDING = 2
class CommandDialog_MifareAuthentication ( CommandDialogBase_MifareAuthentication ):
def __init__( self, parent, bytesCount = 1 ):
CommandDialogBase_MifareAuthentication.__init__ ( self, parent )
self.__mode = MODE_IDLE
self._textctrlCommandValue.SetMaxLength(bytesCount * 2)
# Set validator;
self._textctrlCommandValue.SetValidator(HexValidator())
self._textctrlUID.SetValue('00000000')
for i in range(256):
self._choiceBlockNumber.Append('%d' %(i))
def _buttonOKOnButtonClick(self, event):
self.EndModal(IDOK)
def _buttonCancelOnButtonClick(self, event):
self.EndModal(IDCANCEL)
def getCommandName(self):
return self._statictextCommandName.GetLabelText()
def getCommandValue(self):
return self._textctrlCommandValue.GetValue()
def setCommandName(self, name):
self._statictextCommandName.SetLabelText(name)
self.SetTitle(name)
def setCommandValue(self, value):
self._textctrlCommandValue.SetValue(value)
self.parseCommandValue()
def parseCommandValue(self):
if self.__mode == MODE_IDLE:
self.__mode = MODE_PARSING
commandValue = Util.s2vl(self._textctrlCommandValue.GetValue())
self._choiceMode.SetSelection(0 if commandValue[0] == 0x60 else 1)
self._choiceBlockNumber.SetSelection(commandValue[1])
self._textctrlKey.SetValue(Util.vl2s(commandValue[2 : 8], ''))
if len(commandValue) >= 12:
self._textctrlUID.SetValue(Util.vl2s(commandValue[8 : ], ''))
self.__mode = MODE_IDLE
else:
pass
def buildCommandValue(self):
if self.__mode == MODE_IDLE:
self.__mode = MODE_BUILDING
commandValue = []
# Mode;
commandValue.append(0x60 if (self._choiceMode.GetSelection() == 0) else 0x61)
# Sector number;
commandValue.append(self._choiceBlockNumber.GetSelection())
# Key data;
keyData= [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]
try:
keyData = Util.s2vl(self._textctrlKey.GetValue())
except:
pass
for kd in keyData:
commandValue.append(kd)
# UID;
UID = [0x65, 0xE0, 0x5E, 0x1E]
try:
UID = Util.s2vl(self._textctrlUID.GetValue())
except:
pass
for id in UID:
commandValue.append(id)
#
self._textctrlCommandValue.SetValue(Util.vl2s(commandValue, ''))
self.__mode = MODE_IDLE
else:
pass
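    # Layout produced by buildCommandValue (illustrative; assumes Util.vl2s
    # renders two-digit uppercase hex): key type A (0x60), block 4, the
    # default key FF FF FF FF FF FF and UID 65 E0 5E 1E yield the command
    # value "6004FFFFFFFFFFFF65E05E1E".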
def _choiceModeOnChoice( self, event ):
self.buildCommandValue()
def _choiceBlockNumberOnChoice(self, event):
self.buildCommandValue()
def _textctrlKeyOnText( self, event ):
self.buildCommandValue()
def _textctrlUIDOnText(self, event):
self.buildCommandValue()
def _textctrlCommandValueOnText( self, event ):
self.parseCommandValue()
| gpl-2.0 | -8,872,457,769,474,260,000 | 32.367521 | 118 | 0.574795 | false |
fofix/fretwork | fretwork/midi/constants.py | 1 | 6316 | # -*- coding: utf-8 -*-
"""
A collection of constants from the midi spec.
"""
###################################################
## Midi channel events (The most usual events)
## also called "Channel Voice Messages"
NOTE_OFF = 0x80
# 1000cccc 0nnnnnnn 0vvvvvvv (channel, note, velocity)
NOTE_ON = 0x90
# 1001cccc 0nnnnnnn 0vvvvvvv (channel, note, velocity)
AFTERTOUCH = 0xA0
# 1010cccc 0nnnnnnn 0vvvvvvv (channel, note, velocity)
CONTINUOUS_CONTROLLER = 0xB0 # see Channel Mode Messages!!!
# 1011cccc 0ccccccc 0vvvvvvv (channel, controller, value)
PATCH_CHANGE = 0xC0
# 1100cccc 0ppppppp (channel, program)
CHANNEL_PRESSURE = 0xD0
# 1101cccc 0ppppppp (channel, pressure)
PITCH_BEND = 0xE0
# 1110cccc 0vvvvvvv 0wwwwwww (channel, value-lo, value-hi)
###################################################
## Channel Mode Messages (Continuous Controller)
## They share a status byte.
## The controller makes the difference here
# High resolution continuous controllers (MSB)
BANK_SELECT = 0x00
MODULATION_WHEEL = 0x01
BREATH_CONTROLLER = 0x02
FOOT_CONTROLLER = 0x04
PORTAMENTO_TIME = 0x05
DATA_ENTRY = 0x06
CHANNEL_VOLUME = 0x07
BALANCE = 0x08
PAN = 0x0A
EXPRESSION_CONTROLLER = 0x0B
EFFECT_CONTROL_1 = 0x0C
EFFECT_CONTROL_2 = 0x0D
GEN_PURPOSE_CONTROLLER_1 = 0x10
GEN_PURPOSE_CONTROLLER_2 = 0x11
GEN_PURPOSE_CONTROLLER_3 = 0x12
GEN_PURPOSE_CONTROLLER_4 = 0x13
# High resolution continuous controllers (LSB)
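# NOTE: this LSB block re-binds several of the MSB names defined above (e.g.
# BANK_SELECT), so the later assignment wins and the MSB values are shadowed.
# The names are kept as-is to preserve this module's public interface.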
BANK_SELECT = 0x20
MODULATION_WHEEL = 0x21
BREATH_CONTROLLER = 0x22
FOOT_CONTROLLER = 0x24
PORTAMENTO_TIME = 0x25
DATA_ENTRY = 0x26
CHANNEL_VOLUME = 0x27
BALANCE = 0x28
PAN = 0x2A
EXPRESSION_CONTROLLER = 0x2B
EFFECT_CONTROL_1 = 0x2C
EFFECT_CONTROL_2 = 0x2D
GENERAL_PURPOSE_CONTROLLER_1 = 0x30
GENERAL_PURPOSE_CONTROLLER_2 = 0x31
GENERAL_PURPOSE_CONTROLLER_3 = 0x32
GENERAL_PURPOSE_CONTROLLER_4 = 0x33
# Switches
SUSTAIN_ONOFF = 0x40
PORTAMENTO_ONOFF = 0x41
SOSTENUTO_ONOFF = 0x42
SOFT_PEDAL_ONOFF = 0x43
LEGATO_ONOFF = 0x44
HOLD_2_ONOFF = 0x45
# Low resolution continuous controllers
SOUND_CONTROLLER_1 = 0x46 # (TG: Sound Variation; FX: Exciter On/Off)
SOUND_CONTROLLER_2 = 0x47 # (TG: Harmonic Content; FX: Compressor On/Off)
SOUND_CONTROLLER_3 = 0x48 # (TG: Release Time; FX: Distortion On/Off)
SOUND_CONTROLLER_4 = 0x49 # (TG: Attack Time; FX: EQ On/Off)
SOUND_CONTROLLER_5 = 0x4A # (TG: Brightness; FX: Expander On/Off)
SOUND_CONTROLLER_6 = 0x4B # (TG: Undefined; FX: Reverb On/Off)
SOUND_CONTROLLER_7 = 0x4C # (TG: Undefined; FX: Delay OnOff)
SOUND_CONTROLLER_8 = 0x4D # (TG: Undefined; FX: Pitch Transpose OnOff)
SOUND_CONTROLLER_9 = 0x4E # (TG: Undefined; FX: Flange/Chorus OnOff)
SOUND_CONTROLLER_10 = 0x4F # (TG: Undefined; FX: Special Effects OnOff)
GENERAL_PURPOSE_CONTROLLER_5 = 0x50
GENERAL_PURPOSE_CONTROLLER_6 = 0x51
GENERAL_PURPOSE_CONTROLLER_7 = 0x52
GENERAL_PURPOSE_CONTROLLER_8 = 0x53
PORTAMENTO_CONTROL = 0x54 # (PTC) (0vvvvvvv is the source Note number) (Detail)
EFFECTS_1 = 0x5B # (Ext. Effects Depth)
EFFECTS_2 = 0x5C # (Tremelo Depth)
EFFECTS_3 = 0x5D # (Chorus Depth)
EFFECTS_4 = 0x5E # (Celeste Depth)
EFFECTS_5 = 0x5F # (Phaser Depth)
DATA_INCREMENT = 0x60 # (0vvvvvvv is n/a; use 0)
DATA_DECREMENT = 0x61 # (0vvvvvvv is n/a; use 0)
NON_REGISTERED_PARAMETER_NUMBER = 0x62 # (LSB)
NON_REGISTERED_PARAMETER_NUMBER = 0x63 # (MSB)
REGISTERED_PARAMETER_NUMBER = 0x64 # (LSB)
REGISTERED_PARAMETER_NUMBER = 0x65 # (MSB)
# Channel Mode messages - (Detail)
ALL_SOUND_OFF = 0x78
RESET_ALL_CONTROLLERS = 0x79
LOCAL_CONTROL_ONOFF = 0x7A
ALL_NOTES_OFF = 0x7B
OMNI_MODE_OFF = 0x7C # (also causes ANO)
OMNI_MODE_ON = 0x7D # (also causes ANO)
MONO_MODE_ON = 0x7E # (Poly Off; also causes ANO)
POLY_MODE_ON = 0x7F # (Mono Off; also causes ANO)
###################################################
## System Common Messages, for all channels
SYSTEM_EXCLUSIVE = 0xF0
# 11110000 0iiiiiii 0ddddddd ... 11110111
MTC = 0xF1 # MIDI Time Code Quarter Frame
# 11110001
SONG_POSITION_POINTER = 0xF2
# 11110010 0vvvvvvv 0wwwwwww (lo-position, hi-position)
SONG_SELECT = 0xF3
# 11110011 0sssssss (songnumber)
#UNDEFINED = 0xF4
## 11110100
#UNDEFINED = 0xF5
## 11110101
TUNING_REQUEST = 0xF6
# 11110110
END_OFF_EXCLUSIVE = 0xF7 # terminator
# 11110111 # End of system exclusive
###################################################
## Midifile meta-events
SEQUENCE_NUMBER = 0x00 # 00 02 ss ss (seq-number)
TEXT = 0x01 # 01 len text...
COPYRIGHT = 0x02 # 02 len text...
SEQUENCE_NAME = 0x03 # 03 len text...
INSTRUMENT_NAME = 0x04 # 04 len text...
LYRIC = 0x05 # 05 len text...
MARKER = 0x06 # 06 len text...
CUEPOINT = 0x07 # 07 len text...
PROGRAM_NAME = 0x08 # 08 len text...
DEVICE_NAME = 0x09 # 09 len text...
MIDI_CH_PREFIX = 0x20 # MIDI channel prefix assignment (unofficial)
MIDI_PORT = 0x21 # 21 01 port, legacy stuff but still used
END_OF_TRACK = 0x2F # 2f 00
TEMPO = 0x51 # 51 03 tt tt tt (tempo in us/quarternote)
SMTP_OFFSET = 0x54 # 54 05 hh mm ss ff xx
TIME_SIGNATURE = 0x58 # 58 04 nn dd cc bb
KEY_SIGNATURE = 0x59 # ??? len text...
SPECIFIC = 0x7F # Sequencer specific event
FILE_HEADER = 'MThd'
TRACK_HEADER = 'MTrk'
###################################################
## System Realtime messages
## I don't suppose these are to be found in midi files?!
TIMING_CLOCK = 0xF8
# undefined = 0xF9
SONG_START = 0xFA
SONG_CONTINUE = 0xFB
SONG_STOP = 0xFC
# undefined = 0xFD
ACTIVE_SENSING = 0xFE
SYSTEM_RESET = 0xFF
###################################################
## META EVENT, it is used only in midi files.
## In transmitted data it means system reset!!!
META_EVENT = 0xFF
# 11111111
###################################################
## Helper functions
def is_status(byte):
return (byte & 0x80) == 0x80 # 1000 0000
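# Illustrative checks for is_status():
#
#     is_status(NOTE_ON)   # -> True  (status bytes have the high bit set)
#     is_status(0x45)      # -> False (data bytes are in the range 0..127)
#
# For channel voice messages the channel number is carried in the low nibble,
# e.g. (0x91 & 0xF0) == NOTE_ON and (0x91 & 0x0F) == 1 (the second of 16 channels).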
| gpl-2.0 | 6,677,587,943,104,313,000 | 29.365385 | 144 | 0.611463 | false |
napalm-automation/napalm-yang | napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/bgp/global_/afi_safis/afi_safi/l3vpn_ipv4_unicast/__init__.py | 1 | 12158 | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import prefix_limit
class l3vpn_ipv4_unicast(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/bgp/global/afi-safis/afi-safi/l3vpn-ipv4-unicast. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Unicast IPv4 L3VPN configuration options
"""
__slots__ = ("_path_helper", "_extmethods", "__prefix_limit")
_yang_name = "l3vpn-ipv4-unicast"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__prefix_limit = YANGDynClass(
base=prefix_limit.prefix_limit,
is_container="container",
yang_name="prefix-limit",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"bgp",
"global",
"afi-safis",
"afi-safi",
"l3vpn-ipv4-unicast",
]
def _get_prefix_limit(self):
"""
Getter method for prefix_limit, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/l3vpn_ipv4_unicast/prefix_limit (container)
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
return self.__prefix_limit
def _set_prefix_limit(self, v, load=False):
"""
Setter method for prefix_limit, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/l3vpn_ipv4_unicast/prefix_limit (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_prefix_limit is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_prefix_limit() directly.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=prefix_limit.prefix_limit,
is_container="container",
yang_name="prefix-limit",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """prefix_limit must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=prefix_limit.prefix_limit, is_container='container', yang_name="prefix-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__prefix_limit = t
if hasattr(self, "_set"):
self._set()
def _unset_prefix_limit(self):
self.__prefix_limit = YANGDynClass(
base=prefix_limit.prefix_limit,
is_container="container",
yang_name="prefix-limit",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
prefix_limit = __builtin__.property(_get_prefix_limit, _set_prefix_limit)
_pyangbind_elements = OrderedDict([("prefix_limit", prefix_limit)])
from . import prefix_limit
class l3vpn_ipv4_unicast(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/bgp/global/afi-safis/afi-safi/l3vpn-ipv4-unicast. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Unicast IPv4 L3VPN configuration options
"""
__slots__ = ("_path_helper", "_extmethods", "__prefix_limit")
_yang_name = "l3vpn-ipv4-unicast"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__prefix_limit = YANGDynClass(
base=prefix_limit.prefix_limit,
is_container="container",
yang_name="prefix-limit",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"bgp",
"global",
"afi-safis",
"afi-safi",
"l3vpn-ipv4-unicast",
]
def _get_prefix_limit(self):
"""
Getter method for prefix_limit, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/l3vpn_ipv4_unicast/prefix_limit (container)
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
return self.__prefix_limit
def _set_prefix_limit(self, v, load=False):
"""
Setter method for prefix_limit, mapped from YANG variable /network_instances/network_instance/protocols/protocol/bgp/global/afi_safis/afi_safi/l3vpn_ipv4_unicast/prefix_limit (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_prefix_limit is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_prefix_limit() directly.
YANG Description: Configure the maximum number of prefixes that will be
accepted from a peer
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=prefix_limit.prefix_limit,
is_container="container",
yang_name="prefix-limit",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """prefix_limit must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=prefix_limit.prefix_limit, is_container='container', yang_name="prefix-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=True)""",
}
)
self.__prefix_limit = t
if hasattr(self, "_set"):
self._set()
def _unset_prefix_limit(self):
self.__prefix_limit = YANGDynClass(
base=prefix_limit.prefix_limit,
is_container="container",
yang_name="prefix-limit",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=True,
)
prefix_limit = __builtin__.property(_get_prefix_limit, _set_prefix_limit)
_pyangbind_elements = OrderedDict([("prefix_limit", prefix_limit)])
| apache-2.0 | -3,330,760,887,242,618,000 | 37.596825 | 395 | 0.588255 | false |
COSMOGRAIL/COSMOULINE | pipe/modules/readandreplace_fct.py | 1 | 2009 | def justreplace(inputstring, repdict):
template = inputstring
for key, value in repdict.iteritems():
template = template.replace(key, value)
return template
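# Illustrative example: justreplace("Hello NAME", {"NAME": "world"})
# returns "Hello world".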
def justread(inputfilename):
import sys
import os
infile = open(inputfilename, 'r')
content = infile.read()
infile.close()
return content
# Try to use readmancat in variousfct instead; it's better.
#def readmancoords(mancatfile): # reads a man cat with format "id x y flux" and comments + blank lines
#
# import sys
# import os
#
# print "WARNING THIS FUNCTION IS DEPRECATED"
#
# myfile = open(mancatfile, "r")
# lines = myfile.readlines()
# myfile.close
# table=[]
# for line in lines:
# if line[0] == '#' or len(line) < 4:
# continue
# elements = line.split()
# if len(elements) != 4:
# print "Wrong format :", mancatfile
# sys.exit()
# starid = elements[0]
# xpos = float(elements[1])
# ypos = float(elements[2])
# flux = float(elements[3])
# table.append([starid, xpos, ypos, flux])
#
# print "I've read", len(table), "stars from", mancatfile
# return table
def readouttxt(outtxtfile, nbimg): # function to read the out.txt written by deconv.exe
import sys
import os
infile = open(outtxtfile, 'r')
content = infile.readlines()
nblines = len(content)
print "Number of lines :", nblines
infile.close()
i = 0
intpostable = []
while i < nblines:
line = content[i]
if line.find("Nombre d")>=0:
nbiter = line.split()[-1]
if nbiter[0] == ":":
nbiter = nbiter[1:]
nbiter = int(nbiter)
print "Number of iterations :", nbiter
if line.find(" - Num")>=0:
table = []
for j in range(i+1, i+1+nbimg):
values = map(float, content[j].split())
table.append(values)
intpostable.append(table)
i = i+nbimg
if line.find("* Valeurs finales de z1, z2, delta1 et delta2 :")>=0:
zdeltatable = []
for j in range(i+1, i+1+nbimg):
values = map(float, content[j].split())
zdeltatable.append(values)
i = i+nbimg
i = i+1
return intpostable, zdeltatable
| gpl-3.0 | 1,196,499,792,120,505,900 | 21.829545 | 102 | 0.649079 | false |
Eksmo/calibre | src/calibre/gui2/dialogs/conversion_error_ui.py | 1 | 2280 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/gugu/w/calibre/src/calibre/gui2/dialogs/conversion_error.ui'
#
# Created: Thu Jul 19 23:32:30 2012
# by: PyQt4 UI code generator 4.9.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_ConversionErrorDialog(object):
def setupUi(self, ConversionErrorDialog):
ConversionErrorDialog.setObjectName(_fromUtf8("ConversionErrorDialog"))
ConversionErrorDialog.resize(658, 515)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(I("lt.png"))), QtGui.QIcon.Normal, QtGui.QIcon.Off)
ConversionErrorDialog.setWindowIcon(icon)
self.gridlayout = QtGui.QGridLayout(ConversionErrorDialog)
self.gridlayout.setObjectName(_fromUtf8("gridlayout"))
self.label = QtGui.QLabel(ConversionErrorDialog)
self.label.setText(_fromUtf8(""))
self.label.setPixmap(QtGui.QPixmap(_fromUtf8(I("dialog_error.png"))))
self.label.setObjectName(_fromUtf8("label"))
self.gridlayout.addWidget(self.label, 0, 0, 1, 1)
self.text = QtGui.QTextBrowser(ConversionErrorDialog)
self.text.setObjectName(_fromUtf8("text"))
self.gridlayout.addWidget(self.text, 0, 1, 2, 1)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.gridlayout.addItem(spacerItem, 1, 0, 1, 1)
self.buttonBox = QtGui.QDialogButtonBox(ConversionErrorDialog)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.gridlayout.addWidget(self.buttonBox, 2, 1, 1, 1)
self.retranslateUi(ConversionErrorDialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), ConversionErrorDialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), ConversionErrorDialog.reject)
QtCore.QMetaObject.connectSlotsByName(ConversionErrorDialog)
def retranslateUi(self, ConversionErrorDialog):
ConversionErrorDialog.setWindowTitle(_("ERROR"))
| gpl-3.0 | 6,277,705,584,022,447,000 | 45.530612 | 120 | 0.716228 | false |
research-team/NEUCOGAR | NEST/cube/integration/excitement/synapses.py | 1 | 3707 | from keys import *
from simulation_params import *
import nest
import numpy.random as random
# Neuron parameters
hh_neuronparams = {'E_L': -70., # Resting membrane potential in mV
'V_T': -63., # Voltage offset that controls dynamics.
# -63mV results in a threshold around -50mV.
                   'C_m': 2., # Capacity of the membrane in pF
't_ref': 2., # Duration of refractory period (V_m = V_reset) in ms
'tau_syn_ex': 5., # Time constant of postsynaptic excitatory currents in ms
'tau_syn_in': 10. # Time constant of postsynaptic inhibitory currents in ms
}
# Synapse common parameters
STDP_synapseparams = {
'alpha': random.normal(0.5, 5.0), # Asymmetry parameter (scales depressing increments as alpha*lambda)
'lambda': 0.5 # Step size
}
# Glutamate synapse
STDP_synparams_Glu = dict({'delay': random.uniform(low=1.0, high=1.3), # Distribution of delay values for connections
'weight': w_Glu, # Weight (power) of synapse
'Wmax': 20.}, **STDP_synapseparams) # Maximum allowed weight
# GABA synapse
STDP_synparams_GABA = dict({'delay': random.uniform(low=1.0, high=1.3),
'weight': w_GABA,
'Wmax': -20.}, **STDP_synapseparams)
# Acetylcholine synapse
STDP_synparams_ACh = dict({'delay': random.uniform(low=1.0, high=1.3),
'weight': w_ACh,
'Wmax': 20.}, **STDP_synapseparams)
# Noradrenaline excitatory synapse
NORA_synparams_ex = dict({'delay': 1.,
'weight': w_NA_ex,
'Wmax': 100.})
# Noradrenaline inhibitory synapse
NORA_synparams_in = dict({'delay': 1.,
'weight': w_NA_in,
'Wmax': -100.})
# Dopamine excitatory synapse
DOPA_synparams_ex = dict({'delay': 1.,
'weight': w_DA_ex,
'Wmax': 100.})
# Dopamine inhibitory synapse
DOPA_synparams_in = dict({'delay': 1.,
'weight': w_DA_in,
'Wmax': -100.})
# Serotonin excitatory synapse
SERO_synparams_ex = dict({'delay': 1.,
'weight': w_SERO_ex,
'Wmax': 100.})
# Serotonin inhibitory synapse
SERO_synparams_in = dict({'delay': 1.,
'weight': w_SERO_in,
'Wmax': -100.})
# Dictionary of synapses with keys and their parameters
synapses = {GABA: (gaba_synapse, w_GABA ),
Glu: (glu_synapse, w_Glu ),
ACh: (ach_synapse, w_ACh ),
NA_ex: (nora_synapse_ex, w_NA_ex),
NA_in: (nora_synapse_in, w_NA_in),
DA_ex: (dopa_synapse_ex, w_DA_ex),
DA_in: (dopa_synapse_in, w_DA_in),
SERO_ex: (sero_synapse_ex, w_SERO_ex),
SERO_in: (sero_synapse_in, w_SERO_in),
}
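# Illustrative use of the dictionary above (a hedged sketch: it assumes the
# synapse models named in keys.py were registered with NEST beforehand, e.g.
# via nest.CopyModel, and that `pre` and `post` are existing neuron handles):
#
#     model, weight = synapses[Glu]
#     nest.Connect(pre, post, syn_spec={'model': model, 'weight': weight})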
# Parameters for generator
static_syn = {
'weight': w_Glu * 5,
'delay': pg_delay
}
# Device parameters
multimeter_param = {'to_memory': True,
'to_file': False,
'withtime': True,
'interval': 0.1,
'record_from': ['V_m'],
'withgid': True}
detector_param = {'label': 'spikes',
'withtime': True,
'withgid': True,
'to_file': False,
'to_memory': True,
'scientific': True}
| gpl-2.0 | -8,908,638,224,511,350,000 | 38.021053 | 118 | 0.488535 | false |
Weihonghao/ECM | Vpy34/lib/python3.5/site-packages/theano/ifelse.py | 1 | 30209 | """
IfElse introduces lazy evaluation in Theano (coupled with the CVM/VM
linkers). It resembles the if clause of any programming language, that
has a `then` and `else` branch, and executes either one or the other
according to the condition provided.
This op differs from the already existent `switch` op, that evaluates both
branches of the clause and afterwards picks (according to the condition)
which value to report. Note also that `switch` is an elemwise operation (so
it picks each entry of a matrix according to the condition) while `ifelse`
is a global operation with a scalar condition.
"""
from __future__ import absolute_import, print_function, division
from copy import deepcopy
from theano.compat import izip
import logging
import numpy
import theano.tensor
from theano.tensor import TensorType
from theano import gof
from theano.gof import Op, Apply
from six import iteritems
from six.moves import xrange
from theano.compile import optdb
from theano.tensor import opt
from theano.scan_module.scan_utils import find_up
from theano.scan_module.scan_utils import clone
__docformat__ = 'restructuredtext en'
__authors__ = ("Razvan Pascanu "
"James Bergstra "
"Dumitru Erhan "
"David Warde-Farley")
__copyright__ = "(c) 2010, Universite de Montreal"
__contact__ = "Razvan Pascanu <r.pascanu@gmail>"
_logger = logging.getLogger('theano.ifelse')
class IfElse(Op):
"""
Op that provides conditional graph evaluation if used with the CVM/VM
    linkers. Note that there exists a helper function `ifelse` that should
be used to instantiate the op!
According to a scalar condition `condition` the op evaluates and then
returns all the tensors provided on the `then` branch, otherwise it
evaluates and returns the tensors provided on the `else` branch. The op
supports multiple tensors on each branch, with the condition that the same
number of tensors are on the `then` as on the `else` and there is a one
to one correspondence between them (shape and dtype wise).
The `then` branch is defined as the first N tensors (after the
condition), while the `else` branch is defined as the last N tensors.
Example usage:
``rval = ifelse(condition, rval_if_true1, .., rval_if_trueN,
rval_if_false1, rval_if_false2, .., rval_if_falseN)``
:note:
Other Linkers then CVM and VM are INCOMPATIBLE with this Op, and
will ignore its lazy characteristic, computing both the True and
False branch before picking one.
"""
def __init__(self, n_outs, as_view=False, gpu=False, name=None):
if as_view:
# check destroyhandler and others to ensure that a view_map with
# multiple inputs can work
view_map = {}
for idx in xrange(n_outs):
view_map[idx] = [idx + 1]
self.view_map = view_map
self.as_view = as_view
self.gpu = gpu
self.n_outs = n_outs
self.name = name
def __eq__(self, other):
if not type(self) == type(other):
return False
if not self.as_view == other.as_view:
return False
if not self.gpu == other.gpu:
return False
if not self.n_outs == other.n_outs:
return False
return True
def __hash__(self):
rval = (hash(type(self)) ^
hash(self.as_view) ^
hash(self.gpu) ^
hash(self.n_outs))
return rval
def __str__(self):
args = []
if self.name is not None:
args.append(self.name)
if self.as_view:
args.append('inplace')
if self.gpu:
args.append('gpu')
return 'if{%s}' % ','.join(args)
def infer_shape(self, node, inputs_shapes):
# By construction, corresponding then/else pairs have the same number
# of dimensions
ts_shapes = inputs_shapes[1:][:self.n_outs]
fs_shapes = inputs_shapes[1:][self.n_outs:]
# All elements of all shape tuples for the true and false outputs are
# unpacked into the inputs of a separate ifelse, and then the outputs
# of that ifelse are packed back into shape tuples.
new_ts_inputs = []
for ts_shape in ts_shapes:
if isinstance(ts_shape, (list, tuple)):
new_ts_inputs += list(ts_shape)
else:
# It can be None for generic objects
return [None] * self.n_outs
new_fs_inputs = []
for fs_shape in fs_shapes:
if isinstance(fs_shape, (list, tuple)):
new_fs_inputs += list(fs_shape)
else:
# It can be None for generic objects
return [None] * self.n_outs
assert len(new_ts_inputs) == len(new_fs_inputs)
if len(new_ts_inputs + new_fs_inputs) > 0:
name_tokens = ['shape']
if self.name is not None:
name_tokens.append(self.name)
new_ifelse = IfElse(
n_outs=len(new_ts_inputs),
as_view=False,
gpu=False,
name='_'.join(name_tokens))
new_outs = new_ifelse(node.inputs[0],
*(new_ts_inputs + new_fs_inputs),
**dict(return_list=True))
else:
new_outs = []
# generate pairs of shapes
out_shapes = []
for out in node.outputs:
out_shapes.append(tuple(new_outs[:out.ndim]))
new_outs = new_outs[out.ndim:]
# new_outs should be an empty list after last iteration
assert len(new_outs) == 0
return out_shapes
def make_node(self, c, *args):
assert len(args) == 2 * self.n_outs, (
"Wrong number of arguments to make_node: "
"expected %d, got %d" % (2 * self.n_outs, len(args))
)
c = theano.tensor.as_tensor_variable(c)
if not self.gpu:
# When gpu is true, we are given only cuda ndarrays, and we want
# to keep them be cuda ndarrays
nw_args = []
for x in args:
if hasattr(x, '_as_TensorVariable'):
nw_args.append(x._as_TensorVariable())
elif isinstance(x, theano.Variable):
nw_args.append(x)
else:
nw_args.append(theano.tensor.as_tensor_variable(x))
args = nw_args
ts = args[:self.n_outs]
fs = args[self.n_outs:]
for t, f in izip(ts, fs):
if t.type != f.type:
raise TypeError(('IfElse requires same types for true and '
'false return values'), t, f, t.type, f.type)
if c.ndim > 0:
raise TypeError(('Condition given to the op has to be a scalar '
'with 0 standing for False, anything else '
'for True'))
return Apply(self, [c] + list(args), [t.type() for t in ts])
def R_op(self, inputs, eval_points):
return self(inputs[0], *eval_points[1:], **dict(return_list=True))
def grad(self, ins, grads):
ts = ins[1:][:self.n_outs]
fs = ins[1:][self.n_outs:]
if self.name is not None:
nw_name_t = self.name + '_grad_t'
nw_name_f = self.name + '_grad_f'
else:
nw_name_t = None
nw_name_f = None
if_true_op = IfElse(n_outs=self.n_outs,
as_view=self.as_view,
gpu=self.gpu,
name=nw_name_t)
if_false_op = IfElse(n_outs=self.n_outs,
as_view=self.as_view,
gpu=self.gpu,
name=nw_name_f)
# The grads can have a different dtype then the inputs.
# As inputs true/false pair must have the same dtype,
# we must cast the zeros to the corresponding grad dtype
# and not the input dtype.
if_true = ([ins[0]] +
grads +
[theano.tensor.zeros_like(t, dtype=grads[i].dtype)
for i, t in enumerate(ts)])
if_false = ([ins[0]] +
[theano.tensor.zeros_like(f, dtype=grads[i].dtype)
for i, f in enumerate(fs)] +
grads)
condition = ins[0]
# condition does affect the elements of the output so it is connected.
# For the sake of making the gradient convenient we assume that
# condition + epsilon always triggers the same branch as condition
condition_grad = condition.zeros_like().astype(theano.config.floatX)
return ([condition_grad] +
if_true_op(*if_true, **dict(return_list=True)) +
if_false_op(*if_false, **dict(return_list=True)))
def make_thunk(self, node, storage_map, compute_map, no_recycling, impl=None):
cond = node.inputs[0]
ts = node.inputs[1:][:self.n_outs]
fs = node.inputs[1:][self.n_outs:]
outputs = node.outputs
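        # The thunk below implements the CVM/VM lazy-evaluation protocol:
        # instead of computing its outputs unconditionally, it returns a list
        # of indices into node.inputs that must be computed first ([0] for the
        # condition, then the indices of whichever branch was selected), and
        # an empty list once all outputs have been written to storage_map.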
def thunk():
if not compute_map[cond][0]:
return [0]
else:
truthval = storage_map[cond][0]
if truthval != 0:
ls = [idx + 1 for idx in xrange(self.n_outs)
if not compute_map[ts[idx]][0]]
if len(ls) > 0:
return ls
else:
for out, t in izip(outputs, ts):
compute_map[out][0] = 1
val = storage_map[t][0]
if self.as_view:
storage_map[out][0] = val
# Work around broken numpy deepcopy
elif type(val) in (numpy.ndarray, numpy.memmap):
storage_map[out][0] = val.copy()
else:
storage_map[out][0] = deepcopy(val)
return []
else:
ls = [1 + idx + self.n_outs for idx in xrange(self.n_outs)
if not compute_map[fs[idx]][0]]
if len(ls) > 0:
return ls
else:
for out, f in izip(outputs, fs):
compute_map[out][0] = 1
# can't view both outputs unless destroyhandler
# improves
# Work around broken numpy deepcopy
val = storage_map[f][0]
if type(val) in (numpy.ndarray, numpy.memmap):
storage_map[out][0] = val.copy()
else:
storage_map[out][0] = deepcopy(val)
return []
thunk.lazy = True
thunk.inputs = [storage_map[v] for v in node.inputs]
thunk.outputs = [storage_map[v] for v in node.outputs]
return thunk
def ifelse(condition, then_branch, else_branch, name=None):
"""
This function corresponds to an if statement, returning (and evaluating)
inputs in the ``then_branch`` if ``condition`` evaluates to True or
    inputs in the ``else_branch`` if ``condition`` evaluates to False.
:type condition: scalar like
:param condition:
``condition`` should be a tensor scalar representing the condition.
If it evaluates to 0 it corresponds to False, anything else stands
for True.
:type then_branch: list of theano expressions/ theano expression
:param then_branch:
A single theano variable or a list of theano variables that the
function should return as the output if ``condition`` evaluates to
true. The number of variables should match those in the
        ``else_branch``, and there should be a one-to-one correspondence
(type wise) with the tensors provided in the else branch
:type else_branch: list of theano expressions/ theano expressions
:param else_branch:
A single theano variable or a list of theano variables that the
function should return as the output if ``condition`` evaluates to
false. The number of variables should match those in the then branch,
        and there should be a one-to-one correspondence (type wise) with the
tensors provided in the then branch.
:return:
A list of theano variables or a single variable (depending on the
nature of the ``then_branch`` and ``else_branch``). More exactly if
``then_branch`` and ``else_branch`` is a tensor, then
the return variable will be just a single variable, otherwise a
        list. The values returned correspond either to the values in the
``then_branch`` or in the ``else_branch`` depending on the value of
``cond``.
"""
rval_type = None
if type(then_branch) is list:
rval_type = list
elif type(then_branch) is tuple:
rval_type = tuple
if type(then_branch) not in (list, tuple):
then_branch = [then_branch]
if type(else_branch) not in (list, tuple):
else_branch = [else_branch]
# Some of the elements might be converted into another type,
# we will store them in these new_... lists.
new_then_branch = []
new_else_branch = []
for then_branch_elem, else_branch_elem in izip(then_branch, else_branch):
if not isinstance(then_branch_elem, theano.Variable):
then_branch_elem = theano.tensor.as_tensor_variable(
then_branch_elem)
if not isinstance(else_branch_elem, theano.Variable):
else_branch_elem = theano.tensor.as_tensor_variable(
else_branch_elem)
if then_branch_elem.type != else_branch_elem.type:
# If one of them is a TensorType, and the other one can be
# converted into one, then we try to do that.
# This case happens when one of the elements has a GPU type,
# for instance a shared variable that was silently moved to GPU.
if (isinstance(then_branch_elem.type, TensorType) and not
isinstance(else_branch_elem.type, TensorType)):
else_branch_elem = then_branch_elem.type.filter_variable(
else_branch_elem)
elif (isinstance(else_branch_elem.type, TensorType) and not
isinstance(then_branch_elem.type, TensorType)):
then_branch_elem = else_branch_elem.type.filter_variable(
then_branch_elem)
if then_branch_elem.type != else_branch_elem.type:
# If the types still don't match, there is a problem.
raise TypeError(
'The two branches should have identical types, but '
'they are %s and %s respectively. This error could be '
'raised if for example you provided a one element '
'list on the `then` branch but a tensor on the `else` '
'branch.' %
(then_branch_elem.type, else_branch_elem.type))
new_then_branch.append(then_branch_elem)
new_else_branch.append(else_branch_elem)
    if len(then_branch) != len(else_branch):
        raise ValueError('The number of values on the `then` branch should '
                         'match the number of values on the `else` branch '
                         '(`then` has %d values, `else` has %d values)' %
                         (len(then_branch), len(else_branch)))
new_ifelse = IfElse(n_outs=len(then_branch),
as_view=False,
gpu=False,
name=name)
ins = [condition] + list(new_then_branch) + list(new_else_branch)
rval = new_ifelse(*ins, **dict(return_list=True))
if rval_type is None:
return rval[0]
elif rval_type is list:
return list(rval)
else:
return tuple(rval)
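# A minimal usage sketch for ifelse (illustrative):
#
#     import theano
#     import theano.tensor as T
#     from theano.ifelse import ifelse
#
#     a, b = T.scalars('a', 'b')
#     x, y = T.matrices('x', 'y')
#     z = ifelse(T.lt(a, b), T.mean(x), T.max(y))
#     f = theano.function([a, b, x, y], z)
#
# With the CVM/VM linkers only the branch selected by `a < b` is evaluated.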
@gof.local_optimizer([IfElse])
def cond_make_inplace(node):
op = node.op
if (isinstance(op, IfElse) and
not op.as_view and
# For big graph, do not make inplace scalar to speed up
# optimization.
(len(node.fgraph.apply_nodes) < 500 or
not all([getattr(o.type, 'ndim', -1) == 0
for o in node.outputs]))):
return IfElse(n_outs=op.n_outs,
as_view=True,
gpu=op.gpu,
name=op.name)(*node.inputs, **dict(return_list=True))
return False
optdb.register('cond_make_inplace', opt.in2out(cond_make_inplace,
ignore_newtrees=True), 95, 'fast_run', 'inplace')
# XXX: Optimizations commented pending further debugging (certain optimizations
# make computation less lazy than it should be currently).
#
# ifelse_equilibrium = gof.EquilibriumDB()
# ifelse_seqopt = gof.SequenceDB()
# ifelse_equilibrium.register('seq_ifelse', ifelse_seqopt, 'fast_run',
# 'ifelse')
''' Comments:
I wrote these comments to explain how the optimization of the ifelse
function works (for future developers that need to parse this part of the
code). Please try to keep these comments in sync with whatever changes you
add to the code.
ifelse optimizations are registered before canonicalize!
The optimizations are called in sequence as follows:
* equilibrium shell (runs until no change):
* ifelse_lift
* ifelse_merge_ifs
* ifelse_merge_nodes
* ifelse_remove_identical_inside
* ifelse_sameCondTrue_inside
* ifelse_sameCondFalse_inside
* merge_nodes_1
* ifelse_sameCondTrue
* ifelse_sameCondFalse
* ifelse_removeIdentical
where each of the optimizations does the following:
`ifelse_lift` (def cond_lift_single_if):
'''
# optdb.register('ifelse_equilibriumOpt', ifelse_equilibrium, .5, 'fast_run',
# 'ifelse')
acceptable_ops = (theano.tensor.basic.Dot,
theano.tensor.basic.Reshape,
theano.tensor.basic.Shape,
theano.tensor.SpecifyShape,
theano.tensor.basic.MaxAndArgmax,
theano.tensor.Subtensor,
theano.tensor.IncSubtensor,
theano.tensor.basic.Rebroadcast,
theano.tensor.basic.Alloc,
theano.tensor.elemwise.Elemwise,
theano.tensor.elemwise.DimShuffle)
@gof.local_optimizer(acceptable_ops)
def ifelse_lift_single_if_through_acceptable_ops(main_node):
"""This optimization lifts up certain ifelse instances.
op(ifelse(c, x, y)) -> ifelse(c, op(x), op(y))
    if `op` is in the `acceptable_ops` list, there is no other if as
    input to that specific `op`, and the if has no other clients.
"""
if not (isinstance(main_node.op, acceptable_ops)):
return False
all_inp_nodes = set()
for inp in main_node.inputs:
all_inp_nodes.add(inp.owner)
ifnodes = [x for x in list(all_inp_nodes)
if x and isinstance(x.op, IfElse)]
# if we have multiple ifs as inputs .. it all becomes quite complicated
# :)
if len(ifnodes) != 1:
return False
node = ifnodes[0]
op = node.op
ts = node.inputs[1:][:op.n_outs]
fs = node.inputs[1:][op.n_outs:]
# outs = main_node.outputs
mop = main_node.op
true_ins = []
false_ins = []
for x in main_node.inputs:
if x in node.outputs:
idx = node.outputs.index(x)
true_ins.append(ts[idx])
false_ins.append(fs[idx])
else:
true_ins.append(x)
false_ins.append(x)
true_eval = mop(*true_ins, **dict(return_list=True))
false_eval = mop(*false_ins, **dict(return_list=True))
# true_eval = clone(outs, replace = dict(zip(node.outputs, ts)))
# false_eval = clone(outs, replace = dict(zip(node.outputs, fs)))
nw_outs = ifelse(node.inputs[0], true_eval, false_eval, return_list=True)
return nw_outs
@gof.local_optimizer([IfElse])
def cond_merge_ifs_true(node):
op = node.op
if not isinstance(op, IfElse):
return False
t_ins = node.inputs[1:][:op.n_outs]
replace = {}
for idx, tval in enumerate(t_ins):
if (tval.owner and isinstance(tval.owner.op, IfElse) and
tval.owner.inputs[0] == node.inputs[0]):
ins_op = tval.owner.op
ins_t = tval.owner.inputs[1:][:ins_op.n_outs]
replace[idx + 1] = ins_t[tval.owner.outputs.index(tval)]
if len(replace) == 0:
return False
old_ins = list(node.inputs)
for pos, var in iteritems(replace):
old_ins[pos] = var
return op(*old_ins, **dict(return_list=True))
@gof.local_optimizer([IfElse])
def cond_merge_ifs_false(node):
op = node.op
if not isinstance(op, IfElse):
return False
f_ins = node.inputs[1:][op.n_outs:]
replace = {}
for idx, fval in enumerate(f_ins):
if (fval.owner and isinstance(fval.owner.op, IfElse) and
fval.owner.inputs[0] == node.inputs[0]):
ins_op = fval.owner.op
ins_t = fval.owner.inputs[1:][ins_op.n_outs:]
replace[idx + 1 + op.n_outs] = \
ins_t[fval.owner.outputs.index(fval)]
if len(replace) == 0:
return False
old_ins = list(node.inputs)
for pos, var in iteritems(replace):
old_ins[pos] = var
return op(*old_ins, **dict(return_list=True))
class CondMerge(gof.Optimizer):
""" Graph Optimizer that merges different cond ops """
def add_requirements(self, fgraph):
fgraph.add_feature(gof.toolbox.ReplaceValidate())
def apply(self, fgraph):
nodelist = list(fgraph.toposort())
cond_nodes = [s for s in nodelist if isinstance(s.op, IfElse)]
if len(cond_nodes) < 2:
return False
merging_node = cond_nodes[0]
for proposal in cond_nodes[1:]:
if (proposal.inputs[0] == merging_node.inputs[0] and
not find_up(proposal, merging_node)):
# Create a list of replacements for proposal
mn_ts = merging_node.inputs[1:][:merging_node.op.n_outs]
mn_fs = merging_node.inputs[1:][merging_node.op.n_outs:]
pl_ts = proposal.inputs[1:][:proposal.op.n_outs]
pl_fs = proposal.inputs[1:][proposal.op.n_outs:]
new_ins = ([merging_node.inputs[0]] +
mn_ts + pl_ts + mn_fs + pl_fs)
mn_name = '?'
if merging_node.op.name:
mn_name = merging_node.op.name
pl_name = '?'
# mn_n_ts = len(mn_ts)
# mn_n_fs = len(mn_fs)
if proposal.op.name:
pl_name = proposal.op.name
new_ifelse = IfElse(
n_outs=len(mn_ts + pl_ts),
as_view=False,
gpu=False,
name=mn_name + '&' + pl_name)
new_outs = new_ifelse(*new_ins, **dict(return_list=True))
new_outs = [clone(x) for x in new_outs]
old_outs = []
if type(merging_node.outputs) not in (list, tuple):
old_outs += [merging_node.outputs]
else:
old_outs += merging_node.outputs
if type(proposal.outputs) not in (list, tuple):
old_outs += [proposal.outputs]
else:
old_outs += proposal.outputs
pairs = list(zip(old_outs, new_outs))
fgraph.replace_all_validate(pairs, reason='cond_merge')
@gof.local_optimizer([IfElse])
def cond_remove_identical(node):
op = node.op
if not isinstance(op, IfElse):
return False
ts = node.inputs[1:][:op.n_outs]
fs = node.inputs[1:][op.n_outs:]
# sync outs
out_map = {}
for idx in xrange(len(node.outputs)):
if idx not in out_map:
for jdx in xrange(idx + 1, len(node.outputs)):
if (ts[idx] == ts[jdx] and
fs[idx] == fs[jdx] and
jdx not in out_map):
out_map[jdx] = idx
if len(out_map) == 0:
return False
nw_ts = []
nw_fs = []
inv_map = {}
pos = 0
for idx in xrange(len(node.outputs)):
if idx not in out_map:
inv_map[idx] = pos
pos = pos + 1
nw_ts.append(ts[idx])
nw_fs.append(fs[idx])
new_ifelse = IfElse(n_outs=len(nw_ts),
as_view=op.as_view,
gpu=op.gpu,
name=op.name)
new_ins = [node.inputs[0]] + nw_ts + nw_fs
new_outs = new_ifelse(*new_ins, **dict(return_list=True))
rval = []
for idx in xrange(len(node.outputs)):
if idx in out_map:
rval += [new_outs[inv_map[out_map[idx]]]]
else:
rval += [new_outs[inv_map[idx]]]
return rval
@gof.local_optimizer([IfElse])
def cond_merge_random_op(main_node):
if isinstance(main_node.op, IfElse):
return False
all_inp_nodes = set()
for inp in main_node.inputs:
all_inp_nodes.add(inp.owner)
cond_nodes = [x for x in list(all_inp_nodes)
if x and isinstance(x.op, IfElse)]
if len(cond_nodes) < 2:
return False
merging_node = cond_nodes[0]
for proposal in cond_nodes[1:]:
if (proposal.inputs[0] == merging_node.inputs[0] and
not find_up(proposal, merging_node) and
not find_up(merging_node, proposal)):
# Create a list of replacements for proposal
mn_ts = merging_node.inputs[1:][:merging_node.op.n_outs]
mn_fs = merging_node.inputs[1:][merging_node.op.n_outs:]
pl_ts = proposal.inputs[1:][:proposal.op.n_outs]
pl_fs = proposal.inputs[1:][proposal.op.n_outs:]
new_ins = ([merging_node.inputs[0]] +
mn_ts + pl_ts + mn_fs + pl_fs)
mn_name = '?'
if merging_node.op.name:
mn_name = merging_node.op.name
pl_name = '?'
# mn_n_ts = len(mn_ts)
# mn_n_fs = len(mn_fs)
if proposal.op.name:
pl_name = proposal.op.name
new_ifelse = IfElse(
n_outs=len(mn_ts + pl_ts),
as_view=False,
gpu=False,
name=mn_name + '&' + pl_name)
new_outs = new_ifelse(*new_ins, **dict(return_list=True))
old_outs = []
if type(merging_node.outputs) not in (list, tuple):
old_outs += [merging_node.outputs]
else:
old_outs += merging_node.outputs
if type(proposal.outputs) not in (list, tuple):
old_outs += [proposal.outputs]
else:
old_outs += proposal.outputs
pairs = list(zip(old_outs, new_outs))
main_outs = clone(main_node.outputs, replace=pairs)
return main_outs
# XXX: Optimizations commented pending further debugging (certain optimizations
# make computation less lazy than it should be currently).
#
# pushout_equilibrium = gof.EquilibriumDB()
#
# XXX: This optimization doesn't seem to exist anymore?
# pushout_equilibrium.register("cond_lift_single_if",
# opt.in2out(cond_lift_single_if,
# ignore_newtrees=True),
# 'fast_run', 'ifelse')
#
# pushout_equilibrium.register("cond_merge_random_op",
# opt.in2out(cond_merge_random_op,
# ignore_newtrees=True),
# 'fast_run', 'ifelse')
#
#
# pushout_equilibrium.register("ifelse_merge",
# gof.MergeOptimizer(skip_const_merge=False),
# 'fast_run', 'ifelse')
#
# pushout_equilibrium.register("ifelse_remove_identical_inside",
# opt.in2out(cond_remove_identical,
# ignore_newtrees=True),
# 'fast_run', 'ifelse')
#
# pushout_equilibrium.register('ifelse_sameCondTrue_inside',
# opt.in2out(cond_merge_ifs_true,
# ignore_newtrees=True),
# 'fast_run', 'ifelse')
#
# pushout_equilibrium.register('ifelse_sameCondFalse_inside',
# opt.in2out(cond_merge_ifs_false,
# ignore_newtrees=True),
# 'fast_run', 'ifelse')
#
# ifelse_seqopt.register('ifelse_condPushOut_equilibrium',
# pushout_equilibrium,
# 1, 'fast_run', 'ifelse')
#
# ifelse_seqopt.register('merge_nodes_1',
# gof.MergeOptimizer(skip_const_merge=False),
# 2, 'fast_run', 'ifelse')
#
#
# ifelse_seqopt.register('ifelse_sameCondTrue',
# opt.in2out(cond_merge_ifs_true,
# ignore_newtrees=True),
# 3, 'fast_run', 'ifelse')
#
#
# ifelse_seqopt.register('ifelse_sameCondFalse',
# opt.in2out(cond_merge_ifs_false,
# ignore_newtrees=True),
# 4, 'fast_run', 'ifelse')
#
#
# ifelse_seqopt.register('ifelse_removeIdenetical',
# opt.in2out(cond_remove_identical,
# ignore_newtrees=True),
# 7, 'fast_run', 'ifelse')
| agpl-3.0 | -1,317,404,698,973,674,000 | 37.581098 | 82 | 0.543646 | false |
ReactiveX/RxPY | rx/core/operators/exclusive.py | 1 | 2032 | from typing import Callable
import rx
from rx.core import Observable
from rx.disposable import CompositeDisposable, SingleAssignmentDisposable
from rx.internal.utils import is_future
def _exclusive() -> Callable[[Observable], Observable]:
"""Performs a exclusive waiting for the first to finish before
subscribing to another observable. Observables that come in between
subscriptions will be dropped on the floor.
Returns:
        An exclusive observable containing only the results of the inner
        observables that were actually subscribed to.
"""
def exclusive(source: Observable) -> Observable:
def subscribe(observer, scheduler=None):
has_current = [False]
is_stopped = [False]
m = SingleAssignmentDisposable()
g = CompositeDisposable()
g.add(m)
def on_next(inner_source):
if not has_current[0]:
has_current[0] = True
inner_source = rx.from_future(inner_source) if is_future(inner_source) else inner_source
inner_subscription = SingleAssignmentDisposable()
g.add(inner_subscription)
def on_completed_inner():
g.remove(inner_subscription)
has_current[0] = False
if is_stopped[0] and len(g) == 1:
observer.on_completed()
inner_subscription.disposable = inner_source.subscribe_(
observer.on_next,
observer.on_error,
on_completed_inner,
scheduler
)
def on_completed():
is_stopped[0] = True
if not has_current[0] and len(g) == 1:
observer.on_completed()
m.disposable = source.subscribe_(on_next, observer.on_error, on_completed, scheduler)
return g
return Observable(subscribe)
return exclusive
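# Usage sketch (illustrative; assumes the operator is exposed as
# ``rx.operators.exclusive`` and that ``rx.subject.Subject`` is available,
# as in RxPY 3 -- check your version):
#
#     import rx
#     from rx import operators as ops
#     from rx.subject import Subject
#
#     outer = Subject()
#     outer.pipe(ops.exclusive()).subscribe(print)
#     outer.on_next(rx.timer(1.0))  # subscribed immediately
#     outer.on_next(rx.timer(1.0))  # dropped: first inner not yet complete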
| mit | -6,735,326,453,861,305,000 | 34.034483 | 108 | 0.557579 | false |
rmcgibbo/nebterpolator | nebterpolator/smoothing.py | 1 | 6493 | """Smoothing a 1d signal
"""
##############################################################################
# Imports
##############################################################################
# library imports
import numpy as np
from scipy.optimize import leastsq
from scipy.signal import lfilter, lfilter_zi, filtfilt, butter
##############################################################################
# Globals
##############################################################################
__all__ = ['polynomial_smooth', 'window_smooth', 'buttersworth_smooth']
##############################################################################
# Functions
##############################################################################
def polynomial_smooth(y, x=None, order=2, end_weight=1):
"""Smooth a dataset by fitting it to a polynomial
Parameters
----------
y : np.ndarray
The signal
x : np.ndarray, optional
The x coordinate of each point. If left unsupplied, we'll
take the x range to be just the ints 0 through len(y)-1
order : int
        The order of the polynomial
    end_weight : float, optional
        Relative weight given to the first and last data points in the
        least-squares fit.
Returns
-------
smoothed : np.ndarray
The value of the fitted polynomial at each point x
"""
if x is None:
x = np.arange(len(y))
weights = np.r_[end_weight, np.ones(len(x)-2), end_weight]
def func(p):
return (np.polyval(p, x) - y) * weights
# need 1 more for the constant, so that order 2 is quadratic
# (even though it's 3 params)
#popt, pcov = curve_fit(func, x, y, p0=np.ones(order+1), sigma=1.0/weights)
popt, covp, info, msg, ier = leastsq(func, x0=np.zeros(order+1),
full_output=True)
return np.polyval(popt, x)
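# Illustrative sketch (not part of the original API): fit a quadratic through
# a noisy parabola, weighting the endpoints more heavily. All names here are
# assumptions for demonstration only.
def _example_polynomial_smooth():
    x = np.linspace(-1, 1, 100)
    y = x ** 2 + 0.05 * np.random.randn(100)
    return polynomial_smooth(y, x=x, order=2, end_weight=10)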
def window_smooth(signal, window_len=11, window='hanning'):
"""Smooth the data using a window with requested size.
This method is based on the convolution of a scaled window with the signal.
The signal is prepared by introducing reflected copies of the signal
(with the window size) in both ends so that transient parts are minimized
in the begining and end part of the output signal.
    This code is copied from the scipy cookbook, with stylistic improvements.
http://www.scipy.org/Cookbook/SignalSmooth
Parameters
----------
signal : np.ndarray, ndim=1
The input signal
window_len: int
The dimension of the smoothing window; should be an odd integer
window: {'flat', 'hanning', 'hamming', 'bartlett', 'blackman'}
        Which type of window to use? 'flat' will produce a moving average
        smoothing.
Returns
-------
output : np.ndarray, ndim=1
The smoothed signal
"""
if signal.ndim != 1:
raise TypeError('I only smooth 1d arrays')
if signal.size < window_len:
raise ValueError("Input vector needs to be bigger than window size.")
if window_len % 2 != 1:
raise ValueError('window_len must be an odd integer')
if window_len < 3:
return signal
    if window not in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
        raise ValueError("Window must be one of 'flat', 'hanning', "
                         "'hamming', 'bartlett', 'blackman'")
# this does a mirroring padding
padded = np.r_[2*signal[0] - signal[window_len-1: 0: -1],
signal,
2*signal[-1] - signal[-2: -window_len-1: -1]]
if window == 'flat':
w = np.ones(window_len, 'd')
else:
w = getattr(np, window)(window_len)
output = np.convolve(w / w.sum(), padded, mode='valid')
    return output[(window_len // 2):-(window_len // 2)]
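# Illustrative sketch (not part of the original API): smooth a noisy sine
# wave with a Hanning window. window_len must be odd, per the checks above.
def _example_window_smooth():
    t = np.linspace(0, 4 * np.pi, 500)
    noisy = np.sin(t) + 0.2 * np.random.randn(500)
    return window_smooth(noisy, window_len=21, window='hanning')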
def buttersworth_smooth(signal, width=11, order=3):
"""Smooth the data using zero-delay buttersworth filter
    This code is copied from the scipy cookbook, with stylistic improvements.
http://www.scipy.org/Cookbook/FiltFilt
Parameters
----------
signal : np.ndarray, ndim=1
The input signal
width : float
        This acts very similarly to the window_len in the window smoother. In
        the implementation, the cutoff frequency of the low-pass filter is
        taken to be two over this width, so it's like "half the period" of
        the sinusoid where the filter starts to kick in.
order : int, optional
The order of the filter. A small odd number is recommended. Higher
order filters cutoff more quickly, but have worse numerical
properties.
Returns
-------
output : np.ndarray, ndim=1
The smoothed signal
"""
if width < 2.0:
return signal
# first pad the signal on the ends
pad = int(np.ceil((width + 1)/2)*2 - 1) # nearest odd integer
padded = np.r_[signal[pad - 1: 0: -1], signal, signal[-1: -pad: -1]]
#padded = np.r_[[signal[0]]*pad, signal, [signal[-1]]*pad]
b, a = butter(order, 2.0 / width)
# Apply the filter to the width. Use lfilter_zi to choose the
# initial condition of the filter.
zi = lfilter_zi(b, a)
z, _ = lfilter(b, a, padded, zi=zi*padded[0])
# Apply the filter again, to have a result filtered at an order
# the same as filtfilt.
z2, _ = lfilter(b, a, z, zi=zi*z[0])
# Use filtfilt to apply the filter.
output = filtfilt(b, a, padded)
return output[(pad-1): -(pad-1)]
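# Illustrative sketch (not part of the original API): low-pass filter a noisy
# random walk; ``width`` plays a role analogous to window_len above.
def _example_buttersworth_smooth():
    signal = np.cumsum(0.25 * np.random.randn(1000))
    return buttersworth_smooth(signal, width=31, order=3)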
def angular_smooth(signal, smoothing_func=buttersworth_smooth, **kwargs):
"""Smooth an signal which represents an angle by filtering its
sine and cosine components separately.
Parameters
----------
signal : np.ndarray, ndim=1
The input signal
smoothing_func : callable
A function that takes the signal as its first argument and smoothes
it.
All other parameters (**kwargs) will be passed through to smoothing_func.
Returns
-------
    smoothed_signal : np.ndarray, ndim=1
        The smoothed version of the signal.
"""
sin = smoothing_func(np.sin(signal), **kwargs)
cos = smoothing_func(np.cos(signal), **kwargs)
return np.arctan2(sin, cos)
def main():
"test code"
import matplotlib.pyplot as pp
N = 1000
sigma = 0.25
x = np.cumsum(sigma * np.random.randn(N))
y = np.cumsum(sigma * np.random.randn(N))
signal = np.arctan2(x, y)
pp.plot(signal)
    pp.plot(np.arctan2(buttersworth_smooth(np.sin(signal), width=21),
                       buttersworth_smooth(np.cos(signal), width=21)))
pp.show()
if __name__ == '__main__':
main()
| gpl-3.0 | 7,068,469,299,697,415,000 | 31.465 | 79 | 0.574003 | false |
DYFeng/pyaiml | aiml/Kernel.py | 1 | 46819 | # -*- coding: latin-1 -*-
"""This file contains the public interface to the aiml module."""
import AimlParser
import DefaultSubs
import Utils
from PatternMgr import PatternMgr
from WordSub import WordSub
from ConfigParser import ConfigParser
import copy
import glob
import os
import random
import re
import string
import sys
import time
import threading
import xml.sax
class Kernel:
# module constants
_globalSessionID = "_global" # key of the global session (duh)
_maxHistorySize = 10 # maximum length of the _inputs and _responses lists
_maxRecursionDepth = 100 # maximum number of recursive <srai>/<sr> tags before the response is aborted.
# special predicate keys
_inputHistory = "_inputHistory" # keys to a queue (list) of recent user input
_outputHistory = "_outputHistory" # keys to a queue (list) of recent responses.
_inputStack = "_inputStack" # Should always be empty in between calls to respond()
def __init__(self):
self._verboseMode = True
self._version = "PyAIML 0.8.6"
self._brain = PatternMgr()
self._respondLock = threading.RLock()
self._textEncoding = "utf-8"
# set up the sessions
self._sessions = {}
self._addSession(self._globalSessionID)
# Set up the bot predicates
self._botPredicates = {}
self.setBotPredicate("name", "Nameless")
# set up the word substitutors (subbers):
self._subbers = {}
self._subbers['gender'] = WordSub(DefaultSubs.defaultGender)
self._subbers['person'] = WordSub(DefaultSubs.defaultPerson)
self._subbers['person2'] = WordSub(DefaultSubs.defaultPerson2)
self._subbers['normal'] = WordSub(DefaultSubs.defaultNormal)
# set up the element processors
self._elementProcessors = {
"bot": self._processBot,
"condition": self._processCondition,
"date": self._processDate,
"formal": self._processFormal,
"gender": self._processGender,
"get": self._processGet,
"gossip": self._processGossip,
"id": self._processId,
"input": self._processInput,
"javascript": self._processJavascript,
"learn": self._processLearn,
"li": self._processLi,
"lowercase": self._processLowercase,
"person": self._processPerson,
"person2": self._processPerson2,
"random": self._processRandom,
"text": self._processText,
"sentence": self._processSentence,
"set": self._processSet,
"size": self._processSize,
"sr": self._processSr,
"srai": self._processSrai,
"star": self._processStar,
"system": self._processSystem,
"template": self._processTemplate,
"that": self._processThat,
"thatstar": self._processThatstar,
"think": self._processThink,
"topicstar": self._processTopicstar,
"uppercase": self._processUppercase,
"version": self._processVersion,
}
def bootstrap(self, brainFile = None, learnFiles = [], commands = []):
"""Prepare a Kernel object for use.
If a brainFile argument is provided, the Kernel attempts to
load the brain at the specified filename.
If learnFiles is provided, the Kernel attempts to load the
specified AIML files.
Finally, each of the input strings in the commands list is
passed to respond().
"""
start = time.clock()
if brainFile:
self.loadBrain(brainFile)
# learnFiles might be a string, in which case it should be
# turned into a single-element list.
learns = learnFiles
try: learns = [ learnFiles + "" ]
except: pass
for file in learns:
self.learn(file)
# ditto for commands
cmds = commands
try: cmds = [ commands + "" ]
except: pass
for cmd in cmds:
print self._respond(cmd, self._globalSessionID)
if self._verboseMode:
print "Kernel bootstrap completed in %.2f seconds" % (time.clock() - start)
def verbose(self, isVerbose = True):
"""Enable/disable verbose output mode."""
self._verboseMode = isVerbose
def version(self):
"""Return the Kernel's version string."""
return self._version
def numCategories(self):
"""Return the number of categories the Kernel has learned."""
# there's a one-to-one mapping between templates and categories
return self._brain.numTemplates()
def resetBrain(self):
"""Reset the brain to its initial state.
        This is essentially equivalent to:
del(kern)
kern = aiml.Kernel()
"""
del(self._brain)
self.__init__()
def loadBrain(self, filename):
"""Attempt to load a previously-saved 'brain' from the
specified filename.
NOTE: the current contents of the 'brain' will be discarded!
"""
if self._verboseMode: print "Loading brain from %s..." % filename,
start = time.clock()
self._brain.restore(filename)
if self._verboseMode:
end = time.clock() - start
print "done (%d categories in %.2f seconds)" % (self._brain.numTemplates(), end)
def saveBrain(self, filename):
"""Dump the contents of the bot's brain to a file on disk."""
if self._verboseMode: print "Saving brain to %s..." % filename,
start = time.clock()
self._brain.save(filename)
if self._verboseMode:
print "done (%.2f seconds)" % (time.clock() - start)
def getPredicate(self, name, sessionID = _globalSessionID):
"""Retrieve the current value of the predicate 'name' from the
specified session.
If name is not a valid predicate in the session, the empty
string is returned.
"""
try: return self._sessions[sessionID][name]
except KeyError: return ""
def setPredicate(self, name, value, sessionID = _globalSessionID):
"""Set the value of the predicate 'name' in the specified
session.
If sessionID is not a valid session, it will be created. If
name is not a valid predicate in the session, it will be
created.
"""
self._addSession(sessionID) # add the session, if it doesn't already exist.
self._sessions[sessionID][name] = value
def getBotPredicate(self, name):
"""Retrieve the value of the specified bot predicate.
If name is not a valid bot predicate, the empty string is returned.
"""
try: return self._botPredicates[name]
except KeyError: return ""
def setBotPredicate(self, name, value):
"""Set the value of the specified bot predicate.
If name is not a valid bot predicate, it will be created.
"""
self._botPredicates[name] = value
# Clumsy hack: if updating the bot name, we must update the
# name in the brain as well
if name == "name":
self._brain.setBotName(self.getBotPredicate("name"))
def setTextEncoding(self, encoding):
"""Set the text encoding used when loading AIML files (Latin-1, UTF-8, etc.)."""
self._textEncoding = encoding
def loadSubs(self, filename):
"""Load a substitutions file.
The file must be in the Windows-style INI format (see the
standard ConfigParser module docs for information on this
format). Each section of the file is loaded into its own
substituter.
"""
inFile = file(filename)
parser = ConfigParser()
parser.readfp(inFile, filename)
inFile.close()
for s in parser.sections():
# Add a new WordSub instance for this section. If one already
# exists, delete it.
if self._subbers.has_key(s):
del(self._subbers[s])
self._subbers[s] = WordSub()
# iterate over the key,value pairs and add them to the subber
for k,v in parser.items(s):
self._subbers[s][k] = v
def _addSession(self, sessionID):
"""Create a new session with the specified ID string."""
if self._sessions.has_key(sessionID):
return
# Create the session.
self._sessions[sessionID] = {
# Initialize the special reserved predicates
self._inputHistory: [],
self._outputHistory: [],
self._inputStack: []
}
def _deleteSession(self, sessionID):
"""Delete the specified session."""
if self._sessions.has_key(sessionID):
self._sessions.pop(sessionID)
def getSessionData(self, sessionID = None):
"""Return a copy of the session data dictionary for the
specified session.
If no sessionID is specified, return a dictionary containing
*all* of the individual session dictionaries.
"""
s = None
if sessionID is not None:
try: s = self._sessions[sessionID]
except KeyError: s = {}
else:
s = self._sessions
return copy.deepcopy(s)
def learn(self, filename):
"""Load and learn the contents of the specified AIML file.
If filename includes wildcard characters, all matching files
will be loaded and learned.
"""
for f in glob.glob(filename):
if self._verboseMode: print "Loading %s..." % f,
start = time.clock()
# Load and parse the AIML file.
parser = AimlParser.create_parser()
handler = parser.getContentHandler()
handler.setEncoding(self._textEncoding)
try: parser.parse(f)
except xml.sax.SAXParseException, msg:
err = "\nFATAL PARSE ERROR in file %s:\n%s\n" % (f,msg)
sys.stderr.write(err)
continue
# store the pattern/template pairs in the PatternMgr.
for key,tem in handler.categories.items():
self._brain.add(key,tem)
# Parsing was successful.
if self._verboseMode:
print "done (%.2f seconds)" % (time.clock() - start)
def respond(self, input, sessionID = _globalSessionID):
"""Return the Kernel's response to the input string."""
if len(input) == 0:
return ""
#ensure that input is a unicode string
try: input = input.decode(self._textEncoding, 'replace')
except UnicodeError: pass
except AttributeError: pass
# prevent other threads from stomping all over us.
self._respondLock.acquire()
# Add the session, if it doesn't already exist
self._addSession(sessionID)
# split the input into discrete sentences
sentences = Utils.sentences(input)
finalResponse = ""
for s in sentences:
# Add the input to the history list before fetching the
# response, so that <input/> tags work properly.
inputHistory = self.getPredicate(self._inputHistory, sessionID)
inputHistory.append(s)
while len(inputHistory) > self._maxHistorySize:
inputHistory.pop(0)
self.setPredicate(self._inputHistory, inputHistory, sessionID)
# Fetch the response
response = self._respond(s, sessionID)
# add the data from this exchange to the history lists
outputHistory = self.getPredicate(self._outputHistory, sessionID)
outputHistory.append(response)
while len(outputHistory) > self._maxHistorySize:
outputHistory.pop(0)
self.setPredicate(self._outputHistory, outputHistory, sessionID)
# append this response to the final response.
finalResponse += (response + " ")
finalResponse = finalResponse.strip()
assert(len(self.getPredicate(self._inputStack, sessionID)) == 0)
# release the lock and return
self._respondLock.release()
try: return finalResponse.encode(self._textEncoding)
except UnicodeError: return finalResponse
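    # Usage sketch (illustrative): separate conversations are kept apart by
    # passing a sessionID of your choosing; "alice" and "bob" are made-up
    # identifiers, and the AIML file is an assumption:
    #
    #     k = Kernel()
    #     k.learn("std-startup.xml")
    #     k.respond("My name is Alice", "alice")
    #     k.respond("My name is Bob", "bob")
    #     k.respond("What is my name?", "alice")  # uses Alice's session only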
# This version of _respond() just fetches the response for some input.
# It does not mess with the input and output histories. Recursive calls
# to respond() spawned from tags like <srai> should call this function
# instead of respond().
def _respond(self, input, sessionID):
"""Private version of respond(), does the real work."""
if len(input) == 0:
return ""
# guard against infinite recursion
inputStack = self.getPredicate(self._inputStack, sessionID)
if len(inputStack) > self._maxRecursionDepth:
if self._verboseMode:
err = "WARNING: maximum recursion depth exceeded (input='%s')" % input.encode(self._textEncoding, 'replace')
sys.stderr.write(err)
return ""
# push the input onto the input stack
inputStack = self.getPredicate(self._inputStack, sessionID)
inputStack.append(input)
self.setPredicate(self._inputStack, inputStack, sessionID)
# run the input through the 'normal' subber
subbedInput = self._subbers['normal'].sub(input)
# fetch the bot's previous response, to pass to the match()
# function as 'that'.
outputHistory = self.getPredicate(self._outputHistory, sessionID)
try: that = outputHistory[-1]
except IndexError: that = ""
subbedThat = self._subbers['normal'].sub(that)
# fetch the current topic
topic = self.getPredicate("topic", sessionID)
subbedTopic = self._subbers['normal'].sub(topic)
# Determine the final response.
response = ""
elem = self._brain.match(subbedInput, subbedThat, subbedTopic)
if elem is None:
if self._verboseMode:
err = "WARNING: No match found for input: %s\n" % input.encode(self._textEncoding)
sys.stderr.write(err)
else:
# Process the element into a response string.
response += self._processElement(elem, sessionID).strip()
response += " "
response = response.strip()
# pop the top entry off the input stack.
inputStack = self.getPredicate(self._inputStack, sessionID)
inputStack.pop()
self.setPredicate(self._inputStack, inputStack, sessionID)
return response
def _processElement(self,elem, sessionID):
"""Process an AIML element.
The first item of the elem list is the name of the element's
XML tag. The second item is a dictionary containing any
attributes passed to that tag, and their values. Any further
items in the list are the elements enclosed by the current
element's begin and end tags; they are handled by each
element's handler function.
"""
try:
handlerFunc = self._elementProcessors[elem[0]]
except:
# Oops -- there's no handler function for this element
# type!
if self._verboseMode:
err = "WARNING: No handler found for <%s> element\n" % elem[0].encode(self._textEncoding, 'replace')
sys.stderr.write(err)
return ""
return handlerFunc(elem, sessionID)
######################################################
### Individual element-processing functions follow ###
######################################################
# <bot>
def _processBot(self, elem, sessionID):
"""Process a <bot> AIML element.
Required element attributes:
name: The name of the bot predicate to retrieve.
<bot> elements are used to fetch the value of global,
read-only "bot predicates." These predicates cannot be set
from within AIML; you must use the setBotPredicate() function.
"""
attrName = elem[1]['name']
return self.getBotPredicate(attrName)
# <condition>
def _processCondition(self, elem, sessionID):
"""Process a <condition> AIML element.
Optional element attributes:
name: The name of a predicate to test.
value: The value to test the predicate for.
<condition> elements come in three flavors. Each has different
attributes, and each handles their contents differently.
The simplest case is when the <condition> tag has both a 'name'
and a 'value' attribute. In this case, if the predicate
'name' has the value 'value', then the contents of the element
are processed and returned.
If the <condition> element has only a 'name' attribute, then
its contents are a series of <li> elements, each of which has
a 'value' attribute. The list is scanned from top to bottom
until a match is found. Optionally, the last <li> element can
have no 'value' attribute, in which case it is processed and
returned if no other match is found.
If the <condition> element has neither a 'name' nor a 'value'
attribute, then it behaves almost exactly like the previous
case, except that each <li> subelement (except the optional
last entry) must now include both 'name' and 'value'
attributes.
"""
attr = None
response = ""
attr = elem[1]
# Case #1: test the value of a specific predicate for a
# specific value.
if attr.has_key('name') and attr.has_key('value'):
val = self.getPredicate(attr['name'], sessionID)
if val == attr['value']:
for e in elem[2:]:
response += self._processElement(e,sessionID)
return response
else:
# Case #2 and #3: Cycle through <li> contents, testing a
# name and value pair for each one.
try:
name = None
if attr.has_key('name'):
name = attr['name']
                # Get the list of <li> elements
listitems = []
for e in elem[2:]:
if e[0] == 'li':
listitems.append(e)
# if listitems is empty, return the empty string
if len(listitems) == 0:
return ""
# iterate through the list looking for a condition that
# matches.
foundMatch = False
for li in listitems:
try:
liAttr = li[1]
# if this is the last list item, it's allowed
# to have no attributes. We just skip it for now.
if len(liAttr.keys()) == 0 and li == listitems[-1]:
continue
# get the name of the predicate to test
liName = name
if liName == None:
liName = liAttr['name']
# get the value to check against
liValue = liAttr['value']
# do the test
if self.getPredicate(liName, sessionID) == liValue:
foundMatch = True
response += self._processElement(li,sessionID)
break
except:
# No attributes, no name/value attributes, no
# such predicate/session, or processing error.
if self._verboseMode: print "Something amiss -- skipping listitem", li
raise
if not foundMatch:
# Check the last element of listitems. If it has
# no 'name' or 'value' attribute, process it.
try:
li = listitems[-1]
liAttr = li[1]
if not (liAttr.has_key('name') or liAttr.has_key('value')):
response += self._processElement(li, sessionID)
except:
# listitems was empty, no attributes, missing
# name/value attributes, or processing error.
if self._verboseMode: print "error in default listitem"
raise
except:
# Some other catastrophic cataclysm
if self._verboseMode: print "catastrophic condition failure"
raise
return response
# <date>
def _processDate(self, elem, sessionID):
"""Process a <date> AIML element.
<date> elements resolve to the current date and time. The
AIML specification doesn't require any particular format for
this information, so I go with whatever's simplest.
"""
return time.asctime()
# <formal>
def _processFormal(self, elem, sessionID):
"""Process a <formal> AIML element.
<formal> elements process their contents recursively, and then
capitalize the first letter of each word of the result.
"""
response = ""
for e in elem[2:]:
response += self._processElement(e, sessionID)
return string.capwords(response)
# <gender>
def _processGender(self,elem, sessionID):
"""Process a <gender> AIML element.
<gender> elements process their contents, and then swap the
gender of any third-person singular pronouns in the result.
        This substitution is handled by the aiml.WordSub module.
"""
response = ""
for e in elem[2:]:
response += self._processElement(e, sessionID)
return self._subbers['gender'].sub(response)
# <get>
def _processGet(self, elem, sessionID):
"""Process a <get> AIML element.
Required element attributes:
name: The name of the predicate whose value should be
retrieved from the specified session and returned. If the
predicate doesn't exist, the empty string is returned.
<get> elements return the value of a predicate from the
specified session.
"""
return self.getPredicate(elem[1]['name'], sessionID)
# <gossip>
def _processGossip(self, elem, sessionID):
"""Process a <gossip> AIML element.
<gossip> elements are used to capture and store user input in
an implementation-defined manner, theoretically allowing the
bot to learn from the people it chats with. I haven't
        decided how to define my implementation, so right now
<gossip> behaves identically to <think>.
"""
return self._processThink(elem, sessionID)
# <id>
def _processId(self, elem, sessionID):
""" Process an <id> AIML element.
<id> elements return a unique "user id" for a specific
conversation. In PyAIML, the user id is the name of the
current session.
"""
return sessionID
# <input>
def _processInput(self, elem, sessionID):
"""Process an <input> AIML element.
Optional attribute elements:
index: The index of the element from the history list to
return. 1 means the most recent item, 2 means the one
before that, and so on.
<input> elements return an entry from the input history for
the current session.
"""
inputHistory = self.getPredicate(self._inputHistory, sessionID)
try: index = int(elem[1]['index'])
except: index = 1
try: return inputHistory[-index]
except IndexError:
if self._verboseMode:
err = "No such index %d while processing <input> element.\n" % index
sys.stderr.write(err)
return ""
# <javascript>
def _processJavascript(self, elem, sessionID):
"""Process a <javascript> AIML element.
<javascript> elements process their contents recursively, and
then run the results through a server-side Javascript
interpreter to compute the final response. Implementations
are not required to provide an actual Javascript interpreter,
        and right now PyAIML doesn't; <javascript> elements behave
exactly like <think> elements.
"""
return self._processThink(elem, sessionID)
# <learn>
def _processLearn(self, elem, sessionID):
"""Process a <learn> AIML element.
<learn> elements process their contents recursively, and then
treat the result as an AIML file to open and learn.
"""
filename = ""
for e in elem[2:]:
filename += self._processElement(e, sessionID)
self.learn(filename)
return ""
# <li>
def _processLi(self,elem, sessionID):
"""Process an <li> AIML element.
Optional attribute elements:
name: the name of a predicate to query.
value: the value to check that predicate for.
<li> elements process their contents recursively and return
the results. They can only appear inside <condition> and
<random> elements. See _processCondition() and
_processRandom() for details of their usage.
"""
response = ""
for e in elem[2:]:
response += self._processElement(e, sessionID)
return response
# <lowercase>
def _processLowercase(self,elem, sessionID):
"""Process a <lowercase> AIML element.
<lowercase> elements process their contents recursively, and
then convert the results to all-lowercase.
"""
response = ""
for e in elem[2:]:
response += self._processElement(e, sessionID)
return string.lower(response)
# <person>
def _processPerson(self,elem, sessionID):
"""Process a <person> AIML element.
<person> elements process their contents recursively, and then
convert all pronouns in the results from 1st person to 2nd
        person, and vice versa. This substitution is handled by the
aiml.WordSub module.
If the <person> tag is used atomically (e.g. <person/>), it is
a shortcut for <person><star/></person>.
"""
response = ""
for e in elem[2:]:
response += self._processElement(e, sessionID)
if len(elem[2:]) == 0: # atomic <person/> = <person><star/></person>
response = self._processElement(['star',{}], sessionID)
return self._subbers['person'].sub(response)
# <person2>
def _processPerson2(self,elem, sessionID):
"""Process a <person2> AIML element.
<person2> elements process their contents recursively, and then
convert all pronouns in the results from 1st person to 3rd
        person, and vice versa. This substitution is handled by the
aiml.WordSub module.
If the <person2> tag is used atomically (e.g. <person2/>), it is
a shortcut for <person2><star/></person2>.
"""
response = ""
for e in elem[2:]:
response += self._processElement(e, sessionID)
if len(elem[2:]) == 0: # atomic <person2/> = <person2><star/></person2>
response = self._processElement(['star',{}], sessionID)
return self._subbers['person2'].sub(response)
# <random>
def _processRandom(self, elem, sessionID):
"""Process a <random> AIML element.
<random> elements contain zero or more <li> elements. If
none, the empty string is returned. If one or more <li>
elements are present, one of them is selected randomly to be
processed recursively and have its results returned. Only the
chosen <li> element's contents are processed. Any non-<li> contents are
ignored.
"""
listitems = []
for e in elem[2:]:
if e[0] == 'li':
listitems.append(e)
if len(listitems) == 0:
return ""
# select and process a random listitem.
random.shuffle(listitems)
return self._processElement(listitems[0], sessionID)
# <sentence>
def _processSentence(self,elem, sessionID):
"""Process a <sentence> AIML element.
<sentence> elements process their contents recursively, and
then capitalize the first letter of the results.
"""
response = ""
for e in elem[2:]:
response += self._processElement(e, sessionID)
try:
response = response.strip()
words = string.split(response, " ", 1)
words[0] = string.capitalize(words[0])
response = string.join(words)
return response
except IndexError: # response was empty
return ""
# <set>
def _processSet(self, elem, sessionID):
"""Process a <set> AIML element.
Required element attributes:
name: The name of the predicate to set.
<set> elements process their contents recursively, and assign the results to a predicate
(given by their 'name' attribute) in the current session. The contents of the element
are also returned.
"""
value = ""
for e in elem[2:]:
value += self._processElement(e, sessionID)
self.setPredicate(elem[1]['name'], value, sessionID)
return value
# <size>
def _processSize(self,elem, sessionID):
"""Process a <size> AIML element.
<size> elements return the number of AIML categories currently
in the bot's brain.
"""
return str(self.numCategories())
# <sr>
def _processSr(self,elem,sessionID):
"""Process an <sr> AIML element.
<sr> elements are shortcuts for <srai><star/></srai>.
"""
star = self._processElement(['star',{}], sessionID)
response = self._respond(star, sessionID)
return response
# <srai>
def _processSrai(self,elem, sessionID):
"""Process a <srai> AIML element.
<srai> elements recursively process their contents, and then
pass the results right back into the AIML interpreter as a new
piece of input. The results of this new input string are
returned.
"""
newInput = ""
for e in elem[2:]:
newInput += self._processElement(e, sessionID)
return self._respond(newInput, sessionID)
# <star>
def _processStar(self, elem, sessionID):
"""Process a <star> AIML element.
Optional attribute elements:
index: Which "*" character in the current pattern should
be matched?
<star> elements return the text fragment matched by the "*"
character in the current input pattern. For example, if the
input "Hello Tom Smith, how are you?" matched the pattern
"HELLO * HOW ARE YOU", then a <star> element in the template
would evaluate to "Tom Smith".
"""
try: index = int(elem[1]['index'])
except KeyError: index = 1
# fetch the user's last input
inputStack = self.getPredicate(self._inputStack, sessionID)
input = self._subbers['normal'].sub(inputStack[-1])
# fetch the Kernel's last response (for 'that' context)
outputHistory = self.getPredicate(self._outputHistory, sessionID)
try: that = self._subbers['normal'].sub(outputHistory[-1])
except: that = "" # there might not be any output yet
topic = self.getPredicate("topic", sessionID)
response = self._brain.star("star", input, that, topic, index)
return response
# <system>
def _processSystem(self,elem, sessionID):
"""Process a <system> AIML element.
<system> elements process their contents recursively, and then
attempt to execute the results as a shell command on the
server. The AIML interpreter blocks until the command is
complete, and then returns the command's output.
For cross-platform compatibility, any file paths inside
<system> tags should use Unix-style forward slashes ("/") as a
directory separator.
"""
# build up the command string
command = ""
for e in elem[2:]:
command += self._processElement(e, sessionID)
# normalize the path to the command. Under Windows, this
# switches forward-slashes to back-slashes; all system
# elements should use unix-style paths for cross-platform
# compatibility.
#executable,args = command.split(" ", 1)
#executable = os.path.normpath(executable)
#command = executable + " " + args
command = os.path.normpath(command)
# execute the command.
response = ""
try:
out = os.popen(command)
except RuntimeError, msg:
if self._verboseMode:
err = "WARNING: RuntimeError while processing \"system\" element:\n%s\n" % msg.encode(self._textEncoding, 'replace')
sys.stderr.write(err)
return "There was an error while computing my response. Please inform my botmaster."
time.sleep(0.01) # I'm told this works around a potential IOError exception.
for line in out:
response += line + "\n"
response = string.join(response.splitlines()).strip()
return response
# <template>
def _processTemplate(self,elem, sessionID):
"""Process a <template> AIML element.
<template> elements recursively process their contents, and
return the results. <template> is the root node of any AIML
response tree.
"""
response = ""
for e in elem[2:]:
response += self._processElement(e, sessionID)
return response
# text
def _processText(self,elem, sessionID):
"""Process a raw text element.
Raw text elements aren't really AIML tags. Text elements cannot contain
other elements; instead, the third item of the 'elem' list is a text
string, which is immediately returned. They have a single attribute,
automatically inserted by the parser, which indicates whether whitespace
in the text should be preserved or not.
"""
try: elem[2] + ""
except TypeError: raise TypeError, "Text element contents are not text"
        # If the whitespace behavior for this element is "default",
# we reduce all stretches of >1 whitespace characters to a single
# space. To improve performance, we do this only once for each
# text element encountered, and save the results for the future.
if elem[1]["xml:space"] == "default":
elem[2] = re.sub("\s+", " ", elem[2])
elem[1]["xml:space"] = "preserve"
return elem[2]
# <that>
def _processThat(self,elem, sessionID):
"""Process a <that> AIML element.
Optional element attributes:
index: Specifies which element from the output history to
return. 1 is the most recent response, 2 is the next most
recent, and so on.
<that> elements (when they appear inside <template> elements)
        are the output equivalent of <input> elements; they return one
of the Kernel's previous responses.
"""
outputHistory = self.getPredicate(self._outputHistory, sessionID)
index = 1
try:
# According to the AIML spec, the optional index attribute
# can either have the form "x" or "x,y". x refers to how
# far back in the output history to go. y refers to which
# sentence of the specified response to return.
index = int(elem[1]['index'].split(',')[0])
except:
pass
try: return outputHistory[-index]
except IndexError:
if self._verboseMode:
err = "No such index %d while processing <that> element.\n" % index
sys.stderr.write(err)
return ""
# <thatstar>
def _processThatstar(self, elem, sessionID):
"""Process a <thatstar> AIML element.
Optional element attributes:
index: Specifies which "*" in the <that> pattern to match.
<thatstar> elements are similar to <star> elements, except
that where <star/> returns the portion of the input string
matched by a "*" character in the pattern, <thatstar/> returns
the portion of the previous input string that was matched by a
"*" in the current category's <that> pattern.
"""
try: index = int(elem[1]['index'])
except KeyError: index = 1
# fetch the user's last input
inputStack = self.getPredicate(self._inputStack, sessionID)
input = self._subbers['normal'].sub(inputStack[-1])
# fetch the Kernel's last response (for 'that' context)
outputHistory = self.getPredicate(self._outputHistory, sessionID)
try: that = self._subbers['normal'].sub(outputHistory[-1])
except: that = "" # there might not be any output yet
topic = self.getPredicate("topic", sessionID)
response = self._brain.star("thatstar", input, that, topic, index)
return response
# <think>
def _processThink(self,elem, sessionID):
"""Process a <think> AIML element.
<think> elements process their contents recursively, and then
discard the results and return the empty string. They're
useful for setting predicates and learning AIML files without
generating any output.
"""
for e in elem[2:]:
self._processElement(e, sessionID)
return ""
# <topicstar>
def _processTopicstar(self, elem, sessionID):
"""Process a <topicstar> AIML element.
Optional element attributes:
index: Specifies which "*" in the <topic> pattern to match.
<topicstar> elements are similar to <star> elements, except
that where <star/> returns the portion of the input string
matched by a "*" character in the pattern, <topicstar/>
returns the portion of current topic string that was matched
by a "*" in the current category's <topic> pattern.
"""
try: index = int(elem[1]['index'])
except KeyError: index = 1
# fetch the user's last input
inputStack = self.getPredicate(self._inputStack, sessionID)
input = self._subbers['normal'].sub(inputStack[-1])
# fetch the Kernel's last response (for 'that' context)
outputHistory = self.getPredicate(self._outputHistory, sessionID)
try: that = self._subbers['normal'].sub(outputHistory[-1])
except: that = "" # there might not be any output yet
topic = self.getPredicate("topic", sessionID)
response = self._brain.star("topicstar", input, that, topic, index)
return response
# <uppercase>
def _processUppercase(self,elem, sessionID):
"""Process an <uppercase> AIML element.
<uppercase> elements process their contents recursively, and
return the results with all lower-case characters converted to
upper-case.
"""
response = ""
for e in elem[2:]:
response += self._processElement(e, sessionID)
return string.upper(response)
# <version>
def _processVersion(self,elem, sessionID):
"""Process a <version> AIML element.
<version> elements return the version number of the AIML
interpreter.
"""
return self.version()
##################################################
### Self-test functions follow ###
##################################################
def _testTag(kern, tag, input, outputList):
"""Tests 'tag' by feeding the Kernel 'input'. If the result
matches any of the strings in 'outputList', the test passes.
"""
global _numTests, _numPassed
_numTests += 1
print "Testing <" + tag + ">:",
response = kern.respond(input).decode(kern._textEncoding)
if response in outputList:
print "PASSED"
_numPassed += 1
return True
else:
print "FAILED (response: '%s')" % response.encode(kern._textEncoding, 'replace')
return False
if __name__ == "__main__":
# Run some self-tests
k = Kernel()
k.bootstrap(learnFiles="self-test.aiml")
global _numTests, _numPassed
_numTests = 0
_numPassed = 0
_testTag(k, 'bot', 'test bot', ["My name is Nameless"])
k.setPredicate('gender', 'male')
_testTag(k, 'condition test #1', 'test condition name value', ['You are handsome'])
k.setPredicate('gender', 'female')
_testTag(k, 'condition test #2', 'test condition name value', [''])
_testTag(k, 'condition test #3', 'test condition name', ['You are beautiful'])
k.setPredicate('gender', 'robot')
_testTag(k, 'condition test #4', 'test condition name', ['You are genderless'])
_testTag(k, 'condition test #5', 'test condition', ['You are genderless'])
k.setPredicate('gender', 'male')
_testTag(k, 'condition test #6', 'test condition', ['You are handsome'])
# the date test will occasionally fail if the original and "test"
# times cross a second boundary. There's no good way to avoid
# this problem and still do a meaningful test, so we simply
# provide a friendly message to be printed if the test fails.
date_warning = """
NOTE: the <date> test will occasionally report failure even if it
succeeds. So long as the response looks like a date/time string,
there's nothing to worry about.
"""
if not _testTag(k, 'date', 'test date', ["The date is %s" % time.asctime()]):
print date_warning
_testTag(k, 'formal', 'test formal', ["Formal Test Passed"])
_testTag(k, 'gender', 'test gender', ["He'd told her he heard that her hernia is history"])
_testTag(k, 'get/set', 'test get and set', ["I like cheese. My favorite food is cheese"])
_testTag(k, 'gossip', 'test gossip', ["Gossip is not yet implemented"])
_testTag(k, 'id', 'test id', ["Your id is _global"])
_testTag(k, 'input', 'test input', ['You just said: test input'])
_testTag(k, 'javascript', 'test javascript', ["Javascript is not yet implemented"])
_testTag(k, 'lowercase', 'test lowercase', ["The Last Word Should Be lowercase"])
_testTag(k, 'person', 'test person', ['HE think i knows that my actions threaten him and his.'])
_testTag(k, 'person2', 'test person2', ['YOU think me know that my actions threaten you and yours.'])
_testTag(k, 'person2 (no contents)', 'test person2 I Love Lucy', ['YOU Love Lucy'])
_testTag(k, 'random', 'test random', ["response #1", "response #2", "response #3"])
_testTag(k, 'random empty', 'test random empty', ["Nothing here!"])
_testTag(k, 'sentence', "test sentence", ["My first letter should be capitalized."])
_testTag(k, 'size', "test size", ["I've learned %d categories" % k.numCategories()])
_testTag(k, 'sr', "test sr test srai", ["srai results: srai test passed"])
_testTag(k, 'sr nested', "test nested sr test srai", ["srai results: srai test passed"])
_testTag(k, 'srai', "test srai", ["srai test passed"])
_testTag(k, 'srai infinite', "test srai infinite", [""])
_testTag(k, 'star test #1', 'You should test star begin', ['Begin star matched: You should'])
_testTag(k, 'star test #2', 'test star creamy goodness middle', ['Middle star matched: creamy goodness'])
_testTag(k, 'star test #3', 'test star end the credits roll', ['End star matched: the credits roll'])
_testTag(k, 'star test #4', 'test star having multiple stars in a pattern makes me extremely happy',
['Multiple stars matched: having, stars in a pattern, extremely happy'])
_testTag(k, 'system', "test system", ["The system says hello!"])
_testTag(k, 'that test #1', "test that", ["I just said: The system says hello!"])
_testTag(k, 'that test #2', "test that", ["I have already answered this question"])
_testTag(k, 'thatstar test #1', "test thatstar", ["I say beans"])
_testTag(k, 'thatstar test #2', "test thatstar", ["I just said \"beans\""])
_testTag(k, 'thatstar test #3', "test thatstar multiple", ['I say beans and franks for everybody'])
_testTag(k, 'thatstar test #4', "test thatstar multiple", ['Yes, beans and franks for all!'])
_testTag(k, 'think', "test think", [""])
k.setPredicate("topic", "fruit")
_testTag(k, 'topic', "test topic", ["We were discussing apples and oranges"])
k.setPredicate("topic", "Soylent Green")
_testTag(k, 'topicstar test #1', 'test topicstar', ["Solyent Green is made of people!"])
k.setPredicate("topic", "Soylent Ham and Cheese")
_testTag(k, 'topicstar test #2', 'test topicstar multiple', ["Both Soylents Ham and Cheese are made of people!"])
_testTag(k, 'unicode support', u"ÔÇÉϺÃ", [u"Hey, you speak Chinese! ÔÇÉϺÃ"])
_testTag(k, 'uppercase', 'test uppercase', ["The Last Word Should Be UPPERCASE"])
_testTag(k, 'version', 'test version', ["PyAIML is version %s" % k.version()])
_testTag(k, 'whitespace preservation', 'test whitespace', ["Extra Spaces\n Rule! (but not in here!) But Here They Do!"])
# Report test results
print "--------------------"
if _numTests == _numPassed:
print "%d of %d tests passed!" % (_numPassed, _numTests)
else:
print "%d of %d tests passed (see above for detailed errors)" % (_numPassed, _numTests)
# Run an interactive interpreter
#print "\nEntering interactive mode (ctrl-c to exit)"
#while True: print k.respond(raw_input("> "))
| bsd-2-clause | -5,386,696,871,265,022,000 | 38.543074 | 139 | 0.589312 | false |
Eric89GXL/vispy | vispy/visuals/shaders/tests/test_function.py | 1 | 13400 | # -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
from vispy.visuals.shaders import (Function, MainFunction, Variable, Varying,
FunctionChain, StatementList)
# Users normally don't need these, but I want to test them
from vispy.visuals.shaders.expression import FunctionCall, TextExpression
from vispy.testing import (assert_in, assert_not_in, assert_is,
run_tests_if_main, assert_raises, assert_equal)
## Define some snippets
transformScale = Function("""
vec4 transform_scale(vec4 pos)
{
pos.xyz *= $scale;
return pos;
}
""")
transformZOffset = Function("""
vec4 transform_zoffset(vec4 pos)
{
pos.z += $offset;
return pos;
}
""")
vert_template = Function("""
void main(void)
{
int nlights = $nlights;
vec4 pos = $position;
pos += $correction;
gl_Position = $endtransform(pos);
}
""")
frag_template = Function("""
void main(void)
{
    gl_FragColor = $color;
}
""")
data = 'just some dummy variable, Function is agnostic about this'
## Examples
def test_example1():
""" Just a few simple compositions.
"""
# Get function objects. Generate random name for transforms
code = Function(vert_template)
t1 = Function(transformScale)
t2 = Function(transformZOffset)
t3 = Function(transformScale)
# We need to create a variable in order to use it in two places
pos = Variable('attribute vec4 a_position')
# Compose everything together
code['position'] = t1(t2(pos))
code['correction'] = t1(pos) # Look, we use t1 again, different sig
code['endtransform'] = t3 # function pointer rather than function call
code['nlights'] = '4'
t1['scale'] = t2
t3['scale'] = (3.0, 4.0, 5.0)
t2['offset'] = '1.0'
code2 = Function(frag_template)
code2['color'] = Varying('v_position')
code['gl_PointSize'] = '3.0'
code[code2['color']] = pos
print(code)
def test_example2():
""" Demonstrate how a transform would work.
"""
vert_template = Function("""
void main(void)
{
gl_Position = $position;
}
""")
transformScale = Function("""
vec4 transform_scale(vec4 pos)
{
pos.xyz *= $scale;
return pos;
}
""")
class Transform(object):
def __init__(self):
# Equivalent methods to create new function object
self.func = Function(transformScale)
self.func['scale'] = 'uniform float'
#self.func = Function(transformScale)
def set_scale(self, scale):
self.func['scale'].value = scale
transforms = [Transform(), Transform(), Transform()]
code = Function(vert_template)
ob = Variable('attribute vec3 a_position')
for trans in transforms:
ob = trans.func(ob)
code['position'] = ob
print(code)
## Tests
def test_TextExpression():
exp = TextExpression('foo bar')
assert_equal('foo bar', exp.expression(None))
assert_equal(None, exp.definition(None, ('120', '')))
assert_raises(TypeError, TextExpression, 4)
def test_FunctionCall():
fun = Function(transformScale)
fun['scale'] = '1.0'
fun2 = Function(transformZOffset)
# No args
assert_raises(TypeError, fun) # need 1 arg
assert_raises(TypeError, fun, 1, 2) # need 1 arg
call = fun('x')
# Test repr
exp = call.expression({fun: 'y'})
assert_equal(exp, 'y(x)')
# Test sig
assert len(call._args) == 1
# Test dependencies
assert_in(fun, call.dependencies())
assert_in(call._args[0], call.dependencies())
# More args
call = fun(fun2('foo'))
# Test repr
exp = call.expression({fun: 'y', fun2: 'z'})
assert_in('y(z(', exp)
# Test sig
assert len(call._args) == 1
call2 = call._args[0]
assert len(call2._args) == 1
# Test dependencies
assert_in(fun, call.dependencies())
assert_in(call._args[0], call.dependencies())
assert_in(fun2, call.dependencies())
assert_in(call2._args[0], call.dependencies())
def test_Variable():
# Test init fail
assert_raises(TypeError, Variable) # no args
assert_raises(TypeError, Variable, 3) # wrong type
assert_raises(TypeError, Variable, "name", "str") # wrong type
assert_raises(ValueError, Variable, 'bla bla') # need correct vtype
assert_raises(ValueError, Variable, 'uniform b l a') # too many
# Test init success
var = Variable('uniform float bla') # Finally
assert_equal(var.name, 'bla')
assert_equal(var.dtype, 'float')
assert_equal(var.vtype, 'uniform')
assert var.value is None
# test assign new value
var.value = 10.
assert_equal(var.dtype, 'float') # type is locked; won't change
# test name-only init
var = Variable('bla') # Finally
assert_equal(var.name, 'bla')
assert_equal(var.dtype, None)
assert_equal(var.vtype, None)
assert var.value is None
# test assign new value
var.value = 10
assert_equal(var.dtype, 'int')
assert_equal(var.vtype, 'uniform')
assert_equal(var.value, 10)
# test init with value
var = Variable('bla', (1, 2, 3)) # Also valid
assert_equal(var.name, 'bla')
assert_equal(var.dtype, 'vec3')
assert_equal(var.vtype, 'uniform')
assert_equal(var.value, (1, 2, 3))
# Test value
#var = Variable('uniform float bla', data) # Also valid
#assert_equal(var.value, data)
#var.value = 3
#assert_equal(var.value, 3)
# Test repr
var = Variable('uniform float bla')
assert_in('uniform float bla', var.compile())
# Test injection, definition, dependencies
assert_equal(var.expression({var: 'xxx'}), 'xxx')
assert_equal(var.definition({var: 'xxx'}, ('120', ''), None),
'uniform float xxx;')
assert_in(var, var.dependencies())
# Renaming
var = Variable('uniform float bla')
assert_equal(var.name, 'bla')
var.name = 'foo'
assert_equal(var.name, 'foo')
def test_function_basics():
# Test init fail
assert_raises(TypeError, Function) # no args
assert_raises(ValueError, Function, 3) # need string
# Test init success 1
fun = Function('void main(){}')
assert_equal(fun.name, 'main')
assert len(fun.template_vars) == 0
# Test init success with template vars
fun = Function('void main(){$foo; $bar;}')
assert_equal(fun.name, 'main')
assert len(fun.template_vars) == 2
assert_in('foo', fun.template_vars)
assert_in('bar', fun.template_vars)
# Test setting verbatim expressions
assert_raises(KeyError, fun.__setitem__, 'bla', '33') # no such template
fun['foo'] = '33'
fun['bar'] = 'bla bla'
assert_is(type(fun['foo']), TextExpression)
assert_equal(fun['foo'].expression(None), '33')
assert_is(type(fun['bar']), TextExpression)
assert_equal(fun['bar'].expression(None), 'bla bla')
# Test setting call expressions
fun = Function('void main(){\n$foo;\n$bar;\n$spam(XX);\n$eggs(YY);\n}')
trans = Function('float transform_scale(float x) {return x+1.0;}')
assert_raises(TypeError, trans) # requires 1 arg
assert_raises(TypeError, trans, '1', '2')
fun['foo'] = trans('2')
fun['bar'] = trans('3')
fun['spam'] = trans
fun['eggs'] = trans
#
for name in ['foo', 'bar']:
assert_is(type(fun[name]), FunctionCall)
assert_equal(fun[name].function, trans)
assert_in(trans, fun.dependencies())
for name in ['spam', 'eggs']:
assert_equal(fun[name], trans)
#
text = fun.compile()
assert_in('\ntransform_scale(2);\n', text)
assert_in('\ntransform_scale(3);\n', text)
assert_in('\ntransform_scale(XX);\n', text)
assert_in('\ntransform_scale(YY);\n', text)
# test pre/post assignments
fun = Function('void main() {some stuff;}')
fun['pre'] = '__pre__'
fun['post'] = '__post__'
text = fun.compile()
assert text == 'void main() {\n __pre__\nsome stuff;\n __post__\n}\n'
# Test variable expressions
fun = Function('void main(){$foo; $bar;}')
fun['foo'] = Variable('uniform float bla')
fun['bar'] = Variable('attribute float bla')
assert_is(type(fun['foo']), Variable)
assert_is(type(fun['bar']), Variable)
assert_in(fun['foo'], fun.dependencies())
assert_in(fun['bar'], fun.dependencies())
# Test special variables
fun = Function('void main(){$foo; $bar;}')
variable = Variable('attribute vec3 v_pos')
varying = Variable('varying vec3 color')
# These do not work due to index
assert_raises(TypeError, fun.__setitem__, 3, 3) # not a string
assert_raises(KeyError, fun.__setitem__, 'xxx', 3) # unknown template var
assert_raises(TypeError, fun.__setitem__, variable, 3) # only varyings
# These work
fun['gl_PointSize'] = '3.0'
fun[varying] = variable
# And getting works
assert_equal(fun['gl_PointSize'].text, '3.0')
assert_equal(fun[varying], variable)
def test_function_changed():
ch = []
class C(object):
def _dep_changed(self, dep, **kwargs):
ch.append(dep)
ch_obj = C()
def assert_changed(*objs):
assert set(ch) == set(objs)
while ch:
ch.pop()
fun1 = Function('void main(){$var1; $var2;}')
fun1._dependents[ch_obj] = None
fun1['var1'] = 'x'
fun1['var2'] = 'y'
assert_changed(fun1)
fun1['var1'] = 'z'
assert_changed(fun1)
# same value; should result in no change events
fun1['var1'] = 'z'
assert_changed()
fun1['var1'] = 0.5
var1 = fun1['var1']
var1._dependents[ch_obj] = None
assert_changed(fun1)
var1.name = 'xxx'
assert_changed(fun1, var1)
# changing type requires code change
var1.value = 7
assert_changed(fun1, var1)
# changing value (but not type) requires no code changes
var1.value = 6
assert_changed()
# test variable disconnect
fun1['var1'] = Variable('var1', 7)
var2 = fun1['var1']
var2._dependents[ch_obj] = None
#assert_changed(fun1)
# var2 is now connected
var2.value = (1, 2, 3, 4)
assert_changed(fun1, var2)
# ..but var1 no longer triggers fun1.changed
assert_changed()
var1.value = 0.5
assert_changed(var1)
# test expressions
fun2 = Function('float fn(float x){return $var1 + x;}')
fun3 = Function('float fn(float x){return $var1 + x;}')
exp1 = fun2(fun3(0.5))
fun1['var2'] = exp1
assert_changed(fun1)
fun2._dependents[ch_obj] = None
fun3._dependents[ch_obj] = None
exp1._dependents[ch_obj] = None
fun2['var1'] = 'x'
assert_changed(fun1, fun2, exp1)
fun3['var1'] = 'x'
assert_changed(fun1, fun3, exp1)
# test disconnect
fun1['var2'] = fun2
assert_changed(fun1)
# triggers change
fun2['var1'] = 0.9
assert_changed(fun1, fun2, exp1)
# no longer triggers change
fun3['var1'] = 0.9
assert_changed(fun3, exp1)
def test_FunctionChain():
f1 = Function("void f1(){}")
f2 = Function("void f2(){}")
f3 = Function("float f3(vec3 x){}")
f4 = Function("vec3 f4(vec3 y){}")
f5 = Function("vec3 f5(vec4 z){}")
ch = FunctionChain('chain', [f1, f2])
assert ch.name == 'chain'
assert ch.args == []
assert ch.rtype == 'void'
assert_in('f1', ch.compile())
assert_in('f2', ch.compile())
ch.remove(f2)
assert_not_in('f2', ch.compile())
ch.append(f2)
assert_in('f2', ch.compile())
ch = FunctionChain(funcs=[f5, f4, f3])
assert_equal('float', ch.rtype)
assert_equal([('vec4', 'z')], ch.args)
assert_in('f3', ch.compile())
assert_in('f4', ch.compile())
assert_in('f5', ch.compile())
assert_in(f3, ch.dependencies())
assert_in(f4, ch.dependencies())
assert_in(f5, ch.dependencies())
def test_StatementList():
func = Function("void func() {}")
main = Function("void main() {}")
main['pre'] = StatementList()
expr = func()
main['pre'].add(expr, 0)
assert list(main['pre'].items) == [expr]
main['pre'].add(expr)
assert list(main['pre'].items) == [expr]
code = main.compile()
assert " func();" in code
main['pre'].remove(expr)
assert list(main['pre'].items) == []
def test_MainFunction():
code = """
const float pi = 3.0; // close enough.
vec4 rotate(vec4 pos) {
return pos; // just kidding.
}
attribute mat4 m_transform;
attribute vec4 a_pos;
void main() {
gl_Position = m_transform * a_pos;
}
"""
mf = MainFunction('vertex', code)
assert mf.name == 'main'
assert mf.rtype == 'void'
assert len(mf.args) == 0
sn = set(mf.static_names())
assert sn == set(['pi', 'rotate', 'pos', 'm_transform', 'a_pos'])
if __name__ == '__main__':
for key in [key for key in globals()]:
if key.startswith('test_'):
func = globals()[key]
print('running', func.__name__)
func()
    # Also run the first example as a demo
print('='*80)
test_example1()
run_tests_if_main()
| bsd-3-clause | -3,269,073,025,328,622,000 | 26.68595 | 79 | 0.59 | false |
jsa4000/OpenGL-Python | zero/core/engine.py | 1 | 4721 | import time
from .base import Thread
from .controllers import DisplayController, DeviceController
from ..system import InputManager, SceneManager, RenderManager
__all__ = ['CoreEngine']
class CoreEngine(Thread):
""" Core Engine Class
This class is the main loop of the process that will manage all
the scene like inputs, updates, rendering, physics, etc..
"""
@property
def display(self):
""" Return display controller
"""
return self._display
@property
def device(self):
""" Return device controller
"""
return self._device
@property
def render(self):
""" Return render controller
"""
return self._render
@property
def scene(self):
""" Get current Scene Graph
"""
return self._scene
@scene.setter
def scene(self, value):
""" This will set the new Scene Graph to render.
"""
self._scene = value
def __init__(self, display, device, render, scene, fps=60):
""" Contructor for the class
This class is the main loop for the Engine. In this class all the
Managers and workers will be created.
Devices or controllers that will be used in for the engine. They
will take the Scene Graph and perform the work that corresponds
i.e. input, update, physics, render etc.
Controllers are used for the cases where more devices or drivers are
used, for example in cases of diplays or devices, where it can be used
more than one device at the same time. Also it can be used for rendering
where depending on the type of rendering it could be used one or more
rendering types, like opengl, directx, ray casting, etc..
Also the engine will initialize the Display and do the calls to
the display driver so the Scene could be rendered properly.
Parameters:
display: controller that will be used to display- The admited
classes will be :DisplayController or Display
device: controller or device that will be used to interact with the
user by the Human User Devices(HUD). The admited classes are :
DeviceController or any of the devices associated with it that allows
get_events operation, like KeyboardDevice, MouseDevice, etc..
render: controller that will be used for the engine. The rende controller
will manage all the interface between the engine and the drivers being
used.
scene: This object will contain the whole scene with all the entities
and componentes. The catalogueManager.Instance() is storing all this
information in the creation and bindings between entities and components.
fps: frames-per-second the engine will use.
"""
super(CoreEngine,self).__init__()
# Initilaize parameters
self._display = display
self._device = device
self._render = render
self._scene = scene
self._fps = fps
# Initialize the variables for the Managers
self._input_manager = None
self._scene_manager = None
self._render_manager = None
def __del__(self):
""" Clean up the memory
"""
# Call threadBase __del__
super(CoreEngine,self).__del__()
def init(self):
""" Initialize all the Managers at start
"""
self._input_manager = InputManager(self).init()
self._scene_manager = SceneManager(self).init()
self._render_manager = RenderManager(self).init()
# Return itself for Cascade
return self
# Override
def _process(self):
""" Main process running the engine
        Basically the overall loop is: Input, Update and Render
"""
# Display must be created in the same context (thread) as OpenGL
self.display.init()
# Start the Main loop for the program
while self.running:
# Process Inputs from the user
self._input_manager.run(False)
# Update Scene, Physics, Logic and solvers
self._scene_manager.run()
# Finally render the scene
self._render_manager.run()
            time.sleep(1 / self._fps)  # honor the configured frame rate
# Update the display
self.display.update()
# Set running to false
self._running = False
def stop(self, close=False):
"""This method force to Stops the engine and close the window
"""
super(CoreEngine,self).stop()
        # Close all the windows and dispose
self.display.close(True)
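# A minimal usage sketch (illustrative only; it assumes concrete
# DisplayController/DeviceController/render/scene instances, and that the
# ``Thread`` base class from ``.base`` exposes a ``start()`` method that
# invokes ``_process()``):
#
#   engine = CoreEngine(display, device, render, scene, fps=60)
#   engine.init()            # create the Input/Scene/Render managers
#   engine.start()           # run the Input -> Update -> Render loop
#   ...
#   engine.stop(close=True)  # stop the loop and close the display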
| apache-2.0 | -1,644,156,547,459,216,400 | 31.115646 | 81 | 0.61703 | false |
Scalr/pychef | setup.py | 1 | 1211 | #!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
import os
from setuptools import setup, find_packages
setup(
name = 'PyChef',
version = '0.2.4-dev',
packages = find_packages(),
author = 'Noah Kantrowitz',
author_email = '[email protected]',
description = 'Python implementation of a Chef API client.',
long_description = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
license = 'BSD',
keywords = '',
url = 'http://github.com/coderanger/pychef',
classifiers = [
#'Development Status :: 1 - Planning',
#'Development Status :: 2 - Pre-Alpha',
#'Development Status :: 3 - Alpha',
#'Development Status :: 4 - Beta',
'Development Status :: 5 - Production/Stable',
#'Development Status :: 6 - Mature',
#'Development Status :: 7 - Inactive',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
zip_safe = False,
install_requires = ['six>=1.9.0', 'pyOpenSSL>=0.15'],
tests_require = ['unittest2', 'mock'],
test_suite = 'unittest2.collector',
)
| apache-2.0 | -75,939,780,706,307,180 | 33.6 | 90 | 0.592898 | false |
shootstar/novatest | nova/tests/api/openstack/compute/plugins/v3/test_images.py | 1 | 48561 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests of the new image services, both as a service layer,
and as a WSGI layer
"""
import urlparse
from lxml import etree
import webob
from nova.api.openstack.compute.plugins.v3 import images
from nova.api.openstack.compute.views import images as images_view
from nova.api.openstack import xmlutil
from nova import exception
from nova.image import glance
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import matchers
NS = "{http://docs.openstack.org/compute/api/v1.1}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
NOW_API_FORMAT = "2010-10-11T10:30:22Z"
class ImagesControllerTest(test.TestCase):
"""
Test of the OpenStack API /images application controller w/Glance.
"""
def setUp(self):
"""Run before each test."""
super(ImagesControllerTest, self).setUp()
fakes.stub_out_networking(self.stubs)
fakes.stub_out_rate_limiting(self.stubs)
fakes.stub_out_key_pair_funcs(self.stubs)
fakes.stub_out_compute_api_snapshot(self.stubs)
fakes.stub_out_compute_api_backup(self.stubs)
fakes.stub_out_glance(self.stubs)
self.controller = images.ImagesController()
def test_get_image(self):
fake_req = fakes.HTTPRequestV3.blank('/os-images/123')
actual_image = self.controller.show(fake_req, '124')
href = "http://localhost/v3/images/124"
bookmark = "http://localhost/images/124"
alternate = "%s/fake/images/124" % glance.generate_glance_url()
server_uuid = "aa640691-d1a7-4a67-9d3c-d35ee6b3cc74"
server_href = "http://localhost/v3/servers/" + server_uuid
server_bookmark = "http://localhost/servers/" + server_uuid
expected_image = {
"image": {
"id": "124",
"name": "queued snapshot",
"updated": NOW_API_FORMAT,
"created": NOW_API_FORMAT,
"status": "SAVING",
"progress": 25,
"size": 25165824,
"minDisk": 0,
"minRam": 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"metadata": {
"instance_uuid": server_uuid,
"user_id": "fake",
},
"links": [{
"rel": "self",
"href": href,
},
{
"rel": "bookmark",
"href": bookmark,
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate
}],
},
}
self.assertThat(actual_image, matchers.DictMatches(expected_image))
def test_get_image_with_custom_prefix(self):
self.flags(osapi_compute_link_prefix='https://zoo.com:42',
osapi_glance_link_prefix='http://circus.com:34')
fake_req = fakes.HTTPRequestV3.blank('/v3/os-images/124')
actual_image = self.controller.show(fake_req, '124')
href = "https://zoo.com:42/v3/images/124"
bookmark = "https://zoo.com:42/images/124"
alternate = "http://circus.com:34/fake/images/124"
server_uuid = "aa640691-d1a7-4a67-9d3c-d35ee6b3cc74"
server_href = "https://zoo.com:42/v3/servers/" + server_uuid
server_bookmark = "https://zoo.com:42/servers/" + server_uuid
expected_image = {
"image": {
"id": "124",
"name": "queued snapshot",
"updated": NOW_API_FORMAT,
"created": NOW_API_FORMAT,
"status": "SAVING",
"progress": 25,
"size": 25165824,
"minDisk": 0,
"minRam": 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"metadata": {
"instance_uuid": server_uuid,
"user_id": "fake",
},
"links": [{
"rel": "self",
"href": href,
},
{
"rel": "bookmark",
"href": bookmark,
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate
}],
},
}
self.assertThat(actual_image, matchers.DictMatches(expected_image))
def test_get_image_404(self):
fake_req = fakes.HTTPRequestV3.blank('/os-images/unknown')
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.show, fake_req, 'unknown')
def test_get_image_details(self):
request = fakes.HTTPRequestV3.blank('/os-images/detail')
response = self.controller.detail(request)
response_list = response["images"]
server_uuid = "aa640691-d1a7-4a67-9d3c-d35ee6b3cc74"
server_href = "http://localhost/v3/servers/" + server_uuid
server_bookmark = "http://localhost/servers/" + server_uuid
alternate = "%s/fake/images/%s"
expected = [{
'id': '123',
'name': 'public image',
'metadata': {'key1': 'value1'},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'progress': 100,
"size": 25165824,
'minDisk': 10,
'minRam': 128,
"links": [{
"rel": "self",
"href": "http://localhost/v3/images/123",
},
{
"rel": "bookmark",
"href": "http://localhost/images/123",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate % (glance.generate_glance_url(), 123),
}],
},
{
'id': '124',
'name': 'queued snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'SAVING',
'progress': 25,
"size": 25165824,
'minDisk': 0,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v3/images/124",
},
{
"rel": "bookmark",
"href": "http://localhost/images/124",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate % (glance.generate_glance_url(), 124),
}],
},
{
'id': '125',
'name': 'saving snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'SAVING',
'progress': 50,
"size": 25165824,
'minDisk': 0,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v3/images/125",
},
{
"rel": "bookmark",
"href": "http://localhost/images/125",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/125" % glance.generate_glance_url()
}],
},
{
'id': '126',
'name': 'active snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'progress': 100,
"size": 25165824,
'minDisk': 0,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v3/images/126",
},
{
"rel": "bookmark",
"href": "http://localhost/images/126",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/126" % glance.generate_glance_url()
}],
},
{
'id': '127',
'name': 'killed snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'ERROR',
'progress': 0,
"size": 25165824,
'minDisk': 0,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v3/images/127",
},
{
"rel": "bookmark",
"href": "http://localhost/images/127",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/127" % glance.generate_glance_url()
}],
},
{
'id': '128',
'name': 'deleted snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'DELETED',
'progress': 0,
"size": 25165824,
'minDisk': 0,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v3/images/128",
},
{
"rel": "bookmark",
"href": "http://localhost/images/128",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/128" % glance.generate_glance_url()
}],
},
{
'id': '129',
'name': 'pending_delete snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'DELETED',
'progress': 0,
"size": 25165824,
'minDisk': 0,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v3/images/129",
},
{
"rel": "bookmark",
"href": "http://localhost/images/129",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/129" % glance.generate_glance_url()
}],
},
{
'id': '130',
'name': None,
'metadata': {},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'ACTIVE',
'progress': 100,
"size": 0,
'minDisk': 0,
'minRam': 0,
"links": [{
"rel": "self",
"href": "http://localhost/v3/images/130",
},
{
"rel": "bookmark",
"href": "http://localhost/images/130",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": "%s/fake/images/130" % glance.generate_glance_url()
}],
},
]
self.assertThat(expected, matchers.DictListMatches(response_list))
def test_get_image_details_with_limit(self):
request = fakes.HTTPRequestV3.blank('/os-images/detail?limit=2')
response = self.controller.detail(request)
response_list = response["images"]
response_links = response["images_links"]
server_uuid = "aa640691-d1a7-4a67-9d3c-d35ee6b3cc74"
server_href = "http://localhost/v3/servers/" + server_uuid
server_bookmark = "http://localhost/servers/" + server_uuid
alternate = "%s/fake/images/%s"
expected = [{
'id': '123',
'name': 'public image',
'metadata': {'key1': 'value1'},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'ACTIVE',
"size": 25165824,
'minDisk': 10,
'progress': 100,
'minRam': 128,
"links": [{
"rel": "self",
"href": "http://localhost/v3/images/123",
},
{
"rel": "bookmark",
"href": "http://localhost/images/123",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate % (glance.generate_glance_url(), 123),
}],
},
{
'id': '124',
'name': 'queued snapshot',
'metadata': {
u'instance_uuid': server_uuid,
u'user_id': u'fake',
},
'updated': NOW_API_FORMAT,
'created': NOW_API_FORMAT,
'status': 'SAVING',
"size": 25165824,
'minDisk': 0,
'progress': 25,
'minRam': 0,
'server': {
'id': server_uuid,
"links": [{
"rel": "self",
"href": server_href,
},
{
"rel": "bookmark",
"href": server_bookmark,
}],
},
"links": [{
"rel": "self",
"href": "http://localhost/v3/images/124",
},
{
"rel": "bookmark",
"href": "http://localhost/images/124",
},
{
"rel": "alternate",
"type": "application/vnd.openstack.image",
"href": alternate % (glance.generate_glance_url(), 124),
}],
}]
self.assertThat(expected, matchers.DictListMatches(response_list))
href_parts = urlparse.urlparse(response_links[0]['href'])
self.assertEqual('/v3/images', href_parts.path)
params = urlparse.parse_qs(href_parts.query)
self.assertThat({'limit': ['2'], 'marker': ['124']},
matchers.DictMatches(params))
def test_image_detail_filter_with_name(self):
image_service = self.mox.CreateMockAnything()
filters = {'name': 'testname'}
request = fakes.HTTPRequestV3.blank('/v3/os-images/detail'
'?name=testname')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.ImagesController(image_service=image_service)
controller.detail(request)
def test_image_detail_filter_with_status(self):
image_service = self.mox.CreateMockAnything()
filters = {'status': 'active'}
request = fakes.HTTPRequestV3.blank('/v3/os-images/detail'
'?status=ACTIVE')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.ImagesController(image_service=image_service)
controller.detail(request)
def test_image_detail_filter_with_property(self):
image_service = self.mox.CreateMockAnything()
filters = {'property-test': '3'}
request = fakes.HTTPRequestV3.blank('/v3/os-images/detail'
'?property-test=3')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.ImagesController(image_service=image_service)
controller.detail(request)
def test_image_detail_filter_server_href(self):
image_service = self.mox.CreateMockAnything()
uuid = 'fa95aaf5-ab3b-4cd8-88c0-2be7dd051aaf'
ref = 'http://localhost:8774/servers/' + uuid
url = '/v3/os-images/detail?server=' + ref
filters = {'property-instance_uuid': uuid}
request = fakes.HTTPRequestV3.blank(url)
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.ImagesController(image_service=image_service)
controller.detail(request)
def test_image_detail_filter_server_uuid(self):
image_service = self.mox.CreateMockAnything()
uuid = 'fa95aaf5-ab3b-4cd8-88c0-2be7dd051aaf'
url = '/v2/fake/images/detail?server=' + uuid
filters = {'property-instance_uuid': uuid}
request = fakes.HTTPRequestV3.blank(url)
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.ImagesController(image_service=image_service)
controller.detail(request)
def test_image_detail_filter_changes_since(self):
image_service = self.mox.CreateMockAnything()
filters = {'changes-since': '2011-01-24T17:08Z'}
request = fakes.HTTPRequestV3.blank('/v3/os-images/detail'
'?changes-since=2011-01-24T17:08Z')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.ImagesController(image_service=image_service)
controller.detail(request)
def test_image_detail_filter_with_type(self):
image_service = self.mox.CreateMockAnything()
filters = {'property-image_type': 'BASE'}
request = fakes.HTTPRequestV3.blank('/v3/os-images/detail?type=BASE')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.ImagesController(image_service=image_service)
controller.detail(request)
def test_image_detail_filter_not_supported(self):
image_service = self.mox.CreateMockAnything()
filters = {'status': 'active'}
request = fakes.HTTPRequestV3.blank('/v3/os-images/detail?status='
'ACTIVE&UNSUPPORTEDFILTER=testname')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.ImagesController(image_service=image_service)
controller.detail(request)
def test_image_detail_no_filters(self):
image_service = self.mox.CreateMockAnything()
filters = {}
request = fakes.HTTPRequestV3.blank('/v3/os-images/detail')
context = request.environ['nova.context']
image_service.detail(context, filters=filters).AndReturn([])
self.mox.ReplayAll()
controller = images.ImagesController(image_service=image_service)
controller.detail(request)
def test_image_detail_invalid_marker(self):
class InvalidImageService(object):
def detail(self, *args, **kwargs):
raise exception.Invalid('meow')
request = fakes.HTTPRequestV3.blank('/v3/os-images?marker=invalid')
controller = images.ImagesController(
image_service=InvalidImageService())
self.assertRaises(webob.exc.HTTPBadRequest, controller.detail,
request)
def test_generate_alternate_link(self):
view = images_view.ViewBuilder()
request = fakes.HTTPRequestV3.blank('/v3/os-images/1')
generated_url = view._get_alternate_link(request, 1)
actual_url = "%s/fake/images/1" % glance.generate_glance_url()
self.assertEqual(generated_url, actual_url)
def test_delete_image(self):
request = fakes.HTTPRequestV3.blank('/v3/os-images/124')
request.method = 'DELETE'
response = self.controller.delete(request, '124')
self.assertEqual(response.status_int, 204)
def test_delete_deleted_image(self):
"""If you try to delete a deleted image, you get back 403 Forbidden."""
deleted_image_id = 128
# see nova.tests.api.openstack.fakes:_make_image_fixtures
request = fakes.HTTPRequestV3.blank(
'/v3/os-images/%s' % deleted_image_id)
request.method = 'DELETE'
self.assertRaises(webob.exc.HTTPForbidden, self.controller.delete,
request, '%s' % deleted_image_id)
def test_delete_image_not_found(self):
request = fakes.HTTPRequestV3.blank('/v3/os-images/300')
request.method = 'DELETE'
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.delete, request, '300')
class ImageXMLSerializationTest(test.TestCase):
TIMESTAMP = "2010-10-11T10:30:22Z"
SERVER_UUID = 'aa640691-d1a7-4a67-9d3c-d35ee6b3cc74'
SERVER_HREF = 'http://localhost/v3/servers/' + SERVER_UUID
SERVER_BOOKMARK = 'http://localhost/servers/' + SERVER_UUID
IMAGE_HREF = 'http://localhost/v3/os-images/%s'
IMAGE_NEXT = 'http://localhost/v3/os-images?limit=%s&marker=%s'
IMAGE_BOOKMARK = 'http://localhost/os-images/%s'
def test_xml_declaration(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'progress': 80,
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'metadata': {
'key1': 'value1',
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>")
self.assertTrue(has_dec)
def test_show(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'progress': 80,
'minRam': 10,
'minDisk': 100,
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'metadata': {
'key1': 'value1',
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'image')
image_dict = fixture['image']
for key in ['name', 'id', 'updated', 'created', 'status', 'progress']:
self.assertEqual(root.get(key), str(image_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 1)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = image_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
server_root = root.find('{0}server'.format(NS))
self.assertEqual(server_root.get('id'), image_dict['server']['id'])
link_nodes = server_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['server']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_show_zero_metadata(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'metadata': {},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'image')
image_dict = fixture['image']
for key in ['name', 'id', 'updated', 'created', 'status']:
self.assertEqual(root.get(key), str(image_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
meta_nodes = root.findall('{0}meta'.format(ATOMNS))
self.assertEqual(len(meta_nodes), 0)
server_root = root.find('{0}server'.format(NS))
self.assertEqual(server_root.get('id'), image_dict['server']['id'])
link_nodes = server_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['server']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_show_image_no_metadata_key(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'image')
image_dict = fixture['image']
for key in ['name', 'id', 'updated', 'created', 'status']:
self.assertEqual(root.get(key), str(image_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
meta_nodes = root.findall('{0}meta'.format(ATOMNS))
self.assertEqual(len(meta_nodes), 0)
server_root = root.find('{0}server'.format(NS))
self.assertEqual(server_root.get('id'), image_dict['server']['id'])
link_nodes = server_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['server']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_show_no_server(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'metadata': {
'key1': 'value1',
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'image')
image_dict = fixture['image']
for key in ['name', 'id', 'updated', 'created', 'status']:
self.assertEqual(root.get(key), str(image_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 1)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = image_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
server_root = root.find('{0}server'.format(NS))
self.assertEqual(server_root, None)
def test_show_with_min_ram(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'progress': 80,
'minRam': 256,
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'metadata': {
'key1': 'value1',
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'image')
image_dict = fixture['image']
for key in ['name', 'id', 'updated', 'created', 'status', 'progress',
'minRam']:
self.assertEqual(root.get(key), str(image_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 1)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = image_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
server_root = root.find('{0}server'.format(NS))
self.assertEqual(server_root.get('id'), image_dict['server']['id'])
link_nodes = server_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['server']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_show_with_min_disk(self):
serializer = images.ImageTemplate()
fixture = {
'image': {
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'progress': 80,
'minDisk': 5,
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'metadata': {
'key1': 'value1',
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'image')
image_dict = fixture['image']
for key in ['name', 'id', 'updated', 'created', 'status', 'progress',
'minDisk']:
self.assertEqual(root.get(key), str(image_dict[key]))
link_nodes = root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
metadata_root = root.find('{0}metadata'.format(NS))
metadata_elems = metadata_root.findall('{0}meta'.format(NS))
self.assertEqual(len(metadata_elems), 1)
for i, metadata_elem in enumerate(metadata_elems):
(meta_key, meta_value) = image_dict['metadata'].items()[i]
self.assertEqual(str(metadata_elem.get('key')), str(meta_key))
self.assertEqual(str(metadata_elem.text).strip(), str(meta_value))
server_root = root.find('{0}server'.format(NS))
self.assertEqual(server_root.get('id'), image_dict['server']['id'])
link_nodes = server_root.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['server']['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_index(self):
serializer = images.MinimalImagesTemplate()
fixture = {
'images': [
{
'id': 1,
'name': 'Image1',
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
{
'id': 2,
'name': 'Image2',
'links': [
{
'href': self.IMAGE_HREF % 2,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 2,
'rel': 'bookmark',
},
],
},
]
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'images_index')
image_elems = root.findall('{0}image'.format(NS))
self.assertEqual(len(image_elems), 2)
for i, image_elem in enumerate(image_elems):
image_dict = fixture['images'][i]
for key in ['name', 'id']:
self.assertEqual(image_elem.get(key), str(image_dict[key]))
link_nodes = image_elem.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
def test_index_with_links(self):
serializer = images.MinimalImagesTemplate()
fixture = {
'images': [
{
'id': 1,
'name': 'Image1',
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
{
'id': 2,
'name': 'Image2',
'links': [
{
'href': self.IMAGE_HREF % 2,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 2,
'rel': 'bookmark',
},
],
},
],
'images_links': [
{
'rel': 'next',
'href': self.IMAGE_NEXT % (2, 2),
}
],
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'images_index')
image_elems = root.findall('{0}image'.format(NS))
self.assertEqual(len(image_elems), 2)
for i, image_elem in enumerate(image_elems):
image_dict = fixture['images'][i]
for key in ['name', 'id']:
self.assertEqual(image_elem.get(key), str(image_dict[key]))
link_nodes = image_elem.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
# Check images_links
images_links = root.findall('{0}link'.format(ATOMNS))
for i, link in enumerate(fixture['images_links']):
for key, value in link.items():
self.assertEqual(images_links[i].get(key), value)
def test_index_zero_images(self):
serializer = images.MinimalImagesTemplate()
fixtures = {
'images': [],
}
output = serializer.serialize(fixtures)
root = etree.XML(output)
xmlutil.validate_schema(root, 'images_index')
image_elems = root.findall('{0}image'.format(NS))
self.assertEqual(len(image_elems), 0)
def test_detail(self):
serializer = images.ImagesTemplate()
fixture = {
'images': [
{
'id': 1,
'name': 'Image1',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'ACTIVE',
'server': {
'id': self.SERVER_UUID,
'links': [
{
'href': self.SERVER_HREF,
'rel': 'self',
},
{
'href': self.SERVER_BOOKMARK,
'rel': 'bookmark',
},
],
},
'links': [
{
'href': self.IMAGE_HREF % 1,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 1,
'rel': 'bookmark',
},
],
},
{
'id': '2',
'name': 'Image2',
'created': self.TIMESTAMP,
'updated': self.TIMESTAMP,
'status': 'SAVING',
'progress': 80,
'metadata': {
'key1': 'value1',
},
'links': [
{
'href': self.IMAGE_HREF % 2,
'rel': 'self',
},
{
'href': self.IMAGE_BOOKMARK % 2,
'rel': 'bookmark',
},
],
},
]
}
output = serializer.serialize(fixture)
root = etree.XML(output)
xmlutil.validate_schema(root, 'images')
image_elems = root.findall('{0}image'.format(NS))
self.assertEqual(len(image_elems), 2)
for i, image_elem in enumerate(image_elems):
image_dict = fixture['images'][i]
for key in ['name', 'id', 'updated', 'created', 'status']:
self.assertEqual(image_elem.get(key), str(image_dict[key]))
link_nodes = image_elem.findall('{0}link'.format(ATOMNS))
self.assertEqual(len(link_nodes), 2)
for i, link in enumerate(image_dict['links']):
for key, value in link.items():
self.assertEqual(link_nodes[i].get(key), value)
| apache-2.0 | -8,783,039,065,084,534,000 | 34.971111 | 79 | 0.440147 | false |
endlessm/chromium-browser | testing/libfuzzer/gen_fuzzer_config.py | 2 | 2892 | #!/usr/bin/python2
#
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generate or update an existing config (.options file) for libfuzzer test.
Invoked by GN from fuzzer_test.gni.
"""
import ConfigParser
import argparse
import os
import sys
def AddSectionOptions(config, section_name, options):
"""Add |options| to the |section_name| section of |config|.
Throws an
assertion error if any option in |options| does not have exactly two
elements.
"""
if not options:
return
config.add_section(section_name)
for option_and_value in options:
assert len(option_and_value) == 2, (
'%s is not an option, value pair' % option_and_value)
config.set(section_name, *option_and_value)
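# Example (illustrative): given a fresh ConfigParser instance ``config``,
#
#   AddSectionOptions(config, 'libfuzzer', [('max_len', '1024')])
#
# leaves |config| with a section that serializes as:
#
#   [libfuzzer]
#   max_len = 1024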
def main():
parser = argparse.ArgumentParser(description='Generate fuzzer config.')
parser.add_argument('--config', required=True)
parser.add_argument('--dict')
parser.add_argument('--libfuzzer_options', nargs='+', default=[])
parser.add_argument('--asan_options', nargs='+', default=[])
parser.add_argument('--msan_options', nargs='+', default=[])
parser.add_argument('--ubsan_options', nargs='+', default=[])
parser.add_argument('--grammar_options', nargs='+', default=[])
parser.add_argument(
'--environment_variables',
nargs='+',
default=[],
choices=['AFL_DRIVER_DONT_DEFER=1'])
args = parser.parse_args()
# Script shouldn't be invoked without any arguments, but just in case.
if not (args.dict or args.libfuzzer_options or args.environment_variables or
args.asan_options or args.msan_options or args.ubsan_options or
args.grammar_options):
return
config = ConfigParser.ConfigParser()
libfuzzer_options = []
if args.dict:
libfuzzer_options.append(('dict', os.path.basename(args.dict)))
libfuzzer_options.extend(
option.split('=') for option in args.libfuzzer_options)
AddSectionOptions(config, 'libfuzzer', libfuzzer_options)
AddSectionOptions(config, 'asan',
[option.split('=') for option in args.asan_options])
AddSectionOptions(config, 'msan',
[option.split('=') for option in args.msan_options])
AddSectionOptions(config, 'ubsan',
[option.split('=') for option in args.ubsan_options])
AddSectionOptions(config, 'grammar',
[option.split('=') for option in args.grammar_options])
AddSectionOptions(
config, 'env',
[option.split('=') for option in args.environment_variables])
# Generate .options file.
config_path = args.config
with open(config_path, 'w') as options_file:
options_file.write(
'# This is an automatically generated config for ClusterFuzz.\n')
config.write(options_file)
if __name__ == '__main__':
main()
| bsd-3-clause | -5,600,407,831,230,750,000 | 30.78022 | 78 | 0.671508 | false |
sky111144/nicoBookworld | server/app/blueprint/home.py | 1 | 12289 | #!/usr/bin/python
#coding=utf-8
from flask import Blueprint,render_template,make_response,redirect,request,g,jsonify
from flask import session as flaskSession
from sqlalchemy import distinct,desc,or_
from app.model.base import User,Site,Novel,Shelf,Comment,Message
from novelSpider.task import createDownloader
def object_to_dict(data, flag):
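    """Serialize query results into JSON-ready dicts.
    The ``flag`` argument selects the output shape: 'shelf', 'novel',
    'charpt', 'search', 'comment', 'message', 'userMessage' or
    'userComment'.
    """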
if flag == 'shelf':
result = {
'status': 'success',
'data': []
}
for novel in data:
result['data'].append({
'id': novel.id,
'name': novel.novelName,
'author': novel.novelAuthor,
'img': novel.novelImg,
'intro': novel.novelIntro,
'lastUpdate': novel.lastUpdate,
})
elif flag == 'novel':
charpts = data['charpts']
info = data['info']
result = {
'info': {
'id': info.id,
'name': info.novelName,
'author': info.novelAuthor,
'img': info.novelImg,
'lastUpdate': info.lastUpdate.charptName,
'type': info.novelType,
'intro': info.novelIntro
},
'charpts': []
}
for charpt in charpts:
result['charpts'].append({
'id': charpt.id,
'name': charpt.charptName
})
elif flag == 'charpt':
result = {
'id': data.id,
'name': data.charptName,
'content': data.charptContent
}
elif flag == 'search':
result = []
for novel in data:
result.append({
'id': novel.id,
'name': novel.novelName,
'author': novel.novelAuthor,
'img': novel.novelImg,
'intro': novel.novelIntro,
'lastUpdate': novel.lastUpdate
})
elif flag == 'comment':
result = {
'status': 'success',
'data': []
}
for comment in data:
result['data'].append({
'userId': comment[1].id,
'username': comment[1].username,
'novelId': comment[0].novelId,
'comment': comment[0].comment,
'time': comment[0].time
})
elif flag == 'message':
result = {
'status': 'success',
'data': []
}
for message in data:
result['data'].append({
'senderId': message[0].senderId,
'receiverId': message[1].id,
'receiverName': message[1].username,
'message': message[0].message,
'time': message[0].time
})
elif flag == 'userMessage':
result = {
'status': 'success',
'data': []
}
for message in data:
result['data'].append({
'userId': message[1].id,
'username': message[1].username,
'message': message[0].message,
'time': message[0].time
})
elif flag == 'userComment':
result = {
'status': 'success',
'data': []
}
for comment in data:
result['data'].append({
'novelId': comment[1].id,
'novelName': comment[1].novelName,
'comment': comment[0].comment,
'time': comment[0].time
})
return result
homeBlueprint = Blueprint(
'home',
__name__
)
@homeBlueprint.route('/novel/list/<int:novelNum>')
def novelList(novelNum):
novel = g.dbSession.query(Site).limit(novelNum)
return jsonify(object_to_dict(novel, 'shelf'))
@homeBlueprint.route('/shelf')
def shelf():
userId = request.cookies.get('userId')
shelf = g.dbSession.query(Site).join(Shelf, Site.id == Shelf.novelId).filter(Shelf.userId == userId).all()
return jsonify(object_to_dict(shelf, 'shelf'))
@homeBlueprint.route('/novel/<int:id>')
def novel(id):
data = {}
charpts = g.dbSession.query(Novel).filter_by(novelId=id).all()
page = request.values.get('page')
size = request.values.get('size')
    if page is not None and size is not None:
        page, size = int(page), int(size)
        data['charpts'] = charpts[(page - 1) * size:page * size]
    else:
        data['charpts'] = charpts
data['info'] = g.dbSession.query(Site).filter_by(id=id).first()
data['info'].lastUpdate = charpts[-1]
return jsonify(object_to_dict(data, 'novel'))
@homeBlueprint.route('/novel/<int:id>/<int:charptId>')
def charpt(id, charptId):
novel = g.dbSession.query(Novel).filter_by(id=charptId, novelId=id).first()
return jsonify(object_to_dict(novel, 'charpt'))
@homeBlueprint.route('/search', methods=['GET'])
def search():
query = request.args.get('query')
if query != '':
novel = g.dbSession.query(Site).filter(
or_(Site.novelName.like('%%%s%%'%query))
).all()
return jsonify(object_to_dict(novel, 'search'))
else:
return jsonify({
'status': 'fail',
'msg': '搜索失败',
'data': []
})
@homeBlueprint.route('/user/<int:userId>', methods=['GET'])
def userInfo(userId):
userInfo = g.dbSession.query(User).filter_by(id=userId).first()
return jsonify({
'status': 'success',
'msg': '获取用户信息成功',
'data': {
'username': userInfo.username,
'id': userInfo.id
}
})
# Query the current user's own comments
@homeBlueprint.route('/user/comment', methods=['GET'])
def userComment():
userId = request.cookies.get('userId')
comments = g.dbSession.query(Comment,Site).join(Site, Comment.novelId==Site.id).filter(
Comment.userId==userId
).all()
return jsonify(object_to_dict(comments,'userComment'))
# Query the current user's own private messages
@homeBlueprint.route('/user/message', methods=['GET'])
def userMessage():
userId = request.cookies.get('userId')
messages = g.dbSession.query(Message,User).join(User, Message.receiverId==User.id).filter(
Message.senderId==userId
).all()
return jsonify(object_to_dict(messages,'userMessage'))
@homeBlueprint.route('/message/<int:userId>', methods=['POST', 'GET'])
def message(userId):
senderId = request.cookies.get('userId')
if request.method == 'POST':
message = request.get_json().get('message')
g.dbSession.add(Message(
senderId=senderId,
receiverId=userId,
message=message,
time=g.time
))
g.dbSession.commit()
return jsonify({
'status': 'success',
'msg': '私信成功'
})
elif request.method == 'GET':
messages = g.dbSession.query(Message,User).join(User, User.id==Message.receiverId).filter(
Message.senderId==senderId,
Message.receiverId==userId
).all()
return jsonify(object_to_dict(messages,'message'))
@homeBlueprint.route('/comment/<int:novelId>', methods=['POST', 'GET'])
def comment(novelId):
userId = request.cookies.get('userId')
if request.method == 'POST':
comment = request.get_json().get('comment')
g.dbSession.add(Comment(
userId=userId,
novelId=novelId,
comment=comment,
time=g.time
))
g.dbSession.commit()
return jsonify({
'status': 'success',
'msg': '评论成功'
})
elif request.method == 'GET':
comments = g.dbSession.query(Comment,User).join(User, User.id==Comment.userId).filter(
Comment.novelId==novelId
).all()
return jsonify(object_to_dict(comments,'comment'))
@homeBlueprint.route('/collect/<int:novelId>', methods=['GET'])
def collectNovel(novelId):
userId = request.cookies.get('userId')
if userId is not None:
userId = int(userId)
isCollected = g.dbSession.query(Shelf).filter_by(
novelId=novelId,
userId=userId
).count()
result = jsonify({
'status': 'fail',
'msg': '收藏失败'
})
if isCollected == 0 and userId is not None:
g.dbSession.add(Shelf(
novelId=novelId,
userId=userId
))
g.dbSession.commit()
result = jsonify({
'status': 'success',
'msg': '收藏成功'
})
return result
@homeBlueprint.route('/register', methods=['POST'])
def register():
username = request.get_json().get('username')
password = request.get_json().get('password')
email = request.get_json().get('email')
result = jsonify({
'status': 'fail',
'msg': '注册失败'
})
if username == None or password == None or email == None:
return result
user = g.dbSession.query(User).filter_by(email=email).all()
    is_new_user = len(user) == 0  # True when no account uses this email yet
    if is_new_user:
g.dbSession.add(User(
username=username,
password=password,
email=email
))
result = jsonify({
'status': 'success',
'msg': '注册成功',
'data': {
'username': username
}
})
flaskSession['username'] = username
g.dbSession.commit()
res = make_response(result)
return res
@homeBlueprint.route('/login', methods=['POST'])
def login():
username = request.get_json().get('username')
password = request.get_json().get('password')
user = g.dbSession.query(User).filter_by(username=username,password=password).all()
    is_valid = len(user) == 1  # credentials matched exactly one user
result = {
'status': 'fail',
'msg': '登录失败'
}
    if is_valid:
flaskSession['username'] = username
result = {
'status': 'success',
'msg': '登录成功',
'data': {
'userId': user[0].id,
'username': username
}
}
res = make_response(jsonify(result))
    if is_valid:
res.set_cookie('isLogin', 'true', expires=g.expires)
res.set_cookie('username', username, expires=g.expires)
res.set_cookie('userId', str(user[0].id), expires=g.expires)
return res
@homeBlueprint.route('/logout', methods=['POST'])
def logout():
if 'username' in flaskSession:
flaskSession['username'] = None
res = make_response(jsonify({
'status': 'success',
        'msg': 'Logout succeeded'
}))
    res.delete_cookie('isLogin')  # also clear the login flag set by login()
    res.delete_cookie('username')
    res.delete_cookie('userId')
return res
@homeBlueprint.route('/changePassword', methods=['POST'])
def changePassword():
oldPassword = request.get_json().get('oldPassword')
newPassword = request.get_json().get('newPassword')
username = request.get_json().get('username')
isUserself = g.dbSession.query(User).filter_by(username=username,password=oldPassword).count()
result = {
'status': 'fail',
        'msg': 'Password change failed'
}
if isUserself == 1:
g.dbSession.query(User).filter_by(username=username).update({
User.password: newPassword
})
g.dbSession.commit()
result = {
'status': 'success',
            'msg': 'Password changed successfully'
}
return jsonify(result)
@homeBlueprint.route('/novel/count')
def novelCount():
count = g.dbSession.query(Site).count()
return jsonify({
'status': 'success',
'data': {
'count': count
}
})
@homeBlueprint.route('/charpt/count')
def charptCount():
count = g.dbSession.query(Novel).count()
return jsonify({
'status': 'success',
'data': {
'count': count
}
})
@homeBlueprint.route('/task/getCharptList', methods=['GET'])
def getCharptList():
downloader = createDownloader()
downloader.getCharptList(1)
return jsonify({
'status': 'success'
})
@homeBlueprint.route('/task/getCharptContent', methods=['GET'])
def getCharptContent():
downloader = createDownloader()
downloader.getCharptContent(charptNum=1)
return jsonify({
'status': 'success'
})
| apache-2.0 | 5,621,720,612,245,947,000 | 29.982143 | 110 | 0.545492 | false |
AntelopeAudio/zen-launcher | zen_launcher/runner.py | 1 | 1918 | import os
import re
import shutil
import subprocess
import sys
BASE_DIR = os.path.expanduser('~/.antelope/zen/panel')
if not os.path.exists(BASE_DIR):
os.makedirs(BASE_DIR)
def get_panel_dir(ver, create=True):
stripped = ver.strip()
    # Only fully numeric dotted versions map to panel directories; anything
    # else falls through and the function implicitly returns None.
    if re.fullmatch(r'\d+(\.\d+)*', stripped) is not None:
d = os.path.join(BASE_DIR, stripped)
if create and not os.path.exists(d):
os.makedirs(d)
return d
def sort_vers(vers):
"""Return versions sorted in descending order. Format is expected to be
consistent. For example passing ['1.10.1', '1.11'] (Note that
'1.10.1' has a micro version number and '1.11' doesn't.) will yield
incorrect results.
"""
key = lambda v: int(v.replace('.', ''))
return list(sorted(vers, key=key, reverse=True))
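# Editor's note: a stricter alternative (not wired in, to preserve the
# documented behaviour of sort_vers above) compares versions component-wise,
# so '1.9' < '1.10' and mixed-length versions such as '1.10.1' vs '1.11'
# sort correctly:
def sort_vers_strict(vers):
    return sorted(vers,
                  key=lambda v: tuple(int(p) for p in v.split('.')),
                  reverse=True)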
def get_latest_panel_version():
"""Returns None in case of no panels installed.
"""
vers = os.listdir(BASE_DIR)
srt = sort_vers(vers)
if srt:
return srt[0]
def get_latest_panel_dir():
"""Returns None in case of no panels installed.
"""
latest = get_latest_panel_version()
if latest is not None:
return os.path.join(BASE_DIR, latest)
return None
def run_version(ver):
d = get_panel_dir(ver, create=False)
if not os.path.exists(d):
        raise ValueError('no panel directory installed for version {!r}'.format(ver))
if sys.platform.startswith('win'):
# print('Starting {} for Windows'.format(d))
subprocess.call('cd "{}" && ZenStudio.exe'.format(d), shell=True)
elif sys.platform.startswith('darwin'):
ret = subprocess.call('cd "{}" && open ./*.app'.format(d), shell=True)
if ret != 0:
# In case of error, remove the CP directory. This way the
# next run will trigger the download process anew. Not the
# smartest thing, but the easiest. :)
shutil.rmtree(d)
else:
print('Starting {} for GNU'.format(d))
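        # Editor's note: unlike the win/darwin branches above, this branch
        # only logs. A launch call analogous to the others might look like
        # the sketch below; the binary name 'ZenStudio' is an assumption:
        #
        #     subprocess.call('cd "{}" && ./ZenStudio'.format(d), shell=True)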
| gpl-3.0 | 7,596,631,990,808,295,000 | 27.205882 | 78 | 0.611053 | false |
PrincetonUniversity/pywsse | wsse/server/drf/tests/test_authentication.py | 1 | 11320 | # wsse/server/drf/tests/test_authentication.py
# coding=utf-8
# pywsse
# Authors: Rushy Panchal, Naphat Sanguansin, Adam Libresco, Jérémie Lumbroso
# Date: September 1st, 2016
# Description: Test DRF WSSE Authentication backend.
import contextlib
import hashlib
import base64
import datetime
import itertools
from rest_framework.test import APITestCase, APIRequestFactory
from rest_framework import status
from django.contrib.auth.models import User
from django.utils import timezone
from wsse import utils, settings
from wsse.compat import reverse_lazy
from wsse.server.django.wsse.models import UserSecret
def setUpModule():
'''
Set up the module for running tests.
'''
# Set the nonce store to the Django store after saving the current settings
# so they can be restored later.
global __old_nonce_settings
__old_nonce_settings = (settings.NONCE_STORE, settings.NONCE_STORE_ARGS)
settings.NONCE_STORE = 'wsse.server.django.wsse.store.DjangoNonceStore'
settings.NONCE_STORE_ARGS = []
def tearDownModule():
'''
Tear down the module after running tests.
'''
# Restore the nonce settings.
settings.NONCE_STORE, settings.NONCE_STORE_ARGS = __old_nonce_settings
class WSSEAuthenticationTests(APITestCase):
'''
Test WSSE Authentication on the API.
'''
factory = APIRequestFactory()
base_url = reverse_lazy('api-test')
@contextlib.contextmanager
def http_auth(self, header):
'''
Perform HTTP authentication, through headers, in a request.
The headers are automatically cleared afterwards.
'''
kwargs = {utils._django_header(settings.REQUEST_HEADER): header}
self.client.credentials(**kwargs)
yield
# Clear the credential headers.
self.client.credentials()
@classmethod
def setUpClass(cls):
'''
Set up the class for running tests.
'''
cls.user = User.objects.create(username = 'test')
cls.user_secret = UserSecret.objects.create(user = cls.user)
@classmethod
def tearDownClass(cls):
'''
Tear down the class after running tests.
'''
cls.user.delete()
def make_header_values(self, user = None, username = None, timestamp = None,
digest = None, b64_digest = None, nonce = None, b64_nonce = None,
digest_algorithm = None):
'''
Make the header values from the given parameters.
:param user: (optional) user to authenticate with header
:type user: django.contrib.auth.models.User
:param username: (optional) username to provide in header
:type username: str
:param timestamp: (optional) timestamp to use in header
:type timestamp: str
:param digest: (optional) header digest
:type digest: bytes
:param b64_digest: (optional) header digest as base64
:type b64_digest: bytes
:param nonce: (optional) header nonce
:type nonce: bytes
:param b64_nonce: (optional) header nonce as base64
:type b64_nonce: bytes
:param digest_algorithm: (optional, default: sha256) digest algorithm to
use. It must be supported by hashlib.
:type digest_algorithm: str
:return: WSSE authentication header parts
:rtype: dict
'''
if user is None:
user = self.user
if username is None:
username = user.username
if timestamp is None:
now = timezone.now()
timestamp = now.strftime(settings.TIMESTAMP_FORMATS[0])
if nonce is None:
nonce = utils._random_string(length = settings.NONCE_LENGTH)
if digest is None:
digest = utils._b64_digest(nonce, timestamp, self.user_secret.secret,
algorithm = digest_algorithm)
if b64_nonce is None:
b64_nonce = base64.b64encode(utils._to_bytes(nonce))
if b64_digest is not None:
digest = b64_digest
header_values = {
'Username': username,
'PasswordDigest': utils._from_bytes(digest),
'Nonce': utils._from_bytes(b64_nonce),
'Created': timestamp
}
return header_values
def make_header(self, *args, **kwargs):
'''
Make the header from the given values.
:return: WSSE authentication header
:rtype: str
'''
header_values = self.make_header_values(*args, **kwargs)
header = (', '.join('{k}="{v}"'.format(
k = k, v = v) for k, v in header_values.items()))
return header
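	# Example of the header string make_header builds (illustrative values
	# only; the digest, nonce, and Created format below are placeholders):
	#
	#   Username="test", PasswordDigest="quR/EW...", Nonce="ZDM2ZT...",
	#   Created="2016-09-01T00:00:00Z"
	#
	# i.e. the comma-separated WSSE UsernameToken fields; the 'UsernameToken '
	# scheme prefix is added separately where a test needs it.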
def test_valid_authentication(self):
'''
Authenticate with a valid username. The authentication should succeed.
'''
with self.http_auth(self.make_header()):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_valid_authentication_alternative_timestamp_format(self):
'''
Authenticate with a valid username, using an alternative timestamp format.
The authentication should succeed.
'''
now = timezone.now()
timestamp = now.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
with self.http_auth(self.make_header(timestamp = timestamp)):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_valid_authentication_alternative_headers(self):
'''
Make a valid authentication request. Use various permutations of the
header format.
'''
default_params = ['Username', 'PasswordDigest', 'Nonce', 'Created']
for params in itertools.permutations(default_params):
header_values = self.make_header_values()
header = ('UsernameToken ' + ', '.join('{k}="{v}"'.format(
k = param, v = header_values[param]) for param in params))
with self.http_auth(header):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_valid_authentication_drift(self):
'''
Authenticate with a valid username with drift on the timestamp.
The authentication should succeed.
'''
ts = (timezone.now() +
datetime.timedelta(seconds = settings.DRIFT_OFFSET - 1))
timestamp = ts.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
		with self.http_auth(self.make_header(timestamp = timestamp)):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_no_authentication(self):
'''
Perform a request with no attempt at authentication. Authentication
should not succeed.
'''
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_wrong_format_authentication(self):
'''
Perform a request with incorrect authentication header format.
Authentication should not succeed.
'''
with self.http_auth('WrongFormat=27'):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_invalid_timestamp_authentication(self):
'''
Perform a request with an invalid timestamp.
Authentication should not succeed.
'''
with self.http_auth(self.make_header(timestamp = 'Nope')):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_invalid_timestamp_format_authentication(self):
'''
Perform a request with an invalid timestamp format.
Authentication should not succeed.
'''
now = timezone.now()
timestamp = now.strftime("%m/%d/%Y, %M:%S.%f")
with self.http_auth(self.make_header(timestamp = timestamp)):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_expired_timestamp(self):
'''
Authenticate an expired timestamp. The authentication should not succeed.
'''
now = timezone.now() - datetime.timedelta(
seconds = settings.TIMESTAMP_DURATION + 1)
timestamp = now.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
with self.http_auth(self.make_header(timestamp = timestamp)):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_future_timestamp(self):
'''
Authenticate a future timestamp. The authentication should not succeed.
'''
now = timezone.now() + datetime.timedelta(
seconds = settings.TIMESTAMP_DURATION + 1)
timestamp = now.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
with self.http_auth(self.make_header(timestamp = timestamp)):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_incorrect_username(self):
'''
		Authenticate with an incorrect username. The authentication should not
succeed.
'''
with self.http_auth(self.make_header(username = 'nope')):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_invalid_b64_nonce(self):
'''
Authenticate with a nonce that is not base64. The authentication should not
succeed.
'''
with self.http_auth(self.make_header(b64_nonce = '?????????')):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_short_nonce(self):
'''
Authenticate with a nonce that is fewer than 8 characters. The
authentication should not succeed.
'''
with self.http_auth(self.make_header(b64_nonce = 'short')):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_long_nonce(self):
'''
Authenticate with a nonce that is longer than 32 characters. The
authentication should not succeed.
'''
with self.http_auth(self.make_header(b64_nonce = 'a' * 72)):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_authenticate_sha1(self):
'''
Authenticate with a valid header, but calculate the digest using SHA-1.
The authentication should not succeed.
'''
with self.http_auth(self.make_header(
digest_algorithm = 'sha1')):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_invalid_digest(self):
'''
Authenticate with an invalid digest. The authentication should not succeed.
'''
with self.http_auth(self.make_header(
digest = 'nope'.encode('utf-8'))):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_invalid_digest_b64(self):
'''
Authenticate with an invalid digest, in base64.
The authentication should not succeed.
'''
with self.http_auth(self.make_header(b64_digest = 'nope')):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_replay_attack(self):
'''
Authenticate with a valid header twice. The second authentication should
be detected as a replay attack.
'''
header = self.make_header()
with self.http_auth(header):
response = self.client.get(self.base_url)
with self.http_auth(header):
second_response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(second_response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_replay_attack_multiple(self):
'''
Authenticate with a valid header multiple times.
The following authentication attempts should be detected as replay attacks.
'''
header = self.make_header()
with self.http_auth(header):
response = self.client.get(self.base_url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
for _ in range(10):
with self.http_auth(header):
new_resp = self.client.get(self.base_url)
self.assertEqual(new_resp.status_code, status.HTTP_401_UNAUTHORIZED)
| lgpl-3.0 | 6,710,530,665,607,529,000 | 29.343164 | 77 | 0.721417 | false |
google/uncertainty-baselines | baselines/toxic_comments/dropout.py | 1 | 38010 | # coding=utf-8
# Copyright 2021 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BERT model with Monte Carlo dropout.
This script trains a model on the WikipediaTalk data and evaluates it on both
the WikipediaTalk and CivilComment datasets.
"""
import os
import time
from absl import app
from absl import flags
from absl import logging
import robustness_metrics as rm
import tensorflow as tf
from tensorflow_addons import losses as tfa_losses
from tensorflow_addons import metrics as tfa_metrics
import uncertainty_baselines as ub
import metrics as tc_metrics # local file import
import utils # local file import
from uncertainty_baselines.datasets import toxic_comments as ds
from tensorboard.plugins.hparams import api as hp
# Data flags
flags.DEFINE_string(
'in_dataset_dir', None,
'Path to in-domain dataset (WikipediaToxicityDataset).')
flags.DEFINE_string(
'ood_dataset_dir', None,
'Path to out-of-domain dataset (CivilCommentsDataset).')
flags.DEFINE_string(
'identity_dataset_dir', None,
'Path to out-of-domain dataset with identity labels '
'(CivilCommentsIdentitiesDataset).')
# Model flags
flags.DEFINE_string('model_family', 'bert',
'Types of model to use. Can be either TextCNN or BERT.')
# Model flags, BERT.
flags.DEFINE_string(
'bert_dir', None,
'Directory to BERT pre-trained checkpoints and config files.')
flags.DEFINE_string(
'bert_ckpt_dir', None, 'Directory to BERT pre-trained checkpoints. '
    'If None, default to {bert_dir}/bert_model.ckpt.')
flags.DEFINE_string(
'bert_config_dir', None, 'Directory to BERT config files. '
    'If None, default to {bert_dir}/bert_config.json.')
# Dropout flags
flags.DEFINE_float('dropout_rate', 0.1, 'Dropout rate.')
flags.DEFINE_bool(
'channel_wise_dropout_all', True,
'Whether to apply channel-wise dropout for all layers.')
flags.DEFINE_bool(
'channel_wise_dropout_mha', False,
'Whether to apply channel-wise dropout to the multi-head attention layer.')
flags.DEFINE_bool(
'channel_wise_dropout_att', False,
'Whether to apply channel-wise dropout to the attention output layer.')
flags.DEFINE_bool(
'channel_wise_dropout_ffn', False,
'Whether to apply channel-wise dropout to the hidden feedforward layer.')
flags.DEFINE_bool(
'use_mc_dropout_mha', False,
'Whether to apply Monte Carlo dropout to the multi-head attention layer.')
flags.DEFINE_bool(
'use_mc_dropout_att', True,
'Whether to apply Monte Carlo dropout to the attention output layer.')
flags.DEFINE_bool(
'use_mc_dropout_ffn', True,
'Whether to apply Monte Carlo dropout to the hidden feedforward layer.')
flags.DEFINE_bool(
'use_mc_dropout_output', False,
'Whether to apply Monte Carlo dropout to the dense output layer.')
# Optimization and evaluation flags
flags.DEFINE_integer('seed', 8, 'Random seed.')
flags.DEFINE_integer('per_core_batch_size', 32, 'Batch size per TPU core/GPU.')
flags.DEFINE_float(
'base_learning_rate', 5e-5,
'Base learning rate when total batch size is 128. It is '
'scaled by the ratio of the total batch size to 128.')
flags.DEFINE_float('one_minus_momentum', 0.1, 'Optimizer momentum.')
flags.DEFINE_integer(
'checkpoint_interval', 5,
'Number of epochs between saving checkpoints. Use -1 to '
'never save checkpoints.')
flags.DEFINE_integer('evaluation_interval', 1,
'Number of epochs between evaluation.')
flags.DEFINE_integer('num_ece_bins', 15, 'Number of bins for ECE.')
flags.DEFINE_integer(
'num_approx_bins', 1000,
'Number of bins for approximating collaborative and abstention metrics.')
flags.DEFINE_list(
'fractions',
['0.0', '0.001', '0.005', '0.01', '0.02', '0.05', '0.1', '0.15', '0.2'],
'A list of fractions of total examples to send to '
'the moderators (up to 1).')
flags.DEFINE_string('output_dir', '/tmp/toxic_comments', 'Output directory.')
flags.DEFINE_integer('train_epochs', 5, 'Number of training epochs.')
flags.DEFINE_float(
'warmup_proportion', 0.1,
'Proportion of training to perform linear learning rate warmup for. '
'E.g., 0.1 = 10% of training.')
flags.DEFINE_float(
'ece_label_threshold', 0.7,
'Threshold used to convert toxicity score into binary labels for computing '
'Expected Calibration Error (ECE). Default is 0.7 which is the threshold '
'value recommended by Jigsaw team.')
# Loss type
flags.DEFINE_enum('loss_type', 'cross_entropy',
['cross_entropy', 'focal_cross_entropy', 'mse', 'mae'],
'Type of loss function to use.')
flags.DEFINE_float('focal_loss_alpha', 0.1,
'Multiplicative factor used in the focal loss [1]-[2] to '
'downweight common cases.')
flags.DEFINE_float('focal_loss_gamma', 5.,
                   'Exponent factor used in the focal loss [1]-[2] to '
                   'push the model to focus on low-confidence examples.')
# Accelerator flags.
flags.DEFINE_bool('use_gpu', False, 'Whether to run on GPU or otherwise TPU.')
flags.DEFINE_bool('use_bfloat16', False, 'Whether to use mixed precision.')
flags.DEFINE_integer('num_cores', 8, 'Number of TPU cores or number of GPUs.')
flags.DEFINE_string('tpu', None,
'Name of the TPU. Only used if use_gpu is False.')
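# Prediction-mode flags. These are referenced in main() below but their
# definitions are missing from this copy of the script; the help strings and
# defaults here are editorial assumptions, not the upstream values.
flags.DEFINE_bool('prediction_mode', False,
                  'Whether to load a checkpoint and only run prediction.')
flags.DEFINE_string('eval_checkpoint_dir', None,
                    'Checkpoint directory to restore in prediction mode.')
flags.DEFINE_bool('identity_prediction', False,
                  'Whether to also predict on identity-specific test sets.')
flags.DEFINE_string('identity_specific_dataset_dir', None,
                    'Path to identity-label-specific datasets.')
flags.DEFINE_string('identity_type_dataset_dir', None,
                    'Path to identity-type-specific datasets.')
flags.DEFINE_string('bert_model_type', 'base',
                    'BERT model type, e.g. base or large.')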
FLAGS = flags.FLAGS
_MAX_SEQ_LENGTH = 512
def main(argv):
del argv # unused arg
tf.io.gfile.makedirs(FLAGS.output_dir)
logging.info('Model checkpoint will be saved at %s', FLAGS.output_dir)
tf.random.set_seed(FLAGS.seed)
if FLAGS.use_gpu:
logging.info('Use GPU')
strategy = tf.distribute.MirroredStrategy()
else:
logging.info('Use TPU at %s',
FLAGS.tpu if FLAGS.tpu is not None else 'local')
resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu=FLAGS.tpu)
tf.config.experimental_connect_to_cluster(resolver)
tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.distribute.TPUStrategy(resolver)
batch_size = FLAGS.per_core_batch_size * FLAGS.num_cores
test_batch_size = batch_size
data_buffer_size = batch_size * 10
train_dataset_builder = ds.WikipediaToxicityDataset(
split='train',
data_dir=FLAGS.in_dataset_dir,
shuffle_buffer_size=data_buffer_size)
ind_dataset_builder = ds.WikipediaToxicityDataset(
split='test',
data_dir=FLAGS.in_dataset_dir,
shuffle_buffer_size=data_buffer_size)
ood_dataset_builder = ds.CivilCommentsDataset(
split='test',
data_dir=FLAGS.ood_dataset_dir,
shuffle_buffer_size=data_buffer_size)
ood_identity_dataset_builder = ds.CivilCommentsIdentitiesDataset(
split='test',
data_dir=FLAGS.identity_dataset_dir,
shuffle_buffer_size=data_buffer_size)
train_dataset_builders = {
'wikipedia_toxicity_subtypes': train_dataset_builder
}
test_dataset_builders = {
'ind': ind_dataset_builder,
'ood': ood_dataset_builder,
'ood_identity': ood_identity_dataset_builder,
}
if FLAGS.prediction_mode and FLAGS.identity_prediction:
for dataset_name in utils.IDENTITY_LABELS:
if utils.NUM_EXAMPLES[dataset_name]['test'] > 100:
test_dataset_builders[dataset_name] = ds.CivilCommentsIdentitiesDataset(
split='test',
data_dir=os.path.join(
FLAGS.identity_specific_dataset_dir, dataset_name),
shuffle_buffer_size=data_buffer_size)
for dataset_name in utils.IDENTITY_TYPES:
if utils.NUM_EXAMPLES[dataset_name]['test'] > 100:
test_dataset_builders[dataset_name] = ds.CivilCommentsIdentitiesDataset(
split='test',
data_dir=os.path.join(
FLAGS.identity_type_dataset_dir, dataset_name),
shuffle_buffer_size=data_buffer_size)
class_weight = utils.create_class_weight(
train_dataset_builders, test_dataset_builders)
logging.info('class_weight: %s', str(class_weight))
ds_info = train_dataset_builder.tfds_info
# Positive and negative classes.
num_classes = ds_info.metadata['num_classes']
train_datasets = {}
dataset_steps_per_epoch = {}
total_steps_per_epoch = 0
# TODO(jereliu): Apply strategy.experimental_distribute_dataset to the
# dataset_builders.
for dataset_name, dataset_builder in train_dataset_builders.items():
train_datasets[dataset_name] = dataset_builder.load(
batch_size=FLAGS.per_core_batch_size)
dataset_steps_per_epoch[dataset_name] = (
dataset_builder.num_examples // batch_size)
total_steps_per_epoch += dataset_steps_per_epoch[dataset_name]
test_datasets = {}
steps_per_eval = {}
for dataset_name, dataset_builder in test_dataset_builders.items():
test_datasets[dataset_name] = dataset_builder.load(
batch_size=test_batch_size)
if dataset_name in ['ind', 'ood', 'ood_identity']:
steps_per_eval[dataset_name] = (
dataset_builder.num_examples // test_batch_size)
else:
steps_per_eval[dataset_name] = (
utils.NUM_EXAMPLES[dataset_name]['test'] // test_batch_size)
if FLAGS.use_bfloat16:
policy = tf.keras.mixed_precision.experimental.Policy('mixed_bfloat16')
tf.keras.mixed_precision.experimental.set_policy(policy)
summary_writer = tf.summary.create_file_writer(
os.path.join(FLAGS.output_dir, 'summaries'))
with strategy.scope():
logging.info('Building %s model', FLAGS.model_family)
bert_config_dir, bert_ckpt_dir = utils.resolve_bert_ckpt_and_config_dir(
FLAGS.bert_model_type, FLAGS.bert_dir, FLAGS.bert_config_dir,
FLAGS.bert_ckpt_dir)
bert_config = utils.create_config(bert_config_dir)
bert_config.hidden_dropout_prob = FLAGS.dropout_rate
bert_config.attention_probs_dropout_prob = FLAGS.dropout_rate
model, bert_encoder = ub.models.bert_dropout_model(
num_classes=num_classes,
bert_config=bert_config,
use_mc_dropout_mha=FLAGS.use_mc_dropout_mha,
use_mc_dropout_att=FLAGS.use_mc_dropout_att,
use_mc_dropout_ffn=FLAGS.use_mc_dropout_ffn,
use_mc_dropout_output=FLAGS.use_mc_dropout_output,
channel_wise_dropout_mha=FLAGS.channel_wise_dropout_mha,
channel_wise_dropout_att=FLAGS.channel_wise_dropout_att,
channel_wise_dropout_ffn=FLAGS.channel_wise_dropout_ffn)
# Create an AdamW optimizer with beta_2=0.999, epsilon=1e-6.
optimizer = utils.create_optimizer(
FLAGS.base_learning_rate,
steps_per_epoch=total_steps_per_epoch,
epochs=FLAGS.train_epochs,
warmup_proportion=FLAGS.warmup_proportion,
beta_1=1.0 - FLAGS.one_minus_momentum)
logging.info('Model input shape: %s', model.input_shape)
logging.info('Model output shape: %s', model.output_shape)
logging.info('Model number of weights: %s', model.count_params())
metrics = {
'train/negative_log_likelihood':
tf.keras.metrics.Mean(),
'train/accuracy':
tf.keras.metrics.Accuracy(),
'train/accuracy_weighted':
tf.keras.metrics.Accuracy(),
'train/auroc':
tf.keras.metrics.AUC(),
'train/loss':
tf.keras.metrics.Mean(),
'train/ece':
rm.metrics.ExpectedCalibrationError(num_bins=FLAGS.num_ece_bins),
'train/precision':
tf.keras.metrics.Precision(),
'train/recall':
tf.keras.metrics.Recall(),
'train/f1':
tfa_metrics.F1Score(
num_classes=num_classes,
average='micro',
threshold=FLAGS.ece_label_threshold),
}
checkpoint = tf.train.Checkpoint(model=model, optimizer=optimizer)
if FLAGS.prediction_mode:
latest_checkpoint = tf.train.latest_checkpoint(FLAGS.eval_checkpoint_dir)
else:
latest_checkpoint = tf.train.latest_checkpoint(FLAGS.output_dir)
initial_epoch = 0
if latest_checkpoint:
# checkpoint.restore must be within a strategy.scope() so that optimizer
# slot variables are mirrored.
checkpoint.restore(latest_checkpoint)
logging.info('Loaded checkpoint %s', latest_checkpoint)
initial_epoch = optimizer.iterations.numpy() // total_steps_per_epoch
elif FLAGS.model_family.lower() == 'bert':
# load BERT from initial checkpoint
bert_checkpoint = tf.train.Checkpoint(model=bert_encoder)
bert_checkpoint.restore(bert_ckpt_dir).assert_existing_objects_matched()
logging.info('Loaded BERT checkpoint %s', bert_ckpt_dir)
metrics.update({
'test/negative_log_likelihood':
tf.keras.metrics.Mean(),
'test/auroc':
tf.keras.metrics.AUC(curve='ROC'),
'test/aupr':
tf.keras.metrics.AUC(curve='PR'),
'test/brier':
tf.keras.metrics.MeanSquaredError(),
'test/brier_weighted':
tf.keras.metrics.MeanSquaredError(),
'test/ece':
rm.metrics.ExpectedCalibrationError(num_bins=FLAGS.num_ece_bins),
'test/acc':
tf.keras.metrics.Accuracy(),
'test/acc_weighted':
tf.keras.metrics.Accuracy(),
'test/eval_time':
tf.keras.metrics.Mean(),
'test/precision':
tf.keras.metrics.Precision(),
'test/recall':
tf.keras.metrics.Recall(),
'test/f1':
tfa_metrics.F1Score(
num_classes=num_classes,
average='micro',
threshold=FLAGS.ece_label_threshold)
})
for policy in ('uncertainty', 'toxicity'):
metrics.update({
'test_{}/calibration_auroc'.format(policy):
tc_metrics.CalibrationAUC(curve='ROC'),
'test_{}/calibration_auprc'.format(policy):
tc_metrics.CalibrationAUC(curve='PR')
})
for fraction in FLAGS.fractions:
metrics.update({
'test_{}/collab_acc_{}'.format(policy, fraction):
rm.metrics.OracleCollaborativeAccuracy(
fraction=float(fraction), num_bins=FLAGS.num_approx_bins),
'test_{}/abstain_prec_{}'.format(policy, fraction):
tc_metrics.AbstainPrecision(
abstain_fraction=float(fraction),
num_approx_bins=FLAGS.num_approx_bins),
'test_{}/abstain_recall_{}'.format(policy, fraction):
tc_metrics.AbstainRecall(
abstain_fraction=float(fraction),
num_approx_bins=FLAGS.num_approx_bins),
'test_{}/collab_auroc_{}'.format(policy, fraction):
tc_metrics.OracleCollaborativeAUC(
oracle_fraction=float(fraction),
num_bins=FLAGS.num_approx_bins),
'test_{}/collab_auprc_{}'.format(policy, fraction):
tc_metrics.OracleCollaborativeAUC(
oracle_fraction=float(fraction),
curve='PR',
num_bins=FLAGS.num_approx_bins),
})
for dataset_name, test_dataset in test_datasets.items():
if dataset_name != 'ind':
metrics.update({
'test/nll_{}'.format(dataset_name):
tf.keras.metrics.Mean(),
'test/auroc_{}'.format(dataset_name):
tf.keras.metrics.AUC(curve='ROC'),
'test/aupr_{}'.format(dataset_name):
tf.keras.metrics.AUC(curve='PR'),
'test/brier_{}'.format(dataset_name):
tf.keras.metrics.MeanSquaredError(),
'test/brier_weighted_{}'.format(dataset_name):
tf.keras.metrics.MeanSquaredError(),
'test/ece_{}'.format(dataset_name):
rm.metrics.ExpectedCalibrationError(num_bins=FLAGS.num_ece_bins
),
'test/acc_{}'.format(dataset_name):
tf.keras.metrics.Accuracy(),
'test/acc_weighted_{}'.format(dataset_name):
tf.keras.metrics.Accuracy(),
'test/eval_time_{}'.format(dataset_name):
tf.keras.metrics.Mean(),
'test/precision_{}'.format(dataset_name):
tf.keras.metrics.Precision(),
'test/recall_{}'.format(dataset_name):
tf.keras.metrics.Recall(),
'test/f1_{}'.format(dataset_name):
tfa_metrics.F1Score(
num_classes=num_classes,
average='micro',
threshold=FLAGS.ece_label_threshold)
})
for policy in ('uncertainty', 'toxicity'):
metrics.update({
'test_{}/calibration_auroc_{}'.format(policy, dataset_name):
tc_metrics.CalibrationAUC(curve='ROC'),
'test_{}/calibration_auprc_{}'.format(policy, dataset_name):
tc_metrics.CalibrationAUC(curve='PR'),
})
for fraction in FLAGS.fractions:
metrics.update({
'test_{}/collab_acc_{}_{}'.format(policy, fraction,
dataset_name):
rm.metrics.OracleCollaborativeAccuracy(
fraction=float(fraction),
num_bins=FLAGS.num_approx_bins),
'test_{}/abstain_prec_{}_{}'.format(policy, fraction,
dataset_name):
tc_metrics.AbstainPrecision(
abstain_fraction=float(fraction),
num_approx_bins=FLAGS.num_approx_bins),
'test_{}/abstain_recall_{}_{}'.format(policy, fraction,
dataset_name):
tc_metrics.AbstainRecall(
abstain_fraction=float(fraction),
num_approx_bins=FLAGS.num_approx_bins),
'test_{}/collab_auroc_{}_{}'.format(policy, fraction,
dataset_name):
tc_metrics.OracleCollaborativeAUC(
oracle_fraction=float(fraction),
num_bins=FLAGS.num_approx_bins),
'test_{}/collab_auprc_{}_{}'.format(policy, fraction,
dataset_name):
tc_metrics.OracleCollaborativeAUC(
oracle_fraction=float(fraction),
curve='PR',
num_bins=FLAGS.num_approx_bins),
})
@tf.function
def generate_sample_weight(labels, class_weight, label_threshold=0.7):
"""Generate sample weight for weighted accuracy calculation."""
if label_threshold != 0.7:
      logging.warning('The class weights were computed for `label_threshold` '
                      '= 0.7 (the value recommended by the Jigsaw Conversation '
                      'AI team); weighted accuracy/Brier scores will be '
                      'meaningless for any other threshold.')
labels_int = tf.cast(labels > label_threshold, tf.int32)
sample_weight = tf.gather(class_weight, labels_int)
return sample_weight
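  # Illustration (editor's addition): with class_weight = tf.constant(
  # [0.5, 4.0]) and labels = [0.2, 0.9], labels_int is [0, 1], so the
  # returned per-example weights are [0.5, 4.0].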
@tf.function
def train_step(iterator, dataset_name, num_steps):
"""Training StepFn."""
def step_fn(inputs):
"""Per-Replica StepFn."""
features, labels, _ = utils.create_feature_and_label(inputs)
with tf.GradientTape() as tape:
logits = model(features, training=True)
if FLAGS.use_bfloat16:
logits = tf.cast(logits, tf.float32)
loss_logits = tf.squeeze(logits, axis=1)
if FLAGS.loss_type == 'cross_entropy':
logging.info('Using cross entropy loss')
negative_log_likelihood = tf.nn.sigmoid_cross_entropy_with_logits(
labels, loss_logits)
elif FLAGS.loss_type == 'focal_cross_entropy':
logging.info('Using focal cross entropy loss')
negative_log_likelihood = tfa_losses.sigmoid_focal_crossentropy(
labels, loss_logits,
alpha=FLAGS.focal_loss_alpha, gamma=FLAGS.focal_loss_gamma,
from_logits=True)
elif FLAGS.loss_type == 'mse':
logging.info('Using mean squared error loss')
loss_probs = tf.nn.sigmoid(loss_logits)
negative_log_likelihood = tf.keras.losses.mean_squared_error(
labels, loss_probs)
elif FLAGS.loss_type == 'mae':
logging.info('Using mean absolute error loss')
loss_probs = tf.nn.sigmoid(loss_logits)
negative_log_likelihood = tf.keras.losses.mean_absolute_error(
labels, loss_probs)
negative_log_likelihood = tf.reduce_mean(negative_log_likelihood)
l2_loss = sum(model.losses)
loss = negative_log_likelihood + l2_loss
# Scale the loss given the TPUStrategy will reduce sum all gradients.
scaled_loss = loss / strategy.num_replicas_in_sync
grads = tape.gradient(scaled_loss, model.trainable_variables)
optimizer.apply_gradients(zip(grads, model.trainable_variables))
probs = tf.nn.sigmoid(logits)
# Cast labels to discrete for ECE computation.
ece_labels = tf.cast(labels > FLAGS.ece_label_threshold, tf.float32)
one_hot_labels = tf.one_hot(tf.cast(ece_labels, tf.int32),
depth=num_classes)
ece_probs = tf.concat([1. - probs, probs], axis=1)
auc_probs = tf.squeeze(probs, axis=1)
pred_labels = tf.math.argmax(ece_probs, axis=-1)
sample_weight = generate_sample_weight(
labels, class_weight['train/{}'.format(dataset_name)],
FLAGS.ece_label_threshold)
metrics['train/negative_log_likelihood'].update_state(
negative_log_likelihood)
metrics['train/accuracy'].update_state(labels, pred_labels)
metrics['train/accuracy_weighted'].update_state(
ece_labels, pred_labels, sample_weight=sample_weight)
metrics['train/auroc'].update_state(labels, auc_probs)
metrics['train/loss'].update_state(loss)
metrics['train/ece'].add_batch(ece_probs, label=ece_labels)
metrics['train/precision'].update_state(ece_labels, pred_labels)
metrics['train/recall'].update_state(ece_labels, pred_labels)
metrics['train/f1'].update_state(one_hot_labels, ece_probs)
for _ in tf.range(tf.cast(num_steps, tf.int32)):
strategy.run(step_fn, args=(next(iterator),))
@tf.function
def test_step(iterator, dataset_name):
"""Evaluation StepFn to log metrics."""
def step_fn(inputs):
"""Per-Replica StepFn."""
features, labels, _ = utils.create_feature_and_label(inputs)
eval_start_time = time.time()
logits = model(features, training=False)
eval_time = (time.time() - eval_start_time) / FLAGS.per_core_batch_size
if FLAGS.use_bfloat16:
logits = tf.cast(logits, tf.float32)
probs = tf.nn.sigmoid(logits)
# Cast labels to discrete for ECE computation.
ece_labels = tf.cast(labels > FLAGS.ece_label_threshold, tf.float32)
one_hot_labels = tf.one_hot(tf.cast(ece_labels, tf.int32),
depth=num_classes)
ece_probs = tf.concat([1. - probs, probs], axis=1)
pred_labels = tf.math.argmax(ece_probs, axis=-1)
auc_probs = tf.squeeze(probs, axis=1)
loss_logits = tf.squeeze(logits, axis=1)
negative_log_likelihood = tf.reduce_mean(
tf.nn.sigmoid_cross_entropy_with_logits(labels, loss_logits))
# Use normalized binary predictive variance as the confidence score.
# Since the prediction variance p*(1-p) is within range (0, 0.25),
# normalize it by maximum value so the confidence is between (0, 1).
calib_confidence = 1. - probs * (1. - probs) / .25
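      # Sanity check: at p = 0.5 (maximal predictive variance) this
      # confidence is 0.0, while at p = 0 or p = 1 it is 1.0, so the score
      # spans (0, 1) as the comment above describes.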
sample_weight = generate_sample_weight(
labels, class_weight['test/{}'.format(dataset_name)],
FLAGS.ece_label_threshold)
if dataset_name == 'ind':
metrics['test/negative_log_likelihood'].update_state(
negative_log_likelihood)
metrics['test/auroc'].update_state(labels, auc_probs)
metrics['test/aupr'].update_state(labels, auc_probs)
metrics['test/brier'].update_state(labels, auc_probs)
metrics['test/brier_weighted'].update_state(
tf.expand_dims(labels, -1), probs, sample_weight=sample_weight)
metrics['test/ece'].add_batch(ece_probs, label=ece_labels)
metrics['test/acc'].update_state(ece_labels, pred_labels)
metrics['test/acc_weighted'].update_state(
ece_labels, pred_labels, sample_weight=sample_weight)
metrics['test/eval_time'].update_state(eval_time)
metrics['test/precision'].update_state(ece_labels, pred_labels)
metrics['test/recall'].update_state(ece_labels, pred_labels)
metrics['test/f1'].update_state(one_hot_labels, ece_probs)
for policy in ('uncertainty', 'toxicity'):
# calib_confidence or decreasing toxicity score.
confidence = 1. - probs if policy == 'toxicity' else calib_confidence
binning_confidence = tf.squeeze(confidence)
metrics['test_{}/calibration_auroc'.format(policy)].update_state(
ece_labels, pred_labels, confidence)
metrics['test_{}/calibration_auprc'.format(policy)].update_state(
ece_labels, pred_labels, confidence)
for fraction in FLAGS.fractions:
metrics['test_{}/collab_acc_{}'.format(policy, fraction)].add_batch(
ece_probs,
label=ece_labels,
custom_binning_score=binning_confidence)
metrics['test_{}/abstain_prec_{}'.format(
policy, fraction)].update_state(ece_labels, pred_labels,
confidence)
metrics['test_{}/abstain_recall_{}'.format(
policy, fraction)].update_state(ece_labels, pred_labels,
confidence)
metrics['test_{}/collab_auroc_{}'.format(
policy, fraction)].update_state(
labels, auc_probs, custom_binning_score=binning_confidence)
metrics['test_{}/collab_auprc_{}'.format(
policy, fraction)].update_state(
labels, auc_probs, custom_binning_score=binning_confidence)
else:
metrics['test/nll_{}'.format(dataset_name)].update_state(
negative_log_likelihood)
metrics['test/auroc_{}'.format(dataset_name)].update_state(
labels, auc_probs)
metrics['test/aupr_{}'.format(dataset_name)].update_state(
labels, auc_probs)
metrics['test/brier_{}'.format(dataset_name)].update_state(
labels, auc_probs)
metrics['test/brier_weighted_{}'.format(dataset_name)].update_state(
tf.expand_dims(labels, -1), probs, sample_weight=sample_weight)
metrics['test/ece_{}'.format(dataset_name)].add_batch(
ece_probs, label=ece_labels)
metrics['test/acc_{}'.format(dataset_name)].update_state(
ece_labels, pred_labels)
metrics['test/acc_weighted_{}'.format(dataset_name)].update_state(
ece_labels, pred_labels, sample_weight=sample_weight)
metrics['test/eval_time_{}'.format(dataset_name)].update_state(
eval_time)
metrics['test/precision_{}'.format(dataset_name)].update_state(
ece_labels, pred_labels)
metrics['test/recall_{}'.format(dataset_name)].update_state(
ece_labels, pred_labels)
metrics['test/f1_{}'.format(dataset_name)].update_state(
one_hot_labels, ece_probs)
for policy in ('uncertainty', 'toxicity'):
# calib_confidence or decreasing toxicity score.
confidence = 1. - probs if policy == 'toxicity' else calib_confidence
binning_confidence = tf.squeeze(confidence)
metrics['test_{}/calibration_auroc_{}'.format(
policy, dataset_name)].update_state(ece_labels, pred_labels,
confidence)
metrics['test_{}/calibration_auprc_{}'.format(
policy, dataset_name)].update_state(ece_labels, pred_labels,
confidence)
for fraction in FLAGS.fractions:
metrics['test_{}/collab_acc_{}_{}'.format(
policy, fraction, dataset_name)].add_batch(
ece_probs,
label=ece_labels,
custom_binning_score=binning_confidence)
metrics['test_{}/abstain_prec_{}_{}'.format(
policy, fraction,
dataset_name)].update_state(ece_labels, pred_labels, confidence)
metrics['test_{}/abstain_recall_{}_{}'.format(
policy, fraction,
dataset_name)].update_state(ece_labels, pred_labels, confidence)
metrics['test_{}/collab_auroc_{}_{}'.format(
policy, fraction, dataset_name)].update_state(
labels, auc_probs, custom_binning_score=binning_confidence)
metrics['test_{}/collab_auprc_{}_{}'.format(
policy, fraction, dataset_name)].update_state(
labels, auc_probs, custom_binning_score=binning_confidence)
strategy.run(step_fn, args=(next(iterator),))
@tf.function
def final_eval_step(iterator):
"""Final Evaluation StepFn to save prediction to directory."""
def step_fn(inputs):
bert_features, labels, additional_labels = utils.create_feature_and_label(
inputs)
logits = model(bert_features, training=False)
features = inputs['input_ids']
return features, logits, labels, additional_labels
(per_replica_texts, per_replica_logits, per_replica_labels,
per_replica_additional_labels) = (
strategy.run(step_fn, args=(next(iterator),)))
if strategy.num_replicas_in_sync > 1:
texts_list = tf.concat(per_replica_texts.values, axis=0)
logits_list = tf.concat(per_replica_logits.values, axis=0)
labels_list = tf.concat(per_replica_labels.values, axis=0)
additional_labels_dict = {}
for additional_label in utils.IDENTITY_LABELS:
if additional_label in per_replica_additional_labels:
additional_labels_dict[additional_label] = tf.concat(
per_replica_additional_labels[additional_label], axis=0)
else:
texts_list = per_replica_texts
logits_list = per_replica_logits
labels_list = per_replica_labels
additional_labels_dict = {}
for additional_label in utils.IDENTITY_LABELS:
if additional_label in per_replica_additional_labels:
additional_labels_dict[
additional_label] = per_replica_additional_labels[
additional_label]
return texts_list, logits_list, labels_list, additional_labels_dict
if FLAGS.prediction_mode:
# Prediction and exit.
for dataset_name, test_dataset in test_datasets.items():
test_iterator = iter(test_dataset) # pytype: disable=wrong-arg-types
message = 'Final eval on dataset {}'.format(dataset_name)
logging.info(message)
texts_all = []
logits_all = []
labels_all = []
additional_labels_all_dict = {}
if 'identity' in dataset_name:
for identity_label_name in utils.IDENTITY_LABELS:
additional_labels_all_dict[identity_label_name] = []
try:
with tf.experimental.async_scope():
for step in range(steps_per_eval[dataset_name]):
if step % 20 == 0:
message = 'Starting to run eval step {}/{} of dataset: {}'.format(
step, steps_per_eval[dataset_name], dataset_name)
logging.info(message)
(text_step, logits_step, labels_step,
additional_labels_dict_step) = final_eval_step(test_iterator)
texts_all.append(text_step)
logits_all.append(logits_step)
labels_all.append(labels_step)
if 'identity' in dataset_name:
for identity_label_name in utils.IDENTITY_LABELS:
additional_labels_all_dict[identity_label_name].append(
additional_labels_dict_step[identity_label_name])
except (StopIteration, tf.errors.OutOfRangeError):
tf.experimental.async_clear_error()
logging.info('Done with eval on %s', dataset_name)
texts_all = tf.concat(texts_all, axis=0)
logits_all = tf.concat(logits_all, axis=0)
labels_all = tf.concat(labels_all, axis=0)
additional_labels_all = []
if additional_labels_all_dict:
for identity_label_name in utils.IDENTITY_LABELS:
additional_labels_all.append(
tf.concat(
additional_labels_all_dict[identity_label_name], axis=0))
additional_labels_all = tf.convert_to_tensor(additional_labels_all)
utils.save_prediction(
texts_all.numpy(),
path=os.path.join(FLAGS.output_dir, 'texts_{}'.format(dataset_name)))
utils.save_prediction(
labels_all.numpy(),
path=os.path.join(FLAGS.output_dir, 'labels_{}'.format(dataset_name)))
utils.save_prediction(
logits_all.numpy(),
path=os.path.join(FLAGS.output_dir, 'logits_{}'.format(dataset_name)))
if 'identity' in dataset_name:
utils.save_prediction(
additional_labels_all.numpy(),
path=os.path.join(FLAGS.output_dir,
'additional_labels_{}'.format(dataset_name)))
logging.info('Done with testing on %s', dataset_name)
else:
# Execute train / eval loop.
start_time = time.time()
train_iterators = {}
for dataset_name, train_dataset in train_datasets.items():
train_iterators[dataset_name] = iter(train_dataset)
for epoch in range(initial_epoch, FLAGS.train_epochs):
logging.info('Starting to run epoch: %s', epoch)
for dataset_name, train_iterator in train_iterators.items():
train_step(
train_iterator, dataset_name, dataset_steps_per_epoch[dataset_name])
current_step = (
epoch * total_steps_per_epoch +
dataset_steps_per_epoch[dataset_name])
max_steps = total_steps_per_epoch * FLAGS.train_epochs
time_elapsed = time.time() - start_time
steps_per_sec = float(current_step) / time_elapsed
eta_seconds = (max_steps - current_step) / steps_per_sec
message = ('{:.1%} completion: epoch {:d}/{:d}. {:.1f} steps/s. '
'ETA: {:.0f} min. Time elapsed: {:.0f} min'.format(
current_step / max_steps, epoch + 1,
FLAGS.train_epochs, steps_per_sec, eta_seconds / 60,
time_elapsed / 60))
logging.info(message)
if epoch % FLAGS.evaluation_interval == 0:
for dataset_name, test_dataset in test_datasets.items():
test_iterator = iter(test_dataset) # pytype: disable=wrong-arg-types
logging.info('Testing on dataset %s', dataset_name)
try:
with tf.experimental.async_scope():
for step in range(steps_per_eval[dataset_name]):
if step % 20 == 0:
logging.info('Starting to run eval step %s/%s of epoch: %s',
step, steps_per_eval[dataset_name], epoch)
test_step(test_iterator, dataset_name)
except (StopIteration, tf.errors.OutOfRangeError):
tf.experimental.async_clear_error()
logging.info('Done with testing on %s', dataset_name)
logging.info('Train Loss: %.4f, AUROC: %.4f',
metrics['train/loss'].result(),
metrics['train/auroc'].result())
logging.info('Test NLL: %.4f, AUROC: %.4f',
metrics['test/negative_log_likelihood'].result(),
metrics['test/auroc'].result())
# record results
total_results = {
name: metric.result() for name, metric in metrics.items()
}
# Metrics from Robustness Metrics (like ECE) will return a dict with a
# single key/value, instead of a scalar.
total_results = {
k: (list(v.values())[0] if isinstance(v, dict) else v)
for k, v in total_results.items()
}
with summary_writer.as_default():
for name, result in total_results.items():
tf.summary.scalar(name, result, step=epoch + 1)
for name, metric in metrics.items():
metric.reset_states()
checkpoint_interval = min(FLAGS.checkpoint_interval, FLAGS.train_epochs)
if checkpoint_interval > 0 and (epoch + 1) % checkpoint_interval == 0:
checkpoint_name = checkpoint.save(
os.path.join(FLAGS.output_dir, 'checkpoint'))
logging.info('Saved checkpoint to %s', checkpoint_name)
# Save model in SavedModel format on exit.
final_save_name = os.path.join(FLAGS.output_dir, 'model')
model.save(final_save_name)
logging.info('Saved model to %s', final_save_name)
with summary_writer.as_default():
hp.hparams({
'base_learning_rate': FLAGS.base_learning_rate,
'one_minus_momentum': FLAGS.one_minus_momentum,
'dropout_rate': FLAGS.dropout_rate,
})
if __name__ == '__main__':
app.run(main)
| apache-2.0 | -8,853,672,654,614,823,000 | 42.489703 | 80 | 0.615891 | false |
jepler/linuxcnc-mirror | configs/by_machine/plasmac/pmx485.py | 4 | 9722 | #!/usr/bin/env python2
'''
pmx485.py
Copyright (C) 2019 2020 Phillip A Carter
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
'''
import sys
import hal
import time
import serial
address = '01'
regRead = '04'
regWrite = '06'
rCurrent = '2094'
rCurrentMax = '209A'
rCurrentMin = '2099'
rFault = '2098'
rMode = '2093'
rPressure = '2096'
rPressureMax = '209D'
rPressureMin = '209C'
validRead = '0402'
started = False
errorCount = 0
# create pmx485 component
pmx485 = hal.component('pmx485')
pmx485.newpin('mode_set', hal.HAL_FLOAT, hal.HAL_IN) #set cutting mode
pmx485.newpin('current_set', hal.HAL_FLOAT, hal.HAL_IN) #set cutting current
pmx485.newpin('pressure_set', hal.HAL_FLOAT, hal.HAL_IN) #set gas pressure
pmx485.newpin('enable', hal.HAL_BIT, hal.HAL_IN) #enabler
pmx485.newpin('mode', hal.HAL_FLOAT, hal.HAL_OUT) #cut mode feedback
pmx485.newpin('current', hal.HAL_FLOAT, hal.HAL_OUT) #cutting current feedback
pmx485.newpin('pressure', hal.HAL_FLOAT, hal.HAL_OUT) #gas pressure feedback
pmx485.newpin('fault', hal.HAL_FLOAT, hal.HAL_OUT) #fault code
pmx485.newpin('status', hal.HAL_BIT, hal.HAL_OUT) #connection status out
pmx485.newpin('current_min', hal.HAL_FLOAT, hal.HAL_OUT) #minimum allowed current
pmx485.newpin('current_max', hal.HAL_FLOAT, hal.HAL_OUT) #maximum allowed current
pmx485.newpin('pressure_min', hal.HAL_FLOAT, hal.HAL_OUT) #minimum allowed gas pressure
pmx485.newpin('pressure_max', hal.HAL_FLOAT, hal.HAL_OUT) #maximum allowed gas pressure
pmx485.ready()
enabled = pmx485.enable
# connection setup
comPort = sys.argv[1]
try:
comms = serial.Serial(comPort,
baudrate = 19200,
bytesize = 8,
parity = 'E',
stopbits = 1,
timeout = 0.1
)
except:
    print('\nCould not open {} for Powermax communications\n'.format(comPort))
raise SystemExit
# get the checksum
def get_lrc(data):
lrc = 0
for i in xrange(0, len(data), 2):
a, b = data[i:i+2]
try:
lrc = (lrc + int(a + b, 16)) & 255
except:
return '00'
lrc = ('{:02X}'.format((((lrc ^ 255) + 1) & 255))).upper()
return lrc
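# Worked example: for data '0104209C0001' the byte sum is
# 0x01 + 0x04 + 0x20 + 0x9C + 0x00 + 0x01 = 0xC2, and the LRC is its two's
# complement, ((0xC2 ^ 0xFF) + 1) & 0xFF = 0x3E, so get_lrc('0104209C0001')
# returns '3E'.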
# write data to register
def write_register(reg, value):
data = '{}{}{}{}'.format(address, regWrite, reg, value)
if len(data) == 12:
lrc = get_lrc(data)
packet = ':{}{}\r\n'.format(data, lrc)
reply = ''
comms.write(packet)
reply = comms.readline()
if reply:
if reply == packet:
return 1
return 0
# read data from register
def read_register(reg):
data = '{}{}{}0001'.format(address, regRead, reg)
if len(data) == 12:
lrc = get_lrc(data)
packet = ':{}{}\r\n'.format(data, lrc)
reply = ''
comms.write(packet)
reply = comms.readline()
if reply:
if len(reply) == 15 and reply[:7] == ':{}{}'.format(address, validRead):
lrc = get_lrc(reply[1:11])
if lrc == reply[11:13]:
return reply[7:11]
return 0
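# Example frame: read_register(rMode) writes ':01042093000147\r\n'
# (address 01, function 04, register 2093, count 0001, LRC 47) and expects a
# 15-character reply of the form ':010402<DATA><LRC>\r\n', returning the four
# hex DATA characters.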
# set machine to local mode
def close_machine():
mode = write_register(rMode, '{:04X}'.format(0))
current = write_register(rCurrent, '{:04X}'.format(0))
pressure = write_register(rPressure, '{:04X}'.format(0))
# set machine to remote mode
def open_machine():
# set mode
mode = write_register(rMode, '{:04X}'.format(int(pmx485.mode_set)))
# set current
current = write_register(rCurrent, '{:04X}'.format(int(pmx485.current_set * 64.0)))
# set pressure
pressure = write_register(rPressure, '{:04X}'.format(int(pmx485.pressure_set * 128.0)))
if mode and current and pressure:
return True
else:
return False
# get settings limits
def get_limits():
# get minimum current setting
cMin = read_register(rCurrentMin)
if cMin:
pmx485.current_min = round(int(cMin, 16) / 64.0, 1)
# get maximum current setting
cMax = read_register(rCurrentMax)
if cMax:
pmx485.current_max = round(int(cMax, 16) / 64.0, 1)
# get minimum pressure setting
pMin = read_register(rPressureMin)
if pMin:
pmx485.pressure_min = round(int(pMin, 16) / 128.0, 1)
# get maximum pressure setting
pMax = read_register(rPressureMax)
if pMax:
pmx485.pressure_max = round(int(pMax, 16) / 128.0, 1)
if cMin and cMax and pMin and pMax:
# debugging
# print('\nPowermax Settings:')
# print(' Mode Force = {}'.format(int(pmx485.mode_set)))
# print(' Current Force = {}'.format(int(pmx485.current_set)))
# print('Pressure Force = {}'.format(int(pmx485.pressure_set)))
# print(' Current Min = {}'.format(pmx485.current_min))
# print(' Current Max = {}'.format(pmx485.current_max))
# print(' Pressure Min = {}'.format(pmx485.pressure_min))
# print(' Pressure Max = {}\n'.format(pmx485.pressure_max))
return True
else:
return False
# main loop
try:
    while 1:
        # Nothing runs until the plasmac_run GUI component exists. Note that
        # `time` is imported but unused: when the component is absent this
        # loop busy-waits with no delay, and is otherwise throttled only by
        # the 0.1 s serial read timeout once communications are enabled.
        if hal.component_exists('plasmac_run'):
if enabled != pmx485.enable:
enabled = pmx485.enable
if not enabled:
close_machine()
comms.close()
pmx485.status = False
started = False
if enabled:
if not started:
if not comms.isOpen():
comms.open()
if open_machine():
started = True
if started and get_limits():
started = True
else:
started = False
else:
# set mode
if pmx485.mode_set != pmx485.mode:
mode = write_register(rMode, '{:04X}'.format(int(pmx485.mode_set)))
if mode:
pmx485.mode = pmx485.mode_set
get_limits()
# get mode
else:
mode = read_register(rMode)
if mode:
pmx485.mode = int(mode, 16)
# set current
if pmx485.current_set != round(pmx485.current, 1):
current = write_register(rCurrent, '{:04X}'.format(int(pmx485.current_set * 64)))
if current:
pmx485.current = pmx485.current_set
# get current
else:
current = read_register(rCurrent)
if current:
pmx485.current = round(int(current, 16) / 64.0, 1)
# set pressure
if pmx485.pressure_set != round(pmx485.pressure, 1):
pressure = write_register(rPressure, '{:04X}'.format(int(pmx485.pressure_set * 128)))
if pressure:
pmx485.pressure = pmx485.pressure_set
# get pressure
else:
pressure = read_register(rPressure)
if pressure:
pmx485.pressure = round(int(pressure, 16) / 128.0, 1)
# get fault code
fault = read_register(rFault)
if fault:
pmx485.fault = int(fault, 16)
# set status
if mode and current and pressure and fault:
pmx485.status = True
errorCount = 0
else:
errorCount += 1
# debugging
# print('\nPMX485 STATUS ERROR #{}'.format(errorCount))
# if not mode:
# print(' Mode: set={:5.1f} get={:5.1f}'.format(pmx485.mode_set, pmx485.mode))
# if not current:
# print(' Current: set={:5.1f} get={:5.1f}'.format(pmx485.current_set, pmx485.current))
# if not pressure:
# print('Pressure: set={:5.1f} get={:5.1f}'.format(pmx485.pressure_set, pmx485.pressure))
# if not fault:
# print(' Fault: get={:5.1f}'.format(pmx485.fault))
if errorCount > 2:
print('Closing pmx485.py, error count exceeded')
errorCount = 0
comms.close()
pmx485.status = False
started = False
except:
print('Shutting down pmx485 communications')
if started:
if not comms.isOpen():
comms.open()
close_machine()
comms.close()
| lgpl-2.1 | -3,219,997,986,610,003,500 | 37.275591 | 119 | 0.528595 | false |
honzajavorek/python.cz | pythoncz/models/meetups.py | 1 | 2352 | from functools import lru_cache
from lxml import html
import requests
from slugify import slugify
__all__ = ('get_meetups',)
WIKI_URL = ('https://cs.wikipedia.org/wiki/'
'Seznam_m%C4%9Bst_v_%C4%8Cesku_podle_po%C4%8Dtu_obyvatel')
@lru_cache()
def get_meetups(lang='cs'):
return sort_by_city_size(scrape_meetups(lang))
def scrape_meetups(lang='cs'):
"""
    Ideally, pyvo.cz would have an API where we could get all this info.
    Let's assume the HTML page is a good enough API for now.
"""
url = 'https://pyvo.cz/en/' if lang == 'en' else 'https://pyvo.cz/'
res = requests.get(url, headers={'Accept-Charset': 'utf-8'})
res.raise_for_status()
root = html.fromstring(res.content.decode('utf-8'))
root.make_links_absolute(res.url)
for event in root.cssselect('#events .event'):
try:
yield {
'name': event.cssselect('h3')[0].text_content().strip(),
'url': event.cssselect('h3 a')[0].get('href'),
}
except IndexError:
continue
@lru_cache()
def scrape_cities():
res = requests.get(WIKI_URL)
res.raise_for_status()
root = html.fromstring(res.text)
rows = root.cssselect('.wikitable tbody tr')
return [row.cssselect('td')[1].text_content().strip() for row in rows[1:]]
def sort_by_city_size(meetups):
"""
Sorts given iterable of meetups by the size of the city. While pyvo.cz
lists the meetups according to when the closest event happens or happened,
this doesn't make sense for python.cz where the meetups are listed just
as a general overview. Also alphabetical sorting is pretty much just
confusing for the visitor. It only makes sense to sort the meetups by the
size of the city. The most populated cities have a larger probability
that the visitor of the page is close to them, thus they deserve to be
higher in the list.
"""
city_slugs = [slugify(city) + '-pyvo' for city in scrape_cities()]
# convert list [city1, city2, ...] into dict {city1: 0, city2: 1, ...}
city_slugs = {city: n for n, city in enumerate(city_slugs)}
city_slugs['hradec-pyvo'] = city_slugs['hradec-kralove-pyvo']
def key_func(meetup):
slug = meetup['url'].rstrip('/').split('/')[-1]
return city_slugs[slug]
return sorted(meetups, key=key_func)
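if __name__ == '__main__':
    # Ad-hoc smoke test (editor's addition, not part of the original module):
    # fetch the meetups and print them ordered by city size.
    for meetup in get_meetups():
        print('{name}: {url}'.format(**meetup))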
| mit | -6,805,475,397,628,776,000 | 32.6 | 79 | 0.642007 | false |
peoplepower/botlab | com.ppc.Microservices/intelligence/dailyreport/location_dailyreport_microservice.py | 1 | 26353 | '''
Created on November 20, 2019
This file is subject to the terms and conditions defined in the
file 'LICENSE.txt', which is part of this source code package.
@author: David Moss
'''
from intelligence.intelligence import Intelligence
import domain
import json
import utilities.utilities as utilities
import signals.analytics as analytics
# Section weights
WEIGHT_ALERTS = 0
WEIGHT_NOTES = 5
WEIGHT_TASKS = 10
WEIGHT_SLEEP = 15
WEIGHT_ACTIVITIES = 20
WEIGHT_MEALS = 25
WEIGHT_MEDICATION = 30
WEIGHT_BATHROOM = 35
WEIGHT_SOCIAL = 40
WEIGHT_MEMORIES = 45
WEIGHT_SYSTEM = 50
# Section ID's
SECTION_ID_ALERTS = "alerts"
SECTION_ID_NOTES = "notes"
SECTION_ID_TASKS = "tasks"
SECTION_ID_SLEEP = "sleep"
SECTION_ID_ACTIVITIES = "activities"
SECTION_ID_MEALS = "meals"
SECTION_ID_MEDICATION = "medication"
SECTION_ID_BATHROOM = "bathroom"
SECTION_ID_SOCIAL = "social"
SECTION_ID_MEMORIES = "memories"
SECTION_ID_SYSTEM = "system"
# Section Colors
SECTION_COLOR_ALERTS = "D0021B"
SECTION_COLOR_NOTES = "530F8B"
SECTION_COLOR_TASKS = "00AD9D"
SECTION_COLOR_SLEEP = "946C49"
SECTION_COLOR_ACTIVITIES = "27195F"
SECTION_COLOR_MEALS = "C1006E"
SECTION_COLOR_MEDICATION = "1E6601"
SECTION_COLOR_BATHROOM = "17A5F6"
SECTION_COLOR_SOCIAL = "B6B038"
SECTION_COLOR_MEMORIES = "600000"
SECTION_COLOR_SYSTEM = "787F84"
# Reasons why the occupancy status would have changed
REASON_ML = "ML"
REASON_USER = "USER"
# Timer references
TIMER_REFERENCE_ADVANCE_REPORTS = "new"
# State UI content address
DAILY_REPORT_ADDRESS = "dailyreport"
class LocationDailyReportMicroservice(Intelligence):
"""
Create a daily report
"""
def __init__(self, botengine, parent):
"""
Instantiate this object
:param parent: Parent object, either a location or a device object.
"""
Intelligence.__init__(self, botengine, parent)
# Timestamp at which the current report was created
self.current_report_ms = None
# Timestamp at which the home went into SLEEP mode
self.started_sleeping_ms = None
# Last report we emailed
self.last_emailed_report_ms = None
def initialize(self, botengine):
"""
Initialize
:param botengine: BotEngine environment
"""
if not hasattr(self, 'last_emailed_report_ms'):
self.last_emailed_report_ms = None
return
def destroy(self, botengine):
"""
This device or object is getting permanently deleted - it is no longer in the user's account.
:param botengine: BotEngine environment
"""
return
def mode_updated(self, botengine, current_mode):
"""
Mode was updated
:param botengine: BotEngine environment
:param current_mode: Current mode
:param current_timestamp: Current timestamp
"""
return
def occupancy_status_updated(self, botengine, status, reason, last_status, last_reason):
"""
AI Occupancy Status updated
:param botengine: BotEngine
:param status: Current occupancy status
:param reason: Current occupancy reason
:param last_status: Last occupancy status
:param last_reason: Last occupancy reason
"""
if 'SLEEP' in status and REASON_ML in reason and self.started_sleeping_ms is None:
# Started sleeping
self.started_sleeping_ms = botengine.get_timestamp()
if self.parent.get_relative_time_of_day(botengine) > 12.0:
# Went to sleep before midnight - send out the daily report now.
self.last_emailed_report_ms = self.current_report_ms
self.email_report(botengine)
if 'SLEEP' not in status and 'S2H' not in status and self.started_sleeping_ms is not None:
# Stopped sleeping
self.started_sleeping_ms = None
return
def device_measurements_updated(self, botengine, device_object):
"""
Device was updated
:param botengine: BotEngine environment
:param device_object: Device object that was updated
"""
return
def device_metadata_updated(self, botengine, device_object):
"""
Evaluate a device that is new or whose goal/scenario was recently updated
:param botengine: BotEngine environment
:param device_object: Device object that was updated
"""
return
def device_alert(self, botengine, device_object, alert_type, alert_params):
"""
Device sent an alert.
When a device disconnects, it will send an alert like this: [{u'alertType': u'status', u'params': [{u'name': u'deviceStatus', u'value': u'2'}], u'deviceId': u'eb10e80a006f0d00'}]
When a device reconnects, it will send an alert like this: [{u'alertType': u'on', u'deviceId': u'eb10e80a006f0d00'}]
:param botengine: BotEngine environment
:param device_object: Device object that sent the alert
:param alert_type: Type of alert
"""
return
def device_added(self, botengine, device_object):
"""
A new Device was added to this Location
:param botengine: BotEngine environment
:param device_object: Device object that is getting added
"""
return
def device_deleted(self, botengine, device_object):
"""
Device is getting deleted
:param botengine: BotEngine environment
:param device_object: Device object that is getting deleted
"""
return
def question_answered(self, botengine, question_object):
"""
The user answered a question
:param botengine: BotEngine environment
:param question_object: Question object
"""
return
def datastream_updated(self, botengine, address, content):
"""
Data Stream Message Received
:param botengine: BotEngine environment
:param address: Data Stream address
:param content: Content of the message
"""
if hasattr(self, address):
getattr(self, address)(botengine, content)
def schedule_fired(self, botengine, schedule_id):
"""
The bot executed on a hard coded schedule specified by our runtime.json file
:param botengine: BotEngine environment
:param schedule_id: Schedule ID that is executing from our list of runtime schedules
"""
return
def timer_fired(self, botengine, argument):
"""
The bot's intelligence timer fired
:param botengine: Current botengine environment
:param argument: Argument applied when setting the timer
"""
return
def file_uploaded(self, botengine, device_object, file_id, filesize_bytes, content_type, file_extension):
"""
A device file has been uploaded
:param botengine: BotEngine environment
:param device_object: Device object that uploaded the file
:param file_id: File ID to reference this file at the server
:param filesize_bytes: The file size in bytes
:param content_type: The content type, for example 'video/mp4'
:param file_extension: The file extension, for example 'mp4'
"""
return
def coordinates_updated(self, botengine, latitude, longitude):
"""
Approximate coordinates of the parent proxy device object have been updated
:param latitude: Latitude
:param longitude: Longitude
"""
return
def user_role_updated(self, botengine, user_id, alert_category, location_access, previous_alert_category, previous_location_access):
"""
A user changed roles
:param botengine: BotEngine environment
:param user_id: User ID that changed roles
:param alert_category: User's current alert/communications category (1=resident; 2=supporter)
:param location_access: User's access to the location and devices. (0=None; 10=read location/device data; 20=control devices and modes; 30=update location info and manage devices)
:param previous_alert_category: User's previous category, if any
:param previous_location_access: User's previous access to the location, if any
"""
return
def midnight_fired(self, botengine, content=None):
"""
Data stream message - Midnight timer fired
:param botengine:
:param content:
:return:
"""
# If we haven't emailed the daily report yet because the person hasn't gone to sleep yet, email it now.
if self.current_report_ms is not None:
if self.last_emailed_report_ms != self.current_report_ms:
self.last_emailed_report_ms = self.current_report_ms
if "SLEEP" not in self.parent.occupancy_status and "VACATION" not in self.parent.occupancy_status:
self.add_entry(botengine, SECTION_ID_SLEEP, comment=_("Hasn't gone to sleep by midnight."), include_timestamp=True)
self.email_report(botengine)
# Create a new report
self.current_report_ms = self._get_todays_timestamp(botengine)
report = {}
name = self._get_resident_name(botengine)
if name is not None:
report['title'] = name.upper()
else:
report['title'] = _("DAILY REPORT")
report['subtitle'] = _("Daily Report for {}").format(self.parent.get_local_datetime(botengine).strftime("%A %B %-d, %Y"))
report['created_ms'] = botengine.get_timestamp()
report['sections'] = []
self.parent.set_location_property_separately(botengine, DAILY_REPORT_ADDRESS, report, overwrite=True, timestamp_ms=self.current_report_ms)
analytics.track(botengine,
self.parent,
"daily_report_initialized",
properties={
"timestamp_ms": self.current_report_ms
})
# Add our first entry if possible.
if self.started_sleeping_ms is not None and "SLEEP" in self.parent.occupancy_status:
self.add_entry(botengine, SECTION_ID_SLEEP, comment=_("Went to sleep."), subtitle=_("Currently sleeping."), include_timestamp=True, timestamp_override_ms=self.started_sleeping_ms)
def daily_report_entry(self, botengine, content):
"""
Data stream message to add content to our daily report
:param botengine: BotEngine environment
:param content: Data Stream Content
:return:
"""
botengine.get_logger().info("location_dailyreport_microservice: 'daily_report_entry' data stream message received.")
if 'section_id' not in content:
botengine.get_logger().error("location_dailyreport_microservice: Section ID not found in data stream message {}".format(content))
return
section_id = content['section_id']
comment = None
subtitle = None
identifier = None
include_timestamp = False
timestamp_override_ms = None
if 'comment' in content:
comment = content['comment']
if 'subtitle' in content:
subtitle = content['subtitle']
if 'identifier' in content:
identifier = content['identifier']
if 'include_timestamp' in content:
include_timestamp = content['include_timestamp']
if 'timestamp_override_ms' in content:
timestamp_override_ms = content['timestamp_override_ms']
self.add_entry(botengine, section_id, comment=comment, subtitle=subtitle, identifier=identifier, include_timestamp=include_timestamp, timestamp_override_ms=timestamp_override_ms)
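    # Illustrative example (added note; not part of the original service): a
    # 'daily_report_entry' data stream message is expected to look like the
    # hypothetical payload below, where only 'section_id' is mandatory:
    #   {
    #       "section_id": "meals",
    #       "comment": "Breakfast detected.",
    #       "include_timestamp": True
    #   }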
def add_entry(self, botengine, section_id, comment=None, subtitle=None, identifier=None, include_timestamp=False, timestamp_override_ms=None):
"""
Add a section and bullet point the current daily report
:param botengine: BotEngine environment
:param comment: Comment like "Woke up."
:param subtitle: Subtitle comment like "Consistent sleep schedule and good quality sleep last night."
:param identifier: Optional identifier to come back and edit this entry later.
:param include_timestamp: True to include a timestamp like "7:00 AM - <comment>" (default is False)
:param timestamp_override_ms: Optional timestamp in milliseconds to override the current time when citing the timestamp with include_timestamp=True
"""
botengine.get_logger().info("location_dailyreport_microservice.add_entry(): Current report timestamp is {}".format(self.current_report_ms))
# Make sure our midnight schedule fired properly.
# We added a 1 hour buffer for backwards compatibility, because the self.current_report_ms was previously being set to the current botengine.get_timestamp()
# which was some time after midnight.
if self.current_report_ms is None:
self.midnight_fired(botengine)
if self._get_todays_timestamp(botengine) < (self.current_report_ms - utilities.ONE_HOUR_MS):
self.midnight_fired(botengine)
report = botengine.get_ui_content(DAILY_REPORT_ADDRESS, timestamp_ms=self.current_report_ms)
if report is None:
botengine.get_logger().info("location_dailyreport_microservice: There is currently no active daily report.")
self.midnight_fired(botengine)
report = botengine.get_ui_content(DAILY_REPORT_ADDRESS, self.current_report_ms)
if report is None:
return
else:
botengine.get_logger().info("location_dailyreport_microservice: Successfully created and loaded a new report.")
else:
botengine.get_logger().info("location_dailyreport_microservice: Successfully loaded an existing report.")
focused_section = self._get_section_object(botengine, report, section_id)
if focused_section is None:
botengine.get_logger().info("location_dailyreport_microservice: Need to create a new section for section_id '{}'.".format(section_id))
if section_id == SECTION_ID_ALERTS:
focused_section = {
"weight": WEIGHT_ALERTS,
"id": SECTION_ID_ALERTS,
"title": _("Today's Alerts"),
"icon": "comment-exclamation",
"color": SECTION_COLOR_ALERTS,
"items": []
}
elif section_id == SECTION_ID_NOTES:
focused_section = {
"weight": WEIGHT_NOTES,
"id": SECTION_ID_NOTES,
"title": _("Today's Notes"),
"icon": "clipboard",
"color": SECTION_COLOR_NOTES,
"items": []
}
elif section_id == SECTION_ID_TASKS:
focused_section = {
"weight": WEIGHT_TASKS,
"id": SECTION_ID_TASKS,
"title": _("Today's Tasks"),
"icon": "clipboard-list-check",
"color": SECTION_COLOR_TASKS,
"items": []
}
elif section_id == SECTION_ID_SLEEP:
focused_section = {
"weight": WEIGHT_SLEEP,
"id": SECTION_ID_SLEEP,
"title": _("Sleep"),
"icon": "moon",
"color": SECTION_COLOR_SLEEP,
"items": []
}
elif section_id == SECTION_ID_BATHROOM:
focused_section = {
"weight": WEIGHT_BATHROOM,
"id": SECTION_ID_BATHROOM,
"title": _("Bathroom"),
"icon": "toilet",
"color": SECTION_COLOR_BATHROOM,
"items": []
}
elif section_id == SECTION_ID_ACTIVITIES:
focused_section = {
"weight": WEIGHT_ACTIVITIES,
"id": SECTION_ID_ACTIVITIES,
"title": _("Activities"),
"icon": "walking",
"color": SECTION_COLOR_ACTIVITIES,
"items": []
}
elif section_id == SECTION_ID_MEALS:
focused_section = {
"weight": WEIGHT_MEALS,
"id": SECTION_ID_MEALS,
"title": _("Meals"),
"icon": "utensils",
"color": SECTION_COLOR_MEALS,
"items": []
}
elif section_id == SECTION_ID_MEDICATION:
focused_section = {
"weight": WEIGHT_MEDICATION,
"id": SECTION_ID_MEDICATION,
"title": _("Medication"),
"icon": "pills",
"color": SECTION_COLOR_MEDICATION,
"items": []
}
elif section_id == SECTION_ID_SOCIAL:
focused_section = {
"weight": WEIGHT_SOCIAL,
"id": SECTION_ID_SOCIAL,
"title": _("Social"),
"icon": "user-friends",
"color": SECTION_COLOR_SOCIAL,
"items": []
}
elif section_id == SECTION_ID_MEMORIES:
focused_section = {
"weight": WEIGHT_MEMORIES,
"id": SECTION_ID_MEMORIES,
"title": _("Memories"),
"icon": "camera-retro",
"color": SECTION_COLOR_MEMORIES,
"items": []
}
elif section_id == SECTION_ID_SYSTEM:
focused_section = {
"weight": WEIGHT_SYSTEM,
"id": SECTION_ID_SYSTEM,
"title": _("System Status"),
"icon": "brain",
"color": SECTION_COLOR_SYSTEM,
"items": []
}
else:
botengine.get_logger().error("location_dailyreport_microservice: Unknown section '{}'".format(section_id))
return
if 'sections' not in report:
report['sections'] = []
report['sections'].append(focused_section)
report['sections'] = sorted(report['sections'], key=lambda k: k['weight'])
if comment is not None or identifier is not None:
if include_timestamp and comment is not None:
if timestamp_override_ms is not None:
dt = self.parent.get_local_datetime_from_timestamp(botengine, timestamp_override_ms)
else:
dt = self.parent.get_local_datetime(botengine)
if section_id == SECTION_ID_SLEEP:
# Sleep timestamps include the day
comment = "{} - {}".format(dt.strftime("%-I:%M %p %A"), comment)
else:
# Other timestamps don't include the day
comment = "{} - {}".format(dt.strftime("%-I:%M %p"), comment)
if identifier is None and comment is not None:
ts = botengine.get_timestamp()
if timestamp_override_ms is not None:
ts = timestamp_override_ms
focused_item = {
"timestamp_ms": ts,
"comment": comment
}
focused_section['items'].append(focused_item)
focused_section['items'] = sorted(focused_section['items'], key=lambda k: k['timestamp_ms'])
else:
# Try to overwrite any previous entry with this identifier
focused_item = None
for item in focused_section['items']:
if 'id' in item:
if item['id'] == identifier:
focused_item = item
if focused_item is not None:
# Edit the item in place
if comment is not None:
# Modify the item
ts = botengine.get_timestamp()
if timestamp_override_ms is not None:
ts = timestamp_override_ms
focused_item['timestamp_ms'] = ts
focused_item['comment'] = comment
focused_section['items'] = sorted(focused_section['items'], key=lambda k: k['timestamp_ms'])
else:
# Delete the item
focused_section['items'].remove(focused_item)
focused_section['items'] = sorted(focused_section['items'], key=lambda k: k['timestamp_ms'])
if len(focused_section['items']) == 0:
# Delete the entire section
report['sections'].remove(focused_section)
else:
# Add the item
ts = botengine.get_timestamp()
if timestamp_override_ms is not None:
ts = timestamp_override_ms
focused_item = {
"timestamp_ms": ts,
"comment": comment,
"id": identifier
}
focused_section['items'].append(focused_item)
focused_section['items'] = sorted(focused_section['items'], key=lambda k: k['timestamp_ms'])
if subtitle is not None:
# Manually defined subtitle for this section
focused_section['subtitle'] = subtitle
else:
# Auto-generated subtitles for specific sections that support it
if section_id == SECTION_ID_NOTES:
if len(focused_section['items']) == 0:
focused_section['subtitle'] = _("No notes captured today.")
elif len(focused_section['items']) == 1:
focused_section['subtitle'] = _("Captured one note today.")
elif len(focused_section['items']) > 1:
focused_section['subtitle'] = _("Captured {} notes today.").format(len(focused_section['items']))
elif section_id == SECTION_ID_TASKS:
if len(focused_section['items']) == 0:
focused_section['subtitle'] = _("No tasks updated today.")
elif len(focused_section['items']) == 1:
focused_section['subtitle'] = _("Updated one task today.")
elif len(focused_section['items']) > 1:
focused_section['subtitle'] = _("Updated {} tasks today.").format(len(focused_section['items']))
elif section_id == SECTION_ID_MEDICATION:
if len(focused_section['items']) == 0:
focused_section['subtitle'] = _("No medication accessed today.")
elif len(focused_section['items']) == 1:
focused_section['subtitle'] = _("Accessed medicine once today.")
elif len(focused_section['items']) > 1:
focused_section['subtitle'] = _("Accessed medicine {} times today.").format(len(focused_section['items']))
elif section_id == SECTION_ID_BATHROOM:
if len(focused_section['items']) == 0:
focused_section['subtitle'] = _("No bathroom visits observed today.")
elif len(focused_section['items']) == 1:
focused_section['subtitle'] = _("Visited the bathroom once today.")
elif len(focused_section['items']) > 1:
focused_section['subtitle'] = _("Visited the bathroom {} times today.").format(len(focused_section['items']))
self.parent.set_location_property_separately(botengine, DAILY_REPORT_ADDRESS, report, overwrite=True, timestamp_ms=self.current_report_ms)
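    # Illustrative note (added; not part of the original service): passing an
    # 'identifier' makes an entry editable later.  For example, a first call
    #   add_entry(botengine, SECTION_ID_TASKS, comment="Task started.",
    #             identifier="task-1")
    # creates the item; a second call with the same identifier and a new
    # comment rewrites it in place; and a call with the same identifier but
    # comment=None deletes it (removing the section once it becomes empty).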
def email_report(self, botengine):
"""
Email the current report
:param botengine:
:return:
"""
return
def _get_section_object(self, botengine, report, section_id):
"""
Find and return a section object out of all the sections in the report dictionary that is passed in
:param botengine:
:param report: report dictionary object
:param section_id: section ID to return
:return: section object dictionary, or None if it doesn't exist
"""
if report is not None:
if 'sections' in report:
for section in report['sections']:
if section['id'] == section_id:
return section
return None
def _get_resident_name(self, botengine):
"""
Get the name of the resident in a way that we can use this in a sentence
:param botengine:
:return:
"""
residents = botengine.get_location_user_names(to_residents=True, to_supporters=False, sms_only=False)
name = ""
if len(residents) == 0:
# Nobody lives here, nothing to do
return None
elif len(residents) == 1:
name = "{} {}".format(residents[0]['firstName'], residents[0]['lastName']).strip()
elif len(residents) == 2:
a = _("and")
# a and b
name = "{} {} {}".format(residents[0]['firstName'], a, residents[1]['firstName'])
elif len(residents) > 2:
# So, we only list 3 names max just because we don't want to waste a ton of SMS space.
a = _("and")
# a, b, and c
name = "{}, {}, {} {}".format(residents[0]['firstName'], residents[1]['firstName'], a, residents[2]['firstName'])
return name
def _get_todays_timestamp(self, botengine):
"""
Get the timestamp for midnight last night
:param botengine:
:return:
"""
return self.parent.timezone_aware_datetime_to_unix_timestamp(botengine, self.parent.get_midnight_last_night(botengine))
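    # Illustrative sketch (added; hypothetical values) of the report
    # dictionary this microservice maintains under DAILY_REPORT_ADDRESS:
    #   {
    #       "title": "DAILY REPORT",
    #       "subtitle": "Daily Report for Monday January 1, 2020",
    #       "created_ms": 1577865600000,
    #       "sections": [
    #           {"weight": WEIGHT_SLEEP, "id": SECTION_ID_SLEEP,
    #            "title": "Sleep", "icon": "moon",
    #            "color": SECTION_COLOR_SLEEP, "subtitle": "...",
    #            "items": [{"timestamp_ms": 1577865600000,
    #                       "comment": "7:00 AM - Woke up."}]}
    #       ]
    #   }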
| apache-2.0 | 9,092,760,513,962,145,000 | 39.111111 | 191 | 0.567222 | false |
miumok98/weblate | weblate/billing/admin.py | 1 | 1644 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <[email protected]>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from weblate.billing.models import Plan, Billing
class PlanAdmin(admin.ModelAdmin):
list_display = (
'name', 'price', 'limit_strings', 'limit_languages',
'limit_repositories', 'limit_projects',
)
class BillingAdmin(admin.ModelAdmin):
list_display = (
'user', 'plan',
'list_projects',
'count_repositories', 'count_strings', 'count_words',
'count_languages',
'in_limits',
)
list_filter = ('plan', )
search_fields = ('user__username', 'projects__name')
def list_projects(self, obj):
return u','.join(obj.projects.values_list('name', flat=True))
list_projects.short_description = _('Projects')
admin.site.register(Plan, PlanAdmin)
admin.site.register(Billing, BillingAdmin)
| gpl-3.0 | -1,565,841,120,037,533,000 | 31.176471 | 71 | 0.69287 | false |
smithchristian/arcpy-create-base-dataset | supportingModules/pln.py | 1 | 2508 | # ----------------------------------------------------------------------------
# Name: pln.py (Planning.py)
# Purpose: This module contains variables for the construction
# of a planning dataset. This module is to be used in
# conjunction with create-Base-DataSet/main.py.
# Description
# and Examples: Regulatory planning data: Regional plans, LGA planning
# schemes, Zoning, Strategic Plan data, Growth Management,
# Zone boundaries.
#
# Author: Christian Fletcher Smith
#
# Created: 10/02/2015
# Copyright: (c) smithc5 2015
# Version: 2
# -----------------------------------------------------------------------------
# This is the name for the planning dataset
PLN_GDB_NAME = "Planning.gdb"
'''
The following information outlines the variable structure for each feature
in order to be used correctly within main.py.
NOTE: The * used in the information below is to indicate a user defined
name.
Feature variable structure:
# Layer Name ----------------------------------------------------------
* -- This is the source location of the layer to be clipped.
*_FC_NAME -- This is the .gdb name and feature class name for the layer to
be used. The user only needs to populate text after the '{}\', as
             '{}\' is formatted to use the variable PLN_GDB_NAME.
*_ALIAS -- This is the alias name to be displayed within ArcGIS.
*_DIC -- The dictionary is used to store all the features variables which
will be imported into main.py as required.
example:
# Planning Zones -----------------------------------
PLNZONE = r"D:\Planning\PlanningZones.shp"
    PLNZONE_FC_NAME = "{}\Planning_Zones".format(PLN_GDB_NAME)
PLNZONE_ALIAS = "Planning Zones"
PLNZONE_DIC = {"source_location": PLNZONE,
"output_name": PLNZONE_FC_NAME,
"alias": PLNZONE_ALIAS}
'''
# TODO: need to add in layer variables
# ----------------------------------------------------------------------------
# DO NOT ADD LAYER VARIABLES BELOW THIS LINE!
#
# The following list comprehension is designed to compile all the dictionaries
# from the above layers into a single list. This list is imported into main.py
# when required.
# ----------------------------------------------------------------------------
PLN_DIC_LIST = [val for name, val in globals().items() if name.endswith('_DIC')]
| mit | 7,385,860,457,291,931,000 | 33.828571 | 80 | 0.544657 | false |
smc170/fam-study-password | game_code.py | 1 | 2177 | """
Family Study Password - The Biblical Game
Copyright (C) 2013 Spencer Caesare
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
#!/usr/bin/env python3
import sys
from sys import exit
from PySide import QtCore, QtGui
from random import *
from game_gui import Ui_main_window
from game_list import cards
class game_window(QtGui.QWidget, Ui_main_window):
def __init__(self, parent=None):
super(game_window, self).__init__(parent)
self.setupUi(self)
self.loop_count = 0
self.random_word()
def random_word(self):
if self.loop_count >= 2:
self.get_button.clicked.disconnect(self.random_word)
else:
pass
self.card_to_play = choice(cards)
cards.remove(self.card_to_play)
self.password_label.setText('Push Button To Receive Word')
self.get_button.setText('Push Me To Get A Word')
self.loop_count += 1
self.get_button.clicked.connect(self.set_labels)
def set_labels(self):
self.password_label.setText(self.card_to_play)
self.get_button.setText('Push To Clear Word')
self.get_button.clicked.disconnect(self.set_labels)
self.get_button.clicked.connect(self.random_word)
if not cards:
self.password_label.setText("Congrats! You've gone through all the words! Press the button to quit.")
self.get_button.setText('Push Me To Quit')
self.get_button.clicked.connect(QtCore.QCoreApplication.instance().quit)
else:
pass
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
window = game_window()
window.show()
sys.exit(app.exec_())
| gpl-3.0 | -7,751,935,000,141,383,000 | 23.738636 | 104 | 0.726229 | false |
lcy-seso/models | fluid/icnet/_ce.py | 1 | 1308 | # this file is only used for continuous evaluation test!
import os
import sys
sys.path.append(os.environ['ceroot'])
from kpi import CostKpi, DurationKpi, AccKpi
# NOTE kpi.py should shared in models in some way!!!!
train_cost_kpi = CostKpi('train_cost', 0.05, 0, actived=True)
train_duration_kpi = DurationKpi('train_duration', 0.06, 0, actived=True)
tracking_kpis = [
train_cost_kpi,
train_duration_kpi,
]
def parse_log(log):
    '''
    This method should be implemented by model developers.
    The suggestion:
    each KPI line in the log should be: kpis, key, value -- separated by
    tabs, for example:
    "
    kpis\ttrain_cost\t1.0
    kpis\ttrain_duration\t1.0
    "
    (lines without a leading 'kpis' field are ignored)
    '''
for line in log.split('\n'):
fs = line.strip().split('\t')
print(fs)
if len(fs) == 3 and fs[0] == 'kpis':
kpi_name = fs[1]
kpi_value = float(fs[2])
yield kpi_name, kpi_value
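# Illustrative example (added): a log line "kpis\ttrain_cost\t0.42" yields
# the pair ('train_cost', 0.42); lines without the leading 'kpis' field are
# ignored by parse_log().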
def log_to_ce(log):
kpi_tracker = {}
for kpi in tracking_kpis:
kpi_tracker[kpi.name] = kpi
for (kpi_name, kpi_value) in parse_log(log):
print(kpi_name, kpi_value)
kpi_tracker[kpi_name].add_record(kpi_value)
kpi_tracker[kpi_name].persist()
if __name__ == '__main__':
log = sys.stdin.read()
log_to_ce(log)
| apache-2.0 | 771,536,410,682,091,400 | 21.947368 | 73 | 0.603211 | false |
dajuno/nmrpy | mri0D.py | 1 | 9879 | # -*- coding: utf8 -*-
'''
Simulate magnetization of one group of nuclear spins "0D"
solving the Bloch equation within a frame of reference rotating with w_rf
dM/dt = g*(M x B) + relax
M: magnetization
B: applied magnetic field = B_0 + B_RF + B_G
g: gyromagnetic ratio
relax: T1, T2 relaxation terms '''
# TODO: [ ] spin echo sequence: 90y - TE/2 - 180x - TE - 180x - ..
# [ ] compute MRI signal
# [ ] compare to analytical solution
#       [ ] and matrix formalism
import numpy as np
import matplotlib.pyplot as plt
from scipy.integrate import ode
from utils import progressbar
# class spin:
# ''' spin class
# (add some methods later?) '''
# def __init__(self, M0=1, T1=0.200, T2=0.600, Minit=[0, 0, 1]):
# ''' constructor
# M0 equilibrium magnetization
# Minit initial magnetization
# T1 relaxation time of substance
# T2
# '''
# # gyromagnetic ratio of protons (¹H):
# self.gm = 42.6e6 # Hz/Tesla
# self.M0 = 1
# self.T1 = T1
# self.T2 = T2
# self.Minit = Minit
def pulseseq(t, s, params, it):
''' compute contribution to magnetic field `Beff(t)` at time `t`
due to static gradient `Bg`, RF pulse `Brf` and/or gradient pulse `Brfg`
return: B'(t) = Bg + Brf(t) + Brfg(t)
= [Bx, By, Bz]
'''
B1 = params.get('amp')
w0 = params.get('w0')
TR = params.get('TR')
TE = params.get('TE')
pseq = params.get('pseq')
dphi = params.get('dephase') # dephase angle in rad: by how much will
# magnetization be dephased between P1 and P2 ?
if pseq == 'flip90':
tp = np.pi/(2*B1*s['gm'])
dt = TE/2
dB = dphi/s['gm']/dt
if np.mod(t, TR) <= tp: # 90° flip
Bp = B1*np.array([np.cos(w0*t), 0, -dB])
else:
Bp = np.array([0, 0, -dB])
elif pseq == 'continuous':
Bp = B1*np.array([np.cos(w0*t), 0, 0])
elif pseq == 'pulsed':
if np.mod(t, TR) < TE: # echo!
Bp = B1*np.array([np.cos(w0*t), 0, 0])
else:
Bp = np.array([0, 0, 0])
elif pseq == 'spinecho':
''' - one pulse of length tp flips M by pi/2
- magnetization is dephased due to field inhomogeinities
(specify angle in rad!!)
- refocus pulse after \tau -> pi flip
- phase coherence restored after 2\tau
cf. Slichter
'''
        # duration of a 90-degree (pi/2) pulse
tp = np.pi/(2*B1*s['gm'])
dt = TE/2
dB = dphi/s['gm']/dt
if np.mod(t, TR) <= tp: # 90° flip
Bp = B1*np.array([np.cos(w0*t), 0, -dB])
# elif np.mod(t, TR) <= tp + TE/2: # dephase!
elif np.mod(t, TR) <= TE/2: # dephase!
Bp = np.array([0, 0, -dB])
# elif np.mod(t, TR) <= TE/2+3*tp: # 180° flip
elif np.mod(t, TR) <= TE/2+2*tp: # 180° flip
Bp = B1*np.array([np.cos(w0*t), 0, -dB])
else:
Bp = np.array([0, 0, -dB])
else:
Bp = np.array([0, 0, 0])
return Bp
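# Worked example (added; values taken from the __main__ block below): with
# B1 = 1.75e-5 T and gm = 42.6e6 Hz/T, the 90-degree pulse length is
#   tp = pi/(2*B1*gm) = pi/(2 * 1.75e-5 * 42.6e6) ~= 2.1 ms,
# so a TE of 50 ms comfortably contains the pulse plus the TE/2 delay.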
def bloch(s, tend=1, nsteps=1000, backend='vode', pulse_params={},
B0=3, dw_rot=0, dw_rf=0, rtol=1e-6):
''' solve Bloch equations for spin `s` in the ROTATING FRAME OF REFERENCE
rotating with the Larmor frequency plus a shift `dw_rot` (default: 0)
setting dw_rot = None (-> -w0) corresponds to the laboratory frame.
dw_fr: frequency shift for off resonance excitation
'''
w0 = -s['gm']*B0
# RF freq in rotating frame of reference is `w - w_fr`,
# so just the "off resonance" freq (=w_0-w_rf) plus the
    # difference in frequency between w_fr and w_0
if dw_rot is None:
dw_rot = -w0
pulse_params['w0'] = dw_rot + dw_rf
def rhs(t, y, s, pulse_params, B0, w0, dw_rot, it):
B = np.array([0, 0, B0]) # static
B = B + pulseseq(t, s, pulse_params, it) # RF
# rotating frame with w+dw
B = B + np.array([0, 0, (w0+dw_rot)/s['gm']])
# relax
R = np.array([y[0]/s['T2'], y[1]/s['T2'], (y[2]-s['M0'])/s['T1']])
return s['gm']*np.cross(y, B) - R
''' VAR 1 ## automatic step size control '''
it = 1
sol = []
t = []
dt = tend/nsteps
solver = ode(rhs).set_integrator(backend, rtol=rtol)
solver.set_initial_value(s['Minit'], 0)
solver.set_f_params(s, pulse_params, B0, w0, dw_rot, it)
while solver.successful() and solver.t < tend:
# works only with vode!! not recommended:
# solver.integrate(tend, step=True)
solver.integrate(solver.t+dt)
t.append(solver.t)
sol.append(solver.y)
it = it + 1
progressbar(solver.t, tend, 'solve')
return np.array(t), np.array(sol)
def plot_3Dtime(t, M, skip=10):
from mpl_toolkits.mplot3d import Axes3D
import time
plt.ion()
fig = plt.figure()
ax = fig.gca(projection='3d')
ax.axis([-1, 1, -1, 1])
ax.plot([0, 0], [0, 0], [-1, 1], '-.k')
ax.plot([-1, 1], [0, 0], [0, 0], '-.k')
ax.plot([0, 0], [-1, 1], [0, 0], '-.k')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
for i in range(0, len(t), skip):
ax.plot([0, M[i, 0]], [0, M[i, 1]], [0, M[i, 2]],
'-<r')
# print('%i \t t = %g s' % (i, t[i]))
progressbar(t[i], t.max(), s='plot')
plt.draw()
time.sleep(0.05)
def plot_relax(t, M):
plt.ion()
fig, (ax1, ax2) = plt.subplots(1, 2, sharey=True)
Mt = np.sqrt(M[:, 0]**2 + M[:, 1]**2)
ax1.plot(t, Mt)
ax1.set_xlabel('time in ms')
ax1.set_ylabel('$|M|$')
ax1.set_title('T1 relaxation')
ax2.plot(t, M[:, 2])
ax2.set_title('T2 relaxation')
def plot_pulse(t, M, params, s):
plt.ion()
fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True)
# plot magnetization components
ax1.plot(t, M)
ax1.legend(('$M_x$', '$M_y$', '$M_z$'))
ax1.set_xlabel('time in ms')
ax1.set_ylabel('$M$')
ax1.set_title('Magnetization')
plt.draw()
# plot pulse train
pseq = params.get('pseq')
if pseq == 'spinecho' or pseq == 'flip90':
TE = params.get('TE')
TR = params.get('TR')
B1 = params.get('amp')
N = int(np.ceil(t[-1]/TR)) # number of periods
tp = np.pi/(2*B1*s['gm'])
# draw polygone of one period:
if pseq == 'spinecho':
p1 = [0, 1, 1, 0, 0, 1, 1, 0, 0]
tp1 = np.array([0, 0, tp, tp, tp+TE/2, tp+TE/2, TE/2+3*tp,
TE/2+3*tp, TR])
elif pseq == 'flip90':
p1 = [0, 1, 1, 0, 0]
tp1 = np.array([0, 0, tp, tp, TR])
p, tp = [], []
for i in range(N):
tp.extend(tp1+i*TR)
p.extend(p1)
ax2.plot(tp, p)
ax2.set_ylim([-0.2, 1.2])
ax1.set_xlim([0, t.max()])
plt.draw()
if __name__ == '__main__':
B0 = 3
# spin dict
s = {
'M0': 1,
'T1': 0.100,
'T2': 0.600,
'Minit': [0, 0, 1],
'gm': 42.6e6
}
# pulse dict
pulse = {
'TE': 0.050,
'TR': 1.000,
'amp': 1.75e-5, # B1 = 1.75e-5 taken from Yuan1987
'pseq': 'flip90',
'dephase': .1
}
w0 = s['gm']*B0
    nsteps = 1000  # must be an int: also used as an array dimension below
# t, M = bloch(s, tend=0.2, backend='dopri5', pulse_params=pulse, dw_rot=0,
# dw_rf=0, rtol=1e-6, nsteps=nsteps, B0=B0)
# Mc = M[:, 0] + 1j*M[:, 1]
# MANY SPINS EXPERIMENT
N = 100
r = 2*np.random.rand(N) - 1
dw_off = r*100 # frequency shift between +-100 Hz
dphi = r*B0*0.5 # max angle (rad) of dephasing during TE/2
var = dphi # dw_off
M = []
i = 0
    # Preallocate; assumes each bloch() run returns exactly nsteps samples.
    Mc = np.zeros((nsteps, N), dtype=complex)
for x in var:
print('\nrun %i/%i \t shift %.2f' % (i+1, len(var), x))
pulse['dephase'] = x
t, H = bloch(s, tend=0.2, backend='dopri5', pulse_params=pulse,
dw_rot=0, dw_rf=0, rtol=1e-6, nsteps=nsteps, B0=B0)
M.append(H)
Mc[:, i] = H[:, 0] + 1j*H[:, 1]
i += 1
M = np.array(M)
# integrate Mt to get signal
def plot_cplx(t, Mc):
plt.figure()
plt.ion()
plt.plot(t, np.real(Mc), '-', t, np.imag(Mc), ':')
def plot_signal(t, M):
signal = np.sum(M, 0)[:, 0:2]
fig = plt.figure()
plt.ion()
plt.plot(t, signal)
plt.plot(t, signal[:, 0]+signal[:, 1], ':')
plt.legend(('x', 'y', 'sum'))
ax = fig.gca()
ylim = ax.get_ylim()
TE = pulse['TE']
plt.plot([TE, TE], [ylim[0], ylim[1]], '-.k')
# *** BENCHMARK: COMPARE ODE BACKENDS
# Mloop = []
# for be in ['vode', 'lsoda', 'dopri5', 'dop853']:
# t, M = bloch(s, tend=0.1, backend=be, pulse_params=pulse, dw_rot=0,
# dw_rf=0, rtol=1e-6, nsteps=1e5, B0=B0)
# Mloop.append(M)
# *** EXAMPLE: continuous excitation, M -> 2pi turn
# pulse = {'TE': 20, 'TR': 50, 'amp': 1, 'pseq': 'continuous'}
# t1 = 2*np.pi/s.gm/1
# t, M = bloch(s, tend=t1, backend='vode', pulse_params=pulse, dw_rot=0,
# rtol=1e-6, nsteps=1e3, B0=B0)
# *** EXAMPLE: free precession, relaxed
# pulse = {'pseq': 'none'}
# s = spin(Minit=[0.7, 0, 0.8])
# laboratory frame (insane)
# t, M = bloch(s, backend='dopri5', tend=0.01, nsteps=1e4,
# pulse_params=pulse, dw_rot=None, rtol=1e-3, B0=3)
# rotating reference frame (sensible)
# t, M = bloch(s, backend='vode', nsteps=1e3, pulse_params=pulse,
# dw_rot=100, rtol=1e-6, B0=3)
# ** BENCHMARK ** dopri5 (RK45): 1 loops, best of 3: 346 ms per loop
# vode (ABF): 10 loops, best of 3: 77.1 ms per loop
# command: %timeit %run mri0D.py
    # plot_relax(t, M)
    # plot_3Dtime(t, M)
| mit | 2,515,762,664,697,472,000 | 29.760125 | 79 | 0.50081 | false |
ctuning/ck-env | soft/lib.papi/customize.py | 1 | 2808 | #
# Collective Knowledge (individual environment - setup)
#
# See CK LICENSE.txt for licensing details
# See CK COPYRIGHT.txt for copyright details
#
# Developer: Grigori Fursin, [email protected], http://fursin.net
#
import os
##############################################################################
# setup environment setup
def setup(i):
"""
Input: {
cfg - meta of this soft entry
self_cfg - meta of module soft
ck_kernel - import CK kernel module (to reuse functions)
host_os_uoa - host OS UOA
host_os_uid - host OS UID
host_os_dict - host OS meta
target_os_uoa - target OS UOA
target_os_uid - target OS UID
target_os_dict - target OS meta
target_device_id - target device ID (if via ADB)
tags - list of tags used to search this entry
env - updated environment vars from meta
customize - updated customize vars from meta
deps - resolved dependencies for this soft
interactive - if 'yes', can ask questions, otherwise quiet
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
bat - prepared string for bat file
}
"""
# Get variables
ck=i['ck_kernel']
s=''
iv=i.get('interactive','')
cus=i.get('customize',{})
fp=cus.get('full_path','')
hosd=i['host_os_dict']
tosd=i['target_os_dict']
# Check platform
env=i['env']
src=cus.get('install_env',{}).get('PACKAGE_SUB_DIR','')
ep=cus['env_prefix']
pl=os.path.dirname(fp)
p2=os.path.dirname(pl)
p3=os.path.dirname(p2)
pb=os.path.join(p2,'bin')
pinc=os.path.join(p2,'include')
psrc=''
if src!='':
psrc=os.path.join(p3,src)
cus['path_src']=psrc
env[ep+'_SRC']=psrc
cus['path_bin']=pb
cus['path_lib']=pl
cus['path_include']=pinc
env[ep]=p2
env[ep+'_BIN']=pb
env[ep+'_LIB']=pl
env[ep+'_INCLUDE']=pinc
lb=os.path.basename(fp)
lbs=lb
if lbs.endswith('.so'):
lbs=lbs[:-3]+'.a'
cus['static_lib']=lbs
cus['dynamic_lib']=lb
env[ep+'_STATIC_NAME']=cus.get('static_lib','')
env[ep+'_DYNAMIC_NAME']=cus.get('dynamic_lib','')
r = ck.access({'action': 'lib_path_export_script', 'module_uoa': 'os', 'host_os_dict': hosd,
'lib_path': cus.get('path_lib','')})
if r['return']>0: return r
s += r['script']
return {'return':0, 'bat':s}
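# Illustrative example (added; hypothetical install path): for
# full_path = '/opt/papi/lib/libpapi.so' the derived values are
#   path_lib     = /opt/papi/lib
#   path_bin     = /opt/papi/bin
#   path_include = /opt/papi/include
#   static_lib   = libpapi.a, dynamic_lib = libpapi.so
# and the <env_prefix>, <env_prefix>_BIN/_LIB/_INCLUDE variables are
# exported accordingly.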
| bsd-3-clause | -5,265,577,132,017,976,000 | 24.297297 | 97 | 0.503561 | false |
CanalTP/navitia | source/jormungandr/tests/integration_tests_settings.py | 1 | 2752 | # encoding: utf-8
# Copyright (c) 2001-2018, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import
import os
START_MONITORING_THREAD = False
SAVE_STAT = True
DISABLE_DATABASE = True
# for tests we want only a 0.5 second timeout instead of the normal 10 s
INSTANCE_TIMEOUT = int(os.environ.get('CUSTOM_INSTANCE_TIMEOUT', 500))
STAT_CIRCUIT_BREAKER_MAX_FAIL = int(os.getenv('JORMUNGANDR_STAT_CIRCUIT_BREAKER_MAX_FAIL', 1000))
STAT_CIRCUIT_BREAKER_TIMEOUT_S = int(os.getenv('JORMUNGANDR_STAT_CIRCUIT_BREAKER_TIMEOUT_S', 1))
# do not authenticate for tests
PUBLIC = True
LOGGER = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'default': {'format': '[%(asctime)s] [%(levelname)5s] [%(process)5s] [%(name)10s] %(message)s'}
},
'handlers': {'default': {'level': 'INFO', 'class': 'logging.StreamHandler', 'formatter': 'default'}},
'loggers': {
'': {'handlers': ['default'], 'level': 'INFO', 'propagate': True},
'navitiacommon.default_values': {'handlers': ['default'], 'level': 'ERROR', 'propagate': True},
},
}
CACHE_CONFIGURATION = {'CACHE_TYPE': 'null'}
# List of enabled modules
MODULES = {
'v1': { # API v1 of Navitia
'import_path': 'jormungandr.modules.v1_routing.v1_routing',
'class_name': 'V1Routing',
}
}
# circuit breaker parameters, for the tests by default we don't want the circuit breaker
CIRCUIT_BREAKER_MAX_INSTANCE_FAIL = 99999
CIRCUIT_BREAKER_INSTANCE_TIMEOUT_S = 1
GRAPHICAL_ISOCHRONE = True
HEAT_MAP = True
PATCH_WITH_GEVENT_SOCKET = True
GREENLET_POOL_FOR_RIDESHARING_SERVICES = True
| agpl-3.0 | 7,036,363,262,379,790,000 | 34.74026 | 105 | 0.706759 | false |
SymbiFlow/prjxray | fuzzers/005-tilegrid/bram_block/top.py | 1 | 1635 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017-2020 The Project X-Ray Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
import os
import random
random.seed(int(os.getenv("SEED"), 16))
from prjxray import util
from prjxray.db import Database
def gen_sites():
db = Database(util.get_db_root(), util.get_part())
grid = db.grid()
for tile_name in sorted(grid.tiles()):
loc = grid.loc_of_tilename(tile_name)
gridinfo = grid.gridinfo_at_loc(loc)
for site_name, site_type in gridinfo.sites.items():
if site_type in ['FIFO18E1']:
yield tile_name, site_name
def write_params(params):
pinstr = 'tile,val,site\n'
for tile, (site, val) in sorted(params.items()):
pinstr += '%s,%s,%s\n' % (tile, val, site)
open('params.csv', 'w').write(pinstr)
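# Illustrative example (added; hypothetical tile/site names): params.csv
# ends up with one sorted row per tile, e.g.
#   tile,val,site
#   BRAM_L_X6Y0,1,RAMB18_X0Y0
#   BRAM_R_X9Y5,0,RAMB18_X1Y2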
def run():
print('''
module top();
''')
params = {}
sites = list(gen_sites())
for (tile_name, site_name), isone in zip(sites,
util.gen_fuzz_states(len(sites))):
params[tile_name] = (site_name, isone)
print(
'''
(* KEEP, DONT_TOUCH, LOC = "{site_name}" *)
RAMB18E1 #(
.INIT_00(256'b{isone})
) bram_{site_name} ();'''.format(
site_name=site_name,
isone=isone,
))
print("endmodule")
write_params(params)
if __name__ == '__main__':
run()
| isc | 4,388,151,815,182,200,000 | 24.546875 | 79 | 0.549847 | false |
keepokeepo/MITx-6.00.1x-Introduction-to-Computer-Science-and-Programming-Using-Python | PSET 4/ps4b.py | 1 | 6456 | from ps4a import *
import time
#
#
# Computer chooses a word
#
#
def compChooseWord(hand, wordList, n):
"""
Given a hand and a wordList, find the word that gives
the maximum value score, and return it.
This word should be calculated by considering all the words
in the wordList.
If no words in the wordList can be made from the hand, return None.
hand: dictionary (string -> int)
wordList: list (string)
n: integer (HAND_SIZE; i.e., hand size required for additional points)
returns: string or None
"""
# Create a new variable to store the maximum score seen so far (initially 0)
bestScore = 0
# Create a new variable to store the best word seen so far (initially None)
bestWord = None
# For each word in the wordList
for word in wordList:
# If you can construct the word from your hand
if isValidWord(word, hand, wordList):
# find out how much making that word is worth
score = getWordScore(word, n)
# If the score for that word is higher than your best score
if (score > bestScore):
# update your best score, and best word accordingly
bestScore = score
bestWord = word
# return the best word you found.
return bestWord
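# Illustrative example (added; assumes the standard Scrabble letter values
# from ps4a): with hand {'a': 1, 'p': 2, 'l': 1, 'e': 1, 's': 1} and both
# 'apple' and 'apples' in wordList, compChooseWord returns 'apples', since
# (1+3+3+1+1)*5 = 45 < (1+3+3+1+1+1)*6 = 60.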
#
# Computer plays a hand
#
def compPlayHand(hand, wordList, n):
"""
Allows the computer to play the given hand, following the same procedure
as playHand, except instead of the user choosing a word, the computer
chooses it.
1) The hand is displayed.
2) The computer chooses a word.
3) After every valid word: the word and the score for that word is
displayed, the remaining letters in the hand are displayed, and the
computer chooses another word.
4) The sum of the word scores is displayed when the hand finishes.
5) The hand finishes when the computer has exhausted its possible
choices (i.e. compChooseWord returns None).
hand: dictionary (string -> int)
wordList: list (string)
n: integer (HAND_SIZE; i.e., hand size required for additional points)
"""
# Keep track of the total score
totalScore = 0
# As long as there are still letters left in the hand:
while (calculateHandlen(hand) > 0) :
# Display the hand
print("Current Hand: ", end=' ')
displayHand(hand)
# computer's word
word = compChooseWord(hand, wordList, n)
        # If the computer could not find a word:
        if word is None:
            # End the game (break out of the loop)
            break
        # Otherwise (the computer found a playable word):
        else :
# If the word is not valid:
if (not isValidWord(word, hand, wordList)) :
print('This is a terrible error! I need to check my own code!')
break
# Otherwise (the word is valid):
else :
# Tell the user how many points the word earned, and the updated total score
score = getWordScore(word, n)
totalScore += score
print('"' + word + '" earned ' + str(score) + ' points. Total: ' + str(totalScore) + ' points')
# Update hand and show the updated hand to the user
hand = updateHand(hand, word)
print()
# Game is over (user entered a '.' or ran out of letters), so tell user the total score
print('Total score: ' + str(totalScore) + ' points.')
#
# Problem #6: Playing a game
#
#
def playGame(wordList):
"""
Allow the user to play an arbitrary number of hands.
1) Asks the user to input 'n' or 'r' or 'e'.
* If the user inputs 'e', immediately exit the game.
* If the user inputs anything that's not 'n', 'r', or 'e', keep asking them again.
2) Asks the user to input a 'u' or a 'c'.
* If the user inputs anything that's not 'c' or 'u', keep asking them again.
3) Switch functionality based on the above choices:
* If the user inputted 'n', play a new (random) hand.
* Else, if the user inputted 'r', play the last hand again.
* If the user inputted 'u', let the user play the game
with the selected hand, using playHand.
* If the user inputted 'c', let the computer play the
game with the selected hand, using compPlayHand.
4) After the computer or user has played the hand, repeat from step 1
wordList: list (string)
"""
gameCompleted = False
hand = {}
n = HAND_SIZE
while not gameCompleted:
userInput = input('Enter n to deal a new hand, r to replay the last hand, ' + \
'or e to end game: ')
validPlayer = False
if userInput == 'e':
gameCompleted = True
break
elif userInput == 'r':
if hand == {}:
print('You have not played a hand yet. Please play a new hand first!')
else:
while validPlayer == False:
choosePlayer = input('Enter u to have yourself play, c to have the computer play: ')
if choosePlayer == 'u':
validPlayer = True
playHand(hand, wordList, n)
elif choosePlayer == 'c':
validPlayer = True
compPlayHand(hand, wordList, n)
else:
print('Invalid command.')
elif userInput == 'n':
hand = dealHand(n)
while validPlayer == False:
choosePlayer = input('Enter u to have yourself play, c to have the computer play: ')
if choosePlayer == 'u':
validPlayer = True
playHand(hand, wordList, n)
elif choosePlayer == 'c':
validPlayer = True
compPlayHand(hand, wordList, n)
else:
print('Invalid command.')
else:
print('Invalid command.')
#
# Build data structures used for entire session and play game
#
if __name__ == '__main__':
wordList = loadWords()
playGame(wordList)
| mit | 3,707,142,585,639,417,000 | 34.668508 | 125 | 0.568154 | false |
miing/mci_migo | webui/tests/test_templates.py | 1 | 7266 | from django.test import TestCase
from django.conf import settings
from django.template.loader import render_to_string
from django.core.urlresolvers import reverse
from gargoyle.testutils import switches
from identityprovider.models.openidmodels import OpenIDRPConfig
from identityprovider.readonly import ReadOnlyManager
from identityprovider.tests.utils import SSOBaseUnittestTestCase
from identityprovider.tests.utils import patch_settings
from mock import patch
from pyquery import PyQuery
from unittest import skipUnless
class UbuntuLoginTemplateTestCase(TestCase):
@patch('webui.views.ui.get_rpconfig_from_request')
def test_rpconfig_with_logo_url(self, mock_get_rpconfig):
rpconfig = OpenIDRPConfig(
trust_root='http://localhost/',
logo='http://localhost/img.png')
mock_get_rpconfig.return_value = rpconfig
with patch_settings(BRAND='ubuntu'):
response = self.client.get('/+login')
self.assertTemplateUsed(response, 'registration/login.html')
self.assertContains(response, 'id="rpconfig_logo"')
self.assertContains(response, 'src="http://localhost/img.png"')
@patch('webui.views.ui.get_rpconfig_from_request')
def test_rpconfig_without_logo_url(self, mock_get_rpconfig):
rpconfig = OpenIDRPConfig(
trust_root='http://localhost/',
logo='')
mock_get_rpconfig.return_value = rpconfig
with patch_settings(BRAND='ubuntu'):
response = self.client.get('/+login')
self.assertTemplateUsed(response, 'registration/login.html')
self.assertNotContains(response, 'id="rpconfig_logo"')
def render_u1_login_with_rpconfig(self, rpconfig):
with switches(BRAND_UBUNTUONE=True):
return render_to_string(
'registration/login.html',
dict(rpconfig=rpconfig, brand_description="Ubuntu One"))
def get_title_style_and_text(self, dom):
titles = dom.find('p[class=title]')
self.assertEqual(1, titles.length)
text = " ".join(titles[0].text_content().split())
style = dom.find('style[data-qa-id="test_login_rp"]')
if len(style) == 1:
style = " ".join(style.text().split())
else:
style = None
return style, text
def test_u1_login_rp_details(self):
rpconfig = OpenIDRPConfig(
trust_root='http://localhost/',
displayname='Landscape',
logo='http://localhost/img.png')
html = self.render_u1_login_with_rpconfig(rpconfig)
style, text = self.get_title_style_and_text(PyQuery(html))
self.assertIn("url('http://localhost/img.png')", style)
self.assertIn(u"Landscape log in with Ubuntu One", text)
def test_u1_login_rp_no_logo(self):
"""The rp displayname is still included."""
rpconfig = OpenIDRPConfig(
trust_root='http://localhost/',
displayname='Landscape')
html = self.render_u1_login_with_rpconfig(rpconfig)
style, text = self.get_title_style_and_text(PyQuery(html))
self.assertIsNone(style)
self.assertIn(u"Landscape log in with Ubuntu One", text)
def test_u1_login_rp_no_displayname(self):
rpconfig = OpenIDRPConfig(
trust_root='http://localhost/',
            logo='http://localhost/img.png')
html = self.render_u1_login_with_rpconfig(rpconfig)
style, text = self.get_title_style_and_text(PyQuery(html))
self.assertIn("url('http://localhost/img.png')", style)
self.assertIn(u"log in with Ubuntu One", text)
    @skipUnless(settings.BRAND == 'ubuntuone',
                "New account form only applies to u1 brand.")
def test_u1_branded_login_has_create_account_form(self):
response = self.client.get('/+login')
self.assertContains(response, "data-qa-id=\"create_account_form\"")
    @skipUnless(settings.BRAND == 'ubuntuone',
                "New account form only applies to u1 brand.")
def test_u1_branded_login_without_create_account_form(self):
rm = ReadOnlyManager()
rm.set_readonly()
response = self.client.get('/+login')
self.assertNotContains(response, "data-qa-id=\"create_account_form\"")
rm.clear_readonly()
class NewAccountTemplateTestCase(SSOBaseUnittestTestCase):
def test_with_logo_url(self):
rpconfig = OpenIDRPConfig(
trust_root='http://localhost/',
logo='http://localhost/img.png'
)
html = render_to_string(
'registration/new_account.html',
{'rpconfig': rpconfig}
)
self.assertIn('id="rpconfig_logo"', html)
self.assertIn('src="http://localhost/img.png"', html)
def test_without_logo_url(self):
rpconfig = OpenIDRPConfig(
trust_root='http://localhost/',
logo=''
)
html = render_to_string(
'registration/new_account.html',
{'rpconfig': rpconfig}
)
self.assertNotIn('id="rpconfig_logo"', html)
def test_action_without_token(self):
html = render_to_string('registration/new_account.html', {})
dom = PyQuery(html)
form = dom.find('form[name=newaccountform]')
self.assertEqual(form.attr['action'], reverse('new_account'))
def test_action_with_token(self):
ctx = {'token': 'a' * 16}
html = render_to_string('registration/new_account.html', ctx)
dom = PyQuery(html)
form = dom.find('form[name=newaccountform]')
self.assertEqual(
form.attr['action'],
reverse('new_account', kwargs=ctx)
)
    @skipUnless(settings.BRAND == 'ubuntu',
                "Text does not exist in other brands.")
@switches(ALLOW_UNVERIFIED=False)
def test_allow_invalidated_switch_off(self):
html = render_to_string('registration/new_account.html', {})
self.assertIn(
'and we will send you instructions on how to confirm',
html
)
    @skipUnless(settings.BRAND == 'ubuntu',
                "Text does not exist in other brands.")
@switches(ALLOW_UNVERIFIED=True)
def test_allow_invalidated_switch_on(self):
html = render_to_string('registration/new_account.html', {})
self.assertNotIn(
'and we will send you instructions on how to confirm',
html
)
class UbuntuBaseTemplateTestCase(TestCase):
def test_base_template_includes_analytics(self):
# Analytics code is included if the analytics id is set.
with patch_settings(GOOGLE_ANALYTICS_ID='foobar'):
response = self.client.get('/')
self.assertTemplateUsed(response, 'base.html')
self.assertContains(
response, "_gaq.push(['_setAccount', 'foobar']);")
def test_base_template_not_includes_analytics(self):
        # If the analytics id is not set, the analytics code is not
        # included.
with patch_settings(GOOGLE_ANALYTICS_ID=None):
response = self.client.get('/')
self.assertTemplateUsed(response, 'base.html')
self.assertNotContains(response, "_gaq.push")
| agpl-3.0 | -3,805,882,609,750,976,000 | 35.69697 | 78 | 0.627581 | false |
3dfxsoftware/cbss-addons | hr_payroll_pay_generator/wizard/generator_wizard.py | 1 | 1675 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Addons modules by CLEARCORP S.A.
# Copyright (C) 2009-TODAY CLEARCORP S.A. (<http://clearcorp.co.cr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv, fields
class GeneratorWizard(osv.TransientModel):
_name = 'hr.payroll.pay.generator.generator.wizard'
def generator_exectute(self, cr, uid, ids, context=None):
return True
_columns = {
'pay_type_id': fields.many2one('hr.payroll.pay.generator.pay.type', string='Pay Type', required=True),
'payslip_run_id': fields.many2one('hr.payslip.run', string='Payslip Batch', required=True),
'salary_rule_id': fields.many2one('hr.salary.rule', string='Salary Rule', required=True),
'employee_ids': fields.many2many('hr.employee', string='Employees', required=True),
} | gpl-2.0 | 7,269,671,319,546,851,000 | 44.297297 | 110 | 0.633433 | false |
ccressent/acnav | DAT/Block.py | 1 | 3076 | """
Parse DAT files blocks and traverse block chains.
"""
from binascii import hexlify
from struct import unpack_from
from DAT.Header import Header
class Block:
"""
A block making up a chunk of a Directory in a DAT file.
"""
def __init__(self, filename=None, offset=None, size=None, next_block_offset=None, data=None):
self.filename = filename
self.offset = offset
self.size = size
self.next_block_offset = next_block_offset
self.data = data
def parse(self, blob):
"""
Try to parse a block structure out of the given binary blob.
"""
self.data = unpack_from(str(len(blob[4:])) + "s", blob[4:])[0]
self.next_block_offset = unpack_from("I", blob)[0]
@classmethod
def from_blob(cls, blob):
"""
Return a new Block instance initialized with the result of parsing the
given binary blob.
"""
b = cls()
b.parse(blob)
b.size = len(blob)
return b
@classmethod
def from_file(cls, filename, offset):
"""
Return a new Block instance initialized with the result of parsing the
given file at the given offset.
"""
with open(filename, "rb") as fp:
h = Header.from_file(filename)
fp.seek(offset)
blob = fp.read(h.block_size)
b = cls.from_blob(blob)
b.filename = filename
b.offset = offset
return b
def __iter__(self):
return BlockIterator(self)
def __str__(self):
s = "{filename: " + str(self.filename)
s += ", offset: " + str(hex(self.offset))
s += ", size: " + str(hex(self.size))
s += ", next: " + str(hex(self.next_block_offset))
s += ", data: " + hexlify(self.data)
s += "}"
return s
class BlockIterator:
def __init__(self, first_block):
self.current_block = first_block
self.no_more_blocks = False
def __iter__(self):
return self
    def next(self):
        if self.no_more_blocks:
            raise StopIteration()
        b = self.current_block
        if b.next_block_offset == 0x0:
            # Last block of the chain: don't chase the null offset, which
            # would needlessly re-read the file at offset 0.
            self.no_more_blocks = True
        else:
            self.current_block = Block.from_file(b.filename,
                                                 b.next_block_offset)
        return b
class BlockChain:
"""
The result of traversing a series of Block starting at the given Block.
The data held by a BlockChain can be parsed into a Directory.
"""
def __init__(self, start_block):
self.size = 0
self.data = ""
for block in iter(start_block):
self.size += block.size
self.data += block.data
def __str__(self):
s = "{size: " + str(self.size)
s += ", data: " + hexlify(self.data)
s += "}"
return s
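# --- Illustrative usage sketch (added; not part of the original module) ---
# Both the file name and the starting offset below are placeholders.
if __name__ == '__main__':
    first = Block.from_file('example.dat', 0x400)  # hypothetical offset
    chain = BlockChain(first)
    print(chain)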
| mit | 8,332,130,015,601,070,000 | 26.221239 | 97 | 0.540637 | false |
rjschwei/azure-sdk-for-python | azure-mgmt-authorization/azure/mgmt/authorization/models/classic_administrator.py | 1 | 1424 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ClassicAdministrator(Model):
"""Classic Administrators.
:param id: The ID of the administrator.
:type id: str
:param name: The name of the administrator.
:type name: str
:param type: The type of the administrator.
:type type: str
:param properties: Properties for the classic administrator.
:type properties: :class:`ClassicAdministratorProperties
<azure.mgmt.authorization.models.ClassicAdministratorProperties>`
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'properties': {'key': 'properties', 'type': 'ClassicAdministratorProperties'},
}
def __init__(self, id=None, name=None, type=None, properties=None):
self.id = id
self.name = name
self.type = type
self.properties = properties
| mit | -2,572,937,800,584,470,000 | 34.6 | 86 | 0.589888 | false |
akretion/l10n-brazil | l10n_br_base/tests/test_valid_createid.py | 1 | 6009 | # -*- coding: utf-8 -*-
# @ 2017 Akretion - www.akretion.com.br -
# Clément Mombereau <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo.exceptions import ValidationError
from odoo.tests.common import TransactionCase
class ValidCreateIdTest(TransactionCase):
"""Test if ValidationError is raised well during create({})"""
def setUp(self):
super(ValidCreateIdTest, self).setUp()
self.company_valid = {
'name': 'Company Test 1',
'legal_name': 'Company Testc 1 Ltda',
'cnpj_cpf': '02.960.895/0001-31',
'inscr_est': '081.981.37-6',
'street': 'Rod BR-101 Norte Contorno',
'number': '955',
'street2': 'Portão 1',
'district': 'Jardim da Saudade',
'state_id': self.env.ref('base.state_br_es').id,
'l10n_br_city_id': self.env.ref('l10n_br_base.city_3205002').id,
'country_id': self.env.ref('base.br').id,
'city': 'Serra',
'zip': '29161-695',
'phone': '+55 27 2916-1695',
'email': '[email protected]',
'website': 'www.companytest.com.br'
}
self.company_invalid_cnpj = {
'name': 'Company Test 2',
'legal_name': 'Company Testc 2 Ltda',
'cnpj_cpf': '14.018.406/0001-93',
'inscr_est': '385.611.86-2',
'street': 'Rod BR-101 Norte Contorno',
'number': '955',
'street2': 'Portão 1',
'district': 'Jardim da Saudade',
'state_id': self.env.ref('base.state_br_es').id,
'l10n_br_city_id': self.env.ref('l10n_br_base.city_3205002').id,
'country_id': self.env.ref('base.br').id,
'city': 'Serra',
'zip': '29161-695',
'phone': '+55 27 2916-1695',
'email': '[email protected]',
'website': 'www.companytest.com.br'
}
self.company_invalid_inscr_est = {
'name': 'Company Test 3',
'legal_name': 'Company Testc 3 Ltda',
'cnpj_cpf': '31.295.101/0001-60',
'inscr_est': '924.511.27-0',
'street': 'Rod BR-101 Norte Contorno',
'number': '955',
'street2': 'Portão 1',
'district': 'Jardim da Saudade',
'state_id': self.env.ref('base.state_br_es').id,
'l10n_br_city_id': self.env.ref('l10n_br_base.city_3205002').id,
'country_id': self.env.ref('base.br').id,
'city': 'Serra',
'zip': '29161-695',
'phone': '+55 27 2916-1695',
'email': '[email protected]',
'website': 'www.companytest.com.br'
}
self.partner_valid = {
'name': 'Partner Test 1',
'legal_name': 'Partner Testc 1 Ltda',
'cnpj_cpf': '734.419.622-06',
'inscr_est': '176.754.07-5',
'street': 'Rod BR-101 Norte Contorno',
'number': '955',
'street2': 'Portão 1',
'district': 'Jardim da Saudade',
'state_id': self.env.ref('base.state_br_es').id,
'l10n_br_city_id': self.env.ref('l10n_br_base.city_3205002').id,
'country_id': self.env.ref('base.br').id,
'city': 'Serra',
'zip': '29161-695',
'phone': '+55 27 2916-1695',
'email': '[email protected]',
'website': 'www.partnertest.com.br'
}
self.partner_invalid_cpf = {
'name': 'Partner Test 2',
'legal_name': 'Partner Testc 2 Ltda',
'cnpj_cpf': '734.419.622-07',
'inscr_est': '538.759.92-5',
'street': 'Rod BR-101 Norte Contorno',
'number': '955',
'street2': 'Portão 1',
'district': 'Jardim da Saudade',
'state_id': self.env.ref('base.state_br_es').id,
'l10n_br_city_id': self.env.ref('l10n_br_base.city_3205002').id,
'country_id': self.env.ref('base.br').id,
'city': 'Serra',
'zip': '29161-695',
'phone': '+55 27 2916-1695',
'email': '[email protected]',
'website': 'www.partnertest.com.br'
}
# Tests on companies
    def test_comp_valid(self):
        """Try to create a record with correct CNPJ and correct
        Inscricao Estadual"""
        try:
            self.env['res.company'].create(self.company_valid)
        except ValidationError:
            self.fail(u"Error when using .create() even with valid CNPJ "
                      u"and Inscricao Estadual")
def test_comp_invalid_cnpj(self):
"""Test if ValidationError raised during .create() with invalid CNPJ
and correct Inscricao Estadual"""
with self.assertRaises(ValidationError):
self.env['res.company'].create(self.company_invalid_cnpj)
def test_comp_invalid_inscr_est(self):
"""Test if ValidationError raised with correct CNPJ
and invalid Inscricao Estadual"""
with self.assertRaises(ValidationError):
self.env['res.company'].create(self.company_invalid_inscr_est)
# Tests on partners
    def test_part_valid(self):
        """Try to create a record with correct CPF and correct
        Inscricao Estadual"""
        try:
            self.env['res.partner'].create(self.partner_valid)
        except ValidationError:
            self.fail(u"Error when using .create() even with valid CPF "
                      u"and Inscricao Estadual")
def test_part_invalid_cpf(self):
"""Test if ValidationError raised during .create() with invalid CPF
and correct Inscricao Estadual"""
with self.assertRaises(ValidationError):
self.env['res.partner'].create(self.partner_invalid_cpf)
# No test on Inscricao Estadual for partners with CPF
# because they haven't Inscricao Estadual
| agpl-3.0 | 8,697,426,201,187,918,000 | 39.02 | 79 | 0.537898 | false |
nttks/jenkins-test | lms/djangoapps/ga_survey/migrations/0002_auto__chg_field_surveysubmission_course_id.py | 1 | 4589 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'SurveySubmission.course_id'
db.alter_column('ga_survey_surveysubmission', 'course_id', self.gf('xmodule_django.models.CourseKeyField')(max_length=255))
def backwards(self, orm):
# Changing field 'SurveySubmission.course_id'
db.alter_column('ga_survey_surveysubmission', 'course_id', self.gf('django.db.models.fields.CharField')(max_length=128))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'ga_survey.surveysubmission': {
'Meta': {'object_name': 'SurveySubmission'},
'course_id': ('xmodule_django.models.CourseKeyField', [], {'max_length': '255', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'survey_answer': ('django.db.models.fields.TextField', [], {}),
'survey_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'unit_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'survey'", 'to': "orm['auth.User']"})
}
}
complete_apps = ['ga_survey'] | agpl-3.0 | -8,267,494,084,640,521,000 | 65.521739 | 182 | 0.565919 | false |
bth/stools | stools/Machine.py | 1 | 4555 | # -*- coding: utf-8 -*-
"""
Module for machine
"""
import paramiko
import re, string
class Machine(object):
"""
Representation of a machine
"""
def __init__(self, name, ip, username, password, gateway="", prompt=None):
"""
Create a new Machine object
:param name: machine name
:param ip: ip address (or hostname)
:param username: username (login) for ssh connection
:param password: password for ssh connection
        :param gateway: machine name of the gateway
:param prompt: prompt to wait
:return: Machine instance
:rtype: Machine
"""
self.name = name
self.ip = ip
self.username = username
self.password = password
        self.gateway_machine_name = gateway
        # default to no gateway until set_gateway() is called, so that
        # execute_command() can safely test self.gateway
        self.gateway = None
        if prompt is None:
            prompt = "[$#]+"
        self.prompt = prompt
def set_gateway(self, gateway_machine):
"""
Set gateway to access to this machine
:param gateway_machine: instance of gateway machine
"""
self.gateway = gateway_machine
def create_connection(self):
"""
Create SSH connection with this machine
"""
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(self.ip, username=self.username, password=self.password)
return client
def write_on_terminal(self, terminal, string_to_write, string_of_end):
"""
Write string_to_write on terminal and wait for string_of_end
:param terminal: terminal instance
:param string_to_write: string to write in terminal
:param string_of_end: string of waiting
"""
terminal.send(string_to_write + "\n")
ret = ''
while re.search(string_of_end, ret) == None:
if re.search("Are you sure you want to continue connecting", ret):
terminal.send("yes" + "\n")
fragment = terminal.recv(9999)
ret += fragment
return ret
def create_connection_by_terminal(self):
"""
Create SSH connection with this machine with terminal
"""
client = self.gateway.create_connection()
terminal = client.invoke_shell()
self.write_on_terminal(terminal, "ssh " + self.username + "@" + self.ip, "password: ")
self.write_on_terminal(terminal, self.password, self.prompt)
return client, terminal
def execute_command(self, command, timeout):
"""
Execute command on this machine
:param command: command to execute
:param timeout: timeout (in seconds) for command execution
:return: return of the command
:rtype: String
"""
        if self.gateway is None:
client = self.create_connection()
stdin, stdout, stderr = client.exec_command(command, timeout=timeout)
ret = stdout.readlines()
ret = ''.join(ret)
ret = ret[:string.rfind(ret, '\n')]
else:
client, terminal = self.create_connection_by_terminal()
ret = self.write_on_terminal(terminal, command, self.prompt)
ret = self.clean_output(ret)
return ret
def execute_copy(self, command, machine_target):
"""
Execute copy command on this machine
:param command: command copy to execute
:param machine_target: machine instance of target machine
:return: return of the command
:rtype: String
"""
        if self.gateway is None:
client = self.create_connection()
terminal = client.invoke_shell()
else:
client, terminal = self.create_connection_by_terminal()
self.write_on_terminal(terminal, command, "password: ")
ret = self.write_on_terminal(terminal, machine_target.password, self.prompt)
return self.clean_output(ret)
def clean_output(self, output):
"""
Delete useless space of output
:param output: string to clean
:return: cleaned string
:rtype: String
"""
cut_start = 0
last_return_position = string.rfind(output, "\r\n")
first_return_position = string.find(output, "\r\n")
cut_start = first_return_position + 2
output = output[cut_start:last_return_position]
return output
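
# Illustrative usage (hostnames and credentials below are placeholders):
#
#     gw = Machine("gateway", "192.0.2.1", "user", "secret")
#     target = Machine("web01", "192.0.2.2", "user", "secret",
#                      gateway="gateway")
#     target.set_gateway(gw)
#     print(target.execute_command("uptime", timeout=30))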
| gpl-2.0 | 7,681,146,573,439,596,000 | 32.992537 | 94 | 0.574973 | false |
svn2github/pylucene | test3/test_PyLuceneThread.py | 1 | 3930 | # ====================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ====================================================================
import sys, lucene, unittest
import time, threading
from lucene import getVMEnv
from PyLuceneTestCase import PyLuceneTestCase
from java.lang import Thread
from org.apache.lucene.analysis.standard import StandardAnalyzer
from org.apache.lucene.document import Document, Field, TextField
from org.apache.lucene.index import Term
from org.apache.lucene.search import TermQuery
class PyLuceneThreadTestCase(PyLuceneTestCase):
"""
Test using threads in PyLucene with python threads
"""
def setUp(self):
super(PyLuceneThreadTestCase, self).setUp()
self.classLoader = Thread.currentThread().getContextClassLoader()
writer = self.getWriter(analyzer=StandardAnalyzer())
doc1 = Document()
doc2 = Document()
doc3 = Document()
doc4 = Document()
doc1.add(Field("field", "one", TextField.TYPE_STORED))
doc2.add(Field("field", "two", TextField.TYPE_STORED))
doc3.add(Field("field", "three", TextField.TYPE_STORED))
doc4.add(Field("field", "one", TextField.TYPE_STORED))
writer.addDocument(doc1)
writer.addDocument(doc2)
writer.addDocument(doc3)
writer.addDocument(doc4)
writer.commit()
writer.close()
self.testData = [('one',2), ('two',1), ('three', 1), ('five', 0)] * 500
self.lock = threading.Lock()
self.totalQueries = 0
def testWithMainThread(self):
""" warm up test for runSearch in main thread """
self.runSearch(2000, True)
def testWithPyLuceneThread(self):
""" Run 5 threads with 2000 queries each """
threads = []
for i in range(5):
threads.append(threading.Thread(target=self.runSearch,
args=(2000,)))
for thread in threads:
thread.start()
for thread in threads:
thread.join()
# we survived!
# and all queries have ran successfully
self.assertEqual(10000, self.totalQueries)
def runSearch(self, runCount, mainThread=False):
""" search for runCount number of times """
# problem: if there are any assertion errors in the child
# thread, the calling thread is not notified and may still
        # consider the test case passed. We are using self.totalQueries
# to double check that work has actually been done.
if not mainThread:
getVMEnv().attachCurrentThread()
time.sleep(0.5)
searcher = self.getSearcher()
try:
for word, count in self.testData[0:runCount]:
query = TermQuery(Term("field", word))
topDocs = searcher.search(query, 50)
self.assertEqual(topDocs.totalHits, count)
self.lock.acquire()
self.totalQueries += 1
self.lock.release()
finally:
del searcher
if __name__ == "__main__":
lucene.initVM(vmargs=['-Djava.awt.headless=true'])
if '-loop' in sys.argv:
sys.argv.remove('-loop')
while True:
try:
unittest.main()
except:
pass
else:
unittest.main()
| apache-2.0 | -3,566,141,051,861,162,500 | 32.02521 | 79 | 0.597455 | false |
NuGrid/NuGridPy | nugridpy/astronomy.py | 1 | 11926 | """
============
astronomy.py
============
Useful functions for astronomy & astrophysics
"""
from functools import update_wrapper
import numpy as np
from scipy import integrate
from . import constants as cs
class ReadOnlyConstants:
"""Callable class for attaching constants as read-only property to a function."""
def __init__(self, constants, func):
"""Constructor that defines function and constants in class instance."""
self._constants = constants
self.func = func
def __call__(self, *args, **kwargs):
"""Defines the class as a callable and executes the decorated function."""
return self.func(*args, **kwargs)
@property
def constants(self):
"""Returns constants as private attribute."""
return self._constants
def attach_constants(*args):
"""Decorator receives function constants first, then attaches them through a callable class."""
def attach(func):
function_with_constants = ReadOnlyConstants(args, func)
# inherit docstring and other magic info from original function
return update_wrapper(function_with_constants, func)
return attach
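
# Illustrative use of the decorator (the constant name is a placeholder):
#
#     @attach_constants(cs.some_const)
#     def f(x):
#         return x * cs.some_const
#
#     f.constants   # -> (cs.some_const,), exposed as a read-only property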
@attach_constants(cs.visc_mol_const)
def visc_mol_sol(T, rho, X):
"""
Molecular plasma viscosity (Spitzer 1962)
Parameters
----------
T : float
temperature in K
rho : float
density in cgs
X : float
H mass fraction
Returns
-------
nu : float
molecular diffusivity in [cm**2/s]
Notes
-----
    According to Eq. 22 in Schatzman (1977). Assumes log Lambda = 15
    (see Table 5.1) and an H/He mix (for a different mix use Eq. 5.54
    in the Spitzer textbook).
"""
visc_mol = cs.visc_mol_const * (1. + (7.*X)) * (T**2.5 / rho)
return visc_mol
@attach_constants(cs.nu_rad_const)
def visc_rad_kap_sc(T, rho, X):
"""
Radiative viscosity (Thomas, 1930) for e- scattering opacity
Parameters
----------
T : float
temperature in K
rho : float
density in cgs
X : float
H mass fraction
Returns
-------
nu : float
radiative diffusivity in [cm**2/s]
Notes
-----
Eqn. 14 in Schatzman, 1977, assume electron scattering opacity
kappa_sc = 0.2*(1+X), Kippenhahn (2nd edn, Eqn 17.2)
"""
kappa = 0.2 * (1.+X)
nu_rad = cs.nu_rad_const * (T**4 / (kappa * rho**2))
return nu_rad
@attach_constants()
def Gamma1_gasrad(beta):
"""
Gamma1 for a mix of ideal gas and radiation
Hansen & Kawaler, page 177, Eqn. 3.110
Parameters
----------
beta : float
Gas pressure fraction Pgas/(Pgas+Prad)
"""
Gamma3minus1 = (2./3.) * (4. - (3.*beta)) / (8. - (7.*beta))
Gamma1 = beta + (4. - (3.*beta)) * Gamma3minus1
return Gamma1
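
# Quick check: Gamma1_gasrad(1.0) (pure ideal gas) returns 5/3, the
# classical monatomic ideal-gas value, while Gamma1_gasrad(0.0)
# (pure radiation) gives 4/3.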
@attach_constants(cs.boltzmann_const, cs.atomic_mass_unit)
def Pgas(rho, T, mmu):
"""
P = R/mu * rho * T
Parameters
----------
rho : float
Density [cgs]
T : float
Temperature [K]
mmu : float
Mean molecular weight
Returns
--------
Gas pressure
"""
R = cs.boltzmann_const / cs.atomic_mass_unit
return (R/mmu) * rho * T
@attach_constants(cs.rad_const)
def Prad(T):
"""
P = cs.rad_const/3 * T**4
Parameters
----------
T : float
Temperature [K]
Returns
--------
Radiation pressure
"""
return (cs.rad_const / 3.) * T**4
@attach_constants(cs.mimf_coeff_6, cs.mimf_coeff_5, cs.mimf_coeff_4,
cs.mimf_coeff_3, cs.mimf_coeff_2, cs.mimf_coeff_1, cs.mimf_coeff_0)
def mimf_ferrario(mi):
""" Curvature MiMf from Ferrario et al. 2005MNRAS.361.1131."""
mf = ((cs.mimf_coeff_6 * (mi**6)) + (cs.mimf_coeff_5 * (mi**5))
- (cs.mimf_coeff_4 * (mi**4)) + (cs.mimf_coeff_3 * (mi**3))
- (cs.mimf_coeff_2 * (mi**2)) + (cs.mimf_coeff_1 * mi) + cs.mimf_coeff_0)
return mf
@attach_constants(cs.core_mass_coeff, cs.core_mass_offset)
def core_mass_L(MH):
"""
Core-mass luminosity relationship from Bloecker (1993)
Parameters
----------
MH : float
Core mass in Msun
Returns
-------
L
Luminosity in Lsun
"""
return cs.core_mass_coeff*(MH - cs.core_mass_offset)
@attach_constants(cs.imf_m1, cs.imf_m2, cs.imf_a1, cs.imf_a2, cs.imf_a3)
def imf(m):
"""
Initial mass function from Kroupa
Parameters
    ----------
m : float
mass (g)
Returns
-------
N(M)dM
for given mass according to Kroupa IMF
"""
const2 = cs.imf_m1**(-cs.imf_a1) - cs.imf_m1**(-cs.imf_a2)
const3 = cs.imf_m2**(-cs.imf_a2) - cs.imf_m2**(-cs.imf_a3)
if m < cs.imf_m1:
alpha = cs.imf_a1
const = -const2 - const3
elif m < cs.imf_m2:
alpha = cs.imf_a2
const = -const3
else:
alpha = cs.imf_a3
const = 0.
return m**(-alpha) + const
@attach_constants()
def int_imf_dm(m1, m2, m, imf_ar, bywhat='bymass', integral='normal'):
"""
Integrate IMF between m1 and m2
Parameters
----------
m1 : float
Lower mass integration bound
m2 : float
Upper mass integration bound
m : array
Mass array
imf_ar : array
Array of IMF values corresponding to mass array
bywhat : string, optional
'bymass' integrates the mass that goes into stars of
that mass interval; or 'bynumber' which integrates the number
of stars in that mass interval. The default is 'bymass'.
    integral : string, optional
'normal' uses scipy.integrate.trapz; 'cum' returns cumulative
trapezoidal integral. The default is 'normal'.
Returns
---------
Integrated initial mass function for given bounds
"""
ind_m = (m >= min(m1, m2)) & (m <= max(m1, m2))
if integral == 'normal':
int_func = integrate.trapz
elif integral == 'cum':
int_func = integrate.cumtrapz
else:
raise ValueError(
"Error in int_imf_dm: don't know how to integrate (normal or cum)")
if bywhat == 'bymass':
return int_func(m[ind_m] * imf_ar[ind_m], m[ind_m])
elif bywhat == 'bynumber':
return int_func(imf_ar[ind_m], m[ind_m])
raise ValueError(
"Error in int_imf_dm: Need integration type (bymass or bynumber)")
@attach_constants(cs.r_sun, cs.m_sun, cs.grav_const)
def am_orb(m1, m2, a, e):
"""
Orbital angular momentum equation
e.g. Ge et al 2010
Parameters
----------
m1, m2 : float
Masses of both stars in Msun
    a : float
Separation in Rsun
e : float
Eccentricity
Returns
--------
Orbital angular momentum
"""
a_cm = a * cs.r_sun
m1_g = m1 * cs.m_sun
m2_g = m2 * cs.m_sun
J_orb = np.sqrt(cs.grav_const * a_cm * ((m1_g**2 * m2_g**2) / (m1_g + m2_g))) * (1 - e**2)
return J_orb
@attach_constants(cs.van_loon_1, cs.van_loon_2, cs.van_loon_3)
def mass_loss_loon05(L, Teff):
"""
Mass loss rate from van Loon et al (2005)
Parameters
----------
L : float
L in L_sun
Teff : float
Teff in K
Returns
-------
Mdot
Mdot in Msun/yr
Notes
-----
ref: van Loon etal 2005, A&A 438, 273
"""
Mdot = (cs.van_loon_1 + np.log10(L / 10.**4) -
cs.van_loon_2 * np.log10(Teff / cs.van_loon_3))
return Mdot
@attach_constants(cs.grav_const, cs.m_sun, cs.r_sun)
def energ_orb(m1, m2, r):
"""
Orbital potential energy equation
Parameters
----------
m1, m2 : float
M in Msun
r : float
Distance in Rsun
Returns
-------
Epot
Epot in erg
"""
epo = -cs.grav_const * m1 * m2 * cs.m_sun**2 / (r * cs.r_sun)
return epo
@attach_constants(cs.r_sun, cs.grav_const, cs.m_sun, cs.day_secs)
def period(A, M1, M2):
"""
Calculate binary period from separation.
Parameters
----------
A : float
        separation A in Rsun
M1, M2 : float
M in Msun
Returns
-------
p
period in days
"""
A *= cs.r_sun
velocity = np.sqrt(cs.grav_const * cs.m_sun * (M1+M2) / A)
p = ((2. * np.pi * A) / velocity) / cs.day_secs
return p
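
# Rough sanity check: for an Earth-Sun analogue, period(215., 1., 0.)
# should come out near 365 days, since 1 AU is roughly 215 solar radii.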
@attach_constants(cs.grav_const, cs.m_sun, cs.r_sun)
def escape_velocity(M, R):
"""
Escape velocity
Parameters
----------
M : float
Mass in solar masses
R : float
Radius in solar radii
Returns
-------
v_escape
in km/s
"""
ve = np.sqrt(2. * cs.grav_const * M * cs.m_sun / (R * cs.r_sun))
ve = ve * 1.e-5
return ve
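
# Sanity check: escape_velocity(1., 1.) gives the solar escape velocity,
# about 618 km/s.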
@attach_constants(cs.avogadro_const, cs.boltzmann_const, cs.mass_H_atom)
def Nasv(macs_val, T):
"""
Parameters
----------
macs_val : float
MACS [mb] at T [K]
T : float
        Temperature [K]
Returns
-------
Na*<sigma v>
for MACS [mb] at T [K]
"""
Na = cs.avogadro_const
k = cs.boltzmann_const
vtherm = (2. * k * T / cs.mass_H_atom)**0.5
s = macs_val * 1.e-27
Nasv_val = s * vtherm * Na
return Nasv_val
@attach_constants(cs.avogadro_const, cs.boltzmann_const, cs.mass_H_atom)
def macs(nasv, T):
"""
Parameters
----------
nasv : float
nasv value
T : float
Temperature [K]
Returns
-------
MACS
[mb] at T [K] from Na*<sigma v>
"""
Na = cs.avogadro_const
k = cs.boltzmann_const
vtherm = (2. * k * T / cs.mass_H_atom)**0.5
s = nasv / (vtherm * Na)
macs_val = s * 1.e27
return macs_val
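
# Note that Nasv() and macs() are inverses of one another:
# macs(Nasv(s, T), T) recovers the original MACS value s.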
@attach_constants()
def mu_e(X):
"""
Mean molecular weight per free electron, assuming full ionisation, and
    approximating mu_i/Z_i ~ 2 for all elements heavier than Helium.
(Kippenhahn & Weigert, Ch 13.1, Eq. 13.8)
Parameters
----------
X : float
Mass fraction of H
Returns
-------
mu_el : float
Free electron mean molecular weight
"""
try:
mu_el = 2. / (1.+X)
except TypeError:
X = np.array([X])
mu_el = 2. / (1.+X)
return mu_el
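
# Examples: mu_e(1.0) -> 1.0 for pure hydrogen, mu_e(0.0) -> 2.0 for a
# hydrogen-free (e.g. pure helium) composition.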
@attach_constants()
def mu(X, Z, A):
"""
Mean molecular weight assuming full ionisation.
(Kippenhahn & Weigert, Ch 13.1, Eq. 13.6)
Parameters
----------
X : float or array
Mass fraction vector
Z : float or array
Charge number vector
A : float or array
Mass number vector
Returns
-------
mmu : float
Mean molecular weight at full ionization
"""
if not isinstance(Z, np.ndarray):
Z = np.array(Z)
if not isinstance(A, np.ndarray):
A = np.array(A)
if not isinstance(X, np.ndarray):
X = np.array(X)
try:
mmu = 1. / sum(X * (1.+Z) / A)
except TypeError:
X = np.array([X])
A = np.array([A])
Z = np.array([Z])
mmu = 1. / sum(X * (1.+Z) / A)
return mmu
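
# Example: fully ionized pure hydrogen (X=1, Z=1, A=1) gives mu = 0.5:
#
#     mu([1.0], [1.0], [1.0])   # -> 0.5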
@attach_constants(cs.idrad_const)
def Trho_idrad(rho, mmu):
"""
T(rho) that separates P_rad from P_gas dominated regions.
Kippenhahn & Weigert, Eq. 16.10
Parameters
----------
rho : float
Density array [cgs]
    mmu : float
Mean molecular weight
Returns
-------
T : float
Temperature at boundary
"""
T = cs.idrad_const * (rho/mmu)**(1./3.)
return T
@attach_constants(cs.iddeg_const)
def Trho_iddeg(rho, mmu, mu_el):
"""
T(rho) that separates ideal gas and degenerate pressure dominated regions.
Kippenhahn & Weigert, Eq. 16.6
Parameters
----------
rho : float
Density array [cgs]
mmu : float
Mean molecular weight
mu_el : float
Mean molecular weight per free electron
Returns
-------
T : float
Temperature at boundary
"""
T = cs.iddeg_const * rho**(2./3.) * mmu / (mu_el**(5./3.))
return T
| bsd-3-clause | -490,455,445,624,667,840 | 19.84965 | 99 | 0.551149 | false |
thimslugga/apistar | apistar/commands/new.py | 1 | 1588 | import os
import shutil
import sys
import click
import apistar
from apistar import schema
APISTAR_PACKAGE_DIR = os.path.dirname(apistar.__file__)
LAYOUTS_DIR = os.path.join(APISTAR_PACKAGE_DIR, 'layouts')
LAYOUT_CHOICES = os.listdir(LAYOUTS_DIR)
IGNORED_DIRECTORIES = ['__pycache__']
class TargetDir(schema.String):
pass
class Layout(schema.String):
description = 'Select the project layout to use.'
default = 'standard'
choices = LAYOUT_CHOICES
class Force(schema.Boolean):
description = 'Overwrite any existing project files.'
default = False
def new(target_dir: TargetDir, layout: Layout, force: Force) -> None:
"""
Create a new project in TARGET_DIR.
"""
source_dir = os.path.join(LAYOUTS_DIR, layout)
copy_paths = []
for dir_path, dirs, filenames in os.walk(source_dir):
dirs[:] = [d for d in dirs if d not in IGNORED_DIRECTORIES]
for filename in filenames:
source_path = os.path.join(dir_path, filename)
rel_path = os.path.relpath(source_path, source_dir)
target_path = os.path.join(target_dir, rel_path)
if os.path.exists(target_path) and not force:
click.echo('Project files already exist. Use `-f` to overwrite.')
sys.exit(1)
copy_paths.append((source_path, target_path))
for source_path, target_path in copy_paths:
click.echo(target_path)
parent = os.path.dirname(target_path)
if parent:
os.makedirs(parent, exist_ok=True)
shutil.copy(source_path, target_path)
| bsd-3-clause | 8,955,669,424,910,434,000 | 28.407407 | 81 | 0.648615 | false |
BEugen/AI | KTF/evaluation_ktf_test.py | 1 | 4967 | # from pip import models
import numpy as np
import sys
import os
import argparse
###################################################################
# Variables #
# When launching project or scripts from Visual Studio, #
# input_dir and output_dir are passed as arguments. #
# Users could set them from the project setting page. #
###################################################################
input_dir = None
output_dir = None
log_dir = None
#################################################################################
# Keras configs. #
# Please refer to https://keras.io/backend . #
#################################################################################
import keras
from keras import backend as K
# K.set_floatx('float32')
# String: 'float16', 'float32', or 'float64'.
# K.set_epsilon(1e-05)
# float. Sets the value of the fuzz factor used in numeric expressions.
# K.set_image_data_format('channels_first')
# data_format: string. 'channels_first' or 'channels_last'.
#################################################################################
# Keras imports. #
#################################################################################
from keras.models import Model
from keras.models import Sequential
from keras.layers import Input
from keras.layers import Lambda
from keras.layers import Layer
from keras.layers import Dense
from keras.layers import Dropout
from keras.layers import Activation
from keras.layers import Flatten
from keras.layers import Conv2D
from keras.layers import MaxPooling2D
from keras.optimizers import SGD
from keras.optimizers import RMSprop
from keras.callbacks import TensorBoard
from keras.utils import np_utils
import pandas as pd
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
from sklearn.metrics import roc_auc_score
# import matplotlib.pyplot as plt
from time import time
from keras.models import model_from_json
def classification(x):
if x < 0.3:
return 0
if 0.3 <= x < 0.5:
return 1
if x >= 0.5:
return 2
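
# Example: classification(0.2) -> 0, classification(0.4) -> 1,
# classification(0.7) -> 2.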
def model_nn(name):
json_file = open(name + '.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
loaded_model = model_from_json(loaded_model_json)
# load weights into new model
loaded_model.load_weights(name + '.h5')
sgd = SGD(lr=0.001, momentum=0.8, decay=0.0, nesterov=False)
loaded_model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
return loaded_model
def main():
so = pd.read_csv('data_so.csv', delimiter=';')
so['c8_q'] = so.iloc[:, [1, 2, 3, 4, 5]].sum(axis=1)
so['label_n'] = so.apply(lambda x: classification(x.iloc[15]), axis=1)
so['label_m'] = so.apply(lambda x: classification(x.iloc[16]), axis=1)
so['label_ug'] = so.apply(lambda x: classification(x.iloc[17]), axis=1)
so.drop(so.columns[[0, 1, 2, 3, 4, 9, 11, 12, 15, 16, 17]], inplace=True, axis=1)
so.iloc[:, 0:7] = \
StandardScaler().fit_transform(so.iloc[:, 0:7].as_matrix())
so.to_csv('data_so_all-pr.csv', sep=';')
data = np.random.permutation(so.values)
X = data[:, 0:7].astype(float)
Y_n = data[:, 7]
Y_m = data[:, 8]
Y_u = data[:, 9]
enc = LabelEncoder()
enc_Y = enc.fit_transform(Y_n)
Y_n_f = np_utils.to_categorical(enc_Y)
enc_Y = enc.fit_transform(Y_m)
Y_m_f = np_utils.to_categorical(enc_Y)
enc_Y = enc.fit_transform(Y_u)
Y_u_f = np_utils.to_categorical(enc_Y)
model = model_nn('model_n')
score = model.evaluate(X, Y_n_f, verbose=1)
print("%s: %.2f%%" % (model.metrics_names[1], score[1] * 100))
model = model_nn('model_m')
score = model.evaluate(X, Y_m_f, verbose=1)
print("%s: %.2f%%" % (model.metrics_names[1], score[1] * 100))
model = model_nn('model_ug')
score = model.evaluate(X, Y_u_f, verbose=1)
print("%s: %.2f%%" % (model.metrics_names[1], score[1] * 100))
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--input_dir", type=str,
default=None,
help="Input directory where where training dataset and meta data are saved",
required=False
)
parser.add_argument("--output_dir", type=str,
default=None,
help="Input directory where where logs and models are saved",
required=False
)
args, unknown = parser.parse_known_args()
input_dir = args.input_dir
output_dir = args.output_dir
log_dir = output_dir
main()
| gpl-3.0 | 7,968,917,967,813,314,000 | 35.240876 | 100 | 0.550856 | false |
tensorflow/probability | tensorflow_probability/python/internal/docstring_util_test.py | 1 | 2144 | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for docstring utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.internal import docstring_util
from tensorflow_probability.python.internal import test_util as tfp_test_util
class DocstringUtil(tfp_test_util.TestCase):
def _testFunction(self):
doc_args = """x: Input to return as output.
y: Baz."""
@docstring_util.expand_docstring(args=doc_args)
def foo(x):
# pylint: disable=g-doc-args
"""Hello world.
Args:
${args}
Returns:
x.
"""
# pylint: enable=g-doc-args
return x
true_docstring = """Hello world.
Args:
x: Input to return as output.
y: Baz.
Returns:
x.
"""
self.assertEqual(foo.__doc__, true_docstring)
def _testClassInit(self):
doc_args = """x: Input to return as output.
y: Baz."""
class Foo(object):
@docstring_util.expand_docstring(args=doc_args)
def __init__(self, x, y):
# pylint: disable=g-doc-args
"""Hello world.
Args:
${args}
Bar.
"""
# pylint: enable=g-doc-args
pass
true_docstring = """Hello world.
Args:
x: Input to return as output.
y: Baz.
Bar.
"""
self.assertEqual(Foo.__doc__, true_docstring)
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | 3,060,544,077,028,014,000 | 23.363636 | 78 | 0.615672 | false |
binho/myservertalks | MySTInstruct.py | 1 | 1607 | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
This file is part of MyServerTalks.
MyServerTalks is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
MyServerTalks is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with MyServerTalks; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
'''
class MySTInstruct:
def __init__(self, command, parameters=[], contact='', escope=''):
self.command = command
self.parameters = parameters
self.contact = contact
self.escope = escope
'''
    result format sent to the script
'''
def __str__(self):
#return (" '" + self.contact + "' '" + self.escope + "' " + self.command + " '" + ";".join(self.parameters) + "'").strip()
return ( self.contact + ' ' + self.escope + ' ' + self.command + ' ' + ';'.join(self.parameters) )
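    # Illustrative usage (values are placeholders):
    #     instr = MySTInstruct("restart", ["apache"], contact="admin", escope="web")
    #     str(instr)   # -> "admin web restart apache"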
def setContact(self, contact):
self.contact = contact
def setEscope(self, escope):
self.escope = escope
def getCommand(self):
return self.command
def getParameters(self):
return self.parameters
def getContact(self):
return self.contact
def getEscope(self):
return self.escope
| gpl-2.0 | -1,327,910,839,982,942,700 | 29.320755 | 124 | 0.685128 | false |
xdutaotao/ntlmaps | lib/www_client.py | 1 | 4899 | #--coding:utf-8--
# This file is part of 'NTLM Authorization Proxy Server'
# Copyright 2001 Dmitry A. Rozmanov <[email protected]>
#
# NTLM APS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# NTLM APS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the software; see the file COPYING. If not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
import string, socket, thread, select, time
import logger, http_header, utils
import ntlm_auth, basic_auth
import proxy_client
class www_HTTP_Client(proxy_client.proxy_HTTP_Client):
#-------------------------------------------------
def connect_rserver(self):
""
self.logger.log('*** Connecting to remote server...')
self.first_run = 0
        # we don't have a proxy, so we have to connect to the server ourselves
rs, rsp = self.client_head_obj.get_http_server()
self.logger.log('(%s:%d)...' % (rs, rsp))
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((rs, rsp))
self.rserver_socket = s
self.rserver_socket_closed = 0
self.current_rserver_net_location = '%s:%d' % (rs, rsp)
self.logger.log('Done.\n')
except:
self.rserver_socket_closed = 1
self.logger.log('Failed.\n')
self.exit()
thread.exit()
#-----------------------------------------------------------------------
def fix_client_header(self):
""
self.logger.log('*** Replacing values in client header...')
if self.config.has_key('CLIENT_HEADER'):
for i in self.config['CLIENT_HEADER'].keys():
self.client_head_obj.del_param(i)
self.client_head_obj.add_param_value(i, self.config['CLIENT_HEADER'][i])
self.logger.log('Done.\n')
# self.logger.log('*** New client header:\n=====\n' + self.client_head_obj.__repr__())
else:
self.logger.log('No need.\n*** There is no "CLIENT_HEADER" section in server.cfg.\n')
self.logger.log("*** Working as selfcontained proxy, then have to change client header.\n")
self.logger.log("*** Remake url format in client header...")
self.client_head_obj.make_right_header()
self.logger.log('Done.\n')
self.client_head_obj.del_param('Keep-Alive')
self.logger.log("*** Just killed 'Keep-Alive' value in the header.\n")
# Code which converts 'Proxy-Connection' value to 'Connection'
# I am not sure that it is needed at all
# May be it is just useless activity
self.logger.log("*** Looking for 'Proxy-Connection' in client header...")
pconnection = self.client_head_obj.get_param_values('Proxy-Connection')
if pconnection:
# if we have 'Proxy-Connection'
self.logger.log("there are some.\n")
wconnection = self.client_head_obj.get_param_values('Connection')
if wconnection:
# if we have 'Connection' as well
self.logger.log("*** There is a 'Connection' value in the header.\n")
self.client_head_obj.del_param('Proxy-Connection')
self.logger.log("*** Just killed 'Proxy-Connection' value in the header.\n")
else:
self.logger.log("*** There is no 'Connection' value in the header.\n")
self.client_head_obj.del_param('Proxy-Connection')
for i in pconnection:
self.client_head_obj.add_param_value('Connection', i)
self.logger.log("*** Changed 'Proxy-Connection' to 'Connection' header value.\n")
else:
self.logger.log("there aren't any.\n")
# End of doubtable code.
# Show reworked header.
self.logger.log('*** New client header:\n=====\n' + self.client_head_obj.__repr__())
#-----------------------------------------------------------------------
def check_connected_remote_server(self):
""
# if we are working as a standalone proxy server
rs, rsp = self.client_head_obj.get_http_server()
if self.current_rserver_net_location != '%s:%d' % (rs, rsp):
# if current connection is not we need then close it.
self.logger.log('*** We had wrong connection for new request so we have to close it.\n')
self.close_rserver()
| gpl-2.0 | -9,079,188,218,544,254,000 | 43.135135 | 100 | 0.585834 | false |
Hanaasagi/sorator | tests/orm/test_model_global_scopes.py | 1 | 4366 | # -*- coding: utf-8 -*-
from . import DatabaseConnectionResolver
from .. import OratorTestCase
from orator.orm.scopes import Scope
from orator import Model
class ModelGlobalScopesTestCase(OratorTestCase):
@classmethod
def setUpClass(cls):
Model.set_connection_resolver(DatabaseConnectionResolver())
@classmethod
def tearDownClass(cls):
Model.unset_connection_resolver()
def test_global_scope_is_applied(self):
model = GlobalScopesModel()
query = model.new_query()
self.assertEqual(
'SELECT * FROM "table" WHERE "active" = ?',
query.to_sql()
)
self.assertEqual([1], query.get_bindings())
def test_global_scope_can_be_removed(self):
model = GlobalScopesModel()
query = model.new_query().without_global_scope(ActiveScope)
self.assertEqual(
'SELECT * FROM "table"',
query.to_sql()
)
self.assertEqual([], query.get_bindings())
def test_callable_global_scope_is_applied(self):
model = CallableGlobalScopesModel()
query = model.new_query()
self.assertEqual(
'SELECT * FROM "table" WHERE "active" = ? ORDER BY "name" ASC',
query.to_sql()
)
self.assertEqual([1], query.get_bindings())
def test_callable_global_scope_can_be_removed(self):
model = CallableGlobalScopesModel()
query = model.new_query().without_global_scope('active_scope')
self.assertEqual(
'SELECT * FROM "table" ORDER BY "name" ASC',
query.to_sql()
)
self.assertEqual([], query.get_bindings())
def test_global_scope_can_be_removed_after_query_is_executed(self):
model = CallableGlobalScopesModel()
query = model.new_query()
self.assertEqual(
'SELECT * FROM "table" WHERE "active" = ? ORDER BY "name" ASC',
query.to_sql()
)
self.assertEqual([1], query.get_bindings())
query.without_global_scope('active_scope')
self.assertEqual(
'SELECT * FROM "table" ORDER BY "name" ASC',
query.to_sql()
)
self.assertEqual([], query.get_bindings())
def test_all_global_scopes_can_be_removed(self):
model = CallableGlobalScopesModel()
query = model.new_query().without_global_scopes()
self.assertEqual(
'SELECT * FROM "table"',
query.to_sql()
)
self.assertEqual([], query.get_bindings())
query = CallableGlobalScopesModel.without_global_scopes()
self.assertEqual(
'SELECT * FROM "table"',
query.to_sql()
)
self.assertEqual([], query.get_bindings())
def test_global_scopes_with_or_where_conditions_are_nested(self):
model = CallableGlobalScopesModelWithOr()
query = model.new_query().where('col1', 'val1').or_where('col2', 'val2')
self.assertEqual(
'SELECT "email", "password" FROM "table" '
'WHERE ("col1" = ? OR "col2" = ?) AND ("email" = ? OR "email" = ?) '
'AND ("active" = ?) ORDER BY "name" ASC',
query.to_sql()
)
self.assertEqual(
['val1', 'val2', '[email protected]', '[email protected]', True],
query.get_bindings()
)
class CallableGlobalScopesModel(Model):
__table__ = 'table'
@classmethod
def _boot(cls):
cls.add_global_scope('active_scope', lambda query: query.where('active', 1))
cls.add_global_scope(lambda query: query.order_by('name'))
super(CallableGlobalScopesModel, cls)._boot()
class CallableGlobalScopesModelWithOr(CallableGlobalScopesModel):
__table__ = 'table'
@classmethod
def _boot(cls):
cls.add_global_scope('or_scope', lambda q: q.where('email', '[email protected]').or_where('email', '[email protected]'))
cls.add_global_scope(lambda query: query.select('email', 'password'))
super(CallableGlobalScopesModelWithOr, cls)._boot()
class GlobalScopesModel(Model):
__table__ = 'table'
@classmethod
def _boot(cls):
cls.add_global_scope(ActiveScope())
super(GlobalScopesModel, cls)._boot()
class ActiveScope(Scope):
def apply(self, builder, model):
return builder.where('active', 1)
| mit | 1,183,138,807,027,126,800 | 27.167742 | 122 | 0.593907 | false |
bl4ckdu5t/registron | adminArea.py | 1 | 29237 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'adminArea.ui'
#
# Created: Mon Oct 20 09:26:27 2014
# by: PyQt4 UI code generator 4.9.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_adminWindow(object):
def setupUi(self, adminWindow):
adminWindow.setObjectName(_fromUtf8("adminWindow"))
adminWindow.setEnabled(True)
adminWindow.resize(745, 514)
adminWindow.setMinimumSize(QtCore.QSize(329, 218))
adminWindow.setMouseTracking(False)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8("resources/images/48x48/registron.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
adminWindow.setWindowIcon(icon)
adminWindow.setWindowOpacity(1.0)
adminWindow.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);"))
self.centralwidget = QtGui.QWidget(adminWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.tabWidget = QtGui.QTabWidget(self.centralwidget)
self.tabWidget.setGeometry(QtCore.QRect(10, 10, 721, 451))
font = QtGui.QFont()
font.setPointSize(9)
self.tabWidget.setFont(font)
self.tabWidget.setStyleSheet(_fromUtf8("color:white;background: rgb(10, 80, 111);"))
self.tabWidget.setTabShape(QtGui.QTabWidget.Rounded)
self.tabWidget.setTabsClosable(False)
self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
self.indexTab = QtGui.QWidget()
self.indexTab.setObjectName(_fromUtf8("indexTab"))
self.schoolNameLabel = QtGui.QLabel(self.indexTab)
self.schoolNameLabel.setGeometry(QtCore.QRect(20, 15, 161, 21))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.schoolNameLabel.setFont(font)
self.schoolNameLabel.setObjectName(_fromUtf8("schoolNameLabel"))
self.addStudentLabel = QtGui.QLabel(self.indexTab)
self.addStudentLabel.setGeometry(QtCore.QRect(20, 130, 101, 16))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.addStudentLabel.setFont(font)
self.addStudentLabel.setObjectName(_fromUtf8("addStudentLabel"))
self.surnameLabel = QtGui.QLabel(self.indexTab)
self.surnameLabel.setGeometry(QtCore.QRect(20, 160, 81, 16))
self.surnameLabel.setObjectName(_fromUtf8("surnameLabel"))
self.othernamesLabel = QtGui.QLabel(self.indexTab)
self.othernamesLabel.setGeometry(QtCore.QRect(370, 160, 101, 16))
self.othernamesLabel.setObjectName(_fromUtf8("othernamesLabel"))
self.studentsDeptLabel = QtGui.QLabel(self.indexTab)
self.studentsDeptLabel.setGeometry(QtCore.QRect(20, 230, 171, 16))
self.studentsDeptLabel.setObjectName(_fromUtf8("studentsDeptLabel"))
self.imageDropLabel = QtGui.QLabel(self.indexTab)
self.imageDropLabel.setGeometry(QtCore.QRect(20, 300, 641, 16))
self.imageDropLabel.setObjectName(_fromUtf8("imageDropLabel"))
self.schoolNameBtn = QtGui.QPushButton(self.indexTab)
self.schoolNameBtn.setGeometry(QtCore.QRect(560, 80, 141, 31))
self.schoolNameBtn.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);"))
self.schoolNameBtn.setObjectName(_fromUtf8("schoolNameBtn"))
self.addStudentBtn = QtGui.QPushButton(self.indexTab)
self.addStudentBtn.setGeometry(QtCore.QRect(560, 380, 141, 31))
self.addStudentBtn.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);"))
self.addStudentBtn.setObjectName(_fromUtf8("addStudentBtn"))
self.schoolSaved = QtGui.QLabel(self.indexTab)
self.schoolSaved.setGeometry(QtCore.QRect(20, 80, 131, 16))
font = QtGui.QFont()
font.setItalic(True)
self.schoolSaved.setFont(font)
self.schoolSaved.setObjectName(_fromUtf8("schoolSaved"))
self.studentImage = QtGui.QLineEdit(self.indexTab)
self.studentImage.setGeometry(QtCore.QRect(20, 330, 681, 31))
self.studentImage.setStyleSheet(_fromUtf8("background:white;color:black;"))
self.studentImage.setObjectName(_fromUtf8("studentImage"))
self.surname = QtGui.QLineEdit(self.indexTab)
self.surname.setGeometry(QtCore.QRect(20, 180, 331, 31))
self.surname.setStyleSheet(_fromUtf8("background:white;color:black;"))
self.surname.setObjectName(_fromUtf8("surname"))
self.othernames = QtGui.QLineEdit(self.indexTab)
self.othernames.setGeometry(QtCore.QRect(370, 180, 331, 31))
self.othernames.setStyleSheet(_fromUtf8("background:white;color:black;"))
self.othernames.setObjectName(_fromUtf8("othernames"))
self.schoolName = QtGui.QLineEdit(self.indexTab)
self.schoolName.setGeometry(QtCore.QRect(20, 40, 681, 31))
self.schoolName.setStyleSheet(_fromUtf8("background:white;color:black;"))
self.schoolName.setObjectName(_fromUtf8("schoolName"))
self.studentSaved = QtGui.QLabel(self.indexTab)
self.studentSaved.setGeometry(QtCore.QRect(20, 380, 231, 16))
font = QtGui.QFont()
font.setItalic(True)
self.studentSaved.setFont(font)
self.studentSaved.setObjectName(_fromUtf8("studentSaved"))
self.campusID = QtGui.QLineEdit(self.indexTab)
self.campusID.setGeometry(QtCore.QRect(370, 250, 331, 31))
self.campusID.setStyleSheet(_fromUtf8("background:white;color:black;"))
self.campusID.setObjectName(_fromUtf8("campusID"))
self.campusIDLabel = QtGui.QLabel(self.indexTab)
self.campusIDLabel.setGeometry(QtCore.QRect(370, 230, 101, 16))
self.campusIDLabel.setObjectName(_fromUtf8("campusIDLabel"))
self.studentDept = QtGui.QComboBox(self.indexTab)
self.studentDept.setGeometry(QtCore.QRect(20, 250, 331, 31))
self.studentDept.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111); color:white;"))
self.studentDept.setObjectName(_fromUtf8("studentDept"))
self.tabWidget.addTab(self.indexTab, _fromUtf8(""))
self.studentsTab = QtGui.QWidget()
self.studentsTab.setObjectName(_fromUtf8("studentsTab"))
self.studlistLabel = QtGui.QLabel(self.studentsTab)
self.studlistLabel.setGeometry(QtCore.QRect(10, 20, 251, 16))
self.studlistLabel.setObjectName(_fromUtf8("studlistLabel"))
self.dataEditContainer = QtGui.QWidget(self.studentsTab)
self.dataEditContainer.setGeometry(QtCore.QRect(10, 80, 491, 331))
self.dataEditContainer.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);"))
self.dataEditContainer.setObjectName(_fromUtf8("dataEditContainer"))
self.ed_studentName = QtGui.QLineEdit(self.dataEditContainer)
self.ed_studentName.setGeometry(QtCore.QRect(10, 40, 451, 31))
self.ed_studentName.setStyleSheet(_fromUtf8("color:black;background:white;"))
self.ed_studentName.setObjectName(_fromUtf8("ed_studentName"))
self.studnameLabel = QtGui.QLabel(self.dataEditContainer)
self.studnameLabel.setGeometry(QtCore.QRect(10, 20, 121, 16))
self.studnameLabel.setObjectName(_fromUtf8("studnameLabel"))
self.studidLabel = QtGui.QLabel(self.dataEditContainer)
self.studidLabel.setGeometry(QtCore.QRect(10, 90, 121, 16))
self.studidLabel.setObjectName(_fromUtf8("studidLabel"))
self.ed_studentID = QtGui.QLineEdit(self.dataEditContainer)
self.ed_studentID.setGeometry(QtCore.QRect(10, 110, 451, 31))
self.ed_studentID.setStyleSheet(_fromUtf8("color:black;background:white;"))
self.ed_studentID.setObjectName(_fromUtf8("ed_studentID"))
self.studDeptLabel = QtGui.QLabel(self.dataEditContainer)
self.studDeptLabel.setGeometry(QtCore.QRect(10, 160, 121, 16))
self.studDeptLabel.setObjectName(_fromUtf8("studDeptLabel"))
self.studImageLabel = QtGui.QLabel(self.dataEditContainer)
self.studImageLabel.setGeometry(QtCore.QRect(10, 220, 121, 16))
self.studImageLabel.setObjectName(_fromUtf8("studImageLabel"))
self.ed_image = QtGui.QLineEdit(self.dataEditContainer)
self.ed_image.setGeometry(QtCore.QRect(10, 240, 451, 31))
self.ed_image.setStyleSheet(_fromUtf8("color:black;background:white;"))
self.ed_image.setObjectName(_fromUtf8("ed_image"))
self.updateStudentBtn = QtGui.QPushButton(self.dataEditContainer)
self.updateStudentBtn.setGeometry(QtCore.QRect(330, 280, 131, 31))
self.updateStudentBtn.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111); color: white;"))
self.updateStudentBtn.setObjectName(_fromUtf8("updateStudentBtn"))
self.studentRecorded = QtGui.QLabel(self.dataEditContainer)
self.studentRecorded.setGeometry(QtCore.QRect(10, 280, 251, 31))
self.studentRecorded.setObjectName(_fromUtf8("studentRecorded"))
self.ed_department = QtGui.QComboBox(self.dataEditContainer)
self.ed_department.setGeometry(QtCore.QRect(10, 180, 451, 31))
self.ed_department.setObjectName(_fromUtf8("ed_department"))
self.matricEditEntry = QtGui.QLineEdit(self.studentsTab)
self.matricEditEntry.setGeometry(QtCore.QRect(10, 40, 341, 31))
self.matricEditEntry.setStyleSheet(_fromUtf8("color:black; background: white;"))
self.matricEditEntry.setObjectName(_fromUtf8("matricEditEntry"))
self.editStudentBtn = QtGui.QPushButton(self.studentsTab)
self.editStudentBtn.setGeometry(QtCore.QRect(370, 40, 131, 31))
self.editStudentBtn.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111); color: white;"))
self.editStudentBtn.setObjectName(_fromUtf8("editStudentBtn"))
self.studentNotFound = QtGui.QLabel(self.studentsTab)
self.studentNotFound.setGeometry(QtCore.QRect(10, 90, 301, 31))
self.studentNotFound.setObjectName(_fromUtf8("studentNotFound"))
self.studentRecordSuccess = QtGui.QLabel(self.studentsTab)
self.studentRecordSuccess.setGeometry(QtCore.QRect(20, 100, 291, 21))
self.studentRecordSuccess.setStyleSheet(_fromUtf8(""))
self.studentRecordSuccess.setObjectName(_fromUtf8("studentRecordSuccess"))
self.tabWidget.addTab(self.studentsTab, _fromUtf8(""))
self.coursesTab = QtGui.QWidget()
self.coursesTab.setObjectName(_fromUtf8("coursesTab"))
self.label = QtGui.QLabel(self.coursesTab)
self.label.setGeometry(QtCore.QRect(10, 10, 171, 21))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setObjectName(_fromUtf8("label"))
self.addDept = QtGui.QLineEdit(self.coursesTab)
self.addDept.setGeometry(QtCore.QRect(10, 40, 551, 31))
self.addDept.setStyleSheet(_fromUtf8("background:white; color: black;"))
self.addDept.setInputMask(_fromUtf8(""))
self.addDept.setPlaceholderText(_fromUtf8(""))
self.addDept.setObjectName(_fromUtf8("addDept"))
self.addDeptBtn = QtGui.QPushButton(self.coursesTab)
self.addDeptBtn.setGeometry(QtCore.QRect(570, 40, 131, 31))
self.addDeptBtn.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);"))
self.addDeptBtn.setObjectName(_fromUtf8("addDeptBtn"))
self.label_2 = QtGui.QLabel(self.coursesTab)
self.label_2.setGeometry(QtCore.QRect(10, 180, 331, 21))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_2.setFont(font)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.label_3 = QtGui.QLabel(self.coursesTab)
self.label_3.setGeometry(QtCore.QRect(10, 90, 171, 21))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_3.setFont(font)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.addCourse = QtGui.QLineEdit(self.coursesTab)
self.addCourse.setGeometry(QtCore.QRect(10, 120, 271, 31))
self.addCourse.setStyleSheet(_fromUtf8("background:white; color: black;"))
self.addCourse.setInputMask(_fromUtf8(""))
self.addCourse.setPlaceholderText(_fromUtf8(""))
self.addCourse.setObjectName(_fromUtf8("addCourse"))
self.addCourseBtn = QtGui.QPushButton(self.coursesTab)
self.addCourseBtn.setGeometry(QtCore.QRect(570, 120, 131, 31))
self.addCourseBtn.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);"))
self.addCourseBtn.setObjectName(_fromUtf8("addCourseBtn"))
self.label_4 = QtGui.QLabel(self.coursesTab)
self.label_4.setGeometry(QtCore.QRect(300, 90, 151, 21))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_4.setFont(font)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.deleteDeptBtn = QtGui.QPushButton(self.coursesTab)
self.deleteDeptBtn.setGeometry(QtCore.QRect(10, 350, 271, 31))
self.deleteDeptBtn.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);color: white;"))
self.deleteDeptBtn.setObjectName(_fromUtf8("deleteDeptBtn"))
self.DeptEditBtn = QtGui.QPushButton(self.coursesTab)
self.DeptEditBtn.setGeometry(QtCore.QRect(10, 310, 271, 31))
self.DeptEditBtn.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);color: white;"))
self.DeptEditBtn.setObjectName(_fromUtf8("DeptEditBtn"))
self.deleteCourseBtn = QtGui.QPushButton(self.coursesTab)
self.deleteCourseBtn.setGeometry(QtCore.QRect(320, 350, 271, 31))
self.deleteCourseBtn.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);color: white;"))
self.deleteCourseBtn.setObjectName(_fromUtf8("deleteCourseBtn"))
self.CourseEditBtn = QtGui.QPushButton(self.coursesTab)
self.CourseEditBtn.setGeometry(QtCore.QRect(320, 310, 271, 31))
self.CourseEditBtn.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);color: white;"))
self.CourseEditBtn.setObjectName(_fromUtf8("CourseEditBtn"))
self.addDeptNotice = QtGui.QLabel(self.coursesTab)
self.addDeptNotice.setGeometry(QtCore.QRect(300, 20, 261, 20))
font = QtGui.QFont()
font.setItalic(True)
self.addDeptNotice.setFont(font)
self.addDeptNotice.setText(_fromUtf8(""))
self.addDeptNotice.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.addDeptNotice.setObjectName(_fromUtf8("addDeptNotice"))
self.courseAddDeptList = QtGui.QComboBox(self.coursesTab)
self.courseAddDeptList.setGeometry(QtCore.QRect(300, 120, 261, 31))
self.courseAddDeptList.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);color: white;"))
self.courseAddDeptList.setObjectName(_fromUtf8("courseAddDeptList"))
self.courseAdded = QtGui.QLabel(self.coursesTab)
self.courseAdded.setGeometry(QtCore.QRect(470, 90, 101, 20))
font = QtGui.QFont()
font.setItalic(True)
self.courseAdded.setFont(font)
self.courseAdded.setObjectName(_fromUtf8("courseAdded"))
self.deptEditSelect = QtGui.QComboBox(self.coursesTab)
self.deptEditSelect.setGeometry(QtCore.QRect(10, 220, 271, 41))
self.deptEditSelect.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);color: white;"))
self.deptEditSelect.setObjectName(_fromUtf8("deptEditSelect"))
self.DeptEdit = QtGui.QLineEdit(self.coursesTab)
self.DeptEdit.setGeometry(QtCore.QRect(10, 270, 271, 31))
self.DeptEdit.setStyleSheet(_fromUtf8("color: rgba(0,0,0); background: white;"))
self.DeptEdit.setObjectName(_fromUtf8("DeptEdit"))
self.courseEditSelect = QtGui.QComboBox(self.coursesTab)
self.courseEditSelect.setGeometry(QtCore.QRect(320, 220, 271, 41))
self.courseEditSelect.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);color: white;"))
self.courseEditSelect.setObjectName(_fromUtf8("courseEditSelect"))
self.CourseEdit = QtGui.QLineEdit(self.coursesTab)
self.CourseEdit.setGeometry(QtCore.QRect(320, 270, 271, 31))
self.CourseEdit.setStyleSheet(_fromUtf8("color: rgba(0,0,0); background: white;"))
self.CourseEdit.setObjectName(_fromUtf8("CourseEdit"))
self.editUpdateNotif = QtGui.QLabel(self.coursesTab)
self.editUpdateNotif.setGeometry(QtCore.QRect(10, 400, 421, 16))
font = QtGui.QFont()
font.setItalic(True)
self.editUpdateNotif.setFont(font)
self.editUpdateNotif.setObjectName(_fromUtf8("editUpdateNotif"))
self.tabWidget.addTab(self.coursesTab, _fromUtf8(""))
self.settingsTab = QtGui.QWidget()
self.settingsTab.setObjectName(_fromUtf8("settingsTab"))
self.label_11 = QtGui.QLabel(self.settingsTab)
self.label_11.setGeometry(QtCore.QRect(10, 20, 141, 16))
self.label_11.setObjectName(_fromUtf8("label_11"))
self.changePassBtn = QtGui.QPushButton(self.settingsTab)
self.changePassBtn.setGeometry(QtCore.QRect(550, 80, 141, 31))
self.changePassBtn.setStyleSheet(_fromUtf8("background: rgb(10, 80, 111);color:white;"))
self.changePassBtn.setObjectName(_fromUtf8("changePassBtn"))
self.passwordChanged = QtGui.QLabel(self.settingsTab)
self.passwordChanged.setGeometry(QtCore.QRect(10, 80, 381, 16))
font = QtGui.QFont()
font.setItalic(True)
self.passwordChanged.setFont(font)
self.passwordChanged.setText(_fromUtf8(""))
self.passwordChanged.setObjectName(_fromUtf8("passwordChanged"))
self.passwordChange = QtGui.QLineEdit(self.settingsTab)
self.passwordChange.setGeometry(QtCore.QRect(10, 40, 681, 31))
self.passwordChange.setStyleSheet(_fromUtf8("background:white; color: black;"))
self.passwordChange.setEchoMode(QtGui.QLineEdit.Password)
self.passwordChange.setObjectName(_fromUtf8("passwordChange"))
self.tabWidget.addTab(self.settingsTab, _fromUtf8(""))
adminWindow.setCentralWidget(self.centralwidget)
self.menubar = QtGui.QMenuBar(adminWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 745, 21))
self.menubar.setStyleSheet(_fromUtf8("color: white;"))
self.menubar.setDefaultUp(False)
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menuMenu = QtGui.QMenu(self.menubar)
self.menuMenu.setStyleSheet(_fromUtf8(""))
self.menuMenu.setObjectName(_fromUtf8("menuMenu"))
self.menuAdministrators = QtGui.QMenu(self.menubar)
self.menuAdministrators.setObjectName(_fromUtf8("menuAdministrators"))
self.menuHelp = QtGui.QMenu(self.menubar)
self.menuHelp.setObjectName(_fromUtf8("menuHelp"))
adminWindow.setMenuBar(self.menubar)
self.statusbar = QtGui.QStatusBar(adminWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
adminWindow.setStatusBar(self.statusbar)
self.actionQuit = QtGui.QAction(adminWindow)
self.actionQuit.setObjectName(_fromUtf8("actionQuit"))
self.actionDocumentation = QtGui.QAction(adminWindow)
self.actionDocumentation.setObjectName(_fromUtf8("actionDocumentation"))
self.actionAbout = QtGui.QAction(adminWindow)
self.actionAbout.setObjectName(_fromUtf8("actionAbout"))
self.actionCredits = QtGui.QAction(adminWindow)
self.actionCredits.setObjectName(_fromUtf8("actionCredits"))
self.actionSignIn = QtGui.QAction(adminWindow)
self.actionSignIn.setObjectName(_fromUtf8("actionSignIn"))
self.actionLicense = QtGui.QAction(adminWindow)
self.actionLicense.setObjectName(_fromUtf8("actionLicense"))
self.actionAdmin_Logout = QtGui.QAction(adminWindow)
self.actionAdmin_Logout.setObjectName(_fromUtf8("actionAdmin_Logout"))
self.menuMenu.addAction(self.actionQuit)
self.menuMenu.addSeparator()
self.menuAdministrators.addAction(self.actionAdmin_Logout)
self.menuAdministrators.addSeparator()
self.menuHelp.addAction(self.actionDocumentation)
self.menuHelp.addSeparator()
self.menuHelp.addAction(self.actionAbout)
self.menuHelp.addAction(self.actionCredits)
self.menuHelp.addAction(self.actionLicense)
self.menubar.addAction(self.menuMenu.menuAction())
self.menubar.addAction(self.menuAdministrators.menuAction())
self.menubar.addAction(self.menuHelp.menuAction())
self.retranslateUi(adminWindow)
self.tabWidget.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(adminWindow)
def retranslateUi(self, adminWindow):
adminWindow.setWindowTitle(QtGui.QApplication.translate("adminWindow", "Registron Administrator", None, QtGui.QApplication.UnicodeUTF8))
self.schoolNameLabel.setText(QtGui.QApplication.translate("adminWindow", "School Name:", None, QtGui.QApplication.UnicodeUTF8))
self.addStudentLabel.setText(QtGui.QApplication.translate("adminWindow", "Add Student:", None, QtGui.QApplication.UnicodeUTF8))
self.surnameLabel.setText(QtGui.QApplication.translate("adminWindow", "Surname:", None, QtGui.QApplication.UnicodeUTF8))
self.othernamesLabel.setText(QtGui.QApplication.translate("adminWindow", "Other names:", None, QtGui.QApplication.UnicodeUTF8))
self.studentsDeptLabel.setText(QtGui.QApplication.translate("adminWindow", "Student\'s Department:", None, QtGui.QApplication.UnicodeUTF8))
self.imageDropLabel.setText(QtGui.QApplication.translate("adminWindow", "Image (drop a 128x128 image in the folder resources/images/128x128 and put the file name here):", None, QtGui.QApplication.UnicodeUTF8))
self.schoolNameBtn.setText(QtGui.QApplication.translate("adminWindow", "Save School Name", None, QtGui.QApplication.UnicodeUTF8))
self.addStudentBtn.setText(QtGui.QApplication.translate("adminWindow", "Add Student", None, QtGui.QApplication.UnicodeUTF8))
self.schoolSaved.setText(QtGui.QApplication.translate("adminWindow", "School Saved", None, QtGui.QApplication.UnicodeUTF8))
self.studentSaved.setText(QtGui.QApplication.translate("adminWindow", "Student Added", None, QtGui.QApplication.UnicodeUTF8))
self.campusIDLabel.setText(QtGui.QApplication.translate("adminWindow", "Campus ID:", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.indexTab), QtGui.QApplication.translate("adminWindow", "School and Students", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabToolTip(self.tabWidget.indexOf(self.indexTab), QtGui.QApplication.translate("adminWindow", "Adjust settings of school and settings", None, QtGui.QApplication.UnicodeUTF8))
self.studlistLabel.setText(QtGui.QApplication.translate("adminWindow", "Enter Student ID to edit record", None, QtGui.QApplication.UnicodeUTF8))
self.studnameLabel.setText(QtGui.QApplication.translate("adminWindow", "Student Name:", None, QtGui.QApplication.UnicodeUTF8))
self.studidLabel.setText(QtGui.QApplication.translate("adminWindow", "Campus ID:", None, QtGui.QApplication.UnicodeUTF8))
self.studDeptLabel.setText(QtGui.QApplication.translate("adminWindow", "Department:", None, QtGui.QApplication.UnicodeUTF8))
self.studImageLabel.setText(QtGui.QApplication.translate("adminWindow", "Image file name:", None, QtGui.QApplication.UnicodeUTF8))
self.updateStudentBtn.setText(QtGui.QApplication.translate("adminWindow", "Update Record", None, QtGui.QApplication.UnicodeUTF8))
self.studentRecorded.setText(QtGui.QApplication.translate("adminWindow", "<html><head/><body><p><img src=\":/images/resources/icons/warning.png\"/>Fill the necessary empty fields</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.editStudentBtn.setText(QtGui.QApplication.translate("adminWindow", "Edit Student", None, QtGui.QApplication.UnicodeUTF8))
self.studentNotFound.setText(QtGui.QApplication.translate("adminWindow", "<html><head/><body><p><img src=\":/images/resources/icons/warning.png\"/>Student Record not found in our database</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.studentRecordSuccess.setText(QtGui.QApplication.translate("adminWindow", "<html><head/><body><p><img src=\":/images/resources/icons/checked.png\"/> Student record has been updated</p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.studentsTab), QtGui.QApplication.translate("adminWindow", "Manage Students", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("adminWindow", "Add Department:", None, QtGui.QApplication.UnicodeUTF8))
self.addDeptBtn.setText(QtGui.QApplication.translate("adminWindow", "Add", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("adminWindow", "Select Department to Edit Courses in it:", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setText(QtGui.QApplication.translate("adminWindow", "New Course:", None, QtGui.QApplication.UnicodeUTF8))
self.addCourseBtn.setText(QtGui.QApplication.translate("adminWindow", "Add", None, QtGui.QApplication.UnicodeUTF8))
self.label_4.setText(QtGui.QApplication.translate("adminWindow", "Department:", None, QtGui.QApplication.UnicodeUTF8))
self.deleteDeptBtn.setText(QtGui.QApplication.translate("adminWindow", "Delete Department", None, QtGui.QApplication.UnicodeUTF8))
self.DeptEditBtn.setText(QtGui.QApplication.translate("adminWindow", "Edit Department", None, QtGui.QApplication.UnicodeUTF8))
self.deleteCourseBtn.setText(QtGui.QApplication.translate("adminWindow", "Delete Course", None, QtGui.QApplication.UnicodeUTF8))
self.CourseEditBtn.setText(QtGui.QApplication.translate("adminWindow", "Edit Course", None, QtGui.QApplication.UnicodeUTF8))
self.courseAdded.setText(QtGui.QApplication.translate("adminWindow", "Course added", None, QtGui.QApplication.UnicodeUTF8))
self.editUpdateNotif.setText(QtGui.QApplication.translate("adminWindow", "Item updated", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.coursesTab), QtGui.QApplication.translate("adminWindow", "Manage Courses", None, QtGui.QApplication.UnicodeUTF8))
self.tabWidget.setTabToolTip(self.tabWidget.indexOf(self.coursesTab), QtGui.QApplication.translate("adminWindow", "Add and Edit School Courses", None, QtGui.QApplication.UnicodeUTF8))
self.label_11.setText(QtGui.QApplication.translate("adminWindow", "Change Password:", None, QtGui.QApplication.UnicodeUTF8))
self.changePassBtn.setText(QtGui.QApplication.translate("adminWindow", "Update Password", None, QtGui.QApplication.UnicodeUTF8))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.settingsTab), QtGui.QApplication.translate("adminWindow", "Settings", None, QtGui.QApplication.UnicodeUTF8))
self.menuMenu.setTitle(QtGui.QApplication.translate("adminWindow", "Application", None, QtGui.QApplication.UnicodeUTF8))
self.menuAdministrators.setTitle(QtGui.QApplication.translate("adminWindow", "Administrator", None, QtGui.QApplication.UnicodeUTF8))
self.menuHelp.setTitle(QtGui.QApplication.translate("adminWindow", "Help", None, QtGui.QApplication.UnicodeUTF8))
self.actionQuit.setText(QtGui.QApplication.translate("adminWindow", "Quit registron", None, QtGui.QApplication.UnicodeUTF8))
self.actionQuit.setStatusTip(QtGui.QApplication.translate("adminWindow", "Exit this program", None, QtGui.QApplication.UnicodeUTF8))
self.actionQuit.setShortcut(QtGui.QApplication.translate("adminWindow", "Ctrl+Q", None, QtGui.QApplication.UnicodeUTF8))
self.actionDocumentation.setText(QtGui.QApplication.translate("adminWindow", "Online Documentation", None, QtGui.QApplication.UnicodeUTF8))
self.actionDocumentation.setShortcut(QtGui.QApplication.translate("adminWindow", "Ctrl+?", None, QtGui.QApplication.UnicodeUTF8))
self.actionAbout.setText(QtGui.QApplication.translate("adminWindow", "About Registron", None, QtGui.QApplication.UnicodeUTF8))
self.actionAbout.setShortcut(QtGui.QApplication.translate("adminWindow", "Ctrl+A", None, QtGui.QApplication.UnicodeUTF8))
self.actionCredits.setText(QtGui.QApplication.translate("adminWindow", "Credits", None, QtGui.QApplication.UnicodeUTF8))
self.actionCredits.setShortcut(QtGui.QApplication.translate("adminWindow", "Ctrl+R", None, QtGui.QApplication.UnicodeUTF8))
self.actionSignIn.setText(QtGui.QApplication.translate("adminWindow", "Manage Accounts", None, QtGui.QApplication.UnicodeUTF8))
self.actionLicense.setText(QtGui.QApplication.translate("adminWindow", "License", None, QtGui.QApplication.UnicodeUTF8))
self.actionAdmin_Logout.setText(QtGui.QApplication.translate("adminWindow", "Admin Logout", None, QtGui.QApplication.UnicodeUTF8))
import app_rc
| mit | 8,117,343,298,893,103,000 | 70.136253 | 254 | 0.720525 | false |
sushant-hiray/teamflowy | blog/migrations/0001_initial.py | 1 | 2900 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Blog'
db.create_table(u'blog_blog', (
('Id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=200)),
('Description', self.gf('django.db.models.fields.CharField')(max_length=2000)),
('emp', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['accounts.Employee'])),
))
db.send_create_signal(u'blog', ['Blog'])
# Adding model 'BlogTags'
db.create_table(u'blog_blogtags', (
('tagID', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('blog', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['blog.Blog'])),
('tag', self.gf('django.db.models.fields.CharField')(max_length=20)),
))
db.send_create_signal(u'blog', ['BlogTags'])
def backwards(self, orm):
# Deleting model 'Blog'
db.delete_table(u'blog_blog')
# Deleting model 'BlogTags'
db.delete_table(u'blog_blogtags')
models = {
u'accounts.employee': {
'Meta': {'object_name': 'Employee'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '400'}),
'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'empid': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'isManager': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'})
},
u'blog.blog': {
'Description': ('django.db.models.fields.CharField', [], {'max_length': '2000'}),
'Id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'Meta': {'object_name': 'Blog'},
'emp': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Employee']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'blog.blogtags': {
'Meta': {'object_name': 'BlogTags'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['blog.Blog']"}),
'tag': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'tagID': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['blog'] | mit | 6,557,216,391,598,298,000 | 45.047619 | 107 | 0.557241 | false |
devilry/devilry-django | devilry/devilry_admin/views/assignment/crinstance_assignment.py | 1 | 4933 | from devilry.apps.core.models import Assignment
from devilry.devilry_account.models import PeriodPermissionGroup
from devilry.devilry_admin.cradminextensions import devilry_crmenu_admin
from devilry.devilry_admin.views.assignment import overview
from devilry.devilry_admin.views.assignment.examiners import add_groups_to_examiner
from devilry.devilry_admin.views.assignment.examiners import bulk_organize as bulk_organize_examiners
from devilry.devilry_admin.views.assignment.examiners import examinerdetails
from devilry.devilry_admin.views.assignment.examiners import overview as examineroverview
from devilry.devilry_admin.views.assignment.examiners import remove_groups_from_examiner
from devilry.devilry_admin.views.assignment import passed_previous_period
from devilry.devilry_admin.views.assignment.students import create_groups
from devilry.devilry_admin.views.assignment.students import delete_groups
from devilry.devilry_admin.views.assignment.students import groupdetails
from devilry.devilry_admin.views.assignment.students import manage_deadlines
from devilry.devilry_admin.views.assignment.students import merge_groups
from devilry.devilry_admin.views.assignment.students import overview as studentoverview
from devilry.devilry_admin.views.assignment.students import replace_groups
from devilry.devilry_admin.views.assignment.students import split_group
from devilry.devilry_admin.views.assignment.download_files import download_archive
from devilry.devilry_admin.views.assignment.statistics import statistics_overview
from devilry.devilry_cradmin import devilry_crinstance
class Menu(devilry_crmenu_admin.Menu):
def build_menu(self):
super(Menu, self).build_menu()
assignment = self.request.cradmin_role
self.add_role_menuitem_object()
self.add_subject_breadcrumb_item(subject=assignment.subject)
self.add_period_breadcrumb_item(period=assignment.period)
self.add_assignment_breadcrumb_item(assignment=assignment,
active=True)
class CrAdminInstance(devilry_crinstance.BaseCrInstanceAdmin):
menuclass = Menu
roleclass = Assignment
apps = [
('overview', overview.App),
('studentoverview', studentoverview.App),
('create_groups', create_groups.App),
('replace_groups', replace_groups.App),
('merge_groups', merge_groups.App),
('split_group', split_group.App),
('delete_groups', delete_groups.App),
('groupdetails', groupdetails.App),
('examineroverview', examineroverview.App),
('examinerdetails', examinerdetails.App),
('add_groups_to_examiner', add_groups_to_examiner.App),
('remove_groups_from_examiner', remove_groups_from_examiner.App),
('bulk_organize_examiners', bulk_organize_examiners.App),
('passed_previous_period', passed_previous_period.App),
('deadline_management', manage_deadlines.App),
('download', download_archive.App),
('statistics', statistics_overview.App)
]
id = 'devilry_admin_assignmentadmin'
rolefrontpage_appname = 'overview'
def get_rolequeryset(self):
return Assignment.objects.filter_user_is_admin(user=self.request.user)\
.select_related('parentnode', 'parentnode__parentnode')\
.order_by('-publishing_time')\
.prefetch_point_to_grade_map()
def get_titletext_for_role(self, role):
"""
Get a short title briefly describing the given ``role``.
Remember that the role is an Assignment.
"""
assignment = role
return assignment
@property
def assignment(self):
return self.request.cradmin_role
@classmethod
def matches_urlpath(cls, urlpath):
return urlpath.startswith('/devilry_admin/assignment')
def __get_devilryrole_for_requestuser(self):
assignment = self.request.cradmin_role
devilryrole = PeriodPermissionGroup.objects.get_devilryrole_for_user_on_period(
user=self.request.user,
period=assignment.period
)
if devilryrole is None:
raise ValueError('Could not find a devilryrole for request.user. This must be a bug in '
'get_rolequeryset().')
return devilryrole
def get_devilryrole_for_requestuser(self):
"""
Get the devilryrole for the requesting user on the current
assignment (request.cradmin_instance).
        The return value is the same as for
        :meth:`devilry.devilry_account.models.PeriodPermissionGroupQuerySet.get_devilryrole_for_user_on_period`,
        except that this method raises ValueError if it does not find a role.
"""
if not hasattr(self, '_devilryrole_for_requestuser'):
self._devilryrole_for_requestuser = self.__get_devilryrole_for_requestuser()
return self._devilryrole_for_requestuser
| bsd-3-clause | -3,565,089,393,286,813,700 | 45.980952 | 112 | 0.715792 | false |
rexdf/Chinese-Localization | Localization.py | 1 | 6325 | import sublime
import sublime_plugin
import os
from hashlib import md5
__version__ = "1.7.2"
CONFIG_NAME = "Localization.sublime-settings"
LANGS = {
"ZH_CN": {
'zipfile': 'ZH_CN.zip',
'syntax_md5sum': '44cd99cdd8ef6c2c60c0a89d53a40b95'
},
"ZH_TW": {
"zipfile": "ZH_TW.zip",
'syntax_md5sum': "fe7457cfd227b7db74e785321f672c4a"
},
"JA_JP": {
"zipfile": "JA_JP.zip",
'syntax_md5sum': "037128b8f8d2616c7239d8e9a7183b4c"
},
"EN": {
"zipfile": "EN.zip",
'syntax_md5sum': "2667c3fe5c1102274051920b1f581adb"
}
}
BLACK_LIST = (
"8a2bc3aa52a2d417b42bdc7c80534ce099fc0c65",
"d8db73c4aa057735e80547773a4293484fd5cb45",
)
def get_setting(name):
config = sublime.load_settings(CONFIG_NAME)
setting = config.get(name, None)
return setting
def restore_setting(name, value):
config = sublime.load_settings(CONFIG_NAME)
config.set(name, value)
sublime.save_settings(CONFIG_NAME)
def init():
lang = get_setting('language')
config_version = get_setting('version')
    # if upgrading to a new version, force an update of the translation
if config_version != __version__:
set_language(lang, force=True)
restore_setting("version", __version__)
else:
set_language(lang)
def unzip_file(zipfile, dst):
from zipfile import ZipFile
with ZipFile(zipfile, "r") as f:
f.extractall(dst)
def get_builtin_pkg_path():
base_path = os.path.dirname(sublime.executable_path())
ret = os.path.join(base_path, 'Packages')
return ret
def set_language(lang, force=False):
if lang not in LANGS:
return
PACKAGES_PATH = sublime.packages_path()
DEFAULT_PATH = os.path.join(PACKAGES_PATH, "Default")
SYN_PATH = os.path.join(DEFAULT_PATH, "Syntax.sublime-menu")
# not force update then check current lang
if not force and os.path.isfile(SYN_PATH):
with open(SYN_PATH, "rb") as f:
syntax = f.read()
m = md5()
m.update(syntax)
if m.hexdigest() == LANGS[lang]['syntax_md5sum']:
sublime.status_message("%s has loaded." % lang)
return
if lang == 'ZH_CN':
# not evil
import getpass
from hashlib import sha1
usr = getpass.getuser().encode('utf-8')
m = md5()
s = sha1()
m.update(usr)
s.update(usr)
res = sha1()
res.update((s.hexdigest() + m.hexdigest()).encode('utf-8'))
if res.hexdigest() in BLACK_LIST:
lang = 'JA_JP'
# mkdir if Default not exist
if not os.path.isdir(DEFAULT_PATH):
os.mkdir(DEFAULT_PATH)
    # if a system locale is detected, override the default (first run only)
from locale import getdefaultlocale
locale_lang = getdefaultlocale()
if locale_lang[0] == "ja_JP":
lang = "JA_JP"
elif locale_lang[0] == "zh_TW" or locale_lang[0] == "zh_HK":
lang = "ZH_TW"
    # Make sure the Default package's functions keep working
GOTO_PY = os.path.join(DEFAULT_PATH, 'goto_line.py')
if not os.path.isfile(GOTO_PY):
SUBLIME_PACKAGE_PATH = get_builtin_pkg_path()
DEFAULT_SRC = os.path.join(
SUBLIME_PACKAGE_PATH, "Default.sublime-package")
unzip_file(DEFAULT_SRC, DEFAULT_PATH)
# Load binary resource
PACKAGE_NAME = __name__.split('.')[0]
LOCALZIP_RES = "Packages/{}/{}".format(PACKAGE_NAME,
LANGS[lang]['zipfile'])
lang_bytes = sublime.load_binary_resource(LOCALZIP_RES)
# Use BytesIO and zipfile to unzip it.
from io import BytesIO
file_buf = BytesIO(lang_bytes)
unzip_file(file_buf, DEFAULT_PATH)
MAIN_MENU = os.path.join(DEFAULT_PATH, "Main.sublime-menu")
with open(MAIN_MENU, "rb") as f:
content = f.read().decode("utf-8")
# Remove mnemonic for OSX
import re
platform = sublime.platform()
if platform == "osx":
pattern = re.compile(r"(?<=[\u3000-\u9FFFa-zA-Z])\([A-Za-z]\)", re.M)
pattern_help = re.compile(r"(ヘルプ|帮助|幫助)")
content = re.sub(pattern, "", content)
content = re.sub(pattern_help, "Help", content)
with open(MAIN_MENU, "wb") as f:
f.write(content.encode("utf-8"))
# Hack sublime menu
import json
content = re.sub(re.compile(r",(?=[\s\r\n]*(}|\]))"), "", content)
content = re.sub(re.compile(r"^\s*//.*?\n", re.S | re.M), "", content)
# Hack JA_JP/Main.sublime-menu line 646
content = re.sub(re.compile(r"(?<=}[, ]) //, \"caption\":.*(?=\n)"),
"", content)
js = json.loads(content, "utf-8")
for i in range(len(js)):
del js[i]["children"]
js = json.dumps(js, ensure_ascii=False, indent=4)
ZZZZ_LOCALE = os.path.join(PACKAGES_PATH, "ZZZZZZZZ-Localization")
ZZZZ_SBMENU = os.path.join(ZZZZ_LOCALE, "Main.sublime-menu")
if not os.path.isdir(ZZZZ_LOCALE):
os.mkdir(ZZZZ_LOCALE)
with open(ZZZZ_SBMENU, "wb") as f:
f.write(js.encode("utf-8"))
class ToggleLanguageCommand(sublime_plugin.ApplicationCommand):
def run(self, language):
set_language(language)
restore_setting("language", language)
def is_checked(self, language):
return get_setting('language') == language
def plugin_loaded():
"""Load and unzip the files."""
sublime.set_timeout(init, 200)
def cleanup():
PACKAGES_PATH = sublime.packages_path()
DEFAULT_PATH = os.path.join(PACKAGES_PATH, "Default")
ZZZZ_LOCALE = os.path.join(PACKAGES_PATH, "ZZZZZZZZ-Localization")
import shutil
shutil.rmtree(DEFAULT_PATH)
shutil.rmtree(ZZZZ_LOCALE)
def plugin_unloaded():
PACKAGE_NAME = __name__.split('.')[0]
from package_control import events
if events.pre_upgrade(PACKAGE_NAME):
print('Upgrading from %s!' % events.pre_upgrade(PACKAGE_NAME))
elif events.remove(PACKAGE_NAME):
# set_language("EN", True)
cleanup()
sublime_plugin.reload_plugin('Default')
print('Removing %s!' % events.remove(PACKAGE_NAME))
| mit | 6,037,125,774,624,771,000 | 29.242574 | 77 | 0.586278 | false |
lukaslueg/wirepy | wirepy/lib/column.py | 1 | 12746 | '''Wireshark displays generic information about a packet's content in its GUI
using a set of columns. Each column has one of several pre-defined column-types
which ``libwireshark`` knows about and fills with content while dissecting a
packet. This allows dissectors of all kinds to provide information about a
packet, no matter where in the protocol this information is ultimately
retrieved from.
For example, :py:attr:`Type.PROTOCOL` provides the name of the deepest protocol
found within a frame; a raw ethernet frame may provide "eth" for PROTOCOL, an IP
packet within the ethernet packet overrules this to "ip", a TCP packet within
the IP-packet again overrules to 'tcp' and an HTTP packet within the TCP packet
finally overrules to 'http'.
.. note::
    Wireshark uses columns in concert with its preferences, the API reading
column-settings directly from the global preferences object. To make this
concept more flexible, we avoid this binding.
'''
from .wireshark import iface, mod
from . import dfilter
from .cdata import (CDataObject, Attribute, BooleanAttribute, StringAttribute,
InstanceAttribute, IntListAttribute, StringListAttribute,
InstanceListAttribute)
class ColumnError(Exception):
'''Base class for all column-related errors.'''
pass
class InvalidColumnType(ColumnError):
'''An invalid column-type was provided.'''
pass
class Type(object):
'''A column-type.''' # TODO
_802IQ_VLAN_ID = mod.COL_8021Q_VLAN_ID #: 802.1Q vlan ID
ABS_DATE_TIME = mod.COL_ABS_DATE_TIME #: Absolute date and time
ABS_TIME = mod.COL_ABS_TIME #: Absolute time
CIRCUIT_ID = mod.COL_CIRCUIT_ID #: Circuit ID
DSTIDX = mod.COL_DSTIDX
#: !! DEPRECATED !! - Dst port idx - Cisco MDS-specific
SRCIDX = mod.COL_SRCIDX
#: !! DEPRECATED !! - Src port idx - Cisco MDS-specific
VSAN = mod.COL_VSAN #: VSAN - Cisco MDS-specific
CUMULATIVE_BYTES = mod.COL_CUMULATIVE_BYTES #: Cumulative number of bytes
CUSTOM = mod.COL_CUSTOM #: Custom column (any filter name's contents)
DCE_CALL = mod.COL_DCE_CALL
#: DCE/RPC connection orientated call id OR datagram sequence number
DCE_CTX = mod.COL_DCE_CTX
#: !! DEPRECATED !! - DCE/RPC connection oriented context id
DELTA_TIME = mod.COL_DELTA_TIME #: Delta time
DELTA_CONV_TIME = mod.COL_DELTA_CONV_TIME
#: Delta time to last frame in conversation
    RES_DST = mod.COL_RES_DST #: Resolved destination
    UNRES_DST = mod.COL_UNRES_DST #: Unresolved destination
    RES_DST_PORT = mod.COL_RES_DST_PORT #: Resolved destination port
UNRES_DST_PORT = mod.COL_UNRES_DST_PORT #: Unresolved destination port
DEF_DST = mod.COL_DEF_DST #: Destination address
DEF_DST_PORT = mod.COL_DEF_DST_PORT #: Destination port
EXPERT = mod.COL_EXPERT #: Expert info
IF_DIR = mod.COL_IF_DIR #: FW-1 monitor interface/direction
OXID = mod.COL_OXID #: !! DEPRECATED !! - Fibre Channel OXID
RXID = mod.COL_RXID #: !! DEPRECATED !! - Fibre Channel RXID
FR_DLCI = mod.COL_FR_DLCI #: !! DEPRECATED !! - Frame Relay DLCI
FREQ_CHAN = mod.COL_FREQ_CHAN #: IEEE 802.11 (and WiMax?) - Channel
BSSGP_TLLI = mod.COL_BSSGP_TLLI #: !! DEPRECATED !! - GPRS BSSGP IE TLLI
HPUX_DEVID = mod.COL_HPUX_DEVID
#: !! DEPRECATED !! - HP-UX Nettl Device ID
HPUX_SUBSYS = mod.COL_HPUX_SUBSYS
#: !! DEPRECATED !! - HP-UX Nettl Subsystem
DEF_DL_DST = mod.COL_DEF_DL_DST #: Data link layer destination address
DEF_DL_SRC = mod.COL_DEF_DL_SRC #: Data link layer source address
    RES_DL_DST = mod.COL_RES_DL_DST #: Resolved DL destination
UNRES_DL_DST = mod.COL_UNRES_DL_DST #: Unresolved DL destination
RES_DL_SRC = mod.COL_RES_DL_SRC #: Resolved DL source
UNRES_DL_SRC = mod.COL_UNRES_DL_SRC #: Unresolved DL source
RSSI = mod.COL_RSSI #: IEEE 802.11 - received signal strength
TX_RATE = mod.COL_TX_RATE #: IEEE 802.11 - TX rate in Mbps
DSCP_VALUE = mod.COL_DSCP_VALUE #: IP DSCP Value
INFO = mod.COL_INFO #: Description
COS_VALUE = mod.COL_COS_VALUE #: !! DEPRECATED !! - L2 COS Value
RES_NET_DST = mod.COL_RES_NET_DST #: Resolved net destination
UNRES_NET_DST = mod.COL_UNRES_NET_DST #: Unresolved net destination
RES_NET_SRC = mod.COL_RES_NET_SRC #: Resolved net source
UNRES_NET_SRC = mod.COL_UNRES_NET_SRC #: Unresolved net source
DEF_NET_DST = mod.COL_DEF_NET_DST #: Network layer destination address
DEF_NET_SRC = mod.COL_DEF_NET_SRC #: Network layer source address
NUMBER = mod.COL_NUMBER #: Packet list item number
PACKET_LENGTH = mod.COL_PACKET_LENGTH #: Packet length in bytes
PROTOCOL = mod.COL_PROTOCOL #: Protocol
REL_TIME = mod.COL_REL_TIME #: Relative time
    REL_CONV_TIME = mod.COL_REL_CONV_TIME #: Relative time to the first frame of the conversation
DEF_SRC = mod.COL_DEF_SRC #: Source address
DEF_SRC_PORT = mod.COL_DEF_SRC_PORT #: Source port
RES_SRC = mod.COL_RES_SRC #: Resolved source
UNRES_SRC = mod.COL_UNRES_SRC #: Unresolved source
RES_SRC_PORT = mod.COL_RES_SRC_PORT #: Resolved source port
UNRES_SRC_PORT = mod.COL_UNRES_SRC_PORT #: Unresolved source Port
TEI = mod.COL_TEI #: Q.921 TEI
UTC_DATE_TIME = mod.COL_UTC_DATE_TIME #: UTC date and time
UTC_TIME = mod.COL_UTC_TIME #: UTC time
CLS_TIME = mod.COL_CLS_TIME
#: Command line specific time (default relative)
NUM_COL_FMTS = mod.NUM_COL_FMTS
MAX_INFO_LEN = mod.COL_MAX_INFO_LEN
MAX_LEN = mod.COL_MAX_LEN
def __init__(self, fmt):
'''Get a reference to specific column-type.
:param fmt:
One of the defined column-types, e.g. :py:attr:`Number`
'''
if fmt not in range(self.NUM_COL_FMTS):
raise InvalidColumnType(fmt)
self.fmt = fmt
def __repr__(self):
r = '<Type description="%s" format="%s">' % (self.format_desc,
self.format_string)
return r
def __int__(self):
return self.fmt
def __eq__(self, other):
return int(other) == int(self)
def __hash__(self):
return hash(self.fmt)
@classmethod
def from_string(cls, format_string):
fmt = mod.get_column_format_from_str(format_string.encode())
if fmt == -1:
raise InvalidColumnType(format_string)
return cls(fmt)
@classmethod
def iter_column_formats(cls):
'''Iterate over all available column formats.
:returns:
An iterator that yields instances of :py:class:`Type`.
'''
for fmt in range(cls.NUM_COL_FMTS):
yield cls(fmt)
@property
def format_desc(self):
return iface.string(mod.col_format_desc(self.fmt))
@property
def format_string(self):
return iface.string(mod.col_format_to_string(self.fmt))
@property
def MAX_BUFFER_LEN(self):
if self.fmt == self.INFO:
return self.MAX_INFO_LEN
else:
return self.MAX_LEN
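
# Illustrative usage sketch (added for clarity; not part of the original
# module). It assumes libwireshark has been initialized by importing this
# module, and only touches names defined in this file.
def _demo_column_types():
    # Wrap one of the pre-defined column-types and inspect it.
    proto = Type(Type.PROTOCOL)
    print(proto.format_string, proto.format_desc)
    # Enumerate every column-type libwireshark knows about.
    for fmt in Type.iter_column_formats():
        print(int(fmt), fmt.format_desc)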
class Format(CDataObject):
    '''A fmt_data: describes a single column (its title, type and display options).'''
_struct = 'fmt_data'
title = StringAttribute(doc='Title of the column.')
type_ = InstanceAttribute(Type, structmember='fmt',
doc=('The column\'s type, one of '
':py:class:`Type`.'))
custom_field = StringAttribute(doc='Field-name for custom columns.')
    custom_occurrence = Attribute(doc=('Optional ordinal of occurrence '
'of the custom field.'))
    visible = BooleanAttribute(doc=('True if the column should be '
                                    'visible in the GUI.'))
resolved = BooleanAttribute(doc=('True to show a more human-'
'readable name.'))
def __init__(self, type_=None, init=None, title=None, custom_field=None,
custom_occurrence=None, visible=None, resolved=None):
'''
        :param init:
The underlying fmt_data-object to wrap or None to create a new one.
'''
self.cdata = init if init is not None else iface.new('fmt_data*')
if title is not None:
self.title = title
if type_ is not None:
self.type_ = type_
if custom_field is not None:
self.custom_field = custom_field
if custom_occurrence is not None:
self.custom_occurrence = custom_occurrence
if visible is not None:
self.visible = visible
if resolved is not None:
self.resolved = resolved
def __repr__(self):
return '<Format title="%s" type_="%s">' % (self.title, self.type_)
class ColumnInfo(CDataObject):
_struct = 'column_info'
num_cols = Attribute()
fmts = IntListAttribute('num_cols', 'col_fmt')
firsts = IntListAttribute(Type.NUM_COL_FMTS, 'col_first')
lasts = IntListAttribute(Type.NUM_COL_FMTS, 'col_last')
titles = StringListAttribute('num_cols', 'col_title')
custom_fields = StringListAttribute('num_cols', 'col_custom_field')
custom_occurrences = IntListAttribute('num_cols', 'col_custom_occurrence')
custom_field_ids = IntListAttribute('num_cols', 'col_custom_field_id')
custom_dfilters = InstanceListAttribute(dfilter.DisplayFilter,
sizeattr='num_cols',
structmember='col_custom_dfilter')
fences = IntListAttribute('num_cols', 'col_fence')
writeable = BooleanAttribute()
def __init__(self, init):
'''Create a new ColumnInfo-descriptor.
:param init:
Either a cdata-object to be wrapped or an iterable of
:py:class:`Format` instances.
'''
if isinstance(init, iface.CData):
self.cdata = init
else:
self.cdata = iface.new('column_info*')
self.num_cols = len(init)
self.firsts = [-1 for i in range(Type.NUM_COL_FMTS)]
self.lasts = [-1 for i in range(Type.NUM_COL_FMTS)]
self.fmts = [fmt.type_ for fmt in init]
self.titles = [fmt.title for fmt in init]
self.custom_fields = [fmt.custom_field if fmt.type_ == Type.CUSTOM
else None for fmt in init]
self.custom_occurrences = [fmt.custom_occurrence
if fmt.type_ == Type.CUSTOM else 0
for fmt in init]
self.custom_field_ids = [-1 for fmt in init]
self.custom_dfilters = [dfilter.DisplayFilter(fmt.custom_field)
if fmt.type_ == Type.CUSTOM else None
for fmt in init]
self.fences = [0 for fmt in init]
self._matx = []
for i in range(self.num_cols):
self._matx.append(iface.new('gboolean[]', Type.NUM_COL_FMTS))
self._matxp = iface.new('gboolean*[]', self._matx)
self.cdata.fmt_matx = self._matxp
for i in range(self.num_cols):
mod.get_column_format_matches(self.cdata.fmt_matx[i],
self.fmts[i])
self._col_data = [iface.NULL for fmt in init]
self._col_datap = iface.new('gchar*[]', self._col_data)
self.cdata.col_data = self._col_datap
self._col_buf = [iface.new('gchar[]', fmt.type_.MAX_BUFFER_LEN)
for fmt in init]
self._col_bufp = iface.new('gchar*[]', self._col_buf)
self.cdata.col_buf = self._col_bufp
self._col_expr = [iface.new('gchar[]', Type.MAX_LEN)
for fmt in init] + [iface.NULL]
self._col_exprp = iface.new('gchar*[]', self._col_expr)
self.cdata.col_expr.col_expr = self._col_exprp
self._col_expr_val = [iface.new('gchar[]', Type.MAX_LEN)
for fmt in init] + [iface.NULL]
self._col_expr_valp = iface.new('gchar*[]', self._col_expr_val)
self.cdata.col_expr.col_expr_val = self._col_expr_valp
for i in range(self.num_cols):
for j in range(Type.NUM_COL_FMTS):
if self._matxp[i][j]:
if self.firsts[j] == -1:
self.firsts[j] = i
self.lasts[j] = i
def __len__(self):
'''Equal to the number of columns in this descriptor'''
return self.num_cols
@property
def have_custom_cols(self):
        '''Whether any of the columns is a custom column.'''
# TODO do we really need this through the API ?
return bool(mod.have_custom_cols(self.cdata))
| gpl-3.0 | -2,718,258,804,250,527,000 | 42.20678 | 79 | 0.597442 | false |
sukuba/js-py-ngram-full-text-search | test2_jsngram.py | 1 | 1764 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# written for python 3 but also run on python 2
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import shutil
import jsngram.jsngram
import jsngram.dir2
import jsngram.text2
import jsngram.json2
def test():
base_dir = os.path.realpath('/scratch') # may be './scratch', or others.
ngram_size = 2
ngram_shorter = True
org_dir = os.path.join(base_dir, 'org')
in_dir = os.path.join(base_dir, 'txt')
out_dir = os.path.join(base_dir, 'idx')
ch_ignore = r'[\s,.,.、。]+'
flat_dir = False
verbose_print = False
def make_index_by_files_inc(n=ngram_size, shorter=ngram_shorter,
src=in_dir, dest=out_dir, flat=flat_dir, ignore=ch_ignore):
"""
text files in src directory will be indexed.
"""
ix = jsngram.jsngram.JsNgram(n, shorter, src, dest, flat, ignore)
entries = jsngram.dir2.list_files(src)
ix.add_files_to_json(entries, verbose_print)
return ix
def remove_entries(dest):
"""
remove files and subdirectories at dest
"""
for entry in os.listdir(dest):
fullpath = os.path.join(dest, entry)
if os.path.isfile(fullpath):
os.remove(fullpath)
else:
shutil.rmtree(fullpath)
def test_suite1():
remove_entries(out_dir)
jsngram.text2.normalize_texts(org_dir, in_dir)
ix = make_index_by_files_inc()
for entry in jsngram.dir2.list_files(out_dir):
fullpath = os.path.join(out_dir, entry)
jsngram.json2.json_end(fullpath)
print('Done.')
test_suite1()
if __name__ == '__main__':
test()
| mit | 6,650,857,769,097,815,000 | 29.807018 | 82 | 0.58713 | false |
sukeesh/Jarvis | jarviscli/plugins/stock.py | 1 | 7341 | import requests
from plugin import plugin
from colorama import Fore
from inspect import cleandoc
import re
@plugin('stock')
class Stock:
"""
stock <stock_id> : Get details of stock identified by <stock_id>(one id at a time)
stock getid : Search stock id
stock profile <stock_id> : Get company profile
stock fstatement <stock_id> : Get latest ANNUAL finincial statement of the company
stock gainers : Most gainers in NYSE
stock losers : Most losers in NYSE
stock help : Prints help
*** AVAILABLE ONLY FOR US EQUITIES ***
Data provided for free by IEX (https://iextrading.com/developer). View IEX’s Terms of Use (https://iextrading.com/api-exhibit-a/).
"""
def __call__(self, jarvis, s):
if not s or 'help' in s:
jarvis.say(cleandoc(self.__doc__), Fore.GREEN)
else:
ps = s.split()
if ps[0] == 'getid':
ps.pop(0)
if ps:
name = ' '.join(ps)
else:
name = jarvis.input("Enter the name of the stock: ")
self.get_stock_id(jarvis, name)
elif ps[0] == 'profile':
if(len(ps) != 2):
jarvis.say("You forgot to mention the symbol", Fore.RED)
else:
symbol = ps[1]
self.get_profile(jarvis, symbol)
elif ps[0] == 'fstatement':
if(len(ps) != 2):
jarvis.say("You forgot to mention the symbol", Fore.RED)
else:
symbol = ps[1]
self.get_financial_stmt(jarvis, symbol)
elif ps[0] == 'gainers':
self.get_gainers(jarvis)
elif ps[0] == 'losers':
self.get_losers(jarvis)
# anything else is treated as a stock symbol
else:
self.get_stock_data(jarvis, s)
def get_stock_data(self, jarvis, quote):
''' Given a stock symbol, get the real time price of the stock '''
url = 'https://financialmodelingprep.com/api/v3/stock/real-time-price/' + quote
resp = requests.get(url)
if(resp.status_code == 200):
data = resp.json()
if('symbol' in data.keys()):
jarvis.say("Symbol: " + str(data['symbol']), Fore.GREEN)
jarvis.say("Price: " + str(data['price']), Fore.GREEN)
jarvis.say("IEX Real-Time Price (https://iextrading.com/developer)")
elif('Error' in data.keys()):
jarvis.say("Invalid stock symbol name", Fore.RED)
else:
jarvis.say("Error. Please retry")
else:
jarvis.say("Cannot find the name. Try again later\n", Fore.RED)
def get_stock_id(self, jarvis, name):
''' Get the list of stock IDs given a company name or part of the company name '''
url = 'https://financialmodelingprep.com/api/v3/company/stock/list'
resp = requests.get(url)
if(resp.status_code == 200):
data = resp.json()
found = False
            # Add try block. Sometimes the endpoint does not work or has unexpected behaviour
try:
for stock in data['symbolsList']:
if(re.match(name.lower(), stock['name'].lower())):
found = True
jarvis.say(stock['symbol'] + "\t\t" + stock['name'], Fore.GREEN)
if not found:
jarvis.say("The given name could not be found\n", Fore.RED)
except KeyError:
jarvis.say("The endpoint is not working at the moment. Try again later", Fore.RED)
else:
jarvis.say("Cannot find the name at this time. Try again later\n", Fore.RED)
def get_profile(self, jarvis, symbol):
''' Given a stock symbol get the company profile '''
url = 'https://financialmodelingprep.com/api/v3/company/profile/' + symbol
resp = requests.get(url)
if(resp.status_code == 200):
data = resp.json()
if(not data):
jarvis.say("Cannot find details for " + symbol, Fore.RED)
else:
jarvis.say(" Symbol : " + data['symbol'], Fore.GREEN)
jarvis.say(" Company : " + data['profile']['companyName'], Fore.GREEN)
jarvis.say(" Industry : " + data['profile']['industry'], Fore.GREEN)
jarvis.say(" Sector : " + data['profile']['sector'], Fore.GREEN)
jarvis.say(" Website : " + data['profile']['website'], Fore.GREEN)
jarvis.say(" Exchange : " + data['profile']['exchange'], Fore.GREEN)
jarvis.say(" Description : " + data['profile']['description'], Fore.GREEN)
else:
jarvis.say("Cannot find details for " + symbol, Fore.RED)
def get_financial_stmt(self, jarvis, symbol):
''' Get the last annual financial statement of a company given it's stock symbol '''
url = 'https://financialmodelingprep.com/api/v3/financials/income-statement/' + symbol
resp = requests.get(url)
if(resp.status_code == 200):
data = resp.json()
if(not data):
jarvis.say("Cannot find details for: " + symbol, Fore.RED)
else:
for key in data['financials'][0].keys():
jarvis.say(key + " => " + data['financials'][0][key], Fore.GREEN)
else:
jarvis.say("Cannot find details for " + symbol, Fore.RED)
def get_gainers(self, jarvis):
        ''' Get the biggest gainers of the day '''
url = 'https://financialmodelingprep.com/api/v3/stock/gainers'
resp = requests.get(url)
if(resp.status_code == 200):
data = resp.json()
if(not data):
jarvis.say("Cannot find details at this moment.", Fore.RED)
else:
for gainer in data['mostGainerStock']:
jarvis.say(gainer['ticker'] + " | " + gainer['companyName'], Fore.GREEN)
jarvis.say("Price: " + str(gainer['price']) + " | Change: " + str(gainer['changes']), Fore.GREEN)
jarvis.say("Percent gained: " + str(gainer['changesPercentage'])[1:-1] + "\n\n", Fore.GREEN)
else:
jarvis.say("Cannot get gainers list at the moment")
def get_losers(self, jarvis):
        ''' Get the biggest losers of the day '''
url = 'https://financialmodelingprep.com/api/v3/stock/losers'
resp = requests.get(url)
if(resp.status_code == 200):
data = resp.json()
if(not data):
jarvis.say("Cannot find details at the moment.", Fore.RED)
else:
for loser in data['mostLoserStock']:
jarvis.say(loser['ticker'] + " | " + loser['companyName'], Fore.GREEN)
jarvis.say("Price: " + str(loser['price']) + " | Change: " + str(loser['changes']), Fore.GREEN)
jarvis.say("Percent lost: " + str(loser['changesPercentage'])[1:-1] + "\n\n", Fore.GREEN)
else:
jarvis.say("Cannot get losers list at the moment")
| mit | 7,597,438,638,204,117,000 | 43.75 | 134 | 0.528682 | false |