# Source: woglinde/osmo-smsc | integration-tests/osmo-smsc-integrationtests.py
# runs integration tests against the osmo-smsc restapi
# Copyright (C) 2016 Henning Heinold <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import tortilla
import argparse
import sys
def test_inserter(host, port):
inserter_collection_api = tortilla.wrap('http://{}:{}/v1/inserterSMPPLinks'.format(host, port), format='json')
inserter_api = tortilla.wrap('http://{}:{}/v1/inserterSMPPLink'.format(host, port), format='json')
inserter_api.config.headers = {'Content-Type': 'application/json'}
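    # (note added for clarity) tortilla builds request URLs from attribute
    # access and call arguments, so inserter_api("client").put(data=client)
    # is assumed to issue
    #   PUT http://<host>:<port>/v1/inserterSMPPLink/client
    # with the dict serialized as a JSON body.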
client={"connectionType": "client",
"hostname": "127.0.0.1",
"port": 88,
"systemId": "systemId",
"systemType": "systemType",
"password": "password"}
server={"connectionType": "server",
"port": 99,
"systemId": "systemId",
"systemType": "systemType",
"password": "password",
"allowedRemoteAddress": "127.0.0.1",
"allowedRemotePort": 99}
inserter_api("client").put(data=client)
inserter_api("client").get()
inserter_api("server").put(data=server)
inserter_api("server").get()
inserter_collection_api.get()
# test update
client['port'] = 99
inserter_api("client").put(data=client)
client_answer = inserter_api("client").get()
if not client_answer['port'] == 99:
sys.exit(1)
server['allowedRemotePort'] = 101
inserter_api("server").put(data=server)
server_answer = inserter_api("server").get()
if not server_answer['allowedRemotePort'] == 101:
sys.exit(1)
inserter_api("client").delete()
inserter_api("server").delete()
def check_arg(args=None):
parser = argparse.ArgumentParser(description='runs integration tests against the osmo-smsc restapi')
parser.add_argument('-H', '--host',
help='host ip',
default='localhost')
parser.add_argument('-p', '--port',
help='port of the rest api server',
default='1700')
results = parser.parse_args(args)
return (results.host,
results.port)
def main():
host, port = check_arg(sys.argv[1:])
test_inserter(host, port)
main()

# Source: ftp2010/svm-de-CAPTCHA | svm.py
#!/usr/bin/env python
from ctypes import *
from ctypes.util import find_library
from os import path
import sys
try:
dirname = path.dirname(path.abspath(__file__))
print dirname
print sys.platform
if sys.platform == 'win32':
libsvm = CDLL(path.join(dirname, r'..\windows\libsvm.dll'))
else:
print path.join(dirname, '../libsvm.so.2')
libsvm = CDLL(path.join(dirname, 'libsvm.so.2'))
except:
# For unix the prefix 'lib' is not considered.
if find_library('svm'):
libsvm = CDLL(find_library('svm'))
elif find_library('libsvm'):
libsvm = CDLL(find_library('libsvm'))
else:
raise Exception('LIBSVM library not found.')
# Construct constants
SVM_TYPE = ['C_SVC', 'NU_SVC', 'ONE_CLASS', 'EPSILON_SVR', 'NU_SVR' ]
KERNEL_TYPE = ['LINEAR', 'POLY', 'RBF', 'SIGMOID', 'PRECOMPUTED']
for i, s in enumerate(SVM_TYPE): exec("%s = %d" % (s , i))
for i, s in enumerate(KERNEL_TYPE): exec("%s = %d" % (s , i))
PRINT_STRING_FUN = CFUNCTYPE(None, c_char_p)
def print_null(s):
return
def genFields(names, types):
return list(zip(names, types))
def fillprototype(f, restype, argtypes):
f.restype = restype
f.argtypes = argtypes
class svm_node(Structure):
_names = ["index", "value"]
_types = [c_int, c_double]
_fields_ = genFields(_names, _types)
def __str__(self):
return '%d:%g' % (self.index, self.value)
def gen_svm_nodearray(xi, feature_max=None, isKernel=None):
if isinstance(xi, dict):
index_range = xi.keys()
elif isinstance(xi, (list, tuple)):
if not isKernel:
xi = [0] + xi # idx should start from 1
index_range = range(len(xi))
else:
raise TypeError('xi should be a dictionary, list or tuple')
if feature_max:
assert(isinstance(feature_max, int))
index_range = filter(lambda j: j <= feature_max, index_range)
if not isKernel:
index_range = filter(lambda j:xi[j] != 0, index_range)
index_range = sorted(index_range)
ret = (svm_node * (len(index_range)+1))()
ret[-1].index = -1
for idx, j in enumerate(index_range):
ret[idx].index = j
ret[idx].value = xi[j]
max_idx = 0
if index_range:
max_idx = index_range[-1]
return ret, max_idx
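# Example (illustrative, not part of the original file):
#   nodes, max_idx = gen_svm_nodearray({1: 0.5, 3: 1.0})
# returns a ctypes array equivalent to [(1, 0.5), (3, 1.0), (-1, 0.0)]
# with max_idx == 3; the trailing index == -1 node is the end-of-vector
# sentinel expected by the C side, and zero-valued features are skipped
# unless isKernel is set.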
class svm_problem(Structure):
_names = ["l", "y", "x"]
_types = [c_int, POINTER(c_double), POINTER(POINTER(svm_node))]
_fields_ = genFields(_names, _types)
def __init__(self, y, x, isKernel=None):
if len(y) != len(x):
raise ValueError("len(y) != len(x)")
self.l = l = len(y)
max_idx = 0
x_space = self.x_space = []
for i, xi in enumerate(x):
tmp_xi, tmp_idx = gen_svm_nodearray(xi,isKernel=isKernel)
x_space += [tmp_xi]
max_idx = max(max_idx, tmp_idx)
self.n = max_idx
self.y = (c_double * l)()
for i, yi in enumerate(y): self.y[i] = yi
self.x = (POINTER(svm_node) * l)()
for i, xi in enumerate(self.x_space): self.x[i] = xi
class svm_parameter(Structure):
_names = ["svm_type", "kernel_type", "degree", "gamma", "coef0",
"cache_size", "eps", "C", "nr_weight", "weight_label", "weight",
"nu", "p", "shrinking", "probability"]
_types = [c_int, c_int, c_int, c_double, c_double,
c_double, c_double, c_double, c_int, POINTER(c_int), POINTER(c_double),
c_double, c_double, c_int, c_int]
_fields_ = genFields(_names, _types)
def __init__(self, options = None):
if options == None:
options = ''
self.parse_options(options)
def __str__(self):
s = ''
attrs = svm_parameter._names + list(self.__dict__.keys())
values = map(lambda attr: getattr(self, attr), attrs)
for attr, val in zip(attrs, values):
s += (' %s: %s\n' % (attr, val))
s = s.strip()
return s
def set_to_default_values(self):
self.svm_type = C_SVC;
self.kernel_type = RBF
self.degree = 3
self.gamma = 0
self.coef0 = 0
self.nu = 0.5
self.cache_size = 100
self.C = 1
self.eps = 0.001
self.p = 0.1
self.shrinking = 1
self.probability = 0
self.nr_weight = 0
self.weight_label = (c_int*0)()
self.weight = (c_double*0)()
self.cross_validation = False
self.nr_fold = 0
self.print_func = cast(None, PRINT_STRING_FUN)
def parse_options(self, options):
if isinstance(options, list):
argv = options
elif isinstance(options, str):
argv = options.split()
else:
raise TypeError("arg 1 should be a list or a str.")
self.set_to_default_values()
self.print_func = cast(None, PRINT_STRING_FUN)
weight_label = []
weight = []
i = 0
while i < len(argv):
if argv[i] == "-s":
i = i + 1
self.svm_type = int(argv[i])
elif argv[i] == "-t":
i = i + 1
self.kernel_type = int(argv[i])
elif argv[i] == "-d":
i = i + 1
self.degree = int(argv[i])
elif argv[i] == "-g":
i = i + 1
self.gamma = float(argv[i])
elif argv[i] == "-r":
i = i + 1
self.coef0 = float(argv[i])
elif argv[i] == "-n":
i = i + 1
self.nu = float(argv[i])
elif argv[i] == "-m":
i = i + 1
self.cache_size = float(argv[i])
elif argv[i] == "-c":
i = i + 1
self.C = float(argv[i])
elif argv[i] == "-e":
i = i + 1
self.eps = float(argv[i])
elif argv[i] == "-p":
i = i + 1
self.p = float(argv[i])
elif argv[i] == "-h":
i = i + 1
self.shrinking = int(argv[i])
elif argv[i] == "-b":
i = i + 1
self.probability = int(argv[i])
elif argv[i] == "-q":
self.print_func = PRINT_STRING_FUN(print_null)
elif argv[i] == "-v":
i = i + 1
self.cross_validation = 1
self.nr_fold = int(argv[i])
if self.nr_fold < 2:
raise ValueError("n-fold cross validation: n must >= 2")
elif argv[i].startswith("-w"):
i = i + 1
self.nr_weight += 1
nr_weight = self.nr_weight
weight_label += [int(argv[i-1][2:])]
weight += [float(argv[i])]
else:
raise ValueError("Wrong options")
i += 1
libsvm.svm_set_print_string_function(self.print_func)
self.weight_label = (c_int*self.nr_weight)()
self.weight = (c_double*self.nr_weight)()
for i in range(self.nr_weight):
self.weight[i] = weight[i]
self.weight_label[i] = weight_label[i]
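# Example (illustrative, not part of the original file): the option string
# mirrors the svm-train command-line flags, e.g.
#   param = svm_parameter('-s 0 -t 2 -c 1 -g 0.5')
# selects C_SVC with an RBF kernel, C=1 and gamma=0.5.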
class svm_model(Structure):
_names = ['param', 'nr_class', 'l', 'SV', 'sv_coef', 'rho',
'probA', 'probB', 'sv_indices', 'label', 'nSV', 'free_sv']
_types = [svm_parameter, c_int, c_int, POINTER(POINTER(svm_node)),
POINTER(POINTER(c_double)), POINTER(c_double),
POINTER(c_double), POINTER(c_double), POINTER(c_int),
POINTER(c_int), POINTER(c_int), c_int]
_fields_ = genFields(_names, _types)
def __init__(self):
self.__createfrom__ = 'python'
def __del__(self):
# free memory created by C to avoid memory leak
if hasattr(self, '__createfrom__') and self.__createfrom__ == 'C':
libsvm.svm_free_and_destroy_model(pointer(self))
def get_svm_type(self):
return libsvm.svm_get_svm_type(self)
def get_nr_class(self):
return libsvm.svm_get_nr_class(self)
def get_svr_probability(self):
return libsvm.svm_get_svr_probability(self)
def get_labels(self):
nr_class = self.get_nr_class()
labels = (c_int * nr_class)()
libsvm.svm_get_labels(self, labels)
return labels[:nr_class]
def get_sv_indices(self):
total_sv = self.get_nr_sv()
sv_indices = (c_int * total_sv)()
libsvm.svm_get_sv_indices(self, sv_indices)
return sv_indices[:total_sv]
def get_nr_sv(self):
return libsvm.svm_get_nr_sv(self)
def is_probability_model(self):
return (libsvm.svm_check_probability_model(self) == 1)
def get_sv_coef(self):
return [tuple(self.sv_coef[j][i] for j in xrange(self.nr_class - 1))
for i in xrange(self.l)]
def get_SV(self):
result = []
for sparse_sv in self.SV[:self.l]:
row = dict()
i = 0
while True:
row[sparse_sv[i].index] = sparse_sv[i].value
if sparse_sv[i].index == -1:
break
i += 1
result.append(row)
return result
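# (note added) get_SV returns each support vector as a sparse dict such as
# {1: 0.5, 3: 1.0, -1: 0.0}; the -1 key is the C-side end-of-vector
# sentinel, copied verbatim by the loop above.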
def toPyModel(model_ptr):
"""
toPyModel(model_ptr) -> svm_model
Convert a ctypes POINTER(svm_model) to a Python svm_model
"""
if bool(model_ptr) == False:
raise ValueError("Null pointer")
m = model_ptr.contents
m.__createfrom__ = 'C'
return m
fillprototype(libsvm.svm_train, POINTER(svm_model), [POINTER(svm_problem), POINTER(svm_parameter)])
fillprototype(libsvm.svm_cross_validation, None, [POINTER(svm_problem), POINTER(svm_parameter), c_int, POINTER(c_double)])
fillprototype(libsvm.svm_save_model, c_int, [c_char_p, POINTER(svm_model)])
fillprototype(libsvm.svm_load_model, POINTER(svm_model), [c_char_p])
fillprototype(libsvm.svm_get_svm_type, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_get_nr_class, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_get_labels, None, [POINTER(svm_model), POINTER(c_int)])
fillprototype(libsvm.svm_get_sv_indices, None, [POINTER(svm_model), POINTER(c_int)])
fillprototype(libsvm.svm_get_nr_sv, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_get_svr_probability, c_double, [POINTER(svm_model)])
fillprototype(libsvm.svm_predict_values, c_double, [POINTER(svm_model), POINTER(svm_node), POINTER(c_double)])
fillprototype(libsvm.svm_predict, c_double, [POINTER(svm_model), POINTER(svm_node)])
fillprototype(libsvm.svm_predict_probability, c_double, [POINTER(svm_model), POINTER(svm_node), POINTER(c_double)])
fillprototype(libsvm.svm_free_model_content, None, [POINTER(svm_model)])
fillprototype(libsvm.svm_free_and_destroy_model, None, [POINTER(POINTER(svm_model))])
fillprototype(libsvm.svm_destroy_param, None, [POINTER(svm_parameter)])
fillprototype(libsvm.svm_check_parameter, c_char_p, [POINTER(svm_problem), POINTER(svm_parameter)])
fillprototype(libsvm.svm_check_probability_model, c_int, [POINTER(svm_model)])
fillprototype(libsvm.svm_set_print_string_function, None, [PRINT_STRING_FUN])

# Source: victor-torres/python-redis-lock | tests/test_redis_lock.py
from __future__ import print_function
import os
import sys
import time
from collections import defaultdict
import pytest
from process_tests import dump_on_error
from process_tests import TestProcess
from process_tests import wait_for_strings
from redis import StrictRedis
from redis_lock import Lock, AlreadyAcquired, NotAcquired, InterruptableThread
from redis_lock import reset_all
from conf import TIMEOUT
from conf import UDS_PATH
from conf import HELPER
@pytest.yield_fixture(scope='module')
def redis_server():
try:
os.unlink(UDS_PATH)
except OSError:
pass
with TestProcess('redis-server', '--port', '0', '--unixsocket', UDS_PATH) as process:
wait_for_strings(process.read, TIMEOUT, "Running")
yield process
@pytest.fixture(scope='function')
def conn(redis_server):
return StrictRedis(unix_socket_path=UDS_PATH)
def test_simple(redis_server):
with TestProcess(sys.executable, HELPER, 'test_simple') as proc:
with dump_on_error(proc.read):
name = 'lock:foobar'
wait_for_strings(
proc.read, TIMEOUT,
'Getting %r ...' % name,
'Got lock for %r.' % name,
'Releasing %r.' % name,
'UNLOCK_SCRIPT not cached.',
'DIED.',
)
def test_no_block(conn):
with Lock(conn, "foobar"):
with TestProcess(sys.executable, HELPER, 'test_no_block') as proc:
with dump_on_error(proc.read):
name = 'lock:foobar'
wait_for_strings(
proc.read, TIMEOUT,
'Getting %r ...' % name,
'Failed to get %r.' % name,
'acquire=>False',
'DIED.',
)
def test_expire(conn):
with Lock(conn, "foobar", expire=TIMEOUT/4):
with TestProcess(sys.executable, HELPER, 'test_expire') as proc:
with dump_on_error(proc.read):
name = 'lock:foobar'
wait_for_strings(
proc.read, TIMEOUT,
'Getting %r ...' % name,
'Got lock for %r.' % name,
'Releasing %r.' % name,
'UNLOCK_SCRIPT not cached.',
'DIED.',
)
lock = Lock(conn, "foobar")
try:
assert lock.acquire(blocking=False) == True
finally:
lock.release()
def test_double_acquire(conn):
lock = Lock(conn, "foobar")
with lock:
pytest.raises(RuntimeError, lock.acquire)
pytest.raises(AlreadyAcquired, lock.acquire)
def test_plain(conn):
with Lock(conn, "foobar"):
time.sleep(0.01)
def test_no_overlap(redis_server):
with TestProcess(sys.executable, HELPER, 'test_no_overlap') as proc:
with dump_on_error(proc.read):
name = 'lock:foobar'
wait_for_strings(proc.read, TIMEOUT, 'Getting %r ...' % name)
wait_for_strings(proc.read, TIMEOUT, 'Got lock for %r.' % name)
wait_for_strings(proc.read, TIMEOUT, 'Releasing %r.' % name)
wait_for_strings(proc.read, TIMEOUT, 'UNLOCK_SCRIPT not cached.')
wait_for_strings(proc.read, 10*TIMEOUT, 'DIED.')
class Event(object):
pid = start = end = '?'
def __str__(self):
return "Event(%s; %r => %r)" % (self.pid, self.start, self.end)
events = defaultdict(Event)
for line in proc.read().splitlines():
try:
pid, time, junk = line.split(' ', 2)
pid = int(pid)
except ValueError:
continue
if 'Got lock for' in junk:
events[pid].pid = pid
events[pid].start = time
if 'Releasing' in junk:
events[pid].pid = pid
events[pid].end = time
assert len(events) == 125
for event in events.values():
for other in events.values():
if other is not event:
try:
if other.start < event.start < other.end or \
other.start < event.end < other.end:
pytest.fail('%s overlaps %s' % (event, other))
except:
print("[%s/%s]" % (event, other))
raise
def test_reset(conn):
with Lock(conn, "foobar") as lock:
lock.reset()
new_lock = Lock(conn, "foobar")
new_lock.acquire(blocking=False)
new_lock.release()
def test_reset_all(conn):
lock1 = Lock(conn, "foobar1")
lock2 = Lock(conn, "foobar2")
lock1.acquire(blocking=False)
lock2.acquire(blocking=False)
reset_all(conn)
lock1 = Lock(conn, "foobar1")
lock2 = Lock(conn, "foobar2")
lock1.acquire(blocking=False)
lock2.acquire(blocking=False)
lock1.release()
lock2.release()
def test_owner_id(conn):
unique_identifier = b"foobar-identifier"
lock = Lock(conn, "foobar-tok", expire=TIMEOUT/4, id=unique_identifier)
lock_id = lock.id
assert lock_id == unique_identifier
lock.acquire(blocking=False)
assert lock.get_owner_id() == unique_identifier
lock.release()
def test_token(conn):
lock = Lock(conn, "foobar-tok")
tok = lock.id
assert conn.get(lock._name) is None
lock.acquire(blocking=False)
assert conn.get(lock._name) == tok
def test_bogus_release(conn):
lock = Lock(conn, "foobar-tok")
pytest.raises(NotAcquired, lock.release)
lock.release(force=True)
def test_release_from_nonblocking_leaving_garbage(conn):
for _ in range(10):
lock = Lock(conn, 'release_from_nonblocking')
lock.acquire(blocking=False)
lock.release()
assert conn.llen('lock-signal:release_from_nonblocking') == 1
def test_no_auto_renewal(conn):
lock = Lock(conn, 'lock_renewal', expire=3, auto_renewal=False)
assert lock._lock_renewal_interval is None
lock.acquire()
assert lock._lock_renewal_thread is None, "No lock refresh thread should have been spawned"
def test_auto_renewal_bad_values(conn):
with pytest.raises(ValueError):
Lock(conn, 'lock_renewal', expire=None, auto_renewal=True)
def test_auto_renewal(conn):
lock = Lock(conn, 'lock_renewal', expire=3, auto_renewal=True)
lock.acquire()
assert isinstance(lock._lock_renewal_thread, InterruptableThread)
assert not lock._lock_renewal_thread.should_exit
assert lock._lock_renewal_interval == 2
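    # (note added) the interval of 2 asserted above appears to follow a
    # renew-at-two-thirds-of-expiry heuristic (expire=3 -> renew every 2
    # seconds), so the key checked below should survive sleeping past the
    # nominal expiry.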
time.sleep(3)
assert conn.get(lock._name) == lock.id, "Key expired but it should have been getting renewed"
lock.release()
assert lock._lock_renewal_thread is None

# Source: spudmind/parlparse | pyscraper/patchtool.py
#!/usr/bin/env python
# vim:sw=8:ts=8:et:nowrap
import sys
import os
import shutil
import string
import miscfuncs
import re
import tempfile
import optparse
# change current directory to pyscraper folder script is in
os.chdir(os.path.dirname(sys.argv[0]) or '.')
from resolvemembernames import memberList
toppath = miscfuncs.toppath
# File names of patch files
# this is horid since it shadows stuff that's done distributively in the scrapers
def GenPatchFileNames(typ, sdate):
qfolder = toppath
qfolder = os.path.join(qfolder, "cmpages")
# transform the typ into the file stub
if typ == "wrans":
stub = "answers"
elif typ == "lords" or typ == 'lordspages':
typ = "lordspages"
stub = "daylord"
elif typ == "westminhall":
stub = "westminster"
elif typ == "wms":
stub = "ministerial"
elif typ == "standing":
stub = "standing"
elif typ[0:9] == 'chgpages/':
stub = re.sub('chgpages/', '', typ)
else:
stub = typ
folder = os.path.join(qfolder, typ)
# lords case where we use the new top level patch directory
pdire = os.path.join(toppath, "patches")
# all patches will be moved to where they belong
# if typ != "lordspages":
# pdire = "patches" # as local directory
pdire = os.path.join(pdire, typ)
if not os.path.isdir(pdire):
os.mkdir(pdire)
patchfile = os.path.join(pdire, "%s%s.html.patch" % (stub, sdate))
orgfile = os.path.join(folder, "%s%s.html" % (stub, sdate))
tmpfile = tempfile.mktemp(".html", "patchtmp-%s%s-" % (stub, sdate), miscfuncs.tmppath)
tmppatchfile = os.path.join(pdire, "%s%s.html.patch.new" % (stub, sdate))
return patchfile, orgfile, tmpfile, tmppatchfile
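# Example (illustrative): GenPatchFileNames("wrans", "2004-03-25") yields
#   <toppath>/patches/wrans/answers2004-03-25.html.patch   (patch to apply)
#   <toppath>/cmpages/wrans/answers2004-03-25.html         (pristine download)
# plus a temp working copy and a ".patch.new" staging file.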
# Launches editor on copy of file, and makes patch file of changes the user
# makes interactively
def RunPatchToolW(typ, sdate, stamp, frag):
(patchfile, orgfile, tmpfile, tmppatchfile) = GenPatchFileNames(typ, sdate)
shutil.copyfile(orgfile, tmpfile)
if os.path.isfile(patchfile):
print "Patching ", patchfile
status = os.system('patch --quiet "%s" < "%s"' % (tmpfile, patchfile))
# run the editor (first finding the line number to be edited)
gp = 0
finforlines = open(tmpfile, "r")
rforlines = finforlines.read();
finforlines.close()
if stamp:
aname = stamp.GetAName()
ganamef = re.search(('<a name\s*=\s*"%s">([\s\S]*?)<a name(?i)' % aname), rforlines)
if ganamef:
gp = ganamef.start(1)
else:
ganamef = None
if not frag:
fragl = -1
elif ganamef:
fragl = string.find(ganamef.group(1), str(frag))
else:
fragl = string.find(rforlines, str(frag))
if fragl != -1:
gp += fragl
gl = string.count(rforlines, '\n', 0, gp)
gc = 0
if gl:
gc = gp - string.rfind(rforlines, '\n', 0, gp)
#print "find loc codes ", gp, gl, gc
if 1==0 and sys.platform == "win32":
os.system('"C:\Program Files\ConTEXT\ConTEXT" %s /g%d:%d' % (tmpfile, gc + 1, gl + 1))
else:
# TODO add column support using gc + 1, if you can work out vim's syntax
editor = os.getenv('EDITOR')
if not editor:
editor = 'vim'
os.system('%s "%s" +%d' % (editor, tmpfile, gl + 1))
# now create the diff file
if os.path.isfile(tmppatchfile):
os.remove(tmppatchfile)
ern = os.system('diff -u "%s" "%s" > "%s"' % (orgfile, tmpfile, tmppatchfile))
if ern == 2:
print "Error running diff"
sys.exit(1)
os.remove(tmpfile)
if os.path.isfile(patchfile):
os.remove(patchfile)
if os.path.getsize(tmppatchfile):
os.rename(tmppatchfile, patchfile)
print "Making patchfile ", patchfile
def RunPatchTool(typ, sdatext, ce):
if not ce.stamp:
print "No stamp available, so won't move your cursor to right place"
else:
assert ce.stamp.sdate[:10] == sdatext[:10] # omitting the letter extension
print "\nHit RETURN to launch your editor to make patches "
sys.stdin.readline()
RunPatchToolW(typ, sdatext, ce.stamp, ce.fragment)
memberList.reloadXML()
# So it works from the command line
if __name__ == '__main__':
parser=optparse.OptionParser()
(options,args)=parser.parse_args()
args=[sys.argv[0]]+args
#print args
if len(args) != 3:
print """
This generates files for the patchfilter.py filter.
They are standard patch files which apply to the glued HTML files which we
download from Hansard. Any special errors in Hansard are fixed by
these patches.
Run this tool like this:
./patchtool.py wrans 2004-03-25
This will launch your editor, and upon exit write out a patch of your changes
in the patches folder underneath this folder. The original file is
untouched. We consider the patches permanent data, so add them to CVS.
"""
sys.exit(1)
RunPatchToolW(args[1], args[2], None, "")

# Source: mackst/vulkan-tutorial | 09_shader_modules.py
# -*- coding: UTF-8 -*-
import sys
from vulkan import *
from PySide2 import (QtGui, QtCore)
validationLayers = [
'VK_LAYER_LUNARG_standard_validation'
]
deviceExtensions = [
VK_KHR_SWAPCHAIN_EXTENSION_NAME
]
enableValidationLayers = True
class InstanceProcAddr(object):
T = None
def __init__(self, func):
self.__func = func
def __call__(self, *args, **kwargs):
funcName = self.__func.__name__
func = InstanceProcAddr.procfunc(funcName)
if func:
return func(*args, **kwargs)
else:
return VK_ERROR_EXTENSION_NOT_PRESENT
@staticmethod
def procfunc(funcName):
return vkGetInstanceProcAddr(InstanceProcAddr.T, funcName)
class DeviceProcAddr(InstanceProcAddr):
@staticmethod
def procfunc(funcName):
return vkGetDeviceProcAddr(InstanceProcAddr.T, funcName)
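# (note added) The two classes above implement name-based dynamic dispatch:
# each decorated stub below is resolved at call time through
# vkGetInstanceProcAddr / vkGetDeviceProcAddr, using the instance or device
# handle stored in InstanceProcAddr.T / DeviceProcAddr.T during
# initVulkan(); if the lookup fails, VK_ERROR_EXTENSION_NOT_PRESENT is
# returned instead of raising.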
# instance ext functions
@InstanceProcAddr
def vkCreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator):
pass
@InstanceProcAddr
def vkDestroyDebugReportCallbackEXT(instance, callback, pAllocator):
pass
@InstanceProcAddr
def vkCreateWin32SurfaceKHR(instance, pCreateInfo, pAllocator):
pass
@InstanceProcAddr
def vkDestroySurfaceKHR(instance, surface, pAllocator):
pass
@InstanceProcAddr
def vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface):
pass
@InstanceProcAddr
def vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface):
pass
@InstanceProcAddr
def vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface):
pass
@InstanceProcAddr
def vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface):
pass
# device ext functions
@DeviceProcAddr
def vkCreateSwapchainKHR(device, pCreateInfo, pAllocator):
pass
@DeviceProcAddr
def vkDestroySwapchainKHR(device, swapchain, pAllocator):
pass
@DeviceProcAddr
def vkGetSwapchainImagesKHR(device, swapchain):
pass
def debugCallback(*args):
print('DEBUG: {} {}'.format(args[5], args[6]))
return 0
class Win32misc(object):
@staticmethod
def getInstance(hWnd):
from cffi import FFI as _FFI
_ffi = _FFI()
_ffi.cdef('long __stdcall GetWindowLongA(void* hWnd, int nIndex);')
_lib = _ffi.dlopen('User32.dll')
return _lib.GetWindowLongA(_ffi.cast('void*', hWnd), -6) # GWL_HINSTANCE
class QueueFamilyIndices(object):
def __init__(self):
self.graphicsFamily = -1
self.presentFamily = -1
@property
def isComplete(self):
return self.graphicsFamily >= 0 and self.presentFamily >= 0
class SwapChainSupportDetails(object):
def __init__(self):
self.capabilities = None
self.formats = None
self.presentModes = None
class HelloTriangleApplication(QtGui.QWindow):
def __init__(self):
super(HelloTriangleApplication, self).__init__()
self.setWidth(1280)
self.setHeight(720)
self.setTitle("Vulkan Python - PySide2")
# self.setSurfaceType(self.OpenGLSurface)
self.__instance = None
self.__callbcak = None
self.__surface = None
self.__physicalDevice = None
self.__device = None
self.__graphicQueue = None
self.__presentQueue = None
self.__swapChain = None
self.__swapChainImages = []
self.__swapChainImageFormat = None
self.__swapChainExtent = None
self.__swapChainImageViews = []
self.__indices = QueueFamilyIndices()
self.initVulkan()
def __del__(self):
if self.__swapChainImageViews:
[vkDestroyImageView(self.__device, imv, None) for imv in self.__swapChainImageViews]
if self.__swapChain:
vkDestroySwapchainKHR(self.__device, self.__swapChain, None)
if self.__device:
vkDestroyDevice(self.__device, None)
if self.__callbcak:
vkDestroyDebugReportCallbackEXT(self.__instance, self.__callbcak, None)
if self.__surface:
vkDestroySurfaceKHR(self.__instance, self.__surface, None)
if self.__instance:
vkDestroyInstance(self.__instance, None)
print('instance destroyed')
def initVulkan(self):
self.__cretaeInstance()
self.__setupDebugCallback()
self.__createSurface()
self.__pickPhysicalDevice()
self.__createLogicalDevice()
self.__createSwapChain()
self.__createImageViews()
self.__createGraphicsPipeline()
def __cretaeInstance(self):
if enableValidationLayers and not self.__checkValidationLayerSupport():
raise Exception("validation layers requested, but not available!")
appInfo = VkApplicationInfo(
# sType=VK_STRUCTURE_TYPE_APPLICATION_INFO,
pApplicationName='Python VK',
applicationVersion=VK_MAKE_VERSION(1, 0, 0),
pEngineName='pyvulkan',
engineVersion=VK_MAKE_VERSION(1, 0, 0),
apiVersion=VK_API_VERSION
)
extenstions = self.__getRequiredExtensions()
if enableValidationLayers:
instanceInfo = VkInstanceCreateInfo(
pApplicationInfo=appInfo,
enabledLayerCount=len(validationLayers),
ppEnabledLayerNames=validationLayers,
enabledExtensionCount=len(extenstions),
ppEnabledExtensionNames=extenstions
)
else:
instanceInfo = VkInstanceCreateInfo(
pApplicationInfo=appInfo,
enabledLayerCount=0,
enabledExtensionCount=len(extenstions),
ppEnabledExtensionNames=extenstions
)
self.__instance = vkCreateInstance(instanceInfo, None)
InstanceProcAddr.T = self.__instance
def __setupDebugCallback(self):
if not enableValidationLayers:
return
createInfo = VkDebugReportCallbackCreateInfoEXT(
flags=VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_ERROR_BIT_EXT,
pfnCallback=debugCallback
)
self.__callbcak = vkCreateDebugReportCallbackEXT(self.__instance, createInfo, None)
def __createSurface(self):
if sys.platform == 'win32':
hwnd = self.winId()
hinstance = Win32misc.getInstance(hwnd)
createInfo = VkWin32SurfaceCreateInfoKHR(
hinstance=hinstance,
hwnd=hwnd
)
self.__surface = vkCreateWin32SurfaceKHR(self.__instance, createInfo, None)
# elif sys.platform == 'linux':
# pass
def __pickPhysicalDevice(self):
physicalDevices = vkEnumeratePhysicalDevices(self.__instance)
for device in physicalDevices:
if self.__isDeviceSuitable(device):
self.__physicalDevice = device
break
assert self.__physicalDevice != None
def __createLogicalDevice(self):
self.__indices = self.__findQueueFamilies(self.__physicalDevice)
uniqueQueueFamilies = {}.fromkeys([self.__indices.graphicsFamily, self.__indices.presentFamily])
queueCreateInfos = []
for i in uniqueQueueFamilies:
queueCreateInfo = VkDeviceQueueCreateInfo(
queueFamilyIndex=i,
queueCount=1,
pQueuePriorities=[1.0]
)
queueCreateInfos.append(queueCreateInfo)
deviceFeatures = VkPhysicalDeviceFeatures()
if enableValidationLayers:
createInfo = VkDeviceCreateInfo(
queueCreateInfoCount=len(queueCreateInfos),
pQueueCreateInfos=queueCreateInfos,
enabledExtensionCount=len(deviceExtensions),
ppEnabledExtensionNames=deviceExtensions,
enabledLayerCount=len(validationLayers),
ppEnabledLayerNames=validationLayers,
pEnabledFeatures=deviceFeatures
)
else:
createInfo = VkDeviceCreateInfo(
                # (fix applied) mirror the validation branch: pass the full
                # list rather than the last loop variable
                queueCreateInfoCount=len(queueCreateInfos),
                pQueueCreateInfos=queueCreateInfos,
enabledExtensionCount=len(deviceExtensions),
ppEnabledExtensionNames=deviceExtensions,
enabledLayerCount=0,
pEnabledFeatures=deviceFeatures
)
self.__device = vkCreateDevice(self.__physicalDevice, createInfo, None)
DeviceProcAddr.T = self.__device
self.__graphicQueue = vkGetDeviceQueue(self.__device, self.__indices.graphicsFamily, 0)
self.__presentQueue = vkGetDeviceQueue(self.__device, self.__indices.presentFamily, 0)
def __createSwapChain(self):
swapChainSupport = self.__querySwapChainSupport(self.__physicalDevice)
surfaceFormat = self.__chooseSwapSurfaceFormat(swapChainSupport.formats)
presentMode = self.__chooseSwapPresentMode(swapChainSupport.presentModes)
extent = self.__chooseSwapExtent(swapChainSupport.capabilities)
imageCount = swapChainSupport.capabilities.minImageCount + 1
if swapChainSupport.capabilities.maxImageCount > 0 and imageCount > swapChainSupport.capabilities.maxImageCount:
imageCount = swapChainSupport.capabilities.maxImageCount
indices = self.__findQueueFamilies(self.__physicalDevice)
queueFamily = {}.fromkeys([indices.graphicsFamily, indices.presentFamily])
queueFamilies = list(queueFamily.keys())
if len(queueFamilies) > 1:
createInfo = VkSwapchainCreateInfoKHR(
surface=self.__surface,
minImageCount=imageCount,
imageFormat=surfaceFormat.format,
imageColorSpace=surfaceFormat.colorSpace,
imageExtent=extent,
imageArrayLayers=1,
imageUsage=VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
queueFamilyIndexCount=len(queueFamilies),
pQueueFamilyIndices=queueFamilies,
imageSharingMode=VK_SHARING_MODE_CONCURRENT,
preTransform=swapChainSupport.capabilities.currentTransform,
compositeAlpha=VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
presentMode=presentMode,
clipped=True
)
else:
createInfo = VkSwapchainCreateInfoKHR(
surface=self.__surface,
minImageCount=imageCount,
imageFormat=surfaceFormat.format,
imageColorSpace=surfaceFormat.colorSpace,
imageExtent=extent,
imageArrayLayers=1,
imageUsage=VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
queueFamilyIndexCount=len(queueFamilies),
pQueueFamilyIndices=queueFamilies,
imageSharingMode=VK_SHARING_MODE_EXCLUSIVE,
preTransform=swapChainSupport.capabilities.currentTransform,
compositeAlpha=VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
presentMode=presentMode,
clipped=True
)
self.__swapChain = vkCreateSwapchainKHR(self.__device, createInfo, None)
assert self.__swapChain != None
self.__swapChainImages = vkGetSwapchainImagesKHR(self.__device, self.__swapChain)
self.__swapChainImageFormat = surfaceFormat.format
self.__swapChainExtent = extent
def __createImageViews(self):
self.__swapChainImageViews = []
for i, image in enumerate(self.__swapChainImages):
ssr = VkImageSubresourceRange(
VK_IMAGE_ASPECT_COLOR_BIT,
0, 1, 0, 1
)
createInfo = VkImageViewCreateInfo(
image=image,
viewType=VK_IMAGE_VIEW_TYPE_2D,
format=self.__swapChainImageFormat,
components=[VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY],
subresourceRange=ssr
)
self.__swapChainImageViews.append(vkCreateImageView(self.__device, createInfo, None))
def __createGraphicsPipeline(self):
vertexShaderMode = self.__createShaderModule('shader/vert.spv')
fragmentShaderMode = self.__createShaderModule('shader/frag.spv')
vertexShaderStageInfo = VkPipelineShaderStageCreateInfo(
stage=VK_SHADER_STAGE_VERTEX_BIT,
module=vertexShaderMode,
pName='main'
)
fragmentShaderStageInfo = VkPipelineShaderStageCreateInfo(
stage=VK_SHADER_STAGE_FRAGMENT_BIT,
module=fragmentShaderMode,
pName='main'
)
shaderStageInfos = [vertexShaderStageInfo, fragmentShaderStageInfo]
vkDestroyShaderModule(self.__device, vertexShaderMode, None)
vkDestroyShaderModule(self.__device, fragmentShaderMode, None)
def __createShaderModule(self, shaderFile):
with open(shaderFile, 'rb') as sf:
code = sf.read()
createInfo = VkShaderModuleCreateInfo(
codeSize=len(code),
pCode=code
)
return vkCreateShaderModule(self.__device, createInfo, None)
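    # (note added) the .spv files are precompiled SPIR-V binaries; the raw
    # bytestring is passed for pCode with codeSize in bytes, which the
    # cffi-based vulkan binding appears to accept directly.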
def __chooseSwapSurfaceFormat(self, formats):
if len(formats) == 1 and formats[0].format == VK_FORMAT_UNDEFINED:
            # (fix applied) callers read .format and .colorSpace from the
            # returned value, so wrap the preferred pair in a
            # VkSurfaceFormatKHR struct rather than a bare list; keyword
            # construction of Vk* structs is assumed from the vulkan binding.
            return VkSurfaceFormatKHR(format=VK_FORMAT_B8G8R8_UNORM,
                                      colorSpace=VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)
for i in formats:
if i.format == VK_FORMAT_B8G8R8_UNORM and i.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR:
return i
return formats[0]
def __chooseSwapPresentMode(self, presentModes):
bestMode = VK_PRESENT_MODE_FIFO_KHR
for i in presentModes:
if i == VK_PRESENT_MODE_FIFO_KHR:
return i
elif i == VK_PRESENT_MODE_MAILBOX_KHR:
return i
elif i == VK_PRESENT_MODE_IMMEDIATE_KHR:
return i
return bestMode
def __chooseSwapExtent(self, capabilities):
width = max(capabilities.minImageExtent.width, min(capabilities.maxImageExtent.width, self.width()))
height = max(capabilities.minImageExtent.height, min(capabilities.maxImageExtent.height, self.height()))
return VkExtent2D(width, height)
def __querySwapChainSupport(self, device):
detail = SwapChainSupportDetails()
detail.capabilities = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(device, self.__surface)
detail.formats = vkGetPhysicalDeviceSurfaceFormatsKHR(device, self.__surface)
detail.presentModes = vkGetPhysicalDeviceSurfacePresentModesKHR(device, self.__surface)
return detail
def __isDeviceSuitable(self, device):
indices = self.__findQueueFamilies(device)
extensionsSupported = self.__checkDeviceExtensionSupport(device)
swapChainAdequate = False
if extensionsSupported:
swapChainSupport = self.__querySwapChainSupport(device)
swapChainAdequate = (swapChainSupport.formats is not None) and (swapChainSupport.presentModes is not None)
return indices.isComplete and extensionsSupported and swapChainAdequate
def __checkDeviceExtensionSupport(self, device):
availableExtensions = vkEnumerateDeviceExtensionProperties(device, None)
aen = [i.extensionName for i in availableExtensions]
for i in deviceExtensions:
if i not in aen:
return False
return True
def __findQueueFamilies(self, device):
indices = QueueFamilyIndices()
familyProperties = vkGetPhysicalDeviceQueueFamilyProperties(device)
for i, prop in enumerate(familyProperties):
if prop.queueCount > 0 and prop.queueFlags & VK_QUEUE_GRAPHICS_BIT:
indices.graphicsFamily = i
presentSupport = vkGetPhysicalDeviceSurfaceSupportKHR(device, i, self.__surface)
if prop.queueCount > 0 and presentSupport:
indices.presentFamily = i
if indices.isComplete:
break
return indices
def __getRequiredExtensions(self):
extenstions = [e.extensionName for e in vkEnumerateInstanceExtensionProperties(None)]
if enableValidationLayers:
extenstions.append(VK_EXT_DEBUG_REPORT_EXTENSION_NAME)
return extenstions
def __checkValidationLayerSupport(self):
availableLayers = vkEnumerateInstanceLayerProperties()
for layer in validationLayers:
layerfound = False
for layerProp in availableLayers:
if layer == layerProp.layerName:
layerfound = True
break
return layerfound
return False
if __name__ == '__main__':
import sys
app = QtGui.QGuiApplication(sys.argv)
win = HelloTriangleApplication()
win.show()
def clenaup():
global win
del win
app.aboutToQuit.connect(clenaup)
sys.exit(app.exec_())

# Source: openNSS/enigma2 | lib/python/Screens/About.py
from Screen import Screen
from Screens.MessageBox import MessageBox
from Components.config import config
from Components.ActionMap import ActionMap
from Components.Sources.StaticText import StaticText
from Components.Harddisk import harddiskmanager
from Components.NimManager import nimmanager
from Components.About import about
from Components.ScrollLabel import ScrollLabel
from Components.Button import Button
from Components.Label import Label
from Components.ProgressBar import ProgressBar
from Tools.StbHardware import getFPVersion
from enigma import eTimer, eLabel, eConsoleAppContainer, getDesktop, eGetEnigmaDebugLvl
from Components.GUIComponent import GUIComponent
import skin, os
class About(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.setTitle(_("About"))
hddsplit = skin.parameters.get("AboutHddSplit", 0)
AboutText = _("Hardware: ") + about.getHardwareTypeString() + "\n"
cpu = about.getCPUInfoString()
AboutText += _("CPU: ") + cpu + "\n"
AboutText += _("Image: ") + about.getImageTypeString() + "\n"
AboutText += _("Info: www.nonsolosat.net") + "\n"
# [WanWizard] Removed until we find a reliable way to determine the installation date
# AboutText += _("Installed: ") + about.getFlashDateString() + "\n"
EnigmaVersion = about.getEnigmaVersionString()
EnigmaVersion = EnigmaVersion.rsplit("-", EnigmaVersion.count("-") - 2)
if len(EnigmaVersion) == 3:
EnigmaVersion = EnigmaVersion[0] + " (" + EnigmaVersion[2] + "-" + EnigmaVersion[1] + ")"
else:
EnigmaVersion = EnigmaVersion[0] + " (" + EnigmaVersion[1] + ")"
EnigmaVersion = _("Enigma version: ") + EnigmaVersion
self["EnigmaVersion"] = StaticText(EnigmaVersion)
AboutText += "\n" + EnigmaVersion + "\n"
AboutText += _("Kernel version: ") + about.getKernelVersionString() + "\n"
AboutText += _("DVB driver version: ") + about.getDriverInstalledDate() + "\n"
GStreamerVersion = _("GStreamer version: ") + about.getGStreamerVersionString(cpu).replace("GStreamer","")
self["GStreamerVersion"] = StaticText(GStreamerVersion)
AboutText += GStreamerVersion + "\n"
AboutText += _("Python version: ") + about.getPythonVersionString() + "\n"
AboutText += _("Enigma (re)starts: %d\n") % config.misc.startCounter.value
AboutText += _("Enigma debug level: %d\n") % eGetEnigmaDebugLvl()
fp_version = getFPVersion()
if fp_version is None:
fp_version = ""
else:
fp_version = _("Frontprocessor version: %s") % fp_version
AboutText += fp_version + "\n"
self["FPVersion"] = StaticText(fp_version)
AboutText += _('Skin & Resolution: %s (%sx%s)\n') % (config.skin.primary_skin.value.split('/')[0], getDesktop(0).size().width(), getDesktop(0).size().height())
self["TunerHeader"] = StaticText(_("Detected NIMs:"))
AboutText += "\n" + _("Detected NIMs:") + "\n"
nims = nimmanager.nimListCompressed()
for count in range(len(nims)):
if count < 4:
self["Tuner" + str(count)] = StaticText(nims[count])
else:
self["Tuner" + str(count)] = StaticText("")
AboutText += nims[count] + "\n"
self["HDDHeader"] = StaticText(_("Detected HDD:"))
AboutText += "\n" + _("Detected HDD:") + "\n"
hddlist = harddiskmanager.HDDList()
hddinfo = ""
if hddlist:
formatstring = hddsplit and "%s:%s, %.1f %sB %s" or "%s\n(%s, %.1f %sB %s)"
for count in range(len(hddlist)):
if hddinfo:
hddinfo += "\n"
hdd = hddlist[count][1]
if int(hdd.free()) > 1024:
hddinfo += formatstring % (hdd.model(), hdd.capacity(), hdd.free()/1024.0, "G", _("free"))
else:
hddinfo += formatstring % (hdd.model(), hdd.capacity(), hdd.free(), "M", _("free"))
else:
hddinfo = _("none")
self["hddA"] = StaticText(hddinfo)
AboutText += hddinfo + "\n\n" + _("Network Info:")
for x in about.GetIPsFromNetworkInterfaces():
AboutText += "\n" + x[0] + ": " + x[1]
AboutText += '\n\n' + _("Uptime") + ": " + about.getBoxUptime()
self["AboutScrollLabel"] = ScrollLabel(AboutText)
self["key_green"] = Button(_("Translations"))
self["key_red"] = Button(_("Latest Commits"))
self["key_yellow"] = Button(_("Troubleshoot"))
self["key_blue"] = Button(_("Memory Info"))
self["actions"] = ActionMap(["ColorActions", "SetupActions", "DirectionActions"],
{
"cancel": self.close,
"ok": self.close,
"red": self.showCommits,
"green": self.showTranslationInfo,
"blue": self.showMemoryInfo,
"yellow": self.showTroubleshoot,
"up": self["AboutScrollLabel"].pageUp,
"down": self["AboutScrollLabel"].pageDown
})
def showTranslationInfo(self):
self.session.open(TranslationInfo)
def showCommits(self):
self.session.open(CommitInfo)
def showMemoryInfo(self):
self.session.open(MemoryInfo)
def showTroubleshoot(self):
self.session.open(Troubleshoot)
class TranslationInfo(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.setTitle(_("Translation"))
# don't remove the string out of the _(), or it can't be "translated" anymore.
# TRANSLATORS: Add here whatever should be shown in the "translator" about screen, up to 6 lines (use \n for newline)
info = _("TRANSLATOR_INFO")
if info == "TRANSLATOR_INFO":
info = "(N/A)"
infolines = _("").split("\n")
infomap = {}
for x in infolines:
l = x.split(': ')
if len(l) != 2:
continue
(type, value) = l
infomap[type] = value
print infomap
self["key_red"] = Button(_("Cancel"))
self["TranslationInfo"] = StaticText(info)
translator_name = infomap.get("Language-Team", "none")
if translator_name == "none":
translator_name = infomap.get("Last-Translator", "")
self["TranslatorName"] = StaticText(translator_name)
self["actions"] = ActionMap(["SetupActions"],
{
"cancel": self.close,
"ok": self.close,
})
class CommitInfo(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.setTitle(_("Latest Commits"))
self.skinName = ["CommitInfo", "About"]
self["AboutScrollLabel"] = ScrollLabel(_("Please wait"))
self["actions"] = ActionMap(["SetupActions", "DirectionActions"],
{
"cancel": self.close,
"ok": self.close,
"up": self["AboutScrollLabel"].pageUp,
"down": self["AboutScrollLabel"].pageDown,
"left": self.left,
"right": self.right
})
self["key_red"] = Button(_("Cancel"))
# get the branch to display from the Enigma version
try:
branch = "?sha=" + "-".join(about.getEnigmaVersionString().split("-")[3:])
except:
branch = ""
self.project = 0
self.projects = [
("https://api.github.com/repos/openpli/enigma2/commits" + branch, "Enigma2"),
("https://api.github.com/repos/openpli/openpli-oe-core/commits" + branch, "Openpli Oe Core"),
("https://api.github.com/repos/openpli/enigma2-plugins/commits", "Enigma2 Plugins"),
("https://api.github.com/repos/openpli/aio-grab/commits", "Aio Grab"),
("https://api.github.com/repos/openpli/enigma2-plugin-extensions-epgimport/commits", "Plugin EPGImport"),
("https://api.github.com/repos/openpli/enigma2-plugin-skins-magic/commits", "Skin Magic SD"),
("https://api.github.com/repos/littlesat/skin-PLiHD/commits", "Skin PLi HD"),
("https://api.github.com/repos/E2OpenPlugins/e2openplugin-OpenWebif/commits", "OpenWebif"),
("https://api.github.com/repos/haroo/HansSettings/commits", "Hans settings")
]
self.cachedProjects = {}
self.Timer = eTimer()
self.Timer.callback.append(self.readGithubCommitLogs)
self.Timer.start(50, True)
def readGithubCommitLogs(self):
url = self.projects[self.project][0]
commitlog = ""
from datetime import datetime
from json import loads
from urllib2 import urlopen
try:
commitlog += 80 * '-' + '\n'
commitlog += url.split('/')[-2] + '\n'
commitlog += 80 * '-' + '\n'
try:
# OpenPli 5.0 uses python 2.7.11 and here we need to bypass the certificate check
from ssl import _create_unverified_context
log = loads(urlopen(url, timeout=5, context=_create_unverified_context()).read())
except:
log = loads(urlopen(url, timeout=5).read())
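			# (note added) _create_unverified_context disables TLS certificate
			# verification; tolerable here only because the fetched commit log
			# is public, read-only data.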
for c in log:
creator = c['commit']['author']['name']
title = c['commit']['message']
date = datetime.strptime(c['commit']['committer']['date'], '%Y-%m-%dT%H:%M:%SZ').strftime('%x %X')
commitlog += date + ' ' + creator + '\n' + title + 2 * '\n'
commitlog = commitlog.encode('utf-8')
self.cachedProjects[self.projects[self.project][1]] = commitlog
except:
commitlog += _("Currently the commit log cannot be retrieved - please try later again")
self["AboutScrollLabel"].setText(commitlog)
def updateCommitLogs(self):
if self.projects[self.project][1] in self.cachedProjects:
self["AboutScrollLabel"].setText(self.cachedProjects[self.projects[self.project][1]])
else:
self["AboutScrollLabel"].setText(_("Please wait"))
self.Timer.start(50, True)
def left(self):
self.project = self.project == 0 and len(self.projects) - 1 or self.project - 1
self.updateCommitLogs()
def right(self):
self.project = self.project != len(self.projects) - 1 and self.project + 1 or 0
self.updateCommitLogs()
class MemoryInfo(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self["actions"] = ActionMap(["SetupActions", "ColorActions"],
{
"cancel": self.close,
"ok": self.getMemoryInfo,
"green": self.getMemoryInfo,
"blue": self.clearMemory,
})
self["key_red"] = Label(_("Cancel"))
self["key_green"] = Label(_("Refresh"))
self["key_blue"] = Label(_("Clear"))
self['lmemtext'] = Label()
self['lmemvalue'] = Label()
self['rmemtext'] = Label()
self['rmemvalue'] = Label()
self['pfree'] = Label()
self['pused'] = Label()
self["slide"] = ProgressBar()
self["slide"].setValue(100)
self["params"] = MemoryInfoSkinParams()
self['info'] = Label(_("This info is for developers only.\nFor normal users it is not relevant.\nPlease don't panic if you see values displayed looking suspicious!"))
self.setTitle(_("Memory Info"))
self.onLayoutFinish.append(self.getMemoryInfo)
def getMemoryInfo(self):
try:
ltext = rtext = ""
lvalue = rvalue = ""
mem = 1
free = 0
rows_in_column = self["params"].rows_in_column
for i, line in enumerate(open('/proc/meminfo','r')):
s = line.strip().split(None, 2)
if len(s) == 3:
name, size, units = s
elif len(s) == 2:
name, size = s
units = ""
else:
continue
if name.startswith("MemTotal"):
mem = int(size)
if name.startswith("MemFree") or name.startswith("Buffers") or name.startswith("Cached"):
free += int(size)
if i < rows_in_column:
ltext += "".join((name,"\n"))
lvalue += "".join((size," ",units,"\n"))
else:
rtext += "".join((name,"\n"))
rvalue += "".join((size," ",units,"\n"))
self['lmemtext'].setText(ltext)
self['lmemvalue'].setText(lvalue)
self['rmemtext'].setText(rtext)
self['rmemvalue'].setText(rvalue)
self["slide"].setValue(int(100.0*(mem-free)/mem+0.25))
self['pfree'].setText("%.1f %s" % (100.*free/mem,'%'))
self['pused'].setText("%.1f %s" % (100.*(mem-free)/mem,'%'))
except Exception, e:
print "[About] getMemoryInfo FAIL:", e
def clearMemory(self):
eConsoleAppContainer().execute("sync")
open("/proc/sys/vm/drop_caches", "w").write("3")
self.getMemoryInfo()
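	# (note added) writing "3" to /proc/sys/vm/drop_caches drops the page
	# cache plus dentries and inodes; "sync" runs first so dirty pages are
	# flushed and become reclaimable.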
class MemoryInfoSkinParams(GUIComponent):
def __init__(self):
GUIComponent.__init__(self)
self.rows_in_column = 25
def applySkin(self, desktop, screen):
if self.skinAttributes is not None:
attribs = [ ]
for (attrib, value) in self.skinAttributes:
if attrib == "rowsincolumn":
self.rows_in_column = int(value)
self.skinAttributes = attribs
return GUIComponent.applySkin(self, desktop, screen)
GUI_WIDGET = eLabel
class Troubleshoot(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.setTitle(_("Troubleshoot"))
self["AboutScrollLabel"] = ScrollLabel(_("Please wait"))
self["key_red"] = Button()
self["key_green"] = Button()
self["actions"] = ActionMap(["OkCancelActions", "DirectionActions", "ColorActions"],
{
"cancel": self.close,
"up": self["AboutScrollLabel"].pageUp,
"down": self["AboutScrollLabel"].pageDown,
"moveUp": self["AboutScrollLabel"].homePage,
"moveDown": self["AboutScrollLabel"].endPage,
"left": self.left,
"right": self.right,
"red": self.red,
"green": self.green,
})
self.container = eConsoleAppContainer()
self.container.appClosed.append(self.appClosed)
self.container.dataAvail.append(self.dataAvail)
self.commandIndex = 0
self.updateOptions()
self.onLayoutFinish.append(self.run_console)
def left(self):
self.commandIndex = (self.commandIndex - 1) % len(self.commands)
self.updateKeys()
self.run_console()
def right(self):
self.commandIndex = (self.commandIndex + 1) % len(self.commands)
self.updateKeys()
self.run_console()
def red(self):
if self.commandIndex >= self.numberOfCommands:
self.session.openWithCallback(self.removeAllLogfiles, MessageBox, _("Do you want to remove all the crash logfiles"), default=False)
else:
self.close()
def green(self):
if self.commandIndex >= self.numberOfCommands:
try:
os.remove(self.commands[self.commandIndex][4:])
except:
pass
self.updateOptions()
self.run_console()
def removeAllLogfiles(self, answer):
if answer:
for fileName in self.getLogFilesList():
try:
os.remove(fileName)
except:
pass
self.updateOptions()
self.run_console()
def appClosed(self, retval):
if retval:
self["AboutScrollLabel"].setText(_("An error occurred - Please try again later"))
def dataAvail(self, data):
self["AboutScrollLabel"].appendText(data)
def run_console(self):
self["AboutScrollLabel"].setText("")
self.setTitle("%s - %s" % (_("Troubleshoot"), self.titles[self.commandIndex]))
command = self.commands[self.commandIndex]
if command.startswith("cat "):
try:
self["AboutScrollLabel"].setText(open(command[4:], "r").read())
except:
self["AboutScrollLabel"].setText(_("Logfile does not exist anymore"))
else:
try:
if self.container.execute(command):
raise Exception, "failed to execute: ", command
except Exception, e:
self["AboutScrollLabel"].setText("%s\n%s" % (_("An error occurred - Please try again later"), e))
def cancel(self):
self.container.appClosed.remove(self.appClosed)
self.container.dataAvail.remove(self.dataAvail)
self.container = None
self.close()
def getDebugFilesList(self):
import glob
return [x for x in sorted(glob.glob("/home/root/enigma.*.debuglog"), key=lambda x: os.path.isfile(x) and os.path.getmtime(x))]
def getLogFilesList(self):
import glob
home_root = "/home/root/enigma2_crash.log"
tmp = "/tmp/enigma2_crash.log"
return [x for x in sorted(glob.glob("/mnt/hdd/*.log"), key=lambda x: os.path.isfile(x) and os.path.getmtime(x))] + (os.path.isfile(home_root) and [home_root] or []) + (os.path.isfile(tmp) and [tmp] or [])
def updateOptions(self):
self.titles = ["dmesg", "ifconfig", "df", "top", "ps", "messages"]
self.commands = ["dmesg", "ifconfig", "df -h", "top -n 1", "ps -l", "cat /var/volatile/log/messages"]
install_log = "/home/root/autoinstall.log"
if os.path.isfile(install_log):
self.titles.append("%s" % install_log)
self.commands.append("cat %s" % install_log)
self.numberOfCommands = len(self.commands)
fileNames = self.getLogFilesList()
if fileNames:
totalNumberOfLogfiles = len(fileNames)
logfileCounter = 1
for fileName in reversed(fileNames):
self.titles.append("logfile %s (%s/%s)" % (fileName, logfileCounter, totalNumberOfLogfiles))
self.commands.append("cat %s" % (fileName))
logfileCounter += 1
fileNames = self.getDebugFilesList()
if fileNames:
totalNumberOfLogfiles = len(fileNames)
logfileCounter = 1
for fileName in reversed(fileNames):
self.titles.append("debug log %s (%s/%s)" % (fileName, logfileCounter, totalNumberOfLogfiles))
self.commands.append("tail -n 2500 %s" % (fileName))
logfileCounter += 1
self.commandIndex = min(len(self.commands) - 1, self.commandIndex)
self.updateKeys()
def updateKeys(self):
self["key_red"].setText(_("Cancel") if self.commandIndex < self.numberOfCommands else _("Remove all logfiles"))
self["key_green"].setText(_("Refresh") if self.commandIndex < self.numberOfCommands else _("Remove this logfile"))

# Source: GaneshPandey/alex-scraper | alexscrapper/spiders/yazing_spider.py
# -*- coding: utf-8 -*-
import scrapy
from scrapy.http import Request, FormRequest
from scrapy.spiders import CrawlSpider
from alexscrapper.items import *
from datetime import datetime
from scrapy.conf import settings
import urllib
import csv
import json
import re
from datetime import datetime, timedelta
from dateutil import parser
from urllib import urlencode
from HTMLParser import HTMLParser
import requests
class YangingSpider(CrawlSpider):
store_name = "Yazing"
name = "yazing"
i = 0
allowed_domains = ["yazing.com"]
start_urls = ['http://yazing.com/brands']
base_url = 'http://yazing.com/deals'
headers = {
'User-Agent': 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.10) Firefox/3.6.10 GTB7.1',
'Accept-Language': 'en-us,en;q=0.5'
}
def __init__(self, *args, **kwargs):
super(YangingSpider, self).__init__(*args, **kwargs)
settings.set('RETRY_HTTP_CODES', [500, 503, 504, 400, 408, 404] )
settings.set('RETRY_TIMES', 5 )
settings.set('REDIRECT_ENABLED', True)
settings.set('METAREFRESH_ENABLED', True)
settings.set('USER_AGENT', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36')
def start_requests(self):
for url in self.start_urls:
yield Request(url=url, callback=self.parse_product, headers=self.headers)
def parse_product(self, response):
item = Yaging()
pattern = ur'([\d.]+)'
div = response.xpath('//div[@class="row thumbnail valign brand appear"]')
for data in div:
self.i = self.i + 1
cashback = data.xpath('div[2]/h5/text()').extract()[:][0]
link = str(data.xpath('div[1]/h4/a/@href').extract()[:][0])
name = str([data.xpath('div[1]/h4/a/text()').extract()][0][0])
item['name'] = name.replace("'", "''")
item['link'] = link
if "$" in cashback:
cashback = "$"+ str(self.getNumbers(cashback))
elif "%" in cashback:
cashback = str(self.getNumbers(cashback)) + "%"
else:
cashback = ""
item['cashback'] = cashback.replace("'", "''")
item['sid'] = self.store_name
item['ctype'] = 1
item['numbers'] = self.getNumbers(cashback).replace('$', '').replace('%', '')
item['domainurl'] = self.base_url
yield item
def getNumbers(self, cashback):
cash = cashback
pattern = r'\d+(?:\.\d+)?'
ret = re.findall(pattern, cash)
if len(ret):
return ret[0]
else:
return "100" | gpl-3.0 | -3,813,770,262,426,610,000 | 34.282051 | 146 | 0.563795 | false |

# Source: vsemionov/wordbase | src/wordbase/master.py
# Copyright (C) 2011 Victor Semionov
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of the contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import os
import socket
import signal
import errno
import logging
import core
_sock = None
logger = None
def _sigterm_handler(signum, frame):
logger.info("caught SIGTERM; terminating")
sys.exit()
def _accept_connections(sock, timeout, mp):
suppress_eintr = mp.is_subproc and hasattr(signal, "siginterrupt") and hasattr(signal, "SIGCHLD")
logger.info("waiting for connections")
while True:
try:
if suppress_eintr: signal.siginterrupt(signal.SIGCHLD, True)
conn, addr = sock.accept()
if suppress_eintr: signal.siginterrupt(signal.SIGCHLD, False)
except IOError as ioe:
if ioe.errno == errno.EINTR: continue
else: raise
host, port = addr
logger.debug("accepted connection from address %s:%d", host, port)
conn.settimeout(timeout)
mp.process(core.process_session, conn, addr)
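# (note added) accept() is retried on EINTR: when worker subprocesses are
# in use, SIGCHLD can interrupt the blocking accept. siginterrupt(SIGCHLD,
# True) makes only the accept call interruptible and is switched back off
# afterwards, so other syscalls in the connection handling are unaffected.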
def init(address, backlog):
global logger
logger = logging.getLogger(__name__)
logger.info("server starting")
signal.signal(signal.SIGTERM, _sigterm_handler)
global _sock
_sock = socket.socket()
_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
_sock.bind(address)
_sock.listen(backlog)
host, port = address
logger.info("listening at address %s:%d", host, port)
def run(timeout, mp):
pid = os.getpid()
try:
_accept_connections(_sock, timeout, mp)
finally:
if os.getpid() == pid:
logger.info("server stopped")

# Source: robertz23/code-samples | python scripts and tools/get_photos.py
import urllib2
"""
This script was only created to extract a several
photos, in jpg format, from an internal website.
It was a very quick script I made only for the
purpose decribe before.
"""
web_not_found = 0
web_found = 0
for num_photo in range(7277, 7630):
web_page = r'http://ntpnoapps0301.corp.codetel.com.do/intranet/Fotos_premio_al_logro_2010/content/images/large/IMG_' + str(num_photo) + '.jpg'
print 'Opening ' + web_page
archive_name = 'IMG_' + str(num_photo) + '.jpg'
try:
        url = urllib2.urlopen(web_page)
web_found += 1
except Exception, e:
web_not_found += 1
continue
    fp = open('C:\\Documents and Settings\\Roberto Zapata\\My Documents\\ISC\\Premios al logro 2010\\' + archive_name, 'wb')
fp.write(url.read())
fp.close()
url.close()
print "Stats"
print "Web pages found ", str(web_found)
print "Web pages not found ", str(web_not_found)
| mit | -6,296,794,329,739,016,000 | 29.638889 | 162 | 0.635539 | false |
hugs/django | django/db/models/fields/related.py | 1 | 42717 | from django.db import connection, transaction
from django.db.models import signals, get_model
from django.db.models.fields import AutoField, Field, IntegerField, PositiveIntegerField, PositiveSmallIntegerField, FieldDoesNotExist
from django.db.models.related import RelatedObject
from django.db.models.query import QuerySet
from django.db.models.query_utils import QueryWrapper
from django.utils.translation import ugettext_lazy, string_concat, ungettext, ugettext as _
from django.utils.functional import curry
from django.core import validators
from django import oldforms
from django import forms
try:
set
except NameError:
from sets import Set as set # Python 2.3 fallback
# Values for Relation.edit_inline.
TABULAR, STACKED = 1, 2
RECURSIVE_RELATIONSHIP_CONSTANT = 'self'
pending_lookups = {}
def add_lazy_relation(cls, field, relation, operation):
"""
Adds a lookup on ``cls`` when a related field is defined using a string,
i.e.::
class MyModel(Model):
fk = ForeignKey("AnotherModel")
This string can be:
* RECURSIVE_RELATIONSHIP_CONSTANT (i.e. "self") to indicate a recursive
relation.
        * The name of a model (i.e. "AnotherModel") to indicate another model in
the same app.
* An app-label and model name (i.e. "someapp.AnotherModel") to indicate
another model in a different app.
If the other model hasn't yet been loaded -- almost a given if you're using
lazy relationships -- then the relation won't be set up until the
class_prepared signal fires at the end of model initialization.
operation is the work that must be performed once the relation can be resolved.
"""
# Check for recursive relations
if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
app_label = cls._meta.app_label
model_name = cls.__name__
else:
# Look for an "app.Model" relation
try:
app_label, model_name = relation.split(".")
except ValueError:
# If we can't split, assume a model in current app
app_label = cls._meta.app_label
model_name = relation
# Try to look up the related model, and if it's already loaded resolve the
# string right away. If get_model returns None, it means that the related
# model isn't loaded yet, so we need to pend the relation until the class
# is prepared.
model = get_model(app_label, model_name, False)
if model:
operation(field, model, cls)
else:
key = (app_label, model_name)
value = (cls, field, operation)
pending_lookups.setdefault(key, []).append(value)
def do_pending_lookups(sender, **kwargs):
"""
Handle any pending relations to the sending model. Sent from class_prepared.
"""
key = (sender._meta.app_label, sender.__name__)
for cls, field, operation in pending_lookups.pop(key, []):
operation(field, sender, cls)
signals.class_prepared.connect(do_pending_lookups)
def manipulator_valid_rel_key(f, self, field_data, all_data):
"Validates that the value is a valid foreign key"
klass = f.rel.to
try:
klass._default_manager.get(**{f.rel.field_name: field_data})
except klass.DoesNotExist:
raise validators.ValidationError, _("Please enter a valid %s.") % f.verbose_name
#HACK
class RelatedField(object):
def contribute_to_class(self, cls, name):
sup = super(RelatedField, self)
# Add an accessor to allow easy determination of the related query path for this field
self.related_query_name = curry(self._get_related_query_name, cls._meta)
if hasattr(sup, 'contribute_to_class'):
sup.contribute_to_class(cls, name)
if not cls._meta.abstract and self.rel.related_name:
self.rel.related_name = self.rel.related_name % {'class': cls.__name__.lower()}
other = self.rel.to
if isinstance(other, basestring):
def resolve_related_class(field, model, cls):
field.rel.to = model
field.do_related_class(model, cls)
add_lazy_relation(cls, self, other, resolve_related_class)
else:
self.do_related_class(other, cls)
def set_attributes_from_rel(self):
self.name = self.name or (self.rel.to._meta.object_name.lower() + '_' + self.rel.to._meta.pk.name)
if self.verbose_name is None:
self.verbose_name = self.rel.to._meta.verbose_name
self.rel.field_name = self.rel.field_name or self.rel.to._meta.pk.name
def do_related_class(self, other, cls):
self.set_attributes_from_rel()
related = RelatedObject(other, cls, self)
if not cls._meta.abstract:
self.contribute_to_related_class(other, related)
def get_db_prep_lookup(self, lookup_type, value):
# If we are doing a lookup on a Related Field, we must be
# comparing object instances. The value should be the PK of value,
# not value itself.
def pk_trace(value):
# Value may be a primary key, or an object held in a relation.
# If it is an object, then we need to get the primary key value for
# that object. In certain conditions (especially one-to-one relations),
# the primary key may itself be an object - so we need to keep drilling
# down until we hit a value that can be used for a comparison.
v = value
try:
while True:
v = getattr(v, v._meta.pk.name)
except AttributeError:
pass
return v
if hasattr(value, 'as_sql'):
sql, params = value.as_sql()
return QueryWrapper(('(%s)' % sql), params)
if lookup_type == 'exact':
return [pk_trace(value)]
if lookup_type == 'in':
return [pk_trace(v) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError, "Related Field has invalid lookup: %s" % lookup_type
def _get_related_query_name(self, opts):
# This method defines the name that can be used to identify this
# related object in a table-spanning query. It uses the lower-cased
# object_name by default, but this can be overridden with the
# "related_name" option.
return self.rel.related_name or opts.object_name.lower()
class SingleRelatedObjectDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# a single "remote" value, on the class pointed to by a related field.
# In the example "place.restaurant", the restaurant attribute is a
# SingleRelatedObjectDescriptor instance.
def __init__(self, related):
self.related = related
self.cache_name = '_%s_cache' % related.get_accessor_name()
def __get__(self, instance, instance_type=None):
if instance is None:
raise AttributeError, "%s must be accessed via instance" % self.related.opts.object_name
try:
return getattr(instance, self.cache_name)
except AttributeError:
params = {'%s__pk' % self.related.field.name: instance._get_pk_val()}
rel_obj = self.related.model._default_manager.get(**params)
setattr(instance, self.cache_name, rel_obj)
return rel_obj
def __set__(self, instance, value):
if instance is None:
raise AttributeError, "%s must be accessed via instance" % self.related.opts.object_name
# The similarity of the code below to the code in
# ReverseSingleRelatedObjectDescriptor is annoying, but there's a bunch
# of small differences that would make a common base class convoluted.
# If null=True, we can assign null here, but otherwise the value needs
# to be an instance of the related class.
if value is None and self.related.field.null == False:
raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
(instance._meta.object_name, self.related.get_accessor_name()))
elif value is not None and not isinstance(value, self.related.model):
raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
(value, instance._meta.object_name,
self.related.get_accessor_name(), self.related.opts.object_name))
# Set the value of the related field
setattr(value, self.related.field.rel.get_related_field().attname, instance)
# Since we already know what the related object is, seed the related
# object caches now, too. This avoids another db hit if you get the
# object you just set.
setattr(instance, self.cache_name, value)
setattr(value, self.related.field.get_cache_name(), instance)
class ReverseSingleRelatedObjectDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# a single "remote" value, on the class that defines the related field.
# In the example "choice.poll", the poll attribute is a
# ReverseSingleRelatedObjectDescriptor instance.
def __init__(self, field_with_rel):
self.field = field_with_rel
def __get__(self, instance, instance_type=None):
if instance is None:
raise AttributeError, "%s must be accessed via instance" % self.field.name
cache_name = self.field.get_cache_name()
try:
return getattr(instance, cache_name)
except AttributeError:
val = getattr(instance, self.field.attname)
if val is None:
# If NULL is an allowed value, return it.
if self.field.null:
return None
raise self.field.rel.to.DoesNotExist
other_field = self.field.rel.get_related_field()
if other_field.rel:
params = {'%s__pk' % self.field.rel.field_name: val}
else:
params = {'%s__exact' % self.field.rel.field_name: val}
# If the related manager indicates that it should be used for
# related fields, respect that.
rel_mgr = self.field.rel.to._default_manager
if getattr(rel_mgr, 'use_for_related_fields', False):
rel_obj = rel_mgr.get(**params)
else:
rel_obj = QuerySet(self.field.rel.to).get(**params)
setattr(instance, cache_name, rel_obj)
return rel_obj
def __set__(self, instance, value):
if instance is None:
            raise AttributeError, "%s must be accessed via instance" % self.field.name
# If null=True, we can assign null here, but otherwise the value needs
# to be an instance of the related class.
if value is None and self.field.null == False:
raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
(instance._meta.object_name, self.field.name))
elif value is not None and not isinstance(value, self.field.rel.to):
raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
(value, instance._meta.object_name,
self.field.name, self.field.rel.to._meta.object_name))
# Set the value of the related field
try:
val = getattr(value, self.field.rel.get_related_field().attname)
except AttributeError:
val = None
setattr(instance, self.field.attname, val)
# Since we already know what the related object is, seed the related
# object cache now, too. This avoids another db hit if you get the
# object you just set.
setattr(instance, self.field.get_cache_name(), value)
class ForeignRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ForeignKey pointed at them by
# some other model. In the example "poll.choice_set", the choice_set
# attribute is a ForeignRelatedObjectsDescriptor instance.
def __init__(self, related):
self.related = related # RelatedObject instance
def __get__(self, instance, instance_type=None):
if instance is None:
raise AttributeError, "Manager must be accessed via instance"
rel_field = self.related.field
rel_model = self.related.model
# Dynamically create a class that subclasses the related
# model's default manager.
superclass = self.related.model._default_manager.__class__
class RelatedManager(superclass):
def get_query_set(self):
return superclass.get_query_set(self).filter(**(self.core_filters))
def add(self, *objs):
for obj in objs:
setattr(obj, rel_field.name, instance)
obj.save()
add.alters_data = True
def create(self, **kwargs):
new_obj = self.model(**kwargs)
self.add(new_obj)
return new_obj
create.alters_data = True
def get_or_create(self, **kwargs):
# Update kwargs with the related object that this
# ForeignRelatedObjectsDescriptor knows about.
kwargs.update({rel_field.name: instance})
return super(RelatedManager, self).get_or_create(**kwargs)
get_or_create.alters_data = True
# remove() and clear() are only provided if the ForeignKey can have a value of null.
if rel_field.null:
def remove(self, *objs):
val = getattr(instance, rel_field.rel.get_related_field().attname)
for obj in objs:
# Is obj actually part of this descriptor set?
if getattr(obj, rel_field.attname) == val:
setattr(obj, rel_field.name, None)
obj.save()
else:
raise rel_field.rel.to.DoesNotExist, "%r is not related to %r." % (obj, instance)
remove.alters_data = True
def clear(self):
for obj in self.all():
setattr(obj, rel_field.name, None)
obj.save()
clear.alters_data = True
manager = RelatedManager()
attname = rel_field.rel.get_related_field().name
manager.core_filters = {'%s__%s' % (rel_field.name, attname):
getattr(instance, attname)}
manager.model = self.related.model
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError, "Manager must be accessed via instance"
manager = self.__get__(instance)
# If the foreign key can support nulls, then completely clear the related set.
# Otherwise, just move the named objects into the set.
if self.related.field.null:
manager.clear()
manager.add(*value)
def create_many_related_manager(superclass, through=False):
"""Creates a manager that subclasses 'superclass' (which is a Manager)
and adds behavior for many-to-many related objects."""
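    # For example (names assumed, not from this module): for
    # article.publications, `superclass` is Publication's default manager
    # class and `through` names the intermediary model, if any; the class
    # built below adds add()/remove()/clear() that write the m2m join table
    # directly.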
class ManyRelatedManager(superclass):
def __init__(self, model=None, core_filters=None, instance=None, symmetrical=None,
join_table=None, source_col_name=None, target_col_name=None):
super(ManyRelatedManager, self).__init__()
self.core_filters = core_filters
self.model = model
self.symmetrical = symmetrical
self.instance = instance
self.join_table = join_table
self.source_col_name = source_col_name
self.target_col_name = target_col_name
self.through = through
self._pk_val = self.instance._get_pk_val()
if self._pk_val is None:
raise ValueError("%r instance needs to have a primary key value before a many-to-many relationship can be used." % instance.__class__.__name__)
def get_query_set(self):
return superclass.get_query_set(self).filter(**(self.core_filters))
# If the ManyToMany relation has an intermediary model,
# the add and remove methods do not exist.
if through is None:
def add(self, *objs):
self._add_items(self.source_col_name, self.target_col_name, *objs)
# If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
if self.symmetrical:
self._add_items(self.target_col_name, self.source_col_name, *objs)
add.alters_data = True
def remove(self, *objs):
self._remove_items(self.source_col_name, self.target_col_name, *objs)
# If this is a symmetrical m2m relation to self, remove the mirror entry in the m2m table
if self.symmetrical:
self._remove_items(self.target_col_name, self.source_col_name, *objs)
remove.alters_data = True
def clear(self):
self._clear_items(self.source_col_name)
# If this is a symmetrical m2m relation to self, clear the mirror entry in the m2m table
if self.symmetrical:
self._clear_items(self.target_col_name)
clear.alters_data = True
def create(self, **kwargs):
# This check needs to be done here, since we can't later remove this
# from the method lookup table, as we do with add and remove.
if through is not None:
raise AttributeError, "Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s's Manager instead." % through
new_obj = self.model(**kwargs)
new_obj.save()
self.add(new_obj)
return new_obj
create.alters_data = True
def get_or_create(self, **kwargs):
obj, created = \
super(ManyRelatedManager, self).get_or_create(**kwargs)
# We only need to add() if created because if we got an object back
# from get() then the relationship already exists.
if created:
self.add(obj)
return obj, created
get_or_create.alters_data = True
def _add_items(self, source_col_name, target_col_name, *objs):
# join_table: name of the m2m link table
# source_col_name: the PK colname in join_table for the source object
# target_col_name: the PK colname in join_table for the target object
# *objs - objects to add. Either object instances, or primary keys of object instances.
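            # Illustrative SQL for article.publications.add(p1, p2) (table and
            # column names depend on the models involved):
            #   SELECT publication_id FROM app_article_publications
            #       WHERE article_id = %s AND publication_id IN (%s, %s)
            #   INSERT INTO app_article_publications
            #       (article_id, publication_id) VALUES (%s, %s)  -- per new id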
# If there aren't any objects, there is nothing to do.
if objs:
# Check that all the objects are of the right type
new_ids = set()
for obj in objs:
if isinstance(obj, self.model):
new_ids.add(obj._get_pk_val())
else:
new_ids.add(obj)
# Add the newly created or already existing objects to the join table.
# First find out which items are already added, to avoid adding them twice
cursor = connection.cursor()
cursor.execute("SELECT %s FROM %s WHERE %s = %%s AND %s IN (%s)" % \
(target_col_name, self.join_table, source_col_name,
target_col_name, ",".join(['%s'] * len(new_ids))),
[self._pk_val] + list(new_ids))
existing_ids = set([row[0] for row in cursor.fetchall()])
# Add the ones that aren't there already
for obj_id in (new_ids - existing_ids):
cursor.execute("INSERT INTO %s (%s, %s) VALUES (%%s, %%s)" % \
(self.join_table, source_col_name, target_col_name),
[self._pk_val, obj_id])
transaction.commit_unless_managed()
def _remove_items(self, source_col_name, target_col_name, *objs):
# source_col_name: the PK colname in join_table for the source object
# target_col_name: the PK colname in join_table for the target object
# *objs - objects to remove
# If there aren't any objects, there is nothing to do.
if objs:
# Check that all the objects are of the right type
old_ids = set()
for obj in objs:
if isinstance(obj, self.model):
old_ids.add(obj._get_pk_val())
else:
old_ids.add(obj)
# Remove the specified objects from the join table
cursor = connection.cursor()
cursor.execute("DELETE FROM %s WHERE %s = %%s AND %s IN (%s)" % \
(self.join_table, source_col_name,
target_col_name, ",".join(['%s'] * len(old_ids))),
[self._pk_val] + list(old_ids))
transaction.commit_unless_managed()
def _clear_items(self, source_col_name):
# source_col_name: the PK colname in join_table for the source object
cursor = connection.cursor()
cursor.execute("DELETE FROM %s WHERE %s = %%s" % \
(self.join_table, source_col_name),
[self._pk_val])
transaction.commit_unless_managed()
return ManyRelatedManager
class ManyRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ManyToManyField pointed at them by
# some other model (rather than having a ManyToManyField themselves).
# In the example "publication.article_set", the article_set attribute is a
# ManyRelatedObjectsDescriptor instance.
def __init__(self, related):
self.related = related # RelatedObject instance
def __get__(self, instance, instance_type=None):
if instance is None:
raise AttributeError, "Manager must be accessed via instance"
# Dynamically create a class that subclasses the related
# model's default manager.
rel_model = self.related.model
superclass = rel_model._default_manager.__class__
RelatedManager = create_many_related_manager(superclass, self.related.field.rel.through)
qn = connection.ops.quote_name
manager = RelatedManager(
model=rel_model,
core_filters={'%s__pk' % self.related.field.name: instance._get_pk_val()},
instance=instance,
symmetrical=False,
join_table=qn(self.related.field.m2m_db_table()),
source_col_name=qn(self.related.field.m2m_reverse_name()),
target_col_name=qn(self.related.field.m2m_column_name())
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError, "Manager must be accessed via instance"
through = getattr(self.related.field.rel, 'through', None)
if through is not None:
raise AttributeError, "Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s's Manager instead." % through
manager = self.__get__(instance)
manager.clear()
manager.add(*value)
class ReverseManyRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ManyToManyField defined in their
# model (rather than having another model pointed *at* them).
# In the example "article.publications", the publications attribute is a
# ReverseManyRelatedObjectsDescriptor instance.
def __init__(self, m2m_field):
self.field = m2m_field
def __get__(self, instance, instance_type=None):
if instance is None:
raise AttributeError, "Manager must be accessed via instance"
# Dynamically create a class that subclasses the related
# model's default manager.
        rel_model = self.field.rel.to
superclass = rel_model._default_manager.__class__
RelatedManager = create_many_related_manager(superclass, self.field.rel.through)
qn = connection.ops.quote_name
manager = RelatedManager(
model=rel_model,
core_filters={'%s__pk' % self.field.related_query_name(): instance._get_pk_val()},
instance=instance,
symmetrical=(self.field.rel.symmetrical and instance.__class__ == rel_model),
join_table=qn(self.field.m2m_db_table()),
source_col_name=qn(self.field.m2m_column_name()),
target_col_name=qn(self.field.m2m_reverse_name())
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError, "Manager must be accessed via instance"
through = getattr(self.field.rel, 'through', None)
if through is not None:
raise AttributeError, "Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s's Manager instead." % through
manager = self.__get__(instance)
manager.clear()
manager.add(*value)
class ManyToOneRel(object):
def __init__(self, to, field_name, num_in_admin=3, min_num_in_admin=None,
max_num_in_admin=None, num_extra_on_change=1, edit_inline=False,
related_name=None, limit_choices_to=None, lookup_overrides=None,
parent_link=False):
try:
to._meta
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, basestring), "'to' must be either a model, a model name or the string %r" % RECURSIVE_RELATIONSHIP_CONSTANT
self.to, self.field_name = to, field_name
self.num_in_admin, self.edit_inline = num_in_admin, edit_inline
self.min_num_in_admin, self.max_num_in_admin = min_num_in_admin, max_num_in_admin
self.num_extra_on_change, self.related_name = num_extra_on_change, related_name
if limit_choices_to is None:
limit_choices_to = {}
self.limit_choices_to = limit_choices_to
self.lookup_overrides = lookup_overrides or {}
self.multiple = True
self.parent_link = parent_link
def get_related_field(self):
"""
Returns the Field in the 'to' object to which this relationship is
tied.
"""
data = self.to._meta.get_field_by_name(self.field_name)
if not data[2]:
raise FieldDoesNotExist("No related field named '%s'" %
self.field_name)
return data[0]
class OneToOneRel(ManyToOneRel):
def __init__(self, to, field_name, num_in_admin=0, min_num_in_admin=None,
max_num_in_admin=None, num_extra_on_change=None, edit_inline=False,
related_name=None, limit_choices_to=None, lookup_overrides=None,
parent_link=False):
# NOTE: *_num_in_admin and num_extra_on_change are intentionally
# ignored here. We accept them as parameters only to match the calling
# signature of ManyToOneRel.__init__().
super(OneToOneRel, self).__init__(to, field_name, num_in_admin,
edit_inline=edit_inline, related_name=related_name,
limit_choices_to=limit_choices_to,
lookup_overrides=lookup_overrides, parent_link=parent_link)
self.multiple = False
class ManyToManyRel(object):
def __init__(self, to, num_in_admin=0, related_name=None,
limit_choices_to=None, symmetrical=True, through=None):
self.to = to
self.num_in_admin = num_in_admin
self.related_name = related_name
if limit_choices_to is None:
limit_choices_to = {}
self.limit_choices_to = limit_choices_to
self.edit_inline = False
self.symmetrical = symmetrical
self.multiple = True
self.through = through
class ForeignKey(RelatedField, Field):
empty_strings_allowed = False
def __init__(self, to, to_field=None, rel_class=ManyToOneRel, **kwargs):
try:
to_name = to._meta.object_name.lower()
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, basestring), "%s(%r) is invalid. First parameter to ForeignKey must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
else:
to_field = to_field or to._meta.pk.name
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
kwargs['rel'] = rel_class(to, to_field,
num_in_admin=kwargs.pop('num_in_admin', 3),
min_num_in_admin=kwargs.pop('min_num_in_admin', None),
max_num_in_admin=kwargs.pop('max_num_in_admin', None),
num_extra_on_change=kwargs.pop('num_extra_on_change', 1),
edit_inline=kwargs.pop('edit_inline', False),
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
lookup_overrides=kwargs.pop('lookup_overrides', None),
parent_link=kwargs.pop('parent_link', False))
Field.__init__(self, **kwargs)
self.db_index = True
def get_attname(self):
return '%s_id' % self.name
def get_validator_unique_lookup_type(self):
return '%s__%s__exact' % (self.name, self.rel.get_related_field().name)
def prepare_field_objs_and_params(self, manipulator, name_prefix):
params = {'validator_list': self.validator_list[:], 'member_name': name_prefix + self.attname}
if self.null:
field_objs = [oldforms.NullSelectField]
else:
field_objs = [oldforms.SelectField]
params['choices'] = self.get_choices_default()
return field_objs, params
def get_default(self):
"Here we check if the default value is an object and return the to_field if so."
field_default = super(ForeignKey, self).get_default()
if isinstance(field_default, self.rel.to):
return getattr(field_default, self.rel.get_related_field().attname)
return field_default
def get_manipulator_field_objs(self):
rel_field = self.rel.get_related_field()
return [oldforms.IntegerField]
def get_db_prep_save(self, value):
        if value == '' or value is None:
return None
else:
return self.rel.get_related_field().get_db_prep_save(value)
def flatten_data(self, follow, obj=None):
if not obj:
# In required many-to-one fields with only one available choice,
# select that one available choice. Note: For SelectFields
# we have to check that the length of choices is *2*, not 1,
# because SelectFields always have an initial "blank" value.
if not self.blank and self.choices:
choice_list = self.get_choices_default()
if len(choice_list) == 2:
return {self.attname: choice_list[1][0]}
return Field.flatten_data(self, follow, obj)
def contribute_to_class(self, cls, name):
super(ForeignKey, self).contribute_to_class(cls, name)
setattr(cls, self.name, ReverseSingleRelatedObjectDescriptor(self))
if isinstance(self.rel.to, basestring):
target = self.rel.to
else:
target = self.rel.to._meta.db_table
cls._meta.duplicate_targets[self.column] = (target, "o2m")
def contribute_to_related_class(self, cls, related):
setattr(cls, related.get_accessor_name(), ForeignRelatedObjectsDescriptor(related))
def formfield(self, **kwargs):
defaults = {'form_class': forms.ModelChoiceField, 'queryset': self.rel.to._default_manager.complex_filter(self.rel.limit_choices_to)}
defaults.update(kwargs)
return super(ForeignKey, self).formfield(**defaults)
def db_type(self):
# The database column type of a ForeignKey is the column type
# of the field to which it points. An exception is if the ForeignKey
# points to an AutoField/PositiveIntegerField/PositiveSmallIntegerField,
# in which case the column type is simply that of an IntegerField.
rel_field = self.rel.get_related_field()
if isinstance(rel_field, (AutoField, PositiveIntegerField, PositiveSmallIntegerField)):
return IntegerField().db_type()
return rel_field.db_type()
class OneToOneField(ForeignKey):
"""
A OneToOneField is essentially the same as a ForeignKey, with the exception
    that it always carries a "unique" constraint with it and the reverse relation
always returns the object pointed to (since there will only ever be one),
rather than returning a list.
"""
def __init__(self, to, to_field=None, **kwargs):
kwargs['unique'] = True
kwargs['editable'] = False
if 'num_in_admin' not in kwargs:
kwargs['num_in_admin'] = 0
super(OneToOneField, self).__init__(to, to_field, OneToOneRel, **kwargs)
def contribute_to_related_class(self, cls, related):
setattr(cls, related.get_accessor_name(),
SingleRelatedObjectDescriptor(related))
if not cls._meta.one_to_one_field:
cls._meta.one_to_one_field = self
class ManyToManyField(RelatedField, Field):
def __init__(self, to, **kwargs):
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
kwargs['rel'] = ManyToManyRel(to,
num_in_admin=kwargs.pop('num_in_admin', 0),
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
symmetrical=kwargs.pop('symmetrical', True),
through=kwargs.pop('through', None))
self.db_table = kwargs.pop('db_table', None)
if kwargs['rel'].through is not None:
self.creates_table = False
assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used."
else:
self.creates_table = True
Field.__init__(self, **kwargs)
msg = ugettext_lazy('Hold down "Control", or "Command" on a Mac, to select more than one.')
self.help_text = string_concat(self.help_text, ' ', msg)
def get_manipulator_field_objs(self):
choices = self.get_choices_default()
return [curry(oldforms.SelectMultipleField, size=min(max(len(choices), 5), 15), choices=choices)]
def get_choices_default(self):
return Field.get_choices(self, include_blank=False)
def _get_m2m_db_table(self, opts):
"Function that can be curried to provide the m2m table name for this relation"
if self.rel.through is not None:
return self.rel.through_model._meta.db_table
elif self.db_table:
return self.db_table
else:
return '%s_%s' % (opts.db_table, self.name)
def _get_m2m_column_name(self, related):
"Function that can be curried to provide the source column name for the m2m table"
try:
return self._m2m_column_name_cache
        except AttributeError:
if self.rel.through is not None:
for f in self.rel.through_model._meta.fields:
if hasattr(f,'rel') and f.rel and f.rel.to == related.model:
self._m2m_column_name_cache = f.column
break
# If this is an m2m relation to self, avoid the inevitable name clash
elif related.model == related.parent_model:
self._m2m_column_name_cache = 'from_' + related.model._meta.object_name.lower() + '_id'
else:
self._m2m_column_name_cache = related.model._meta.object_name.lower() + '_id'
# Return the newly cached value
return self._m2m_column_name_cache
def _get_m2m_reverse_name(self, related):
"Function that can be curried to provide the related column name for the m2m table"
try:
return self._m2m_reverse_name_cache
        except AttributeError:
if self.rel.through is not None:
found = False
for f in self.rel.through_model._meta.fields:
if hasattr(f,'rel') and f.rel and f.rel.to == related.parent_model:
if related.model == related.parent_model:
# If this is an m2m-intermediate to self,
# the first foreign key you find will be
# the source column. Keep searching for
# the second foreign key.
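                            # Hypothetical example: Person.friends =
                            # ManyToManyField('self', through='Friendship'),
                            # where Friendship holds two ForeignKeys to
                            # Person; the first FK found is the source
                            # column, the second the reverse column.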
if found:
self._m2m_reverse_name_cache = f.column
break
else:
found = True
else:
self._m2m_reverse_name_cache = f.column
break
# If this is an m2m relation to self, avoid the inevitable name clash
elif related.model == related.parent_model:
self._m2m_reverse_name_cache = 'to_' + related.parent_model._meta.object_name.lower() + '_id'
else:
self._m2m_reverse_name_cache = related.parent_model._meta.object_name.lower() + '_id'
# Return the newly cached value
return self._m2m_reverse_name_cache
def isValidIDList(self, field_data, all_data):
"Validates that the value is a valid list of foreign keys"
mod = self.rel.to
try:
pks = map(int, field_data.split(','))
except ValueError:
# the CommaSeparatedIntegerField validator will catch this error
return
objects = mod._default_manager.in_bulk(pks)
if len(objects) != len(pks):
badkeys = [k for k in pks if k not in objects]
raise validators.ValidationError, ungettext("Please enter valid %(self)s IDs. The value %(value)r is invalid.",
"Please enter valid %(self)s IDs. The values %(value)r are invalid.", len(badkeys)) % {
'self': self.verbose_name,
'value': len(badkeys) == 1 and badkeys[0] or tuple(badkeys),
}
    def flatten_data(self, follow, obj=None):
new_data = {}
if obj:
instance_ids = [instance._get_pk_val() for instance in getattr(obj, self.name).all()]
new_data[self.name] = instance_ids
else:
# In required many-to-many fields with only one available choice,
# select that one available choice.
if not self.blank and not self.rel.edit_inline:
choices_list = self.get_choices_default()
if len(choices_list) == 1:
new_data[self.name] = [choices_list[0][0]]
return new_data
def contribute_to_class(self, cls, name):
super(ManyToManyField, self).contribute_to_class(cls, name)
# Add the descriptor for the m2m relation
setattr(cls, self.name, ReverseManyRelatedObjectsDescriptor(self))
# Set up the accessor for the m2m table name for the relation
self.m2m_db_table = curry(self._get_m2m_db_table, cls._meta)
# Populate some necessary rel arguments so that cross-app relations
# work correctly.
if isinstance(self.rel.through, basestring):
def resolve_through_model(field, model, cls):
field.rel.through_model = model
add_lazy_relation(cls, self, self.rel.through, resolve_through_model)
elif self.rel.through:
self.rel.through_model = self.rel.through
self.rel.through = self.rel.through._meta.object_name
if isinstance(self.rel.to, basestring):
target = self.rel.to
else:
target = self.rel.to._meta.db_table
cls._meta.duplicate_targets[self.column] = (target, "m2m")
def contribute_to_related_class(self, cls, related):
# m2m relations to self do not have a ManyRelatedObjectsDescriptor,
# as it would be redundant - unless the field is non-symmetrical.
if related.model != related.parent_model or not self.rel.symmetrical:
# Add the descriptor for the m2m relation
setattr(cls, related.get_accessor_name(), ManyRelatedObjectsDescriptor(related))
# Set up the accessors for the column names on the m2m table
self.m2m_column_name = curry(self._get_m2m_column_name, related)
self.m2m_reverse_name = curry(self._get_m2m_reverse_name, related)
def set_attributes_from_rel(self):
pass
def value_from_object(self, obj):
"Returns the value of this field in the given model instance."
return getattr(obj, self.attname).all()
def save_form_data(self, instance, data):
setattr(instance, self.attname, data)
def formfield(self, **kwargs):
defaults = {'form_class': forms.ModelMultipleChoiceField, 'queryset': self.rel.to._default_manager.complex_filter(self.rel.limit_choices_to)}
defaults.update(kwargs)
# If initial is passed in, it's a list of related objects, but the
# MultipleChoiceField takes a list of IDs.
if defaults.get('initial') is not None:
defaults['initial'] = [i._get_pk_val() for i in defaults['initial']]
return super(ManyToManyField, self).formfield(**defaults)
def db_type(self):
# A ManyToManyField is not represented by a single column,
# so return None.
return None
| bsd-3-clause | -879,966,623,721,259,000 | 44.735546 | 217 | 0.60641 | false |
conductorproject/conductor | tests/testcollections.py | 1 | 1421 | """
Unit tests for conductor's collections module
"""
import logging
import mock
from nose import tools
from conductor import collections
from conductor.settings import settings
from conductor import errors
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__file__)
class TestCollectionFactory(object):
@mock.patch("conductor.collections.Collection", autospec=True)
def test_get_collection(self, mock_collection):
"""The Collection factory is able to create new Collection instances"""
valid_name = "fake_valid_name"
settings.collections = [{"short_name": valid_name}]
factory = collections.collection_factory
factory.get_collection(valid_name)
tools.assert_true(mock_collection.called)
invalid_name = "fake_invalid_name"
tools.assert_raises(errors.CollectionNotDefinedError,
factory.get_collection, invalid_name)
class TestCollection(object):
def test_collection_creation(self):
"""Collections are created with or without a long name."""
short_name = "fake"
c1 = collections.Collection(short_name)
tools.eq_(c1.short_name, short_name)
tools.eq_(c1.name, short_name)
long_name = "a fake name"
c2 = collections.Collection(short_name, name=long_name)
tools.eq_(c2.short_name, short_name)
tools.eq_(c2.name, long_name)
| agpl-3.0 | 7,126,767,272,529,742,000 | 31.295455 | 79 | 0.684025 | false |
kirberich/gerber_to_scad | vector.py | 1 | 3314 | # Basic vector maths class
import math
class V(object):
def __init__(self, x=0, y=0):
self.x = float(x)
self.y = float(y)
def __unicode__(self):
return "(%s, %s)" % (self.x, self.y)
__repr__ = __unicode__
@classmethod
def from_tuple(cls, coordinates):
x, y = coordinates
return V(x, y)
def as_tuple(self):
return (self.x, self.y)
@classmethod
def intersection(cls, o1, d1, o2, d2):
""" Find intersection of two vectors, if any """
try:
l2 = ((o2.x - o1.x) * d1.y / d1.x - o2.y + o1.y) / (d2.y - d2.x * d1.y / d1.x)
return o2 + d2 * l2
except ZeroDivisionError:
return None
@classmethod
def point_line_projection(cls, v1, v2, p, limit_to_segment=False):
""" Returns the projection of the point p on the line defined
by the two endpoints v1 and v2
"""
d = v2 - v1
l2 = d.abs_sq()
# If v1 and v2 are equal, simply return v1 (the line direction is undefined)
if l2 == 0:
return v1
# Get the projection factor
a = ((p - v1) * d) / l2
# Limit the projection to be limited to stay between v1 and v2, if requested
if limit_to_segment:
if a < 0:
return v1
if a > 1:
return v2
return v1 + d * a
def abs_sq(self):
""" Square of absolute value of vector self """
return abs(self.x * self.x + self.y * self.y)
def consume_tuple(self, other):
if isinstance(other, tuple) or isinstance(other, list):
return V(other[0], other[1])
return other
    def cross(self, other):
        """ 2D cross product (the scalar z-component of self x other) """
        return self.x * other.y - other.x * self.y
def rotate(self, theta, as_degrees=False):
""" Adapted from https://gist.github.com/mcleonard/5351452.
        Rotate this vector by theta, given in radians (or in degrees if as_degrees is True).
"""
if as_degrees:
theta = math.radians(theta)
dc, ds = math.cos(theta), math.sin(theta)
x, y = dc*self.x - ds*self.y, ds*self.x + dc*self.y
return V(x, y)
def __abs__(self):
return math.sqrt(self.abs_sq())
def __cmp__(self, other):
other = self.consume_tuple(other)
if self.x == other.x and self.y == other.y:
return 0
        if abs(self) < abs(other):
return -1
return 1
def __nonzero__(self):
if self.x or self.y:
return True
return False
def __neg__(self):
return V(-self.x, -self.y)
def __add__(self, other):
other = self.consume_tuple(other)
return V(self.x + other.x, self.y + other.y)
def __sub__(self, other):
other = self.consume_tuple(other)
return V(self.x - other.x, self.y - other.y)
def __mul__(self, other):
other = self.consume_tuple(other)
if isinstance(other, V):
return (self.x * other.x + self.y * other.y)
return V(other * self.x, other * self.y)
def __div__(self, other):
if not other:
raise Exception("Division by zero")
other = float(other)
return V(self.x / other, self.y / other)
__truediv__ = __div__
| mit | -2,852,812,981,836,606,500 | 27.324786 | 90 | 0.521123 | false |
popazerty/beyonwiz-4.1 | lib/python/Components/Converter/EventName.py | 1 | 4665 | from enigma import eEPGCache
from Components.Converter.Converter import Converter
from Components.Element import cached
from Components.Converter.genre import getGenreStringSub
class EventName(Converter, object):
NAME = 0
SHORT_DESCRIPTION = 1
EXTENDED_DESCRIPTION = 2
FULL_DESCRIPTION = 3
ID = 4
NAME_NOW = 5
NAME_NEXT = 6
GENRE = 7
RATING = 8
SRATING = 9
NEXT_DESCRIPTION = 21
THIRD_NAME = 22
THIRD_DESCRIPTION = 23
def __init__(self, type):
Converter.__init__(self, type)
self.epgcache = eEPGCache.getInstance()
if type == "Description":
self.type = self.SHORT_DESCRIPTION
elif type == "ExtendedDescription":
self.type = self.EXTENDED_DESCRIPTION
elif type == "FullDescription":
self.type = self.FULL_DESCRIPTION
elif type == "ID":
self.type = self.ID
elif type == "NameNow" or type == "NowName":
self.type = self.NAME_NOW
elif type == "NameNext" or type == "NextName":
self.type = self.NAME_NEXT
elif type == "Genre":
self.type = self.GENRE
elif type == "Rating":
self.type = self.RATING
elif type == "SmallRating":
self.type = self.SRATING
elif type == "NextDescription":
self.type = self.NEXT_DESCRIPTION
elif type == "ThirdName":
self.type = self.THIRD_NAME
elif type == "ThirdDescription":
self.type = self.THIRD_DESCRIPTION
else:
self.type = self.NAME
@cached
def getText(self):
event = self.source.event
if event is None:
return ""
if self.type == self.NAME:
if event.getEventName() == "Visibile gratis su tv terrestre e TivuSat":
return event.getShortDescription().title()
else:
return event.getEventName()
elif self.type == self.SRATING:
rating = event.getParentalData()
if rating is None:
return ""
else:
country = rating.getCountryCode()
age = rating.getRating()
if age == 0:
return _("All ages")
elif age > 15:
return _("bc%s") % age
else:
age += 3
return " %d+" % age
elif self.type == self.RATING:
rating = event.getParentalData()
if rating is None:
return ""
else:
country = rating.getCountryCode()
age = rating.getRating()
if age == 0:
return _("Rating undefined")
elif age > 15:
return _("Rating defined by broadcaster - %d") % age
else:
age += 3
return _("Minimum age %d years") % age
elif self.type == self.GENRE:
genre = event.getGenreData()
if genre is None:
return ""
else:
return getGenreStringSub(genre.getLevel1(), genre.getLevel2())
elif self.type == self.NAME_NOW:
return pgettext("now/next: 'now' event label", "Now") + ": " + event.getEventName()
elif self.type == self.SHORT_DESCRIPTION:
return event.getShortDescription()
elif self.type == self.EXTENDED_DESCRIPTION:
return event.getExtendedDescription() or event.getShortDescription()
elif self.type == self.FULL_DESCRIPTION:
description = event.getShortDescription()
extended = event.getExtendedDescription()
if description and extended:
description += '\n\n'
return description + extended
elif self.type == self.ID:
return str(event.getEventId())
		elif self.type == self.NAME_NEXT or self.type >= self.NEXT_DESCRIPTION:
try:
reference = self.source.service
info = reference and self.source.info
if info is None:
return
test = [ 'ITSECX', (reference.toString(), 1, -1, 1440) ] # search next 24 hours
self.list = [] if self.epgcache is None else self.epgcache.lookupEvent(test)
if self.list:
if self.type == self.NAME_NEXT and self.list[1][1]:
return pgettext("now/next: 'next' event label", "Next") + ": " + self.list[1][1]
elif self.type == self.NEXT_DESCRIPTION and (self.list[1][2] or self.list[1][3]):
description = self.list[1][2]
extended = self.list[1][3]
if (description and extended) and (description[0:20] != extended[0:20]):
description += '\n'
return description + extended
elif self.type == self.THIRD_NAME and self.list[2][1]:
return pgettext("third event: 'third' event label", "Later") + ": " + self.list[2][1]
elif self.type == self.THIRD_DESCRIPTION and (self.list[2][2] or self.list[2][3]):
description = self.list[2][2]
extended = self.list[2][3]
if (description and extended) and (description[0:20] != extended[0:20]):
description += '\n'
return description + extended
else:
# failed to return any epg data.
return ""
except:
# failed to return any epg data.
if self.type == self.NAME_NEXT:
return pgettext("now/next: 'next' event label", "Next") + ": " + event.getEventName()
return ""
text = property(getText)
| gpl-2.0 | -8,038,430,678,504,437,000 | 30.52027 | 92 | 0.646731 | false |
noplay/gns3-gui | gns3/ui/node_configurator_dialog_ui.py | 1 | 5531 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/grossmj/PycharmProjects/gns3-gui/gns3/ui/node_configurator_dialog.ui'
#
# Created: Sun Aug 17 18:05:14 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_NodeConfiguratorDialog(object):
def setupUi(self, NodeConfiguratorDialog):
NodeConfiguratorDialog.setObjectName(_fromUtf8("NodeConfiguratorDialog"))
NodeConfiguratorDialog.resize(689, 454)
self.gridlayout = QtGui.QGridLayout(NodeConfiguratorDialog)
self.gridlayout.setObjectName(_fromUtf8("gridlayout"))
self.splitter = QtGui.QSplitter(NodeConfiguratorDialog)
self.splitter.setOrientation(QtCore.Qt.Horizontal)
self.splitter.setObjectName(_fromUtf8("splitter"))
self.uiNodesTreeWidget = QtGui.QTreeWidget(self.splitter)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.uiNodesTreeWidget.sizePolicy().hasHeightForWidth())
self.uiNodesTreeWidget.setSizePolicy(sizePolicy)
self.uiNodesTreeWidget.setObjectName(_fromUtf8("uiNodesTreeWidget"))
self.uiNodesTreeWidget.header().setVisible(False)
self.verticalLayout = QtGui.QWidget(self.splitter)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.vboxlayout = QtGui.QVBoxLayout(self.verticalLayout)
self.vboxlayout.setSpacing(4)
self.vboxlayout.setMargin(0)
self.vboxlayout.setObjectName(_fromUtf8("vboxlayout"))
self.uiTitleLabel = QtGui.QLabel(self.verticalLayout)
font = QtGui.QFont()
font.setPointSize(16)
self.uiTitleLabel.setFont(font)
self.uiTitleLabel.setFrameShape(QtGui.QFrame.Box)
self.uiTitleLabel.setFrameShadow(QtGui.QFrame.Sunken)
self.uiTitleLabel.setTextFormat(QtCore.Qt.PlainText)
self.uiTitleLabel.setObjectName(_fromUtf8("uiTitleLabel"))
self.vboxlayout.addWidget(self.uiTitleLabel)
self.uiConfigStackedWidget = QtGui.QStackedWidget(self.verticalLayout)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.uiConfigStackedWidget.sizePolicy().hasHeightForWidth())
self.uiConfigStackedWidget.setSizePolicy(sizePolicy)
self.uiConfigStackedWidget.setFrameShape(QtGui.QFrame.Box)
self.uiConfigStackedWidget.setFrameShadow(QtGui.QFrame.Sunken)
self.uiConfigStackedWidget.setObjectName(_fromUtf8("uiConfigStackedWidget"))
self.uiEmptyPageWidget = QtGui.QWidget()
self.uiEmptyPageWidget.setObjectName(_fromUtf8("uiEmptyPageWidget"))
self.vboxlayout1 = QtGui.QVBoxLayout(self.uiEmptyPageWidget)
self.vboxlayout1.setSpacing(0)
self.vboxlayout1.setContentsMargins(0, 4, 0, 0)
self.vboxlayout1.setObjectName(_fromUtf8("vboxlayout1"))
spacerItem = QtGui.QSpacerItem(20, 20, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.vboxlayout1.addItem(spacerItem)
self.textLabel = QtGui.QLabel(self.uiEmptyPageWidget)
self.textLabel.setAlignment(QtCore.Qt.AlignCenter)
self.textLabel.setObjectName(_fromUtf8("textLabel"))
self.vboxlayout1.addWidget(self.textLabel)
spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.vboxlayout1.addItem(spacerItem1)
self.uiConfigStackedWidget.addWidget(self.uiEmptyPageWidget)
self.vboxlayout.addWidget(self.uiConfigStackedWidget)
self.gridlayout.addWidget(self.splitter, 0, 0, 1, 1)
self.uiButtonBox = QtGui.QDialogButtonBox(NodeConfiguratorDialog)
self.uiButtonBox.setOrientation(QtCore.Qt.Horizontal)
self.uiButtonBox.setStandardButtons(QtGui.QDialogButtonBox.Apply | QtGui.QDialogButtonBox.Cancel | QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Reset)
self.uiButtonBox.setObjectName(_fromUtf8("uiButtonBox"))
self.gridlayout.addWidget(self.uiButtonBox, 1, 0, 1, 1)
self.retranslateUi(NodeConfiguratorDialog)
self.uiConfigStackedWidget.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(NodeConfiguratorDialog)
def retranslateUi(self, NodeConfiguratorDialog):
NodeConfiguratorDialog.setWindowTitle(_translate("NodeConfiguratorDialog", "Node configurator", None))
self.uiNodesTreeWidget.headerItem().setText(0, _translate("NodeConfiguratorDialog", "Nodes", None))
self.uiTitleLabel.setText(_translate("NodeConfiguratorDialog", "Node Configuration", None))
self.textLabel.setText(_translate("NodeConfiguratorDialog", "Please select a node in the list \n"
"to display the configuration page.", None))
from . import resources_rc
| gpl-3.0 | 2,178,063,253,634,749,200 | 52.182692 | 164 | 0.736756 | false |
qqfly/arc | hks_gripper/src/hksGripperSrv2.py | 1 | 1469 | #!/usr/bin/env python
#from beginner_tutorials.srv import *
from aimm_srv.srv import hksGripper
import rospy
import socket
import string
from PIL import Image,ImageFont,ImageDraw
HOST = '192.168.1.101' # The remote host
PORT = 13000 # The same port as used by the server
s = None
state = 0
def handle_gripper_srv2(req):
print "Received the para: %d"%(req.position)
if req.position < 60 or req.position > 115:
print "Received para error!"
return -1
position = req.position
#print "Begin to TCP!"
ADDR=(HOST, PORT)
try:
hksSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except:
print "TCP creat fail!"
return -1
hksSocket.settimeout(1)
try:
hksSocket.connect(ADDR)
except:
print "TCP connect fail!"
return -1
hksSocket.settimeout(1)
try:
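        # Command frame format inferred from this script (not a documented
        # spec): "ID: <id> IDEND CMD: MOVE CMDEND MVPARA: <pos> MVEND", where
        # <pos> is the validated target position (60-115).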
hksSocket.send("ID: XDJ IDEND CMD: MOVE CMDEND MVPARA: " + str(position) + " MVEND" + "\r\n")
except:
print "TCP send fail!"
hksSocket.close()
return -1
    hksSocket.settimeout(2)
    try:
        data = hksSocket.recv(40)
        #print "Received string: %s"%(data)
    except:
        print "TCP receive fail!"
        hksSocket.close()
        return -1
    hksSocket.close()
    # success only if the controller acknowledged the move command
    if "ANS: Start" in data:
        return 1
    # not success
    return -1
def hks_gripper_srv_main():
rospy.init_node('hks_gripper_srv2')
s = rospy.Service('hksGpripperSrv2', hksGripper, handle_gripper_srv2)
print "HKSGRIPPER Ready to receive!"
rospy.spin()
if __name__ == "__main__":
hks_gripper_srv_main()
| bsd-3-clause | -5,293,182,609,316,089,000 | 22.31746 | 95 | 0.682777 | false |
cp16net/trove | trove/tests/unittests/guestagent/test_vertica_manager.py | 1 | 15814 | # Copyright [2015] Hewlett-Packard Development Company, L.P.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mock import MagicMock
from mock import patch
import testtools
from trove.common.context import TroveContext
from trove.common.exception import DatastoreOperationNotSupported
from trove.common import instance as rd_instance
from trove.guestagent.datastore.experimental.vertica.manager import Manager
from trove.guestagent.datastore.experimental.vertica.service import (
VerticaAppStatus)
from trove.guestagent.datastore.experimental.vertica.service import VerticaApp
from trove.guestagent import dbaas
from trove.guestagent import volume
from trove.guestagent.volume import VolumeDevice
class GuestAgentManagerTest(testtools.TestCase):
def setUp(self):
super(GuestAgentManagerTest, self).setUp()
self.context = TroveContext()
self.manager = Manager()
self.origin_format = volume.VolumeDevice.format
self.origin_migrate_data = volume.VolumeDevice.migrate_data
self.origin_mount = volume.VolumeDevice.mount
self.origin_unmount = volume.VolumeDevice.unmount
self.origin_mount_points = volume.VolumeDevice.mount_points
self.origin_set_read = volume.VolumeDevice.set_readahead_size
self.origin_install_vertica = VerticaApp.install_vertica
self.origin_create_db = VerticaApp.create_db
self.origin_stop_db = VerticaApp.stop_db
self.origin_start_db = VerticaApp.start_db
self.origin_restart = VerticaApp.restart
self.origin_install_if = VerticaApp.install_if_needed
self.origin_complete_install = VerticaApp.complete_install_or_restart
def tearDown(self):
super(GuestAgentManagerTest, self).tearDown()
volume.VolumeDevice.format = self.origin_format
volume.VolumeDevice.migrate_data = self.origin_migrate_data
volume.VolumeDevice.mount = self.origin_mount
volume.VolumeDevice.unmount = self.origin_unmount
volume.VolumeDevice.mount_points = self.origin_mount_points
volume.VolumeDevice.set_readahead_size = self.origin_set_read
VerticaApp.create_db = self.origin_create_db
VerticaApp.install_vertica = self.origin_install_vertica
VerticaApp.stop_db = self.origin_stop_db
VerticaApp.start_db = self.origin_start_db
VerticaApp.restart = self.origin_restart
VerticaApp.install_if_needed = self.origin_install_if
VerticaApp.complete_install_or_restart = self.origin_complete_install
def test_update_status(self):
mock_status = MagicMock()
self.manager.appStatus = mock_status
self.manager.update_status(self.context)
mock_status.update.assert_any_call()
def _prepare_dynamic(self, packages,
config_content='MockContent', device_path='/dev/vdb',
backup_id=None,
overrides=None, is_mounted=False):
# covering all outcomes is starting to cause trouble here
expected_vol_count = 1 if device_path else 0
        backup_info = None
        if backup_id:
            backup_info = {'id': backup_id,
                           'location': 'fake-location',
                           'type': 'InnoBackupEx',
                           'checksum': 'fake-checksum',
                           }
mock_status = MagicMock()
self.manager.appStatus = mock_status
mock_status.begin_install = MagicMock(return_value=None)
path_exists_function = MagicMock(return_value=True)
volume.VolumeDevice.format = MagicMock(return_value=None)
volume.VolumeDevice.migrate_data = MagicMock(return_value=None)
volume.VolumeDevice.mount = MagicMock(return_value=None)
mount_points = []
if is_mounted:
mount_points = ['/mnt']
VolumeDevice.mount_points = MagicMock(return_value=mount_points)
VolumeDevice.unmount = MagicMock(return_value=None)
VerticaApp.install_if_needed = MagicMock(return_value=None)
VerticaApp.install_vertica = MagicMock(return_value=None)
VerticaApp.create_db = MagicMock(return_value=None)
VerticaApp.prepare_for_install_vertica = MagicMock(return_value=None)
VerticaApp.complete_install_or_restart = MagicMock(return_value=None)
# invocation
self.manager.prepare(context=self.context, packages=packages,
config_contents=config_content,
databases=None,
memory_mb='2048', users=None,
device_path=device_path,
mount_point="/var/lib/vertica",
backup_info=backup_info,
overrides=None,
cluster_config=None,
path_exists_function=path_exists_function)
self.assertEqual(expected_vol_count, VolumeDevice.format.call_count)
self.assertEqual(expected_vol_count,
VolumeDevice.migrate_data.call_count)
self.assertEqual(expected_vol_count,
VolumeDevice.mount_points.call_count)
if is_mounted:
self.assertEqual(1, VolumeDevice.unmount.call_count)
else:
self.assertEqual(0, VolumeDevice.unmount.call_count)
VerticaApp.install_if_needed.assert_any_call(packages)
VerticaApp.prepare_for_install_vertica.assert_any_call()
VerticaApp.install_vertica.assert_any_call()
VerticaApp.create_db.assert_any_call()
VerticaApp.complete_install_or_restart.assert_any_call()
def test_prepare_pkg(self):
self._prepare_dynamic(['vertica'])
def test_prepare_no_pkg(self):
self._prepare_dynamic([])
def test_restart(self):
mock_status = MagicMock()
self.manager.appStatus = mock_status
VerticaApp.restart = MagicMock(return_value=None)
# invocation
self.manager.restart(self.context)
# verification/assertion
VerticaApp.restart.assert_any_call()
def test_stop_db(self):
mock_status = MagicMock()
self.manager.appStatus = mock_status
VerticaApp.stop_db = MagicMock(return_value=None)
# invocation
self.manager.stop_db(self.context)
# verification/assertion
VerticaApp.stop_db.assert_any_call(do_not_start_on_reboot=False)
@patch.object(VerticaApp, 'install_vertica')
@patch.object(VerticaApp, '_export_conf_to_members')
@patch.object(VerticaApp, 'create_db')
def test_install_cluster(self, mock_install, mock_export, mock_create_db):
members = ['test1', 'test2']
self.manager.install_cluster(self.context, members)
mock_install.assert_called_with('test1,test2')
mock_export.assert_called_with(members)
mock_create_db.assert_called_with('test1,test2')
@patch.object(VerticaAppStatus, 'set_status')
@patch.object(VerticaApp, 'install_cluster',
side_effect=RuntimeError("Boom!"))
def test_install_cluster_failure(self, mock_install, mock_set_status):
members = ["test1", "test2"]
self.assertRaises(RuntimeError, self.manager.install_cluster,
self.context, members)
mock_set_status.assert_called_with(rd_instance.ServiceStatuses.FAILED)
@patch.object(volume.VolumeDevice, 'mount_points', return_value=[])
@patch.object(volume.VolumeDevice, 'unmount_device', return_value=None)
@patch.object(volume.VolumeDevice, 'mount', return_value=None)
@patch.object(volume.VolumeDevice, 'migrate_data', return_value=None)
@patch.object(volume.VolumeDevice, 'format', return_value=None)
@patch.object(VerticaApp, 'prepare_for_install_vertica')
@patch.object(VerticaApp, 'install_if_needed')
@patch.object(VerticaAppStatus, 'begin_install')
def _prepare_method(self, instance_id, instance_type, *args):
cluster_config = {"id": instance_id,
"instance_type": instance_type}
# invocation
self.manager.prepare(context=self.context, databases=None,
packages=['vertica'],
memory_mb='2048', users=None,
mount_point='/var/lib/vertica',
overrides=None,
cluster_config=cluster_config)
@patch.object(VerticaAppStatus, 'set_status')
def test_prepare_member(self, mock_set_status):
self._prepare_method("test-instance-3", "member")
mock_set_status.assert_called_with(
rd_instance.ServiceStatuses.BUILD_PENDING)
def test_reset_configuration(self):
try:
configuration = {'config_contents': 'some junk'}
self.manager.reset_configuration(self.context, configuration)
except Exception:
self.fail("reset_configuration raised exception unexpectedly.")
def test_rpc_ping(self):
output = self.manager.rpc_ping(self.context)
self.assertTrue(output)
@patch.object(VerticaAppStatus, 'set_status')
def test_prepare_invalid_cluster_config(self, mock_set_status):
self._prepare_method("test-instance-3", "query_router")
mock_set_status.assert_called_with(
rd_instance.ServiceStatuses.FAILED)
def test_get_filesystem_stats(self):
with patch.object(dbaas, 'get_filesystem_volume_stats'):
self.manager.get_filesystem_stats(self.context, '/var/lib/vertica')
dbaas.get_filesystem_volume_stats.assert_any_call(
'/var/lib/vertica')
def test_mount_volume(self):
with patch.object(volume.VolumeDevice, 'mount', return_value=None):
self.manager.mount_volume(self.context,
device_path='/dev/vdb',
mount_point='/var/lib/vertica')
test_mount = volume.VolumeDevice.mount.call_args_list[0]
test_mount.assert_called_with('/var/lib/vertica', False)
def test_unmount_volume(self):
with patch.object(volume.VolumeDevice, 'unmount', return_value=None):
self.manager.unmount_volume(self.context, device_path='/dev/vdb')
test_unmount = volume.VolumeDevice.unmount.call_args_list[0]
test_unmount.assert_called_with('/var/lib/vertica')
def test_resize_fs(self):
with patch.object(volume.VolumeDevice, 'resize_fs', return_value=None):
self.manager.resize_fs(self.context, device_path='/dev/vdb')
test_resize_fs = volume.VolumeDevice.resize_fs.call_args_list[0]
test_resize_fs.assert_called_with('/var/lib/vertica')
def test_cluster_complete(self):
mock_status = MagicMock()
mock_status.set_status = MagicMock()
self.manager.appStatus = mock_status
mock_status._get_actual_db_status = MagicMock(
return_value=rd_instance.ServiceStatuses.RUNNING)
self.manager.cluster_complete(self.context)
mock_status.set_status.assert_called_with(
rd_instance.ServiceStatuses.RUNNING)
def test_get_public_keys(self):
with patch.object(VerticaApp, 'get_public_keys',
return_value='some_key'):
test_key = self.manager.get_public_keys(self.context, 'test_user')
self.assertEqual('some_key', test_key)
def test_authorize_public_keys(self):
with patch.object(VerticaApp, 'authorize_public_keys',
return_value=None):
self.manager.authorize_public_keys(self.context,
'test_user',
'some_key')
VerticaApp.authorize_public_keys.assert_any_call(
'test_user', 'some_key')
def test_start_db_with_conf_changes(self):
with patch.object(VerticaApp, 'start_db_with_conf_changes'):
self.manager.start_db_with_conf_changes(self.context, 'something')
VerticaApp.start_db_with_conf_changes.assert_any_call('something')
def test_change_passwords(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.change_passwords,
self.context, None)
def test_update_attributes(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.update_attributes,
self.context, 'test_user', '%', {'name': 'new_user'})
def test_create_database(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.create_database,
self.context, [{'name': 'test_db'}])
def test_create_user(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.create_user,
self.context, [{'name': 'test_user'}])
def test_delete_database(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.delete_database,
self.context, [{'name': 'test_db'}])
def test_delete_user(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.delete_user,
self.context, [{'name': 'test_user'}])
def test_get_user(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.get_user,
self.context, 'test_user', '%')
def test_grant_access(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.grant_access,
self.context, 'test_user', '%', [{'name': 'test_db'}]
)
def test_revoke_access(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.revoke_access,
self.context, 'test_user', '%', [{'name': 'test_db'}]
)
def test_list_access(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.list_access,
self.context, 'test_user', '%')
def test_list_databases(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.list_databases,
self.context)
def test_list_users(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.list_users,
self.context)
def test_enable_root(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.enable_root,
self.context)
def test_is_root_enabled(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.is_root_enabled,
self.context)
def test_create_backup(self):
self.assertRaises(DatastoreOperationNotSupported,
self.manager.create_backup,
self.context, {})
| apache-2.0 | 8,298,361,802,309,006,000 | 44.442529 | 79 | 0.622233 | false |
projectatomic/osbs-client | tests/throughput-test-harness.py | 1 | 8369 | #!/usr/bin/python
from __future__ import absolute_import
import re
import sys
import time
import logging
import argparse
import subprocess
from dockerfile_parse import DockerfileParser
DEFAULT_BRANCH_PREFIX = "branch"
DEFAULT_BRANCH_COUNT = 100
DEFAULT_STAGGER_NUMBER = 10
DEFAULT_STAGGER_WAIT = 60
DEFAULT_KOJI_BIN = "koji"
DEFAULT_KOJI_TARGET = "extras-rhel-7.2-candidate"
DEFAULT_GIT_REMOTE = "origin"
class SubprocessError(Exception):
pass
def run(*args):
logging.info("running: %s", " ".join(args))
p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = p.communicate()
if out:
logging.info("stdout:\n%s", out.rstrip())
if err:
logging.info("stderr:\n%s", err.rstrip())
if p.returncode != 0:
raise SubprocessError("Subprocess failed w/ return code {}".format(p.returncode))
return out
def bump_release(df_path, branch):
parser = DockerfileParser(df_path)
oldrelease = parser.labels["Release"]
if not oldrelease:
raise RuntimeError("Dockerfile has no Release label")
m = re.match(r"(.*\D)?(\d+)", oldrelease)
if not m:
raise RuntimeError("Release does not end with number")
num = int(m.group(2))
    newrelease = "{}{:03d}".format(m.group(1) or "", num+1)
parser.labels["Release"] = newrelease
return newrelease
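# Illustrative behavior of bump_release (hypothetical label value): a Release
# of "1.branch001.iteration007" becomes "1.branch001.iteration008" -- the
# trailing integer is incremented and zero-padded to three digits.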
def set_initial_release(df_path, branch):
parser = DockerfileParser(df_path)
oldrelease = parser.labels.get("Release", "1")
newrelease = "{}.{}.iteration001".format(oldrelease, branch)
parser.labels["Release"] = newrelease
return newrelease
def get_branches(branch_prefix):
branches = run("git", "branch", "--list")
branches = [b[2:] for b in branches.splitlines()]
branches = [b for b in branches if b.startswith(branch_prefix)]
if not branches:
raise RuntimeError("No branches starting with %s found" % branch_prefix)
return branches
def cmd_create_branches(args):
branches = ["{}{:03d}".format(args.branch_prefix, n+1) for n in range(args.number)]
logging.info("Creating branches from current branch")
for b in branches:
run("git", "branch", b)
logging.info("Setting initial Release")
for b in branches:
run("git", "checkout", b)
release = set_initial_release("Dockerfile", b)
run("git", "add", "Dockerfile")
run("git", "commit", "--message", release)
logging.info("Pusing ALL branches to %s", args.git_remote)
run("git", "push", "--force", args.git_remote, *branches)
def cmd_delete_branches(args):
branches = get_branches(args.branch_prefix)
# otherwise we get Cannot delete the branch 'branch005' which you are currently on.
run("git", "checkout", "master")
logging.info("Deleting %d branches", len(branches))
run("git", "branch", "--delete", "--force", *branches)
logging.info("Deleting remote branches in %s", args.git_remote)
run("git", "push", "--force", args.git_remote, *[":"+b for b in branches])
def cmd_bump_release(args):
branches = get_branches(args.branch_prefix)
for b in branches:
run("git", "checkout", b)
release = bump_release("Dockerfile", b)
run("git", "add", "Dockerfile")
run("git", "commit", "--message", release)
logging.info("Pusing ALL branches to %s", args.git_remote)
run("git", "push", "--force", args.git_remote, *branches)
def cmd_start_builds(args):
branches = get_branches(args.branch_prefix)
if args.git_url:
remote_url = args.git_url
else:
for line in run("git", "remote", "-v").splitlines():
parts = line.split()
if parts[0] == args.git_remote and parts[2] == "(fetch)":
remote_url = parts[1]
break
else:
raise RuntimeError("Remote URL for repository %s not found" % args.git_remote)
stagger_remaining = args.stagger_number
failed_builds = {}
repo_url = []
if args.repo_url:
if args.use_koji:
repo_url = ['--repo-url', args.repo_url]
else:
repo_url = ['--add-yum-repo', args.repo_url]
for (i, b) in enumerate(branches):
if i >= DEFAULT_BRANCH_COUNT:
break
commit = run("git", "rev-parse", b).strip()
branch_url = "{}#{}".format(remote_url, commit)
try:
if args.use_koji:
run(args.koji_bin,
"container-build",
args.koji_target,
"--nowait",
"--git-branch", b,
branch_url,
*repo_url)
else:
run("osbs",
"build",
"-g", remote_url,
"-b", b,
"--target", args.koji_target,
"-c", "fake-component",
"-u", "vrutkovs",
"--no-logs",
*repo_url)
except SubprocessError as ex:
logging.exception("Failed to start build for branch %s", b)
failed_builds[b] = ex
if stagger_remaining > 0:
logging.info("Waiting %d seconds before starting another build", args.stagger_wait)
time.sleep(args.stagger_wait)
stagger_remaining -= 1
if failed_builds:
logging.error("Failed to start builds: %d", len(failed_builds))
for b, ex in failed_builds.items():
logging.error("Branch %s:", b, exc_info=ex)
def main():
parser = argparse.ArgumentParser(description="OSBS throughput test harness",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--branch-prefix", default=DEFAULT_BRANCH_PREFIX,
help="work on branches with this prefix")
parser.add_argument("--git-remote", default=DEFAULT_GIT_REMOTE,
help="git remote to use")
subparsers = parser.add_subparsers(help="subcommand")
create_branches = subparsers.add_parser("create-branches",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
create_branches.add_argument("--number", metavar="N", type=int, default=DEFAULT_BRANCH_COUNT,
help="number of branches to create")
create_branches.set_defaults(func=cmd_create_branches)
delete_branches = subparsers.add_parser("delete-branches",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
delete_branches.set_defaults(func=cmd_delete_branches)
bump_release = subparsers.add_parser("bump-release",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
bump_release.set_defaults(func=cmd_bump_release)
start_builds = subparsers.add_parser("start-builds",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
start_builds.add_argument("--stagger-number", metavar="N", type=int,
default=DEFAULT_STAGGER_NUMBER,
help="wait between starting N first builds")
start_builds.add_argument("--stagger-wait", metavar="SECONDS", type=int,
default=DEFAULT_STAGGER_WAIT,
help="amount of time to wait between initial builds")
start_builds.add_argument("--use-koji", default=False, action="store_true",
help="use koji to submit builds (default: use osbs")
start_builds.add_argument("--koji-bin", default=DEFAULT_KOJI_BIN, help="koji executable")
start_builds.add_argument("--koji-target", default=DEFAULT_KOJI_TARGET,
help="koji target to build in")
start_builds.add_argument("--git-url",
help="url of git repo to pass to koji "
"(autodetected if not specified)")
start_builds.add_argument("--repo-url", help="url of rpm repo to install for builds")
start_builds.set_defaults(func=cmd_start_builds)
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
args = parser.parse_args()
args.func(args)
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | 6,371,666,067,306,513,000 | 34.918455 | 99 | 0.590752 | false |
Torkvamedo/smx | tasks/part_2_Se7en.py | 1 | 5139 | #--------GoF (creational patterns) Builder-----------------------------------------------------------------------------
#
# class Building:
# def make_basement(self,basement):
# pass
# def make_walls(self,walls):
# pass
# def make_roof(self,roof):
# pass
#
# class Sky_scriber(Building):
# def __init__(self):
# self.basement = None
# self.walls = None
# self.roof = None
#
# def make_basement(self, basement):
# self.basement = basement
#
# def make_walls(self, walls):
# self.walls = walls
#
# def make_roof(self,roof):
# self.roof = roof
#
# class Cottage(Building):
# def __init__(self):
# self.basement = None
# self.walls = None
# self.roof = None
#
# def make_basement(self, basement):
# self.basement = basement
#
# def make_walls(self, walls):
# self.walls = walls
#
# def make_roof(self, roof):
# self.roof = roof
#
# class Foreman:
# def __init__(self,builder):
# self.builder = builder
#
# def build(self):
# self.builder.build_basement()
# self.builder.build_walls()
# self.builder.build_roof()
#
#
# class Builder:
# def __init__(self):
# self.building = None
# def get_building(self):
# return self.building
#
# def build_basement(self):
# pass
#
# def build_walls(self):
# pass
#
# def build_roof(self):
# pass
#
#
# class Sky_scriber_builder(Builder):
# def __init__(self):
# Builder.__init__(self)
# self.building = Sky_scriber()
#
# def build_basement(self):
# self.building.make_basement("basement")
#
# def build_walls(self):
# self.building.make_walls("walls")
#
# def build_roof(self):
# self.building.make_roof("roof")
#
#
# class Cottage_builder(Builder):
# def __init__(self):
# Builder.__init__(self)
#         self.building = Cottage()
#
# def build_basement(self):
# self.building.make_basement("basement")
#
# def build_walls(self):
# self.building.make_walls("walls")
#
# def build_roof(self):
# self.building.make_roof("roof")
#
# def main():
# cottage_builder = Cottage_builder()
# foreman = Foreman(cottage_builder)
# foreman.build()
# cottage = cottage_builder.get_building()
#
# main()
# ----------------------------------------------------------------------------------------------------------------------
#
# ----------Singleton (anti-)pattern------------------------------------------------------------------------------------
# class Singleton():
# instance = None
# def __new__(cls, *args, **kwargs):
# if Singleton.instance == None:
# Singleton.instance = object.__new__(cls)
# return Singleton.instance
# def __init__(self,a):
# self.a = a
#
# a1 = Singleton(10)
# a2 = Singleton(39)
# print(a1.a)
# print(a2.a)
# print(a1==a2)
# ----------------------------------------------------------------------------------------------------------------------
#
# -------Flyweight pattern---------------------------------------------------------------------------------------
#
# class Character_flyweight():
# def __init__(self,character):
# self.character = character
#
# class Factory():
# def __init__(self):
# self.map = {}
# def instance_character(self,char):
# if self.map.get(char)!= None:
# return self.map.get(char)
# else:
# c = Character_flyweight(char)
# self.map[char] = c
# return c
# factory = Factory()
#
# def convert_to_list(word):
# lis = []
# for char in word:
# lis.append(factory.instance_character(char))
# return lis
#
# lis_word = convert_to_list("abbaaa")
# print(lis_word)
# ----------------------------------------------------------------------------------------------------------------------
#----------------Proxy pattern-------cache implementation---------------------------------------------------------------
# class Operation:
# def operation(self,a,b):
# return a + b
#
# class Proxy_operation():
# def __init__(self):
#         self.op = Operation()
# self.cache = []
#
# def operation(self,a,b):
# for tup in self.cache:
# if tup[0] == a and tup[1] == b:
# return tup[2]
#
#
#         res = self.op.operation(a,b)
# self.cache.append((a,b,res))
# return res
# ----------------------------------------------------------------------------------------------------------------------
# =========Working with super()===================================
# class A:
# def __init__(self,a):
# self.a = a
#
# class B:
#     def __init__(self,a,b):
# A.__init__(self,a)
# self.b = b
# ------------------------
#
# class B(A):
# def __init__(self,a,b):
# super(B,self).__init__(a)
# self.b = b
# pb = B(10,20)
# print(pb)
| unlicense | -9,211,868,288,961,263,000 | 25.989362 | 120 | 0.440284 | false |
Empire-of-Code-Puzzles/checkio-empire-square-spiral | verification/src/tests.py | 1 | 1833 | """
TESTS is a dict with all your tests.
Keys for this will be the categories' names.
Each test is a dict with:
"input" -- input data for the user function
"answer" -- your right answer
"explanation" -- optional key, used for additional info in animation.
"""
TESTS = {
"Rank_1": [
{
"input": [1, 9],
"answer": 2
},
{
"input": [9, 1],
"answer": 2
},
{
"input": [10, 25],
"answer": 1
},
{
"input": [5, 9],
"answer": 4
},
{
"input": [26, 31],
"answer": 5
},
{
"input": [50, 16],
"answer": 10
},
{
"input": [1, 2],
"answer": 1
},
{
"input": [99, 1],
"answer": 8
},
{
"input": [999, 1],
"answer": 26
},
{
"input": [998, 999],
"answer": 1
},
{
"input": [73, 91],
"answer": 18
},
{
"input": [100, 82],
"answer": 18
},
{
"input": [900, 961],
"answer": 59
},
{
"input": [86, 69],
"answer": 9
},
{
"input": [2, 18],
"answer": 4
},
{
"input": [777, 555],
"answer": 18
},
{
"input": [100, 10],
"answer": 12
},
{
"input": [69, 96],
"answer": 9
},
{
"input": [521, 2],
"answer": 13
},
{
"input": [81, 65],
"answer": 16
},
]
}
| gpl-2.0 | -472,377,947,165,673,100 | 18.709677 | 84 | 0.286961 | false |
SuLab/biothings.api | biothings/databuild/mapper.py | 1 | 2870 | from biothings.utils.dataload import alwayslist
class BaseMapper(object):
"""
Basic mapper used to convert documents.
if mapper's name matches source's metadata's mapper,
mapper.convert(docs) call will be used to
process/convert/whatever passed documents
"""
def __init__(self, name=None, *args, **kwargs):
self.name = name
def load(self):
"""
Do whatever is required to fill mapper with mapping data
Can be called multiple time, the first time only will load data
"""
raise NotImplementedError("sub-class and implement me")
def process(self,docs):
"""
Convert given docs into other docs.
"""
raise NotImplementedError("sub-class and implement me")
class IDBaseMapper(BaseMapper):
"""
Provide mapping between different sources
"""
def __init__(self, name=None, convert_func=None, *args, **kwargs):
"""
        'name' may match a "mapper" metadata field (see uploaders). If None, mapper
will be applied to any document from a resource without "mapper" argument
"""
        super(IDBaseMapper, self).__init__(name=name)
self.map = None
self.convert_func = convert_func
def translate(self,_id,transparent=False):
"""
Return _id translated through mapper, or _id itself if not part of mapper
If 'transparent' and no match, original _id will be returned
"""
if self.need_load():
self.load()
default = transparent and _id or None
conv = self.convert_func or (lambda x: x)
        return self.map.get(conv(_id), default)
def __contains__(self,_id):
if self.need_load():
self.load()
return _id in self.map
def __len__(self):
if self.need_load():
self.load()
return len(self.map)
def process(self,docs,key_to_convert="_id",transparent=True):
"""
Process 'key_to_convert' document key using mapping.
If transparent and no match, original key will be used
(so there's no change). Else, if no match, document will
be discarded (default).
Warning: key to be translated must not be None (it's considered
a non-match)
"""
for doc in docs:
_id = doc.get(key_to_convert)
_newid = self.translate(_id,transparent)
if _newid is None and not transparent:
continue
for _oneid in alwayslist(_newid):
_oneid = str(_oneid)
doc[key_to_convert] = _oneid
yield doc
def need_load(self):
return self.map is None
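# Illustrative concrete mapper (hypothetical data, not part of biothings):
# load() fills self.map once, then process() rewrites document _ids.
#
#   class UpperCaseMapper(IDBaseMapper):
#       def load(self):
#           if self.need_load():
#               self.map = {"a": "A", "b": "B"}
#
#   docs = UpperCaseMapper(name="upper").process([{"_id": "a"}, {"_id": "x"}])
#   # yields {"_id": "A"}; "x" passes through unchanged (transparent=True)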
class TransparentMapper(BaseMapper):
def load(self, *args, **kwargs):
pass
def process(self, docs, *args, **kwargs):
return docs
| apache-2.0 | 3,674,017,107,009,157,000 | 29.860215 | 84 | 0.591986 | false |
rafaelferrero/sigcaw | grados/migrations/0001_initial.py | 1 | 2325 | # Generated by Django 2.1.2 on 2018-12-01 20:06
import colorfield.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Escalafon',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=255, verbose_name='Nombre')),
],
options={
'verbose_name': 'Escalafón',
'verbose_name_plural': 'Escalafones',
},
),
migrations.CreateModel(
name='Grado',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=255, verbose_name='Nombre')),
('excepcion', models.BooleanField(default=False, verbose_name='Posee más de un grado superior')),
('escalafon', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='grados.Escalafon', verbose_name='Escalafón')),
('grado_superior', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='grados.Grado', verbose_name='Grado Superior')),
],
options={
'verbose_name': 'Grado',
'verbose_name_plural': 'Grados',
},
),
migrations.CreateModel(
name='Rango',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=255, verbose_name='Nombre')),
('color', colorfield.fields.ColorField(default='#FF0000', max_length=18)),
],
options={
'verbose_name': 'Rango',
'verbose_name_plural': 'Rangos',
},
),
migrations.AddField(
model_name='escalafon',
name='rango',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='grados.Rango', verbose_name='Rango'),
),
]
| gpl-3.0 | -1,061,136,195,948,800,800 | 39.034483 | 175 | 0.55814 | false |
tony/django-docutils | django_docutils/lib/directives/__init__.py | 1 | 1172 | from django.utils.module_loading import import_string
from docutils.parsers.rst import directives
from ..settings import BASED_LIB_RST
def register_based_directives():
"""Register all directives, exists to avoid race conditions.
    Sometimes stuff like publish_parts can be run from command-line functions
    or tests. There are also ways we could avoid this by placing it in __init__
    of django_docutils.lib, but that's a bit implicit. Investigate that later.
    In order to make this work across django projects, let's use django
    settings to register them.
Why? Not all django projects want code highlighting (which requires
pygments). Let's use a TEMPLATES-style django config::
BASED_LIB_RST = {
'directives': { #: directive-name: Directive class (import string)
'code-block': 'django_docutils.lib.directives.pygments.CodeBlock'
}
}
"""
if not BASED_LIB_RST:
return
if 'directives' in BASED_LIB_RST:
for dir_name, dir_cls_str in BASED_LIB_RST['directives'].items():
class_ = import_string(dir_cls_str)
directives.register_directive(dir_name, class_)
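# Illustrative call site (hypothetical): run once at startup, before any
# docutils rendering happens, e.g.
#
#   register_based_directives()
#   parts = docutils.core.publish_parts(source, writer_name='html')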
| mit | 1,619,159,871,801,524,000 | 34.515152 | 78 | 0.68942 | false |
Abogical/borg-gtk | src/filetree.py | 1 | 11077 | import common, enums
import borg, importlib
importlib.import_module('.helpers', 'borg')
importlib.import_module('.fuse', 'borg')
import os, stat, pwd, grp, gi, collections
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, Gio, GObject
def toggle_hide_column(checkbox, column):
column.set_visible(checkbox.get_active())
class FileModel(Gtk.TreeStore):
metadata_cache_left = 128
def unref_node(self, iterator):
        if self[iterator][0] is not None:
self.metadata_cache_left += 1
self[iterator][0] = None
return super().unref_node(iterator)
def name_and_id(name, name_id):
    if name is not None:
return '%s (%s)' % (name, name_id)
else:
return str(name_id)
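# e.g. name_and_id('root', 0) -> 'root (0)'; name_and_id(None, 0) -> '0'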
def size_data_func(col, cell, model, iterator, index):
cell.set_property('text', borg.helpers.format_file_size(model[iterator][index]))
class BaseFileTree:
cache = borg.fuse.ItemCache()
def icon_data_func(self, col, cell, model, iterator, data):
if model.iter_has_child(iterator):
return cell.set_property('icon-name', 'folder')
metadata = self._load_metadata(iterator)
if stat.S_ISDIR(metadata[b'mode']):
return cell.set_property('icon-name', 'folder')
cell.set_property('gicon',
Gio.content_type_get_icon(Gio.content_type_guess(metadata[b'name'])[0]))
def name_data_func(self, col, cell, model, iterator, data):
cell.set_property('text', self._load_metadata(iterator)[b'name'])
def mode_data_func(self, col, cell, model, iterator, data):
cell.set_property('text', stat.filemode(self._load_metadata(iterator)[b'mode']))
def time_data_func(self, col, cell, model, iterator, key):
metadata = self._load_metadata(iterator)
cell.set_property('text',
borg.helpers.format_time(borg.helpers.safe_timestamp(metadata[key])))
def status_data_func(self, col, cell, model, iterator, data):
cell.set_property('icon-name',
[None, 'emblem-synchronizing', 'process-completed-symbolic',
'process-error-symbolic'][model[iterator][self.status_column_index]])
def _set_data_func(self, col, func, data=None):
getattr(self, col + '_column').set_cell_data_func(
self.builder.get_object(col + '_cellrenderer'), func, data)
def __init__(self, status):
self.status = status
self.builder = Gtk.Builder.new_from_file('../data/ui/filetree.ui')
def _set_from_builder(attr):
setattr(self, attr, self.builder.get_object(attr))
_set_from_builder('grid')
_set_from_builder('treeview')
_set_from_builder('checkbutton_grid')
_set_from_builder('name_column')
signal_set = {}
def _set_column(col):
col = col + '_column'
obj = self.builder.get_object(col)
setattr(self, col, obj)
signal_set['toggle_'+col] = (toggle_hide_column, obj)
self.name_column.set_cell_data_func(self.builder.get_object('name_cellrenderer'),
self.name_data_func)
self.name_column.set_cell_data_func(self.builder.get_object('icon_cellrenderer'),
self.icon_data_func)
_set_column('size')
self._set_data_func('size', size_data_func, 2)
_set_column('mode')
self._set_data_func('mode', self.mode_data_func)
_set_column('user')
_set_column('group')
_set_column('atime')
self._set_data_func('atime', self.time_data_func, b'atime')
_set_column('mtime')
self._set_data_func('mtime', self.time_data_func, b'mtime')
_set_column('ctime')
self._set_data_func('ctime', self.time_data_func, b'ctime')
self.builder.connect_signals(signal_set)
store_cols = [GObject.TYPE_PYOBJECT, GObject.TYPE_INT64, GObject.TYPE_INT64]
store_cols += self._setup_cols()
del self.builder
if status:
self.status_column_index = len(store_cols)
store_cols.append(int)
self.status_cellrenderer = Gtk.CellRendererPixbuf()
self.status_column = Gtk.TreeViewColumn('Status', self.status_cellrenderer)
self.status_column.set_sizing(Gtk.TreeViewColumnSizing.FIXED)
            self.treeview.insert_column(self.status_column, -1)
            self.status_column.set_cell_data_func(self.status_cellrenderer, self.status_data_func)
self.status_checkbutton = Gtk.CheckButton('Status')
self.status_checkbutton.connect('toggled', toggle_hide_column, self.status_column)
self.status_checkbutton.set_active(True)
self.status_checkbutton.show()
self.checkbutton_grid.insert_row(0)
self.checkbutton_grid.attach(self.status_checkbutton, 0, 0, 2, 1)
self.model = FileModel(*store_cols)
self.treeview.set_model(self.model)
def append(self, parent, arr, metadata, size):
prepend_arr = [None, self.cache.add(metadata), size]
if self.model.metadata_cache_left > 0:
prepend_arr[0] = metadata
self.model.metadata_cache_left -= 1
if self.status:
arr.append(enums.ADDED)
return self.model.append(parent, prepend_arr + arr)
def set_error_status(self, iterator):
self.model[iterator][self.status_column_index] = enums.ERROR
parent_iter = self.model.iter_parent(iterator)
if parent_iter != None:
self.set_error_status(parent_iter)
def _load_metadata(self, iterator):
if self.model[iterator][0] == None:
metadata = self.cache.get(self.model[iterator][1])
for key, value in metadata.items():
if isinstance(value, bytes):
value = value.decode()
metadata[key] = value
self.model[iterator][0] = metadata
self.model.metadata_cache_left -= 1
return self.model[iterator][0]
def get_iter_from_string(self, treepath):
if treepath == None:
return None
return self.model.get_iter_from_string(treepath)
def set_processing_status_treepath(self, treepath):
self.model[self.get_iter_from_string(treepath)][self.status_column_index] = enums.PROCESSING
def set_done_status_treepath(self, treepath):
self.model[self.get_iter_from_string(treepath)][self.status_column_index] = enums.DONE
def set_error_status_treepath(self, treepath):
self.set_error_status(self.get_iter_from_string(treepath))
class FileTree(BaseFileTree):
def file_user_data_func(self, col, cell, model, iterator, data):
uid = self._load_metadata(iterator)[b'uid']
cell.set_property('text', name_and_id(pwd.getpwuid(uid)[0], uid))
def file_group_data_func(self, col, cell, model, iterator, data):
gid = self._load_metadata(iterator)[b'gid']
cell.set_property('text', name_and_id(grp.getgrgid(gid)[0], gid))
def _setup_cols(self):
self._set_data_func('user', self.file_user_data_func)
self._set_data_func('group', self.file_group_data_func)
return []
def _update_parent_size(self, iterator, size):
self.model[iterator][2] += size
parent_iter = self.model.iter_parent(iterator)
if parent_iter != None:
self._update_parent_size(parent_iter, size)
def append(self, parent_treepath, metadata, size):
iterator = super().append(self.get_iter_from_string(parent_treepath), [], metadata, 0)
self._update_parent_size(iterator, size)
return iterator
class ArchiveTree(BaseFileTree):
def archive_user_data_func(self, col, cell, model, iterator, data):
metadata = self._load_metadata(iterator)
cell.set_property('text', name_and_id(metadata[b'user'], metadata[b'uid']))
def archive_group_data_func(self, col, cell, model, iterator, data):
metadata = self._load_metadata(iterator)
cell.set_property('text', name_and_id(metadata[b'group'], metadata[b'gid']))
def _setup_cols(self):
self._set_data_func('user', self.archive_user_data_func)
self._set_data_func('group', self.archive_group_data_func)
self.csize_cellrenderer = Gtk.CellRendererText()
self.csize_column = Gtk.TreeViewColumn('Compressed size', self.csize_cellrenderer)
self.csize_column.set_cell_data_func(self.csize_cellrenderer, size_data_func, 3)
self.csize_column.set_sizing(Gtk.TreeViewColumnSizing.FIXED)
self.treeview.insert_column(self.csize_column, 3)
self.csize_checkbutton = Gtk.CheckButton('Compressed size')
self.csize_checkbutton.connect('toggled', toggle_hide_column, self.csize_column)
self.csize_checkbutton.set_active(True)
self.csize_checkbutton.show()
self.checkbutton_grid.attach(self.csize_checkbutton, 1, 0, 1, 1)
self.chunks_cellrenderer = Gtk.CellRendererText()
self.chunks_column = Gtk.TreeViewColumn('Chunks', self.chunks_cellrenderer, text=5)
self.chunks_column.set_sizing(Gtk.TreeViewColumnSizing.FIXED)
self.chunks_column.set_visible(False)
self.treeview.insert_column(self.chunks_column, -1)
self.chunks_checkbox = Gtk.CheckButton('Chunks')
self.chunks_checkbox.connect('toggled', toggle_hide_column, self.chunks_column)
self.chunks_checkbox.show()
self.checkbutton_grid.attach(self.chunks_checkbox, 0, 4, 1, 1)
self.uchunks_cellrenderer = Gtk.CellRendererText()
self.uchunks_column = Gtk.TreeViewColumn('Unique chunks', self.uchunks_cellrenderer,
text=4)
self.uchunks_column.set_sizing(Gtk.TreeViewColumnSizing.FIXED)
self.uchunks_column.set_visible(False)
self.treeview.insert_column(self.uchunks_column, -1)
self.uchunks_checkbox = Gtk.CheckButton('Unique chunks')
self.uchunks_checkbox.connect('toggled', toggle_hide_column, self.uchunks_column)
self.uchunks_checkbox.show()
self.checkbutton_grid.attach(self.uchunks_checkbox, 1, 4, 1, 1)
return [GObject.TYPE_INT64]*3 + [GObject.TYPE_PYOBJECT]
def _update_parent_totals(self, iterator, size, csize):
self.model[iterator][2] += size
self.model[iterator][3] += csize
''' This doesn't work
iterator_common_chunks = self.model[iterator][6]
for common_key in iterator_common_chunks.keys() & common_chunks.keys():
iterator_common_chunks[common_key] -= 1
common_chunks[common_key] = iterator_common_chunks[common_key]
if iterator_common_chunks[common_key] == 1:
del iterator_common_chunks[common_key]
del common_chunks[common_key]
uchunks += 1
self.model[iterator][4] += uchunks
iterator_common_chunks.update(common_chunks)
self.model[iterator][6] = iterator_common_chunks
self.model[iterator][5] = self.model[iterator][4] + len(iterator_common_chunks)
'''
parent_iter = self.model.iter_parent(iterator)
if parent_iter != None:
self._update_parent_totals(parent_iter, size, csize)
def append(self, parent_treepath, path, chunks, metadata):
total_size, total_csize, total_uchunks, total_chunks = 0,0,0,0
parent_iter = self.get_iter_from_string(parent_treepath)
if chunks != None:
# Stores a list of non-unique chunks, to calculate parent chunks and uchunks
# common_chunks = {}
for chunk_id, size, csize in chunks:
total_chunks += 1
total_size += size
total_csize += csize
'''
chunk_refcount = common.cur_rep_cache.chunks[chunk_id][0]
if chunk_refcount == 1:
total_uchunks += 1
else:
common_chunks[chunk_id] = chunk_refcount
'''
if parent_iter != None:
self._update_parent_totals(parent_iter, total_size, total_csize)
iterator = super().append(parent_iter, [total_csize, total_uchunks, total_chunks, None], metadata, total_size)
'''
if stat.S_ISDIR(metadata[b'mode']):
self.model[iterator][6] = {}
'''
| bsd-2-clause | -472,546,436,021,843,840 | 36.296296 | 112 | 0.701363 | false |
twilio/twilio-python | tests/integration/chat/v1/test_service.py | 1 | 10285 | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class ServiceTestCase(IntegrationTestCase):
def test_fetch_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.chat.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.holodeck.assert_has_request(Request(
'get',
'https://chat.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))
def test_fetch_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"consumption_report_interval": 100,
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"default_channel_creator_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_channel_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_service_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"limits": {
"actions_per_second": 20,
"channel_members": 100,
"user_channels": 250
},
"links": {},
"notifications": {},
"post_webhook_url": "post_webhook_url",
"pre_webhook_url": "pre_webhook_url",
"reachability_enabled": false,
"read_status_enabled": false,
"sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"typing_indicator_timeout": 100,
"url": "http://www.example.com",
"webhook_filters": [
"webhook_filters"
],
"webhook_method": "webhook_method",
"webhooks": {}
}
'''
))
actual = self.client.chat.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").fetch()
self.assertIsNotNone(actual)
def test_delete_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.chat.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()
self.holodeck.assert_has_request(Request(
'delete',
'https://chat.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))
def test_delete_response(self):
self.holodeck.mock(Response(
204,
None,
))
actual = self.client.chat.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").delete()
self.assertTrue(actual)
def test_create_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.chat.v1.services.create(friendly_name="friendly_name")
values = {'FriendlyName': "friendly_name", }
self.holodeck.assert_has_request(Request(
'post',
'https://chat.twilio.com/v1/Services',
data=values,
))
def test_create_response(self):
self.holodeck.mock(Response(
201,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"consumption_report_interval": 100,
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"default_channel_creator_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_channel_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_service_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"limits": {
"actions_per_second": 20,
"channel_members": 100,
"user_channels": 250
},
"links": {},
"notifications": {},
"post_webhook_url": "post_webhook_url",
"pre_webhook_url": "pre_webhook_url",
"reachability_enabled": false,
"read_status_enabled": false,
"sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"typing_indicator_timeout": 100,
"url": "http://www.example.com",
"webhook_filters": [
"webhook_filters"
],
"webhook_method": "webhook_method",
"webhooks": {}
}
'''
))
actual = self.client.chat.v1.services.create(friendly_name="friendly_name")
self.assertIsNotNone(actual)
def test_list_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.chat.v1.services.list()
self.holodeck.assert_has_request(Request(
'get',
'https://chat.twilio.com/v1/Services',
))
def test_read_empty_response(self):
self.holodeck.mock(Response(
200,
'''
{
"meta": {
"first_page_url": "https://chat.twilio.com/v1/Services?Page=0&PageSize=50",
"key": "services",
"next_page_url": null,
"page": 0,
"page_size": 0,
"previous_page_url": null,
"url": "https://chat.twilio.com/v1/Services"
},
"services": []
}
'''
))
actual = self.client.chat.v1.services.list()
self.assertIsNotNone(actual)
def test_read_full_response(self):
self.holodeck.mock(Response(
200,
'''
{
"meta": {
"first_page_url": "https://chat.twilio.com/v1/Services?Page=0&PageSize=50",
"key": "services",
"next_page_url": null,
"page": 0,
"page_size": 1,
"previous_page_url": null,
"url": "https://chat.twilio.com/v1/Services"
},
"services": [
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"consumption_report_interval": 100,
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"default_channel_creator_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_channel_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_service_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"limits": {
"actions_per_second": 20,
"channel_members": 100,
"user_channels": 250
},
"links": {},
"notifications": {},
"post_webhook_url": "post_webhook_url",
"pre_webhook_url": "pre_webhook_url",
"reachability_enabled": false,
"read_status_enabled": false,
"sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"typing_indicator_timeout": 100,
"url": "http://www.example.com",
"webhook_filters": [
"webhook_filters"
],
"webhook_method": "webhook_method",
"webhooks": {}
}
]
}
'''
))
actual = self.client.chat.v1.services.list()
self.assertIsNotNone(actual)
def test_update_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.chat.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()
self.holodeck.assert_has_request(Request(
'post',
'https://chat.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
))
def test_update_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"consumption_report_interval": 100,
"date_created": "2015-07-30T20:00:00Z",
"date_updated": "2015-07-30T20:00:00Z",
"default_channel_creator_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_channel_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"default_service_role_sid": "RLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"friendly_name": "friendly_name",
"limits": {
"actions_per_second": 20,
"channel_members": 500,
"user_channels": 600
},
"links": {},
"notifications": {},
"post_webhook_url": "post_webhook_url",
"pre_webhook_url": "pre_webhook_url",
"reachability_enabled": false,
"read_status_enabled": false,
"sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"typing_indicator_timeout": 100,
"url": "http://www.example.com",
"webhook_filters": [
"webhook_filters"
],
"webhook_method": "webhook_method",
"webhooks": {}
}
'''
))
actual = self.client.chat.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update()
self.assertIsNotNone(actual)
| mit | 3,698,696,757,865,667,600 | 35.863799 | 97 | 0.49334 | false |
mikrosimage/rez | src/rezplugins/build_system/cmake.py | 1 | 11253 | """
CMake-based build system
"""
from rez.build_system import BuildSystem
from rez.build_process_ import BuildType
from rez.resolved_context import ResolvedContext
from rez.exceptions import BuildSystemError
from rez.util import create_forwarding_script
from rez.packages_ import get_developer_package
from rez.utils.platform_ import platform_
from rez.config import config
from rez.backport.shutilwhich import which
from rez.vendor.schema.schema import Or
from rez.shells import create_shell
import functools
import os.path
import sys
import os
class RezCMakeError(BuildSystemError):
pass
class CMakeBuildSystem(BuildSystem):
"""The CMake build system.
The 'cmake' executable is run within the build environment. Rez supplies a
library of cmake macros in the 'cmake_files' directory; these are added to
cmake's searchpath and are available to use in your own CMakeLists.txt
file.
The following CMake variables are available:
- REZ_BUILD_TYPE: One of 'local', 'central'. Describes whether an install
is going to the local packages path, or the release packages path.
- REZ_BUILD_INSTALL: One of 0 or 1. If 1, an installation is taking place;
if 0, just a build is occurring.
"""
build_systems = {'eclipse': "Eclipse CDT4 - Unix Makefiles",
'codeblocks': "CodeBlocks - Unix Makefiles",
'make': "Unix Makefiles",
'nmake': "NMake Makefiles",
'xcode': "Xcode"}
build_targets = ["Debug", "Release", "RelWithDebInfo"]
schema_dict = {
"build_target": Or(*build_targets),
"build_system": Or(*build_systems.keys()),
"cmake_args": [basestring],
"cmake_binary": Or(None, basestring),
"make_binary": Or(None, basestring)}
@classmethod
def name(cls):
return "cmake"
@classmethod
def child_build_system(cls):
return "make"
@classmethod
def is_valid_root(cls, path):
return os.path.isfile(os.path.join(path, "CMakeLists.txt"))
@classmethod
def bind_cli(cls, parser):
settings = config.plugins.build_system.cmake
parser.add_argument("--bt", "--build-target", dest="build_target",
type=str, choices=cls.build_targets,
default=settings.build_target,
help="set the build target (default: %(default)s).")
parser.add_argument("--bs", "--build-system", dest="build_system",
type=str, choices=cls.build_systems.keys(),
default=settings.build_system,
help="set the cmake build system (default: %(default)s).")
def __init__(self, working_dir, opts=None, write_build_scripts=False,
verbose=False, build_args=[], child_build_args=[]):
super(CMakeBuildSystem, self).__init__(
working_dir,
opts=opts,
write_build_scripts=write_build_scripts,
verbose=verbose,
build_args=build_args,
child_build_args=child_build_args)
self.settings = self.package.config.plugins.build_system.cmake
self.build_target = (opts and opts.build_target) or \
self.settings.build_target
self.cmake_build_system = (opts and opts.build_system) or \
self.settings.build_system
if self.cmake_build_system == 'xcode' and platform_.name != 'osx':
raise RezCMakeError("Generation of Xcode project only available "
"on the OSX platform")
def build(self, context, variant, build_path, install_path, install=False,
build_type=BuildType.local):
def _pr(s):
if self.verbose:
print s
# find cmake binary
if self.settings.cmake_binary:
exe = self.settings.cmake_binary
else:
exe = context.which("cmake", fallback=True)
if not exe:
raise RezCMakeError("could not find cmake binary")
found_exe = which(exe)
if not found_exe:
raise RezCMakeError("cmake binary does not exist: %s" % exe)
sh = create_shell()
# assemble cmake command
cmd = [found_exe, "-d", self.working_dir]
cmd += (self.settings.cmake_args or [])
cmd += (self.build_args or [])
cmd.append("-DCMAKE_INSTALL_PREFIX=%s" % install_path)
cmd.append("-DCMAKE_MODULE_PATH=%s" %
sh.get_key_token("CMAKE_MODULE_PATH").replace('\\', '/'))
cmd.append("-DCMAKE_BUILD_TYPE=%s" % self.build_target)
cmd.append("-DREZ_BUILD_TYPE=%s" % build_type.name)
cmd.append("-DREZ_BUILD_INSTALL=%d" % (1 if install else 0))
cmd.extend(["-G", self.build_systems[self.cmake_build_system]])
if config.rez_1_cmake_variables and \
not config.disable_rez_1_compatibility and \
build_type == BuildType.central:
cmd.append("-DCENTRAL=1")
# execute cmake within the build env
_pr("Executing: %s" % ' '.join(cmd))
if not os.path.abspath(build_path):
build_path = os.path.join(self.working_dir, build_path)
build_path = os.path.realpath(build_path)
callback = functools.partial(self._add_build_actions,
context=context,
package=self.package,
variant=variant,
build_type=build_type)
# run the build command and capture/print stderr at the same time
retcode, _, _ = context.execute_shell(command=cmd,
block=True,
cwd=build_path,
actions_callback=callback)
ret = {}
if retcode:
ret["success"] = False
return ret
if self.write_build_scripts:
# write out the script that places the user in a build env, where
# they can run make directly themselves.
build_env_script = os.path.join(build_path, "build-env")
create_forwarding_script(build_env_script,
module=("build_system", "cmake"),
func_name="_FWD__spawn_build_shell",
working_dir=self.working_dir,
build_dir=build_path,
variant_index=variant.index)
ret["success"] = True
ret["build_env_script"] = build_env_script
return ret
# assemble make command
if self.settings.make_binary:
cmd = [self.settings.make_binary]
else:
cmd = ["make"]
cmd += (self.child_build_args or [])
# nmake has no -j
if self.settings.make_binary != 'nmake':
if not any(x.startswith("-j") for x in (self.child_build_args or [])):
n = variant.config.build_thread_count
cmd.append("-j%d" % n)
# execute make within the build env
_pr("\nExecuting: %s" % ' '.join(cmd))
retcode, _, _ = context.execute_shell(command=cmd,
block=True,
cwd=build_path,
actions_callback=callback)
if not retcode and install and "install" not in cmd:
cmd.append("install")
# execute make install within the build env
_pr("\nExecuting: %s" % ' '.join(cmd))
retcode, _, _ = context.execute_shell(command=cmd,
block=True,
cwd=build_path,
actions_callback=callback)
ret["success"] = (not retcode)
return ret
@staticmethod
def _add_build_actions(executor, context, package, variant, build_type):
settings = package.config.plugins.build_system.cmake
cmake_path = os.path.join(os.path.dirname(__file__), "cmake_files")
template_path = os.path.join(os.path.dirname(__file__), "template_files")
executor.env.CMAKE_MODULE_PATH.append(cmake_path.replace('\\', '/'))
executor.env.REZ_BUILD_DOXYFILE = os.path.join(template_path, 'Doxyfile')
executor.env.REZ_BUILD_VARIANT_INDEX = variant.index or 0
executor.env.REZ_BUILD_THREAD_COUNT = package.config.build_thread_count
# build always occurs on a filesystem package, thus 'filepath' attribute
# exists. This is not the case for packages in general.
executor.env.REZ_BUILD_PROJECT_FILE = package.filepath
executor.env.REZ_BUILD_PROJECT_VERSION = str(package.version)
executor.env.REZ_BUILD_PROJECT_NAME = package.name
executor.env.REZ_BUILD_PROJECT_DESCRIPTION = \
(package.description or '').strip()
executor.env.REZ_BUILD_REQUIRES_UNVERSIONED = \
' '.join(x.name for x in context.requested_packages(True))
executor.env.REZ_BUILD_INSTALL_PYC = '1' if settings.install_pyc else '0'
if config.rez_1_environment_variables and \
not config.disable_rez_1_compatibility and \
build_type == BuildType.central:
executor.env.REZ_IN_REZ_RELEASE = 1
def _FWD__spawn_build_shell(working_dir, build_dir, variant_index):
# This spawns a shell that the user can run 'make' in directly
context = ResolvedContext.load(os.path.join(build_dir, "build.rxt"))
package = get_developer_package(working_dir)
variant = package.get_variant(variant_index)
config.override("prompt", "BUILD>")
callback = functools.partial(CMakeBuildSystem._add_build_actions,
context=context,
package=package,
variant=variant,
build_type=BuildType.local)
retcode, _, _ = context.execute_shell(block=True,
cwd=build_dir,
actions_callback=callback)
sys.exit(retcode)
def register_plugin():
return CMakeBuildSystem
# Copyright 2013-2016 Allan Johns.
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
| lgpl-3.0 | 6,769,576,808,462,202,000 | 41.146067 | 86 | 0.570603 | false |
20tab/twentytab-gmapsmarkers | gmapsmarkers/fields.py | 1 | 2307 | from . import conf
from django.conf import settings
from django.db import models
from django import forms
from gmapsmarkers.widgets import GmapsSelectAutocomplete, GeotypeSelect
class GmapsField(models.CharField):
def __init__(self, *args, **kwargs):
kwargs['max_length'] = kwargs.pop("max_length", 250)
self.plugin_options = kwargs.pop("plugin_options", {})
self.select2_options = kwargs.pop("select2_options", {})
self.language_code = kwargs.pop(
"language_code", settings.GMAPS_LANGUAGE_CODE)
super(GmapsField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
self.plugin_options['gmaps_field_name'] = self.name
defaults = {
'form_class': GmapsFormField,
'plugin_options': self.plugin_options,
'select2_options': self.select2_options,
'language_code': self.language_code
}
defaults.update(kwargs)
return super(GmapsField, self).formfield(**defaults)
class GmapsFormField(forms.CharField):
def __init__(self, plugin_options={}, select2_options={},
language_code=settings.GMAPS_LANGUAGE_CODE, *args, **kwargs):
kwargs.update({'widget': GmapsSelectAutocomplete(
plugin_options=plugin_options, select2_options=select2_options,
language_code=language_code
)})
super(GmapsFormField, self).__init__(*args, **kwargs)
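# Illustrative model usage (hypothetical Place model; the option values shown
# are placeholders, not a documented API):
#
#   class Place(models.Model):
#       address = GmapsField(plugin_options={'zoom': 6})
#       geo_type = GeotypeField()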
class GeotypeField(models.CharField):
def __init__(self, *args, **kwargs):
kwargs['max_length'] = kwargs.pop("max_length", 250)
super(GeotypeField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
defaults = {
'form_class': GeotypeFormField
}
defaults.update(kwargs)
return super(GeotypeField, self).formfield(**defaults)
class GeotypeFormField(forms.CharField):
def __init__(self, *args, **kwargs):
kwargs.update({'widget': GeotypeSelect})
super(GeotypeFormField, self).__init__(*args, **kwargs)
# Fix field for South
try:
from south.modelsinspector import add_introspection_rules
add_introspection_rules([], ["^gmapsmarkers\.fields\.GmapsField"])
add_introspection_rules([], ["^gmapsmarkers\.fields\.GeotypeField"])
except:
pass
| mit | 1,255,904,685,461,706,200 | 33.432836 | 78 | 0.641959 | false |
dreucifer/chargenstart | vehicle_lookup/models.py | 1 | 2049 | import uuid
from sqlalchemy import Column, Integer, String, ForeignKey, Table, Text
from sqlalchemy.orm import relationship, backref
import database as db
from core.types import GUID
class Level():
id_ = Column(Integer, primary_key=True)
name = Column(String)
def __unicode__(self):
return "%s" % self.name
@property
def serialize(self):
return {'Name': self.name, 'ID': self.id_}
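# Level is a mixin: each concrete table below inherits from both Level and
# db.Base, sharing the id_/name columns and the serialize property, e.g.
# (illustrative) Make(name='Ford').serialize -> {'Name': 'Ford', 'ID': <pk>}.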
make_type = Table('make_type', db.Base.metadata,
Column('make_id', Integer, ForeignKey('makes.id_'), primary_key=True),
Column('type_id', Integer, ForeignKey('types.id_'), primary_key=True))
class Make(Level, db.Base):
__tablename__ = 'makes'
types = relationship('Type', secondary=make_type, lazy='dynamic')
class Type(Level, db.Base):
__tablename__ = 'types'
modelyear = Table('modelyear', db.Base.metadata,
Column('model_id', Integer, ForeignKey('models.id_'), primary_key=True),
Column('year_id', Integer, ForeignKey('years.id_'), primary_key=True))
class ModelYearEngine(Level, db.Base):
__tablename__ = 'modelyearengine'
model_id = Column(Integer, ForeignKey('models.id_'))
year_id = Column(Integer, ForeignKey('years.id_'))
engine_id = Column(Integer, ForeignKey('engines.id_'))
engine = relationship('Engine')
model = relationship('Model', uselist=False)
year = relationship('Year', uselist=False)
class Model(Level, db.Base):
__tablename__ = 'models'
type_id = Column(Integer, ForeignKey('types.id_'), nullable=False)
make_id = Column(Integer, ForeignKey('makes.id_'), nullable=False)
make = relationship('Make', uselist=False)
type_ = relationship('Type', uselist=False)
years = relationship('Year', secondary=modelyear, lazy='dynamic')
engines = relationship('ModelYearEngine')
class Year(Level, db.Base):
__tablename__ = 'years'
class Engine(Level, db.Base):
__tablename__ = 'engines'
class Vehicle(Level, db.Base):
__tablename__ = 'vehicles'
guid = Column(GUID(), primary_key=True)
| mit | 7,943,113,298,090,685,000 | 29.132353 | 80 | 0.662274 | false |
twitter/heron | heron/shell/src/python/handlers/filestatshandler.py | 2 | 2386 | #!/usr/bin/env python3
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
''' filestatshandler.py '''
import json
import os
import stat
import tornado.web
from heron.shell.src.python import utils
class FileStatsHandler(tornado.web.RequestHandler):
"""
Get the file stats in JSON format given the path.
"""
@tornado.web.asynchronous
def get(self, path):
''' get method '''
path = tornado.escape.url_unescape(path)
if not path:
path = "."
# User should not be able to access anything outside
# of the dir that heron-shell is running in. This ensures
# sandboxing. So we don't allow absolute paths and parent
# accessing.
if not utils.check_path(path):
self.write("Only relative paths are allowed")
self.set_status(403)
self.finish()
return
listing = utils.get_listing(path)
file_stats = {}
for fn in listing:
try:
is_dir = False
formatted_stat = utils.format_prefix(fn, utils.get_stat(path, fn))
if stat.S_ISDIR(utils.get_stat(path, fn).st_mode):
is_dir = True
file_stats[fn] = {
"formatted_stat": formatted_stat,
"is_dir": is_dir,
"path": tornado.escape.url_escape(os.path.join(path, fn)),
}
if fn == "..":
path_fragments = path.split("/")
if not path_fragments:
file_stats[fn]["path"] = "."
else:
file_stats[fn]["path"] = tornado.escape.url_escape("/".join(path_fragments[:-1]))
except:
continue
self.write(json.dumps(file_stats))
self.finish()
| apache-2.0 | 1,211,499,568,843,792,600 | 31.684932 | 93 | 0.650042 | false |
c-oreills/before_after | before_after/__init__.py | 1 | 1517 | """Package for before_after."""
__project__ = 'before_after'
__version__ = '1.0.1'
VERSION = __project__ + '-' + __version__
PYTHON_VERSION = 2, 7
import sys
if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test)
exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
from contextlib import contextmanager
from functools import wraps
def before(target, fn, **kwargs):
return before_after(target, before_fn=fn, **kwargs)
def after(target, fn, **kwargs):
return before_after(target, after_fn=fn, **kwargs)
@contextmanager
def before_after(
target, before_fn=None, after_fn=None, once=True, **kwargs):
def before_after_wrap(fn):
called = []
@wraps(fn)
def inner(*a, **k):
# If once is True, then don't call if this function has already
# been called
if once:
if called:
return fn(*a, **k)
else:
                    # Hack for lack of nonlocal keyword in Python 2: append
                    # to the list to make `called` truthy
called.append(True)
if before_fn:
before_fn(*a, **k)
ret = fn(*a, **k)
if after_fn:
after_fn(*a, **k)
return ret
return inner
from mock import patch
patcher = patch(target, **kwargs)
original, _ = patcher.get_original()
patcher.new = before_after_wrap(original)
with patcher:
yield
| gpl-2.0 | 7,272,649,427,314,903,000 | 24.711864 | 78 | 0.550428 | false |
allthroughthenight/aces | python/functions/WADJ.py | 1 | 2758 | import math
import numpy as np
from WAGEOS import WAGEOS
from WAPBL import WAPBL
from WASBL import WASBL
# Perform wind adjustments
# INPUT
# uobs: observed windspeed
# zobs: elevation of wind observation
# delt: air-sea temperature difference
# F: fetch length
# tobs: duration of wind observation
# tfin: duration of final desired windspeed
# latt: Latitude of wind observation
# obstyp: Type of wind observation
# 1 = overwater (shipboard)
# 2 = overwater (not shipboard)
# 3 = at shore (off to onshore)
# 4 = at shore (on to offshore)
# 5 = over land
# 6 = geostrophic wind
# OUTPUT
# ue: equivalent neutral windspeed at 10 m elevation and at desired final
# duration
# error: message indication non-convergence
def WADJ(uobs, zobs, delt, F, tobs, tfin, latt, obstyp):
m2cm = 100.0
if obstyp == 1:
#Ship-based wind observations over water
u = 1.864*uobs**(7.0/9.0)
u10m = WASBL(u*m2cm,delt, zobs*m2cm)
u10m = u10m / m2cm
elif obstyp == 2 or obstyp == 3:
#Wind observation over water (not ship-based) or at the shoreline
#(wind direction from offshore to onshore)
u10m = WASBL(uobs*m2cm, delt, zobs*m2cm)
u10m = u10m / m2cm
elif obstyp == 4 or obstyp == 5:
#Winds over land or at the shoreline (wind direction from onshore
#to offshore)
u = WAGEOS(uobs*m2cm, zobs*m2cm, 30)
omega = 7.2921150*10**-5 #Earth's angular velocity (2pi/86164.09)
f = 2*omega*math.sin(latt) #Coriolis force
u10m = WAPBL(u, delt, f, 0, 0)
u10m = u10m / m2cm
elif obstyp == 6:
#Geostrophic winds
omega = 2*math.pi / (24*3600) #Earth's angular velocity
f = 2*omega*math.sin(latt) #Coriolis force
u10m = WAPBL(uobs*m2cm, delt, f, 0, 0)
u10m = u10m / m2cm
ue = u10m
if F < 16000.0:
ue *= 0.9
    if tobs <= 1.0:
        print("Error: Observed windspeed duration must be > 1 s.")
        return
elif np.isclose(tobs, 3600.0):
u3600 = ue
elif tobs < 3600.0:
eqshrt = 1.277 + 0.296*math.tanh(0.9*math.log10(45.0/tobs))
u3600 = ue / eqshrt
elif tobs > 3600.0:
eqlong = -0.15*math.log10(tobs) + 1.5334
u3600 = ue/eqlong
if tfin <= 1.0:
print("Error: Final windspeed duration must be > 1 s.")
return
elif np.isclose(tfin, 3600.0):
ue = u3600
elif tfin < 3600.0:
eqshrt = 1.277 + 0.296*math.tanh(0.9*math.log10(45.0/tfin))
ue = u3600*eqshrt
elif tfin > 3600.0:
eqlong = -0.15*math.log10(tfin) + 1.5334
ue = u3600*eqlong
return ue | gpl-3.0 | -1,940,398,384,674,519,300 | 30 | 75 | 0.58847 | false |
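

if __name__ == '__main__':
    # Illustrative call (added for clarity; every number below is a made-up
    # test value). Latitude is in radians because latt feeds math.sin
    # directly: adjust a 20 m/s wind observed at 25 m over water (obstyp 2)
    # with a 3 deg C air-sea difference and 20 km fetch, converting a
    # 10-minute observation to an equivalent 1-hour windspeed at ~30 deg N.
    print(WADJ(20.0, 25.0, 3.0, 20000.0, 600.0, 3600.0, 0.5236, 2))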
baalansellers/OrbNotifier | colorificworker.py | 1 | 4923 | import pexpect, numpy
import threading, Queue, time
SLEEP_SEC = 0.001
def grange(start, stop, step):
r = start
while r < stop:
yield r
r += step
def npindex(alist, target):
i = 0
for x in alist:
if numpy.array_equal(x, target): return i
i += 1
return None
class ColorificWorker(threading.Thread):
RED = numpy.array([255,0,0])
ORANGE = numpy.array([255,102,0])
YELLOW = numpy.array([255,255,0])
GREEN = numpy.array([0,255,0])
AQUA = numpy.array([0,255,255])
SKYBLUE = numpy.array([0,102,255])
BLUE = numpy.array([0,0,255])
PURPLE = numpy.array([102,0,255])
FUCHSIA = numpy.array([255,0,255])
PINK = numpy.array([255,0,102])
    WHITE = numpy.array([0,0,0])  # note: these values are actually black; kept as the original start colour
def __init__(self, notification_q, mac):
super(ColorificWorker, self).__init__()
self.notification_q = notification_q
self.stoprequest = threading.Event()
self.mac = mac
self.currentcolor = ColorificWorker.WHITE
self.faderorder = [ColorificWorker.RED, ColorificWorker.ORANGE, ColorificWorker.YELLOW, ColorificWorker.GREEN, ColorificWorker.AQUA, ColorificWorker.SKYBLUE, ColorificWorker.BLUE, ColorificWorker.PURPLE, ColorificWorker.FUCHSIA, ColorificWorker.PINK]
self.currentfadecolor = ColorificWorker.RED
self.fadertrigger = 0
self.alerton = 0
self.reconnecttrigger = 0
self.bulb_connect()
self.color_shift(ColorificWorker.BLUE)
def run(self):
while not self.stoprequest.isSet():
try:
cmd = self.notification_q.get(True, 0.05)
if cmd == 'alert_mention':
self.alert_mention()
if cmd == 'alert_im':
self.alert_im()
if cmd == 'alert_channel':
self.alert_channel()
except Queue.Empty:
curtime = int(time.time())
if curtime % 60 == 0:
self.fadertrigger = 1
self.reconnecttrigger = 1
else:
if self.reconnecttrigger == 1:
self.gatt.sendline('disconnect')
self.bulb_connect()
if self.fadertrigger == 1 and self.alerton == 0:
self.fadertrigger = 0
self.fader_next()
continue
def close(self, timeout=None):
self.gatt.sendline('disconnect')
self.gatt.sendline('exit')
self.stoprequest.set()
super(ColorificWorker, self).join(timeout)
def bulb_connect(self):
self.gatt = pexpect.spawn('gatttool -I')
self.gatt.sendline('connect {0}'.format(self.mac))
try:
self.gatt.expect(['successful','[CON]'])
        except pexpect.EOF:
print('Received EOF')
self.close()
        except pexpect.TIMEOUT:
print('Timeout Elapsed')
self.close()
def nrgbstr(self, narray):
r, g, b = map(lambda a: int(a), narray)
return self.rgbstr(r, g, b)
def rgbstr(self, r, g, b):
return '{0:02X}{1:02X}{2:02X}'.format(r, g, b)
def color_shift(self, newcolor):
        if self.currentcolor is None:
self.color_change(newcolor)
return
lnvector = newcolor - self.currentcolor
for x in grange(0.05, 1.00, 0.05):
tempcolor = self.currentcolor + lnvector * x
self.gatt.sendline('char-write-cmd 0x0028 580103014000'+self.nrgbstr(tempcolor))
time.sleep(SLEEP_SEC)
self.currentcolor = newcolor
def color_change(self, newcolor):
self.gatt.sendline('char-write-cmd 0x0028 580103014000'+self.nrgbstr(newcolor))
self.currentcolor = newcolor
def fader_next(self):
fi = npindex(self.faderorder, self.currentfadecolor) + 1
if fi == len(self.faderorder): fi = 0
self.color_shift(self.faderorder[fi])
self.currentfadecolor = self.faderorder[fi]
def alert_mention(self):
self.alerton = 1
for x in range(20):
self.color_change(ColorificWorker.RED)
time.sleep(SLEEP_SEC)
self.color_change(ColorificWorker.YELLOW)
self.color_shift(self.currentfadecolor)
self.alerton = 0
def alert_im(self):
self.alerton = 1
for x in range(20):
self.color_change(ColorificWorker.BLUE)
time.sleep(SLEEP_SEC)
self.color_change(ColorificWorker.GREEN)
self.color_shift(self.currentfadecolor)
self.alerton = 0
def alert_channel(self):
self.alerton = 1
for x in range(20):
self.color_change(ColorificWorker.PINK)
time.sleep(SLEEP_SEC)
self.color_change(ColorificWorker.AQUA)
self.color_shift(self.currentfadecolor)
self.alerton = 0
| mit | -7,959,778,125,772,076,000 | 33.426573 | 258 | 0.572415 | false |
eagleamon/home-assistant | homeassistant/components/binary_sensor/zwave.py | 1 | 4553 | """
Interfaces with Z-Wave sensors.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/binary_sensor.zwave/
"""
import logging
import datetime
import homeassistant.util.dt as dt_util
from homeassistant.helpers.event import track_point_in_time
from homeassistant.components import zwave
from homeassistant.components.binary_sensor import (
DOMAIN,
BinarySensorDevice)
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = []
PHILIO = 0x013c
PHILIO_SLIM_SENSOR = 0x0002
PHILIO_SLIM_SENSOR_MOTION = (PHILIO, PHILIO_SLIM_SENSOR, 0)
PHILIO_3_IN_1_SENSOR_GEN_4 = 0x000d
PHILIO_3_IN_1_SENSOR_GEN_4_MOTION = (PHILIO, PHILIO_3_IN_1_SENSOR_GEN_4, 0)
WENZHOU = 0x0118
WENZHOU_SLIM_SENSOR_MOTION = (WENZHOU, PHILIO_SLIM_SENSOR, 0)
WORKAROUND_NO_OFF_EVENT = 'trigger_no_off_event'
DEVICE_MAPPINGS = {
PHILIO_SLIM_SENSOR_MOTION: WORKAROUND_NO_OFF_EVENT,
PHILIO_3_IN_1_SENSOR_GEN_4_MOTION: WORKAROUND_NO_OFF_EVENT,
WENZHOU_SLIM_SENSOR_MOTION: WORKAROUND_NO_OFF_EVENT,
}
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Z-Wave platform for binary sensors."""
if discovery_info is None or zwave.NETWORK is None:
return
node = zwave.NETWORK.nodes[discovery_info[zwave.const.ATTR_NODE_ID]]
value = node.values[discovery_info[zwave.const.ATTR_VALUE_ID]]
value.set_change_verified(False)
# Make sure that we have values for the key before converting to int
if (value.node.manufacturer_id.strip() and
value.node.product_id.strip()):
specific_sensor_key = (int(value.node.manufacturer_id, 16),
int(value.node.product_id, 16),
value.index)
if specific_sensor_key in DEVICE_MAPPINGS:
if DEVICE_MAPPINGS[specific_sensor_key] == WORKAROUND_NO_OFF_EVENT:
# Default the multiplier to 4
re_arm_multiplier = (zwave.get_config_value(value.node,
9) or 4)
add_devices([
ZWaveTriggerSensor(value, "motion",
hass, re_arm_multiplier * 8)
])
return
if value.command_class == zwave.const.COMMAND_CLASS_SENSOR_BINARY:
add_devices([ZWaveBinarySensor(value, None)])
class ZWaveBinarySensor(BinarySensorDevice, zwave.ZWaveDeviceEntity):
"""Representation of a binary sensor within Z-Wave."""
def __init__(self, value, sensor_class):
"""Initialize the sensor."""
self._sensor_type = sensor_class
zwave.ZWaveDeviceEntity.__init__(self, value, DOMAIN)
@property
def is_on(self):
"""Return True if the binary sensor is on."""
return self._value.data
@property
def sensor_class(self):
"""Return the class of this sensor, from SENSOR_CLASSES."""
return self._sensor_type
@property
def should_poll(self):
"""No polling needed."""
return False
class ZWaveTriggerSensor(ZWaveBinarySensor):
"""Representation of a stateless sensor within Z-Wave."""
def __init__(self, value, sensor_class, hass, re_arm_sec=60):
"""Initialize the sensor."""
super(ZWaveTriggerSensor, self).__init__(value, sensor_class)
self._hass = hass
self.re_arm_sec = re_arm_sec
self.invalidate_after = dt_util.utcnow() + datetime.timedelta(
seconds=self.re_arm_sec)
# If it's active make sure that we set the timeout tracker
if value.data:
track_point_in_time(
self._hass, self.async_update_ha_state,
self.invalidate_after)
def value_changed(self, value):
"""Called when a value for this entity's node has changed."""
if self._value.value_id == value.value_id:
self.schedule_update_ha_state()
if value.data:
# only allow this value to be true for re_arm secs
self.invalidate_after = dt_util.utcnow() + datetime.timedelta(
seconds=self.re_arm_sec)
track_point_in_time(
self._hass, self.async_update_ha_state,
self.invalidate_after)
@property
def is_on(self):
"""Return True if movement has happened within the rearm time."""
return self._value.data and \
(self.invalidate_after is None or
self.invalidate_after > dt_util.utcnow())
| apache-2.0 | -4,397,521,756,478,201,300 | 35.717742 | 79 | 0.623984 | false |
jarshwah/flake8_formatter_abspath | travis_pypi_setup.py | 1 | 3771 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Update encrypted deploy password in Travis config file
"""
from __future__ import print_function
import base64
import json
import os
from getpass import getpass
import yaml
from cryptography.hazmat.primitives.serialization import load_pem_public_key
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric.padding import PKCS1v15
try:
from urllib import urlopen
except ImportError:
from urllib.request import urlopen
GITHUB_REPO = 'jarshwah/flake8_formatter_abspath'
TRAVIS_CONFIG_FILE = os.path.join(
os.path.dirname(os.path.abspath(__file__)), '.travis.yml')
def load_key(pubkey):
"""Load public RSA key, with work-around for keys using
incorrect header/footer format.
Read more about RSA encryption with cryptography:
https://cryptography.io/latest/hazmat/primitives/asymmetric/rsa/
"""
try:
return load_pem_public_key(pubkey.encode(), default_backend())
except ValueError:
# workaround for https://github.com/travis-ci/travis-api/issues/196
pubkey = pubkey.replace('BEGIN RSA', 'BEGIN').replace('END RSA', 'END')
return load_pem_public_key(pubkey.encode(), default_backend())
def encrypt(pubkey, password):
"""Encrypt password using given RSA public key and encode it with base64.
The encrypted password can only be decrypted by someone with the
private key (in this case, only Travis).
"""
key = load_key(pubkey)
encrypted_password = key.encrypt(password, PKCS1v15())
return base64.b64encode(encrypted_password)
def fetch_public_key(repo):
"""Download RSA public key Travis will use for this repo.
Travis API docs: http://docs.travis-ci.com/api/#repository-keys
"""
keyurl = 'https://api.travis-ci.org/repos/{0}/key'.format(repo)
data = json.loads(urlopen(keyurl).read().decode())
if 'key' not in data:
errmsg = "Could not find public key for repo: {}.\n".format(repo)
errmsg += "Have you already added your GitHub repo to Travis?"
raise ValueError(errmsg)
return data['key']
def prepend_line(filepath, line):
"""Rewrite a file adding a line to its beginning.
"""
with open(filepath) as f:
lines = f.readlines()
lines.insert(0, line)
with open(filepath, 'w') as f:
f.writelines(lines)
def load_yaml_config(filepath):
with open(filepath) as f:
return yaml.load(f)
def save_yaml_config(filepath, config):
with open(filepath, 'w') as f:
yaml.dump(config, f, default_flow_style=False)
def update_travis_deploy_password(encrypted_password):
"""Update the deploy section of the .travis.yml file
to use the given encrypted password.
"""
config = load_yaml_config(TRAVIS_CONFIG_FILE)
config['deploy']['password'] = dict(secure=encrypted_password)
save_yaml_config(TRAVIS_CONFIG_FILE, config)
line = ('# This file was autogenerated and will overwrite'
' each time you run travis_pypi_setup.py\n')
prepend_line(TRAVIS_CONFIG_FILE, line)
def main(args):
public_key = fetch_public_key(args.repo)
password = args.password or getpass('PyPI password: ')
update_travis_deploy_password(encrypt(public_key, password.encode()))
print("Wrote encrypted password to .travis.yml -- you're ready to deploy")
if '__main__' == __name__:
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--repo', default=GITHUB_REPO,
help='GitHub repo (default: %s)' % GITHUB_REPO)
parser.add_argument('--password',
help='PyPI password (will prompt if not provided)')
args = parser.parse_args()
main(args)
| mit | 4,381,893,640,900,162,000 | 29.909836 | 79 | 0.680191 | false |
openstack/tempest | releasenotes/source/conf.py | 1 | 8723 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Tempest Release Notes documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 3 17:40:50 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'openstackdocstheme',
'reno.sphinxext',
]
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/tempest'
openstackdocs_bug_project = 'tempest'
openstackdocs_bug_tag = ''
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
copyright = '2016, tempest Developers'
# Release do not need a version number in the title, they
# cover multiple versions.
# The full version, including alpha/beta/rc tags.
release = ''
# The short X.Y version.
version = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'tempestReleaseNotesdoc'
# -- Options for LaTeX output ---------------------------------------------
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'oslo.configReleaseNotes.tex',
     'oslo.config Release Notes Documentation',
'tempest Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'oslo.configreleasenotes',
'tempest Release Notes Documentation',
['tempest Developers'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'tempestReleaseNotes',
'tempest Release Notes Documentation',
     'tempest Developers', 'oslo.configReleaseNotes',
'An OpenStack library for parsing configuration options from the command'
' line and configuration files.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# -- Options for Internationalization output ------------------------------
locale_dirs = ['locale/']
| apache-2.0 | 5,820,930,783,431,054,000 | 32.1673 | 79 | 0.70893 | false |
miettal/pypdp11simulator | test_vm.py | 1 | 2330 | #!/usr/bin/env python
# coding:utf-8
#
# test_vm.py
#
# Author: Hiromasa Ihara (miettal)
# URL: http://miettal.com
# License: MIT License
# Created: 2014-09-13
#
import sys
import os
import glob
import traceback
import re
import six
if six.PY3 :
from io import StringIO
else :
from StringIO import StringIO
import pdp11_vm
from subprocess import *
def itersplit(s, sep=None):
exp = re.compile(r'\s+' if sep is None else re.escape(sep))
pos = 0
while True:
m = exp.search(s, pos)
if not m:
if pos < len(s) or sep is not None:
yield s[pos:]
break
if pos < m.start() or sep is not None:
yield s[pos:m.start()]
pos = m.end()
def test(args) :
vm = pdp11_vm.VM()
vm.debug = StringIO()
vm.load(args)
flag = False
try :
#Popen(["rm", "-f", "a.out"]).wait()
#Popen(["rm", "-f"] + glob.glob("/tmp/atm*")).wait()
vm.run()
except :
pass
finally:
myvm_output = vm.debug.getvalue()
#Popen(["rm", "-f", "a.out"]).wait()
#Popen(["rm", "-f"] + glob.glob("/tmp/atm*")).wait()
p = Popen(["7run", "-m", "-r", "/usr/local/v6root"] + args, stdout=PIPE, stderr=PIPE)
nanarun_output = p.communicate()[1].decode('ascii')
if myvm_output != nanarun_output :
output = ""
for (myvm_line, nanarun_line) in zip(itersplit(myvm_output, '\n'), itersplit(nanarun_output, '\n')) :
output += " "+nanarun_line+"\n"
if myvm_line != nanarun_line:
output += "-"+myvm_line+"\n"
flag = True
print((' '.join(args)+"...")+' ')
if flag :
print("failed")
print(output)
else :
print("success")
if __name__ == '__main__':
test_cfiles = [
{'src':'test1.c', 'args':[],},
{'src':'test2.c', 'args':[],},
{'src':'test3.c', 'args':[],},
{'src':'test4.c', 'args':[],},
{'src':'test4.c', 'args':['arg1', ],},
{'src':'test4.c', 'args':['arg1', 'arg2', ],},
{'src':'test4.c', 'args':['arg1', 'arg2', 'arg3'],},
{'src':'test6.c', 'args':[],},
]
for x in test_cfiles :
bin_filename = x['src'].split('.')[0]
Popen(['v6cc', x['src']]).wait()
Popen(['mv', 'a.out', bin_filename]).wait()
test([bin_filename]+x['args'])
test(['/usr/local/v6root/bin/as', 'write-1.s'])
#test(['/usr/local/v6root/bin/nm', 'a.out'])
sys.exit(0)
| mit | -5,143,977,169,533,136,000 | 22.3 | 105 | 0.545064 | false |
RuiNascimento/krepo | script.module.lambdascrapers/lib/lambdascrapers/sources_ lambdascrapers/en_DebridOnly/2ddl.py | 1 | 6029 | # -*- coding: UTF-8 -*-
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @tantrumdev wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# -Cleaned and Checked on 10-10-2018 by JewBMX in Yoda.
import re,traceback,urllib,urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
from resources.lib.modules import source_utils
from resources.lib.modules import log_utils
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['2ddl.ws']
self.base_link = 'http://2ddl.ws'
self.search_link = '/search/%s/feed/rss2/'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = {'imdb': imdb, 'title': title, 'year': year}
url = urllib.urlencode(url)
return url
except:
failure = traceback.format_exc()
log_utils.log('2DDL - Exception: \n' + str(failure))
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year}
url = urllib.urlencode(url)
return url
except:
failure = traceback.format_exc()
log_utils.log('2DDL - Exception: \n' + str(failure))
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
            if url is None: return
url = urlparse.parse_qs(url)
url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url])
url['title'], url['premiered'], url['season'], url['episode'] = title, premiered, season, episode
url = urllib.urlencode(url)
return url
except:
failure = traceback.format_exc()
log_utils.log('2DDL - Exception: \n' + str(failure))
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
            if url is None: return sources
if debrid.status() is False: raise Exception()
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']
query = '%s S%02dE%02d' % (
data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (
data['title'], data['year'])
query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
url = self.search_link % urllib.quote_plus(query)
url = urlparse.urljoin(self.base_link, url)
html = client.request(url)
posts = client.parseDOM(html, 'item')
hostDict = hostprDict + hostDict
items = []
for post in posts:
try:
t = client.parseDOM(post, 'title')[0]
u = client.parseDOM(post, 'a', ret='href')
s = re.search('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|MB|MiB))', post)
s = s.groups()[0] if s else '0'
items += [(t, i, s) for i in u]
except:
pass
for item in items:
try:
url = item[1]
if any(x in url for x in ['.rar', '.zip', '.iso']): raise Exception()
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
valid, host = source_utils.is_host_valid(url, hostDict)
if not valid: raise Exception()
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
name = item[0]
name = client.replaceHTMLCodes(name)
t = re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*|3D)(\.|\)|\]|\s|)(.+|)', '', name, flags=re.I)
if not cleantitle.get(t) == cleantitle.get(title): raise Exception()
y = re.findall('[\.|\(|\[|\s](\d{4}|S\d*E\d*|S\d*)[\.|\)|\]|\s]', name)[-1].upper()
if not y == hdlr: raise Exception()
quality, info = source_utils.get_release_quality(name, url)
try:
size = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+)\s*(?:GB|GiB|MB|MiB))', item[2])[-1]
div = 1 if size.endswith(('GB', 'GiB')) else 1024
size = float(re.sub('[^0-9|/.|/,]', '', size)) / div
size = '%.2f GB' % size
info.append(size)
except:
pass
info = ' | '.join(info)
sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'info': info,
'direct': False, 'debridonly': True})
except:
pass
check = [i for i in sources if not i['quality'] == 'CAM']
if check: sources = check
return sources
except:
failure = traceback.format_exc()
log_utils.log('2DDL - Exception: \n' + str(failure))
return sources
def resolve(self, url):
return url | gpl-2.0 | 1,075,875,589,911,131,800 | 37.903226 | 120 | 0.462763 | false |
nityas/6869-finalproject | hog/cpreprocess.py | 1 | 2059 | # based on http://cs229.stanford.edu/proj2013/JimenezNguyen_MathFormulas_final_paper.pdf
import sys
sys.path.append('/usr/local/lib/python2.7/site-packages/') # not necessary for all computers, depending on how OpenCV was installed
import cv2
import numpy as np
from skimage import morphology
from PIL import Image
def segment(img):
    # Threshold the grayscale captcha, erode to separate strokes, then keep
    # the larger contours and return one white-on-black mask crop per glyph.
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
blurred = cv2.GaussianBlur(gray, (5, 5), 0)
thresh = cv2.adaptiveThreshold(blurred, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV, 15, 2) # maybe increase the last argument
kernel = np.ones((2, 2), np.uint8)
    eroded = cv2.erode(thresh, kernel, iterations=1)
    contours, hierarchy = cv2.findContours(eroded, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
sizes = []
for cnt in contours:
x, y, w, h, = cv2.boundingRect(cnt)
sizes.append(w * h)
sizes.sort()
res = []
for cnt in contours:
x, y, w, h, = cv2.boundingRect(cnt)
if w * h > sizes[len(sizes) * 3 / 4] / 2:
print cnt
temp = np.copy(img[y:y+h, x:x+w, :])
for i in range(h):
for j in range(w):
if cv2.pointPolygonTest(cnt, (x + j, y + i), False) < 0:
temp[i, j, 0] = 0
temp[i, j, 1] = 0
temp[i, j, 2] = 0
else:
temp[i, j, 0] = 255
temp[i, j, 1] = 255
temp[i, j, 2] = 255
res.append(temp)
for cnt in contours:
x, y, w, h, = cv2.boundingRect(cnt)
if w * h > sizes[len(sizes) * 3 / 4] / 2:
cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2)
#cv2.imshow("aaaa", img)
#cv2.waitKey(0)
return res
fname = 'captcha_1/0.1145622891645.jpg'
img = cv2.imread(fname)
res = segment(img)
for i in range(len(res)):
print 'data_2/' + fname[10:].replace('.', '_%d.' % i)
cv2.imwrite('data_2/' + fname[10:].replace('.', '_%d.' % i), res[i])
| mit | 5,092,538,566,527,311,000 | 33.898305 | 145 | 0.542496 | false |
shamshad-npti/kafka-spark-streaming | producer/sales_receipt.py | 1 | 2781 | """
SalesReceipt and ItemOrder model classes
"""
import json
def _assign_with_type_check(obj, attr, value, type_info):
if isinstance(value, type_info):
setattr(obj, attr, value)
else:
raise TypeError("Expected type: %s but found: %s" % (type_info, type(value)))
class SalesReceipt(object):
def __init__(self, store_id, customer_id, receipt_id, items):
self._store_id = int(store_id)
self._customer_id = int(customer_id)
self._receipt_id = int(receipt_id)
if not isinstance(items, (list, tuple)):
raise TypeError("items should be instance of list or tuple")
if not all([isinstance(item, ItemOrder) for item in items]):
raise TypeError("All item order should be instance of `ItemOrder`")
self._items = tuple(items)
@property
def store_id(self):
return self._store_id
@store_id.setter
def store_id(self, value):
_assign_with_type_check(self, '_store_id', value, int)
@property
def customer_id(self):
return self._customer_id
@customer_id.setter
def customer_id(self, value):
_assign_with_type_check(self, '_customer_id', value, int)
@property
def receipt_id(self):
return self._receipt_id
@receipt_id.setter
def receipt_id(self, value):
_assign_with_type_check(self, '_receipt_id', value, int)
@property
def items(self):
return self._items
def as_dict(self):
return {
"store_id": self.store_id,
"customer_id": self.customer_id,
"receipt_id": self.receipt_id,
"items": [item.as_dict() for item in self.items]
}
def __str__(self):
return json.dumps(self.as_dict())
class ItemOrder(object):
def __init__(self, item_id, quantity, total_price):
self._item_id = int(item_id)
self._quantity = int(quantity)
self._total_price = float(total_price)
@property
def item_id(self):
return self._item_id
@item_id.setter
def item_id(self, value):
_assign_with_type_check(self, '_item_id', value, int)
@property
def quantity(self):
return self._quantity
@quantity.setter
def quantity(self, value):
_assign_with_type_check(self, '_quantity', value, int)
@property
def total_price(self):
return self._total_price
@total_price.setter
def total_price(self, value):
_assign_with_type_check(self, '_total_price', value, float)
def as_dict(self):
return {
"item_id": self.item_id,
"quantity": self.quantity,
"total_price_paid": self.total_price
}
def __str__(self):
return json.dumps(self.as_dict())
| gpl-3.0 | -6,758,974,663,050,205,000 | 25.740385 | 85 | 0.592233 | false |
sohonetlabs/qbe | django_qbe/operators.py | 1 | 3919 | from builtins import object
from django.conf import settings
from django.db import connections
from django.db.models.fields import Field
from importlib import import_module
from future.utils import with_metaclass
DATABASES = settings.DATABASES
BACKEND_TO_OPERATIONS = {
'mysql': 'MySQLOperations',
'oracle': 'OracleOperations',
'postgis': 'PostGISOperations',
'spatialite': 'SpatiaLiteOperations',
}
"""
Plugin infrastructure based on
http://martyalchin.com/2008/jan/10/simple-plugin-framework/
"""
class OperatorMount(type):
def __init__(cls, *args, **kwargs):
if not hasattr(cls, 'operators'):
# This branch only executes when processing the mount point itself.
# So, since this is a new operator type, not an implementation,
# this class shouldn't be registered as a operator. Instead, it
# sets up a list where operators can be registered later.
cls.operators = {}
else:
# This must be a operator implementation, which should be
# registered.
# Simply appending it to the list is all that's needed to keep
# track of it later.
if hasattr(cls, 'slug') and hasattr(cls, 'label'):
cls.operators[cls.slug] = cls
def get_operators(self):
return self.operators
class CustomOperator(with_metaclass(OperatorMount, object)):
"""
Mount point for operators which refer to actions that can be performed.
Operators implementing this reference should provide the following
attributes:
======== ========================================================
slug A unique slug that must identify this operator
label The label that will be displayed in the criteria dropdown
======== ========================================================
"""
def __init__(self, db_field, operator, value, db_alias="default"):
self.params = []
self.wheres = []
self.db_field = db_field
self.operator = operator
self.value = value
self._db_alias = db_alias
self._db_connection = connections["default"]
database_properties = DATABASES.get(self._db_alias, "default")
module = database_properties['ENGINE']
        base_mod = intros_mod = None
        try:
            base_mod = import_module("%s.base" % module)
            intros_mod = import_module("%s.introspection" % module)
        except ImportError:
            pass
if base_mod and intros_mod:
self._db_operators = base_mod.DatabaseWrapper.operators
if module.startswith('django.contrib.gis'):
operations_name = BACKEND_TO_OPERATIONS[module.split('.')[-1]]
DatabaseOperations = getattr(base_mod, operations_name)
else:
DatabaseOperations = base_mod.DatabaseOperations
try:
self._db_operations = DatabaseOperations(self._db_connection)
except TypeError:
# Some engines have no params to instance DatabaseOperations
self._db_operations = DatabaseOperations()
def _get_lookup(self, operator, over):
lookup = Field().get_db_prep_lookup(operator, over,
connection=self._db_connection,
prepared=True)
if isinstance(lookup, (tuple, list)):
return lookup[0]
return lookup
def get_params(self):
"""
returns a list
"""
value = self._get_lookup(self.operator, self.value)
self.params.append(self.value)
return self.params
def get_wheres(self):
"""
returns a list
"""
        self.wheres.append(u"%s %s"
                           % (self._db_operations.lookup_cast(self.operator)
                              % self.db_field,
                              self.operator))
return self.wheres
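

# Illustrative plugin sketch (added for clarity; not shipped with django_qbe).
# OperatorMount registers every subclass that defines both `slug` and `label`,
# so the declaration below is all that is needed for the operator to appear in
# CustomOperator.get_operators(); its SQL text still comes from the inherited
# get_params()/get_wheres().
class ExampleOperator(CustomOperator):
    slug = "example"
    label = "Example operator"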
| mit | -3,223,715,422,294,490,000 | 33.991071 | 79 | 0.577954 | false |
DayGitH/Python-Challenges | DailyProgrammer/DP20161115A.py | 1 | 1601 | """
[2016-11-15] Challenge #292 [Easy] Increasing range parsing
https://www.reddit.com/r/dailyprogrammer/comments/5d1l7v/20161115_challenge_292_easy_increasing_range/
# **Description:**
We are given a list of numbers in a "short-hand" range notation where only the significant part of the next number is
written because we know the numbers are always increasing (ex. "1,3,7,2,4,1" represents [1, 3, 7, 12, 14, 21]). Some
people use different separators for their ranges (ex. "1-3,1-2", "1:3,1:2", "1..3,1..2" represent the same numbers [1,
2, 3, 11, 12]) and they sometimes specify a third digit for the range step (ex. "1:5:2" represents [1, 3, 5]).
**NOTE:** For this challenge range limits are always inclusive.
Our job is to return a list of the complete numbers.
The possible separators are: ["-", ":", ".."]
# **Input:**
You'll be given strings in the "short-hand" range notation
"1,3,7,2,4,1"
"1-3,1-2"
"1:5:2"
"104-2"
"104..02"
"545,64:11"
# **Output:**
You should output a string of all the numbers separated by a space
"1 3 7 12 14 21"
"1 2 3 11 12"
"1 3 5"
"104 105 106 107 108 109 110 111 112"
"104 105 106...200 201 202" # truncated for simplicity
"545 564 565 566...609 610 611" # truncated for simplicity
#Finally
Have a good challenge idea, like /u/izxle did?
Consider submitting it to /r/dailyprogrammer_ideas
#Update
As /u/SeverianLies pointed out, it is unclear if the `-` is a separator or a sign.
For this challenge we work with only positive natural numbers.
"""
def main():
pass
if __name__ == "__main__":
main()
| mit | 3,391,676,638,498,215,000 | 35.386364 | 118 | 0.674578 | false |
OSSOS/MOP | src/ossos/core/ossos/downloads/core.py | 1 | 3503 | import os
__author__ = "David Rusk <[email protected]>"
from astropy.io import fits
import io
from ossos.gui import logger
from .. import storage
import sys
class Downloader(object):
"""
Downloads data from VOSpace.
"""
def download_hdulist(self, uri, **kwargs):
"""
Downloads a FITS image as a HDUList.
Args:
uri: The URI of the FITS image to download.
kwargs: optional arguments to pass to the vos client.
For example, passing view="cutout" and cutout=[1] will result
in a cutout of extension 1 from the FITS image specified by the
URI.
Returns:
hdulist: astropy.io.fits.hdu.hdulist.HDUList
The requests FITS image as an Astropy HDUList object
(http://docs.astropy.org/en/latest/io/fits/api/hdulists.html).
"""
logger.debug(str(kwargs))
hdulist = None
try:
vobj = storage.vofile(uri, **kwargs)
try:
                fobj = io.BytesIO(vobj.read())  # FITS data is binary, so BytesIO rather than StringIO
fobj.seek(0)
hdulist = fits.open(fobj)
except Exception as e:
sys.stderr.write("ERROR: {}\n".format(str(e)))
sys.stderr.write("While loading {} {}\n".format(uri, kwargs))
pass
finally:
vobj.close()
except Exception as e:
sys.stderr.write(str(e)+"\n")
sys.stderr.write("While opening connection to {}.\n".format(uri))
sys.stderr.write("Sending back FLAT instead, too keep display happy.")
hdulist = self.download_hdulist('vos:OSSOS/dbimages/calibrators/13AQ05_r_flat.fits', **kwargs)
return hdulist
def download_apcor(self, uri):
"""
Downloads apcor data.
Args:
uri: The URI of the apcor data file.
Returns:
apcor: ossos.downloads.core.ApcorData
"""
local_file = os.path.basename(uri)
if os.access(local_file, os.F_OK):
fobj = open(local_file)
else:
fobj = storage.vofile(uri, view='data')
fobj.seek(0)
        apcor_str = fobj.read()
        fobj.close()
return ApcorData.from_string(apcor_str)
def download_zmag(self, uri):
local_file = os.path.basename(uri)
if os.access(local_file, os.F_OK):
return float(open(local_file).read())
fobj = storage.vofile(uri, view="data")
fobj.seek(0)
        zmag_str = fobj.read()
        return float(zmag_str)
class ApcorData(object):
def __init__(self, ap_in, ap_out, apcor, apcor_err):
self.ap_in = ap_in
self.ap_out = ap_out
self.apcor = apcor
self.apcor_err = apcor_err
@classmethod
def from_string(cls, rawstr):
"""
Creates an ApcorData record from the raw string format.
Expected string format:
ap_in ap_out ap_cor apcor_err
"""
try:
args = list(map(float, rawstr.split()))
except Exception as ex:
import sys
logger.error("Failed to convert aperture correction: {}".format(ex))
raise ex
return cls(*args)
@property
def valid(self):
return self.apcor_err < 1.0
@property
def aperture(self):
return self.ap_in
@property
def sky(self):
return self.ap_out + 1
@property
def swidth(self):
return 2*self.ap_in
| gpl-3.0 | -1,030,849,070,742,915,700 | 27.25 | 106 | 0.554382 | false |
havaeimo/smartlearner | smartlearner/interfaces/loss.py | 1 | 1599 | from collections import OrderedDict
from theano import tensor as T
from abc import ABCMeta, abstractmethod
class Loss(object):
__metaclass__ = ABCMeta
def __init__(self, model, dataset):
self.model = model
self.dataset = dataset
self.consider_constant = [] # Part of the computational graph to be considered as a constant.
self._tasks = []
self._gradients = None
# Build the graph for the loss.
model_output = self.model.get_output(self.dataset.symb_inputs)
self._loss = self._compute_loss(model_output)
@abstractmethod
def _get_updates(self):
raise NotImplementedError("Subclass of 'Loss' must implement '_get_updates()'.")
@abstractmethod
def _compute_loss(self, model_output):
raise NotImplementedError("Subclass of 'Loss' must implement '_compute_loss(model_output)'.")
@property
def gradients(self):
if self._gradients is None:
self._gradients = self._get_gradients()
return self._gradients
@property
def tasks(self):
return self.model.tasks + self._tasks
@property
def updates(self):
updates = OrderedDict()
updates.update(self.model.updates)
updates.update(self._get_updates())
return updates
def _get_gradients(self):
gparams = T.grad(cost=self._loss,
wrt=self.model.parameters,
consider_constant=self.consider_constant)
self._gradients = dict(zip(self.model.parameters, gparams))
return self.gradients
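

# Illustrative sketch of a concrete loss (added for clarity; not part of the
# package). It assumes the dataset exposes `symb_targets` for the supervised
# targets, mirroring how `symb_inputs` is used in __init__ above.
class MeanSquaredError(Loss):
    def _get_updates(self):
        return OrderedDict()  # no loss-specific updates

    def _compute_loss(self, model_output):
        return T.mean((model_output - self.dataset.symb_targets) ** 2)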
| bsd-3-clause | -3,206,657,585,251,132,400 | 29.169811 | 102 | 0.629769 | false |
jcurbo/astronomy-stuff | moon-rise-set/gen-dates.py | 1 | 2418 | import ephem
from astropy.coordinates import EarthLocation
from astropy.time import Time
from astroplan import moon_illumination
from icalendar import Calendar, Event
from datetime import date, time, timedelta, datetime
my_lat = '39.0209321'
my_lon = '-77.01722708'
my_elev = 122
date_start = '2017/01/01 00:00'
obs = ephem.Observer()
moon = ephem.Moon()
obs.lat = my_lat
obs.lon = my_lon
obs.elevation = my_elev
obs.date = date_start
loc = EarthLocation(lat=my_lat, lon=my_lon)
moonrise_all = []
moonset_all = []
illum_all = []
for x in range(0,365):
# for x in range(0,1):
print("Calculation for {0}:".format(obs.date))
moon.compute(obs)
if (moon.alt > 0):
print(" Moon is currently up")
moon_up = True
moonrise = obs.previous_rising(moon)
moonset = obs.next_setting(moon)
else:
print(" Moon is currently down")
moon_up = False
moonrise = obs.next_rising(moon)
moonset = obs.next_setting(moon)
illum = moon_illumination(Time(moonrise.datetime()))*100
moonrise_all.append(ephem.localtime(moonrise))
moonset_all.append(ephem.localtime(moonset))
illum_all.append(illum)
print(" Moonrise: {0}".format(ephem.localtime(moonrise)))
print(" Moonset: {0}".format(ephem.localtime(moonset)))
print(" Illum: {0:.0f}%".format(illum))
obs.date = obs.date + 1
# ical stuff starts here
cal = Calendar()
cal.add('prodid', '-//python icalendar//python.org//')
cal.add('version', '2.0')
for r, s, i in zip(moonrise_all, moonset_all, illum_all):
# moonrise event
e1 = Event()
moonrise_simpletime = time.strftime(r.time(), "%H:%M")
e1.add('uid', "{0}@curbo.org".format(r.isoformat()))
e1.add('summary', "Moonrise at {0}, illum {1:.0f}%".format(moonrise_simpletime, i))
e1.add('dtstart', r)
e1.add('dtend', r + timedelta(minutes=15))
e1.add('dtstamp', datetime.now())
cal.add_component(e1)
# moonset event
e2 = Event()
moonset_simpletime = time.strftime(s.time(), "%H:%M")
e2.add('uid', "{0}@curbo.org".format(s.isoformat()))
e2.add('summary', "Moonset at {0}, illum {1:.0f}%".format(moonset_simpletime, i))
e2.add('dtstart', s)
e2.add('dtend', s + timedelta(minutes=15))
e2.add('dtstamp', datetime.now())
cal.add_component(e2)
# write out the ics file
f = open('moon.ics', 'wb')
f.write(cal.to_ical())
f.close()
| mit | 1,291,823,558,686,291,500 | 26.793103 | 87 | 0.636063 | false |
brigittebigi/proceed | proceed/src/DataIO/Read/readgeneric.py | 1 | 2212 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
# ---------------------------------------------------------------------------
# ___ __ ___ ___ ____ ____ __
# | \ | \ | | / | | | \ Automatic
# |__/ |__/ | | | |__ |__ | | Conference
# | |\_ | | | | | | | Proceedings
# | | \ |___| \___ |___ |___ |__/ Generator
# ==========================================================
#
# http://www.lpl-aix.fr/~bigi/
#
# ---------------------------------------------------------------------------
# developed at:
#
# Laboratoire Parole et Langage
#
# Copyright (C) 2013-2014 Brigitte Bigi
#
# Use of this software is governed by the GPL, v3
# This banner notice must not be removed
# ---------------------------------------------------------------------------
#
# SPPAS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SPPAS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SPPAS. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------
#
"""
Defines the interface for a generic document reader that provides common
utilities required for generating formatted abstract submissions.
"""
#-----------------------------------------------------------------------------
class readGeneric(object):
"""
Base class for an "abstract reader".
This means that all sub-reader classes must implement these methods.
"""
def GetDocs(self, filename, authorsfilename=None):
"""
Return a list of document instances.
"""
pass
#-----------------------------------------------------------------------------
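
# Illustrative subclass sketch (added for clarity; not part of Proceed): a
# concrete reader only has to override GetDocs and return a list of document
# instances -- the trivial body below is a placeholder, not a real parser.
class readEmpty(readGeneric):

    def GetDocs(self, filename, authorsfilename=None):
        return []

#-----------------------------------------------------------------------------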
| gpl-3.0 | -2,115,020,616,265,497,600 | 37.137931 | 78 | 0.44575 | false |
Kim-Seonghyeon/youtube_8m | imagenet.py | 1 | 1225 | import inspect
import os
from tensorflow import gfile
import numpy as np
import tensorflow as tf
import time
from tensorflow import logging
out_file = gfile.Open("gs://ksh_imagenet/ILSVRC/feature.csv", "w+")
out_file.write("filename," + ",".join(["feature" + str(i) for i in range(1, 2049)]) + "\n")
files = tf.gfile.Glob("gs://ksh_imagenet/ILSVRC/Data/DET/test/*.JPEG")
logging.info(files)
f = tf.gfile.FastGFile("gs://ksh_imagenet/vgg16/classify_image_graph_def.pb", 'rb')
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
_ = tf.import_graph_def(graph_def, name='')
config = tf.ConfigProto(allow_soft_placement=True)
sess = tf.Session(config=config)
pool_3_tensor = sess.graph.get_tensor_by_name('pool_3:0')  # penultimate 2048-d feature layer
idx = 0
for i in range(int(len(files)/8)):
for k in range(8):
with tf.device("/gpu:%d" % k):
image_data = tf.gfile.FastGFile(files[idx + k], 'rb').read()
logging.info(idx + k)
feature = sess.run(pool_3_tensor,
{'DecodeJpeg/contents:0': image_data})
feature = np.squeeze(feature)
out_file.write(files[idx + k] + "," + ",".join(["%f" % y for y in feature]) + "\n")
idx += 8
out_file.close()
| apache-2.0 | -6,206,226,487,434,061,000 | 33.027778 | 95 | 0.626122 | false |
iEngage/python-sdk | iengage_client/models/idea.py | 1 | 20100 | # coding: utf-8
"""
Stakeholder engagement API
This API enables Intelligent Engagement for your Business. iEngage is a platform that combines process, augmented intelligence and rewards to help you intelligently engage customers.
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class Idea(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, idea_id=None, idea_title=None, group=None, idea_description=None, ideator=None, idea_creation_date=None, last_modified_date=None, idea_stage=None, domain=None, technology=None, access_type=None, video_id=None, active_status=None, team_status=None, project_status=None, total_followers=None, total_comments=None, total_blogs=None, average_rating_score=None, number_of_ratings=None, current_user_following=False, current_user_rating=None, idea_file_url=None, attachment_list=None, sentiment=None, sentiment_details=None, sentiment_weightage=None, entity=None):
"""
Idea - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'idea_id': 'int',
'idea_title': 'str',
'group': 'Group',
'idea_description': 'str',
'ideator': 'User',
'idea_creation_date': 'datetime',
'last_modified_date': 'datetime',
'idea_stage': 'str',
'domain': 'str',
'technology': 'str',
'access_type': 'str',
'video_id': 'str',
'active_status': 'str',
'team_status': 'str',
'project_status': 'str',
'total_followers': 'int',
'total_comments': 'int',
'total_blogs': 'int',
'average_rating_score': 'float',
'number_of_ratings': 'int',
'current_user_following': 'bool',
'current_user_rating': 'float',
'idea_file_url': 'str',
'attachment_list': 'list[Multimedia]',
'sentiment': 'str',
'sentiment_details': 'Sentiment',
'sentiment_weightage': 'float',
'entity': 'list[NER]'
}
self.attribute_map = {
'idea_id': 'ideaId',
'idea_title': 'ideaTitle',
'group': 'group',
'idea_description': 'ideaDescription',
'ideator': 'ideator',
'idea_creation_date': 'ideaCreationDate',
'last_modified_date': 'lastModifiedDate',
'idea_stage': 'ideaStage',
'domain': 'domain',
'technology': 'technology',
'access_type': 'accessType',
'video_id': 'videoId',
'active_status': 'activeStatus',
'team_status': 'teamStatus',
'project_status': 'projectStatus',
'total_followers': 'totalFollowers',
'total_comments': 'totalComments',
'total_blogs': 'totalBlogs',
'average_rating_score': 'averageRatingScore',
'number_of_ratings': 'numberOfRatings',
'current_user_following': 'currentUserFollowing',
'current_user_rating': 'currentUserRating',
'idea_file_url': 'ideaFileURL',
'attachment_list': 'attachmentList',
'sentiment': 'sentiment',
'sentiment_details': 'sentimentDetails',
'sentiment_weightage': 'sentimentWeightage',
'entity': 'entity'
}
self._idea_id = idea_id
self._idea_title = idea_title
self._group = group
self._idea_description = idea_description
self._ideator = ideator
self._idea_creation_date = idea_creation_date
self._last_modified_date = last_modified_date
self._idea_stage = idea_stage
self._domain = domain
self._technology = technology
self._access_type = access_type
self._video_id = video_id
self._active_status = active_status
self._team_status = team_status
self._project_status = project_status
self._total_followers = total_followers
self._total_comments = total_comments
self._total_blogs = total_blogs
self._average_rating_score = average_rating_score
self._number_of_ratings = number_of_ratings
self._current_user_following = current_user_following
self._current_user_rating = current_user_rating
self._idea_file_url = idea_file_url
self._attachment_list = attachment_list
self._sentiment = sentiment
self._sentiment_details = sentiment_details
self._sentiment_weightage = sentiment_weightage
self._entity = entity
@property
def idea_id(self):
"""
Gets the idea_id of this Idea.
:return: The idea_id of this Idea.
:rtype: int
"""
return self._idea_id
@idea_id.setter
def idea_id(self, idea_id):
"""
Sets the idea_id of this Idea.
:param idea_id: The idea_id of this Idea.
:type: int
"""
self._idea_id = idea_id
@property
def idea_title(self):
"""
Gets the idea_title of this Idea.
:return: The idea_title of this Idea.
:rtype: str
"""
return self._idea_title
@idea_title.setter
def idea_title(self, idea_title):
"""
Sets the idea_title of this Idea.
:param idea_title: The idea_title of this Idea.
:type: str
"""
self._idea_title = idea_title
@property
def group(self):
"""
Gets the group of this Idea.
:return: The group of this Idea.
:rtype: Group
"""
return self._group
@group.setter
def group(self, group):
"""
Sets the group of this Idea.
:param group: The group of this Idea.
:type: Group
"""
self._group = group
@property
def idea_description(self):
"""
Gets the idea_description of this Idea.
:return: The idea_description of this Idea.
:rtype: str
"""
return self._idea_description
@idea_description.setter
def idea_description(self, idea_description):
"""
Sets the idea_description of this Idea.
:param idea_description: The idea_description of this Idea.
:type: str
"""
self._idea_description = idea_description
@property
def ideator(self):
"""
Gets the ideator of this Idea.
:return: The ideator of this Idea.
:rtype: User
"""
return self._ideator
@ideator.setter
def ideator(self, ideator):
"""
Sets the ideator of this Idea.
:param ideator: The ideator of this Idea.
:type: User
"""
self._ideator = ideator
@property
def idea_creation_date(self):
"""
Gets the idea_creation_date of this Idea.
:return: The idea_creation_date of this Idea.
:rtype: datetime
"""
return self._idea_creation_date
@idea_creation_date.setter
def idea_creation_date(self, idea_creation_date):
"""
Sets the idea_creation_date of this Idea.
:param idea_creation_date: The idea_creation_date of this Idea.
:type: datetime
"""
self._idea_creation_date = idea_creation_date
@property
def last_modified_date(self):
"""
Gets the last_modified_date of this Idea.
:return: The last_modified_date of this Idea.
:rtype: datetime
"""
return self._last_modified_date
@last_modified_date.setter
def last_modified_date(self, last_modified_date):
"""
Sets the last_modified_date of this Idea.
:param last_modified_date: The last_modified_date of this Idea.
:type: datetime
"""
self._last_modified_date = last_modified_date
@property
def idea_stage(self):
"""
Gets the idea_stage of this Idea.
:return: The idea_stage of this Idea.
:rtype: str
"""
return self._idea_stage
@idea_stage.setter
def idea_stage(self, idea_stage):
"""
Sets the idea_stage of this Idea.
:param idea_stage: The idea_stage of this Idea.
:type: str
"""
self._idea_stage = idea_stage
@property
def domain(self):
"""
Gets the domain of this Idea.
:return: The domain of this Idea.
:rtype: str
"""
return self._domain
@domain.setter
def domain(self, domain):
"""
Sets the domain of this Idea.
:param domain: The domain of this Idea.
:type: str
"""
self._domain = domain
@property
def technology(self):
"""
Gets the technology of this Idea.
:return: The technology of this Idea.
:rtype: str
"""
return self._technology
@technology.setter
def technology(self, technology):
"""
Sets the technology of this Idea.
:param technology: The technology of this Idea.
:type: str
"""
self._technology = technology
@property
def access_type(self):
"""
Gets the access_type of this Idea.
:return: The access_type of this Idea.
:rtype: str
"""
return self._access_type
@access_type.setter
def access_type(self, access_type):
"""
Sets the access_type of this Idea.
:param access_type: The access_type of this Idea.
:type: str
"""
self._access_type = access_type
@property
def video_id(self):
"""
Gets the video_id of this Idea.
:return: The video_id of this Idea.
:rtype: str
"""
return self._video_id
@video_id.setter
def video_id(self, video_id):
"""
Sets the video_id of this Idea.
:param video_id: The video_id of this Idea.
:type: str
"""
self._video_id = video_id
@property
def active_status(self):
"""
Gets the active_status of this Idea.
:return: The active_status of this Idea.
:rtype: str
"""
return self._active_status
@active_status.setter
def active_status(self, active_status):
"""
Sets the active_status of this Idea.
:param active_status: The active_status of this Idea.
:type: str
"""
self._active_status = active_status
@property
def team_status(self):
"""
Gets the team_status of this Idea.
:return: The team_status of this Idea.
:rtype: str
"""
return self._team_status
@team_status.setter
def team_status(self, team_status):
"""
Sets the team_status of this Idea.
:param team_status: The team_status of this Idea.
:type: str
"""
self._team_status = team_status
@property
def project_status(self):
"""
Gets the project_status of this Idea.
:return: The project_status of this Idea.
:rtype: str
"""
return self._project_status
@project_status.setter
def project_status(self, project_status):
"""
Sets the project_status of this Idea.
:param project_status: The project_status of this Idea.
:type: str
"""
self._project_status = project_status
@property
def total_followers(self):
"""
Gets the total_followers of this Idea.
:return: The total_followers of this Idea.
:rtype: int
"""
return self._total_followers
@total_followers.setter
def total_followers(self, total_followers):
"""
Sets the total_followers of this Idea.
:param total_followers: The total_followers of this Idea.
:type: int
"""
self._total_followers = total_followers
@property
def total_comments(self):
"""
Gets the total_comments of this Idea.
:return: The total_comments of this Idea.
:rtype: int
"""
return self._total_comments
@total_comments.setter
def total_comments(self, total_comments):
"""
Sets the total_comments of this Idea.
:param total_comments: The total_comments of this Idea.
:type: int
"""
self._total_comments = total_comments
@property
def total_blogs(self):
"""
Gets the total_blogs of this Idea.
:return: The total_blogs of this Idea.
:rtype: int
"""
return self._total_blogs
@total_blogs.setter
def total_blogs(self, total_blogs):
"""
Sets the total_blogs of this Idea.
:param total_blogs: The total_blogs of this Idea.
:type: int
"""
self._total_blogs = total_blogs
@property
def average_rating_score(self):
"""
Gets the average_rating_score of this Idea.
:return: The average_rating_score of this Idea.
:rtype: float
"""
return self._average_rating_score
@average_rating_score.setter
def average_rating_score(self, average_rating_score):
"""
Sets the average_rating_score of this Idea.
:param average_rating_score: The average_rating_score of this Idea.
:type: float
"""
self._average_rating_score = average_rating_score
@property
def number_of_ratings(self):
"""
Gets the number_of_ratings of this Idea.
:return: The number_of_ratings of this Idea.
:rtype: int
"""
return self._number_of_ratings
@number_of_ratings.setter
def number_of_ratings(self, number_of_ratings):
"""
Sets the number_of_ratings of this Idea.
:param number_of_ratings: The number_of_ratings of this Idea.
:type: int
"""
self._number_of_ratings = number_of_ratings
@property
def current_user_following(self):
"""
Gets the current_user_following of this Idea.
:return: The current_user_following of this Idea.
:rtype: bool
"""
return self._current_user_following
@current_user_following.setter
def current_user_following(self, current_user_following):
"""
Sets the current_user_following of this Idea.
:param current_user_following: The current_user_following of this Idea.
:type: bool
"""
self._current_user_following = current_user_following
@property
def current_user_rating(self):
"""
Gets the current_user_rating of this Idea.
:return: The current_user_rating of this Idea.
:rtype: float
"""
return self._current_user_rating
@current_user_rating.setter
def current_user_rating(self, current_user_rating):
"""
Sets the current_user_rating of this Idea.
:param current_user_rating: The current_user_rating of this Idea.
:type: float
"""
self._current_user_rating = current_user_rating
@property
def idea_file_url(self):
"""
Gets the idea_file_url of this Idea.
:return: The idea_file_url of this Idea.
:rtype: str
"""
return self._idea_file_url
@idea_file_url.setter
def idea_file_url(self, idea_file_url):
"""
Sets the idea_file_url of this Idea.
:param idea_file_url: The idea_file_url of this Idea.
:type: str
"""
self._idea_file_url = idea_file_url
@property
def attachment_list(self):
"""
Gets the attachment_list of this Idea.
:return: The attachment_list of this Idea.
:rtype: list[Multimedia]
"""
return self._attachment_list
@attachment_list.setter
def attachment_list(self, attachment_list):
"""
Sets the attachment_list of this Idea.
:param attachment_list: The attachment_list of this Idea.
:type: list[Multimedia]
"""
self._attachment_list = attachment_list
@property
def sentiment(self):
"""
Gets the sentiment of this Idea.
:return: The sentiment of this Idea.
:rtype: str
"""
return self._sentiment
@sentiment.setter
def sentiment(self, sentiment):
"""
Sets the sentiment of this Idea.
:param sentiment: The sentiment of this Idea.
:type: str
"""
self._sentiment = sentiment
@property
def sentiment_details(self):
"""
Gets the sentiment_details of this Idea.
:return: The sentiment_details of this Idea.
:rtype: Sentiment
"""
return self._sentiment_details
@sentiment_details.setter
def sentiment_details(self, sentiment_details):
"""
Sets the sentiment_details of this Idea.
:param sentiment_details: The sentiment_details of this Idea.
:type: Sentiment
"""
self._sentiment_details = sentiment_details
@property
def sentiment_weightage(self):
"""
Gets the sentiment_weightage of this Idea.
:return: The sentiment_weightage of this Idea.
:rtype: float
"""
return self._sentiment_weightage
@sentiment_weightage.setter
def sentiment_weightage(self, sentiment_weightage):
"""
Sets the sentiment_weightage of this Idea.
:param sentiment_weightage: The sentiment_weightage of this Idea.
:type: float
"""
self._sentiment_weightage = sentiment_weightage
@property
def entity(self):
"""
Gets the entity of this Idea.
:return: The entity of this Idea.
:rtype: list[NER]
"""
return self._entity
@entity.setter
def entity(self, entity):
"""
Sets the entity of this Idea.
:param entity: The entity of this Idea.
:type: list[NER]
"""
self._entity = entity
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, Idea):
            return False
        return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
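# Illustrative usage sketch (an assumption, not part of the generated model;
# the swagger-generated constructor is assumed to default all fields to None):
#
#     idea = Idea()
#     idea.idea_title = "Solar-powered charger"
#     idea.total_followers = 12
#     print(idea.to_str())   # pretty-printed dict of all swagger attributes
#     idea == Idea()         # False once any attribute differs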
| apache-2.0 | 2,531,994,326,339,373,000 | 25.447368 | 581 | 0.558806 | false |
prim/ocempgui | ocempgui/widgets/ScrolledList.py | 1 | 22987 | # $Id: ScrolledList.py,v 1.61.2.6 2007/03/26 11:44:41 marcusva Exp $
#
# Copyright (c) 2004-2007, Marcus von Appen
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""A scrollable widget, which contains list elements."""
from pygame import KMOD_SHIFT, KMOD_CTRL, K_UP, K_DOWN, K_HOME, K_END, K_SPACE
from pygame import K_a, key
from ocempgui.widgets.components import ListItemCollection
from ScrolledWindow import ScrolledWindow
from ListViewPort import ListViewPort
from Constants import *
from StyleInformation import StyleInformation
import base
class ScrolledList (ScrolledWindow):
"""ScrolledList (width, height, collection=None) -> ScrolledList
A widget class, that populates elements in a list-style manner.
The ScrolledList displays data in a listed form and allows to browse
through it using horizontal and vertical scrolling. Single or
    multiple items of the list can be selected or - dependent on the
ListItem object - be edited, etc.
The ScrolledList's child is a ListViewPort object, which takes care
of drawing the attached list items. You can supply your own
ListViewPort object through the 'child' attribute as described in
the Bin class documentation.
To set or reset an already created collection of items, the 'items'
attribute and set_items() method can be used. The collection to set
needs to be a ListItemCollection object.
myitems = ListItemCollection()
myitems.append (TextListItem ('First')
scrolledlist.items = myitems
scrolledlist.set_items (myitems)
The ScrolledList allows different types of selections by modifying
the 'selectionmode' attribute. Dependant on the set selection mode,
you can then select multiple items at once (SELECTION_MULTIPLE),
only one item at a time (SELECTION_SINGLE) or nothing at all
(SELECTION_NONE). Modifying the selection mode does not have any
effect on a currently made selection. See the selection constants
section in the Constants documentation for more details.
scrolledlist.selectionmode = SELECTION_NONE
scrolledlist.set_selectionmode (SELECTION_SINGLE)
To improve the appearance and selection behaviour of the single list
items, the ScrolledList support additional spacing to place between
them. It can be read and set through the 'spacing' attribute and
set_spacing() method.
scrolledlist.spacing = 4
scrolledlist.set_spacing (0)
Default action (invoked by activate()):
See the ScrolledWindow class.
Mnemonic action (invoked by activate_mnemonic()):
None
Signals:
SIG_SELECTCHANGED - Invoked, when the item selection changed.
SIG_LISTCHANGED - Invoked, when the underlying item list changed.
Attributes:
items - Item list of the ScrolledList.
selectionmode - The selection mode for the ScrolledList. Default is
SELECTION_MULTIPLE.
spacing - Spacing to place between the list items. Default is 0.
cursor - The currently focused item.
"""
def __init__ (self, width, height, collection=None):
# Temporary placeholder for kwargs of the update() method.
self.__lastargs = None
ScrolledWindow.__init__ (self, width, height)
self._spacing = 0
# The item cursor position within the list.
self._cursor = None
# Used for selections.
self._last_direction = None
self._signals[SIG_LISTCHANGED] = []
self._signals[SIG_SELECTCHANGED] = []
# Items and selection.
self._itemcollection = None
self._selectionmode = SELECTION_MULTIPLE
if collection:
self.set_items (collection)
else:
self._itemcollection = ListItemCollection ()
self._itemcollection.list_changed = self._list_has_changed
self._itemcollection.item_changed = self._item_has_changed
self.child = ListViewPort (self)
def _list_has_changed (self, collection):
"""S._list_has_changed (...) -> None
Update method for list_changed () notifications.
"""
if not self.locked:
self.child.update_items ()
self.dirty = True
self.run_signal_handlers (SIG_LISTCHANGED)
def _item_has_changed (self, item):
"""S._item_has_changed (...) -> None
Update method for item_changed() notifications.
"""
if not self.locked:
self.child.update_items ()
def set_items (self, items):
"""S.set_items (...) -> None
Sets a collection of items to display.
"""
old = self._itemcollection
self.vscrollbar.value = 0
self.hscrollbar.value = 0
if isinstance (items, ListItemCollection):
self._itemcollection = items
else:
self._itemcollection = ListItemCollection (items)
self._itemcollection.list_changed = self._list_has_changed
self._itemcollection.item_changed = self._item_has_changed
old.list_changed = None
old.item_changed = None
del old
if len (self._itemcollection) > 0:
self._cursor = self._itemcollection[0]
else:
self._cursor = None
self._list_has_changed (self._itemcollection)
def set_focus (self, focus=True):
"""S.set_focus (...) -> bool
Sets the input and action focus of the ScrolledList.
Sets the input and action focus of the ScrolledList and returns
True upon success or False, if the focus could not be set.
"""
if focus != self.focus:
self.lock ()
if focus and not self._cursor and (len (self.items) > 0):
self._cursor = self.items[0]
self._cursor.selected = True
ScrolledWindow.set_focus (self, focus)
self.unlock ()
return self.focus
def set_spacing (self, spacing):
"""S.set_spacing (...) -> None
Sets the spacing to place between the list items of the ScrolledList.
The spacing value is the amount of pixels to place between the
items of the ScrolledList.
Raises a TypeError, if the passed argument is not a positive
integer.
"""
if (type (spacing) != int) or (spacing < 0):
raise TypeError ("spacing must be a positive integer")
self._spacing = spacing
self.child.update_items ()
def set_selectionmode (self, mode):
"""S.set_selectionmode (...) -> None
Sets the selection mode for the ScrolledList.
The selection mode can be one of the SELECTION_TYPES list.
SELECTION_NONE disables selecting any list item,
SELECTION_SINGLE allows to select only one item from the list and
SELECTION_MULTIPLE allows to select multiple items from the list.
Raises a ValueError, if the passed argument is not a value of
the SELECTION_TYPES tuple.
"""
if mode not in SELECTION_TYPES:
raise ValueError ("mode must be a value from SELECTION_TYPES")
self._selectionmode = mode
def select (self, *items):
"""S.select (...) -> None
Selects one or more specific items of the ScrolledList.
        Dependent on the set selection mode, selecting an item has
specific side effects. If the selection mode is set to
SELECTION_SINGLE, selecting an item causes any other item to
become deselected. As a counterpart SELECTION_MULTIPLE causes
the items to get selected while leaving any other item untouched.
The method causes the SIG_SELECTCHANGED event to be emitted,
whenever the selection changes.
Raises a LookupError, if the passed argument could not be
found in the items attribute.
"""
self.__select (*items)
self.run_signal_handlers (SIG_SELECTCHANGED)
def select_all (self):
"""S.select_all () -> None
Selects all items of the ScrolledList.
Selects all items of the ScrolledList, if the selection mode is
set to SELECTION_MULTIPLE
"""
if self.selectionmode == SELECTION_MULTIPLE:
notselected = filter (lambda x: x.selected == False, self.items)
for i in notselected:
i.selected = True
if len (notselected) > 0:
self.run_signal_handlers (SIG_SELECTCHANGED)
def deselect (self, *items):
"""S.deselect (...) -> None
Deselects the specified items in the ScrolledList.
The method causes the SIG_SELECTCHANGED event to be emitted, when
the selection changes.
Raises a LookupError, if the passed argument could not be
found in the items attribute.
"""
self.__deselect (*items)
self.run_signal_handlers (SIG_SELECTCHANGED)
def __select (self, *items):
"""S.__select (...) -> None
Selects one or more specific items of the ScrolledList.
"""
if self.selectionmode == SELECTION_NONE:
# Do nothing here, maybe implement a specific event action
# or s.th. like that...
return
self.lock ()
if self.selectionmode == SELECTION_SINGLE:
# Only the last item.
if items[-1] not in self.items:
raise LookupError ("item could not be found in list")
items[-1].selected = True
elif self.selectionmode == SELECTION_MULTIPLE:
for item in items:
if item not in self.items:
raise LookupError ("item could not be found in list")
item.selected = True
self.unlock ()
def __deselect (self, *items):
"""S.__deselect (..:) -> None
Deselects the specified items in the ScrolledList
"""
self.lock ()
for item in items:
if item not in self.items:
raise LookupError ("item could not be found in list")
item.selected = False
self.child.update_items ()
self.unlock ()
def get_selected (self):
"""S.get_selected () -> list
        Returns a list containing the selected items.
"""
if self.items.length != 0:
return [item for item in self.items if item.selected]
return []
def set_cursor (self, item, selected):
"""S.set_cursor (...) -> None
Sets the cursor index to the desired item.
"""
self._cursor = item
self._scroll_to_cursor ()
if item.selected == selected:
self.child.dirty = True # Render the cursor
else:
item.selected = selected
self.run_signal_handlers (SIG_SELECTCHANGED)
def _cursor_next (self, selected):
"""S._cursor_next () -> None
Advances the cursor to the next item in the list.
"""
if not self._cursor:
if self.items.length > 0:
self._cursor = self.items[0]
return
index = self.items.index (self._cursor)
if index < self.items.length - 1:
self.set_cursor (self.items[index + 1], selected)
def _cursor_prev (self, selected):
"""S._cursor_prev () -> None
Advances the cursor to the previous item in the list.
"""
if not self._cursor:
if self.items.length > 0:
self._cursor = self.items[0]
return
index = self.items.index (self._cursor)
if index > 0:
self.set_cursor (self.items[index - 1], selected)
def _scroll_to_cursor (self):
"""S._scroll_to_cursor () -> None
Scrolls the list to the cursor.
"""
if not self.cursor or not self.child.images.has_key (self.cursor):
return
border = base.GlobalStyle.get_border_size \
(self.child.__class__, self.child.style,
StyleInformation.get ("ACTIVE_BORDER")) * 2
x, y = self.child.get_item_position (self.cursor)
w, h = self.child.width, self.child.height
height = self.child.images[self.cursor][1].height + border + \
self.spacing
# Bottom edge of the item.
py = y + height
if (self.vscrollbar.value - y > 0): # Scrolling up.
self.vscrollbar.value = max (0, y - border)
elif (py > self.vscrollbar.value + h):
self.vscrollbar.value = min (abs (py - h + border),
self.vscrollbar.maximum)
def _navigate (self, key, mod):
"""S._navigate (key) -> None
Deals with keyboard navigation.
"""
multiselect = self.selectionmode == SELECTION_MULTIPLE
selected = not mod & KMOD_CTRL and self.selectionmode != SELECTION_NONE
if key == K_UP:
if (mod & KMOD_SHIFT) and multiselect:
if self._last_direction == K_DOWN:
if len (self.get_selected ()) > 1:
self._cursor.selected = False
self._cursor_prev (True)
return
self._last_direction = key
self._cursor_prev (True)
return
self.__deselect (*self.get_selected ())
self._cursor_prev (selected)
elif key == K_DOWN:
if (mod & KMOD_SHIFT) and multiselect:
if self._last_direction == K_UP:
if len (self.get_selected ()) > 1:
self._cursor.selected = False
self._cursor_next (True)
return
self._last_direction = key
self._cursor_next (True)
return
self.__deselect (*self.get_selected ())
self._cursor_next (selected)
elif key == K_END:
if (mod & KMOD_SHIFT) and multiselect:
start = self.items.index (self._cursor)
items = self.get_selected ()
self.__deselect (*items)
if self._last_direction == K_UP:
if len (items) > 0:
start = self.items.index (items[-1])
end = len (self.items) - 1
self.__select (*self.items[start:end])
else:
self.__deselect (*self.get_selected ())
self._last_direction = K_DOWN
self.set_cursor (self.items[-1], selected)
elif key == K_HOME:
if (mod & KMOD_SHIFT) and multiselect:
end = self.items.index (self._cursor)
items = self.get_selected ()
self.__deselect (*items)
if self._last_direction == K_DOWN:
if len (items) > 0:
end = self.items.index (items[0])
self.__select (*self.items[0:end])
else:
self.__deselect (*self.get_selected ())
self._last_direction = K_UP
self.set_cursor (self.items[0], selected)
elif key == K_SPACE:
if self._cursor.selected:
self.set_cursor (self._cursor, False)
else:
self.set_cursor (self._cursor, True)
elif (key == K_a) and mod & KMOD_CTRL and \
multiselect:
self._last_direction = K_DOWN
self.lock ()
notselected = filter (lambda x: x.selected == False, self.items)
for i in notselected:
i.selected = True
self.unlock ()
self.set_cursor (self.items[-1], True)
def _click (self, position):
"""S._click (...) -> None
Deals with mouse clicks.
"""
# Get the item and toggle the selection.
item = self.child.get_item_at_pos (position)
mods = key.get_mods ()
if not item:
return
if self.selectionmode != SELECTION_MULTIPLE:
self._last_direction = None
selection = self.get_selected ()
allowed = self.selectionmode != SELECTION_NONE
if mods & KMOD_CTRL:
if selection and item in selection:
selection.remove (item)
self.__deselect (*selection)
self.set_cursor (item, not item.selected and allowed)
else:
self.__deselect (*selection)
self.set_cursor (item, allowed)
return
if item.selected:
if mods & KMOD_CTRL:
self.set_cursor (item, False)
elif mods & KMOD_SHIFT:
# The item usually should be somewhere in that selection.
# Get its index and crop the selection according to that
# index.
selection = self.get_selected ()
if len (selection) > 1:
start = self.items.index (selection[0])
end = self.items.index (selection[-1])
index = selection.index (item)
if self._last_direction == K_UP:
if index > 0:
self.__deselect (*selection[0:index])
elif self._last_direction == K_DOWN:
if index > 0:
self.__deselect (*selection[index:end + 1])
self.set_cursor (item, True)
else:
self.set_cursor (selection[0], True)
else:
self._last_direction = None
self.__deselect (*self.get_selected ())
self.set_cursor (item, True)
else:
if mods & KMOD_CTRL:
self.set_cursor (item, True)
elif mods & KMOD_SHIFT:
# No click on an existing selection. Expand the current.
selection = self.get_selected ()
start = self.items.index (self.cursor)
index = self.items.index (item)
if len (selection) != 0:
if self._last_direction == K_DOWN:
start = self.items.index (selection[0])
end = self.items.index (selection[-1])
if index < start:
self._last_direction = K_UP
self.__deselect (*selection)
self.__select (*self.items[index:start])
elif index > end:
self.__select (*self.items[end:index])
elif self._last_direction == K_UP:
start = self.items.index (selection[0])
end = self.items.index (selection[-1])
if index > end:
self._last_direction = K_DOWN
self.__deselect (*selection)
self.__select (*self.items[end + 1:index])
elif index < start:
self.__select (*self.items[index:start])
else:
start = self.items.index (selection[0])
if index > start:
self.__select (*self.items[start:index])
self._last_direction = K_DOWN
else:
self.__select (*self.items[index:start])
self._last_direction = K_UP
self.set_cursor (item, True)
else:
self._last_direction = None
self.__deselect (*self.get_selected ())
self.set_cursor (item, True)
def notify (self, event):
"""S.notify (...) -> None
Notifies the ScrolledList about an event.
"""
if not self.sensitive:
return
if self.focus and (event.signal == SIG_KEYDOWN):
if len (self.items) > 0:
self._navigate (event.data.key, event.data.mod)
event.handled = True
elif event.signal == SIG_MOUSEDOWN:
eventarea = self.rect_to_client ()
if eventarea.collidepoint (event.data.pos):
for c in self.controls:
c.notify (event)
if not event.handled:
self.focus = True
self.run_signal_handlers (SIG_MOUSEDOWN, event.data)
if event.data.button == 1:
self._click (event.data.pos)
else:
ScrolledWindow.notify (self, event)
event.handled = True
else:
ScrolledWindow.notify (self, event)
selectionmode = property (lambda self: self._selectionmode,
lambda self, var: self.set_selectionmode (var),
doc = "The selection mode for the ScrolledList.")
spacing = property (lambda self: self._spacing,
lambda self, var: self.set_spacing (var),
doc = "Additional spacing to place between the items.")
items = property (lambda self: self._itemcollection,
lambda self, var: self.set_items (var),
doc = "The item collection of the ScrolledList.")
cursor = property (lambda self: self._cursor,
doc = "The item, which is currently focused.")
| bsd-2-clause | -6,080,596,617,264,534,000 | 38.160136 | 79 | 0.560447 | false |
elezar/fortran-beautifier | fparser/tests/test_api.py | 1 | 3401 | """
Test parsing of whole fortran files; 'blackbox' tests here.
"""
from fparser import api
import sys
from os.path import abspath, join, dirname
def test_use_module():
d = dirname(__file__)
sources = [join(d,'modfile.f95'), join(d,'funcfile.f95')]
file_to_parse = sources[1]
    tree = api.parse(file_to_parse, isfree=True, isstrict=False, source_only=sources)
def test_dimension_attr():
source_str = '''
subroutine foo
integer, dimension( -10 : 10, - 2147483648 : 2147483648) :: a( -2 : 2, 1000000 : 1000001 )
real, dimension(-20:20, 100:113, - 512 : 713) :: b
end
'''
tree = api.parse(source_str, isfree=True, isstrict=False)
subr = tree.a.external_subprogram['foo']
avar = subr.a.variables['a']
assert avar.dimension == [('-10', '10'), ('- 2147483648', '2147483648')]
assert avar.bounds == [('-2', '2'), ('1000000', '1000001')]
assert avar.shape == ['4', '1']
bvar = subr.a.variables['b']
print(bvar.dimension)
print(bvar.shape)
print(bvar)
assert bvar.dimension == [('-20', '20'), ('100', '113'), ('- 512', '713')]
assert bvar.shape == ['40', '13', '1225']
def test_provides():
source_str = '''
module mod1
implicit none
integer, parameter :: GP = 6
integer :: a,b,c,d,e
! module_provides = {GP,a,b,c,d,e}
! use_provides = {}
end module mod1
module mod2
implicit none
integer, parameter :: SP = 5
real :: a,b,c
! module_provides = {SP,a,b,c}
! use_provides = {}
end module mod2
module mod3
use mod1
implicit none
integer, parameter :: DP = 0
! module_provides = {DP}
! use_provides = {GP,a,b,c,d,e}
end module mod3
module mod4
use mod2
implicit none
! module_provides = {}
! use_provides = {SP,a,b,c}
end module mod4
module mod5
use mod3, only: lGP => GP, a,b,e
use mod4, only: a2 => a, b2 => b
implicit none
integer, parameter :: FP = 1000
integer(kind=kind(0)) :: dummy
parameter (dummy = 20)
integer, private :: x,y,z
! module_provides = {FP, dummy}
! use_provides = {lGP, a, b, e, a2, b2}
end module mod5
module mod6
use mod5, qgp => lgp
implicit none
! module_provides = {}
! use_provides = {FP, dummy, a2, b2, qgp, a, b, e}
end module mod6
'''
    # PY2to3: here keys from a dictionary are tested. These are not guaranteed to be in a consistent order
# Therefore these are now sorted before comparison
tree = api.parse(source_str, isfree=True, isstrict=False)
mod5 = tree.a.module['mod5']
mod6 = tree.a.module['mod6']
assert sorted(list(mod5.a.module_provides.keys())) == sorted(['fp', 'dummy'])
assert sorted(list(mod5.a.use_provides.keys())) == sorted(['a', 'b', 'e', 'a2', 'b2', 'lgp'])
assert sorted(list(mod6.a.module_provides.keys())) == sorted([])
assert sorted(list(mod6.a.use_provides.keys())) == sorted(['fp', 'dummy', 'b', 'e', 'qgp', 'a2', 'a', 'b2'])
assert mod6.a.use_provides['qgp'].name == 'gp'
def test_walk():
source_str = '''\
! before foo
subroutine foo
integer i, r
do i = 1,100
r = r + 1
end do
! after end do
end subroutine foo
'''
tree = api.parse(source_str, isfree=True, isstrict=False, ignore_comments=False)
for stmt, depth in api.walk(tree, 1):
print(depth, stmt.item)
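if __name__ == '__main__':
    # Minimal manual runner (a convenience sketch; the suite is normally
    # collected and run by pytest). test_use_module() relies on the fixture
    # files modfile.f95 and funcfile.f95 shipped alongside this module.
    test_use_module()
    test_dimension_attr()
    test_provides()
    test_walk()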
| mit | 5,374,607,897,788,353,000 | 27.341667 | 112 | 0.586298 | false |
root-mirror/root | tutorials/roofit/rf513_wsfactory_tools.py | 11 | 3106 | ## \file
## \ingroup tutorial_roofit
## \notebook -nodraw
## Organization and simultaneous fits: illustration use of ROOT.RooCustomizer and
## ROOT.RooSimWSTool interface in factory workspace tool in a complex standalone B physics example
##
## \macro_code
##
## \date February 2018
## \authors Clemens Lange, Wouter Verkerke (C++ version)
import ROOT
w = ROOT.RooWorkspace("w")
# Build a complex example pdf
# -----------------------------------------------------------
# Make signal model for CPV: A bmixing decay function in t (convoluted with a triple Gaussian resolution model)
# times a Gaussian function in the reconstructed mass
w.factory(
"PROD::sig( BMixDecay::sig_t( dt[-20,20], mixState[mixed=1,unmix=-1], tagFlav[B0=1,B0bar=-1], "
"tau[1.54], dm[0.472], w[0.05], dw[0], "
"AddModel::gm({GaussModel(dt,biasC[-10,10],sigmaC[0.1,3],dterr[0.01,0.2]), "
"GaussModel(dt,0,sigmaT[3,10]), "
"GaussModel(dt,0,20)},{fracC[0,1],fracT[0,1]}), "
"DoubleSided ), "
"Gaussian::sig_m( mes[5.20,5.30], mB0[5.20,5.30], sigmB0[0.01,0.05] ))")
# Make background component: A plain decay function in t times an Argus
# function in the reconstructed mass
w.factory("PROD::bkg( Decay::bkg_t( dt, tau, gm, DoubleSided), "
"ArgusBG::bkg_m( mes, 5.291, k[-100,-10]))")
# Make composite model from the signal and background component
w.factory("SUM::model( Nsig[5000,0,10000]*sig, NBkg[500,0,10000]*bkg )")
# Example of RooSimWSTool interface
# ------------------------------------------------------------------
# Introduce a flavour tagging category tagCat as observable with 4 states corresponding
# to 4 flavour tagging techniques with different performance that require different
# parameterizations of the fit model
#
# ROOT.RooSimWSTool operation:
# - Make 4 clones of model (one for each tagCat state); each clone gains an
# individual copy of the parameters w, dw and biasC. The other parameters remain common
# - Make a simultaneous pdf of the 4 clones assigning each to the appropriate
# state of the tagCat index category
# ROOT.RooSimWSTool is interfaced as meta-type SIMCLONE in the factory. The $SplitParam()
# argument maps to the SplitParam() named argument in the
# ROOT.RooSimWSTool constructor
w.factory(
"SIMCLONE::model_sim( model, $SplitParam({w,dw,biasC},tagCat[Lep,Kao,NT1,NT2]))")
# Example of RooCustomizer interface
# -------------------------------------------------------------------
#
# Class ROOT.RooCustomizer makes clones of existing pdfs with certain prescribed
# modifications (branch of leaf node replacements)
#
# Here we take our model (the original before ROOT.RooSimWSTool modifications)
# and request that the parameter w (the mistag rate) is replaced with
# an expression-based function that calculates w in terms of the Dilution
# parameter D that is defined D = 1-2*w
# Make a clone model_D of original 'model' replacing 'w' with
# 'expr('0.5-D/2',D[0,1])'
w.factory("EDIT::model_D(model, w=expr('0.5-D/2',D[0,1]) )")
# Print workspace contents
w.Print()
# Make workspace visible on command line
ROOT.gDirectory.Add(w)
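# Illustrative follow-up (an assumption, not part of the original tutorial):
# the customized pdfs can be retrieved back from the workspace for fitting
# or event generation.
model_sim = w.pdf("model_sim")
model_D = w.pdf("model_D")
print("Retrieved pdfs: {0}, {1}".format(model_sim.GetName(), model_D.GetName()))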
| lgpl-2.1 | -7,665,077,572,355,572,000 | 39.337662 | 111 | 0.672569 | false |
ray-project/ray | python/ray/tune/function_runner.py | 1 | 22498 | import logging
import os
import sys
import time
import inspect
import shutil
import threading
import traceback
import uuid
from functools import partial
from numbers import Number
from six.moves import queue
from ray.util.debug import log_once
from ray.tune import TuneError, session
from ray.tune.trainable import Trainable, TrainableUtil
from ray.tune.result import (DEFAULT_METRIC, TIME_THIS_ITER_S,
RESULT_DUPLICATE, SHOULD_CHECKPOINT)
from ray.tune.utils import (detect_checkpoint_function, detect_config_single,
detect_reporter)
from ray.tune.utils.trainable import with_parameters # noqa: F401
logger = logging.getLogger(__name__)
# Time between FunctionRunner checks when fetching
# new results after signaling the reporter to continue
RESULT_FETCH_TIMEOUT = 0.2
ERROR_REPORT_TIMEOUT = 10
ERROR_FETCH_TIMEOUT = 1
NULL_MARKER = ".null_marker"
TEMP_MARKER = ".temp_marker"
class FuncCheckpointUtil:
"""Utility class holding various function-checkpointing mechanisms.
The two special modes are "null" and "temporary" checkpoints.
*Null Checkpoints*
-------------------
Null checkpoints are generated when a trial is being saved
but a checkpoint has not been created. In this case,
a marker is set, indicating that the checkpoint is null.
When restoring from an null checkpoint, the FunctionRunner
will detect this and *not* restore from any checkpoint at all.
*Temporary Checkpoints*
-----------------------
Temporary checkpoints are generated when a trial is being
restored from a prior in-memory checkpoint. In this case, a marker
will be set indicating that a checkpoint is temporary.
Upon termination of the trial, temporary checkpoints
will be removed. We cannot remove them any earlier because
the loading of checkpoints is non-deterministic.
If "save" is called on a trial whose most recent checkpoint
is temporary, "create_perm_checkpoint" will be called. This
copies the temporary checkpoint to a permanent checkpoint directory.
"""
@staticmethod
def mk_null_checkpoint_dir(logdir):
"""Indicate that the given checkpoint doesn't have state."""
checkpoint_dir = TrainableUtil.make_checkpoint_dir(
logdir, index=-1, override=True)
open(os.path.join(checkpoint_dir, NULL_MARKER), "a").close()
return checkpoint_dir
@staticmethod
def mk_temp_checkpoint_dir(logdir):
"""Indicate that the checkpoint is only for restoration."""
temporary_checkpoint_dir = TrainableUtil.make_checkpoint_dir(
logdir, index="tmp" + uuid.uuid4().hex[:6], override=True)
open(os.path.join(temporary_checkpoint_dir, TEMP_MARKER), "a").close()
return temporary_checkpoint_dir
@staticmethod
def is_temp_checkpoint_dir(checkpoint_dir):
"""Checks for the temp checkpoint marker."""
return os.path.exists(os.path.join(checkpoint_dir, TEMP_MARKER))
@staticmethod
def is_null_checkpoint(checkpoint_dir):
"""Checks for the empty checkpoint marker."""
return os.path.exists(os.path.join(checkpoint_dir, NULL_MARKER))
@staticmethod
def create_perm_checkpoint(checkpoint_dir, logdir, step):
"""Copies temporary checkpoint to a permanent checkpoint directory."""
checkpoint_dir = os.path.abspath(checkpoint_dir)
temporary_marker = os.path.join(checkpoint_dir, TEMP_MARKER)
assert os.path.exists(temporary_marker), (
"Should not be calling this method on a permanent checkpoint.")
os.remove(temporary_marker)
perm_checkpoint_dir = TrainableUtil.make_checkpoint_dir(
logdir, index=step, override=True)
shutil.rmtree(perm_checkpoint_dir)
shutil.copytree(checkpoint_dir, perm_checkpoint_dir)
assert not os.path.exists(
os.path.join(perm_checkpoint_dir, TEMP_MARKER))
return perm_checkpoint_dir
class StatusReporter:
"""Object passed into your function that you can report status through.
Example:
>>> def trainable_function(config, reporter):
>>> assert isinstance(reporter, StatusReporter)
>>> reporter(timesteps_this_iter=1)
"""
def __init__(self,
result_queue,
continue_semaphore,
end_event,
trial_name=None,
trial_id=None,
logdir=None):
self._queue = result_queue
self._last_report_time = None
self._continue_semaphore = continue_semaphore
self._end_event = end_event
self._trial_name = trial_name
self._trial_id = trial_id
self._logdir = logdir
self._last_checkpoint = None
self._fresh_checkpoint = False
def reset(self, trial_name=None, trial_id=None, logdir=None):
self._trial_name = trial_name
self._trial_id = trial_id
self._logdir = logdir
self._last_checkpoint = None
self._fresh_checkpoint = False
def __call__(self, _metric=None, **kwargs):
"""Report updated training status.
Pass in `done=True` when the training job is completed.
Args:
kwargs: Latest training result status.
Example:
>>> reporter(mean_accuracy=1, training_iteration=4)
>>> reporter(mean_accuracy=1, training_iteration=4, done=True)
Raises:
StopIteration: A StopIteration exception is raised if the trial has
been signaled to stop.
"""
assert self._last_report_time is not None, (
"StatusReporter._start() must be called before the first "
"report __call__ is made to ensure correct runtime metrics.")
if _metric:
kwargs[DEFAULT_METRIC] = _metric
# time per iteration is recorded directly in the reporter to ensure
# any delays in logging results aren't counted
report_time = time.time()
if TIME_THIS_ITER_S not in kwargs:
kwargs[TIME_THIS_ITER_S] = report_time - self._last_report_time
self._last_report_time = report_time
# add results to a thread-safe queue
self._queue.put(kwargs.copy(), block=True)
# This blocks until notification from the FunctionRunner that the last
# result has been returned to Tune and that the function is safe to
# resume training.
self._continue_semaphore.acquire()
# If the trial should be terminated, exit gracefully.
if self._end_event.is_set():
self._end_event.clear()
sys.exit(0)
def make_checkpoint_dir(self, step):
checkpoint_dir = TrainableUtil.make_checkpoint_dir(
self.logdir, index=step)
logger.debug("Making checkpoint dir at %s", checkpoint_dir)
return checkpoint_dir
def set_checkpoint(self, checkpoint, is_new=True):
"""Sets the checkpoint to be returned upon get_checkpoint.
If this is a "new" checkpoint, it will notify Tune
(via has_new_checkpoint). Otherwise, it will NOT notify Tune.
"""
if isinstance(checkpoint, str):
try:
TrainableUtil.find_checkpoint_dir(checkpoint)
except FileNotFoundError:
logger.error("Checkpoint must be created with path given from "
"make_checkpoint_dir.")
raise
self._last_checkpoint = checkpoint
if is_new:
self._fresh_checkpoint = True
def has_new_checkpoint(self):
return self._fresh_checkpoint
def get_checkpoint(self):
self._fresh_checkpoint = False
return self._last_checkpoint
def _start(self):
self._last_report_time = time.time()
@property
def logdir(self):
return self._logdir
@property
def trial_name(self):
"""Trial name for the corresponding trial of this Trainable."""
return self._trial_name
@property
def trial_id(self):
"""Trial id for the corresponding trial of this Trainable."""
return self._trial_id
class _RunnerThread(threading.Thread):
"""Supervisor thread that runs your script."""
def __init__(self, entrypoint, error_queue):
threading.Thread.__init__(self)
self._entrypoint = entrypoint
self._error_queue = error_queue
self.daemon = True
def run(self):
try:
self._entrypoint()
except StopIteration:
logger.debug(
("Thread runner raised StopIteration. Interperting it as a "
"signal to terminate the thread without error."))
except Exception as e:
logger.exception("Runner Thread raised error.")
try:
# report the error but avoid indefinite blocking which would
# prevent the exception from being propagated in the unlikely
# case that something went terribly wrong
err_tb_str = traceback.format_exc()
self._error_queue.put(
err_tb_str, block=True, timeout=ERROR_REPORT_TIMEOUT)
except queue.Full:
logger.critical(
("Runner Thread was unable to report error to main "
"function runner thread. This means a previous error "
"was not processed. This should never happen."))
raise e
class FunctionRunner(Trainable):
"""Trainable that runs a user function reporting results.
    Checkpoint/restore is supported only through the ``checkpoint_dir``
    argument of the wrapped function."""
_name = "func"
def setup(self, config):
# Semaphore for notifying the reporter to continue with the computation
# and to generate the next result.
self._continue_semaphore = threading.Semaphore(0)
# Event for notifying the reporter to exit gracefully, terminating
# the thread.
self._end_event = threading.Event()
# Queue for passing results between threads
self._results_queue = queue.Queue(1)
# Queue for passing errors back from the thread runner. The error queue
# has a max size of one to prevent stacking error and force error
# reporting to block until finished.
self._error_queue = queue.Queue(1)
self._status_reporter = StatusReporter(
self._results_queue,
self._continue_semaphore,
self._end_event,
trial_name=self.trial_name,
trial_id=self.trial_id,
logdir=self.logdir)
self._last_result = {}
session.init(self._status_reporter)
self._runner = None
self._restore_tmpdir = None
self.temp_checkpoint_dir = None
def _trainable_func(self, config, reporter, checkpoint_dir):
"""Subclasses can override this to set the trainable func."""
raise NotImplementedError
def _start(self):
def entrypoint():
return self._trainable_func(self.config, self._status_reporter,
self._status_reporter.get_checkpoint())
# the runner thread is not started until the first call to _train
self._runner = _RunnerThread(entrypoint, self._error_queue)
# if not alive, try to start
self._status_reporter._start()
try:
self._runner.start()
except RuntimeError:
# If this is reached, it means the thread was started and is
# now done or has raised an exception.
pass
def step(self):
"""Implements train() for a Function API.
If the RunnerThread finishes without reporting "done",
Tune will automatically provide a magic keyword __duplicate__
along with a result with "done=True". The TrialRunner will handle the
result accordingly (see tune/trial_runner.py).
"""
if self._runner and self._runner.is_alive():
# if started and alive, inform the reporter to continue and
# generate the next result
self._continue_semaphore.release()
else:
self._start()
result = None
while result is None and self._runner.is_alive():
# fetch the next produced result
try:
result = self._results_queue.get(
block=True, timeout=RESULT_FETCH_TIMEOUT)
except queue.Empty:
pass
# if no result were found, then the runner must no longer be alive
if result is None:
# Try one last time to fetch results in case results were reported
# in between the time of the last check and the termination of the
# thread runner.
try:
result = self._results_queue.get(block=False)
except queue.Empty:
pass
# check if error occurred inside the thread runner
if result is None:
# only raise an error from the runner if all results are consumed
self._report_thread_runner_error(block=True)
# Under normal conditions, this code should never be reached since
# this branch should only be visited if the runner thread raised
# an exception. If no exception were raised, it means that the
# runner thread never reported any results which should not be
# possible when wrapping functions with `wrap_function`.
raise TuneError(
("Wrapped function ran until completion without reporting "
"results or raising an exception."))
else:
if not self._error_queue.empty():
logger.warning(
("Runner error waiting to be raised in main thread. "
"Logging all available results first."))
# This keyword appears if the train_func using the Function API
# finishes without "done=True". This duplicates the last result, but
# the TrialRunner will not log this result again.
if RESULT_DUPLICATE in result:
new_result = self._last_result.copy()
new_result.update(result)
result = new_result
self._last_result = result
if self._status_reporter.has_new_checkpoint():
result[SHOULD_CHECKPOINT] = True
return result
def execute(self, fn):
return fn(self)
def save(self, checkpoint_path=None):
if checkpoint_path:
raise ValueError(
"Checkpoint path should not be used with function API.")
checkpoint = self._status_reporter.get_checkpoint()
state = self.get_state()
if not checkpoint:
state.update(iteration=0, timesteps_total=0, episodes_total=0)
# We drop a marker here to indicate that the checkpoint is empty
checkpoint = FuncCheckpointUtil.mk_null_checkpoint_dir(self.logdir)
parent_dir = checkpoint
elif isinstance(checkpoint, dict):
parent_dir = TrainableUtil.make_checkpoint_dir(
self.logdir, index=self.training_iteration)
elif isinstance(checkpoint, str):
parent_dir = TrainableUtil.find_checkpoint_dir(checkpoint)
# When the trainable is restored, a temporary checkpoint
# is created. However, when saved, it should become permanent.
# Ideally, there are no save calls upon a temporary
# checkpoint, but certain schedulers might.
if FuncCheckpointUtil.is_temp_checkpoint_dir(parent_dir):
relative_path = os.path.relpath(checkpoint, parent_dir)
parent_dir = FuncCheckpointUtil.create_perm_checkpoint(
checkpoint_dir=parent_dir,
logdir=self.logdir,
step=self.training_iteration)
checkpoint = os.path.abspath(
os.path.join(parent_dir, relative_path))
else:
raise ValueError("Provided checkpoint was expected to have "
"type (str, dict). Got {}.".format(
type(checkpoint)))
checkpoint_path = TrainableUtil.process_checkpoint(
checkpoint, parent_dir, state)
return checkpoint_path
def save_to_object(self):
checkpoint_path = self.save()
obj = TrainableUtil.checkpoint_to_object(checkpoint_path)
return obj
def load_checkpoint(self, checkpoint):
# This should be removed once Trainables are refactored.
if "tune_checkpoint_path" in checkpoint:
del checkpoint["tune_checkpoint_path"]
# If there does not exist a checkpoint, we will not restore
# from it and will remove the marker.
if FuncCheckpointUtil.is_null_checkpoint(checkpoint):
return
# By informing that this checkpoint is not new,
# we will not return the checkpoint path
# as a new checkpoint.
self._status_reporter.set_checkpoint(checkpoint, is_new=False)
def restore_from_object(self, obj):
self.temp_checkpoint_dir = (FuncCheckpointUtil.mk_temp_checkpoint_dir(
self.logdir))
checkpoint_path = TrainableUtil.create_from_pickle(
obj, self.temp_checkpoint_dir)
self.restore(checkpoint_path)
def cleanup(self):
# Trigger thread termination
self._end_event.set()
self._continue_semaphore.release()
# Do not wait for thread termination here.
        # If everything stayed in sync properly, this should never happen.
if not self._results_queue.empty():
logger.warning(
("Some results were added after the trial stop condition. "
"These results won't be logged."))
# Check for any errors that might have been missed.
self._report_thread_runner_error()
session.shutdown()
if self.temp_checkpoint_dir is not None and os.path.exists(
self.temp_checkpoint_dir):
shutil.rmtree(self.temp_checkpoint_dir)
logger.debug("Clearing temporary checkpoint: %s",
self.temp_checkpoint_dir)
def reset_config(self, new_config):
if self._runner and self._runner.is_alive():
self._end_event.set()
self._continue_semaphore.release()
# Wait for thread termination so it is save to re-use the same
# actor.
thread_timeout = int(
os.environ.get("TUNE_FUNCTION_THREAD_TIMEOUT_S", 2))
self._runner.join(timeout=thread_timeout)
if self._runner.is_alive():
# Did not finish within timeout, reset unsuccessful.
return False
self._runner = None
self._last_result = {}
self._status_reporter.reset(
trial_name=self.trial_name,
trial_id=self.trial_id,
logdir=self.logdir)
return True
def _report_thread_runner_error(self, block=False):
try:
err_tb_str = self._error_queue.get(
block=block, timeout=ERROR_FETCH_TIMEOUT)
raise TuneError(
("Trial raised an exception. Traceback:\n{}".format(err_tb_str)
))
except queue.Empty:
pass
def wrap_function(train_func, durable=False, warn=True):
inherit_from = (FunctionRunner, )
if hasattr(train_func, "__mixins__"):
inherit_from = train_func.__mixins__ + inherit_from
if durable:
from ray.tune import DurableTrainable
inherit_from = (DurableTrainable, ) + inherit_from
func_args = inspect.getfullargspec(train_func).args
use_checkpoint = detect_checkpoint_function(train_func)
use_config_single = detect_config_single(train_func)
use_reporter = detect_reporter(train_func)
if not any([use_checkpoint, use_config_single, use_reporter]):
# use_reporter is hidden
raise ValueError(
"Unknown argument found in the Trainable function. "
"The function args must include a 'config' positional "
"parameter. Any other args must be 'checkpoint_dir'. "
"Found: {}".format(func_args))
if use_config_single and not use_checkpoint:
if log_once("tune_function_checkpoint") and warn:
logger.warning(
"Function checkpointing is disabled. This may result in "
"unexpected behavior when using checkpointing features or "
"certain schedulers. To enable, set the train function "
"arguments to be `func(config, checkpoint_dir=None)`.")
class ImplicitFunc(*inherit_from):
_name = train_func.__name__ if hasattr(train_func, "__name__") \
else "func"
def _trainable_func(self, config, reporter, checkpoint_dir):
if not use_checkpoint and not use_reporter:
fn = partial(train_func, config)
elif use_checkpoint:
fn = partial(train_func, config, checkpoint_dir=checkpoint_dir)
else:
fn = partial(train_func, config, reporter)
def handle_output(output):
if not output:
return
elif isinstance(output, dict):
reporter(**output)
elif isinstance(output, Number):
reporter(_metric=output)
else:
raise ValueError(
"Invalid return or yield value. Either return/yield "
"a single number or a dictionary object in your "
"trainable function.")
output = None
if inspect.isgeneratorfunction(train_func):
for output in fn():
handle_output(output)
else:
output = fn()
handle_output(output)
# If train_func returns, we need to notify the main event loop
# of the last result while avoiding double logging. This is done
# with the keyword RESULT_DUPLICATE -- see tune/trial_runner.py.
reporter(**{RESULT_DUPLICATE: True})
return output
return ImplicitFunc
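# Illustrative usage sketch (an assumption; `my_train_func` is hypothetical
# and not part of this module):
#
#     def my_train_func(config, checkpoint_dir=None):
#         for step in range(10):
#             yield {"mean_loss": config["lr"] / (step + 1)}
#
#     TrainableCls = wrap_function(my_train_func)
#     # TrainableCls can now be passed to tune.run() like any Trainable.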
| apache-2.0 | 4,263,119,803,300,873,700 | 37.196944 | 79 | 0.610721 | false |
ktan2020/legacy-automation | win/Lib/site-packages/runsnakerun/pstatsadapter.py | 1 | 2843 | import wx, sys, os, logging
log = logging.getLogger( __name__ )
from squaremap import squaremap
from runsnakerun import pstatsloader
class PStatsAdapter(squaremap.DefaultAdapter):
percentageView = False
total = 0
TREE = pstatsloader.TREE_CALLS
def value(self, node, parent=None):
if isinstance(parent, pstatsloader.PStatGroup):
if parent.cumulative:
return node.cumulative / parent.cumulative
else:
return 0
elif parent is None:
return node.cumulative
return parent.child_cumulative_time(node)
def label(self, node):
if isinstance(node, pstatsloader.PStatGroup):
return '%s / %s' % (node.filename, node.directory)
if self.percentageView and self.total:
time = '%0.2f%%' % round(node.cumulative * 100.0 / self.total, 2)
else:
time = '%0.3fs' % round(node.cumulative, 3)
return '%s@%s:%s [%s]' % (node.name, node.filename, node.lineno, time)
def empty(self, node):
if node.cumulative:
return node.local / float(node.cumulative)
return 0.0
def parents(self, node):
"""Determine all parents of node in our tree"""
return [
parent for parent in
getattr( node, 'parents', [] )
if getattr(parent, 'tree', self.TREE) == self.TREE
]
color_mapping = None
def background_color(self, node, depth):
"""Create a (unique-ish) background color for each node"""
if self.color_mapping is None:
self.color_mapping = {}
color = self.color_mapping.get(node.key)
if color is None:
depth = len(self.color_mapping)
red = (depth * 10) % 255
green = 200 - ((depth * 5) % 200)
blue = (depth * 25) % 200
self.color_mapping[node.key] = color = wx.Colour(red, green, blue)
return color
def SetPercentage(self, percent, total):
"""Set whether to display percentage values (and total for doing so)"""
self.percentageView = percent
self.total = total
def filename( self, node ):
"""Extension to squaremap api to provide "what file is this" information"""
if not node.directory:
# TODO: any cases other than built-ins?
return None
if node.filename == '~':
# TODO: look up C/Cython/whatever source???
return None
return os.path.join(node.directory, node.filename)
class DirectoryViewAdapter(PStatsAdapter):
"""Provides a directory-view-only adapter for PStats objects"""
TREE = pstatsloader.TREE_FILES
def children(self, node):
if isinstance(node, pstatsloader.PStatGroup):
return node.children
return []
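# Illustrative usage sketch (an assumption; the profile filename below is
# hypothetical):
#
#     loader = pstatsloader.PStatsLoader('profile.pstats')
#     adapter = PStatsAdapter()
#     adapter.SetPercentage(True, loader.tree.cumulative)
#     # The adapter is then handed to a squaremap.SquareMap control for display.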
| mit | -1,293,131,002,213,503,000 | 33.670732 | 83 | 0.590925 | false |
suutari/shoop | shuup/notify/typology.py | 1 | 4215 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
import enumfields
from django import forms
from django.apps import apps
from django.core.exceptions import ObjectDoesNotExist
from django.utils.text import camel_case_to_spaces
from django.utils.translation import ugettext_lazy as _
class Type(object):
name = None
identifier = None
def get_field(self, **kwargs):
"""
Get a Django form field for this type.
The kwargs are passed directly to the field
constructor.
:param kwargs: Kwargs for field constructor
:type kwargs: dict
:return: Form field
:rtype: django.forms.Field
"""
return forms.CharField(**kwargs)
def unserialize(self, value):
return self.get_field().to_python(value)
def validate(self, value):
return self.get_field().validate(value)
def is_coercible_from(self, other_type):
return self.identifier == other_type.identifier
class _String(Type):
pass
class _Number(Type):
pass
class Boolean(Type):
name = _("Boolean")
identifier = "boolean"
class Integer(_Number):
name = _("Integer Number")
identifier = "integer"
def get_field(self, **kwargs):
return forms.IntegerField(**kwargs)
class Decimal(_Number):
name = _("Decimal Number")
identifier = "decimal"
def get_field(self, **kwargs):
return forms.DecimalField(**kwargs)
class Text(_String):
name = _("Text")
identifier = "text"
def is_coercible_from(self, other_type):
# All variables can be used as raw text
return True
class Language(_String):
name = _("Language")
identifier = "language"
class Email(_String):
name = _("Email Address")
identifier = "email"
def get_field(self, **kwargs):
return forms.EmailField(**kwargs)
class URL(_String):
name = _("URL Address")
identifier = "url"
def get_field(self, **kwargs):
return forms.URLField(**kwargs)
class Phone(_String):
name = _("Phone Number")
identifier = "phone"
class Model(Type):
model_label = None
identifier = "model"
@property
def name(self):
return self.get_model()._meta.verbose_name
def __init__(self, model_label):
"""
:param model_label: Model label in Django `app.Model` format (e.g. `shuup.Order`)
:type model_label: str
"""
self.model_label = model_label
def unserialize(self, value):
if isinstance(value, self.get_model()):
return value
try:
return self.get_model().objects.get(pk=value)
except ObjectDoesNotExist:
return None
def is_coercible_from(self, other_type):
return isinstance(other_type, Model) and self.get_model() == other_type.get_model()
def get_model(self):
"""
:rtype: django.db.models.Model
"""
return apps.get_model(self.model_label)
def get_field(self, **kwargs):
kwargs.setdefault("queryset", self.get_model().objects.all())
return forms.ModelChoiceField(**kwargs)
class Enum(Type):
enum_class = None
identifier = "enum"
@property
def name(self):
if self.enum_class:
return camel_case_to_spaces(self.enum_class.__class__.__name__)
return u"<Invalid Enum>"
def __init__(self, enum_class):
self.enum_class = enum_class
assert issubclass(enum_class, enumfields.Enum), "%r is not an enum" % enum_class
def unserialize(self, value):
if isinstance(value, self.enum_class):
return value
try:
return self.enum_class(value)
except ValueError:
try:
return self.enum_class(int(value))
except ValueError:
pass
return None
def get_field(self, **kwargs):
return enumfields.EnumField(self.enum_class).formfield(**kwargs)
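# Illustrative usage sketch (an assumption, not part of the module):
#
#     email_type = Email()
#     field = email_type.get_field(label="Recipient")  # -> forms.EmailField
#     Text().is_coercible_from(email_type)   # True: anything renders as text
#     email_type.is_coercible_from(Text())   # False: identifiers differ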
| agpl-3.0 | 6,544,060,943,024,604,000 | 22.948864 | 91 | 0.61637 | false |
rex/Buffy | scripts/fetch-all.py | 1 | 3681 | #!/usr/bin/env python
import requests
import json
import os
from fabric.colors import red, green, blue, cyan, magenta
from time import sleep
api_key = 'be25c8a17fdcd031f54dd3417ee4d083'
api_base_url = 'http://api.themoviedb.org/3'
def pretty(data):
print ""
print json.dumps(data, sort_keys=True, indent=2)
print ""
def keyval(key, val):
print(cyan(key + ': ') + green(val))
def pretty_keys(source, keys):
for key in keys:
keyval(key, source.get(key))
def pretty_flat_keys(source, keys):
    # Build a single-line "key: value" summary for the given keys.
    return ", ".join("{0}: {1}".format(key, source.get(key)) for key in keys)
def getJSON(endpoint):
sleep(0.25)
return requests.get(api_base_url + endpoint + '?api_key=' + api_key).json()
def header(text):
print ""
print blue(text.upper().center(140, '='))
def subheader(text):
print ""
print magenta(text.upper().center(140, '-'))
def accent(text):
print red(text.upper().center(40))
# Util Methods
def readJSONFile(path):
print blue(" > Reading JSON File: {0}".format(path))
with open(path) as data_file:
return json.load(data_file)
def writeJSONFile(path, data):
print blue(" > Writing JSON File: {0}".format(path))
with open(path, 'w+') as outfile:
json.dump(data, outfile, sort_keys=True, indent=2)
# Cached API Methods
def getObjectByID(type, id, endpoint):
type_map = {
'episode': 'episodes',
'person': 'people',
'season': 'seasons',
'show': 'shows'
}
cache_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../data/{0}/{1}.json'.format(type_map.get(type), str(id))))
# print "OBJECT FILE PATH: {0}".format(cache_path)
if os.path.exists(cache_path):
return readJSONFile(cache_path)
else:
object_data = getJSON(endpoint)
writeJSONFile(cache_path, object_data)
return object_data
def getShow(id):
return getObjectByID('show', id, '/tv/{0}'.format(id))
def getSeason(show_id, season_number):
return getObjectByID('season', '{0}-{1}'.format(show_id, season_number), '/tv/{0}/season/{1}'.format(show_id, season_number))
def getEpisode(show_id, season_number, episode_number):
return getObjectByID('episode', '{0}-{1}-{2}'.format(show_id, season_number, episode_number), '/tv/{0}/season/{1}/episode/{2}'.format(show_id, season_number, episode_number))
def getPerson(id):
return getObjectByID('person', id, '/person/{0}'.format(id))
# show = getJSON('/tv/95')
# show = getObjectByID('show', 95)
# CURRENT_SHOW_ID = 95 # BUFFY
CURRENT_SHOW_ID = 2426 # ANGEL
show = getShow(CURRENT_SHOW_ID)
pretty_keys(show, ['name', 'first_air_date', 'number_of_seasons', 'number_of_episodes', 'overview'])
seasons = show.get('seasons', {})
for season in seasons:
season_id = season.get('id')
season_number = season.get('season_number')
header('season ' + str(season_number))
season_data = getSeason(CURRENT_SHOW_ID, season_number)
episodes = season_data.get('episodes', [])
keyval('Episode Count', len(episodes))
for episode in episodes:
subheader('episode ' + str(episode.get('episode_number')))
pretty_keys(episode, ['id', 'name'])
episode_number = episode.get('episode_number')
episode_data = getEpisode(CURRENT_SHOW_ID, season_number, episode_number)
for person in episode.get('crew', []):
getPerson(person.get('id'))
for person in episode.get('guest_stars', []):
getPerson(person.get('id'))
# crew = episode.get('crew', [])
# cast = episode.get('guest_stars', [])
# accent('crew')
# for crew_member in crew:
# keyval(crew_member.get('job'), crew_member.get('name'))
# accent('cast')
# for cast_member in cast:
# pretty_keys(cast_member, ['name'])
# pretty(season_data)
# break
# pretty(raw)
| mit | -3,976,199,772,057,554,000 | 25.292857 | 176 | 0.655257 | false |
mkhuthir/catkin_ws | src/patrol/src/shapes.py | 1 | 2024 | #!/usr/bin/python
# BEGIN ALL
import rospy
from smach import State,StateMachine
from time import sleep
class Drive(State):
def __init__(self, distance):
State.__init__(self, outcomes=['success'])
self.distance = distance
def execute(self, userdata):
print 'Driving', self.distance
sleep(1)
return 'success'
class Turn(State):
def __init__(self, angle):
State.__init__(self, outcomes=['success'])
self.angle = angle
def execute(self, userdata):
print 'Turning', self.angle
sleep(1)
return 'success'
if __name__ == '__main__':
# BEGIN PART_2
triangle = StateMachine(outcomes=['success'])
with triangle:
StateMachine.add('SIDE1', Drive(1), transitions={'success':'TURN1'})
StateMachine.add('TURN1', Turn(120), transitions={'success':'SIDE2'})
StateMachine.add('SIDE2', Drive(1), transitions={'success':'TURN2'})
StateMachine.add('TURN2', Turn(120), transitions={'success':'SIDE3'})
StateMachine.add('SIDE3', Drive(1), transitions={'success':'success'})
# END PART_2
square = StateMachine(outcomes=['success'])
with square:
StateMachine.add('SIDE1', Drive(1), transitions={'success':'TURN1'})
StateMachine.add('TURN1', Turn(90), transitions={'success':'SIDE2'})
StateMachine.add('SIDE2', Drive(1), transitions={'success':'TURN2'})
StateMachine.add('TURN2', Turn(90), transitions={'success':'SIDE3'})
StateMachine.add('SIDE3', Drive(1), transitions={'success':'TURN3'})
StateMachine.add('TURN3', Turn(90), transitions={'success':'SIDE4'})
StateMachine.add('SIDE4', Drive(1), transitions={'success':'success'})
# BEGIN PART_3
shapes = StateMachine(outcomes=['success'])
with shapes:
StateMachine.add('TRIANGLE', triangle, transitions={'success':'SQUARE'})
StateMachine.add('SQUARE', square, transitions={'success':'success'})
shapes.execute()
# END PART_3
# END ALL
| gpl-3.0 | -2,396,415,034,136,510,000 | 32.733333 | 80 | 0.623518 | false |
bbchung/clighter | misc/highlighting.py | 1 | 8169 | import vim
import string
import clighter_helper
from clang import cindex
OCCURRENCES_PRI = -11
SYNTAX_PRI = -12
CUSTOM_SYNTAX_GROUP = {
cindex.CursorKind.INCLUSION_DIRECTIVE: 'cligherInclusionDirective',
cindex.CursorKind.MACRO_INSTANTIATION: 'clighterMacroInstantiation',
cindex.CursorKind.VAR_DECL: 'clighterVarDecl',
cindex.CursorKind.STRUCT_DECL: 'clighterStructDecl',
cindex.CursorKind.UNION_DECL: 'clighterUnionDecl',
cindex.CursorKind.CLASS_DECL: 'clighterClassDecl',
cindex.CursorKind.ENUM_DECL: 'clighterEnumDecl',
cindex.CursorKind.PARM_DECL: 'clighterParmDecl',
cindex.CursorKind.FUNCTION_DECL: 'clighterFunctionDecl',
cindex.CursorKind.FUNCTION_TEMPLATE: 'clighterFunctionDecl',
cindex.CursorKind.CXX_METHOD: 'clighterFunctionDecl',
cindex.CursorKind.CONSTRUCTOR: 'clighterFunctionDecl',
cindex.CursorKind.DESTRUCTOR: 'clighterFunctionDecl',
cindex.CursorKind.FIELD_DECL: 'clighterFieldDecl',
cindex.CursorKind.ENUM_CONSTANT_DECL: 'clighterEnumConstantDecl',
cindex.CursorKind.NAMESPACE: 'clighterNamespace',
cindex.CursorKind.CLASS_TEMPLATE: 'clighterClassDecl',
cindex.CursorKind.TEMPLATE_TYPE_PARAMETER: 'clighterTemplateTypeParameter',
cindex.CursorKind.TEMPLATE_NON_TYPE_PARAMETER: 'clighterTemplateNoneTypeParameter',
cindex.CursorKind.TYPE_REF: 'clighterTypeRef', # class ref
cindex.CursorKind.NAMESPACE_REF: 'clighterNamespaceRef', # namespace ref
cindex.CursorKind.TEMPLATE_REF: 'clighterTemplateRef', # template class ref
cindex.CursorKind.DECL_REF_EXPR:
{
cindex.TypeKind.FUNCTIONPROTO: 'clighterDeclRefExprCall', # function call
cindex.TypeKind.ENUM: 'clighterDeclRefExprEnum', # enum ref
cindex.TypeKind.TYPEDEF: 'clighterTypeRef', # ex: cout
},
cindex.CursorKind.MEMBER_REF: 'clighterDeclRefExprCall', # ex: designated initializer
cindex.CursorKind.MEMBER_REF_EXPR:
{
cindex.TypeKind.UNEXPOSED: 'clighterMemberRefExprCall', # member function call
},
}
def clear_all():
__vim_clear_match_pri(OCCURRENCES_PRI, SYNTAX_PRI)
hl_window.symbol = None
vim.current.window.vars['clighter_hl'] = [
-1, [], []] # [hl_tick, syntax_range, occurrences_range]
def clear_occurrences():
__vim_clear_match_pri(OCCURRENCES_PRI)
vim.current.window.vars['clighter_hl'][2] = []
hl_window.symbol = None
def config_win_context(init):
if not init and 'clighter_hl' in vim.current.window.vars:
return
clear_all()
vim.current.window.vars['clighter_hl'] = [
-1, [], []] # [hl_tick, syntax_range, symbol_range]
def hl_window(clang_service, do_occurrences):
cc = clang_service.get_cc(vim.current.buffer.name)
if not cc:
return
parse_tick = cc.parse_tick
tu = cc.current_tu
if not tu:
return
top = string.atoi(vim.eval("line('w0')"))
bottom = string.atoi(vim.eval("line('w$')"))
height = bottom - top + 1
symbol = None
if vim.eval('g:ClighterOccurrences') == '1':
vim_cursor = clighter_helper.get_vim_cursor(tu)
symbol = clighter_helper.get_vim_symbol(vim_cursor)
occurrences_range = w_range = [top, bottom]
syntax_range = [max(top - height, 1), min(
bottom + height, len(vim.current.buffer))]
config_win_context(False)
if vim.current.window.vars['clighter_hl'][0] < parse_tick:
clear_all()
else:
if not __is_subrange(
w_range, list(
vim.current.window.vars['clighter_hl'][1])):
__vim_clear_match_pri(SYNTAX_PRI)
else:
syntax_range = None
if not __is_subrange(
w_range, list(
vim.current.window.vars['clighter_hl'][2])) or (
hl_window.symbol and (
not symbol or symbol != hl_window.symbol)):
clear_occurrences()
else:
occurrences_range = None
if not do_occurrences:
occurrences_range = None
hl_window.symbol = symbol
__do_highlight(
tu,
vim.current.buffer.name,
syntax_range,
occurrences_range,
parse_tick)
def __do_highlight(tu, file_name, syntax_range, occurrences_range, tick):
file = tu.get_file(file_name)
if not syntax_range and (not hl_window.symbol or not occurrences_range):
return
if syntax_range:
vim.current.window.vars['clighter_hl'][1] = syntax_range
if occurrences_range and hl_window.symbol:
vim.current.window.vars['clighter_hl'][2] = occurrences_range
union_range = __union(syntax_range, occurrences_range)
location1 = cindex.SourceLocation.from_position(
tu, file, line=union_range[0], column=1)
location2 = cindex.SourceLocation.from_position(
tu, file, line=union_range[1] + 1, column=1)
tokens = tu.get_tokens(
extent=cindex.SourceRange.from_locations(
location1,
location2))
syntax = {}
occurrence = {'clighterOccurrences':[]}
for token in tokens:
if token.kind.value != 2: # no keyword, comment
continue
t_cursor = token.cursor
t_cursor._tu = tu
# t_cursor = cindex.Cursor.from_location(
# tu,
# cindex.SourceLocation.from_position(
# tu, file,
# token.location.line,
# token.location.column
# )
# )
        pos = [token.location.line, token.location.column, len(token.spelling)]
if t_cursor.spelling == token.spelling and __is_in_range(token.location.line, syntax_range):
group = __get_syntax_group(t_cursor.kind, t_cursor.type.kind)
if group:
if not syntax.has_key(group):
syntax[group] = []
syntax[group].append(pos)
if hl_window.symbol and __is_in_range(token.location.line, occurrences_range):
t_symbol = clighter_helper.get_semantic_symbol(t_cursor)
if t_symbol and token.spelling == t_symbol.spelling and t_symbol == hl_window.symbol:
occurrence['clighterOccurrences'].append(pos)
cmd = "call MatchIt({0}, {1})".format(syntax, SYNTAX_PRI)
vim.command(cmd)
cmd = "call MatchIt({0}, {1})".format(occurrence , OCCURRENCES_PRI)
vim.command(cmd)
vim.current.window.vars['clighter_hl'][0] = tick
def __get_default_syn(cursor_kind):
if cursor_kind.is_preprocessing():
return 'clighterPrepro'
elif cursor_kind.is_declaration():
return 'clighterDecl'
elif cursor_kind.is_reference():
return 'clighterRef'
else:
return None
def __get_syntax_group(cursor_kind, type_kind):
group = __get_default_syn(cursor_kind)
custom = CUSTOM_SYNTAX_GROUP.get(cursor_kind)
if custom:
if cursor_kind == cindex.CursorKind.DECL_REF_EXPR:
custom = custom.get(type_kind)
if custom:
group = custom
        elif cursor_kind == cindex.CursorKind.MEMBER_REF_EXPR:
custom = custom.get(type_kind)
if custom:
group = custom
else:
group = 'clighterMemberRefExprVar'
else:
group = custom
if group in vim.eval('g:clighter_highlight_blacklist'):
return None
return group
def __vim_clear_match_pri(*priorities):
cmd = "call s:clear_match_pri({0})".format(list(priorities))
vim.command(cmd)
def __union(range1, range2):
if range1 and range2:
return [min(range1[0], range2[0]), max(range1[1], range2[1])]
elif range1 and not range2:
return range1
elif not range1 and range2:
return range2
else:
return None
def __is_in_range(value, range):
if not range:
return False
if value >= range[0] and value <= range[1]:
return True
return False
def __is_subrange(inner, outer):
if not inner:
return True
if not outer:
return False
if inner[0] < outer[0]:
return False
if inner[1] > outer[1]:
return False
return True
| gpl-3.0 | 3,283,609,485,844,957,000 | 29.595506 | 100 | 0.62933 | false |
ak64th/thug | docs/conf.py | 1 | 8332 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# thug documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import thug
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'thug'
copyright = u"2017, Elmer Yu"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = thug.__version__
# The full version, including alpha/beta/rc tags.
release = thug.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'thugdoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'thug.tex',
u'thug Documentation',
u'Elmer Yu', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'thug',
u'thug Documentation',
[u'Elmer Yu'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'thug',
u'thug Documentation',
u'Elmer Yu',
'thug',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| mit | 749,671,583,136,281,000 | 29.298182 | 76 | 0.702112 | false |
AlexandreAbraham/brainhack2013 | plot_craddock_spectral_clustering.py | 1 | 6073 | from brainhack.datasets import fetch_craddock_2012_test
from nilearn.input_data import NiftiMasker
from joblib import Memory
from sklearn.feature_extraction.image import grid_to_graph
import nibabel
from sklearn.cluster import spectral_clustering
from scipy.sparse import coo_matrix
from brainhack.metrics.cluster import dice
import numpy as np
from os.path import exists
import pylab as pl
dataset = fetch_craddock_2012_test()
masker = NiftiMasker(mask=dataset.mask)
masker.fit()
### Connectivity graph #######################################################
# In order to run spectral clustering, we need a connectivity graph of brain
# voxels. It can be built upon the mask.
mask_data = nibabel.load(dataset.mask).get_data().astype(bool)
connectivity = grid_to_graph(*mask_data.shape, mask=mask_data)
### Covariance estimator #####################################################
# We instantiate the estimator to use on all the subjects.
from brainhack.covariance.pearson import PearsonCorrelation
pearson = PearsonCorrelation(spatial=False)
pearson_spatial = PearsonCorrelation(spatial=True)
### Compute similarity matrices ##############################################
from brainhack.covariance.multi_covariance import MultiCovariance
if not exists('covariances.npy'):
multi_cov = MultiCovariance(pearson, mask=dataset.mask, standardize=True,
detrend=True, memory=Memory(cachedir='nilearn_cache'), memory_level=1)
# Should add low_pass = 0.1 ?
multi_cov.fit(dataset.func, connectivity=connectivity)
np.save('covariances.npy', multi_cov.covariances_)
covariances = np.load('covariances.npy')
'''
if not exists('covariances_spatial.npy'):
multi_cov_spatial = MultiCovariance(pearson_spatial, mask=dataset.mask,
standardize=True, detrend=True,
memory=Memory(cachedir='nilearn_cache'), memory_level=1)
multi_cov_spatial.fit(dataset.func, connectivity=connectivity)
np.save('covariances_spatial.npy', multi_cov_spatial.covariances_)
covariances_spatial = np.load('covariances_spatial.npy')
'''
### Reproduce figure #1 of Craddock paper ####################################
'''
rt = np.hstack([c.data for c in covariances])
rs = np.hstack([c.data for c in covariances_spatial])
# Split points that have same signs and opposite signs
rr = rt * rs
rt_plus = rt[rr >= 0]
rs_plus = rs[rr >= 0]
rt_minus = rt[rr < 0]
rs_minus = rs[rr < 0]
pl.figure(figsize=(8, 8))
pl.scatter(rt_plus, rs_plus)
pl.scatter(rt_minus, rs_minus, c='r')
pl.xlim(-1., 1.)
pl.ylim(-1., 1.)
pl.savefig('craddock_figure_1.png')
'''
### Helper Function ##########################################################
def clustering_to_connectivity(labels):
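    """Expand a flat cluster-label vector into a sparse co-membership matrix
    whose (i, j) entry is 1 when voxels i and j share a label, so that two
    partitions can be compared with the DICE metric below."""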
r = []
c = []
for label in np.unique(labels):
indices = np.where(labels == label)[0]
for i in indices:
for j in indices:
r.append(i)
c.append(j)
r = np.hstack(r)
c = np.hstack(c)
return coo_matrix((np.ones(len(r)), (r, c)))
### First group strategy: simply average similarity matrices #################
group_similarity_matrix = np.mean(covariances, axis=0)
row, col = group_similarity_matrix.nonzero()
# Threshold the matrix
data = group_similarity_matrix.data
data[data < .5] = 0.
group_similarity_matrix = coo_matrix((data, (row, col)))
# Run clustering
group_maps = spectral_clustering(group_similarity_matrix,
n_clusters=50, assign_labels='discretize')
nibabel.save(masker.inverse_transform(group_maps + 1),
'group_maps_1.nii.gz')
### We try to reproduce the DICE coefficient comparison ######################
n_subs = len(covariances)
cluster_dices = []
cluster_sizes = range(50, 350, 50) # range(0, 1050, 50):
for n_clusters in cluster_sizes:
print('n_clusters %d' % n_clusters)
if not exists('dice_%d.npy' % n_clusters):
dices = []
for i in range(n_subs):
print('%d/%d' % (i + 1, n_subs))
sub_cov = covariances[i].copy()
group_similarity_matrix = np.mean(
covariances[np.delete(np.arange(n_subs), i)], axis=0)
row, col = group_similarity_matrix.nonzero()
# Threshold the matrix
data = group_similarity_matrix.data
data[data < .5] = 0.
group_similarity_matrix = coo_matrix((data, (row, col)))
sub_data = sub_cov.data
sub_data[sub_data < .5] = 0.
sub_matrix = coo_matrix((sub_data, (row, col)))
# Run clustering
group_maps = spectral_clustering(group_similarity_matrix,
n_clusters=n_clusters, assign_labels='discretize')
sub_map = spectral_clustering(sub_matrix,
n_clusters=n_clusters, assign_labels='discretize')
dices.append(dice(clustering_to_connectivity(group_maps),
clustering_to_connectivity(sub_map)))
np.save('dice_%d.npy' % n_clusters, dices)
dices = np.load('dice_%d.npy' % n_clusters)
cluster_dices.append(dices)
pl.boxplot(cluster_dices, positions=cluster_sizes, widths=30)
pl.xlim(cluster_sizes[0] - 30, cluster_sizes[-1] + 30)
pl.show()
### Second group strategy ####################################################
'''
clusters = []
for i, cov in enumerate(covariances):
# Threshold covariance
data = cov.data
data[data < .5] = 0.
cov = coo_matrix((data, (cov.row, cov.col)))
labels = spectral_clustering(cov, n_clusters=50,
assign_labels='discretize')
clusters.append(clustering_to_connectivity(labels))
group_similarity_matrix = np.mean(clusters, axis=0)
row, col = group_similarity_matrix.nonzero()
# Threshold the matrix
data = group_similarity_matrix.data
data[data < .5] = 0.
group_similarity_matrix = coo_matrix((data, (row, col)))
# Run clustering
group_maps = spectral_clustering(group_similarity_matrix,
n_clusters=50, assign_labels='discretize')
nibabel.save(masker.inverse_transform(group_maps + 1),
'group_maps_2.nii.gz')
'''
| bsd-3-clause | 3,756,520,446,555,159,000 | 31.650538 | 78 | 0.628684 | false |
andydavidson/pypeer | lib/pypeer/PeeringDBClient.py | 1 | 1386 | #!/usr/bin/env python
import urllib2
class PeeringDBClient:
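    """Minimal read-only client for the PeeringDB beta REST API: each method
    fetches one record (asn, ixlan, ix org, or point of contact) by numeric
    id and returns the raw JSON response body as a string."""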
def __init__(self):
return
def asn(self, asn):
try:
loadasn = int(asn)
except ValueError:
print("asn is not an integer")
return
load_url = "https://beta.peeringdb.com/api/asn/" + str(loadasn)
file_url = urllib2.urlopen(load_url)
return file_url.read()
def ixlan(self, ixlanid):
try:
loadixlan = int(ixlanid)
except ValueError:
print("ixlan id is not an integer")
return
load_url = "https://beta.peeringdb.com/api/ixlan/" + str(loadixlan)
file_url = urllib2.urlopen(load_url)
return file_url.read()
def ixorg(self,ixorgid):
try:
loadixorg = int(ixorgid)
except ValueError:
print("ixorg id is not an integer")
return
load_url = "https://beta.peeringdb.com/api/ix/" + str(loadixorg)
file_url = urllib2.urlopen(load_url)
return file_url.read()
def poc(self,pocid):
try:
loadpocid = int(pocid)
except ValueError:
print("point of contact id is not an integer")
return
load_url = "https://beta.peeringdb.com/api/poc/" + str(loadpocid)
file_url = urllib2.urlopen(load_url)
return file_url.read() | mit | 1,337,078,391,708,123,600 | 27.895833 | 75 | 0.556277 | false |
pet-finder/pet-finder | app/config.py | 1 | 3052 | #!/usr/bin/python2.5
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Storage for configuration settings. Settings can be global or specific
to a subdomain, and their values can be of any JSON-encodable type."""
from google.appengine.ext import db
import UserDict, model, random, simplejson
class ConfigEntry(db.Model):
"""An application configuration setting, identified by its key_name."""
value = db.StringProperty(default='')
def get(name, default=None):
"""Gets a configuration setting."""
config = ConfigEntry.get_by_key_name(name)
if config:
return simplejson.loads(config.value)
return default
def get_or_generate(name):
"""Gets a configuration setting, or sets it to a random 32-byte value
encoded in hexadecimal if it doesn't exist. Use this function when you
need a persistent cryptographic secret unique to the application."""
random_hex = ''.join('%02x' % random.randrange(256) for i in range(32))
ConfigEntry.get_or_insert(key_name=name, value=simplejson.dumps(random_hex))
return get(name)
def set(**kwargs):
"""Sets configuration settings."""
db.put(ConfigEntry(key_name=name, value=simplejson.dumps(value))
for name, value in kwargs.items())
def get_for_subdomain(subdomain, name, default=None):
"""Gets a configuration setting for a particular subdomain. Looks for a
setting specific to the subdomain, then falls back to a global setting."""
value = get(subdomain + ':' + name)
if value is not None:
return value
return get(name, default)
def set_for_subdomain(subdomain, **kwargs):
"""Sets configuration settings for a particular subdomain. When used
with get_for_subdomain, has the effect of overriding global settings."""
subdomain = str(subdomain) # need an 8-bit string, not Unicode
set(**dict((subdomain + ':' + key, value) for key, value in kwargs.items()))
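
# Illustrative usage only (names such as 'language' and the 'haiti' subdomain
# are made-up examples, not part of this module):
#
#   set(language='en')                         # global setting
#   set_for_subdomain('haiti', language='fr')  # subdomain override
#   get_for_subdomain('haiti', 'language')     # -> 'fr' (override wins)
#   get_for_subdomain('japan', 'language')     # -> 'en' (global fallback)
#   Configuration('haiti').language            # attribute access, see below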
class Configuration(UserDict.DictMixin):
def __init__(self, subdomain):
self.subdomain = subdomain
def __getattr__(self, name):
return self[name]
def __getitem__(self, name):
"""Gets a configuration setting for this subdomain. Looks for a
subdomain-specific setting, then falls back to a global setting."""
return get_for_subdomain(self.subdomain, name)
def keys(self):
entries = model.filter_by_prefix(
ConfigEntry.all(), self.subdomain + ':')
return [entry.key().name().split(':', 1)[1] for entry in entries]
| apache-2.0 | 7,669,520,440,358,117,000 | 36.219512 | 80 | 0.697903 | false |
robbiehinch/cabel | view/configurator.py | 1 | 22005 | from xml.dom import minidom
import locale
import string
import wx.lib.colourselect as cSel
import wx.lib.filebrowsebutton as filebrowse
import wx
import tools.config
import os
class CabelConfigDialog(wx.Dialog):
def __init__(self, parent, cfg):
"""
"""
title = 'Preferences'
wx.Dialog.__init__(self, parent, -1, title,
style=wx.DEFAULT_DIALOG_STYLE|wx.RESIZE_BORDER)
buttonSizer = wx.BoxSizer(wx.HORIZONTAL)
buttonOK = wx.Button(self, wx.ID_OK, "&OK")
buttonApply = wx.Button(self, wx.ID_APPLY)
buttonCancel = wx.Button(self, wx.ID_CANCEL, "&Cancel")
buttonSizer.Add(buttonOK, 0, wx.ALL | wx.ALIGN_RIGHT, 5)
buttonSizer.Add(buttonApply, 0, wx.ALL | wx.ALIGN_RIGHT, 5)
buttonSizer.Add(buttonCancel, 0, wx.ALL | wx.ALIGN_RIGHT, 5)
notebookSizer = wx.BoxSizer(wx.HORIZONTAL)
notebook = wx.Notebook(self, -1, size=(450,300), style = wx.NO_BORDER)
self.configParser = ConfigParser(cfg, notebook, parent)
self.configParser.fillCategoryNotebook()
notebookSizer.Add(notebook, 1, wx.EXPAND)
dialog_sizer = wx.BoxSizer(wx.VERTICAL)
dialog_sizer.Add(notebookSizer, 1, wx.EXPAND | wx.ALL, 2)
dialog_sizer.Add(buttonSizer, 0, wx.ALL | wx.ALIGN_BOTTOM | wx.ALIGN_RIGHT)
if "__WXMAC__" in wx.PlatformInfo:
self.SetSizer(dialog_sizer)
else:
self.SetSizerAndFit(dialog_sizer)
self.Centre()
self.Bind(wx.EVT_BUTTON, self.onOK, id = wx.ID_OK)
self.Bind(wx.EVT_BUTTON, self.onApply, id = wx.ID_APPLY)
self.Bind(wx.EVT_BUTTON, self.onCancel, id = wx.ID_CANCEL)
# Enable ToolTips Globally
wx.ToolTip_Enable(True)
wx.ToolTip_SetDelay(10)
def onCancel(self, evt):
# Set focus to Cancel button so values in panels are set
evt.GetEventObject().SetFocus()
self.EndModal(0)
def onOK(self, evt):
# Set focus to Ok button so values in panels are set
evt.GetEventObject().SetFocus()
self.configParser.SaveVars()
self.Destroy()
#self.EndModal(0)
def onApply(self, evt):
# Set focus to Apply button so values in panels are set
evt.GetEventObject().SetFocus()
self.configParser.SaveVars()
class ConfigParser(object):
"""
"""
def __init__(self, config, parent, worksp):
"""
        Standard constructor.
@type config: tools.config.Config
@param config: The config model to be parsed.
"""
self.config = config
self.parent = parent
self.workSpace = worksp
self.varValueDict = {}
def fillCategoryNotebook(self):
"""
"""
csoundCat = self.getCategoryPanel(self.config.csound)
viewCat = self.getCategoryPanel(self.config.view)
dirCat = self.getCategoryPanel(self.config.directories)
self.parent.AddPage(csoundCat, 'Csound')
self.parent.AddPage(viewCat, 'User Interface')
self.parent.AddPage(dirCat, 'Directories')
def getCategoryPanel(self, category):
"""
"""
panel = wx.Panel(self.parent)
panelSizer = wx.BoxSizer(wx.VERTICAL)
groups = {}
panelList = []
# group configVars
for configVar in category.vars:
setting = configVar.setting
if setting and setting.dialog:
if setting.group != None:
# Group
if groups.has_key(setting.group):
groups[setting.group].append(configVar)
else:
groups[setting.group] = [configVar]
panelList.append(setting.group)
else:
# None Group
panelList.append(configVar)
# get panels
for i in panelList:
if isinstance(i, tools.config.ConfigVar) or isinstance(i, tools.config.ListVar):
# None Group Panel
ctrlPanel = self.getControl(i, panel)
else:
# Group Panel
ctrlPanel = self.getGroupPanel(groups[i], panel)
panelSizer.Add(ctrlPanel, 0, wx.EXPAND|wx.ALL, 5)
panel.SetSizer(panelSizer)
return panel
def getGroupPanel(self, vars, parent):
"""
"""
groupBoxPanel = wx.Panel(parent)
groupBox = wx.StaticBox(groupBoxPanel, -1, vars[0].setting.group)
groupBoxSizer = wx.StaticBoxSizer(groupBox, wx.VERTICAL)
for groupVar in vars:
ctrl = self.getControl(groupVar, groupBoxPanel)
groupBoxSizer.Add(ctrl, 1, wx.EXPAND|wx.RIGHT, 3)
groupBoxPanelSizer = wx.BoxSizer()
groupBoxPanelSizer.Add(groupBoxSizer, 1, wx.ALL|wx.EXPAND, 6)
groupBoxPanel.SetSizer(groupBoxPanelSizer)
return groupBoxPanel
def getControl(self, var, parent):
"""
"""
type = var.valueType
if type == 'int':
return self.getIntVarCtrl(parent, var)
elif type == 'float':
return self.getFloatVarCtrl(parent, var)
elif type == 'boolean':
return self.getBooleanVarCtrl(parent, var)
elif type == 'colour':
return self.getColourVarCtrl(parent, var)
elif type == 'string' and var.nodeType == minidom.Node.TEXT_NODE:
return self.getStringVarMultiLineCtrl(parent, var)
elif type == 'string' and var.nodeType == minidom.Node.ATTRIBUTE_NODE:
return self.getStringVarSingleLineCtrl(parent, var)
elif type.find('list') >= 0:
return self.getListVarCtrl(parent, var)
elif type.find('enum') >= 0:
return self.getEnumVarCtrl(parent, var)
else:
# should never happen!
return self.getStringVarSingleLineCtrl(parent, var)
def SaveVars(self):
"""
"""
# Save each var in the varValDict
for varValTuple in self.varValueDict.items():
varValTuple[0].setVal(varValTuple[1])
if varValTuple[0].setting.updateFunc:
varValTuple[0].setting.updateView(self.workSpace)
# Clear varValDict
self.varValueDict = {}
def getColourVarCtrl(self, parent, var, id = -1, size = wx.Size(60,20)):
"""
"""
colourCtrlSizer = ConfigColourSelect(parent, id, self, var, size)
return colourCtrlSizer
def getListVarCtrl(self, parent, var):
"""
"""
return wx.Panel(parent)
def getEnumVarCtrl(self, parent, var):
"""
"""
return wx.Panel(parent)
def getIntVarCtrl(self, parent, var, id = -1, size = wx.DefaultSize):
"""
"""
return ConfigInt(parent, id, self, var, size)
def getFloatVarCtrl(self, parent, var, id = -1, size = wx.DefaultSize):
"""
"""
return ConfigFloat(parent, id, self, var, size)
def getStringVarSingleLineCtrl(self, parent, var, id = -1, size = wx.DefaultSize):
"""
"""
len = var.setting.ctrlLength
if len > 0:
size = wx.Size(len, wx.DefaultSize.GetHeight())
return ConfigStringSingleLine(parent, id, self, var, size)
def getStringVarMultiLineCtrl(self, parent, var, id = -1, size = wx.DefaultSize):
"""
"""
len = var.setting.ctrlLength
if len > 0 and var.setting.ctrlHeight:
hei = var.setting.ctrlHeight * 16
size = wx.Size(len, hei)
return ConfigStringMultiLine(parent, id, self, var, size)
def getBooleanVarCtrl(self, parent, var, id = -1, size = wx.DefaultSize):
"""
"""
return ConfigBooleanCheck(parent, id, self, var, size)
# --------------------------------------------------------------
class ConfigControl(object):
"""
ConfigControl.
Controller for the configurator input controlls.
It's an 'abstract' class. Inherited classes can implement 'abstract' invalidateVarVal(self, val)
for validation of the input to the control.
@type parent: wx.Panel
@ivar parent: The root panel of a wx.Notebook (category) page.
@type configParser: configurator.ConfigParser
@ivar configParser: ConfigParser in charge of this control.
@type configVar: tools.config.ConfigVar
    @ivar configVar: ConfigVar represented through this control.
@type ctrl: wx.Control
    @ivar ctrl: The input control; must be set in the constructor of the inherited class.
"""
def __init__(self, parent, configParser, var):
"""
        Standard constructor.
@type parent: wx.Panel
@param parent: The root panel of a wx.Notebook (category) page.
@type configParser: configurator.ConfigParser
@param configParser: ConfigParser in charge of this control.
@type var: config.ConfigVar
        @param var: ConfigVar represented through this control.
"""
self.parent = parent
self.configParser = configParser
self.configVar = var
self.ctrl = None
self._defaultCtrlBgrColour = None
def updateVarValDict(self, val):
"""
Checks if the value val for the ConfigVar has changed, if it is valid
and caches the configVar/value pair in the responsible configParser.
@type val: self.configVar.valueType
@param val: The value returned by the control.
"""
# Check if ConfigVar Value has changed
if self.hasChanged(val):
# Check Config Var Value input
warning = self.invalidateVarVal(val)
# No Warning
if not warning:
self.configParser.varValueDict[self.configVar] = val
if self._defaultCtrlBgrColour:
self.paintCtrlWhite(None)
# Warning
else:
                # Paint controls red
self.paintTheInvalidRed()
                # Repaint the control white once it regains focus
self.ctrl.Bind(wx.EVT_SET_FOCUS, self.paintCtrlWhite)
# Show warning dialog
d = wx.MessageDialog(self.parent, str(warning) + "\n Please correct the red marked Config Var.", "ConfigVar Error",
wx.OK | wx.ICON_ERROR)
d.ShowModal()
d.Destroy()
def hasChanged(self, val):
"""
        Checks if the value of the configVar has changed.
@type val: self.configVar.valueType
@param val: The value returned by the control.
"""
oldVal = self.configVar.getVal()
return not (oldVal == val)
def invalidateVarVal(self, val):
"""
Method for invalidation of the configVar's value.
@rtype: boolean/string
@return: A warning message if the Value is invalid, else False.
"""
setting = self.configVar.setting
# Mustn't a null value
if setting.notNone:
if val == None or val == '':
return "The value can't be empty!"
return False
def paintCtrlWhite(self, event):
"""
Paints the control as it was originally.
"""
self.ctrl.SetOwnBackgroundColour(self._defaultCtrlBgrColour)
self.ctrl.Refresh()
def paintTheInvalidRed(self):
"""
Paints the control red.
"""
self._defaultCtrlBgrColour = self.ctrl.GetBackgroundColour()
#self.ctrl.SetValue(str(self.configVar.getVal()))
self.ctrl.SetOwnBackgroundColour("red")
self.ctrl.Refresh()
def onOK(self, event):
"""
        Calls the callback method of the CabelConfig control if the accelerator for
        the OK or Apply buttons (ALT-O or ALT-A) is pressed.
"""
if event.AltDown():
if event.GetKeyCode() == ord('O') or event.GetKeyCode() == ord('A'):
self.callback(event)
event.Skip()
def callback(self, event):
"""
Virtual method of ConfigControl.
Should call the updateVarValDict with the value of the control in order
to process the input of the control.
"""
pass
# --------------------------------------------------------------
class ConfigFloat(ConfigControl, wx.BoxSizer):
"""
"""
def __init__(self, parent, id, configParser, configFloatVar, _size):
"""
"""
# Call superclass constructors
ConfigControl.__init__(self, parent, configParser, configFloatVar)
wx.BoxSizer.__init__(self, wx.HORIZONTAL)
# Controls
self.ctrl = wx.TextCtrl(self.parent, id, str(self.configVar.getVal()), size = _size)
textLabel = wx.StaticText(self.parent, -1, self.configVar.setting.displayName)
# Tooltip
textLabel.SetToolTip(wx.ToolTip(self.configVar.setting.description))
self.ctrl.SetToolTip(wx.ToolTip(self.configVar.setting.description))
        # Add controls to the sizer
self.Add(textLabel, 0, wx.ALIGN_LEFT | wx.ALIGN_CENTER_VERTICAL, 5)
self.Add(self.ctrl, 1, wx.EXPAND | wx.ALL, 5)
# Binding
self.ctrl.Bind(wx.EVT_KILL_FOCUS, self.callback)
self.ctrl.Bind(wx.EVT_KEY_DOWN, self.onOK)
def callback(self, event):
"""
"""
val = unicode(self.ctrl.GetValue()).strip()
if val.find(',') >= 0:
val = val.replace(',', '.')
if val.find('.') == 0:
val = '0' + val
if val != unicode(self.ctrl.GetValue()).strip():
self.ctrl.SetValue(val)
self.ctrl.Refresh()
self.updateVarValDict(val)
def invalidateVarVal(self, val):
"""
"""
superInvalidation = ConfigControl.invalidateVarVal(self, val)
if not superInvalidation:
try:
value = float(val)
return False
except ValueError:
return 'The value must be a float type.'
else:
return superInvalidation
# --------------------------------------------------------------
class ConfigInt(ConfigControl, wx.BoxSizer):
"""
"""
def __init__(self, parent, id, configParser, configIntVar, _size):
"""
"""
# Call superclass constructors
ConfigControl.__init__(self, parent, configParser, configIntVar)
wx.BoxSizer.__init__(self, wx.HORIZONTAL)
# Controls
self.ctrl = wx.TextCtrl(self.parent, id, str(self.configVar.getVal()), size = _size)
textLabel = wx.StaticText(self.parent, -1, self.configVar.setting.displayName)
# Tooltip
textLabel.SetToolTip(wx.ToolTip(self.configVar.setting.description))
self.ctrl.SetToolTip(wx.ToolTip(self.configVar.setting.description))
        # Add controls to the sizer
self.Add(textLabel, 0, wx.ALIGN_LEFT | wx.ALIGN_CENTER_VERTICAL, 5)
self.Add(self.ctrl, 1, wx.EXPAND | wx.ALL, 5)
# Binding
self.ctrl.Bind(wx.EVT_KILL_FOCUS, self.callback)
self.ctrl.Bind(wx.EVT_KEY_DOWN, self.onOK)
def callback(self, event):
"""
"""
textVal = unicode(self.ctrl.GetValue()).strip()
self.updateVarValDict(textVal)
def invalidateVarVal(self, val):
"""
"""
superInvalidation = ConfigControl.invalidateVarVal(self, val)
if not superInvalidation:
if val.isdigit() or (val[0:1] in ['-', '+'] and val[1:].isdigit()):
return False
else:
return 'The value must be an integer type.'
else:
return superInvalidation
# --------------------------------------------------------------
class ConfigStringMultiLine(ConfigControl, wx.BoxSizer):
"""
"""
def __init__(self, parent, id, configParser, configStringTextNodeVar, _size):
"""
"""
# Call superclass constructors
ConfigControl.__init__(self, parent, configParser, configStringTextNodeVar)
wx.BoxSizer.__init__(self, wx.VERTICAL)
# Controls
self.ctrl = wx.TextCtrl(self.parent, id, self.configVar.getVal(), size = _size, style=wx.TE_MULTILINE)
textLabel = wx.StaticText(self.parent, -1, self.configVar.setting.displayName)
# Tooltip
self.ctrl.SetToolTip(wx.ToolTip(self.configVar.setting.description))
# Add to sizer
self.Add(textLabel, 0, wx.ALIGN_LEFT, 5)
self.Add(self.ctrl, 1, wx.EXPAND | wx.ALL, 5)
# Binding
self.ctrl.Bind(wx.EVT_KILL_FOCUS, self.callback)
self.ctrl.Bind(wx.EVT_KEY_DOWN, self.onOK)
def callback(self, event):
"""
"""
textVal = unicode(self.ctrl.GetValue()).strip()
self.updateVarValDict(textVal)
# --------------------------------------------------------------
class ConfigStringSingleLine(ConfigControl, wx.BoxSizer):
"""
"""
def __init__(self, parent, id, configParser, configStringTextNodeVar, _size):
"""
"""
# Call superclass constructors
ConfigControl.__init__(self, parent, configParser, configStringTextNodeVar)
wx.BoxSizer.__init__(self, wx.HORIZONTAL)
# Controls
if self.configVar.setting.choose == 'file':
file = self.configVar.getVal()
#path = file[:file.rfind(os.path.sep)]
#file = file[file.rfind(os.path.sep):]
self.ctrl = filebrowse.FileBrowseButton(self.parent, -1, size=_size, labelText='', buttonText='Browse', toolTip=self.configVar.setting.description, startDirectory=file, initialValue=file, changeCallback=self.callback)
self.ctrl.textControl.Bind(wx.EVT_KILL_FOCUS, self.callback)
elif self.configVar.setting.choose == 'path':
self.ctrl = filebrowse.DirBrowseButton(self.parent, -1, size=_size, labelText='', toolTip=self.configVar.setting.description, startDirectory=os.path.join(os.getcwd(), self.configVar.getVal()), changeCallback=self.callback)
self.ctrl.textControl.Bind(wx.EVT_KILL_FOCUS, self.callback)
self.ctrl.SetValue(os.path.join(os.getcwd(), self.configVar.getVal()), 0)
else:
self.ctrl = wx.TextCtrl(self.parent, id, self.configVar.getVal(), size = _size)
self.ctrl.Bind(wx.EVT_KILL_FOCUS, self.callback)
textLabel = wx.StaticText(self.parent, -1, self.configVar.setting.displayName)
# Tooltip
textLabel.SetToolTip(wx.ToolTip(self.configVar.setting.description))
self.ctrl.SetToolTip(wx.ToolTip(self.configVar.setting.description))
        # Add controls to the sizer
self.Add(textLabel, 0, wx.ALIGN_LEFT | wx.ALIGN_CENTRE_VERTICAL)
self.Add(self.ctrl, 1, wx.EXPAND | wx.ALL, 5)
# Binding
self.ctrl.Bind(wx.EVT_KEY_DOWN, self.onOK)
def callback(self, event):
"""
"""
if not self.ctrl.HasCapture():
textVal = unicode(self.ctrl.GetValue()).strip()
self.updateVarValDict(textVal)
# --------------------------------------------------------------
class ConfigBooleanCheck(ConfigControl, wx.BoxSizer):
"""
"""
def __init__(self, parent, id, configParser, configBooleanVar, size):
"""
"""
# Call superclass constructors
ConfigControl.__init__(self, parent, configParser, configBooleanVar)
wx.BoxSizer.__init__(self, wx.HORIZONTAL)
# Controls
self.ctrl = wx.CheckBox(self.parent, id, self.configVar.setting.displayName, style = wx.ALIGN_RIGHT)
self.ctrl.SetValue(self.configVar.getVal())
# ToolTip
self.ctrl.SetToolTip(wx.ToolTip(self.configVar.setting.description))
# Binding
self.ctrl.Bind(wx.EVT_CHECKBOX, self.callback)
self.ctrl.Bind(wx.EVT_KEY_DOWN, self.onOK)
        # Add control to the sizer
self.Add(self.ctrl, 0, wx.ALIGN_LEFT)
def callback(self, event):
"""
"""
self.updateVarValDict(self.ctrl.GetValue())
class ConfigColourSelect(ConfigControl, wx.FlexGridSizer):
"""
"""
def __init__(self, parent, id, configParser, configColourVar, size,
label="", pos=wx.DefaultPosition, style=0):
"""
"""
# Call superclass constructors
ConfigControl.__init__(self, parent, configParser, configColourVar)
wx.FlexGridSizer.__init__(self, 1, 2)
# Controls
self.ctrl = cSel.ColourSelect(self.parent, id, label, self.configVar.Value, \
pos, size, callback = self._callback)
colourCtrlLabel = wx.StaticText(self.parent, -1, self.configVar.setting.displayName)
# ToolTips
self.ctrl.SetToolTip(wx.ToolTip(self.configVar.setting.description))
colourCtrlLabel.SetToolTip(wx.ToolTip(self.configVar.setting.description))
# Add Controls to Sizer
self.Add(colourCtrlLabel, 0, wx.ALIGN_LEFT | wx.ALIGN_CENTER_VERTICAL)
self.Add(self.ctrl, 0, wx.ALL, 3)
# Bindings
self.ctrl.Bind(wx.EVT_KEY_DOWN, self.onOK)
def _callback(self):
"""
"""
self.updateVarValDict(self.ctrl.GetColour())
def callback(self, event):
"""
"""
self._callback()
| gpl-2.0 | 5,448,971,613,829,644,000 | 34.377814 | 234 | 0.569916 | false |
itdagene-ntnu/quiz | quiz/main/models.py | 1 | 1039 | from django.db import models
class Quiz(models.Model):
title = models.CharField(max_length=200)
description = models.TextField()
start = models.DateTimeField()
end = models.DateTimeField()
def __unicode__(self):
return self.title
class Meta:
        verbose_name_plural = 'quizzes'
class Question(models.Model):
quiz = models.ForeignKey(Quiz)
text = models.TextField()
def __unicode__(self):
return self.text
class Choice(models.Model):
question = models.ForeignKey(Question)
correct_answer = models.BooleanField(default=False)
text = models.CharField(max_length=200)
def __unicode__(self):
return self.text
class Participant(models.Model):
email = models.EmailField(max_length=254)
phone_number = models.CharField(max_length=25)
def __unicode__(self):
return self.email
class Answer(models.Model):
question = models.ForeignKey(Question)
choice = models.ForeignKey(Choice)
participant = models.ForeignKey(Participant) | bsd-3-clause | -6,409,692,976,992,742,000 | 22.111111 | 55 | 0.6795 | false |
googleapis/googleapis-gen | google/cloud/talent/v4/talent-v4-py/google/cloud/talent_v4/__init__.py | 1 | 5445 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .services.company_service import CompanyServiceClient
from .services.company_service import CompanyServiceAsyncClient
from .services.completion import CompletionClient
from .services.completion import CompletionAsyncClient
from .services.event_service import EventServiceClient
from .services.event_service import EventServiceAsyncClient
from .services.job_service import JobServiceClient
from .services.job_service import JobServiceAsyncClient
from .services.tenant_service import TenantServiceClient
from .services.tenant_service import TenantServiceAsyncClient
from .types.common import BatchOperationMetadata
from .types.common import CompensationInfo
from .types.common import CustomAttribute
from .types.common import DeviceInfo
from .types.common import Location
from .types.common import RequestMetadata
from .types.common import ResponseMetadata
from .types.common import SpellingCorrection
from .types.common import TimestampRange
from .types.common import CommuteMethod
from .types.common import CompanySize
from .types.common import DegreeType
from .types.common import EmploymentType
from .types.common import HtmlSanitization
from .types.common import JobBenefit
from .types.common import JobCategory
from .types.common import JobLevel
from .types.common import PostingRegion
from .types.common import Visibility
from .types.company import Company
from .types.company_service import CreateCompanyRequest
from .types.company_service import DeleteCompanyRequest
from .types.company_service import GetCompanyRequest
from .types.company_service import ListCompaniesRequest
from .types.company_service import ListCompaniesResponse
from .types.company_service import UpdateCompanyRequest
from .types.completion_service import CompleteQueryRequest
from .types.completion_service import CompleteQueryResponse
from .types.event import ClientEvent
from .types.event import JobEvent
from .types.event_service import CreateClientEventRequest
from .types.filters import CommuteFilter
from .types.filters import CompensationFilter
from .types.filters import JobQuery
from .types.filters import LocationFilter
from .types.histogram import HistogramQuery
from .types.histogram import HistogramQueryResult
from .types.job import Job
from .types.job_service import BatchCreateJobsRequest
from .types.job_service import BatchCreateJobsResponse
from .types.job_service import BatchDeleteJobsRequest
from .types.job_service import BatchDeleteJobsResponse
from .types.job_service import BatchUpdateJobsRequest
from .types.job_service import BatchUpdateJobsResponse
from .types.job_service import CreateJobRequest
from .types.job_service import DeleteJobRequest
from .types.job_service import GetJobRequest
from .types.job_service import JobResult
from .types.job_service import ListJobsRequest
from .types.job_service import ListJobsResponse
from .types.job_service import SearchJobsRequest
from .types.job_service import SearchJobsResponse
from .types.job_service import UpdateJobRequest
from .types.job_service import JobView
from .types.tenant import Tenant
from .types.tenant_service import CreateTenantRequest
from .types.tenant_service import DeleteTenantRequest
from .types.tenant_service import GetTenantRequest
from .types.tenant_service import ListTenantsRequest
from .types.tenant_service import ListTenantsResponse
from .types.tenant_service import UpdateTenantRequest
__all__ = (
'CompanyServiceAsyncClient',
'CompletionAsyncClient',
'EventServiceAsyncClient',
'JobServiceAsyncClient',
'TenantServiceAsyncClient',
'BatchCreateJobsRequest',
'BatchCreateJobsResponse',
'BatchDeleteJobsRequest',
'BatchDeleteJobsResponse',
'BatchOperationMetadata',
'BatchUpdateJobsRequest',
'BatchUpdateJobsResponse',
'ClientEvent',
'CommuteFilter',
'CommuteMethod',
'Company',
'CompanyServiceClient',
'CompanySize',
'CompensationFilter',
'CompensationInfo',
'CompleteQueryRequest',
'CompleteQueryResponse',
'CompletionClient',
'CreateClientEventRequest',
'CreateCompanyRequest',
'CreateJobRequest',
'CreateTenantRequest',
'CustomAttribute',
'DegreeType',
'DeleteCompanyRequest',
'DeleteJobRequest',
'DeleteTenantRequest',
'DeviceInfo',
'EmploymentType',
'EventServiceClient',
'GetCompanyRequest',
'GetJobRequest',
'GetTenantRequest',
'HistogramQuery',
'HistogramQueryResult',
'HtmlSanitization',
'Job',
'JobBenefit',
'JobCategory',
'JobEvent',
'JobLevel',
'JobQuery',
'JobResult',
'JobServiceClient',
'JobView',
'ListCompaniesRequest',
'ListCompaniesResponse',
'ListJobsRequest',
'ListJobsResponse',
'ListTenantsRequest',
'ListTenantsResponse',
'Location',
'LocationFilter',
'PostingRegion',
'RequestMetadata',
'ResponseMetadata',
'SearchJobsRequest',
'SearchJobsResponse',
'SpellingCorrection',
'Tenant',
'TenantServiceClient',
'TimestampRange',
'UpdateCompanyRequest',
'UpdateJobRequest',
'UpdateTenantRequest',
'Visibility',
)
| apache-2.0 | 9,085,832,102,353,112,000 | 32.611111 | 74 | 0.828283 | false |
unt-libraries/django-nomination | nomination/url_handler.py | 1 | 25446 | import datetime
import itertools
import json
import re
import string
import time
from urllib.parse import urlparse
from django import http
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.db import IntegrityError
from nomination.models import Project, Nominator, URL, Value
SCHEME_ONE_SLASH = re.compile(r'(https?|ftps?):/([^/])')
ANCHOR_PATTERN = re.compile(r'^<a href=\"[^>]+>([^<]+)</a>')
def alphabetical_browse(project):
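    """Build an alphabetical browse index from the project's SURT values.

    Returns a dict mapping each top-level domain to a list of
    (character, surt_prefix_or_None) tuples, one per character 0-9A-Z,
    for rendering a browse-by-letter menu.
    """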
browse_key_list = string.digits + string.ascii_uppercase
browse_dict = {}
try:
surt_list = (
URL.objects.filter(url_project=project, attribute__iexact='surt').order_by('value')
)
except Exception:
raise http.Http404
# compile regex
topdom_rgx = re.compile(r'^[^:]+://\(([^,]+),')
singdom_rgx = re.compile(r'^[^:]+://(\([^,]+,([^,\)]{1}))')
for url_item in surt_list:
top_domain_search = topdom_rgx.search(url_item.value, 0)
if top_domain_search:
top_domain = top_domain_search.group(1)
if top_domain not in browse_dict:
browse_dict[top_domain] = {}
for key in browse_key_list:
browse_dict[top_domain][key] = None
domain_single_search = singdom_rgx.search(url_item.value, 0)
if domain_single_search:
domain_single = domain_single_search.group(2).upper()
browse_dict[top_domain][domain_single] = domain_single_search.group(1)
sorted_dict = {}
for top_domain, alpha_dict in browse_dict.items():
alpha_list = []
for key in sorted(alpha_dict.keys()):
alpha_list.append((key, alpha_dict[key],))
sorted_dict[top_domain] = alpha_list
return sorted_dict
def get_metadata(project):
"""Creates metadata/values set to pass to template."""
all_vals = None
metadata_vals = []
# combine values from value_sets and additional values
for x in project.project_metadata_set.all():
# get individual values in order by value_order
all_vals = x.metadata.values.all().order_by('metadata_values')
# get the set of value_sets and combine with other values
for z in x.metadata.value_sets.all():
all_vals = itertools.chain(z.values.all().order_by('valueset_values'), all_vals)
metadata_vals.append((x, all_vals))
return metadata_vals
def handle_metadata(request, posted_data):
"""Handles multivalue metadata and user supplied metadata values."""
for k in posted_data.keys():
# if key has more than one value, post all
requested_list = request.POST.getlist(k)
if len(requested_list) > 1:
# look for other_specify value
for aval in requested_list:
if aval == 'other_specify':
# add user input if there
try:
requested_list.append(posted_data[k+'_other'])
except KeyError:
pass
requested_list.remove(aval)
posted_data[k] = requested_list
else:
# if other_specify value, supply user input
if posted_data[k] == 'other_specify':
try:
posted_data[k] = posted_data[k+'_other']
except KeyError:
pass
return posted_data
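# Example (illustrative data): for POST data
#   {'genre': ['news', 'other_specify'], 'genre_other': 'zines'}
# the 'other_specify' placeholder is swapped for the user-supplied text,
# leaving posted_data['genre'] == ['news', 'zines'].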
def validate_date(dateinput):
"""Takes user's date input and checks the validity.
Returns a valid form of the date, or None if input was invalid.
"""
DEFAULT_DATE_INPUT_FORMATS = (
'%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
'%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
'%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
'%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
'%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
)
validdate = None
if dateinput.strip():
for format in DEFAULT_DATE_INPUT_FORMATS:
try:
validdate = datetime.date(*time.strptime(dateinput, format)[:3])
break
except ValueError:
continue
return validdate
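# Example: validate_date('25 Oct 2006'), validate_date('10/25/2006') and
# validate_date('October 25, 2006') all yield datetime.date(2006, 10, 25);
# a string matching no format (e.g. 'next week') yields None.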
def add_url(project, form_data):
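    """Nominate a URL into the project as in scope.

    Normalizes the URL, ensures its SURT entry exists (created under the
    system nominator), resolves or creates the nominator, records the
    nomination with scope '1', and saves any project-specific attributes.
    Returns a list of status messages, or False if the nominator could
    not be resolved unambiguously.
    """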
summary_list = []
form_data['url_value'] = check_url(form_data['url_value'])
# Get the system nominator
system_nominator = get_object_or_404(Nominator, id=settings.SYSTEM_NOMINATOR_ID)
# Check for or add surt
surt_successful = surt_exists(project, system_nominator, form_data['url_value'])
if not surt_successful:
raise http.Http404
# Get/Add a nominator
nominator = get_nominator(form_data)
if not nominator:
return False
# Nominate a URL
summary_list = nominate_url(project, nominator, form_data, '1')
# Add other URL attributes
summary_list = add_other_attribute(project, nominator, form_data, summary_list)
return summary_list
def add_metadata(project, form_data):
summary_list = []
# Get/Add a nominator
nominator = get_nominator(form_data)
if not nominator:
raise http.Http404
if form_data['scope'] != '':
# Nominate a URL
summary_list = nominate_url(project, nominator, form_data, form_data['scope'])
# Add other URL attributes
summary_list = add_other_attribute(project, nominator, form_data, summary_list)
return summary_list
def check_url(url):
url = url.strip()
url = addImpliedHttpIfNecessary(url)
url = url.rstrip('/')
return url
def get_nominator(form_data):
try:
# Try to retrieve the nominator
nominator, created = Nominator.objects.get_or_create(
nominator_email=form_data['nominator_email'],
defaults={
'nominator_name': form_data['nominator_name'],
'nominator_institution': form_data['nominator_institution']
})
except Nominator.MultipleObjectsReturned:
# Retrieve unique nominator
try:
nominator = Nominator.objects.get(
nominator_email=form_data['nominator_email'],
nominator_name=form_data['nominator_name'],
nominator_institution=form_data['nominator_institution'])
except (Nominator.MultipleObjectsReturned, Nominator.DoesNotExist):
return False
except (IntegrityError, KeyError):
raise http.Http404
return nominator
def nominate_url(project, nominator, form_data, scope_value):
summary_list = []
# Nominate URL
try:
# Check if user has already nominated the URL
nomination_url, created = URL.objects.get_or_create(url_nominator__id__iexact=nominator.id,
url_project=project,
entity__iexact=form_data['url_value'],
attribute__iexact='nomination',
defaults={
'entity': form_data['url_value'],
'attribute': 'nomination',
'value': scope_value,
'url_nominator': nominator,
})
except Exception:
raise http.Http404
if created:
summary_list.append('You have successfully nominated ' + form_data['url_value'])
else:
if nomination_url.value == scope_value:
if scope_value == '1':
summary_list.append(
'You have already declared ' + form_data['url_value'] + ' as \"In Scope\"'
)
else:
summary_list.append(
'You have already declared ' + form_data['url_value'] + ' as \"Out of Scope\"'
)
else:
nomination_url.value = scope_value
nomination_url.save()
if scope_value == '1':
summary_list.append(
'You have successfully declared ' + form_data['url_value'] + ' as \"In Scope\"'
)
else:
summary_list.append(
'You have successfully declared ' + form_data['url_value']
+ ' as \"Out of Scope\"'
)
return summary_list
def add_other_attribute(project, nominator, form_data, summary_list):
# add to att_list the project specific attributes
att_list = []
for aobj in project.project_metadata_set.all():
att_list.append(aobj.metadata.name)
for attribute_name in att_list:
# Add attribute URL entry
if attribute_name in form_data:
# If attribute has a list of values associated
if isinstance(form_data[attribute_name], list):
for oneval in form_data[attribute_name]:
if len(oneval) > 0:
summary_list = save_attribute(project, nominator, form_data, summary_list,
attribute_name, oneval)
elif len(form_data[attribute_name]) > 0:
summary_list = save_attribute(project, nominator, form_data, summary_list,
attribute_name, form_data[attribute_name])
return summary_list
def save_attribute(project, nominator, form_data, summary_list, attribute_name, valvar):
"""Stores attribute/value for given url in database.
Separated out of add_other_attribute to handle attributes where a list
of values was submitted.
"""
try:
# Check if URL attribute and value already exist
added_url, created = URL.objects.get_or_create(url_nominator=nominator,
url_project=project,
entity__iexact=form_data['url_value'],
value__iexact=valvar,
attribute__iexact=attribute_name,
defaults={
'entity': form_data['url_value'],
'value': valvar,
'attribute': attribute_name,
})
except Exception:
raise http.Http404
if created:
summary_list.append('You have successfully added the '
+ attribute_name + ' \"' + valvar + '\" for '
+ form_data['url_value'])
else:
summary_list.append('You have already added the '
+ attribute_name + ' \"' + valvar + '\" for '
+ form_data['url_value'])
return summary_list
def surt_exists(project, system_nominator, url_entity):
# Create a SURT if the url doesn't already have one
try:
URL.objects.get_or_create(url_project=project,
entity__iexact=url_entity,
attribute__iexact='surt',
defaults={
'entity': url_entity,
'attribute': 'surt',
'value': surtize(url_entity),
'url_nominator': system_nominator
})
except Exception:
raise http.Http404
return True
def url_formatter(line):
"""
Formats the given url into the proper url format
"""
url = line.strip()
url = addImpliedHttpIfNecessary(url)
return url
def surtize(orig_url, preserveCase=False):
"""
Create a surt from a url. Based on Heritrix's SURT.java.
"""
# if url is submitted without scheme, add http://
orig_url = addImpliedHttpIfNecessary(orig_url)
# 1: scheme://
# 2: userinfo (if present)
# 3: @ (if present)
# 4: dotted-quad host
# 5: other host
# 6: :port
# 7: path
# group def. 1 2 3
URI_SPLITTER = r"^(\w+://)(?:([-\w\.!~\*'\(\)%;:&=+$,]+?)(@))?" + \
r"(?:((?:\d{1,3}\.){3}\d{1,3})|(\S+?))(:\d+)?(/\S*)?$"
# 4 5 6 7
# check URI validity
m = re.compile(URI_SPLITTER)
mobj = m.match(orig_url)
if not mobj:
return ''
# start building surt form
if mobj.group(1) == 'https://':
surt = 'http://('
elif mobj.group(1) == 'ftps://':
surt = 'ftp://('
else:
surt = mobj.group(1) + '('
# if dotted-quad ip match, don't reverse
if mobj.group(4) is not None:
surt += mobj.group(4)
# otherwise, reverse host
else:
splithost = mobj.group(5).split('.')
splithost.reverse()
hostpart = ','.join(splithost)
surt += hostpart + ','
# add port if it exists
surt = appendToSurt(mobj, 6, surt)
# add @ if it exists
surt = appendToSurt(mobj, 3, surt)
# add userinfo if it exists
surt = appendToSurt(mobj, 2, surt)
# close parentheses before path
surt += ')'
# add path if it exists
surt = appendToSurt(mobj, 7, surt)
# return surt
if preserveCase is False:
return surt.lower()
else:
return surt
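# Illustrative results (not part of the original module):
#   surtize('http://www.example.com/about')
#       -> 'http://(com,example,www,)/about'
#   surtize('https://user@Example.org:8080/x')
#       -> 'http://(org,example,:8080@user)/x'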
def appendToSurt(matchobj, groupnum, surt):
if matchobj.group(groupnum) is not None:
surt += matchobj.group(groupnum)
return surt
def addImpliedHttpIfNecessary(uri):
colon = uri.find(':')
period = uri.find('.')
if colon == -1 or (period >= 0 and period < colon):
uri = 'http://' + uri
return uri
def create_json_browse(slug, url_attribute, root=''):
"""Create a JSON list which can be used to represent a tree of the SURT domains.
If a root is specified, the JSON list will show just the tree of domains under
the specified base domain. Otherwise, it will show all of the domains. Each entry
in the JSON list is a dict which states the base domain, child domain,
and whether the child domain has children or not.
"""
json_list = []
    # Make sure the project exists in the database
project = get_object_or_404(Project, project_slug=slug)
if root != '':
# Find all URLs with the project and domain specified
url_list = URL.objects.filter(
url_project=project,
attribute__iexact='surt',
value__icontains=root
).order_by('value')
else:
# Find all URLs with the project specified (the base domains)
url_list = URL.objects.filter(
url_project=project,
attribute__iexact='surt'
).order_by('value')
if len(url_list) >= 100 and root != '':
category_list = []
for url_item in url_list:
name_search = re.compile(r'^[^:]+://\('+root+'([A-Za-z0-9]{1})').search(
url_item.value, 0)
if name_search:
if not name_search.group(1) in category_list:
category_list.append(name_search.group(1))
for category in category_list:
category_dict = {'text': category,
'id': root+category,
'hasChildren': True}
json_list.append(category_dict)
else:
name_pattern = re.compile(r'^[^:]+://\('+root+r'([^,\)]+)')
child_pattern = re.compile(r'^[^:]+://\('+root+r'[^,]+,[^,\)]+')
for url_item in url_list:
domain_dict = {}
# Determine if URL is a child of the expanded node
name_search = name_pattern.search(url_item.value, 0)
# if the URL exists under the expanded node
if name_search:
# Determine if the URL has children
child_found = child_pattern.search(url_item.value, 0)
# Create a new domain name for the new URL
domain_name = root + name_search.group(1)
domain_not_found = True
# For all URLs in the json list already
for existing_domain in json_list:
# Find the domain name within the anchor
found_anchor = ANCHOR_PATTERN.search(existing_domain['text'], 0)
if found_anchor:
removed_anchor = found_anchor.group(1)
else:
removed_anchor = None
# if the domain name already exists in the json list
if existing_domain['text'] == domain_name or removed_anchor == domain_name:
domain_not_found = False
# if the domain hasn't been added already, and it has a child node
if domain_not_found and child_found:
if len(domain_name.split(',')) > 1:
domain_dict = {'text': '<a href=\"surt/(' +
domain_name+'\">' +
domain_name+'</a>',
'id': domain_name+',',
'hasChildren': True}
else:
domain_dict = {'text': domain_name,
'id': domain_name+',',
'hasChildren': True}
# otherwise if the domain hasn't been added already, and it has no child
elif domain_not_found:
domain_dict = {'text': '<a href=\"surt/(' +
domain_name+'\">' +
domain_name+'</a>',
'id': domain_name+','}
# if the domain dictionary isn't empty
if domain_dict:
json_list.append(domain_dict)
return json.dumps(json_list)
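# Illustrative entries (an assumption based on the patterns above): for a
# project whose only SURT is 'http://(com,example,www,)',
#   root=''     -> {'text': 'com', 'id': 'com,', 'hasChildren': True}
#   root='com,' -> {'text': '<a href="surt/(com,example">com,example</a>',
#                   'id': 'com,example,', 'hasChildren': True}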
def create_json_search(slug):
"""Create JSON list of all URLs added to the specified project."""
project = get_object_or_404(Project, project_slug=slug)
json_list = []
query_list = (URL.objects.filter(url_project=project)
.values_list('entity', flat=True)
.distinct()
.order_by('entity'))
for url_item in query_list:
json_list.append(url_item)
return json.dumps(json_list)
def create_url_list(project, base_list):
url_dict = {}
for url_object in base_list:
url_dict['entity'] = url_object.entity
if url_object.attribute == 'nomination':
if 'nomination_list' not in url_dict:
url_dict['nomination_list'] = []
url_dict['nomination_count'] = 0
url_dict['nomination_score'] = 0
if url_object.url_nominator.nominator_name + ' - ' + \
url_object.url_nominator.nominator_institution \
not in url_dict['nomination_list']:
url_dict['nomination_list'].append(
url_object.url_nominator.nominator_name + ' - ' +
url_object.url_nominator.nominator_institution)
url_dict['nomination_count'] += 1
url_dict['nomination_score'] += int(url_object.value)
elif url_object.attribute == 'surt':
url_dict['surt'] = get_domain_surt(url_object.value)
else:
if 'attribute_dict' not in url_dict:
url_dict['attribute_dict'] = {}
attrib_key = string.capwords(url_object.attribute.replace('_', ' '))
if attrib_key not in url_dict['attribute_dict']:
url_dict['attribute_dict'][attrib_key] = []
            # try to swap the stored value key for its full Value object where applicable
try:
url_metadata = project.project_metadata_set.get(
metadata__name__exact=url_object.attribute).metadata
if len(url_metadata.values.all()) > 0 or len(url_metadata.value_sets.all()) > 0:
fullval = Value.objects.get(key__exact=url_object.value)
if fullval not in url_dict['attribute_dict'][attrib_key]:
url_dict['attribute_dict'][attrib_key].append(fullval)
else:
raise Exception()
except Exception:
if url_object.value not in url_dict['attribute_dict'][attrib_key]:
url_dict['attribute_dict'][attrib_key].append(url_object.value)
return url_dict
def create_url_dump(project):
url_dict = {}
# get QuerySet of url_data
entity_list = URL.objects.filter(url_project=project)
# get metadata and values
metadata_vals = get_metadata(project)
# turn metadata_vals into usable dict
val_dict = {}
for met, vals in metadata_vals:
val_dict[met.metadata.name] = {}
for eachv in vals:
val_dict[met.metadata.name][eachv.key] = eachv.value
# merge the data for URLs with same entity
for url_object in entity_list:
url_ent = url_object.entity
attrib_key = url_object.attribute
# if first time we've seen the entity, create url_dict entry
if url_ent not in url_dict.keys():
url_dict[url_ent] = \
{'nominators': [],
'nomination_count': 0,
'nomination_score': 0,
'attributes': {}}
if attrib_key == 'nomination':
nominator = url_object.url_nominator.nominator_name + ' - ' + \
url_object.url_nominator.nominator_institution
if nominator not in url_dict[url_ent]['nominators']:
url_dict[url_ent]['nominators'].append(nominator)
url_dict[url_ent]['nomination_count'] += 1
url_dict[url_ent]['nomination_score'] += int(url_object.value)
elif attrib_key == 'surt':
url_dict[url_ent]['surt'] = url_object.value
url_dict[url_ent]['domain_surt'] = get_domain_surt(url_object.value)
else:
if attrib_key not in url_dict[url_ent]['attributes'].keys():
url_dict[url_ent]['attributes'][attrib_key] = []
            # replace the stored value key with its human-readable value if applicable
try:
# see if the field has preset values
if val_dict[attrib_key]:
fullval = val_dict[attrib_key][url_object.value]
if fullval not in url_dict[url_ent]['attributes'][attrib_key]:
url_dict[url_ent]['attributes'][attrib_key].append(fullval)
else:
raise Exception()
except Exception:
if url_object.value not in \
url_dict[url_ent]['attributes'][attrib_key]:
url_dict[url_ent]['attributes'][attrib_key].append(url_object.value)
# sort attribute lists
for u, udata in url_dict.items():
if udata.get('attributes'):
for att_key, att_vals in udata['attributes'].items():
att_vals.sort()
return url_dict
def create_surt_dict(project, surt):
if strip_scheme(surt) == surt:
# SURTs with no scheme are ok
surt_pattern = r'^[^:]+://\{0}'.format(strip_scheme(surt))
try:
url_list = URL.objects.filter(
url_project=project,
attribute__iexact='surt',
value__iregex=surt_pattern
).order_by('value')
except Exception:
url_list = None
else:
try:
url_list = URL.objects.filter(
url_project=project,
attribute__iexact='surt',
value__istartswith=surt
).order_by('value')
except Exception:
url_list = None
letter = False
single_letter_search = re.compile(r'^(?:[^:]+://)?\([^,]+,([^,\)]+)').search(surt, 0)
if single_letter_search:
result = single_letter_search.group(1)
if len(result) == 1:
letter = result
return {
'url_list': url_list,
'letter': letter,
}
def get_domain_surt(surt):
domain_surt = re.compile(r'^([^:]+://\([^,]+,[^,]+,)').search(surt, 0)
if domain_surt:
return domain_surt.group(1)
else:
return surt
def fix_scheme_double_slash(url):
"""Add back slash lost by Apache removing null path segments."""
fixed_entity = re.sub(SCHEME_ONE_SLASH, r'\1://\2', url)
return fixed_entity
def strip_scheme(url):
"""Remove scheme from URL."""
scheme = '{}://'.format(urlparse(url).scheme)
return url.replace(scheme, '', 1)
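# Illustrative behaviour (not part of the original module):
#   fix_scheme_double_slash('http:/example.com') -> 'http://example.com'
#   strip_scheme('http://example.com/a')         -> 'example.com/a'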
| bsd-3-clause | -2,891,498,525,979,208,700 | 37.671733 | 99 | 0.519217 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_08_01/operations/_service_endpoint_policy_definitions_operations.py | 1 | 24305 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ServiceEndpointPolicyDefinitionsOperations(object):
"""ServiceEndpointPolicyDefinitionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
service_endpoint_policy_name, # type: str
service_endpoint_policy_definition_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceEndpointPolicyName': self._serialize.url("service_endpoint_policy_name", service_endpoint_policy_name, 'str'),
'serviceEndpointPolicyDefinitionName': self._serialize.url("service_endpoint_policy_definition_name", service_endpoint_policy_definition_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
service_endpoint_policy_name, # type: str
service_endpoint_policy_definition_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified ServiceEndpoint policy definitions.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_endpoint_policy_name: The name of the Service Endpoint Policy.
:type service_endpoint_policy_name: str
:param service_endpoint_policy_definition_name: The name of the service endpoint policy
definition.
:type service_endpoint_policy_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
service_endpoint_policy_name=service_endpoint_policy_name,
service_endpoint_policy_definition_name=service_endpoint_policy_definition_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceEndpointPolicyName': self._serialize.url("service_endpoint_policy_name", service_endpoint_policy_name, 'str'),
'serviceEndpointPolicyDefinitionName': self._serialize.url("service_endpoint_policy_definition_name", service_endpoint_policy_definition_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
service_endpoint_policy_name, # type: str
service_endpoint_policy_definition_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ServiceEndpointPolicyDefinition"
"""Get the specified service endpoint policy definitions from service endpoint policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_endpoint_policy_name: The name of the service endpoint policy name.
:type service_endpoint_policy_name: str
:param service_endpoint_policy_definition_name: The name of the service endpoint policy
definition name.
:type service_endpoint_policy_definition_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ServiceEndpointPolicyDefinition, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_08_01.models.ServiceEndpointPolicyDefinition
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ServiceEndpointPolicyDefinition"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceEndpointPolicyName': self._serialize.url("service_endpoint_policy_name", service_endpoint_policy_name, 'str'),
'serviceEndpointPolicyDefinitionName': self._serialize.url("service_endpoint_policy_definition_name", service_endpoint_policy_definition_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ServiceEndpointPolicyDefinition', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
service_endpoint_policy_name, # type: str
service_endpoint_policy_definition_name, # type: str
service_endpoint_policy_definitions, # type: "_models.ServiceEndpointPolicyDefinition"
**kwargs # type: Any
):
# type: (...) -> "_models.ServiceEndpointPolicyDefinition"
cls = kwargs.pop('cls', None) # type: ClsType["_models.ServiceEndpointPolicyDefinition"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceEndpointPolicyName': self._serialize.url("service_endpoint_policy_name", service_endpoint_policy_name, 'str'),
'serviceEndpointPolicyDefinitionName': self._serialize.url("service_endpoint_policy_definition_name", service_endpoint_policy_definition_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(service_endpoint_policy_definitions, 'ServiceEndpointPolicyDefinition')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ServiceEndpointPolicyDefinition', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ServiceEndpointPolicyDefinition', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
service_endpoint_policy_name, # type: str
service_endpoint_policy_definition_name, # type: str
service_endpoint_policy_definitions, # type: "_models.ServiceEndpointPolicyDefinition"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ServiceEndpointPolicyDefinition"]
"""Creates or updates a service endpoint policy definition in the specified service endpoint
policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_endpoint_policy_name: The name of the service endpoint policy.
:type service_endpoint_policy_name: str
:param service_endpoint_policy_definition_name: The name of the service endpoint policy
definition name.
:type service_endpoint_policy_definition_name: str
:param service_endpoint_policy_definitions: Parameters supplied to the create or update service
endpoint policy operation.
:type service_endpoint_policy_definitions: ~azure.mgmt.network.v2020_08_01.models.ServiceEndpointPolicyDefinition
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ServiceEndpointPolicyDefinition or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_08_01.models.ServiceEndpointPolicyDefinition]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ServiceEndpointPolicyDefinition"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
service_endpoint_policy_name=service_endpoint_policy_name,
service_endpoint_policy_definition_name=service_endpoint_policy_definition_name,
service_endpoint_policy_definitions=service_endpoint_policy_definitions,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ServiceEndpointPolicyDefinition', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceEndpointPolicyName': self._serialize.url("service_endpoint_policy_name", service_endpoint_policy_name, 'str'),
'serviceEndpointPolicyDefinitionName': self._serialize.url("service_endpoint_policy_definition_name", service_endpoint_policy_definition_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}'} # type: ignore
def list_by_resource_group(
self,
resource_group_name, # type: str
service_endpoint_policy_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ServiceEndpointPolicyDefinitionListResult"]
"""Gets all service endpoint policy definitions in a service end point policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_endpoint_policy_name: The name of the service endpoint policy name.
:type service_endpoint_policy_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ServiceEndpointPolicyDefinitionListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_08_01.models.ServiceEndpointPolicyDefinitionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ServiceEndpointPolicyDefinitionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceEndpointPolicyName': self._serialize.url("service_endpoint_policy_name", service_endpoint_policy_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ServiceEndpointPolicyDefinitionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions'} # type: ignore
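# Illustrative usage (an assumption -- the client wiring below follows the
# standard azure-mgmt-network pattern and is not part of this generated file):
#
#   from azure.identity import DefaultAzureCredential
#   from azure.mgmt.network import NetworkManagementClient
#
#   client = NetworkManagementClient(DefaultAzureCredential(), subscription_id)
#   poller = client.service_endpoint_policy_definitions.begin_create_or_update(
#       resource_group_name='my-rg',
#       service_endpoint_policy_name='my-policy',
#       service_endpoint_policy_definition_name='my-definition',
#       service_endpoint_policy_definitions=definition_model,  # a ServiceEndpointPolicyDefinition
#   )
#   definition = poller.result()  # blocks until the long-running operation completes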
| mit | -3,697,808,724,107,169,000 | 53.617978 | 285 | 0.667682 | false |
Dybov/real_estate_agency | real_estate_agency/applications/tests.py | 1 | 1571 | from unittest import skip
from django.test import TestCase, tag, override_settings
from django.conf import settings
from viberbot.api.messages import TextMessage
from .telegram_tasks import TOKEN, sendTelegramMessage
from .viber_tasks import VIBER_BOT, getBotAdmins
@skip('Telegram is blocked in Russia (web filters are used as well), so it \
is not reachable. Skip until it is unblocked')
@tag('telegram')
class TelegramBotTests(TestCase):
@override_settings(CELERY_ALWAYS_EAGER=True)
def setUp(self):
self.TOKEN = TOKEN
self.chat_id = settings.TELEGRAM_ADMINS_CHATS[0]
@tag('delayed')
def test_sendTelegramMessage(self):
answer = sendTelegramMessage.delay(
self.TOKEN,
self.chat_id,
"test_sendTelegramMessage is okay"
)
self.assertTrue(answer)
@tag('viber')
class ViberBotTests(TestCase):
def setUp(self):
self.bot = VIBER_BOT
self.admins = settings.VIBER_BOT_TEST_ADMINS
if not self.admins:
self.admins = getBotAdmins()
@tag('send')
def test_sending_admin_message(self):
# It will be delivered!
msg = TextMessage(text="Run viber bot tests")
for reciever in self.admins:
answer = self.bot.send_messages(
reciever,
msg,
)
self.assertTrue(answer)
def test_viber_valid_token(self):
try:
self.bot.get_account_info()
except Exception:
self.fail("Bot token is invalid!")
| mit | -5,472,998,082,393,986,000 | 28.092593 | 76 | 0.636537 | false |
kilisimba/gluten-free-tax | dbref/models.py | 1 | 2223 | from django.db import models
class Note(models.Model):
note = models.TextField(max_length=800)
def __unicode__(self): # Python 3: def __str__(self):
return self.note
class Company(models.Model):
name = models.CharField(max_length=200, default='Unknown')
validation_date = models.CharField(max_length=100, blank=True, null=True)
certification = models.CharField(max_length=200, blank=True, null=True)
note = models.ForeignKey(Note, blank=True, null=True)
def brands(self):
return ' / '.join(self.brand_set.values_list('name', flat=True))
def __unicode__(self):
return self.brands()
class Brand(models.Model):
name = models.CharField(max_length=200)
company = models.ForeignKey(Company, blank=True, null=True)
def __unicode__(self): # Python 3: def __str__(self):
return self.name
class Meta:
ordering = ['name']
class Site(models.Model):
domain = models.CharField(max_length=200)
company = models.ForeignKey(Company)
class Category(models.Model):
note = models.TextField(max_length=800)
def __unicode__(self): # Python 3: def __str__(self):
return self.note
class Meta:
verbose_name_plural = "Categories"
class Product(models.Model):
description = models.CharField(max_length=200)
unit = models.DecimalField(default=0, max_digits=8, decimal_places=2)
price = models.DecimalField(default=0, max_digits=8, decimal_places=2)
gluten_free = models.BooleanField(default=False)
company = models.ForeignKey(Company, blank=True, null=True)
category = models.ForeignKey(Category, blank=True, null=True)
note = models.ForeignKey(Note, blank=True, null=True)
def __unicode__(self): # Python 3: def __str__(self):
return self.description
class Association(models.Model):
equivalent = models.ForeignKey(Product, blank=True, null=True, \
related_name='nonGF', verbose_name='non-gluten free')
gluten_free = models.ForeignKey(Product, related_name='GF')
class Meta:
verbose_name = "Lookup Table"
ordering = ['gluten_free__description', 'equivalent__description']
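# Illustrative shell usage (an assumption, not part of the original module):
#   gf = Product.objects.create(description='GF bread', gluten_free=True)
#   regular = Product.objects.create(description='Bread')
#   Association.objects.create(gluten_free=gf, equivalent=regular)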
| mit | 4,382,392,098,167,966,000 | 33.734375 | 89 | 0.65587 | false |
mfalesni/cfme_tests | cfme/services/catalogs/catalog_items/__init__.py | 1 | 17484 | import attr
import fauxfactory
from navmazing import NavigateToAttribute, NavigateToSibling
from widgetastic.widget import Text, Checkbox, View
from widgetastic_manageiq import FonticonPicker, ManageIQTree
from widgetastic_patternfly import Button, Input, BootstrapSelect, CandidateNotFound, Tab
from widgetastic.utils import VersionPick, Version
from cfme.common import Taggable, TagPageView
from cfme.common.vm_views import BasicProvisionFormView
from cfme.modeling.base import BaseCollection, BaseEntity
from cfme.services.catalogs import ServicesCatalogView
from cfme.utils.appliance.implementations.ui import navigator, CFMENavigateStep, navigate_to
from cfme.utils.pretty import Pretty
from cfme.utils.update import Updateable
from cfme.utils.wait import wait_for
# Views
class BasicInfoForm(ServicesCatalogView):
title = Text('#explorer_title_text')
name = Input(name='name')
description = Input(name='description')
display = Checkbox(name='display')
select_catalog = BootstrapSelect('catalog_id')
select_dialog = BootstrapSelect('dialog_id')
select_orch_template = BootstrapSelect('template_id')
select_provider = BootstrapSelect('manager_id')
select_config_template = BootstrapSelect('template_id')
subtype = BootstrapSelect('generic_subtype')
field_entry_point = Input(name='fqname')
retirement_entry_point = Input(name='retire_fqname')
select_resource = BootstrapSelect('resource_id')
tree = ManageIQTree('automate_treebox')
class ButtonGroupForm(ServicesCatalogView):
title = Text('#explorer_title_text')
btn_group_text = Input(name='name')
btn_group_hvr_text = Input(name='description')
btn_image = VersionPick({
Version.lowest(): BootstrapSelect('button_image'),
'5.9': FonticonPicker('button_icon')
})
class ButtonForm(ServicesCatalogView):
title = Text('#explorer_title_text')
btn_text = Input(name='name')
btn_hvr_text = Input(name='description')
btn_image = BootstrapSelect('button_image')
select_dialog = BootstrapSelect('dialog_id')
system_process = BootstrapSelect('instance_name')
request = Input(name='object_request')
@View.nested
class options(Tab): # noqa
TAB_NAME = 'Options'
btn_text = Input(name='name')
btn_hvr_text = Input(name='description')
select_dialog = BootstrapSelect('dialog_id')
btn_image = FonticonPicker('button_icon')
@View.nested
class advanced(Tab): # noqa
TAB_NAME = 'Advanced'
system_process = BootstrapSelect('instance_name')
request = Input(name='object_request')
class AllCatalogItemView(ServicesCatalogView):
title = Text('#explorer_title_text')
@property
def is_displayed(self):
return (
self.in_explorer and
self.title.text == 'All Service Catalog Items' and
self.catalog_items.is_opened and
self.catalog_items.tree.currently_selected == ['All Catalog Items']
)
class DetailsCatalogItemView(ServicesCatalogView):
title = Text('#explorer_title_text')
@property
def is_displayed(self):
return (
self.in_explorer and
self.catalog_items.is_opened and
self.title.text == 'Service Catalog Item "{}"'.format(self.context['object'].name)
)
class ChooseCatalogItemTypeView(ServicesCatalogView):
"""Intermediate view where an actual catalog item type is selected."""
select_item_type = BootstrapSelect('st_prov_type', can_hide_on_select=True)
@property
def is_displayed(self):
return (
self.in_explorer and
self.catalog_items.is_opened and
self.title.text == 'Adding a new Service Catalog Item'
)
class AddCatalogItemView(BasicInfoForm):
"""NonCloudInfraCatalogItem catalog items have this view."""
add = Button('Add')
cancel = Button('Cancel')
@property
def is_displayed(self):
return (
self.in_explorer and
self.catalog_items.is_opened and
self.title.text == 'Adding a new Service Catalog Item'
)
class TabbedAddCatalogItemView(ServicesCatalogView):
"""Cloud and Infra catalog items have this view."""
add = Button('Add')
cancel = Button('Cancel')
@View.nested
class basic_info(Tab): # noqa
TAB_NAME = 'Basic Info'
included_form = View.include(BasicInfoForm)
class request_info(Tab): # noqa
TAB_NAME = 'Request Info'
provisioning = View.nested(BasicProvisionFormView)
@property
def is_displayed(self):
return (
self.in_explorer and
self.catalog_items.is_opened and
self.title.text == 'Adding a new Service Catalog Item'
)
class EditCatalogItemView(BasicInfoForm):
save = Button('Save')
reset = Button('Reset')
cancel = Button('Cancel')
@property
def is_displayed(self):
return (
self.in_explorer and
self.catalog_items.is_opened and
self.title.text == 'Editing Service Catalog Item "{}"'
.format(self.context['object'].name)
)
def after_fill(self, was_change):
# TODO: This is a workaround (Jira RHCFQE-5429)
if was_change:
wait_for(lambda: not self.save.disabled, timeout='10s', delay=0.2)
class TabbedEditCatalogItemView(ServicesCatalogView):
save = Button('Save')
reset = Button('Reset')
cancel = Button('Cancel')
@View.nested
class basic_info(Tab): # noqa
TAB_NAME = 'Basic Info'
included_form = View.include(BasicInfoForm)
class request_info(Tab): # noqa
TAB_NAME = 'Request Info'
provisioning = View.nested(BasicProvisionFormView)
@property
def is_displayed(self):
return (
self.in_explorer and
self.catalog_items.is_opened and
self.title.text == 'Editing Service Catalog Item "{}"'
.format(self.context['object'].name)
)
class AddButtonGroupView(ButtonGroupForm):
add = Button('Add')
@property
def is_displayed(self):
return (
self.in_explorer and
self.catalog_items.is_opened and
self.title.text == 'Adding a new Button Group'
)
class AddButtonView(ButtonForm):
add = Button('Add')
@property
def is_displayed(self):
return (
self.in_explorer and
self.catalog_items.is_opened and
self.title.text == 'Adding a new Button'
)
class BaseCatalogItem(BaseEntity, Updateable, Pretty, Taggable):
@property
def button_icon_name(self):
return 'Button Image 1' if self.appliance.version < '5.9' else 'broom'
def update(self, updates):
view = navigate_to(self, 'Edit')
changed = view.fill(updates)
if changed:
view.save.click()
else:
view.cancel.click()
view = self.create_view(DetailsCatalogItemView, override=updates)
assert view.is_displayed
view.flash.assert_no_error()
# TODO move these assertions to tests
# if changed:
# view.flash.assert_message(
# 'Service Catalog Item "{}" was saved'.format(updates.get('name', self.name)))
# else:
# view.flash.assert_message(
# 'Edit of Catalog Item"{}" was cancelled by the user'.format(self.name))
def delete(self):
view = navigate_to(self, 'Details')
view.configuration.item_select('Remove Catalog Item', handle_alert=True)
view = self.create_view(AllCatalogItemView)
assert view.is_displayed
view.flash.assert_success_message('The selected Catalog Item was deleted')
def add_button_group(self):
button_name = fauxfactory.gen_alpha()
view = navigate_to(self, 'AddButtonGroup')
view.fill({'btn_group_text': 'group_text',
'btn_group_hvr_text': button_name,
'btn_image': self.button_icon_name})
view.add.click()
view = self.create_view(DetailsCatalogItemView)
assert view.is_displayed
view.flash.assert_no_error()
return button_name
def add_button(self):
button_name = fauxfactory.gen_alpha()
view = navigate_to(self, 'AddButton')
if self.appliance.version < '5.9':
view.fill({'btn_text': 'btn_text',
'btn_hvr_text': button_name,
'btn_image': self.button_icon_name,
'select_dialog': self.dialog,
'system_process': 'Request',
'request': 'InspectMe'})
else:
view.fill({'options': {'btn_text': 'btn_text',
'btn_hvr_text': button_name,
'select_dialog': self.dialog,
'btn_image': self.button_icon_name},
'advanced': {'system_process': 'Request',
'request': 'InspectMe'}})
view.add.click()
view = self.create_view(DetailsCatalogItemView)
wait_for(lambda: view.is_displayed, timeout=5)
view.flash.assert_no_error()
return button_name
@property
def exists(self):
try:
navigate_to(self, 'Details')
return True
except CandidateNotFound:
return False
@attr.s
class CloudInfraCatalogItem(BaseCatalogItem):
"""Catalog items that relate to cloud and infra providers."""
name = attr.ib()
prov_data = attr.ib()
catalog = attr.ib(default=None)
description = attr.ib(default=None)
display_in = attr.ib(default=None)
dialog = attr.ib(default=None)
domain = attr.ib(default='ManageIQ (Locked)')
item_type = None
@property
def fill_dict(self):
return {
'basic_info': {
'name': self.name,
'description': self.description,
'display': self.display_in,
'select_catalog': getattr(self.catalog, 'name', None),
'select_dialog': self.dialog
},
'request_info': {'provisioning': self.prov_data}
}
@attr.s
class NonCloudInfraCatalogItem(BaseCatalogItem):
"""Generic, Ansible Tower, Orchestration and OpenShift catalog items."""
name = attr.ib()
catalog = attr.ib(default=None)
description = attr.ib(default=None)
display_in = attr.ib(default=None)
dialog = attr.ib(default=None)
domain = attr.ib(default='ManageIQ (Locked)')
item_type = None
@property
def _fill_dict(self):
return {
'name': self.name,
'description': self.description,
'display': self.display_in,
'select_catalog': getattr(self.catalog, 'name', None),
'select_dialog': self.dialog,
}
class AmazonCatalogItem(CloudInfraCatalogItem):
item_type = 'Amazon'
@attr.s
class AnsibleTowerCatalogItem(NonCloudInfraCatalogItem):
provider = attr.ib(default=None)
config_template = attr.ib(default=None)
@property
def item_type(self):
if self.appliance.version >= '5.9':
return 'Ansible Tower'
else:
return 'AnsibleTower'
@property
def fill_dict(self):
self._fill_dict['select_provider'] = self.provider
self._fill_dict['select_config_template'] = self.config_template
return self._fill_dict
class AzureCatalogItem(CloudInfraCatalogItem):
item_type = 'Azure'
@attr.s
class GenericCatalogItem(NonCloudInfraCatalogItem):
subtype = attr.ib(default=None)
item_type = 'Generic'
@property
def fill_dict(self):
self._fill_dict['subtype'] = self.subtype
return self._fill_dict
class GoogleCatalogItem(CloudInfraCatalogItem):
item_type = 'Google'
@attr.s
class OpenShiftCatalogItem(NonCloudInfraCatalogItem):
provider = attr.ib(default=None)
item_type = 'OpenShift Template'
@property
def fill_dict(self):
self._fill_dict['select_provider'] = self.provider
return self._fill_dict
class OpenStackCatalogItem(CloudInfraCatalogItem):
item_type = 'OpenStack'
@attr.s
class OrchestrationCatalogItem(NonCloudInfraCatalogItem):
orch_template = attr.ib(default=None)
item_type = 'Orchestration'
@property
def fill_dict(self):
self._fill_dict['select_config_template'] = getattr(
self.orch_template, 'template_name', None)
return self._fill_dict
class RHVCatalogItem(CloudInfraCatalogItem):
@property
def item_type(self):
if self.appliance.version >= '5.9.0.17':
return 'Red Hat Virtualization'
else:
return 'RHEV'
class SCVMMCatalogItem(CloudInfraCatalogItem):
item_type = 'SCVMM'
class VMwareCatalogItem(CloudInfraCatalogItem):
item_type = 'VMware'
@attr.s
class CatalogItemsCollection(BaseCollection):
ENTITY = BaseCatalogItem
AMAZON = AmazonCatalogItem
ANSIBLE_TOWER = AnsibleTowerCatalogItem
AZURE = AzureCatalogItem
GENERIC = GenericCatalogItem
OPENSHIFT = OpenShiftCatalogItem
ORCHESTRATION = OrchestrationCatalogItem
RHV = RHVCatalogItem
SCVMM = SCVMMCatalogItem
VMWARE = VMwareCatalogItem
# damn circular imports
@property
def ANSIBLE_PLAYBOOK(self): # noqa
from cfme.services.catalogs.catalog_items import ansible_catalog_items
return ansible_catalog_items.AnsiblePlaybookCatalogItem
def instantiate(self, catalog_item_class, *args, **kwargs):
return catalog_item_class.from_collection(self, *args, **kwargs)
def create(self, catalog_item_class, *args, **kwargs):
"""Creates a catalog item in the UI.
Args:
catalog_item_class: type of a catalog item
            *args: see the respective catalog item class
            **kwargs: see the respective catalog item class
Returns:
An instance of catalog_item_class
"""
cat_item = self.instantiate(catalog_item_class, *args, **kwargs)
view = navigate_to(cat_item, 'Add')
view.fill(cat_item.fill_dict)
view.add.click()
view = self.create_view(AllCatalogItemView)
# TODO move this assertion to tests
# view.flash.assert_success_message('Catalog Item "{}" was added'.format(
# cat_item.name), partial=True)
assert view.is_displayed
view.flash.assert_no_error()
return cat_item
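# Illustrative usage (an assumption -- the 'appliance.collections.catalog_items'
# wiring comes from the wider framework, not this module):
#
#   collection = appliance.collections.catalog_items
#   item = collection.create(
#       collection.GENERIC, name='my-item', description='demo item',
#       display_in=True, catalog=my_catalog, dialog='my-dialog',
#       subtype='Custom')
#   item.update({'description': 'updated description'})
#   item.delete()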
# Navigation steps
@navigator.register(CatalogItemsCollection, 'All')
class All(CFMENavigateStep):
VIEW = AllCatalogItemView
prerequisite = NavigateToAttribute('appliance.server', 'ServicesCatalog')
def step(self):
self.view.catalog_items.tree.click_path('All Catalog Items')
@navigator.register(CatalogItemsCollection, 'Choose Type')
class ChooseCatalogItemType(CFMENavigateStep):
VIEW = ChooseCatalogItemTypeView
prerequisite = NavigateToSibling('All')
def step(self):
self.prerequisite_view.configuration.item_select('Add a New Catalog Item')
@navigator.register(BaseCatalogItem, 'Add')
class CatalogItemAddStep(CFMENavigateStep):
prerequisite = NavigateToAttribute('parent', 'Choose Type')
@property
def VIEW(self): # noqa
if isinstance(self.obj, CloudInfraCatalogItem):
return TabbedAddCatalogItemView
else:
return AddCatalogItemView
def am_i_here(self):
        # Going to an Add page should always be done from first principles in
        # case a previous Add failed
return False
def step(self):
self.prerequisite_view.select_item_type.select_by_visible_text(self.obj.item_type)
@navigator.register(BaseCatalogItem, 'Details')
class CatalogItemDetailsStep(CFMENavigateStep):
VIEW = DetailsCatalogItemView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
tree = self.prerequisite_view.catalog_items.tree
tree.click_path(
'All Catalog Items',
getattr(self.obj.catalog, 'name', 'Unassigned'),
self.obj.name
)
@navigator.register(BaseCatalogItem, 'Edit')
class CatalogItemEditStep(CFMENavigateStep):
prerequisite = NavigateToSibling('Details')
@property
def VIEW(self): # noqa
if isinstance(self.obj, CloudInfraCatalogItem):
return TabbedEditCatalogItemView
else:
return EditCatalogItemView
def step(self):
self.prerequisite_view.configuration.item_select('Edit this Item')
@navigator.register(BaseCatalogItem, 'AddButtonGroup')
class AddButtonGroup(CFMENavigateStep):
VIEW = AddButtonGroupView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.configuration.item_select('Add a new Button Group')
@navigator.register(BaseCatalogItem, 'AddButton')
class AddButton(CFMENavigateStep):
VIEW = AddButtonView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.configuration.item_select('Add a new Button')
@navigator.register(BaseCatalogItem, 'EditTagsFromDetails')
class EditTags(CFMENavigateStep):
VIEW = TagPageView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.policy.item_select('Edit Tags')
| gpl-2.0 | -116,139,787,695,724,860 | 29.619965 | 96 | 0.643617 | false |
mkocikowski/u8 | u8.py | 1 | 3925 | # -*- coding: UTF-8 -*-
import sys
import logging
import unicodedata
from collections import defaultdict
import HTMLParser
logger = logging.getLogger(__name__)
C_PUNCTUATION = frozenset(("Pc", "Pd", "Ps", "Pe", "Po", "Lm"))
C_QUOTE = frozenset(("Pi", "Pf"))
C_MARK = frozenset(("Mn", "Mc", "Me"))
C_SYMBOL = frozenset(("Sk", "So"))
C_CONTROL = frozenset(("Cc", "Cf", "Cs", "Co", "Cn"))
def get_unicode_ordinals_for_categories(categories=C_CONTROL):
ordinals = []
for category in categories:
ordinals[len(ordinals):] = [o for o in range(sys.maxunicode + 1)
if unicodedata.category(unichr(o)) == category]
return frozenset(ordinals)
def get_valid_xml_unicode_ordinals():
    # XML 1.0 valid character ranges (inclusive): #x9 | #xA | #xD |
    # [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
    s = [i for i in range(0x20, 0xD7FF + 1)]
    s[len(s):] = [i for i in range(0xE000, 0xFFFD + 1)]
    s[len(s):] = [i for i in range(0x10000, 0x10FFFF + 1)]
    s[len(s):] = [0x9, 0xA, 0xD]
    return frozenset(s)
def get_invalid_xml_unicode_ordinals():
    s = frozenset(range(0x0, 0x10FFFF + 1)).difference(get_valid_xml_unicode_ordinals())
    return s
def get_translation_table():
d = {}
invalid = get_invalid_xml_unicode_ordinals()
strip = get_unicode_ordinals_for_categories(C_PUNCTUATION.union(C_MARK).union(C_SYMBOL).union(C_CONTROL))
d.update({c: None for c in invalid.union(strip)})
quotes = get_unicode_ordinals_for_categories(C_QUOTE)
d.update({c: u'"' for c in quotes})
d.update({ord(u): u for u in u"\":;'-,.<>@=!?$%"}) # these characters should always be allowed
return d
TRANSLATION_TABLE = get_translation_table()
def _is_sane_unicode(unicode_s=None):
assert(type(unicode_s) is unicode)
u = unicode_s.encode('utf-8', errors='strict').decode('utf-8', errors='strict')
if unicode_s == u: return True
return False
def _unicode_to_unicode(unicode_s=None):
assert(type(unicode_s) is unicode)
s = unicode.encode(unicode_s, 'utf-8', errors='replace')
return _str_to_unicode(s)
def _str_to_unicode(str_s=None):
assert(type(str_s) is str)
u = unicode(str_s, 'utf-8', errors='replace').replace(u"\ufffd", u"?")
return u
def to_unicode(data=None):
"""Converts input to unicode.
Returned unicode can be idempotently converted to utf-8 string and
back with 'errors' set to 'strict'. The conversion itself runs with
'errors' set to 'replace', meaning all errors will be replaced with
'?'.
Args:
data: str or unicode
Returns:
unicode
Raises:
TypeError, UnicodeError
"""
sanitized = None
if type(data) is unicode:
sanitized = _unicode_to_unicode(unicode_s=data)
elif type(data) is str:
sanitized = _str_to_unicode(str_s=data)
else:
raise TypeError("input must be str or unicode")
if not _is_sane_unicode(sanitized):
raise UnicodeError("input cannot be converted")
return sanitized
def unescape_html_entities(unicode_s=None):
"""Unescapes html entities in input unicode.
Args:
unicode_s: unicode
Returns:
unicode
"""
h = HTMLParser.HTMLParser()
u = h.unescape(unicode_s)
return u
def normalize(unicode_s=None):
"""Normalizes unicode to NFKC form."""
u = unicodedata.normalize('NFKC', unicode_s)
return u
def simplify(unicode_s=None):
"""Strips unwanted characters."""
u = unicode_s.translate(TRANSLATION_TABLE)
return u
def u8(data):
"""Converts input into sanitized, normalized utf-8 string.
Top level module function, call this unless you need fine-grained functionality.
Args:
data: str or unicode
Returns:
utf-8 encoded string. This string can be idempotently converted
to and from unicode using 'strict' errors.
Raises:
TypeError, UnicodeError
"""
u = to_unicode(data)
u = unescape_html_entities(u)
u = normalize(u)
u = simplify(u)
return u.encode('utf-8')
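# --- Illustrative usage (not part of the original module; assumes Python 2,
# where str is a byte string and unicode is a distinct type) ---
if __name__ == '__main__':
    sanitized = u8('Hello, world!')
    assert isinstance(sanitized, str)
    # By construction the result round-trips to unicode with errors='strict':
    assert sanitized.decode('utf-8', 'strict').encode('utf-8') == sanitized
    print(sanitized)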
| mit | 4,791,592,369,525,969,000 | 25.166667 | 109 | 0.642293 | false |
MatthewCox/MoronBot | MBPyFunctionServer/FunctionHandler.py | 1 | 1060 | import os
import GlobalVars
def LoadFunction(path, loadAs=''):
    loadType = 'l'
    name = path
    src = __import__('PythonFunctions.' + path, globals(), locals(), [])
    # Walk from the PythonFunctions package down to the actual module object;
    # use the import path here, not the (possibly aliased) registry name.
    for comp in path.split('.'):
        src = getattr(src, comp)
    if loadAs != '':
        name = loadAs
    if name in GlobalVars.functions:
        loadType = 'rel'
        src = reload(src)
    print str(src)
    func = src.Instantiate()
    GlobalVars.functions.update({name: func})
    return loadType
def UnloadFunction(name):
success = True
if name in GlobalVars.functions.keys():
del GlobalVars.functions[name]
else:
success = False
return success
def AutoLoadFunctions():
root = os.path.join('.', 'PythonFunctions')
for item in os.listdir(root):
if not os.path.isfile(os.path.join(root, item)):
continue
if not item.endswith('.py'):
continue
try:
LoadFunction(item[:-3])
except Exception, x:
print x.args
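# --- Illustrative usage (not part of the original module) ---
# Assuming a PythonFunctions package whose modules each define an
# Instantiate() factory (e.g. PythonFunctions/Echo.py):
#
#     import FunctionHandler
#     FunctionHandler.AutoLoadFunctions()                  # load all modules found
#     FunctionHandler.LoadFunction('Echo', loadAs='echo')  # or load one, aliased
#     FunctionHandler.UnloadFunction('echo')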
| mit | -6,151,578,014,082,445,000 | 22.043478 | 72 | 0.574528 | false |
DeepThoughtTeam/tensorflow | tensorflow/python/kernel_tests/cwise_ops_test.py | 1 | 46126 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for coefficient-wise operations.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import tensorflow.python.platform
import numpy as np
import tensorflow as tf
_ADD = lambda x, y: x + y
_SUB = lambda x, y: x - y
_MUL = lambda x, y: x * y
_POW = lambda x, y: x ** y
_TRUEDIV = lambda x, y: x / y
_FLOORDIV = lambda x, y: x // y
_MOD = lambda x, y: x % y
_NEG = lambda x: -x
_ABS = abs
_LT = lambda x, y: x < y
_LE = lambda x, y: x <= y
_GT = lambda x, y: x > y
_GE = lambda x, y: x >= y
_AND = lambda x, y: x & y
_OR = lambda x, y: x | y
_XOR = lambda x, y: x ^ y
_INV = lambda x: ~x
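# (Illustrative note, not in the original file.) These module-level lambdas
# exercise the Python operator overloads on tf.Tensor: e.g. _ADD(inx, iny)
# resolves to inx.__add__(iny), so each test covers both the named op
# (tf.add) and its overloaded-operator form (+).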
class UnaryOpTest(tf.test.TestCase):
def _compareCpu(self, x, np_func, tf_func):
np_ans = np_func(x)
with self.test_session(use_gpu=False):
inx = tf.convert_to_tensor(x)
y = tf_func(inx)
tf_cpu = y.eval()
self.assertShapeEqual(np_ans, y)
self.assertAllClose(np_ans, tf_cpu)
# TODO(ebrevdo): add gradient for lgamma (digamma) and remove lgamma here.
if tf_func in (tf.lgamma,):
return # Return early
if x.dtype == np.float32:
s = list(np.shape(x))
jacob_t, jacob_n = tf.test.compute_gradient(inx,
s,
y,
s,
x_init_value=x)
self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
elif x.dtype == np.float64:
s = list(np.shape(x))
jacob_t, jacob_n = tf.test.compute_gradient(inx,
s,
y,
s,
x_init_value=x)
self.assertAllClose(jacob_t, jacob_n, rtol=1e-5, atol=1e-5)
def _compareGpu(self, x, np_func, tf_func):
np_ans = np_func(x)
with self.test_session(use_gpu=True):
result = tf_func(tf.convert_to_tensor(x))
tf_gpu = result.eval()
self.assertShapeEqual(np_ans, result)
self.assertAllClose(np_ans, tf_gpu)
# TODO(zhifengc/ke): make gradient checker work on GPU.
def _compareBoth(self, x, np_func, tf_func):
self._compareCpu(x, np_func, tf_func)
self._compareGpu(x, np_func, tf_func)
def _inv(self, x):
return 1.0 / x
def _rsqrt(self, x):
return self._inv(np.sqrt(x))
def _sigmoid(self, x):
return 1.0 / (1.0 + np.exp(-x))
def _replace_domain_error_with_inf(self, fn):
def func(x):
try:
return fn(x)
except ValueError as e:
if "domain error" in e.message:
return np.inf * np.ones_like(x)
else:
raise e
return func
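  # (Illustrative note, not in the original file.) For example, math.lgamma
  # raises ValueError("math domain error") at non-positive integers, where
  # the gamma function has poles; the wrapper above maps that case to an
  # array of +inf, matching tf.lgamma's elementwise behavior.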
def testFloatBasic(self):
x = np.arange(-3, 3).reshape(1, 3, 2).astype(np.float32)
y = (x + .5).astype(np.float32) # no zero
z = (x + 15.5).astype(np.float32) # all positive
self._compareBoth(x, np.abs, tf.abs)
self._compareBoth(x, np.abs, _ABS)
self._compareBoth(x, np.negative, tf.neg)
self._compareBoth(x, np.negative, _NEG)
self._compareBoth(y, self._inv, tf.inv)
self._compareBoth(x, np.square, tf.square)
self._compareBoth(z, np.sqrt, tf.sqrt)
self._compareBoth(z, self._rsqrt, tf.rsqrt)
self._compareBoth(x, np.exp, tf.exp)
self._compareBoth(z, np.log, tf.log)
self._compareBoth(x, np.tanh, tf.tanh)
self._compareBoth(x, self._sigmoid, tf.sigmoid)
self._compareBoth(y, np.sign, tf.sign)
self._compareBoth(x, np.sin, tf.sin)
self._compareBoth(x, np.cos, tf.cos)
self._compareBoth(
x,
np.vectorize(self._replace_domain_error_with_inf(math.lgamma)),
tf.lgamma)
self._compareBoth(x, np.vectorize(math.erf), tf.erf)
self._compareBoth(x, np.vectorize(math.erfc), tf.erfc)
def testFloatTanhEdge(self):
x = np.arange(40, 40 + 6).reshape(6).astype(np.float32)
self._compareBoth(x, np.tanh, tf.tanh)
x = np.arange(-40, -40 + 6).reshape(6).astype(np.float32)
self._compareBoth(x, np.tanh, tf.tanh)
def testFloatEmpty(self):
x = np.empty((2, 0, 5), dtype=np.float32)
self._compareBoth(x, np.abs, tf.abs)
self._compareBoth(x, np.abs, _ABS)
self._compareBoth(x, np.negative, tf.neg)
self._compareBoth(x, np.negative, _NEG)
self._compareBoth(x, self._inv, tf.inv)
self._compareBoth(x, np.square, tf.square)
self._compareBoth(x, np.sqrt, tf.sqrt)
self._compareBoth(x, self._rsqrt, tf.rsqrt)
self._compareBoth(x, np.exp, tf.exp)
self._compareBoth(x, np.log, tf.log)
self._compareBoth(x, np.tanh, tf.tanh)
self._compareBoth(x, self._sigmoid, tf.sigmoid)
self._compareBoth(x, np.sign, tf.sign)
self._compareBoth(x, np.sin, tf.sin)
self._compareBoth(x, np.cos, tf.cos)
def testDoubleBasic(self):
x = np.arange(-3, 3).reshape(1, 3, 2).astype(np.float64)
y = (x + .5).astype(np.float64) # no zero
z = (x + 15.5).astype(np.float64) # all positive
self._compareBoth(x, np.abs, tf.abs)
self._compareBoth(x, np.abs, _ABS)
self._compareBoth(x, np.negative, tf.neg)
self._compareBoth(x, np.negative, _NEG)
self._compareBoth(y, self._inv, tf.inv)
self._compareBoth(x, np.square, tf.square)
self._compareBoth(z, np.sqrt, tf.sqrt)
self._compareBoth(z, self._rsqrt, tf.rsqrt)
self._compareBoth(x, np.exp, tf.exp)
self._compareBoth(z, np.log, tf.log)
self._compareBoth(x, np.tanh, tf.tanh)
self._compareBoth(x, self._sigmoid, tf.sigmoid)
self._compareBoth(y, np.sign, tf.sign)
self._compareBoth(x, np.sin, tf.sin)
self._compareBoth(x, np.cos, tf.cos)
def testInt32Basic(self):
x = np.arange(-6, 6, 2).reshape(1, 3, 2).astype(np.int32)
self._compareCpu(x, np.abs, tf.abs)
self._compareCpu(x, np.abs, _ABS)
self._compareCpu(x, np.negative, tf.neg)
self._compareCpu(x, np.negative, _NEG)
self._compareCpu(x, np.square, tf.square)
self._compareCpu(x, np.sign, tf.sign)
def testInt64Basic(self):
x = np.arange(
-6 << 40, 6 << 40, 2 << 40).reshape(1, 3, 2).astype(np.int64)
self._compareCpu(x, np.abs, tf.abs)
self._compareCpu(x, np.abs, _ABS)
self._compareCpu(x, np.negative, tf.neg)
self._compareCpu(x, np.negative, _NEG)
self._compareCpu(x, np.square, tf.square)
self._compareCpu(x, np.sign, tf.sign)
def testComplex64Basic(self):
x = np.complex(1, 1) * np.arange(-3, 3).reshape(1, 3, 2).astype(
np.complex64)
y = x + 0.5 # no zeros
self._compareCpu(x, np.abs, tf.abs)
self._compareCpu(x, np.abs, _ABS)
self._compareCpu(x, np.negative, tf.neg)
self._compareCpu(x, np.negative, _NEG)
self._compareCpu(y, self._inv, tf.inv)
self._compareCpu(x, np.square, tf.square)
self._compareCpu(x, np.sqrt, tf.sqrt)
self._compareCpu(y, self._rsqrt, tf.rsqrt)
self._compareCpu(x, np.exp, tf.exp)
self._compareCpu(y, np.log, tf.log)
self._compareCpu(x, np.tanh, tf.tanh)
self._compareCpu(x, self._sigmoid, tf.sigmoid)
self._compareCpu(x, np.sin, tf.sin)
self._compareCpu(x, np.cos, tf.cos)
class BinaryOpTest(tf.test.TestCase):
def _compareCpu(self, x, y, np_func, tf_func):
np_ans = np_func(x, y)
with self.test_session(use_gpu=False):
inx = tf.convert_to_tensor(x)
iny = tf.convert_to_tensor(y)
out = tf_func(inx, iny)
tf_cpu = out.eval()
# Test that the op takes precedence over numpy operators.
np_left = tf_func(x, iny).eval()
np_right = tf_func(inx, y).eval()
if np_ans.dtype != np.object:
self.assertAllClose(np_ans, tf_cpu)
self.assertAllClose(np_ans, np_left)
self.assertAllClose(np_ans, np_right)
self.assertShapeEqual(np_ans, out)
def _compareGradientX(self, x, y, np_func, tf_func):
z = np_func(x, y)
zs = list(z.shape)
with self.test_session():
inx = tf.convert_to_tensor(x)
iny = tf.convert_to_tensor(y)
out = tf_func(inx, iny)
xs = list(x.shape)
jacob_t, jacob_n = tf.test.compute_gradient(inx,
xs,
out,
zs,
x_init_value=x)
if x.dtype == np.float32:
self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
elif x.dtype == np.float64:
self.assertAllClose(jacob_t, jacob_n, rtol=1e-5, atol=1e-5)
def _compareGradientY(self, x, y, np_func, tf_func):
z = np_func(x, y)
zs = list(z.shape)
with self.test_session():
inx = tf.convert_to_tensor(x)
iny = tf.convert_to_tensor(y)
out = tf_func(inx, iny)
ys = list(np.shape(y))
jacob_t, jacob_n = tf.test.compute_gradient(iny,
ys,
out,
zs,
x_init_value=y)
if x.dtype == np.float32:
self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
elif x.dtype == np.float64:
self.assertAllClose(jacob_t, jacob_n, rtol=1e-5, atol=1e-5)
def _compareGpu(self, x, y, np_func, tf_func):
np_ans = np_func(x, y)
with self.test_session(use_gpu=True):
inx = tf.convert_to_tensor(x)
iny = tf.convert_to_tensor(y)
out = tf_func(inx, iny)
tf_gpu = out.eval()
self.assertAllClose(np_ans, tf_gpu)
self.assertShapeEqual(np_ans, out)
# TODO(zhifengc/ke): make gradient checker work on GPU.
def _compareBoth(self, x, y, np_func, tf_func):
self._compareCpu(x, y, np_func, tf_func)
if x.dtype in (np.float32, np.float64):
if tf_func not in (_FLOORDIV, tf.floordiv):
self._compareGradientX(x, y, np_func, tf_func)
self._compareGradientY(x, y, np_func, tf_func)
self._compareGpu(x, y, np_func, tf_func)
def testFloatBasic(self):
x = np.linspace(-10, 10, 6).reshape(1, 3, 2).astype(np.float32)
y = np.linspace(20, -20, 6).reshape(1, 3, 2).astype(np.float32)
self._compareBoth(x, y, np.add, tf.add)
self._compareBoth(x, y, np.subtract, tf.sub)
self._compareBoth(x, y, np.multiply, tf.mul)
self._compareBoth(x, y + 0.1, np.true_divide, tf.truediv)
self._compareBoth(x, y + 0.1, np.floor_divide, tf.floordiv)
self._compareBoth(x, y, np.add, _ADD)
self._compareBoth(x, y, np.subtract, _SUB)
self._compareBoth(x, y, np.multiply, _MUL)
self._compareBoth(x, y + 0.1, np.true_divide, _TRUEDIV)
self._compareBoth(x, y + 0.1, np.floor_divide, _FLOORDIV)
def testFloatDifferentShapes(self):
x = np.array([1, 2, 3, 4]).reshape(2, 2).astype(np.float32)
y = np.array([1, 2]).reshape(2, 1).astype(np.float32)
with self.test_session() as sess:
inx = tf.convert_to_tensor(x)
iny = tf.convert_to_tensor(y)
s = tf.reduce_sum(inx * iny)
gx, gy = sess.run(tf.gradients(s, [inx, iny]))
# gx is simply the broadcasted y
self.assertAllEqual(gx, np.array([1, 1, 2, 2])
.reshape(2, 2).astype(np.float32))
# gy is x's column summed up
self.assertAllEqual(gy, np.array([3, 7]).
reshape(2, 1).astype(np.float32))
def testDoubleBasic(self):
x = np.linspace(-10, 10, 6).reshape(1, 3, 2).astype(np.float64)
y = np.linspace(20, -20, 6).reshape(1, 3, 2).astype(np.float64)
self._compareBoth(x, y, np.add, tf.add)
self._compareBoth(x, y, np.subtract, tf.sub)
self._compareBoth(x, y, np.multiply, tf.mul)
self._compareBoth(x, y + 0.1, np.true_divide, tf.truediv)
self._compareBoth(x, y + 0.1, np.floor_divide, tf.floordiv)
self._compareBoth(x, y, np.add, _ADD)
self._compareBoth(x, y, np.subtract, _SUB)
self._compareBoth(x, y, np.multiply, _MUL)
self._compareBoth(x, y + 0.1, np.true_divide, _TRUEDIV)
self._compareBoth(x, y + 0.1, np.floor_divide, _FLOORDIV)
def testInt8Basic(self):
x = np.arange(1, 13, 2).reshape(1, 3, 2).astype(np.int8)
y = np.arange(1, 7, 1).reshape(1, 3, 2).astype(np.int8)
self._compareBoth(x, y, np.multiply, tf.mul)
self._compareBoth(x, y, np.multiply, _MUL)
def testInt16Basic(self):
x = np.arange(1, 13, 2).reshape(1, 3, 2).astype(np.int16)
y = np.arange(1, 7, 1).reshape(1, 3, 2).astype(np.int16)
self._compareBoth(x, y, np.multiply, tf.mul)
self._compareBoth(x, y, np.multiply, _MUL)
def testInt32Basic(self):
x = np.arange(1, 13, 2).reshape(1, 3, 2).astype(np.int32)
y = np.arange(1, 7, 1).reshape(1, 3, 2).astype(np.int32)
self._compareBoth(x, y, np.add, tf.add)
self._compareBoth(x, y, np.subtract, tf.sub)
self._compareBoth(x, y, np.multiply, tf.mul)
self._compareBoth(x, y, np.true_divide, tf.truediv)
self._compareBoth(x, y, np.floor_divide, tf.floordiv)
self._compareBoth(x, y, np.mod, tf.mod)
self._compareBoth(x, y, np.add, _ADD)
self._compareBoth(x, y, np.subtract, _SUB)
self._compareBoth(x, y, np.multiply, _MUL)
self._compareBoth(x, y, np.true_divide, _TRUEDIV)
self._compareBoth(x, y, np.floor_divide, _FLOORDIV)
self._compareBoth(x, y, np.mod, _MOD)
def testInt64Basic(self):
x = np.arange(1 << 40, 13 << 40, 2 << 40).reshape(1, 3, 2).astype(np.int64)
y = np.arange(1, 7, 1).reshape(1, 3, 2).astype(np.int64)
self._compareBoth(x, y, np.subtract, tf.sub)
self._compareBoth(x, y, np.multiply, tf.mul)
self._compareBoth(x, y, np.true_divide, tf.truediv)
self._compareBoth(x, y, np.floor_divide, tf.floordiv)
self._compareBoth(x, y, np.mod, tf.mod)
self._compareBoth(x, y, np.subtract, _SUB)
self._compareBoth(x, y, np.multiply, _MUL)
self._compareBoth(x, y, np.true_divide, _TRUEDIV)
self._compareBoth(x, y, np.floor_divide, _FLOORDIV)
self._compareBoth(x, y, np.mod, _MOD)
def testComplex64Basic(self):
x = np.complex(1, 1) * np.linspace(-10, 10, 6).reshape(1, 3, 2).astype(
np.complex64)
y = np.complex(1, 1) * np.linspace(20, -20, 6).reshape(1, 3, 2).astype(
np.complex64)
self._compareCpu(x, y, np.add, tf.add)
self._compareCpu(x, y, np.subtract, tf.sub)
self._compareCpu(x, y, np.multiply, tf.mul)
self._compareCpu(x, y + 0.1, np.true_divide, tf.truediv)
self._compareCpu(x, y, np.add, _ADD)
self._compareCpu(x, y, np.subtract, _SUB)
self._compareCpu(x, y, np.multiply, _MUL)
self._compareCpu(x, y + 0.1, np.true_divide, _TRUEDIV)
def testString(self):
x = np.array([["x_0_0", "x_0_1", "x_0_2"],
["x_1_0", "x_1_1", "x_1_2"],
["x_2_0", "x_2_1", "x_2_2"]], dtype=np.object)
y = np.array([["y_0_0", "y_0_1", "y_0_2"],
["y_1_0", "y_1_1", "y_1_2"],
["y_2_0", "y_2_1", "y_2_2"]], dtype=np.object)
z = np.array([["z_0", "z_1", "z_2"]], dtype=np.object)
w = np.array("w", dtype=np.object)
self._compareCpu(x, y, _ADD, _ADD)
self._compareCpu(x, z, _ADD, _ADD)
self._compareCpu(x, w, _ADD, _ADD)
self._compareCpu(z, w, _ADD, _ADD)
def _compareBCast(self, xs, ys, dtype, np_func, tf_func):
x = (1 + np.linspace(0, 5, np.prod(xs))).astype(dtype).reshape(xs)
y = (1 + np.linspace(0, 5, np.prod(ys))).astype(dtype).reshape(ys)
self._compareCpu(x, y, np_func, tf_func)
if x.dtype in (np.float32, np.float64):
if tf_func not in (_FLOORDIV, tf.floordiv):
self._compareGradientX(x, y, np_func, tf_func)
self._compareGradientY(x, y, np_func, tf_func)
self._compareGpu(x, y, np_func, tf_func)
# TODO(josh11b,vrv): Refactor this to use parameterized tests.
def _testBCastByFunc(self, funcs, xs, ys):
dtypes = [
np.float32,
np.float64,
np.int32,
np.int64,
np.complex64
]
for dtype in dtypes:
for (np_func, tf_func) in funcs:
if dtype == np.complex64 and tf_func in (_FLOORDIV, tf.floordiv):
continue # floordiv makes no sense for complex numbers
self._compareBCast(xs, ys, dtype, np_func, tf_func)
self._compareBCast(ys, xs, dtype, np_func, tf_func)
def _testBCastA(self, xs, ys):
funcs = [
(np.add, tf.add),
(np.add, _ADD),
]
self._testBCastByFunc(funcs, xs, ys)
def _testBCastB(self, xs, ys):
funcs = [
(np.subtract, tf.sub),
(np.subtract, _SUB),
(np.power, tf.pow),
]
self._testBCastByFunc(funcs, xs, ys)
def _testBCastC(self, xs, ys):
funcs = [
(np.multiply, tf.mul),
(np.multiply, _MUL),
]
self._testBCastByFunc(funcs, xs, ys)
def _testBCastD(self, xs, ys):
funcs = [
(np.true_divide, tf.truediv),
(np.floor_divide, tf.floordiv),
(np.true_divide, _TRUEDIV),
(np.floor_divide, _FLOORDIV),
]
self._testBCastByFunc(funcs, xs, ys)
def testBCast_0A(self):
self._testBCastA([1, 3, 2], [1])
def testBCast_0B(self):
self._testBCastB([1, 3, 2], [1])
def testBCast_0C(self):
self._testBCastC([1, 3, 2], [1])
def testBCast_0D(self):
self._testBCastD([1, 3, 2], [1])
def testBCast_1A(self):
self._testBCastA([1, 3, 2], [2])
def testBCast_1B(self):
self._testBCastB([1, 3, 2], [2])
def testBCast_1C(self):
self._testBCastC([1, 3, 2], [2])
def testBCast_1D(self):
self._testBCastD([1, 3, 2], [2])
def testBCast_2A(self):
self._testBCastA([1, 3, 2], [3, 2])
def testBCast_2B(self):
self._testBCastB([1, 3, 2], [3, 2])
def testBCast_2C(self):
self._testBCastC([1, 3, 2], [3, 2])
def testBCast_2D(self):
self._testBCastD([1, 3, 2], [3, 2])
def testBCast_3A(self):
self._testBCastA([1, 3, 2], [3, 1])
def testBCast_3B(self):
self._testBCastB([1, 3, 2], [3, 1])
def testBCast_3C(self):
self._testBCastC([1, 3, 2], [3, 1])
def testBCast_3D(self):
self._testBCastD([1, 3, 2], [3, 1])
def testBCast_4A(self):
self._testBCastA([1, 3, 2], [1, 3, 2])
def testBCast_4B(self):
self._testBCastB([1, 3, 2], [1, 3, 2])
def testBCast_4C(self):
self._testBCastC([1, 3, 2], [1, 3, 2])
def testBCast_4D(self):
self._testBCastD([1, 3, 2], [1, 3, 2])
def testBCast_5A(self):
self._testBCastA([1, 3, 2], [2, 3, 1])
def testBCast_5B(self):
self._testBCastB([1, 3, 2], [2, 3, 1])
def testBCast_5C(self):
self._testBCastC([1, 3, 2], [2, 3, 1])
def testBCast_5D(self):
self._testBCastD([1, 3, 2], [2, 3, 1])
def testBCast_6A(self):
self._testBCastA([1, 3, 2], [2, 1, 1])
def testBCast_6B(self):
self._testBCastB([1, 3, 2], [2, 1, 1])
def testBCast_6C(self):
self._testBCastC([1, 3, 2], [2, 1, 1])
def testBCast_6D(self):
self._testBCastD([1, 3, 2], [2, 1, 1])
def testBCast_7A(self):
self._testBCastA([1, 3, 2], [1, 3, 1])
def testBCast_7B(self):
self._testBCastB([1, 3, 2], [1, 3, 1])
def testBCast_7C(self):
self._testBCastC([1, 3, 2], [1, 3, 1])
def testBCast_7D(self):
self._testBCastD([1, 3, 2], [1, 3, 1])
def testBCast_8A(self):
self._testBCastA([2, 1, 5], [2, 3, 1])
def testBCast_8B(self):
self._testBCastB([2, 1, 5], [2, 3, 1])
def testBCast_8C(self):
self._testBCastC([2, 1, 5], [2, 3, 1])
def testBCast_8D(self):
self._testBCastD([2, 1, 5], [2, 3, 1])
def testBCast_9A(self):
self._testBCastA([2, 0, 5], [2, 0, 1])
def testBCast_9B(self):
self._testBCastB([2, 0, 5], [2, 0, 1])
def testBCast_9C(self):
self._testBCastC([2, 0, 5], [2, 0, 1])
def testBCast_9D(self):
self._testBCastD([2, 0, 5], [2, 0, 1])
def testBCast_10A(self):
self._testBCastA([2, 3, 0], [2, 3, 1])
def testBCast_10B(self):
self._testBCastB([2, 3, 0], [2, 3, 1])
def testBCast_10C(self):
self._testBCastC([2, 3, 0], [2, 3, 1])
def testBCast_10D(self):
self._testBCastD([2, 3, 0], [2, 3, 1])
def testBCast_11A(self):
self._testBCastA([1, 3, 2], [1, 3, 2])
def testBCast_11B(self):
self._testBCastB([1, 3, 2], [1, 3, 2])
def testBCast_11C(self):
self._testBCastC([1, 3, 2], [1, 3, 2])
def testBCast_11D(self):
self._testBCastD([1, 3, 2], [1, 3, 2])
def testBCast_12A(self):
self._testBCastA([1, 1, 1, 1, 3, 2], [1, 3, 2])
def testBCast_12B(self):
self._testBCastB([1, 1, 1, 1, 3, 2], [1, 3, 2])
def testBCast_12C(self):
self._testBCastC([1, 1, 1, 1, 3, 2], [1, 3, 2])
def testBCast_12D(self):
self._testBCastD([1, 1, 1, 1, 3, 2], [1, 3, 2])
def testBCast_13A(self):
self._testBCastA([1, 3, 2, 1, 1], [1])
def testBCast_13B(self):
self._testBCastB([1, 3, 2, 1, 1], [1])
def testBCast_13C(self):
self._testBCastC([1, 3, 2, 1, 1], [1])
def testBCast_13D(self):
self._testBCastD([1, 3, 2, 1, 1], [1])
def testBCast_14A(self):
self._testBCastA([2, 3, 1, 1, 5], [1])
def testBCast_14B(self):
self._testBCastB([2, 3, 1, 1, 5], [1])
def testBCast_14C(self):
self._testBCastC([2, 3, 1, 1, 5], [1])
def testBCast_14D(self):
self._testBCastD([2, 3, 1, 1, 5], [1])
def testBCast_15A(self):
self._testBCastA([10, 3, 1, 2], [3, 1, 2])
def testBCast_15B(self):
self._testBCastB([10, 3, 1, 2], [3, 1, 2])
def testBCast_15C(self):
self._testBCastC([10, 3, 1, 2], [3, 1, 2])
def testBCast_15D(self):
self._testBCastD([10, 3, 1, 2], [3, 1, 2])
def testMismatchedDimensions(self):
for func in [tf.add, tf.sub, tf.mul, tf.div, _ADD, _SUB, _MUL, _TRUEDIV,
_FLOORDIV]:
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Incompatible shapes" in str(e)):
func(tf.convert_to_tensor([10.0, 20.0, 30.0]),
tf.convert_to_tensor([[40.0, 50.0], [60.0, 70.0]]))
class ComparisonOpTest(tf.test.TestCase):
def _compare(self, func, x, y, dtype):
with self.test_session(use_gpu=False):
out = func(tf.convert_to_tensor(np.array([x]).astype(dtype)),
tf.convert_to_tensor(np.array([y]).astype(dtype)))
ret = out.eval()
return ret[0]
def testScalarCompareScalar(self):
dtypes = [np.float32, np.float64, np.int32, np.int64]
data = [-1, 0, 1]
for t in dtypes:
for x in data:
for y in data:
self.assertEqual(self._compare(tf.less, x, y, t),
x < y)
self.assertEqual(self._compare(tf.less_equal, x, y, t),
x <= y)
self.assertEqual(self._compare(tf.greater, x, y, t),
x > y)
self.assertEqual(self._compare(tf.greater_equal, x, y, t),
x >= y)
self.assertEqual(self._compare(tf.equal, x, y, t),
x == y)
self.assertEqual(self._compare(tf.not_equal, x, y, t),
x != y)
def _compareCpu(self, x, y, np_func, tf_func):
np_ans = np_func(x, y)
with self.test_session(use_gpu=False):
out = tf_func(tf.convert_to_tensor(x), tf.convert_to_tensor(y))
tf_cpu = out.eval()
self.assertAllEqual(np_ans, tf_cpu)
def _compareGpu(self, x, y, np_func, tf_func):
np_ans = np_func(x, y)
with self.test_session(use_gpu=True):
out = tf_func(tf.convert_to_tensor(x), tf.convert_to_tensor(y))
tf_gpu = out.eval()
self.assertAllEqual(np_ans, tf_gpu)
def _compareBoth(self, x, y, np_func, tf_func):
self._compareCpu(x, y, np_func, tf_func)
if x.dtype == np.float32 or x.dtype == np.float64:
self._compareGpu(x, y, np_func, tf_func)
def testTensorCompareTensor(self):
x = np.linspace(-15, 15, 6).reshape(1, 3, 2)
y = np.linspace(20, -10, 6).reshape(1, 3, 2)
for t in [np.float32, np.float64, np.int32, np.int64]:
xt = x.astype(t)
yt = y.astype(t)
self._compareBoth(xt, yt, np.less, tf.less)
self._compareBoth(xt, yt, np.less_equal, tf.less_equal)
self._compareBoth(xt, yt, np.greater, tf.greater)
self._compareBoth(xt, yt, np.greater_equal, tf.greater_equal)
self._compareBoth(xt, yt, np.equal, tf.equal)
self._compareBoth(xt, yt, np.not_equal, tf.not_equal)
# TODO(zhifengc): complex64 doesn't work on GPU yet.
self._compareCpu(x.astype(np.complex64), y.astype(np.complex64),
np.equal, tf.equal)
self._compareCpu(x.astype(np.complex64), y.astype(np.complex64),
np.not_equal, tf.not_equal)
def _compareBCast(self, xs, ys, dtype, np_func, tf_func):
x = np.linspace(-15, 15, np.prod(xs)).astype(dtype).reshape(xs)
y = np.linspace(20, -10, np.prod(ys)).astype(dtype).reshape(ys)
self._compareCpu(x, y, np_func, tf_func)
self._compareCpu(y, x, np_func, tf_func)
if x.dtype == np.float32 or x.dtype == np.float64:
self._compareGpu(x, y, np_func, tf_func)
self._compareGpu(y, x, np_func, tf_func)
def _testBCastByFunc(self, np_func, tf_func):
shapes = [
([1, 3, 2], [1]),
([1, 3, 2], [2]),
([1, 3, 2], [3, 2]),
([1, 3, 2], [3, 1]),
([1, 3, 2], [1, 3, 2]),
([1, 3, 2], [2, 3, 1]),
([1, 3, 2], [2, 1, 1]),
([1, 3, 2], [1, 3, 1]),
([2, 1, 5], [2, 3, 1]),
([2, 0, 5], [2, 0, 1]),
([2, 3, 0], [2, 3, 1]),
]
dtypes = [
np.float32,
np.float64,
np.int32,
np.int64,
]
for (xs, ys) in shapes:
for dtype in dtypes:
self._compareBCast(xs, ys, dtype, np_func, tf_func)
def testBCastLess(self):
self._testBCastByFunc(np.less, tf.less)
def testBCastLessEqual(self):
self._testBCastByFunc(np.less_equal, tf.less_equal)
def testBCastGreater(self):
self._testBCastByFunc(np.greater, tf.greater)
def testBCastGreaterEqual(self):
self._testBCastByFunc(np.greater_equal, tf.greater_equal)
def testBCastEqual(self):
self._testBCastByFunc(np.equal, tf.equal)
def testBCastNotEqual(self):
self._testBCastByFunc(np.not_equal, tf.not_equal)
def testShapeMismatch(self):
dtypes = [np.float32, np.float64, np.int32, np.int64]
funcs = [tf.less, tf.less_equal, tf.greater,
tf.greater_equal, tf.equal, tf.not_equal]
x = np.arange(0, 10).reshape([2, 5])
y = np.arange(0, 10).reshape([5, 2])
for t in dtypes:
for f in funcs:
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Incompatible shapes" in str(e)):
f(x.astype(t), y.astype(t))
class LogicalOpTest(tf.test.TestCase):
def _compareBinary(self, x, y, np_func, tf_func, use_gpu=False):
np_ans = np_func(x, y)
with self.test_session(use_gpu=use_gpu):
inx = tf.convert_to_tensor(x)
iny = tf.convert_to_tensor(y)
out = tf_func(inx, iny)
tf_val = out.eval()
self.assertEqual(out.dtype, tf.bool)
self.assertAllEqual(np_ans, tf_val)
self.assertShapeEqual(np_ans, out)
def _not(self, x, use_gpu=False):
np_ans = np.logical_not(x)
with self.test_session(use_gpu=use_gpu):
out = tf.logical_not(tf.convert_to_tensor(x))
tf_val = out.eval()
self.assertEqual(out.dtype, tf.bool)
self.assertAllEqual(np_ans, tf_val)
self.assertShapeEqual(np_ans, out)
def testScalar(self):
data = [np.array([True]), np.array([False])]
for use_gpu in [True, False]:
for x in data:
self._not(x, use_gpu)
for x in data:
for y in data:
self._compareBinary(
x, y, np.logical_and, tf.logical_and, use_gpu)
self._compareBinary(
x, y, np.logical_or, tf.logical_or, use_gpu)
self._compareBinary(
x, y, np.logical_xor, tf.logical_xor, use_gpu)
def testTensor(self):
x = np.random.randint(0, 2, 6).astype(np.bool).reshape(1, 3, 2)
y = np.random.randint(0, 2, 6).astype(np.bool).reshape(1, 3, 2)
for use_gpu in [True, False]:
self._not(x, use_gpu)
self._compareBinary(x, y, np.logical_and, tf.logical_and, use_gpu)
self._compareBinary(x, y, np.logical_or, tf.logical_or, use_gpu)
self._compareBinary(x, y, np.logical_xor, tf.logical_xor, use_gpu)
def testBCast(self):
shapes = [
([1, 3, 2], [1]),
([1, 3, 2], [2]),
([1, 3, 2], [3, 2]),
([1, 3, 2], [3, 1]),
([1, 3, 2], [1, 3, 2]),
([1, 3, 2], [2, 3, 1]),
([1, 3, 2], [2, 1, 1]),
([1, 3, 2], [1, 3, 1]),
([2, 1, 5], [2, 3, 1]),
([2, 0, 5], [2, 0, 1]),
([2, 3, 0], [2, 3, 1]),
]
for (xs, ys) in shapes:
x = np.random.randint(0, 2, np.prod(xs)).astype(np.bool).reshape(xs)
y = np.random.randint(0, 2, np.prod(ys)).astype(np.bool).reshape(ys)
for use_gpu in [True, False]:
self._compareBinary(x, y, np.logical_and, tf.logical_and, use_gpu)
self._compareBinary(x, y, np.logical_or, tf.logical_or, use_gpu)
self._compareBinary(x, y, np.logical_xor, tf.logical_xor, use_gpu)
def testShapeMismatch(self):
x = np.random.randint(0, 2, 6).astype(np.bool).reshape(1, 3, 2)
y = np.random.randint(0, 2, 6).astype(np.bool).reshape(3, 2, 1)
for f in [tf.logical_and, tf.logical_or, tf.logical_xor]:
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Incompatible shapes" in str(e)):
f(x, y)
class SelectOpTest(tf.test.TestCase):
def _compare(self, c, x, y, use_gpu):
np_ans = np.where(c, x, y)
with self.test_session(use_gpu=use_gpu):
out = tf.select(c, x, y)
tf_ans = out.eval()
self.assertAllEqual(np_ans, tf_ans)
self.assertShapeEqual(np_ans, out)
def _compareGradientX(self, c, x, y):
with self.test_session():
inx = tf.convert_to_tensor(x)
iny = tf.convert_to_tensor(y)
out = tf.select(c, inx, iny)
s = list(np.shape(c))
jacob_t, jacob_n = tf.test.compute_gradient(inx,
s,
out,
s,
x_init_value=x)
if x.dtype == np.float32:
self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
elif x.dtype == np.float64:
self.assertAllClose(jacob_t, jacob_n, rtol=1e-5, atol=1e-5)
def _compareGradientY(self, c, x, y):
with self.test_session():
inx = tf.convert_to_tensor(x)
iny = tf.convert_to_tensor(y)
out = tf.select(c, inx, iny)
s = list(np.shape(c))
jacob_t, jacob_n = tf.test.compute_gradient(iny,
s,
out,
s,
x_init_value=y)
if x.dtype == np.float32:
self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
elif x.dtype == np.float64:
self.assertAllClose(jacob_t, jacob_n, rtol=1e-5, atol=1e-5)
def testBasic(self):
c = np.random.randint(0, 2, 6).astype(np.bool).reshape(1, 3, 2)
x = np.random.rand(1, 3, 2) * 100
y = np.random.rand(1, 3, 2) * 100
for t in [np.float32, np.float64, np.int32, np.int64, np.complex64]:
xt = x.astype(t)
yt = y.astype(t)
self._compare(c, xt, yt, use_gpu=False)
if t in [np.float32, np.float64]:
self._compare(c, xt, yt, use_gpu=True)
def testGradients(self):
c = np.random.randint(0, 2, 6).astype(np.bool).reshape(1, 3, 2)
x = np.random.rand(1, 3, 2) * 100
y = np.random.rand(1, 3, 2) * 100
for t in [np.float32, np.float64]:
xt = x.astype(t)
yt = y.astype(t)
self._compareGradientX(c, xt, yt)
self._compareGradientY(c, xt, yt)
def testShapeMismatch(self):
c = np.random.randint(0, 2, 6).astype(np.bool).reshape(1, 3, 2)
x = np.random.rand(1, 3, 2) * 100
y = np.random.rand(2, 5, 3) * 100
for t in [np.float32, np.float64, np.int32, np.int64, np.complex64]:
xt = x.astype(t)
yt = y.astype(t)
with self.assertRaises(ValueError):
tf.select(c, xt, yt)
class MinMaxOpTest(tf.test.TestCase):
def _compare(self, x, y, use_gpu):
np_min, np_max = np.minimum(x, y), np.maximum(x, y)
with self.test_session(use_gpu=use_gpu) as sess:
inx = tf.convert_to_tensor(x)
iny = tf.convert_to_tensor(y)
omin, omax = tf.minimum(inx, iny), tf.maximum(inx, iny)
tf_min, tf_max = sess.run([omin, omax])
self.assertAllEqual(np_min, tf_min)
self.assertAllEqual(np_max, tf_max)
def testBasic(self):
x = np.random.rand(1, 3, 2) * 100.
y = np.random.rand(1, 3, 2) * 100.
for t in [np.float32, np.float64, np.int32, np.int64]:
self._compare(x.astype(t), y.astype(t), use_gpu=False)
self._compare(x.astype(t), y.astype(t), use_gpu=True)
def testDifferentShapes(self):
x = np.random.rand(1, 3, 2) * 100.
y = np.random.rand(2) * 100. # should broadcast
for t in [np.float32, np.float64, np.int32, np.int64]:
self._compare(x.astype(t), y.astype(t), use_gpu=False)
self._compare(x.astype(t), y.astype(t), use_gpu=True)
def testScalar(self):
x = np.random.rand(1, 3, 2) * 100.
y = np.asscalar(np.random.rand(1) * 100.) # should broadcast
# dropped np.float64, int64 because TF automatically converts to 32 bit
for t in [np.float32, np.int32]:
self._compare(x.astype(t), t(y), use_gpu=False)
self._compare(x.astype(t), t(y), use_gpu=True)
def _compareGradientX(self, func, x, y):
with self.test_session():
inx = tf.convert_to_tensor(x)
iny = tf.convert_to_tensor(y)
out = func(inx, iny)
s = list(np.shape(x))
jacob_t, jacob_n = tf.test.compute_gradient(inx,
s,
out,
s,
x_init_value=x)
if x.dtype == np.float32:
self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
elif x.dtype == np.float64:
self.assertAllClose(jacob_t, jacob_n, rtol=1e-5, atol=1e-5)
def _compareGradientY(self, func, x, y):
with self.test_session():
inx = tf.convert_to_tensor(x)
iny = tf.convert_to_tensor(y)
out = func(inx, iny)
s = list(np.shape(x))
jacob_t, jacob_n = tf.test.compute_gradient(iny,
s,
out,
s,
x_init_value=y)
if x.dtype == np.float32:
self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3)
elif x.dtype == np.float64:
self.assertAllClose(jacob_t, jacob_n, rtol=1e-5, atol=1e-5)
def testGradients(self):
x = np.random.rand(1, 3, 2) * 100.
# ensure x != y
y = x + (np.random.randint(2, size=x.shape) - .5) * 2 # -1 or +1
self._compareGradientX(tf.maximum, x, y)
self._compareGradientY(tf.maximum, x, y)
self._compareGradientX(tf.minimum, x, y)
self._compareGradientY(tf.minimum, x, y)
class MathOpsOverloadTest(tf.test.TestCase):
def _computeTensorAndLiteral(self, x, y, dtype, func):
with self.test_session(use_gpu=False):
inx = tf.convert_to_tensor(x, dtype=dtype)
z = func(inx, y) # Should use __add__, __sub__, etc.
return z.eval()
def _computeLiteralAndTensor(self, x, y, dtype, func):
with self.test_session(use_gpu=False):
iny = tf.convert_to_tensor(y, dtype=dtype)
z = func(x, iny) # Should use __radd__, __rsub__, etc.
return z.eval()
def _compareBinary(self, x, y, dtype, np_func, tf_func):
np_ans = np_func(x, y)
self.assertAllClose(np_ans, self._computeTensorAndLiteral(
x, y, dtype, tf_func))
self.assertAllClose(np_ans, self._computeLiteralAndTensor(
x, y, dtype, tf_func))
def _compareUnary(self, x, dtype, np_func, tf_func):
np_ans = np_func(x)
with self.test_session(use_gpu=False):
self.assertAllClose(np_ans, tf_func(tf.convert_to_tensor(x, dtype=dtype)).eval())
def testOverload(self):
dtypes = [
tf.float32,
tf.float64,
tf.int32,
tf.int64,
tf.complex64,
]
funcs = [
(np.add, _ADD),
(np.subtract, _SUB),
(np.multiply, _MUL),
(np.power, _POW),
(np.true_divide, _TRUEDIV),
(np.floor_divide, _FLOORDIV),
]
for dtype in dtypes:
for np_func, tf_func in funcs:
if dtype == tf.complex64 and tf_func == _FLOORDIV:
continue # floordiv makes no sense for complex
self._compareBinary(10, 5, dtype, np_func, tf_func)
# Mod only works for int32 and int64.
for dtype in [tf.int32, tf.int64]:
self._compareBinary(10, 3, dtype, np.mod, _MOD)
def testOverloadComparisons(self):
dtypes = [
tf.float32,
tf.float64,
tf.int32,
tf.int64,
]
funcs = [
(np.less, _LT),
(np.less_equal, _LE),
(np.greater, _GT),
(np.greater_equal, _GE),
]
for dtype in dtypes:
for np_func, tf_func in funcs:
self._compareBinary(10, 5, dtype, np_func, tf_func)
logical_funcs = [
(np.logical_and, _AND),
(np.logical_or, _OR),
(np.logical_xor, _XOR),
]
for np_func, tf_func in logical_funcs:
self._compareBinary(True, False, tf.bool, np_func, tf_func)
self._compareBinary(True, True, tf.bool, np_func, tf_func)
self._compareBinary(False, False, tf.bool, np_func, tf_func)
self._compareBinary(False, True, tf.bool, np_func, tf_func)
self._compareBinary([True, True, False, False],
[True, False, True, False],
tf.bool, np_func, tf_func)
self._compareUnary(True, tf.bool, np.logical_not, _INV)
self._compareUnary(False, tf.bool, np.logical_not, _INV)
self._compareUnary([True, False], tf.bool, np.logical_not, _INV)
class IsFiniteInfNanTest(tf.test.TestCase):
def _compare(self, x, use_gpu):
np_finite, np_inf, np_nan = np.isfinite(x), np.isinf(x), np.isnan(x)
with self.test_session(use_gpu=use_gpu) as sess:
inx = tf.convert_to_tensor(x)
ofinite, oinf, onan = tf.is_finite(inx), tf.is_inf(
inx), tf.is_nan(inx)
tf_finite, tf_inf, tf_nan = sess.run([ofinite, oinf, onan])
self.assertAllEqual(np_inf, tf_inf)
self.assertAllEqual(np_nan, tf_nan)
self.assertAllEqual(np_finite, tf_finite)
self.assertShapeEqual(np_inf, oinf)
self.assertShapeEqual(np_nan, onan)
self.assertShapeEqual(np_finite, ofinite)
def _testDtype(self, dtype):
fi = np.finfo(dtype)
data = np.array([0, -1, 1, fi.resolution, -fi.resolution, fi.min, fi.max,
-np.inf, np.inf, np.nan]).astype(dtype)
self._compare(data, use_gpu=False)
self._compare(data, use_gpu=True)
def testFloat(self):
self._testDtype(np.float32)
def testDouble(self):
self._testDtype(np.float64)
class RoundingTest(tf.test.TestCase):
def _compare(self, x, use_gpu):
np_floor, np_ceil = np.floor(x), np.ceil(x)
with self.test_session(use_gpu=use_gpu) as sess:
inx = tf.convert_to_tensor(x)
ofloor, oceil = tf.floor(inx), tf.ceil(inx)
tf_floor, tf_ceil = sess.run([ofloor, oceil])
self.assertAllEqual(np_floor, tf_floor)
self.assertAllEqual(np_ceil, tf_ceil)
self.assertShapeEqual(np_floor, ofloor)
self.assertShapeEqual(np_ceil, oceil)
def _testDtype(self, dtype):
data = (np.arange(-3, 3) / 4.).reshape([1, 3, 2]).astype(dtype)
    self._compare(data, use_gpu=False)
    self._compare(data, use_gpu=True)
def testTypes(self):
for dtype in [np.float32, np.float64]:
self._testDtype(dtype)
class ComplexMakeRealImagTest(tf.test.TestCase):
def _compareMake(self, real, imag, use_gpu):
np_ans = real + (1j) * imag
with self.test_session(use_gpu=use_gpu):
real = tf.convert_to_tensor(real)
imag = tf.convert_to_tensor(imag)
tf_ans = tf.complex(real, imag)
out = tf_ans.eval()
self.assertAllEqual(np_ans, out)
self.assertShapeEqual(np_ans, tf_ans)
def testMake(self):
real = (np.arange(-3, 3) / 4.).reshape([1, 3, 2]).astype(np.float32)
imag = (np.arange(-3, 3) / 5.).reshape([1, 3, 2]).astype(np.float32)
for use_gpu in [False, True]:
self._compareMake(real, imag, use_gpu)
self._compareMake(real, 12.0, use_gpu)
self._compareMake(23.0, imag, use_gpu)
def _compareRealImag(self, cplx, use_gpu):
np_real, np_imag = np.real(cplx), np.imag(cplx)
with self.test_session(use_gpu=use_gpu) as sess:
inx = tf.convert_to_tensor(cplx)
tf_real = tf.real(inx)
tf_imag = tf.imag(inx)
tf_real_val, tf_imag_val = sess.run([tf_real, tf_imag])
self.assertAllEqual(np_real, tf_real_val)
self.assertAllEqual(np_imag, tf_imag_val)
self.assertShapeEqual(np_real, tf_real)
self.assertShapeEqual(np_imag, tf_imag)
def testRealImag(self):
real = (np.arange(-3, 3) / 4.).reshape([1, 3, 2]).astype(np.float32)
imag = (np.arange(-3, 3) / 5.).reshape([1, 3, 2]).astype(np.float32)
cplx = real + (1j) * imag
self._compareRealImag(cplx, use_gpu=False)
self._compareRealImag(cplx, use_gpu=True)
def _compareConj(self, cplx, use_gpu):
np_ans = np.conj(cplx)
with self.test_session(use_gpu=use_gpu):
inx = tf.convert_to_tensor(cplx)
tf_conj = tf.conj(inx)
tf_ans = tf_conj.eval()
self.assertAllEqual(np_ans, tf_ans)
self.assertShapeEqual(np_ans, tf_conj)
def testConj(self):
real = (np.arange(-3, 3) / 4.).reshape([1, 3, 2]).astype(np.float32)
imag = (np.arange(-3, 3) / 5.).reshape([1, 3, 2]).astype(np.float32)
cplx = real + (1j) * imag
self._compareConj(cplx, use_gpu=False)
self._compareConj(cplx, use_gpu=True)
def _compareGradient(self, x):
# x[:, 0] is real, x[:, 1] is imag. We combine real and imag into
# complex numbers. Then, we extract real and imag parts and
    # compute the squared sum. This is obviously the same as sum(real
# * real) + sum(imag * imag). We just want to make sure the
# gradient function is checked.
with self.test_session():
inx = tf.convert_to_tensor(x)
real, imag = tf.split(1, 2, inx)
real, imag = tf.reshape(real, [-1]), tf.reshape(imag, [-1])
cplx = tf.complex(real, imag)
cplx = tf.conj(cplx)
loss = tf.reduce_sum(
tf.square(tf.real(cplx))) + tf.reduce_sum(
tf.square(tf.imag(cplx)))
epsilon = 1e-3
jacob_t, jacob_n = tf.test.compute_gradient(inx,
list(x.shape),
loss,
[1],
x_init_value=x,
delta=epsilon)
self.assertAllClose(jacob_t, jacob_n, rtol=epsilon, atol=epsilon)
def testGradient(self):
data = np.arange(1, 2, 0.10).reshape([5, 2]).astype(np.float32)
self._compareGradient(data)
def _compareMulGradient(self, data):
# data is a float matrix of shape [n, 4]. data[:, 0], data[:, 1],
# data[:, 2], data[:, 3] are real parts of x, imaginary parts of
# x, real parts of y and imaginary parts of y.
with self.test_session():
inp = tf.convert_to_tensor(data)
xr, xi, yr, yi = tf.split(1, 4, inp)
def vec(x): # Reshape to a vector
return tf.reshape(x, [-1])
xr, xi, yr, yi = vec(xr), vec(xi), vec(yr), vec(yi)
def cplx(r, i): # Combine to a complex vector
return tf.complex(r, i)
x, y = cplx(xr, xi), cplx(yr, yi)
# z is x times y in complex plane.
z = x * y
# Defines the loss function as the sum of all coefficients of z.
loss = tf.reduce_sum(tf.real(z) + tf.imag(z))
epsilon = 0.005
jacob_t, jacob_n = tf.test.compute_gradient(inp,
list(data.shape),
loss,
[1],
x_init_value=data,
delta=epsilon)
self.assertAllClose(jacob_t, jacob_n, rtol=epsilon, atol=epsilon)
def testMulGradient(self):
data = np.arange(1, 2, 0.125).reshape([2, 4]).astype(np.float32)
self._compareMulGradient(data)
class AccumulateTest(tf.test.TestCase):
def testSimple(self):
with self.test_session():
random_arrays = [np.random.rand(16, 16, 16, 16).astype(np.float32)
for _ in range(20)]
random_tensors = [tf.convert_to_tensor(x, dtype=tf.float32)
for x in random_arrays]
tf_val = tf.accumulate_n(random_tensors)
np_val = random_arrays[0]
for random_array in random_arrays[1:]:
np_val += random_array
self.assertAllClose(np_val, tf_val.eval())
def testZeroArgs(self):
with self.test_session():
with self.assertRaises(ValueError):
tf_val = tf.accumulate_n([])
tf_val.eval()
if __name__ == "__main__":
tf.test.main()
| apache-2.0 | 7,038,976,285,839,350,000 | 34.481538 | 87 | 0.568118 | false |
sissaschool/xmlschema | xmlschema/validators/identities.py | 1 | 16757 | #
# Copyright (c), 2016-2020, SISSA (International School for Advanced Studies).
# All rights reserved.
# This file is distributed under the terms of the MIT License.
# See the file 'LICENSE' in the root directory of the present
# distribution, or http://opensource.org/licenses/MIT.
#
# @author Davide Brunato <[email protected]>
#
"""
This module contains classes for XML Schema identity constraints.
"""
import re
import math
from collections import Counter
from typing import Dict, Union
from elementpath import XPath2Parser, ElementPathError, XPathContext, translate_pattern
from ..exceptions import XMLSchemaTypeError, XMLSchemaValueError
from ..names import XSD_QNAME, XSD_UNIQUE, XSD_KEY, XSD_KEYREF, XSD_SELECTOR, XSD_FIELD
from ..helpers import get_qname, get_extended_qname
from ..xpath import iter_schema_nodes
from .xsdbase import XsdComponent
from .attributes import XsdAttribute
XSD_IDENTITY_XPATH_SYMBOLS = {
'processing-instruction', 'following-sibling', 'preceding-sibling',
'ancestor-or-self', 'attribute', 'following', 'namespace', 'preceding',
'ancestor', 'position', 'comment', 'parent', 'child', 'false', 'text', 'node',
'true', 'last', 'not', 'and', 'mod', 'div', 'or', '..', '//', '!=', '<=', '>=', '(', ')',
'[', ']', '.', '@', ',', '/', '|', '*', '-', '=', '+', '<', '>', ':', '(end)', '(name)',
'(string)', '(float)', '(decimal)', '(integer)', '::', '{', '}',
}
# XSD identities use a restricted parser and a context for iterating element
# references. The XMLSchemaProxy is not used for the specific selection of
# fields and elements, and the XSD fields are obtained at the first validation run.
class IdentityXPathContext(XPathContext):
_iter_nodes = staticmethod(iter_schema_nodes)
class IdentityXPathParser(XPath2Parser):
symbol_table = {
k: v for k, v in XPath2Parser.symbol_table.items() if k in XSD_IDENTITY_XPATH_SYMBOLS
}
SYMBOLS = XSD_IDENTITY_XPATH_SYMBOLS
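# (Illustrative note, not part of the original module.) Restricting the
# symbol_table to XSD_IDENTITY_XPATH_SYMBOLS makes this parser reject XPath 2.0
# constructs that the XSD identity-constraint grammar does not use (e.g. "for"
# and "if" expressions, or the explicit descendant:: axis keyword, though the
# '//' abbreviation remains), while keeping child/attribute steps, unions ('|')
# and basic calls such as position() and last().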
class XsdSelector(XsdComponent):
"""Class for defining an XPath selector for an XSD identity constraint."""
_ADMITTED_TAGS = {XSD_SELECTOR}
xpath_default_namespace = ''
pattern = translate_pattern(
r"(\.//)?(((child::)?((\i\c*:)?(\i\c*|\*)))|\.)(/(((child::)?"
r"((\i\c*:)?(\i\c*|\*)))|\.))*(\|(\.//)?(((child::)?((\i\c*:)?"
r"(\i\c*|\*)))|\.)(/(((child::)?((\i\c*:)?(\i\c*|\*)))|\.))*)*",
back_references=False,
lazy_quantifiers=False,
anchors=False
)
token = None
parser = None
def __init__(self, elem, schema, parent):
super(XsdSelector, self).__init__(elem, schema, parent)
def _parse(self):
super(XsdSelector, self)._parse()
try:
self.path = self.elem.attrib['xpath']
except KeyError:
self.parse_error("'xpath' attribute required")
self.path = '*'
else:
try:
match = self.pattern.match(self.path.replace(' ', ''))
except AttributeError:
# Compile regex pattern
self.__class__.pattern = re.compile(self.pattern)
match = self.pattern.match(self.path.replace(' ', ''))
if not match:
msg = "invalid XPath expression for an {}"
self.parse_error(msg.format(self.__class__.__name__))
# XSD 1.1 xpathDefaultNamespace attribute
if self.schema.XSD_VERSION > '1.0':
if 'xpathDefaultNamespace' in self.elem.attrib:
self.xpath_default_namespace = self._parse_xpath_default_namespace(self.elem)
else:
self.xpath_default_namespace = self.schema.xpath_default_namespace
self.parser = IdentityXPathParser(
namespaces=self.namespaces,
strict=False,
compatibility_mode=True,
default_namespace=self.xpath_default_namespace,
)
try:
self.token = self.parser.parse(self.path)
except ElementPathError as err:
self.token = self.parser.parse('*')
self.parse_error(err)
def __repr__(self):
return '%s(path=%r)' % (self.__class__.__name__, self.path)
@property
def built(self):
return self.token is not None
@property
def target_namespace(self):
# TODO: implement a property in elementpath for getting XPath token's namespace
if self.token is None:
pass # xpathDefaultNamespace="##targetNamespace"
elif self.token.symbol == ':':
return self.token[1].namespace or self.xpath_default_namespace
elif self.token.symbol == '@' and self.token[0].symbol == ':':
return self.token[0][1].namespace or self.xpath_default_namespace
return self.schema.target_namespace
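# (Illustrative, not part of the original module.) The pattern above admits the
# restricted XPath subset that XSD allows for selectors, e.g.:
#
#     <xs:selector xpath="ax:books/ax:book"/>
#     <xs:selector xpath=".//item | .//entry"/>
#
# while expressions with predicates, such as "book[@id]", fall outside the
# subset and are reported through parse_error().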
class XsdFieldSelector(XsdSelector):
"""Class for defining an XPath field selector for an XSD identity constraint."""
_ADMITTED_TAGS = {XSD_FIELD}
pattern = translate_pattern(
r"(\.//)?((((child::)?((\i\c*:)?(\i\c*|\*)))|\.)/)*((((child::)?"
r"((\i\c*:)?(\i\c*|\*)))|\.)|((attribute::|@)((\i\c*:)?(\i\c*|\*))))"
r"(\|(\.//)?((((child::)?((\i\c*:)?(\i\c*|\*)))|\.)/)*"
r"((((child::)?((\i\c*:)?(\i\c*|\*)))|\.)|"
r"((attribute::|@)((\i\c*:)?(\i\c*|\*)))))*",
back_references=False,
lazy_quantifiers=False,
anchors=False
)
class XsdIdentity(XsdComponent):
"""
Common class for XSD identity constraints.
:ivar selector: the XPath selector of the identity constraint.
:ivar fields: a list containing the XPath field selectors of the identity constraint.
"""
selector = None
elements = None # XSD elements bound by selector (for speed-up and lazy mode)
fields = ()
def __init__(self, elem, schema, parent):
super(XsdIdentity, self).__init__(elem, schema, parent)
def _parse(self):
super(XsdIdentity, self)._parse()
try:
self.name = get_qname(self.target_namespace, self.elem.attrib['name'])
except KeyError:
self.parse_error("missing required attribute 'name'")
self.name = None
for child in self.elem:
if child.tag == XSD_SELECTOR:
self.selector = XsdSelector(child, self.schema, self)
break
else:
self.parse_error("missing 'selector' declaration.")
self.fields = []
for child in self.elem:
if child.tag == XSD_FIELD:
self.fields.append(XsdFieldSelector(child, self.schema, self))
def build(self):
if self.ref is True:
try:
ref = self.maps.identities[self.name]
except KeyError:
self.parse_error("unknown identity constraint {!r}".format(self.name))
return
else:
if not isinstance(ref, self.__class__):
self.parse_error("attribute 'ref' points to a different kind constraint")
self.selector = ref.selector
self.fields = ref.fields
self.ref = ref
context = IdentityXPathContext(self.schema, item=self.parent)
try:
self.elements = {
e: None for e in self.selector.token.select_results(context) if e.name
}
except AttributeError:
self.elements = {}
else:
if any(isinstance(e, XsdAttribute) for e in self.elements):
self.parse_error("selector xpath cannot select attributes")
elif not self.elements:
# Try to detect target XSD elements extracting QNames
# of the leaf elements from the XPath expression and
# use them to match global elements.
for qname in self.selector.token.iter_leaf_elements():
xsd_element = self.maps.elements.get(
get_extended_qname(qname, self.namespaces)
)
if xsd_element is not None and xsd_element not in self.elements:
self.elements[xsd_element] = None
@property
def built(self):
return self.elements is not None
def get_fields(self, elem, namespaces=None, decoders=None):
"""
Get fields for a schema or instance context element.
:param elem: an Element or an XsdElement
:param namespaces: is an optional mapping from namespace prefix to URI.
:param decoders: context schema fields decoders.
:return: a tuple with field values. An empty field is replaced by `None`.
"""
fields = []
if isinstance(elem, XsdComponent):
context_class = IdentityXPathContext
else:
context_class = XPathContext
for k, field in enumerate(self.fields):
result = field.token.get_results(context_class(elem))
if not result:
if decoders is not None and decoders[k] is not None:
value = decoders[k].value_constraint
if value is not None:
if decoders[k].type.root_type.name == XSD_QNAME:
value = get_extended_qname(value, namespaces)
if isinstance(value, list):
fields.append(tuple(value))
elif isinstance(value, bool):
fields.append((value, bool))
elif not isinstance(value, float):
fields.append(value)
elif math.isnan(value):
fields.append(('nan', float))
else:
fields.append((value, float))
continue
if not isinstance(self, XsdKey) or 'ref' in elem.attrib and \
self.schema.meta_schema is None and self.schema.XSD_VERSION != '1.0':
fields.append(None)
elif field.target_namespace not in self.maps.namespaces:
fields.append(None)
else:
msg = "missing key field {!r} for {!r}"
raise XMLSchemaValueError(msg.format(field.path, self))
elif len(result) == 1:
if decoders is None or decoders[k] is None:
fields.append(result[0])
else:
if decoders[k].type.content_type_label not in ('simple', 'mixed'):
raise XMLSchemaTypeError("%r field doesn't have a simple type!" % field)
value = decoders[k].data_value(result[0])
if decoders[k].type.root_type.name == XSD_QNAME:
value = get_extended_qname(value, namespaces)
if isinstance(value, list):
fields.append(tuple(value))
elif isinstance(value, bool):
fields.append((value, bool))
elif not isinstance(value, float):
fields.append(value)
elif math.isnan(value):
fields.append(('nan', float))
else:
fields.append((value, float))
else:
raise XMLSchemaValueError("%r field selects multiple values!" % field)
return tuple(fields)
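    # (Illustrative, not part of the original module.) For an identity declared
    # with a single <xs:field xpath="@code"/>, get_fields(elem) yields a 1-tuple
    # such as ('A-001',) -- the value here is hypothetical. Empty fields come
    # back as None, and bool/float values are wrapped as (value, type) pairs so
    # that counter lookups compare consistently across types.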
def get_counter(self, enabled=True):
return IdentityCounter(self, enabled)
class XsdUnique(XsdIdentity):
_ADMITTED_TAGS = {XSD_UNIQUE}
class XsdKey(XsdIdentity):
_ADMITTED_TAGS = {XSD_KEY}
class XsdKeyref(XsdIdentity):
"""
Implementation of xs:keyref.
:ivar refer: reference to a *xs:key* declaration that must be in the same element \
or in a descendant element.
"""
_ADMITTED_TAGS = {XSD_KEYREF}
refer = None
refer_path = '.'
def _parse(self):
super(XsdKeyref, self)._parse()
try:
self.refer = self.schema.resolve_qname(self.elem.attrib['refer'])
except (KeyError, ValueError, RuntimeError) as err:
if 'refer' not in self.elem.attrib:
self.parse_error("missing required attribute 'refer'")
else:
self.parse_error(err)
def build(self):
super(XsdKeyref, self).build()
if isinstance(self.refer, (XsdKey, XsdUnique)):
return # referenced key/unique identity constraint already set
elif isinstance(self.ref, XsdKeyref):
self.refer = self.ref.refer
if self.refer is None:
return # attribute or key/unique identity constraint missing
elif isinstance(self.refer, str):
refer = self.parent.identities.get(self.refer)
if refer is not None and refer.ref is None:
self.refer = refer
else:
try:
self.refer = self.maps.identities[self.refer]
except KeyError:
self.parse_error("key/unique identity constraint %r is missing" % self.refer)
return
if not isinstance(self.refer, (XsdKey, XsdUnique)):
self.parse_error("reference to a non key/unique identity constraint %r" % self.refer)
elif len(self.refer.fields) != len(self.fields):
self.parse_error("field cardinality mismatch between %r and %r" % (self, self.refer))
elif self.parent is not self.refer.parent:
refer_path = self.refer.parent.get_path(ancestor=self.parent)
if refer_path is None:
# From a note in par. 3.11.5 Part 1 of XSD 1.0 spec: "keyref
# identity-constraints may be defined on domains distinct from
# the embedded domain of the identity-constraint they reference,
# or the domains may be the same but self-embedding at some depth.
# In either case the node table for the referenced identity-constraint
# needs to propagate upwards, with conflict resolution."
refer_path = self.parent.get_path(ancestor=self.refer.parent, reverse=True)
if refer_path is None:
refer_path = self.parent.get_path(reverse=True) + '/' + \
self.refer.parent.get_path()
self.refer_path = refer_path
@property
def built(self):
return self.elements is not None and isinstance(self.refer, XsdIdentity)
def get_counter(self, enabled=True):
return KeyrefCounter(self, enabled)
class Xsd11Unique(XsdUnique):
def _parse(self):
if self._parse_reference():
super(XsdIdentity, self)._parse()
self.ref = True
else:
super(Xsd11Unique, self)._parse()
class Xsd11Key(XsdKey):
def _parse(self):
if self._parse_reference():
super(XsdIdentity, self)._parse()
self.ref = True
else:
super(Xsd11Key, self)._parse()
class Xsd11Keyref(XsdKeyref):
def _parse(self):
if self._parse_reference():
super(XsdIdentity, self)._parse()
self.ref = True
else:
super(Xsd11Keyref, self)._parse()
class IdentityCounter:
def __init__(self, identity: Union[XsdKey, XsdKeyref], enabled=True):
self.counter = Counter()
self.identity = identity
self.enabled = enabled
def __repr__(self):
return "%s%r" % (self.__class__.__name__[:-7], self.counter)
def clear(self):
self.counter.clear()
self.enabled = True
def increase(self, fields: tuple):
self.counter[fields] += 1
if self.counter[fields] == 2:
msg = "duplicated value {!r} for {!r}"
raise XMLSchemaValueError(msg.format(fields, self.identity))
class KeyrefCounter(IdentityCounter):
def increase(self, fields: tuple):
self.counter[fields] += 1
def iter_errors(self, identities: Dict[Union[XsdKey, XsdKeyref],
Union['IdentityCounter', 'KeyrefCounter']]):
refer_values = identities[self.identity.refer].counter
for v in filter(lambda x: x not in refer_values, self.counter):
if len(v) == 1 and v[0] in refer_values:
continue
elif self.counter[v] > 1:
msg = "value {} not found for {!r} ({} times)"
yield XMLSchemaValueError(msg.format(v, self.identity.refer, self.counter[v]))
else:
msg = "value {} not found for {!r}"
yield XMLSchemaValueError(msg.format(v, self.identity.refer))
| mit | -9,066,070,327,560,499,000 | 37.170843 | 97 | 0.564003 | false |
googleads/googleads-python-lib | examples/adwords/adwords_appengine_demo/views/show_adgroups_view.py | 1 | 2673 | #!/usr/bin/env python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handles request to view the AdGroups associated with a given Campaign."""
import os
from handlers.api_handler import APIHandler
from handlers.ndb_handler import InitUser
import webapp2
from google.appengine.api import users
from google.appengine.ext.webapp import template
class ShowAdGroups(webapp2.RequestHandler):
"""View listing the AdGroups belonging to a client's Campaign."""
def get(self):
"""Handle get request."""
client_customer_id = self.request.get('clientCustomerId')
campaign_id = self.request.get('campaignId')
template_values = {
'back_url': '/showCampaigns?clientCustomerId=%s' % client_customer_id,
'back_msg': 'View Campaigns.',
'campaign_id': campaign_id,
'ccid': client_customer_id,
'logout_url': users.create_logout_url('/'),
'user_nickname': users.get_current_user().nickname()
}
try:
try:
app_user = InitUser()
# Load Client instance.
handler = APIHandler(app_user.client_id,
app_user.client_secret,
app_user.refresh_token,
app_user.adwords_manager_cid,
app_user.developer_token)
adgroups = handler.GetAdGroups(client_customer_id, campaign_id)
# Use template to write output to the page.
template_values['adgroups'] = adgroups
except Exception as e:
template_values['error'] = str(e)
finally:
path = os.path.join(os.path.dirname(__file__),
'../templates/show_adgroups.html')
self.response.out.write(template.render(path, template_values))
def post(self):
"""Handle post request."""
client_customer_id = self.request.get('clientCustomerId')
campaign_id = self.request.get('campaignId')
if not client_customer_id or not campaign_id:
self.redirect('/')
else:
self.redirect('/showAdGroups?clientCustomerId=%s&campaignId=%s'
% (client_customer_id, campaign_id))
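# (Illustrative, not part of the original module.) In the demo this handler is
# mounted in a webapp2 application, along the lines of:
#
#     app = webapp2.WSGIApplication([('/showAdGroups', ShowAdGroups)])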
| apache-2.0 | 3,225,096,587,231,493,000 | 35.121622 | 78 | 0.654695 | false |
tanium/pytan | BUILD/doc/source/examples/ask_saved_question_refresh_data_code.py | 1 | 3079 | # import the basic python packages we need
import os
import sys
import tempfile
import pprint
import traceback
# disable python from generating a .pyc file
sys.dont_write_bytecode = True
# change me to the path of pytan if this script is not running from EXAMPLES/PYTAN_API
pytan_loc = "~/gh/pytan"
pytan_static_path = os.path.join(os.path.expanduser(pytan_loc), 'lib')
# Determine our script name, script dir
my_file = os.path.abspath(sys.argv[0])
my_dir = os.path.dirname(my_file)
# try to automatically determine the pytan lib directory by assuming it is in '../../lib/'
parent_dir = os.path.dirname(my_dir)
pytan_root_dir = os.path.dirname(parent_dir)
lib_dir = os.path.join(pytan_root_dir, 'lib')
# add pytan_loc and lib_dir to the PYTHONPATH variable
path_adds = [lib_dir, pytan_static_path]
[sys.path.append(aa) for aa in path_adds if aa not in sys.path]
# import pytan
import pytan
# create a dictionary of arguments for the pytan handler
handler_args = {}
# establish our connection info for the Tanium Server
handler_args['username'] = "Administrator"
handler_args['password'] = "Tanium2015!"
handler_args['host'] = "10.0.1.240"
handler_args['port'] = "443" # optional
# optional, level 0 is no output except warnings/errors
# level 1 through 12 are more and more verbose
handler_args['loglevel'] = 1
# optional, use a debug format for the logging output (uses two lines per log entry)
handler_args['debugformat'] = False
# optional, this saves all response objects to handler.session.ALL_REQUESTS_RESPONSES
# very useful for capturing the full exchange of XML requests and responses
handler_args['record_all_requests'] = True
# instantiate a handler using all of the arguments in the handler_args dictionary
print "...CALLING: pytan.handler() with args: {}".format(handler_args)
handler = pytan.Handler(**handler_args)
# print out the handler string
print "...OUTPUT: handler string: {}".format(handler)
# setup the arguments for the handler() class
kwargs = {}
kwargs["refresh_data"] = True
kwargs["qtype"] = u'saved'
kwargs["name"] = u'Installed Applications'
print "...CALLING: handler.ask with args: {}".format(kwargs)
response = handler.ask(**kwargs)
print "...OUTPUT: Type of response: ", type(response)
print "...OUTPUT: Pretty print of response:"
print pprint.pformat(response)
print "...OUTPUT: Equivalent Question if it were to be asked in the Tanium Console: "
print response['question_object'].query_text
if response['question_results']:
# call the export_obj() method to convert response to CSV and store it in out
export_kwargs = {}
export_kwargs['obj'] = response['question_results']
export_kwargs['export_format'] = 'csv'
print "...CALLING: handler.export_obj() with args {}".format(export_kwargs)
out = handler.export_obj(**export_kwargs)
# trim the output if it is more than 15 lines long
if len(out.splitlines()) > 15:
out = out.splitlines()[0:15]
out.append('..trimmed for brevity..')
out = '\n'.join(out)
print "...OUTPUT: CSV Results of response: "
print out
| mit | 2,296,055,094,753,374,000 | 32.835165 | 90 | 0.71809 | false |
edvardlindelof/ERICA-prediction | models/linear/train_and_export_model.py | 1 | 1230 | import tensorflow as tf
from tensorflow.contrib.learn import LinearRegressor
from tensorflow.contrib import layers
from tensorflow.contrib.learn.python.learn.utils import input_fn_utils
tf.logging.set_verbosity(tf.logging.INFO)
import pandas as pd
FEATURES = ["ttt30", "all", "MEP", "triaged", "PRIO3", "PRIO4"]
pdframe = pd.read_csv("../NALState2017-07-26T18:45:33.190+02:00.csv")
def input_fn_train():
feature_cols = {name: tf.constant(pdframe[name].get_values()) for name in FEATURES}
outputs = tf.constant(pdframe["TTLOfNextPatient"].get_values())
return feature_cols, outputs
feature_cols = [layers.real_valued_column(name) for name in FEATURES]
regressor = LinearRegressor(
feature_columns=feature_cols,
model_dir="./modeldir"
)
regressor.fit(input_fn=input_fn_train, steps=10000)
'''
def serving_input_fn():
default_inputs = {col.name: tf.placeholder(col.dtype, [None]) for col in feature_cols}
features = {key: tf.expand_dims(tensor, -1) for key, tensor in default_inputs.items()}
return input_fn_utils.InputFnOps(
features=features,
labels=None,
default_inputs=default_inputs
)
regressor.export_savedmodel(
"exportedmodel",
serving_input_fn
)
'''
| mit | 5,681,200,717,155,857,000 | 30.538462 | 90 | 0.715447 | false |
arthurdejong/python-pskc | pskc/scripts/csv2pskc.py | 1 | 5421 | # csv2pskc.py - script to convert a CSV file to PSKC
#
# Copyright (C) 2018 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Script to convert a CSV file to PSKC."""
import argparse
import base64
import csv
import sys
from binascii import a2b_hex
import dateutil.parser
import pskc
from pskc.scripts.util import (
OutputFile, VersionAction, get_key, get_password)
epilog = '''
supported columns:
id, serial, secret, counter, time_offset, time_interval, interval,
time_drift, issuer, manufacturer, response_length, algorithm
And any other properties of pskc.key.Key instances.
Report bugs to <[email protected]>.
'''.strip()
# set up command line parser
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description='Convert a CSV file to PSKC.', epilog=epilog)
parser.add_argument(
'input', nargs='?', metavar='FILE', help='the CSV file to read')
parser.add_argument(
'-V', '--version', action=VersionAction)
parser.add_argument(
'-o', '--output', metavar='FILE',
help='write PSKC to file instead of stdout')
parser.add_argument(
'-c', '--columns', metavar='COL|COL:LABEL,..',
help='list of columns or label to column mapping to import')
parser.add_argument(
'--skip-rows', metavar='N', type=int, default=1,
help='the number of rows before rows with key information start')
parser.add_argument(
'-x', '--set', metavar='COL=VALUE', action='append',
type=lambda x: x.split('=', 1), dest='extra_columns',
help='add an extra value that is added to all key containers')
parser.add_argument(
'-p', '--password', '--passwd', metavar='PASS/FILE',
help='password to use for encrypting the PSKC file)')
parser.add_argument(
'-s', '--secret', metavar='KEY/FILE',
help='hex encoded encryption key or a file containing the binary key')
encodings = {
'hex': a2b_hex,
'base32': base64.b32decode,
'base64': base64.b64decode,
}
parser.add_argument(
'-e', '--secret-encoding', choices=sorted(encodings.keys()),
help='encoding used for reading key material',
default='hex')
def from_column(key, value, args):
"""Convert a key value read from a CSV file in a format for PSKC."""
# decode encoded secret
if key == 'secret':
return encodings[args.secret_encoding](value)
# convert dates to timestamps
if key.endswith('_date'):
return dateutil.parser.parse(value)
return value
def open_csvfile(inputfile):
"""Open the CSV file, trying to detect the dialect."""
# Guess dialect if possible and open the CSV file
dialect = 'excel'
try:
# seek before read to skip sniffing on non-seekable files
inputfile.seek(0)
try:
dialect = csv.Sniffer().sniff(inputfile.read(1024))
except Exception: # pragma: no cover (very hard to test in doctest)
pass
inputfile.seek(0)
except IOError: # pragma: no cover (very hard to test in doctest)
pass
return csv.reader(inputfile, dialect)
def main():
"""Convert a CSV file to PSKC."""
# parse command-line arguments
args = parser.parse_args()
# open the CSV file
csvfile = open_csvfile(open(args.input, 'r') if args.input else sys.stdin)
# figure out the meaning of the columns
columns = []
if args.skip_rows > 0:
columns = [x.lower().replace(' ', '_') for x in next(csvfile)]
for i in range(args.skip_rows - 1):
next(csvfile)
if args.columns:
if ':' in args.columns:
# --columns is a list of mappings
mapping = dict(
(label.lower().replace(' ', '_'), key.lower())
for label, key in (
column.split(':')
for column in args.columns.split(',')))
columns = [mapping.get(column, column) for column in columns]
else:
# --columns is a list of columns
columns = [x.lower() for x in args.columns.split(',')]
# store rows in PSKC structure
pskcfile = pskc.PSKC()
for row in csvfile:
data = dict(args.extra_columns or [])
for column, value in zip(columns, row):
for key in column.split('+'):
if value and key not in ('', '-'):
data[key] = from_column(key, value, args)
pskcfile.add_key(**data)
# encrypt the file if needed
if args.secret:
pskcfile.encryption.setup_preshared_key(key=get_key(args.secret))
elif args.password:
pskcfile.encryption.setup_pbkdf2(get_password(args.password))
# write output PSKC file
with OutputFile(args.output) as output:
pskcfile.write(output)
| lgpl-2.1 | 6,463,509,971,803,191,000 | 35.14 | 78 | 0.653754 | false |
isagalaev/sm-openid | openid/test/test_ax.py | 1 | 18276 | """Tests for the attribute exchange extension module
"""
import unittest
from openid.extensions import ax
from openid.message import NamespaceMap, Message, OPENID2_NS
from openid.consumer import Response
class BogusAXMessage(ax.AXMessage):
mode = 'bogus'
getExtensionArgs = ax.AXMessage._newArgs
class DummyRequest(object):
def __init__(self, message):
self.message = message
class AXMessageTest(unittest.TestCase):
def setUp(self):
self.bax = BogusAXMessage()
def test_checkMode(self):
check = self.bax._checkMode
self.assertRaises(ax.NotAXMessage, check, {})
self.assertRaises(ax.AXError, check, {'mode': 'fetch_request'})
# does not raise an exception when the mode is right
check({'mode': self.bax.mode})
def test_checkMode_newArgs(self):
"""_newArgs generates something that has the correct mode"""
# This would raise AXError if it didn't like the mode newArgs made.
self.bax._checkMode(self.bax._newArgs())
class AttrInfoTest(unittest.TestCase):
def test_construct(self):
self.assertRaises(TypeError, ax.AttrInfo)
type_uri = 'a uri'
ainfo = ax.AttrInfo(type_uri)
self.assertEqual(type_uri, ainfo.type_uri)
self.assertEqual(1, ainfo.count)
self.assertFalse(ainfo.required)
self.assertTrue(ainfo.alias is None)
class ToTypeURIsTest(unittest.TestCase):
def setUp(self):
self.aliases = NamespaceMap()
def test_empty(self):
for empty in [None, '']:
uris = ax.toTypeURIs(self.aliases, empty)
self.assertEqual([], uris)
def test_undefined(self):
self.assertRaises(
KeyError,
ax.toTypeURIs, self.aliases, 'http://janrain.com/')
def test_one(self):
uri = 'http://janrain.com/'
alias = 'openid_hackers'
self.aliases.addAlias(uri, alias)
uris = ax.toTypeURIs(self.aliases, alias)
self.assertEqual([uri], uris)
def test_two(self):
uri1 = 'http://janrain.com/'
alias1 = 'openid_hackers'
self.aliases.addAlias(uri1, alias1)
uri2 = 'http://jyte.com/'
alias2 = 'openid_hack'
self.aliases.addAlias(uri2, alias2)
uris = ax.toTypeURIs(self.aliases, ','.join([alias1, alias2]))
self.assertEqual([uri1, uri2], uris)
class ParseAXValuesTest(unittest.TestCase):
"""Testing AXKeyValueMessage.parseExtensionArgs."""
def failUnlessAXKeyError(self, ax_args):
msg = ax.AXKeyValueMessage()
self.assertRaises(KeyError, msg.parseExtensionArgs, ax_args)
def failUnlessAXValues(self, ax_args, expected_args):
"""Fail unless parseExtensionArgs(ax_args) == expected_args."""
msg = ax.AXKeyValueMessage()
msg.parseExtensionArgs(ax_args)
self.assertEqual(expected_args, msg.data)
def test_emptyIsValid(self):
self.failUnlessAXValues({}, {})
def test_missingValueForAliasExplodes(self):
self.failUnlessAXKeyError({'type.foo': 'urn:foo'})
def test_countPresentButNotValue(self):
self.failUnlessAXKeyError({'type.foo': 'urn:foo',
'count.foo': '1'})
def test_invalidCountValue(self):
msg = ax.FetchRequest()
self.assertRaises(ax.AXError,
msg.parseExtensionArgs,
{'type.foo': 'urn:foo',
'count.foo': 'bogus'})
def test_requestUnlimitedValues(self):
msg = ax.FetchRequest()
msg.parseExtensionArgs(
{'mode': 'fetch_request',
'required': 'foo',
'type.foo': 'urn:foo',
'count.foo': ax.UNLIMITED_VALUES})
attrs = list(msg.iterAttrs())
foo = attrs[0]
self.assertTrue(foo.count == ax.UNLIMITED_VALUES)
self.assertTrue(foo.wantsUnlimitedValues())
def test_longAlias(self):
# Spec minimum length is 32 characters. This is a silly test
# for this library, but it's here for completeness.
alias = 'x' * ax.MINIMUM_SUPPORTED_ALIAS_LENGTH
msg = ax.AXKeyValueMessage()
msg.parseExtensionArgs(
{'type.%s' % (alias,): 'urn:foo',
'count.%s' % (alias,): '1',
'value.%s.1' % (alias,): 'first'}
)
def test_invalidAlias(self):
types = [
ax.AXKeyValueMessage,
ax.FetchRequest
]
inputs = [
{'type.a.b':'urn:foo',
'count.a.b':'1'},
{'type.a,b':'urn:foo',
'count.a,b':'1'},
]
for typ in types:
for input in inputs:
msg = typ()
self.assertRaises(ax.AXError, msg.parseExtensionArgs,
input)
def test_countPresentAndIsZero(self):
self.failUnlessAXValues(
{'type.foo': 'urn:foo',
'count.foo': '0',
}, {'urn:foo': []})
def test_singletonEmpty(self):
self.failUnlessAXValues(
{'type.foo': 'urn:foo',
'value.foo': '',
}, {'urn:foo': []})
def test_doubleAlias(self):
self.failUnlessAXKeyError(
{'type.foo': 'urn:foo',
'value.foo': '',
'type.bar': 'urn:foo',
'value.bar': '',
})
def test_doubleSingleton(self):
self.failUnlessAXValues(
{'type.foo': 'urn:foo',
'value.foo': '',
'type.bar': 'urn:bar',
'value.bar': '',
}, {'urn:foo': [], 'urn:bar': []})
def test_singletonValue(self):
self.failUnlessAXValues(
{'type.foo': 'urn:foo',
'value.foo': 'Westfall',
}, {'urn:foo': ['Westfall']})
class FetchRequestTest(unittest.TestCase):
def setUp(self):
self.msg = ax.FetchRequest()
self.type_a = 'http://janrain.example.com/a'
self.alias_a = 'a'
def test_mode(self):
self.assertEqual(self.msg.mode, 'fetch_request')
def test_construct(self):
self.assertEqual({}, self.msg.requested_attributes)
self.assertEqual(None, self.msg.update_url)
msg = ax.FetchRequest('hailstorm')
self.assertEqual({}, msg.requested_attributes)
self.assertEqual('hailstorm', msg.update_url)
def test_add(self):
uri = 'mud://puddle'
# Not yet added:
self.assertFalse(uri in self.msg)
attr = ax.AttrInfo(uri)
self.msg.add(attr)
# Present after adding
self.assertTrue(uri in self.msg)
def test_addTwice(self):
uri = 'lightning://storm'
attr = ax.AttrInfo(uri)
self.msg.add(attr)
self.assertRaises(KeyError, self.msg.add, attr)
def test_getExtensionArgs_empty(self):
expected_args = {
'mode': 'fetch_request',
}
self.assertEqual(expected_args, self.msg.getExtensionArgs())
def test_getExtensionArgs_noAlias(self):
attr = ax.AttrInfo(type_uri='type://of.transportation')
self.msg.add(attr)
ax_args = self.msg.getExtensionArgs()
for k, v in ax_args.items():
if v == attr.type_uri and k.startswith('type.'):
alias = k[5:]
break
else:
self.fail("Didn't find the type definition")
self.failUnlessExtensionArgs({
'type.' + alias: attr.type_uri,
'if_available': alias,
})
def test_getExtensionArgs_alias_if_available(self):
attr = ax.AttrInfo(
type_uri='type://of.transportation',
alias='transport',
)
self.msg.add(attr)
self.failUnlessExtensionArgs({
'type.' + attr.alias: attr.type_uri,
'if_available': attr.alias,
})
def test_getExtensionArgs_alias_req(self):
attr = ax.AttrInfo(
type_uri='type://of.transportation',
alias='transport',
required=True,
)
self.msg.add(attr)
self.failUnlessExtensionArgs({
'type.' + attr.alias: attr.type_uri,
'required': attr.alias,
})
def failUnlessExtensionArgs(self, expected_args):
"""Make sure that getExtensionArgs has the expected result
This method will fill in the mode.
"""
expected_args = dict(expected_args)
expected_args['mode'] = self.msg.mode
self.assertEqual(expected_args, self.msg.getExtensionArgs())
def test_isIterable(self):
self.assertEqual([], list(self.msg))
self.assertEqual([], list(self.msg.iterAttrs()))
def test_getRequiredAttrs_empty(self):
self.assertEqual([], self.msg.getRequiredAttrs())
def test_parseExtensionArgs_extraType(self):
extension_args = {
'mode': 'fetch_request',
'type.' + self.alias_a: self.type_a,
}
self.assertRaises(ValueError,
self.msg.parseExtensionArgs, extension_args)
def test_parseExtensionArgs(self):
extension_args = {
'mode': 'fetch_request',
'type.' + self.alias_a: self.type_a,
'if_available': self.alias_a
}
self.msg.parseExtensionArgs(extension_args)
self.assertTrue(self.type_a in self.msg)
self.assertEqual([self.type_a], list(self.msg))
attr_info = self.msg.requested_attributes.get(self.type_a)
self.assertTrue(attr_info)
self.assertFalse(attr_info.required)
self.assertEqual(self.type_a, attr_info.type_uri)
self.assertEqual(self.alias_a, attr_info.alias)
self.assertEqual([attr_info], list(self.msg.iterAttrs()))
def test_extensionArgs_idempotent(self):
extension_args = {
'mode': 'fetch_request',
'type.' + self.alias_a: self.type_a,
'if_available': self.alias_a
}
self.msg.parseExtensionArgs(extension_args)
self.assertEqual(extension_args, self.msg.getExtensionArgs())
self.assertFalse(self.msg.requested_attributes[self.type_a].required)
def test_extensionArgs_idempotent_count_required(self):
extension_args = {
'mode': 'fetch_request',
'type.' + self.alias_a: self.type_a,
'count.' + self.alias_a: '2',
'required': self.alias_a
}
self.msg.parseExtensionArgs(extension_args)
self.assertEqual(extension_args, self.msg.getExtensionArgs())
self.assertTrue(self.msg.requested_attributes[self.type_a].required)
def test_extensionArgs_count1(self):
extension_args = {
'mode': 'fetch_request',
'type.' + self.alias_a: self.type_a,
'count.' + self.alias_a: '1',
'if_available': self.alias_a,
}
extension_args_norm = {
'mode': 'fetch_request',
'type.' + self.alias_a: self.type_a,
'if_available': self.alias_a,
}
self.msg.parseExtensionArgs(extension_args)
self.assertEqual(extension_args_norm, self.msg.getExtensionArgs())
class FetchResponseTest(unittest.TestCase):
def setUp(self):
self.msg = ax.FetchResponse()
self.value_a = 'monkeys'
self.type_a = 'http://phone.home/'
self.alias_a = 'robocop'
self.request_update_url = 'http://update.bogus/'
def test_construct(self):
self.assertTrue(self.msg.update_url is None)
self.assertEqual({}, self.msg.data)
def test_getExtensionArgs_empty(self):
expected_args = {
'mode': 'fetch_response',
}
self.assertEqual(expected_args, self.msg.getExtensionArgs())
def test_getExtensionArgs_empty_request(self):
expected_args = {
'mode': 'fetch_response',
}
req = ax.FetchRequest()
msg = ax.FetchResponse(request=req)
self.assertEqual(expected_args, msg.getExtensionArgs())
def test_getExtensionArgs_empty_request_some(self):
uri = 'http://not.found/'
alias = 'ext0'
expected_args = {
'mode': 'fetch_response',
'type.%s' % (alias,): uri,
'count.%s' % (alias,): '0'
}
req = ax.FetchRequest()
req.add(ax.AttrInfo(uri))
msg = ax.FetchResponse(request=req)
self.assertEqual(expected_args, msg.getExtensionArgs())
def test_updateUrlInResponse(self):
uri = 'http://not.found/'
alias = 'ext0'
expected_args = {
'mode': 'fetch_response',
'update_url': self.request_update_url,
'type.%s' % (alias,): uri,
'count.%s' % (alias,): '0'
}
req = ax.FetchRequest(update_url=self.request_update_url)
req.add(ax.AttrInfo(uri))
msg = ax.FetchResponse(request=req)
self.assertEqual(expected_args, msg.getExtensionArgs())
def test_getExtensionArgs_some_request(self):
expected_args = {
'mode': 'fetch_response',
'type.' + self.alias_a: self.type_a,
'value.' + self.alias_a + '.1': self.value_a,
'count.' + self.alias_a: '1'
}
req = ax.FetchRequest()
req.add(ax.AttrInfo(self.type_a, alias=self.alias_a))
msg = ax.FetchResponse(request=req)
msg.addValue(self.type_a, self.value_a)
self.assertEqual(expected_args, msg.getExtensionArgs())
def test_getExtensionArgs_some_not_request(self):
req = ax.FetchRequest()
msg = ax.FetchResponse(request=req)
msg.addValue(self.type_a, self.value_a)
self.assertRaises(KeyError, msg.getExtensionArgs)
def test_getSingle_success(self):
req = ax.FetchRequest()
self.msg.addValue(self.type_a, self.value_a)
self.assertEqual(self.value_a, self.msg.getSingle(self.type_a))
def test_getSingle_none(self):
self.assertEqual(None, self.msg.getSingle(self.type_a))
def test_getSingle_extra(self):
self.msg.setValues(self.type_a, ['x', 'y'])
self.assertRaises(ax.AXError, self.msg.getSingle, self.type_a)
def test_get(self):
self.assertRaises(KeyError, self.msg.get, self.type_a)
def test_fromSuccessResponseWithoutExtension(self):
"""return None for SuccessResponse with no AX paramaters."""
args = {
'mode': 'id_res',
'ns': OPENID2_NS,
}
sf = ['openid.' + i for i in list(args.keys())]
msg = Message.fromOpenIDArgs(args)
oreq = Response(msg, signed_fields=sf)
r = ax.FetchResponse.fromSuccessResponse(oreq)
self.assertTrue(r is None, "%s is not None" % (r,))
def test_fromSuccessResponseWithoutData(self):
"""return something for SuccessResponse with AX paramaters,
even if it is the empty set."""
args = {
'mode': 'id_res',
'ns': OPENID2_NS,
'ns.ax': ax.AXMessage.ns_uri,
'ax.mode': 'fetch_response',
}
sf = ['openid.' + i for i in list(args.keys())]
msg = Message.fromOpenIDArgs(args)
oreq = Response(msg, signed_fields=sf)
r = ax.FetchResponse.fromSuccessResponse(oreq)
self.assertTrue(r is not None)
def test_fromSuccessResponseWithData(self):
name = 'ext0'
value = 'snozzberry'
uri = "http://willy.wonka.name/"
args = {
'mode': 'id_res',
'ns': OPENID2_NS,
'ns.ax': ax.AXMessage.ns_uri,
'ax.update_url': 'http://example.com/realm/update_path',
'ax.mode': 'fetch_response',
'ax.type.' + name: uri,
'ax.count.' + name: '1',
'ax.value.%s.1' % name: value,
}
sf = ['openid.' + i for i in list(args.keys())]
msg = Message.fromOpenIDArgs(args)
resp = Response(msg, signed_fields=sf)
ax_resp = ax.FetchResponse.fromSuccessResponse(resp)
values = ax_resp.get(uri)
# coming through the system they'll be bytes-ified...
self.assertEqual([value], values)
class StoreRequestTest(unittest.TestCase):
def setUp(self):
self.msg = ax.StoreRequest()
self.type_a = 'http://three.count/'
self.alias_a = 'juggling'
def test_construct(self):
self.assertEqual({}, self.msg.data)
def test_getExtensionArgs_empty(self):
args = self.msg.getExtensionArgs()
expected_args = {
'mode': 'store_request',
}
self.assertEqual(expected_args, args)
def test_getExtensionArgs_nonempty(self):
aliases = NamespaceMap()
aliases.addAlias(self.type_a, self.alias_a)
msg = ax.StoreRequest(aliases=aliases)
msg.setValues(self.type_a, ['foo', 'bar'])
args = msg.getExtensionArgs()
expected_args = {
'mode': 'store_request',
'type.' + self.alias_a: self.type_a,
'count.' + self.alias_a: '2',
'value.%s.1' % (self.alias_a,): 'foo',
'value.%s.2' % (self.alias_a,): 'bar',
}
self.assertEqual(expected_args, args)
class StoreResponseTest(unittest.TestCase):
def test_success(self):
msg = ax.StoreResponse()
self.assertTrue(msg.succeeded())
self.assertFalse(msg.error_message)
self.assertEqual({'mode': 'store_response_success'},
msg.getExtensionArgs())
def test_fail_nomsg(self):
msg = ax.StoreResponse(False)
self.assertFalse(msg.succeeded())
self.assertFalse(msg.error_message)
self.assertEqual({'mode': 'store_response_failure'},
msg.getExtensionArgs())
def test_fail_msg(self):
reason = 'no reason, really'
msg = ax.StoreResponse(False, reason)
self.assertFalse(msg.succeeded())
self.assertEqual(reason, msg.error_message)
self.assertEqual({'mode': 'store_response_failure',
'error': reason}, msg.getExtensionArgs())
| apache-2.0 | -2,085,521,261,451,490,300 | 32.289617 | 77 | 0.570037 | false |
nielsmadan/mercury | plugin/mercury/python.py | 1 | 1771 | import parser
import mercury.util
import re
_STD_MODULES = ['re', 'random', 'itertools', 'string']
_IMPORT_1_REGEX = re.compile(r'^import (\S+)')
_IMPORT_2_REGEX = re.compile(r'^from (\S+) import (\S+)')
_INDENT_REGEX = re.compile(r'^(\s*).*')
def execute(code, buff):
code_lines = code.split("\n")
last_expr_start_line = _find_last_expr(code_lines)
if last_expr_start_line is not None:
last_expr = "\n".join(code_lines[last_expr_start_line:])
if not last_expr.startswith("print"):
last_expr = "print " + last_expr
code_lines[last_expr_start_line:] = last_expr.split("\n")
code_lines = _remove_indent(code_lines)
import_lines = _find_std_imports(buff)
return mercury.util.run_command(['python', '-c', '\n'.join(import_lines + code_lines)])
def _remove_indent(code_lines):
res = re.match(_INDENT_REGEX, code_lines[0])
if len(res.group(1)) > 0:
code_lines = [line[len(res.group(1)):] for line in code_lines]
return code_lines
def _find_std_imports(lines):
res = []
for line in lines:
m = re.search(_IMPORT_1_REGEX, line)
if m is not None and m.group(1) in _STD_MODULES:
res.append(line)
else:
m = re.search(_IMPORT_2_REGEX, line)
if m is not None and m.group(1) in _STD_MODULES:
res.append(line)
return res
def _find_last_expr(code_lines):
for x in range(len(code_lines) - 1, -1, -1):
code = '\n'.join(code_lines[x:])
try:
parser.suite(code)
try:
parser.expr(code)
except: # last statement is not an expression
return None
return x
except:
pass
return None
| isc | -7,417,111,132,614,242,000 | 25.833333 | 91 | 0.567476 | false |
detrout/pykolab | pykolab/setup/setup_kolabd.py | 1 | 2939 | # -*- coding: utf-8 -*-
# Copyright 2010-2012 Kolab Systems AG (http://www.kolabsys.com)
#
# Jeroen van Meeuwen (Kolab Systems) <vanmeeuwen a kolabsys.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 3 or, at your option, any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
import os
import subprocess
import components
import pykolab
from pykolab import utils
from pykolab.constants import *
from pykolab.translate import _
log = pykolab.getLogger('pykolab.setup')
conf = pykolab.getConf()
def __init__():
components.register(
'kolabd',
execute,
description=description(),
after=['ldap','imap']
)
def description():
return _("Setup the Kolab daemon.")
def execute(*args, **kw):
if conf.has_section('example.org'):
primary_domain = conf.get('kolab', 'primary_domain')
if not primary_domain == 'example.org':
utils.multiline_message(
_("""
Copying the configuration section for 'example.org' over to
a section applicable to your domain '%s'.
""") % (primary_domain)
)
conf.cfg_parser._sections[primary_domain] = \
conf.cfg_parser._sections['example.org']
conf.cfg_parser._sections.pop('example.org')
fp = open(conf.cli_keywords.config_file, "w+")
conf.cfg_parser.write(fp)
fp.close()
if os.path.isfile('/bin/systemctl'):
subprocess.call(['/bin/systemctl', 'restart', 'kolabd.service'])
elif os.path.isfile('/sbin/service'):
subprocess.call(['/sbin/service', 'kolabd', 'restart'])
elif os.path.isfile('/usr/sbin/service'):
subprocess.call(['/usr/sbin/service','kolab-server','restart'])
else:
log.error(_("Could not start the kolab server service."))
if os.path.isfile('/bin/systemctl'):
subprocess.call(['/bin/systemctl', 'enable', 'kolabd.service'])
elif os.path.isfile('/sbin/chkconfig'):
subprocess.call(['/sbin/chkconfig', 'kolabd', 'on'])
elif os.path.isfile('/usr/sbin/update-rc.d'):
subprocess.call(['/usr/sbin/update-rc.d', 'kolab-server', 'defaults'])
else:
log.error(_("Could not configure to start on boot, the " + \
"kolab server service."))
| gpl-3.0 | -5,576,212,773,111,987,000 | 34.841463 | 87 | 0.626744 | false |
rafaduran/django-pluggable-registration | registration/admin.py | 1 | 1928 | from django.contrib import admin
from django.contrib.sites.models import RequestSite
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
actions = ['resend_activation_email', 'delete_expired', 'delete_activated',
'clean']
list_display = ('email', 'activation_key_expired',
'activation_key_already_activated', 'activation_key_invalid')
search_fields = ('email',)
def resend_activation_email(self, request, queryset):
"""
Re-sends activation emails for the selected users.
Note that this will *only* send activation emails for users
who are eligible to activate; emails will not be sent to users
whose activation keys are invalid (expired or already activated).
"""
if Site._meta.installed:
site = Site.objects.get_current()
else:
site = RequestSite(request)
for profile in queryset:
if not profile.activation_key_invalid():
profile.send_activation_email(site)
resend_activation_email.short_description = _("Re-send activation emails")
def delete_expired(self, request, queryset):
"""
Deletes expired registration profiles.
"""
RegistrationProfile.objects.delete_expired(queryset)
def delete_activated(self, request, queryset):
"""
Deletes already activated registration profiles.
"""
RegistrationProfile.objects.delete_activated(queryset)
def clean(self, request, queryset):
"""
Deletes both, expired and already activated registration profiles.
"""
self.delete_expired(request, queryset)
self.delete_activated(request, queryset)
admin.site.register(RegistrationProfile, RegistrationAdmin)
| bsd-3-clause | -5,219,089,974,694,655,000 | 34.703704 | 79 | 0.674274 | false |
shawnchain/mclub-talkbox | test/test_record2.py | 1 | 3253 | #!/usr/bin/python
import pyaudio
import wave
import opus
from opus import encoder, decoder
import time
import RPIO
#the gpio routines
PTT_PIN = 27
def gpio_init():
global PTT_PIN, COR_PIN
print "RPi Board rev %d" % (RPIO.RPI_REVISION)
RPIO.setwarnings(False)
#RPIO.setmode(RPIO.BOARD)
RPIO.setup(PTT_PIN, RPIO.OUT, initial=RPIO.LOW)
def gpio_reset():
RPIO.cleanup()
#handle the radio
def radio_ptt_on():
global PTT_PIN
print "Turn on the radio PTT"
RPIO.output(PTT_PIN, True) # low level triggers the PTT on
def radio_ptt_off():
global PTT_PIN
print "Turn off the radio PTT"
RPIO.output(PTT_PIN, False) # high level triggers the PTT off
gpio_init()
#the audio part
CHUNK = 320
FORMAT = pyaudio.paInt16
SAMPLE_WIDTH = 2
CHANNELS = 1
RATE = 16000
RECORD_SECONDS = 5
WAVE_OUTPUT_FILENAME = "recorder_output.wav"
p = pyaudio.PyAudio()
stream = p.open(format=FORMAT,
channels=CHANNELS,
rate=RATE,
input=True,
frames_per_buffer=CHUNK)
stream_output = p.open(format=FORMAT,
channels=CHANNELS,
rate=RATE,
output=True,
frames_per_buffer=CHUNK)
print("* recording")
pcmdata = []
expected_record_pcmdata_len = CHANNELS * SAMPLE_WIDTH * RECORD_SECONDS * RATE
for i in range(0, int(RATE / CHUNK * RECORD_SECONDS)):
data = stream.read(CHUNK)
if i % 50 == 0:
print "recorded %ds, chunk bytes: %d" % ((i / 50 + 1),len(data))
pcmdata.append(data)
pcmbytes = b''.join(pcmdata);
print("* done recording, expected %d bytes, recorded %d bytes" % (expected_record_pcmdata_len, len(pcmbytes)))
stream.stop_stream()
stream.close()
#codec setup
opus_chunk = 20 # 20ms
frames_per_chunk = RATE * opus_chunk / 1000 # 320 frames per 20ms chunk
bytes_per_chunk = frames_per_chunk * SAMPLE_WIDTH # 640 bytes per 20ms chunk
enc = encoder.Encoder(RATE ,CHANNELS,'voip')
enc.complexity = 8
enc.vbr = 1
enc.bitrate = 27800
enc.signal = 3001 # signal voice
dec = decoder.Decoder(RATE ,CHANNELS)
#encoding
print "Encode PCM frames ..."
start = time.time()
encdata = []
for i in range(0, len(pcmbytes) , bytes_per_chunk):
x = pcmbytes[i: i + bytes_per_chunk]
y = enc.encode(x, frames_per_chunk)
encdata.append(y) #encode each frame into array
#if (i / frames_per_chunk) % 10 == 0:
# print "chunk %d (%d/%d bytes)/ %d" % (i / frames_per_chunk, len(y), len(x), f.getnframes() / frames_per_chunk)
#streamout.write(x)
elapsed = time.time() - start
print "Encode completed, elapsed %f s" % elapsed
#decoding
print "Decode OPUS frames ..."
start = time.time()
decdata = ''
for x in encdata:
decdata += dec.decode(x,frames_per_chunk)
elapsed = time.time() - start
print "Decode completed, elapsed %f s" % elapsed
#save to wave
print "Saving PCM frames ..."
wf = wave.open(WAVE_OUTPUT_FILENAME, 'wb')
wf.setnchannels(CHANNELS)
wf.setsampwidth(p.get_sample_size(FORMAT))
wf.setframerate(RATE)
wf.writeframes(decdata)
wf.close()
# play the dec data
radio_ptt_on()
print "Playing decoded PCM frames ..."
stream_output.write(decdata)
stream_output.stop_stream()
stream_output.close()
p.terminate()
radio_ptt_off()
gpio_reset()
| gpl-3.0 | -1,605,328,599,768,575,500 | 24.614173 | 119 | 0.659084 | false |
gajduk/social-networks-analysis-wan-bms | python code/compute_network_statistics.py | 1 | 4168 | import networkx as nx
from load_data import *
from compute_metrics_multiplex import *
def numNodes(g):
return g.number_of_nodes()
def numEdges(g):
return g.number_of_edges()
def avgDeg(g):
return g.number_of_edges()*1.0/g.number_of_nodes()
def getGiantComponent(g):
return g.subgraph(max([key for key in nx.strongly_connected_components(g)],key=len))
def diameter(g):
return nx.diameter(getGiantComponent(g))
def avgPathLen(g):
return nx.average_shortest_path_length(getGiantComponent(g))
def nodesInGiantComponent(g):
return getGiantComponent(g).number_of_nodes()
def edgesInGiantComponent(g):
return getGiantComponent(g).number_of_edges()
def assortativity(g):
return nx.degree_assortativity_coefficient(g,x="in",y="in")
stats = { "# nodes":numNodes, "# edges":numEdges, "Avg. degree":avgDeg , "Diameter":diameter,"Avg. path length":avgPathLen,\
"# Nodes in GC":nodesInGiantComponent,"# Edges in GC":edgesInGiantComponent,\
"Assortativity":assortativity}
def getHeader():
res = "\t"
for stat in stats:
res += ","+stat
return res
def getStatsForGraph(g):
res = g.graph["title"][4:]
for stat in stats:
res += ","+str(stats[stat](g))
return res
def getGlobalStatsFromAttribute(graphs,graph_combinations=[],metrics=[]):
res = graphs[0].graph["title"][:3]
for metric in metrics:
res += ","+metric
for i,k in graph_combinations:
gi,gk = graphs[i],graphs[k]
if i == k:
res += "\n"+gi.graph["title"][4:]
else:
res += "\n"+gi.graph["title"][4:]+"->"+gk.graph["title"][4:]
for metric in metrics:
attribute_name = "Global "+getMetricString(metric,gi,gk)
res += ",{0:.3f}".format(gi.graph[attribute_name])
return res
def getStatsDistributionFromAttribute(graphs,graph_combinations=[],metrics=[]):
res = graphs[0].graph["title"][:3]+"\n"
for i,k in graph_combinations:
g = graphs[i]
res += "\n"+g.graph["title"][4:]+"\nid"
for metric in metrics:
res += ","+metric
res += ",#out_edges,#in_edges\n"
for node in range(g.number_of_nodes()):
res += str(node)+","
for metric in metrics:
attribute_name = getMetricString(metric,g,g)
value = g.graph[attribute_name][node]
res += '{0:.3f}'.format(value)+","
res += str(len(g.out_edges(node)))+","+str(len(g.in_edges(node)))+"\n"
return res
def getStasForDataset(dataset="wan"):
graphs,node_mapping = load_4_layers(dataset)
#metrics = metrics_dict_multiplex.copy()
#del metrics["Overlapping Index"]
metrics = {"Reciprocity":reciprocity,"tp1":tp1,"tp2":tp2,\
"tc1":tc1,"tc2":tc2}
temp = [(i,i) for i in range(len(graphs))]
addMetricsAsAttributesMultiplex(graphs,temp,metrics)
res = getGlobalStatsFromAttribute(graphs,temp,metrics)
res += "\n"
res += "\n"
#res += "Multiplex"
#res += "\n"
#res += "\n"
#metrics = metrics_dict_multiplex.copy()
#temp = graph_combinations+[(k,i) for i,k in graph_combinations]
#addMetricsAsAttributesMultiplex(graphs,temp,metrics)
#res += getGlobalStatsFromAttribute(graphs,temp,metrics)
return res
def getSingleStatsForDataset(dataset="wan"):
graphs,node_mapping = load_4_layers(dataset)
temp = [(i,i) for i in range(len(graphs))]
metrics = {"Reciprocity":reciprocity,"tp1":tp1,"tp2":tp2,\
"tc1":tc1,"tc2":tc2}
addMetricsAsAttributesMultiplex(graphs,temp,metrics)
res = getStatsDistributionFromAttribute(graphs,temp,metrics)
return res
from csv_to_latex_table import *
def main():
with open("wan_table_56.csv","w") as pout:
pout.write(get56TablesForDataset("wan"))
with open("bms_table_56.csv","w") as pout:
pout.write(get56TablesForDataset("bms"))
table_from_csv("wan_table_56.txt","wan_table_56.csv")
table_from_csv("bms_table_56.txt","bms_table_56.csv")
if __name__ == "__main__":
main()
| mit | -1,605,592,017,539,149,000 | 31.061538 | 124 | 0.610365 | false |
avanzosc/avanzosc6.1 | avanzosc_crm_extension/wizard/__init__.py | 1 | 1075 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Avanzosc - Avanced Open Source Consulting
# Copyright (C) 2011 - 2012 Avanzosc <http://www.avanzosc.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
import wizard_crmext
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 3,291,739,809,187,456,000 | 43.791667 | 78 | 0.620465 | false |
uclouvain/OSIS-Louvain | base/forms/proposal/learning_unit_proposal.py | 1 | 9376 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django import forms
from django.db.models import Q, OuterRef, Subquery, Exists
from django.utils.translation import gettext_lazy as _, pgettext_lazy
from django_filters import FilterSet, filters, OrderingFilter
from base.business import event_perms
from base.business.entity import get_entities_ids
from base.models.academic_year import AcademicYear
from base.models.entity import Entity
from base.models.entity_version import EntityVersion
from base.models.enums.proposal_state import ProposalState, LimitedProposalState
from base.models.enums.proposal_type import ProposalType
from base.models.learning_unit_year import LearningUnitYear, LearningUnitYearQuerySet
from base.models.proposal_learning_unit import ProposalLearningUnit
from base.views.learning_units.search.common import SearchTypes
def _get_sorted_choices(tuple_of_choices):
return tuple(sorted(tuple_of_choices, key=lambda item: item[1]))
class ProposalLearningUnitOrderingFilter(OrderingFilter):
def filter(self, qs, value):
queryset = super().filter(qs, value)
if value and 'folder' in value:
queryset = queryset.order_by("entity_folder", "proposallearningunit__folder_id")
elif value and '-folder' in value:
queryset = queryset.order_by("-entity_folder", "-proposallearningunit__folder_id")
return queryset
class ProposalLearningUnitFilter(FilterSet):
academic_year = filters.ModelChoiceFilter(
queryset=AcademicYear.objects.all(),
required=False,
label=_('Ac yr.'),
empty_label=pgettext_lazy("plural", "All"),
)
acronym = filters.CharFilter(
field_name="acronym",
lookup_expr="iregex",
max_length=40,
required=False,
label=_('Code'),
)
requirement_entity = filters.CharFilter(
method='filter_entity',
max_length=20,
label=_('Req. Entity'),
)
with_entity_subordinated = filters.BooleanFilter(
method=lambda queryset, *args, **kwargs: queryset,
label=_('Include subordinate entities'),
widget=forms.CheckboxInput,
initial='True'
)
tutor = filters.CharFilter(
method="filter_tutor",
max_length=40,
label=_('Tutor'),
)
entity_folder = filters.ChoiceFilter(
field_name="proposallearningunit__entity_id",
label=_('Folder entity'),
required=False,
empty_label=pgettext_lazy("plural", "All"),
)
folder = filters.NumberFilter(
field_name="proposallearningunit__folder_id",
min_value=0,
required=False,
label=_('Folder num.'),
widget=forms.TextInput()
)
proposal_type = filters.ChoiceFilter(
field_name="proposallearningunit__type",
label=_('Proposal type'),
choices=_get_sorted_choices(ProposalType.choices()),
required=False,
empty_label=pgettext_lazy("plural", "All"),
)
proposal_state = filters.ChoiceFilter(
field_name="proposallearningunit__state",
label=_('Proposal status'),
choices=_get_sorted_choices(ProposalState.choices()),
required=False,
empty_label=pgettext_lazy("plural", "All"),
)
search_type = filters.CharFilter(
field_name="acronym",
method=lambda request, *args, **kwargs: request,
widget=forms.HiddenInput,
required=False,
initial=SearchTypes.PROPOSAL_SEARCH.value
)
order_by_field = 'ordering'
ordering = ProposalLearningUnitOrderingFilter(
fields=(
('academic_year__year', 'academic_year'),
('acronym', 'acronym'),
('full_title', 'title'),
('learning_container_year__container_type', 'type'),
('entity_requirement', 'requirement_entity'),
('proposallearningunit__type', 'proposal_type'),
('proposallearningunit__state', 'proposal_state'),
('proposallearningunit__folder_id', 'folder'), # Overrided by ProposalLearningUnitOrderingFilter
),
widget=forms.HiddenInput
)
class Meta:
model = LearningUnitYear
fields = [
"academic_year",
"acronym",
"subtype",
"requirement_entity",
]
def __init__(self, *args, person=None, **kwargs):
super().__init__(*args, **kwargs)
self.person = person
self.queryset = self.get_queryset
self._get_entity_folder_id_linked_ordered_by_acronym(self.person)
# Academic year default value = n+1 for proposals search -> use event having n+1 as first open academic year
event_perm = event_perms.EventPermCreationOrEndDateProposalFacultyManager()
self.form.fields["academic_year"].initial = event_perm.get_academic_years().first()
def _get_entity_folder_id_linked_ordered_by_acronym(self, person):
most_recent_acronym = EntityVersion.objects.filter(
entity__id=OuterRef('id'),
).order_by(
"-start_date"
).values('acronym')[:1]
entities = Entity.objects.filter(
proposallearningunit__isnull=False
).annotate(
entity_acronym=Subquery(most_recent_acronym)
).distinct().order_by(
"entity_acronym"
)
self.form.fields['entity_folder'].choices = [(ent.pk, ent.entity_acronym)
for ent in entities]
def filter_entity(self, queryset, name, value):
with_subordinated = self.form.cleaned_data['with_entity_subordinated']
lookup_expression = "__".join(["learning_container_year", name, "in"])
if value:
entity_ids = get_entities_ids(value, with_subordinated)
queryset = queryset.filter(**{lookup_expression: entity_ids})
return queryset
def filter_tutor(self, queryset, name, value):
for tutor_name in value.split():
filter_by_first_name = Q(
learningcomponentyear__attributionchargenew__attribution__tutor__person__first_name__iregex=tutor_name
)
filter_by_last_name = Q(
learningcomponentyear__attributionchargenew__attribution__tutor__person__last_name__iregex=tutor_name
)
queryset = queryset.filter(
filter_by_first_name | filter_by_last_name
).distinct()
return queryset
@property
def get_queryset(self):
# Need this close so as to return empty query by default when form is unbound
if not self.data:
return LearningUnitYear.objects.none()
entity_folder = EntityVersion.objects.filter(
entity=OuterRef('proposallearningunit__entity'),
).current(
OuterRef('academic_year__start_date')
).values('acronym')[:1]
has_proposal = ProposalLearningUnit.objects.filter(
learning_unit_year=OuterRef('pk'),
)
queryset = LearningUnitYear.objects_with_container.filter(
proposallearningunit__isnull=False
).select_related(
'academic_year',
'learning_container_year__academic_year',
'language',
'externallearningunityear',
'campus',
'proposallearningunit',
'campus__organization',
).prefetch_related(
"learningcomponentyear_set",
).annotate(
has_proposal=Exists(has_proposal),
entity_folder=Subquery(entity_folder),
)
queryset = LearningUnitYearQuerySet.annotate_full_title_class_method(queryset)
queryset = LearningUnitYearQuerySet.annotate_entities_allocation_and_requirement_acronym(queryset)
return queryset
class ProposalStateModelForm(forms.ModelForm):
class Meta:
model = ProposalLearningUnit
fields = ['state']
def __init__(self, *args, **kwargs):
super().__init__(*args)
if kwargs.pop('is_faculty_manager', False):
self.fields['state'].choices = LimitedProposalState.choices()
| agpl-3.0 | -4,709,324,834,823,567,000 | 37.739669 | 118 | 0.631147 | false |
delete/spymanager | src/flooder.py | 1 | 1306 | import gevent
from gevent import monkey, pool
monkey.patch_all()
class Flooder():
def __init__(self, site, publisher, image_cache_handler):
self.site = site
self.publisher = publisher
self.image_cache_handler = image_cache_handler
self.cache_handlers = []
def _push_to(self, chat_id, messages):
self.publisher.send_to(chat_id)
for message in messages:
self.publisher.send(message)
def _pull_from(self, subscription):
user = self.site.get_user(subscription['username'])
self.image_cache_handler.get_or_create(username=user.username)
new_images = self.image_cache_handler.get_the_news(user.images)
# This need run after send all images, because bulk is raising an
# InvalidOperation Exception: Bulk operations can only be executed once
self.image_cache_handler.add_the_images(new_images)
chat_ids = [s['chat_id'] for s in subscription['subscribers']]
p = pool.Pool(5)
for _id in chat_ids:
p.spawn(self._push_to, _id, new_images)
p.join()
def flood_to(self, subscriptions):
jobs = [
gevent.spawn(self._pull_from, subscription)
for subscription in subscriptions
]
gevent.wait(jobs)
| mit | -7,102,846,170,723,223,000 | 29.372093 | 79 | 0.627871 | false |
liuyang1/ssh | newcifar/radius.py | 1 | 1432 | import conf
import numpy as np
import random
import load
import scipy.spatial
import hashing
def query(q, hashingArray):
rank = [scipy.spatial.distance.hamming(q, h) for h in hashingArray]
K = len(q)
r = 2.0 / len(q)
return [idx for idx, val in enumerate(rank) if val <= r]
if __name__ == "__main__":
K = conf.K
W = hashing.buildSeqLHW()
hashingArray = load.loadData("/tmp/cifar.hashingArray")
data = load.loadData("/home/liuy/obj/gist.ans.py.data.10000")
X = np.matrix(data)
X = X.T
X -= X.mean(1)
train = load.loadData("/home/liuy/obj/cifar-10-batches-py/data_batch_1")
label = train["labels"]
precisionLst, recallLst = [], []
idxLst = range(0, len(data))
random.shuffle(idxLst)
idxLst = idxLst[0:200]
for idx in idxLst:
x = X[:, idx]
objlabel = label[idx]
q = hashing.hashingK(W, x)
ret = query(q, hashingArray)
cnt = len([i for i in ret if label[i] == objlabel]) - 1
try:
precision = (cnt + 0.0) / (len(ret) - 1)
except ZeroDivisionError:
precision = 0
try:
recall = (cnt + 0.0) / (len([i for i in label if i == objlabel]) - 1)
except ZeroDivisionError:
recall = 0
precisionLst.append(precision)
recallLst.append(recall)
print sum(precisionLst) / len(precisionLst)
print sum(recallLst) / len(recallLst)
| gpl-2.0 | 1,308,179,569,570,913,000 | 26.538462 | 81 | 0.586592 | false |
cellnopt/cellnopt | test/io/test_mapback.py | 1 | 1156 | from cno.io.mapback import MapBack
from cno import CNOGraph, cnodata
def test_mapback():
# init
pknmodel = CNOGraph(cnodata("PKN-ToyMMB.sif"), cnodata("MD-ToyMMB.csv"))
model = CNOGraph(cnodata("PKN-ToyMMB.sif"), cnodata("MD-ToyMMB.csv"))
model.preprocessing()
mp = MapBack(pknmodel, model)
links2map = ['Erk^TNFa=Hsp27', '!Akt=Mek']
new = mp.mapback(links2map)
assert sorted(new) == sorted(['!Akt=Mek', 'Akt=Mek', 'Erk=Hsp27',
'TNFa=TRAF6', 'TRAF6=p38', 'p38=Hsp27'])
mp.plotall(links2map)
def test_mapback_and_gates_in_pkn():
model = CNOGraph(cnodata("PKN-ToyPB_True.sif"), cnodata("MD-ToyPB_True.csv"))
model.preprocessing(expansion=False)
pknmodel = CNOGraph(cnodata("PKN-ToyPB_True.sif"), cnodata("MD-ToyPB_True.csv"))
mp = MapBack(pknmodel, model)
solutions = sorted(['!erk=ph', '!ikb=nfkb', 'egf=egfr', 'mkk4=p38',
'ras=map3k1', 'sos=ras', 'tnfa=tnfr', 'tnfr=traf2',
'traf2=map3k7', 'map3k1^map3k7=mkk4', 'egfr^!ph=sos'])
new = mp.mapback(['sos^tnfa=p38', '!ikb=nfkb', 'egf^!erk=sos'])
assert sorted(new) == sorted(solutions)
| bsd-2-clause | 1,428,987,621,450,412,000 | 27.9 | 84 | 0.625433 | false |
sberrydavis/Forager | bin/cb/generate_feed.py | 1 | 4065 | __author__ = 'CarbonBlack, byt3smith'
# stdlib imports
import re
import sys
import time
import urllib
import json
import optparse
import socket
import base64
import hashlib
# cb imports
sys.path.insert(0, "../../")
from cbfeeds.feed import CbReport
from cbfeeds.feed import CbFeed
from cbfeeds.feed import CbFeedInfo
#pypi
from colorama import Fore, Back, Style, init
# Initialize colorama
init(autoreset=True)
def gen_report_id(iocs):
"""
a report id should be unique
because generate_feed_from_raw may be run repeatedly on the same data, it should
also be deterministic.
this routine sorts all the indicators, then hashes in order to meet these criteria
"""
md5 = hashlib.md5()
# sort the iocs so that a re-order of the same set of iocs results in the same report id
iocs.sort()
for ioc in iocs:
md5.update(ioc.strip())
return md5.hexdigest()
def build_reports(options):
reports = []
ips = []
domains = []
md5s = []
# read all of the lines (of text) from the provided
# input file (of IOCs)
#
iocs = options['ioc_file']
try:
raw_iocs = open(iocs).readlines()
except:
print(Fore.RED + '\n[-]' + Fore.RESET),
print 'Could not open %s' % iocs
exit(0)
# iterate over each of the lines
# attempt to determine if each line is a suitable
# ipv4 address, dns name, or md5
#
for raw_ioc in raw_iocs:
# strip off any leading or trailing whitespace
# skip any empty lines
#
raw_ioc = raw_ioc.strip()
if len(raw_ioc) == 0:
continue
try:
# attempt to parse the line as an ipv4 address
#
socket.inet_aton(raw_ioc)
# parsed as an ipv4 address!
#
ips.append(raw_ioc)
except Exception, e:
# attept to parse the line as a md5 and, if that fails,
# as a domain. use trivial parsing
#
if 32 == len(raw_ioc) and \
re.findall(r"([a-fA-F\d]{32})", raw_ioc):
md5s.append(raw_ioc)
elif -1 != raw_ioc.find("."):
domains.append(raw_ioc)
fields = {'iocs': {
},
'timestamp': int(time.mktime(time.gmtime())),
'link': options['feed_link'],
'title': options['report_name'],
'id': gen_report_id(ips + domains + md5s),
'score': 100}
if len(ips) > 0:
fields['iocs']['ipv4'] = ips
if len(domains) > 0:
fields['iocs']['dns'] = domains
if len(md5s) > 0:
fields['iocs']['md5'] = md5s
reports.append(CbReport(**fields))
return reports
def create_feed(options):
feed_meta = json.loads(options)
# generate the required feed information fields
# based on command-line arguments
#
feedinfo = {'name': feed_meta['name'],
'display_name': feed_meta['display_name'],
'provider_url': feed_meta['provider_url'],
'summary': feed_meta['summary'],
'tech_data': feed_meta['tech_data']}
# if an icon was provided, encode as base64 and
# include in the feed information
#
if feed_meta['icon']:
try:
bytes = base64.b64encode(open(feed_meta['icon']).read())
feedinfo['icon'] = bytes
except:
print(Fore.RED + '\n[-]' + Fore.RESET),
print 'Could not open %s. Make sure file still exists.\n' % feed_meta['icon']
# build a CbFeedInfo instance
# this does field validation
#
feedinfo = CbFeedInfo(**feedinfo)
# build a list of reports (always one report in this
# case). the single report will include all the IOCs
#
reports = build_reports(feed_meta)
# build a CbFeed instance
# this does field validation (including on the report data)
#
feed = CbFeed(feedinfo, reports)
return feed.dump()
| mit | 3,383,304,211,393,348,600 | 25.743421 | 92 | 0.565806 | false |
geggo/pyface | examples/chained_wizard.py | 3 | 3371 | #------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
""" Chained wizard example. """
from __future__ import print_function
# Standard library imports.
import os
import sys
# Put the Enthought library on the Python path.
sys.path.append(os.path.abspath(r'..\..\..'))
# Enthought library imports.
from pyface.api import GUI, OK
from pyface.wizard.api import ChainedWizard, Wizard, WizardPage
from traits.api import Color, HasTraits, Int, Str
class Details(HasTraits):
""" Some test data. """
name = Str
color = Color
class SimpleWizardPage(WizardPage):
""" A simple wizard page. """
#### 'SimpleWizardPage' interface #########################################
# The page color.
color = Color
###########################################################################
# 'IWizardPage' interface.
###########################################################################
def _create_page_content(self, parent):
""" Create the wizard page. """
details = Details(color=self.color)
details.on_trait_change(self._on_name_changed, 'name')
return details.edit_traits(parent=parent, kind='subpanel').control
###########################################################################
# Private interface.
###########################################################################
#### Trait event handlers #################################################
def _on_name_changed(self, new):
""" Called when the name has been changed. """
self.complete = len(new.strip()) > 0
return
# Application entry point.
if __name__ == '__main__':
# Create the GUI (this does NOT start the GUI event loop).
gui = GUI()
wizard = ChainedWizard(
parent = None,
title = 'Chained wizard root',
pages = [
SimpleWizardPage(id='foo', color='red',
heading="The Red Page",
subheading="The default color on this page is red.")
]
)
next_wizard = Wizard(
parent = None,
title = 'Chained wizard child.',
pages = [
SimpleWizardPage(id='bar', color='yellow',
heading="The Yellow Page",
subheading="The default color on this page is yellow."),
SimpleWizardPage(id='baz', color='green',
heading="The Green Page",
subheading="The default color on this page is green.")
]
)
wizard.next_wizard = next_wizard
# Open the wizard.
if wizard.open() == OK:
print('Wizard completed successfully')
else:
print('Wizard cancelled')
#### EOF ######################################################################
| bsd-3-clause | 1,969,681,704,659,646,500 | 30.504673 | 79 | 0.503115 | false |
skinkie/SleekXMPP--XEP-0080- | sleekxmpp/xmlstream/handler/callback.py | 2 | 2874 | """
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2010 Nathanael C. Fritz
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
from sleekxmpp.xmlstream.handler.base import BaseHandler
class Callback(BaseHandler):
"""
The Callback handler will execute a callback function with
matched stanzas.
The handler may execute the callback either during stream
processing or during the main event loop.
Callback functions are all executed in the same thread, so be
aware if you are executing functions that will block for extended
periods of time. Typically, you should signal your own events using the
SleekXMPP object's event() method to pass the stanza off to a threaded
event handler for further processing.
Methods:
prerun -- Overrides BaseHandler.prerun
run -- Overrides BaseHandler.run
"""
def __init__(self, name, matcher, pointer, thread=False,
once=False, instream=False, stream=None):
"""
Create a new callback handler.
Arguments:
name -- The name of the handler.
matcher -- A matcher object for matching stanza objects.
pointer -- The function to execute during callback.
thread -- DEPRECATED. Remains only for backwards compatibility.
once -- Indicates if the handler should be used only
once. Defaults to False.
instream -- Indicates if the callback should be executed
during stream processing instead of in the
main event loop.
stream -- The XMLStream instance this handler should monitor.
"""
BaseHandler.__init__(self, name, matcher, stream)
self._pointer = pointer
self._once = once
self._instream = instream
def prerun(self, payload):
"""
Execute the callback during stream processing, if
the callback was created with instream=True.
Overrides BaseHandler.prerun
Arguments:
payload -- The matched stanza object.
"""
BaseHandler.prerun(self, payload)
if self._instream:
self.run(payload, True)
def run(self, payload, instream=False):
"""
Execute the callback function with the matched stanza payload.
Overrides BaseHandler.run
Arguments:
payload -- The matched stanza object.
instream -- Force the handler to execute during
stream processing. Used only by prerun.
Defaults to False.
"""
if not self._instream or instream:
BaseHandler.run(self, payload)
self._pointer(payload)
if self._once:
self._destroy = True
| mit | 3,967,132,389,688,762,400 | 33.214286 | 77 | 0.612735 | false |
dacsunlimited/dacplay-game-samples | tools/burn-btc.py | 1 | 2064 | #! /usr/bin/env python
"""
burn-btc: create a bitcoin burn address
By James C. Stroud
This program requires base58 (https://pypi.python.org/pypi/base58/0.2.1).
Call the program with a template burn address as the only argument::
% burn-btc 1BurnBTCForFunBurnBTCForFunXXXXXXX
1BurnBTCForFunBurnBTCForFunXTmJXrC
For instructions, call the program with no arguments::
% burn-btc
usage: burn-btc TEMPLATE
TEMPLATE - 34 letters & numbers (no zeros)
the first two are coin specific
An example template is accessible using "test" as the argument::
% burn-btc test
1BurnBTCForFunBurnBTCForFunXTmJXrC
Validate bitcoin burn addresses at http://uncaptcha.net/bitcoin.php
"""
import sys
import binascii
from hashlib import sha256
from base58 import b58encode, b58decode
ABET = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
class BurnBTCError(Exception):
pass
class AlphabetError(BurnBTCError):
pass
def hh256(s):
s = sha256(s).digest()
return binascii.hexlify(sha256(s).digest())
def b58ec(s):
unencoded = str(bytearray.fromhex(unicode(s)))
encoded = b58encode(unencoded)
return encoded
def b58dc(encoded, trim=0):
unencoded = b58decode(encoded)[:-trim]
return unencoded
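# burn() strips the template's trailing 4-byte checksum, recomputes a valid
# double-SHA256 checksum over the remaining payload, and re-encodes it in
# base58, yielding a well-formed address with no known private key.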
def burn(s):
decoded = b58dc(s, trim=4)
decoded_hex = binascii.hexlify(decoded)
check = hh256(decoded)[:8]
coded = decoded_hex + check
return b58ec(coded)
def usage():
print "usage: burn-btc TEMPLATE"
print
print " TEMPLATE - 34 letters & numbers (no zeros)"
print " the first two are coin specific"
raise SystemExit
if __name__ == "__main__":
if len(sys.argv) != 2:
usage()
if sys.argv[1] == "test":
template = "1BurnBTCForFunBurnBTCForFunXXXXXXX"
else:
template = sys.argv[1]
for c in template:
if c not in ABET:
raise AlphabetError("Character '%s' is not valid base58." % c)
tlen = len(template)
if tlen < 34:
template = template + ((34 - tlen) * "X")
else:
template = template[:34]
print burn(template) | unlicense | -1,859,950,050,534,061,000 | 21.692308 | 73 | 0.692345 | false |
transientlunatic/acreroad_1420 | acreroad_1420/receiver.py | 2 | 5474 | """
acreroad_1420 Receiver software
Software designed to receive signals through the 1420MHz telescope at Acre Road observatory.
Parameters
----------
serial : str
The serial number of the ettus device being used.
"""
# The large number of imports required for GNURadio
import os
import sys
sys.path.append(os.environ.get('GRC_HIER_PATH', os.path.expanduser('~/.grc_gnuradio')))
from PyQt4 import Qt
from PyQt4.QtCore import QObject, pyqtSlot
from gnuradio import blocks
from gnuradio import eng_notation
from gnuradio import gr
from gnuradio import qtgui
from gnuradio import uhd
from gnuradio.eng_option import eng_option
from gnuradio.filter import firdes
from gnuradio.qtgui import Range, RangeWidget
from optparse import OptionParser
from specflatfile import specflatfile # grc-generated hier_block
from srt_integrator import srt_integrator # grc-generated hier_block
from ts_ave import ts_ave # grc-generated hier_block
import spectroscopy
import sip
import time
# Parse config files
import ConfigParser
class Receiver(gr.top_block):
def __init__(self, ettus_id=None, \
source_gain = None, \
freq_0=None, \
freq_offset=None, \
samp_rate=None, \
eq_file=None
):
gr.top_block.__init__(self, "SRT Recorder")
# Access the configuration file
# TODO Move this to a better location within the entire package.
config = ConfigParser.SafeConfigParser()
config.read('settings.cfg')
if not ettus_id:
ettus_id = config.get('receiver', 'ettus_id')
if not source_gain:
source_gain = float(config.get('receiver', 'source_gain'))
if not freq_0:
freq_0 = float(config.get('receiver', 'centre_freq'))
if not freq_offset:
freq_offset = float(config.get('receiver', 'freq_offset'))
if not samp_rate:
samp_rate = float(config.get('receiver', 'samp_rate'))
self.samp_rate = samp_rate
if not eq_file:
eq_file = config.get('receiver', 'equalisation_file')
self.eq_file = eq_file
# Initialise the RX stream from UHD
self._init_rx_connection(ettus_id, samp_rate, freq_0, freq_offset, source_gain)
def _init_rx_connection(self, ettus_id, samp_rate, freq_0, freq_offset, gain):
"""
        Establish a receiving connection to an Ettus device using the UHD protocol.
Parameters
----------
ettus_id : str
The serial number of the ettus device
samp_rate : int
The sample rate desired from the reciever.
freq_0 : float
The desired centre frequency of the passband
freq_offset : float
The local oscillator offset, used to remove the peak produced by the LO.
gain : float
The desired gain from the receiver.
"""
#
# We need to set up a connection to the Ettus receiver
#
self.rx_con = uhd.usrp_source( ",".join(("serial="+ettus_id, "")),
uhd.stream_args(
cpu_format="fc32",
channels=range(1),
)
)
self.rx_con.set_samp_rate(samp_rate)
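        # Tune with a local-oscillator offset so the LO leakage spike falls
        # outside the observed band (hence the -freq_offset in tune_request).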
self.rx_con.set_center_freq(uhd.tune_request(freq_0 , -freq_offset), 0)
self.rx_con.set_gain(gain, 0)
def save_spectral_power(self, filepath, fft_len=2048, int_time=5, samp_rate=None, eq_file=None):
"""
        Save the integrated spectrum from the telescope at regular intervals to an ASCII file.
Parameters
----------
filepath : str
The path to the file where the results should be output.
fft_len : int
The number of bins which the spectrum should be made of (i.e. the length of the
FFT transform.) Defaults to 2048.
int_time : float
The number of seconds over which each output sample is integrated. Defaults to 5 seconds.
samp_rate : int
The input sample rate of the data. This defaults to the sample rate of the ettus
device, which should be fine for most (all?!) circumstances.
eq_file : str
The file containing the equalisation profile. This defaults to the the profile
used in the rest of the module.
"""
if not samp_rate: samp_rate = self.samp_rate
if not eq_file: eq_file = self.eq_file
self.specflatfile = specflatfile(fft_len=2**11,fft_size=2**11,flat_file=eq_file,samp_rate=samp_rate)
self.integrator = srt_integrator(fft_len=fft_len, int_time=int_time, reset_flag=0, samp_rate=samp_rate)
self.blocks_keep_one_in_n = blocks.keep_one_in_n(gr.sizeof_float*fft_len, int_time*int(samp_rate/fft_len))
self.blocks_null = blocks.null_sink(gr.sizeof_float*fft_len)
self.asci_sink = spectroscopy.asci_sink(fft_len, filepath)
self.connect((self.rx_con, 0), (self.specflatfile, 0))
self.connect((self.specflatfile, 0), (self.integrator, 0))
self.connect((self.integrator,0), (self.blocks_null, 0))
self.connect((self.integrator, 1), (self.blocks_keep_one_in_n, 0))
self.connect((self.blocks_keep_one_in_n, 0), (self.asci_sink, 0))
| bsd-3-clause | -8,130,030,511,325,656,000 | 35.744966 | 114 | 0.606503 | false |
bjornaa/roppy | examples/makecoast.py | 1 | 1894 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Extract a closed coast line
Extracts a coast line from GSHHS using the
advanced polygon handling features in Basemap
The polygons are saved to a two-column
text file, using NaNs to separate the polygons.
An example of how to read back the data and
plot filled land is given in pcoast.py
"""
# ----------------------------------
# Bjørn Ådlandsvik <bjorn at imr.no>
# Institute of Marine Research
# 2014-10-12
# ----------------------------------
# ---------------
# Imports
# ---------------
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
# -----------------
# User settings
# -----------------
# Geographical extent (should include all land in domain)
lon0, lon1 = -12, 16 # Longitude range
lat0, lat1 = 47, 66 # Latitude range
# Choose GSHHS resolution
res = 'i' # intermediate resolution
# Output coast file
outfile = 'data/coast.dat'
# ------------------------------
# Set up Basemap map projection
# ------------------------------
# Use cylindrical equidistand projection
# i.e. x = lon, y = lat
m = Basemap(projection = 'cyl',
llcrnrlon = lon0,
llcrnrlat = lat0,
urcrnrlon = lon1,
urcrnrlat = lat1,
resolution = res)
# ----------------------------
# Get the coast polygon data
# ----------------------------
polygons = []
for i, p in enumerate(m.coastpolygons):
# Use only coast polygons (ignore lakes)
if m.coastpolygontypes[i] == 1:
polygons.append(p)
# --------------------
# Save the coast data
# --------------------
with open(outfile, 'w') as fid:
for p in polygons: # Loop over the polygons
for v in zip(*p): # Loop over the vertices
fid.write('{:7.3f}{:7.3f}\n'.format(*v))
fid.write(' Nan Nan\n') # Separate the polygons
| mit | -6,324,998,205,786,012,000 | 24.567568 | 64 | 0.536998 | false |
sdispater/poet | poet/installer.py | 1 | 18571 | # -*- coding: utf-8 -*-
import tempfile
import os
import shutil
import subprocess
from packaging.utils import canonicalize_name
from pip.download import unpack_url
from pip.index import Link
from piptools.resolver import Resolver
from piptools.repositories import PyPIRepository
from piptools.scripts.compile import get_pip_command
from piptools.cache import DependencyCache
from piptools.utils import is_pinned_requirement, key_from_req
from .locations import CACHE_DIR
from .package.pip_dependency import PipDependency
from .utils.helpers import call, template
class Installer(object):
UNSAFE = ['setuptools']
def __init__(self, command, repository, with_progress=False):
self._command = command
self._poet = command.poet
self._repository = repository
self._with_progress = with_progress
def install(self, features=None, dev=True):
"""
Install packages defined in configuration files.
If a lock file does not exist, it will lock dependencies
before installing them.
:param features: Features to install
:type features: list or None
:param dev: Whether to install dev dependencies or not
:type dev: bool
:rtype: None
"""
if not os.path.exists(self._poet.lock_file):
if features:
for feature in features:
if feature not in self._poet.features:
raise ValueError(
'Feature [{}] does not exist'
.format(feature)
)
self.lock(dev=dev)
return self.install(features=features, dev=dev)
lock = self._poet.lock
if features:
for feature in features:
if feature not in lock.features:
raise ValueError(
'Feature [{}] does not exist'
.format(feature)
)
self._command.line('')
self._command.line('<info>Installing dependencies</>')
self._command.line('')
deps = lock.pip_dependencies
if dev:
deps += lock.pip_dev_dependencies
featured_packages = set()
for feature, packages in lock.features.items():
if feature in features:
for package in packages:
featured_packages.add(canonicalize_name(package))
for dep in deps:
name = dep.name
# Package is optional but is not featured
if dep.optional and name not in featured_packages:
continue
# Checking Python version
if dep.is_python_restricted():
python_version = self._command.python_version
if not any([python_version in python for python in dep.python]):
# If the package is not compatible
# with the current Python version
# we do not install
if self._command.output.is_verbose():
self._command.line(
' - Skipping <info>{}</> '
'(Specifies Python <comment>{}</> and current Python is <comment>{}</>)'
.format(name, ','.join([str(p) for p in dep.python]), python_version)
)
continue
cmd = [self._command.pip(), 'install', dep.normalized_name]
if dep.is_vcs_dependency():
constraint = dep.pretty_constraint
# VCS must be updated to be installed
cmd.append('-U')
else:
constraint = dep.constraint.replace('==', '')
message = (
' - Installing <info>{}</> (<comment>{}</>)'
.format(name, constraint)
)
end_message = (
'Installed <info>{}</> (<comment>{}</>)'
.format(name, constraint)
)
error_message = 'Error while installing [{}]'.format(name)
self._progress(cmd, message[3:], end_message, message, error_message)
def update(self, packages=None, features=None, dev=True):
if self._poet.is_lock():
raise Exception('Update is only available with a poetry.toml file.')
if packages and features:
raise Exception('Cannot specify packages and features when updating.')
self._command.line('')
self._command.line('<info>Updating dependencies</>')
self._command.line('')
# Reading current lock
lock = self._poet.lock
current_deps = lock.pip_dependencies
if dev:
current_deps += lock.pip_dev_dependencies
# Resolving new dependencies and locking them
deps = self._poet.pip_dependencies
if dev:
deps += self._poet.pip_dev_dependencies
featured_packages = set()
for feature, _packages in self._poet.features.items():
if feature in features:
for package in _packages:
featured_packages.add(canonicalize_name(package))
# Removing optional packages unless they are featured packages
deps = [
dep
for dep in deps
if not dep.optional
or dep.optional and dep.name in featured_packages
]
if packages:
packages = [p for p in self.resolve(deps) if p['name'] in packages]
else:
packages = self.resolve(deps)
deps = [PipDependency(p['name'], p['version']) for p in packages]
delete = not packages and not features
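        # Only prune installed packages on a full update; updating named
        # packages or features must not uninstall anything else.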
actions = self._resolve_update_actions(deps, current_deps, delete=delete)
if not actions:
self._command.line(' - <info>Dependencies already up-to-date!</info>')
return
installs = len([a for a in actions if a[0] == 'install'])
updates = len([a for a in actions if a[0] == 'update'])
uninstalls = len([a for a in actions if a[0] == 'remove'])
summary = []
if updates:
summary.append('<comment>{}</> updates'.format(updates))
if installs:
summary.append('<comment>{}</> installations'.format(installs))
if uninstalls:
summary.append('<comment>{}</> uninstallations'.format(uninstalls))
if len(summary) > 1:
summary = ', '.join(summary)
else:
summary = summary[0]
self._command.line(' - Summary: {}'.format(summary))
error = False
for action, from_, dep in actions:
cmd = [self._command.pip()]
description = 'Installing'
if action == 'remove':
description = 'Removing'
cmd += ['uninstall', dep.normalized_name, '-y']
elif action == 'update':
description = 'Updating'
cmd += ['install', dep.normalized_name, '-U']
else:
cmd += ['install', dep.normalized_name]
name = dep.name
if dep.is_vcs_dependency():
constraint = dep.pretty_constraint
else:
constraint = dep.constraint.replace('==', '')
version = '<comment>{}</>'.format(constraint)
if from_:
if from_.is_vcs_dependency():
constraint = from_.pretty_constraint
else:
constraint = from_.constraint.replace('==', '')
version = '<comment>{}</> -> '.format(constraint) + version
message = ' - {} <info>{}</> ({})'.format(description, name, version)
start_message = message[3:]
end_message = '{} <info>{}</> ({})'.format(description.replace('ing', 'ed'), name, version)
error_message = 'Error while {} [{}]'.format(description.lower(), name)
self._progress(cmd, start_message, end_message, message, error_message)
if not error:
# If everything went well, we write down the lock file
features = {}
for name, featured_packages in self._poet.features.items():
name = canonicalize_name(name)
features[name] = [canonicalize_name(p) for p in featured_packages]
self._write_lock(packages, features)
def lock(self, dev=True):
if self._poet.is_lock():
return
self._command.line('')
self._command.line('<info>Locking dependencies to <comment>poetry.lock</></>')
self._command.line('')
deps = self._poet.pip_dependencies
if dev:
deps += self._poet.pip_dev_dependencies
packages = self.resolve(deps)
features = {}
for name, featured_packages in self._poet.features.items():
name = canonicalize_name(name)
features[name] = [canonicalize_name(p) for p in featured_packages]
self._write_lock(packages, features)
def resolve(self, deps):
if not self._with_progress:
self._command.line(' - <info>Resolving dependencies</>')
return self._resolve(deps)
with self._spin(
'<info>Resolving dependencies</>',
'<info>Resolving dependencies</>'
):
return self._resolve(deps)
def _resolve(self, deps):
# Checking if we should active prereleases
prereleases = False
for dep in deps:
if dep.accepts_prereleases():
prereleases = True
break
constraints = [dep.as_requirement() for dep in deps]
command = get_pip_command()
opts, _ = command.parse_args([])
resolver = Resolver(
constraints, PyPIRepository(opts, command._build_session(opts)),
cache=DependencyCache(CACHE_DIR),
prereleases=prereleases
)
matches = resolver.resolve()
pinned = [m for m in matches if not m.editable and is_pinned_requirement(m)]
unpinned = [m for m in matches if m.editable or not is_pinned_requirement(m)]
reversed_dependencies = resolver.reverse_dependencies(matches)
# Complete reversed dependencies with cache
cache = resolver.dependency_cache.cache
for m in unpinned:
name = key_from_req(m.req)
if name not in cache:
continue
dependencies = cache[name][list(cache[name].keys())[0]]
for dep in dependencies:
dep = canonicalize_name(dep)
if dep not in reversed_dependencies:
reversed_dependencies[dep] = set()
reversed_dependencies[dep].add(canonicalize_name(name))
hashes = resolver.resolve_hashes(pinned)
packages = []
for m in matches:
name = key_from_req(m.req)
if name in self.UNSAFE:
continue
version = str(m.req.specifier)
if m in unpinned:
url, specifier = m.link.url.split('@')
rev, _ = specifier.split('#')
version = self._get_vcs_version(url, rev)
checksum = 'sha1:{}'.format(version['rev'])
else:
version = version.replace('==', '')
checksum = list(hashes[m])
# Figuring out category and optionality
category = None
optional = False
# Checking if it's a main dependency
for dep in deps:
if dep.name == name:
category = dep.category
optional = dep.optional
break
if not category:
def _category(child):
opt = False
cat = None
parents = reversed_dependencies.get(child, set())
for parent in parents:
for dep in deps:
if dep.name != parent:
continue
opt = dep.optional
if dep.category == 'main':
# Dependency is given by at least one main package
# We flag it as main
return 'main', opt
return 'dev', opt
                        cat, opt = _category(parent)
                        if cat is not None:
                            return cat, opt
return cat, opt
category, optional = _category(name)
# If category is still None at this point
# The dependency must have come from a VCS
# dependency. To avoid missing packages
# we assume "main" category and not optional
if category is None:
category = 'main'
optional = False
if not isinstance(checksum, list):
checksum = [checksum]
# Retrieving Python restriction if any
python = self._get_pythons_for_package(
name, reversed_dependencies, deps
)
python = list(python)
if '*' in python:
# If at least one parent gave a wildcard
# Then it should be installed for any Python version
python = ['*']
package = {
'name': name,
'version': version,
'checksum': checksum,
'category': category,
'optional': optional,
'python': python
}
packages.append(package)
return sorted(packages, key=lambda p: p['name'].lower())
def _resolve_update_actions(self, deps, current_deps, delete=True):
"""
        Determine actions on dependencies.
:param deps: New dependencies
:type deps: list[poet.package.PipDependency]
:param current_deps: Current dependencies
:type current_deps: list[poet.package.PipDependency]
:param delete: Whether to add delete actions or not
:type delete: bool
:return: List of actions to execute
        :rtype: list[tuple]
"""
actions = []
for dep in deps:
action = None
from_ = None
found = False
for current_dep in current_deps:
name = dep.name
current_name = current_dep.name
version = dep.normalized_constraint
current_version = current_dep.normalized_constraint
if name == current_name:
# Existing dependency
found = True
if version == current_version:
break
# If version is different we mark it
# as to be updated
action = 'update'
from_ = current_dep
break
if not found:
# New dependency. We mark it as to be installed.
action = 'install'
if action:
actions.append((action, from_, dep))
if not delete:
return actions
# We need to check if we have to remove
# any dependency
for dep in current_deps:
found = False
for new_dep in deps:
if dep.name == new_dep.name:
found = True
break
if not found:
actions.append(('remove', None, dep))
return actions
def _get_vcs_version(self, url, rev):
tmp_dir = tempfile.mkdtemp()
current_dir = self._poet.base_dir
try:
unpack_url(Link(url), tmp_dir, download_dir=tmp_dir, only_download=True)
os.chdir(tmp_dir)
call(['git', 'checkout', rev])
revision = call(['git', 'rev-parse', rev])
# Getting info
revision = revision.strip()
version = {
'git': url,
'rev': revision
}
except Exception:
raise
finally:
shutil.rmtree(tmp_dir)
# Going back to current directory
os.chdir(current_dir)
return version
def _write_lock(self, packages, features):
self._command.line(' - <info>Writing dependencies</>')
content = self._generate_lock_content(packages, features)
with open(self._poet.lock_file, 'w') as f:
f.write(content)
def _generate_lock_content(self, packages, features):
lock_template = template('poetry.lock')
return lock_template.render(
name=self._poet.name,
version=self._poet.version,
packages=packages,
features=features
)
def _get_pythons_for_package(self, name, reversed_dependencies, deps):
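        # A package's Python restriction is the union of the restrictions of
        # every package that depends on it; a top-level dependency uses its
        # own declared versions, defaulting to '*' (any Python).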
pythons = set()
if name not in reversed_dependencies:
# Main dependency
for dep in deps:
if name == dep.name:
for p in dep.python:
pythons.add(str(p))
break
if not len(pythons):
pythons.add('*')
return pythons
parents = reversed_dependencies[name]
for parent in parents:
parent_pythons = self._get_pythons_for_package(
parent, reversed_dependencies, deps
)
pythons = pythons.union(parent_pythons)
if not len(pythons):
pythons.add('*')
return pythons
def _call(self, cmd, error_message):
try:
return call(cmd)
except subprocess.CalledProcessError as e:
raise Exception(error_message + ' ({})'.format(str(e)))
def _progress(self, cmd, start_message, end_message, default_message, error_message):
if not self._with_progress:
self._command.line(default_message)
return self._call(cmd, error_message)
with self._spin(start_message, end_message):
return self._call(cmd, error_message)
def _spin(self, start_message, end_message):
return self._command.spin(start_message, end_message)
| mit | -7,657,989,580,882,023,000 | 31.637961 | 103 | 0.520274 | false |
sameersingh/uci-statnlp | hw2/generator.py | 1 | 2604 | #!/bin/python
from __future__ import print_function
from lm import LangModel
import random
import numpy as np
class Sampler:
def __init__(self, lm, temp = 1.0):
"""Sampler for a given language model.
Supports the use of temperature, i.e. how peaky we want to treat the
distribution as. Temperature of 1 means no change, temperature <1 means
less randomness (samples high probability words even more), and temp>1
means more randomness (samples low prob words more than otherwise). See
simulated annealing for what this means.
"""
self.lm = lm
self.temp = temp
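        # Illustrative effect of temperature (values are examples):
        #   temp = 0.5 -> sharper distribution; likely words dominate
        #   temp = 2.0 -> flatter distribution; rare words sampled more often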
def sample_sentence(self, prefix = [], max_length = 100):
"""Sample a random sentence (list of words) from the language model.
Samples words till either EOS symbol is sampled or max_length is reached.
Does not make any assumptions about the length of the context.
"""
assert type(prefix) == list
sent = prefix
word = self.sample_next(sent, False)
while len(sent) <= max_length and word != "END_OF_SENTENCE":
sent.append(word)
word = self.sample_next(sent)
return sent
def sample_next(self, prev, incl_eos = True):
"""Samples a single word from context.
Can be useful to debug the model, for example if you have a bigram model,
and know the probability of X-Y should be really high, you can run
sample_next([Y]) to see how often X get generated.
incl_eos determines whether the space of words should include EOS or not.
"""
wps = []
tot = -np.inf # this is the log (total mass)
for w in self.lm.vocab():
if not incl_eos and w == "END_OF_SENTENCE":
continue
lp = self.lm.cond_logprob(w, prev, 0)
wps.append([w, lp/self.temp])
tot = np.logaddexp2(lp/self.temp, tot)
p = random.random()
word = random.choice(wps)[0]
s = -np.inf # running mass
for w,lp in wps:
s = np.logaddexp2(s, lp)
if p < pow(2, s-tot):
word = w
break
return word
if __name__ == "__main__":
from lm import Unigram
unigram = Unigram()
corpus = [
[ "sam", "i", "am" ]
]
unigram.fit_corpus(corpus)
print(unigram.model)
sampler = Sampler(unigram)
for i in range(10):
print(i, ":", " ".join(str(x) for x in sampler.sample_sentence([])))
| apache-2.0 | -1,307,023,713,031,869,000 | 32.72 | 81 | 0.566436 | false |
fbradyirl/home-assistant | homeassistant/components/hive/binary_sensor.py | 1 | 2420 | """Support for the Hive binary sensors."""
from homeassistant.components.binary_sensor import BinarySensorDevice
from . import DATA_HIVE, DOMAIN
DEVICETYPE_DEVICE_CLASS = {"motionsensor": "motion", "contactsensor": "opening"}
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Hive sensor devices."""
if discovery_info is None:
return
session = hass.data.get(DATA_HIVE)
add_entities([HiveBinarySensorEntity(session, discovery_info)])
class HiveBinarySensorEntity(BinarySensorDevice):
"""Representation of a Hive binary sensor."""
def __init__(self, hivesession, hivedevice):
"""Initialize the hive sensor."""
self.node_id = hivedevice["Hive_NodeID"]
self.node_name = hivedevice["Hive_NodeName"]
self.device_type = hivedevice["HA_DeviceType"]
self.node_device_type = hivedevice["Hive_DeviceType"]
self.session = hivesession
self.attributes = {}
self.data_updatesource = "{}.{}".format(self.device_type, self.node_id)
self._unique_id = "{}-{}".format(self.node_id, self.device_type)
self.session.entities.append(self)
@property
def unique_id(self):
"""Return unique ID of entity."""
return self._unique_id
@property
def device_info(self):
"""Return device information."""
return {"identifiers": {(DOMAIN, self.unique_id)}, "name": self.name}
def handle_update(self, updatesource):
"""Handle the new update request."""
if "{}.{}".format(self.device_type, self.node_id) not in updatesource:
self.schedule_update_ha_state()
@property
def device_class(self):
"""Return the class of this sensor."""
return DEVICETYPE_DEVICE_CLASS.get(self.node_device_type)
@property
def name(self):
"""Return the name of the binary sensor."""
return self.node_name
@property
def device_state_attributes(self):
"""Show Device Attributes."""
return self.attributes
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self.session.sensor.get_state(self.node_id, self.node_device_type)
def update(self):
"""Update all Node data from Hive."""
self.session.core.update_data(self.node_id)
self.attributes = self.session.attributes.state_attributes(self.node_id)
| apache-2.0 | 6,885,640,137,103,015,000 | 33.084507 | 81 | 0.645868 | false |
gilbertpilz/solum | solum/tests/api/camp/test_camp_v1_1_endpoint.py | 1 | 1393 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from solum import objects
from solum.tests import base
from solum.tests import fakes
from solum.api.controllers.camp import camp_v1_1_endpoint
@mock.patch('pecan.request', new_callable=fakes.FakePecanRequest)
@mock.patch('pecan.response', new_callable=fakes.FakePecanResponse)
class TestCAMPEndpoint(base.BaseTestCase):
def setUp(self):
super(TestCAMPEndpoint, self).setUp()
objects.load()
def test_camp_endpoint_get(self, resp_mock, request_mock):
fake_platform_endpoint = fakes.FakeCAMPEndpoint()
cont = camp_v1_1_endpoint.Controller()
resp = cont.index()
self.assertEqual(200, resp_mock.status)
self.assertEqual(fake_platform_endpoint.name, resp['result'].name)
self.assertEqual(fake_platform_endpoint.type, resp['result'].type)
| apache-2.0 | 9,157,043,422,705,384,000 | 36.648649 | 75 | 0.731515 | false |
vquinones/admindemo | content/models.py | 1 | 7173 | from django.db import models
from django.conf import settings
from django.utils import timezone
from easy_thumbnails.files import get_thumbnailer
from easy_thumbnails.fields import ThumbnailerImageField
from easy_thumbnails.signals import saved_file
from easy_thumbnails.signal_handlers import generate_aliases_global
saved_file.connect(generate_aliases_global)
SOCIAL_CHANNELS = (
('chatter', 'Chatter'),
('instagram', 'Instagram'),
('twitter', 'Twitter'),
)
class BaseModel(models.Model):
"""
    Abstract base model providing created and last-modified timestamps.
"""
last_modified = models.DateTimeField(auto_now=True,
default=timezone.now)
created = models.DateTimeField(auto_now_add=True,
default=timezone.now)
last_modified.db_index = True
created.db_index = True
ordering = ('-last_modified',)
class Meta:
abstract = True
class ImageMixin(models.Model):
image = ThumbnailerImageField(upload_to='uploads/images')
class Meta:
abstract = True
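    # The 'alias' argument names a thumbnail size; aliases such as
    # 'thumbnail' and 'large_thumbnail' are assumed to be defined in the
    # project's THUMBNAIL_ALIASES setting ('original' returns the raw image).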
def image_absolute_url(self, alias='original'):
if self.image:
if alias == 'original':
url = self.image.url
else:
url = get_thumbnailer(self.image)[alias].url
return settings.BASE_IMAGE_URL + url
else:
return None
def large_image_thumbnail(self):
if self.image:
url = settings.BASE_IMAGE_URL +\
get_thumbnailer(self.image)['large_thumbnail'].url
return '<img src="%s" />' % url
else:
return None
large_image_thumbnail.allow_tags = True
large_image_thumbnail.short_description = "Image"
def image_thumbnail(self):
if self.image:
url = settings.BASE_IMAGE_URL +\
get_thumbnailer(self.image)['thumbnail'].url
return '<img src="%s" />' % url
else:
return None
image_thumbnail.allow_tags = True
image_thumbnail.short_description = "Thumbnail"
class VideoMixin(models.Model):
video = models.FileField(upload_to='uploads/videos', null=True,
blank=True)
class Meta:
abstract = True
def video_absolute_url(self):
if self.video:
return settings.BASE_IMAGE_URL + self.video.url
else:
return None
video_absolute_url.allow_tags = True
video_absolute_url.short_description = "Video URL"
class Hashtag(BaseModel):
name = models.CharField(unique=True, max_length=255)
name.db_index = True
def __unicode__(self):
return self.name
class Image(BaseModel, ImageMixin):
title = models.CharField(max_length=255)
def __unicode__(self):
return self.title
class WordCloudTerm(BaseModel):
"""
Radian6 word cloud data
"""
term = models.CharField(max_length=255)
weight = models.IntegerField(default=0)
def __unicode__(self):
return self.term
class Demographic(BaseModel):
"""
Radian6 demographic data
"""
label = models.CharField(max_length=255)
count = models.IntegerField(default=0)
type = models.CharField(max_length=255)
def __unicode__(self):
return self.label
class Influencer(BaseModel, ImageMixin):
"""
Radian6 influencer data
"""
handle = models.CharField(max_length=255)
score = models.IntegerField(default=0)
def __unicode__(self):
return self.handle
class SocialUser(BaseModel, ImageMixin):
original_id = models.CharField(max_length=255)
channel = models.CharField(max_length=255, choices=SOCIAL_CHANNELS)
username = models.CharField(max_length=255, blank=True)
first_name = models.CharField(max_length=255, blank=True)
last_name = models.CharField(max_length=255, blank=True)
# Indexes
original_id.db_index = True
class Meta:
unique_together = ('original_id', 'channel',)
def __unicode__(self):
if self.username:
return self.username
else:
return self.first_name + ' ' + self.last_name
class SocialItem(BaseModel, ImageMixin, VideoMixin):
MEDIA_TYPES = (
('image', 'Image'),
('video', 'Video'),
)
ITEM_STATES = (
(0, 'Unpublished'),
(1, 'Deleted'),
(2, 'Published')
)
original_id = models.CharField(max_length=255)
channel = models.CharField(max_length=255, choices=SOCIAL_CHANNELS)
state = models.SmallIntegerField(choices=ITEM_STATES, default=0)
user = models.ForeignKey(SocialUser, related_name='social_item_user')
text = models.TextField(blank=True)
media_type = models.CharField(max_length=255, choices=MEDIA_TYPES,
blank=True)
media_caption = models.TextField(blank=True)
hashtags = models.ManyToManyField(Hashtag, null=True,
related_name='social_item_hashtag')
likes = models.IntegerField(default=0)
shares = models.IntegerField(default=0)
created_at = models.DateTimeField()
# Indexes
original_id.db_index = True
channel.db_index = True
class Meta:
unique_together = ('original_id', 'channel',)
def get_media(self):
if self.media_type == 'image':
if self.image:
url = settings.BASE_IMAGE_URL +\
get_thumbnailer(self.image)['large_thumbnail'].url
return '<img src="%s" />' % url
else:
return ''
else:
if self.video:
url = settings.BASE_IMAGE_URL + self.video.url
return '<video width="200" height="200" src="%s" autoplay loop />' % url
else:
return ''
get_media.allow_tags = True
get_media.short_description = "Media"
def user_profile_image(self):
if self.user.image:
url = settings.BASE_IMAGE_URL +\
get_thumbnailer(self.user.image)['thumbnail'].url
return '<img src="%s" />' % url
else:
return None
user_profile_image.allow_tags = True
user_profile_image.short_description = "Profile Image"
class SponsorType(BaseModel):
name = models.CharField(max_length=255)
def __unicode__(self):
return self.name
class Sponsor(BaseModel, ImageMixin):
name = models.CharField(max_length=255)
type = models.ForeignKey(SponsorType, related_name='sponsor_type')
def __unicode__(self):
return self.name
class Video(BaseModel, VideoMixin):
COLORS = (
('009DDC', 'Blue'),
('959595', 'Gray'),
('B5D334', 'Green'),
('E98300', 'Orange'),
('3FCFD5', 'Teal'),
('FECB00', 'Yellow'),
)
title = models.CharField(max_length=255)
override_color = models.CharField(max_length=6, choices=COLORS,
default='', null=True, blank=True)
duration = models.FloatField(default=15, blank=True, null=True)
has_alpha = models.BooleanField(default=False)
def __unicode__(self):
return self.title
| gpl-2.0 | 7,148,862,275,792,738,000 | 27.019531 | 88 | 0.602677 | false |
twilio/twilio-python | tests/integration/sync/v1/service/sync_map/test_sync_map_permission.py | 1 | 7856 | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class SyncMapPermissionTestCase(IntegrationTestCase):
def test_fetch_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.sync.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_maps("MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_map_permissions("identity").fetch()
self.holodeck.assert_has_request(Request(
'get',
'https://sync.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Maps/MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Permissions/identity',
))
def test_fetch_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"map_sid": "MPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identity": "identity",
"read": true,
"write": true,
"manage": true,
"url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Maps/MPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Permissions/identity"
}
'''
))
actual = self.client.sync.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_maps("MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_map_permissions("identity").fetch()
self.assertIsNotNone(actual)
def test_delete_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.sync.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_maps("MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_map_permissions("identity").delete()
self.holodeck.assert_has_request(Request(
'delete',
'https://sync.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Maps/MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Permissions/identity',
))
def test_delete_response(self):
self.holodeck.mock(Response(
204,
None,
))
actual = self.client.sync.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_maps("MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_map_permissions("identity").delete()
self.assertTrue(actual)
def test_list_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.sync.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_maps("MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_map_permissions.list()
self.holodeck.assert_has_request(Request(
'get',
'https://sync.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Maps/MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Permissions',
))
def test_read_empty_response(self):
self.holodeck.mock(Response(
200,
'''
{
"permissions": [],
"meta": {
"first_page_url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Maps/sidOrUniqueName/Permissions?PageSize=50&Page=0",
"key": "permissions",
"next_page_url": null,
"page": 0,
"page_size": 50,
"previous_page_url": null,
"url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Maps/sidOrUniqueName/Permissions?PageSize=50&Page=0"
}
}
'''
))
actual = self.client.sync.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_maps("MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_map_permissions.list()
self.assertIsNotNone(actual)
def test_read_full_response(self):
self.holodeck.mock(Response(
200,
'''
{
"permissions": [
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"map_sid": "MPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identity": "identity",
"read": true,
"write": true,
"manage": true,
"url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Maps/MPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Permissions/identity"
}
],
"meta": {
"first_page_url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Maps/sidOrUniqueName/Permissions?PageSize=50&Page=0",
"key": "permissions",
"next_page_url": null,
"page": 0,
"page_size": 50,
"previous_page_url": null,
"url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Maps/sidOrUniqueName/Permissions?PageSize=50&Page=0"
}
}
'''
))
actual = self.client.sync.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_maps("MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_map_permissions.list()
self.assertIsNotNone(actual)
def test_update_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.sync.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_maps("MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_map_permissions("identity").update(read=True, write=True, manage=True)
values = {'Read': True, 'Write': True, 'Manage': True, }
self.holodeck.assert_has_request(Request(
'post',
'https://sync.twilio.com/v1/Services/ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Maps/MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Permissions/identity',
data=values,
))
def test_update_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"service_sid": "ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"map_sid": "MPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
"identity": "identity",
"read": true,
"write": true,
"manage": true,
"url": "https://sync.twilio.com/v1/Services/ISaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Maps/MPaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Permissions/identity"
}
'''
))
actual = self.client.sync.v1.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_maps("MPXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.sync_map_permissions("identity").update(read=True, write=True, manage=True)
self.assertIsNotNone(actual)
| mit | -2,522,647,256,466,869,000 | 40.566138 | 164 | 0.55499 | false |
PurityControl/uchi-komi-python | problems/euler/0007-10001st-prime/ichi/primes.py | 1 | 1314 | def prime_at(position):
""" returns the nth prime number
args:
position: the nth numbered prime
"""
primes_list = list(take(position, primes()))
return primes_list[-1]
def take(num, seq):
"""Generator takes num items from seq
Args:
num: number of items to take from list
seq: the sequence to iterate over
"""
counter = 1
for item in seq:
if counter <= num:
yield item
else:
return
counter += 1
def primes():
""" Geneartor that lazily evaluates the next prime number.
Warning: This will produce an infinte sequence"""
def _end_of_primes_p(counter, primes):
return counter == len(primes) - 1
def _prime_p(num, primes):
for prime in primes:
if num % prime == 0:
return False
return True
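    # Every prime greater than 3 has the form 6k-1 or 6k+1, so only those
    # two candidates per k need to be trial-divided.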
def _get_kval_primes(k_val, primes):
return [x for x in [k_val*6-1, k_val*6+1] if _prime_p(x, primes)]
counter = 0
primes = [2, 3]
k_val = 1
while True:
while _end_of_primes_p(counter, primes):
next_primes = _get_kval_primes(k_val, list(primes))
k_val += 1
for prime in next_primes:
primes.append(prime)
yield primes[counter]
counter += 1
| mit | 4,598,691,187,448,797,700 | 22.890909 | 74 | 0.552511 | false |
TwistedHardware/roshanRush | operation/migrations/0003_auto__chg_field_location_parent.py | 1 | 3186 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Location.parent'
db.alter_column(u'operation_location', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['operation.Location'], null=True))
def backwards(self, orm):
# Changing field 'Location.parent'
db.alter_column(u'operation_location', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['operation.Location']))
models = {
u'operation.link': {
'Meta': {'object_name': 'Link'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
u'operation.location': {
'Meta': {'object_name': 'Location'},
'help': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['operation.Location']", 'null': 'True', 'blank': 'True'})
},
u'operation.operation': {
'Meta': {'object_name': 'Operation'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['operation.Location']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'operation_code': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'operation.operationlink': {
'Meta': {'object_name': 'OperationLink'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'link': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['operation.Link']"}),
'operation': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['operation.Operation']"})
},
u'operation.operationparameter': {
'Meta': {'object_name': 'OperationParameter'},
'default_value': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'help': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'operation': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['operation.Operation']"})
}
}
complete_apps = ['operation'] | gpl-2.0 | -2,541,884,486,197,036,000 | 53.948276 | 155 | 0.567797 | false |