repo_name (stringlengths 5-100) | ref (stringlengths 12-67) | path (stringlengths 4-244) | copies (stringlengths 1-8) | content (stringlengths 0-1.05M, ⌀) |
---|---|---|---|---|
elit3ge/SickRage | refs/heads/master | lib/sqlalchemy/orm/evaluator.py | 79 | # orm/evaluator.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import operator
from ..sql import operators
class UnevaluatableError(Exception):
pass
_straight_ops = set(getattr(operators, op)
for op in ('add', 'mul', 'sub',
'div',
'mod', 'truediv',
'lt', 'le', 'ne', 'gt', 'ge', 'eq'))
_notimplemented_ops = set(getattr(operators, op)
for op in ('like_op', 'notlike_op', 'ilike_op',
'notilike_op', 'between_op', 'in_op',
'notin_op', 'endswith_op', 'concat_op'))
class EvaluatorCompiler(object):
def process(self, clause):
meth = getattr(self, "visit_%s" % clause.__visit_name__, None)
if not meth:
raise UnevaluatableError(
"Cannot evaluate %s" % type(clause).__name__)
return meth(clause)
def visit_grouping(self, clause):
return self.process(clause.element)
def visit_null(self, clause):
return lambda obj: None
def visit_false(self, clause):
return lambda obj: False
def visit_true(self, clause):
return lambda obj: True
def visit_column(self, clause):
if 'parentmapper' in clause._annotations:
key = clause._annotations['parentmapper'].\
_columntoproperty[clause].key
else:
key = clause.key
get_corresponding_attr = operator.attrgetter(key)
return lambda obj: get_corresponding_attr(obj)
def visit_clauselist(self, clause):
evaluators = list(map(self.process, clause.clauses))
if clause.operator is operators.or_:
def evaluate(obj):
has_null = False
for sub_evaluate in evaluators:
value = sub_evaluate(obj)
if value:
return True
has_null = has_null or value is None
if has_null:
return None
return False
elif clause.operator is operators.and_:
def evaluate(obj):
for sub_evaluate in evaluators:
value = sub_evaluate(obj)
if not value:
if value is None:
return None
return False
return True
else:
raise UnevaluatableError(
"Cannot evaluate clauselist with operator %s" %
clause.operator)
return evaluate
def visit_binary(self, clause):
eval_left, eval_right = list(map(self.process,
[clause.left, clause.right]))
operator = clause.operator
if operator is operators.is_:
def evaluate(obj):
return eval_left(obj) == eval_right(obj)
elif operator is operators.isnot:
def evaluate(obj):
return eval_left(obj) != eval_right(obj)
elif operator in _straight_ops:
def evaluate(obj):
left_val = eval_left(obj)
right_val = eval_right(obj)
if left_val is None or right_val is None:
return None
return operator(left_val, right_val)
else:
raise UnevaluatableError(
"Cannot evaluate %s with operator %s" %
(type(clause).__name__, clause.operator))
return evaluate
def visit_unary(self, clause):
eval_inner = self.process(clause.element)
if clause.operator is operators.inv:
def evaluate(obj):
value = eval_inner(obj)
if value is None:
return None
return not value
return evaluate
raise UnevaluatableError(
"Cannot evaluate %s with operator %s" %
(type(clause).__name__, clause.operator))
def visit_bindparam(self, clause):
val = clause.value
return lambda obj: val
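# --- Illustrative sketch (not part of the original module) -------------------
# The AND/OR evaluators compiled above follow SQL three-valued logic, with None
# standing in for NULL.  A standalone restatement of the OR rule, assuming the
# sub-evaluators have already been reduced to plain Python values:
def _demo_three_valued_or(values):
    has_null = False
    for value in values:
        if value:
            return True  # any truthy operand decides the OR
        has_null = has_null or value is None
    return None if has_null else False  # unknown unless every operand was False

if __name__ == '__main__':
    assert _demo_three_valued_or([False, None]) is None
    assert _demo_three_valued_or([None, True]) is True
    assert _demo_three_valued_or([False, False]) is False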
|
gnarula/eden_deployment | refs/heads/master | controllers/setup.py | 1 | # -*- coding: utf-8 -*-
import os
"""
Setup Tool
"""
module = request.controller
resourcename = request.function
if not settings.has_module(module):
raise HTTP(404, body="Module disabled: %s" % module)
def index():
""" Show the index """
return dict()
def local_deploy():
s3db.configure("setup_deploy", onvalidation=schedule_local)
def prep(r):
resource = r.resource
query = (db.setup_deploy.type == "local")
# make some fields optional
db.setup_deploy.remote_user.required = False
db.setup_deploy.remote_user.writable = False
db.setup_deploy.remote_user.readable = False
db.setup_deploy.private_key.required = False
db.setup_deploy.private_key.writable = False
db.setup_deploy.private_key.readable = False
resource.add_filter(query)
if r.method in ("create", None):
appname = request.application
s3.scripts.append("/%s/static/scripts/S3/s3.setup.js" % appname)
return True
s3.prep = prep
def postp(r, output):
db.setup_deploy.prepop_options.requires = None
if r.method == "read":
record = r.record
output["item"][0].append(TR(TD(LABEL("Status"), _class="w2p_fl")))
output["item"][0].append(TR(TD(record.scheduler_id.status)))
if record.scheduler_id.status == "FAILED":
resource = s3db.resource("scheduler_run")
row = db(resource.table.task_id == record.scheduler_id).select().first()
output["item"][0].append(TR(TD(LABEL("Traceback"), _class="w2p_fl")))
output["item"][0].append(TR(TD(row.traceback)))
output["item"][0].append(TR(TD(LABEL("Output"), _class="w2p_fl")))
output["item"][0].append(TR(TD(row.run_output)))
return output
s3.postp = postp
return s3_rest_controller("setup", "deploy",
populate=dict(host="127.0.0.1",
sitename=current.deployment_settings.get_base_public_url()
),
rheader=s3db.setup_rheader
)
def remote_deploy():
s3db.configure("setup_deploy", onvalidation=schedule_remote)
def prep(r):
resource = r.resource
query = (db.setup_deploy.type == "remote")
resource.add_filter(query)
if r.method in ("create", None):
appname = request.application
s3.scripts.append("/%s/static/scripts/S3/s3.setup.js" % appname)
return True
s3.prep = prep
def postp(r, output):
db.setup_deploy.prepop_options.requires = None
if r.method == "read":
record = r.record
output["item"][0].append(TR(TD(LABEL("Status"), _class="w2p_fl")))
output["item"][0].append(TR(TD(record.scheduler_id.status)))
if record.scheduler_id.status == "FAILED":
resource = s3db.resource("scheduler_run")
row = db(resource.table.task_id == record.scheduler_id).select().first()
output["item"][0].append(TR(TD(LABEL("Traceback"), _class="w2p_fl")))
output["item"][0].append(TR(TD(row.traceback)))
output["item"][0].append(TR(TD(LABEL("Output"), _class="w2p_fl")))
output["item"][0].append(TR(TD(row.run_output)))
return output
s3.postp = postp
return s3_rest_controller("setup", "deploy", rheader=s3db.setup_rheader)
def schedule_local(form):
"""
Schedule a deployment using s3task.
"""
# ToDo: support repo
# Check if already deployed using coapp
resource = s3db.resource("setup_deploy")
rows = db(resource.table.type == "local").select()
prod = False
for row in rows:
if row.scheduler_id.status == "COMPLETED":
if row.prepop == form.vars.prepop:
form.errors["prepop"] = "%s site has been installed previously" % row.prepop
return
if row.prepop == "prod":
prod = True
elif row.scheduler_id.status == "RUNNING" or row.scheduler_id.status == "ASSIGNED":
form.errors["host"] = "Another Local Deployment is running. Please wait for it to complete"
return
if form.vars.prepop == "test" and not prod:
form.errors["prepop"] = "Production site must be installed before test"
return
if form.vars.prepop == "demo" and not prod:
demo_type = "beforeprod"
elif form.vars.prepop == "demo" and prod:
demo_type = "afterprod"
else:
demo_type = None
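# Summary of the branch above (comment added for clarity): a production ("prod")
# site must already be installed before "test"; a "demo" deployment is created as
# "beforeprod" when no production site exists yet and as "afterprod" once one does;
# any other prepop value leaves demo_type as None.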
row = s3db.setup_create_yaml_file(
"127.0.0.1",
form.vars.password,
form.vars.web_server,
form.vars.database_type,
form.vars.prepop,
','.join(form.vars.prepop_options),
form.vars.distro,
True,
form.vars.hostname,
form.vars.template,
form.vars.sitename,
demo_type=demo_type
)
form.vars["scheduler_id"] = row.id
form.vars["type"] = "local"
def schedule_remote(form):
"""
Schedule a deployment using s3task.
"""
# ToDo: support repo
# Check if already deployed using coapp
resource = s3db.resource("setup_deploy")
rows = db((resource.table.type == "remote") & (resource.table.host == form.vars.host)).select()
prod = False
for row in rows:
if row.scheduler_id.status == "COMPLETED":
if row.prepop == form.vars.prepop:
form.errors["prepop"] = "%s site has been installed previously" % row.prepop
return
if row.prepop == "prod":
prod = True
elif row.scheduler_id.status in ("RUNNING", "ASSIGNED", "QUEUED"):
form.errors["host"] = "Another Local Deployment is running. Please wait for it to complete"
return
if form.vars.prepop == "test" and not prod:
form.errors["prepop"] = "Production site must be installed before test"
return
if form.vars.prepop == "demo" and not prod:
demo_type = "beforeprod"
elif form.vars.prepop == "demo" and prod:
demo_type = "afterprod"
else:
demo_type = None
row = s3db.setup_create_yaml_file(
form.vars.host,
form.vars.password,
form.vars.web_server,
form.vars.database_type,
form.vars.prepop,
','.join(form.vars.prepop_options),
form.vars.distro,
False,
form.vars.hostname,
form.vars.template,
form.vars.sitename,
os.path.join(request.folder, "uploads", form.vars.private_key.filename),
form.vars.remote_user,
demo_type=demo_type,
)
form.vars["scheduler_id"] = row.id
form.vars["type"] = "remote"
def prepop_setting():
if request.ajax:
template = request.post_vars.get("template")
module_name = "applications.eden_deployment.private.templates.%s.config" % template
__import__(module_name)
config = sys.modules[module_name]
prepopulate_options = config.settings.base.get("prepopulate_options")
if isinstance(prepopulate_options, dict):
if "mandatory" in prepopulate_options:
del prepopulate_options["mandatory"]
return json.dumps(prepopulate_options.keys())
else:
return json.dumps(["mandatory"])
def refresh():
try:
id = request.args[0]
except:
current.session.error = T("Record Not Found")
redirect(URL(c="setup", f="index"))
result = s3db.setup_refresh(id)
if result["success"]:
current.session.flash = result["msg"]
redirect(URL(c="setup", f=result["f"], args=result["args"]))
else:
current.session.error = result["msg"]
redirect(URL(c="setup", f=result["f"], args=result["args"]))
def upgrade_status():
if request.ajax:
_id = request.post_vars.get("id")
status = s3db.setup_upgrade_status(_id)
if status:
return json.dumps(status)
|
goldeneye-source/ges-python | refs/heads/master | lib/ctypes/test/test_parameters.py | 11 | import unittest, sys
class SimpleTypesTestCase(unittest.TestCase):
def setUp(self):
import ctypes
try:
from _ctypes import set_conversion_mode
except ImportError:
pass
else:
self.prev_conv_mode = set_conversion_mode("ascii", "strict")
def tearDown(self):
try:
from _ctypes import set_conversion_mode
except ImportError:
pass
else:
set_conversion_mode(*self.prev_conv_mode)
def test_subclasses(self):
from ctypes import c_void_p, c_char_p
# ctypes 0.9.5 and before did overwrite from_param in SimpleType_new
class CVOIDP(c_void_p):
def from_param(cls, value):
return value * 2
from_param = classmethod(from_param)
class CCHARP(c_char_p):
def from_param(cls, value):
return value * 4
from_param = classmethod(from_param)
self.assertEqual(CVOIDP.from_param("abc"), "abcabc")
self.assertEqual(CCHARP.from_param("abc"), "abcabcabcabc")
try:
from ctypes import c_wchar_p
except ImportError:
return
class CWCHARP(c_wchar_p):
def from_param(cls, value):
return value * 3
from_param = classmethod(from_param)
self.assertEqual(CWCHARP.from_param("abc"), "abcabcabc")
# XXX Replace by c_char_p tests
def test_cstrings(self):
from ctypes import c_char_p, byref
# c_char_p.from_param on a Python String packs the string
# into a cparam object
s = b"123"
self.assertIs(c_char_p.from_param(s)._obj, s)
# new in 0.9.1: convert (encode) unicode to ascii
self.assertEqual(c_char_p.from_param(b"123")._obj, b"123")
self.assertRaises(TypeError, c_char_p.from_param, "123\377")
self.assertRaises(TypeError, c_char_p.from_param, 42)
# calling c_char_p.from_param with a c_char_p instance
# returns the argument itself:
a = c_char_p(b"123")
self.assertIs(c_char_p.from_param(a), a)
def test_cw_strings(self):
from ctypes import byref
try:
from ctypes import c_wchar_p
except ImportError:
## print "(No c_wchar_p)"
return
c_wchar_p.from_param("123")
self.assertRaises(TypeError, c_wchar_p.from_param, 42)
self.assertRaises(TypeError, c_wchar_p.from_param, b"123\377")
pa = c_wchar_p.from_param(c_wchar_p("123"))
self.assertEqual(type(pa), c_wchar_p)
def test_int_pointers(self):
from ctypes import c_short, c_uint, c_int, c_long, POINTER, pointer
LPINT = POINTER(c_int)
## p = pointer(c_int(42))
## x = LPINT.from_param(p)
x = LPINT.from_param(pointer(c_int(42)))
self.assertEqual(x.contents.value, 42)
self.assertEqual(LPINT(c_int(42)).contents.value, 42)
self.assertEqual(LPINT.from_param(None), None)
if c_int != c_long:
self.assertRaises(TypeError, LPINT.from_param, pointer(c_long(42)))
self.assertRaises(TypeError, LPINT.from_param, pointer(c_uint(42)))
self.assertRaises(TypeError, LPINT.from_param, pointer(c_short(42)))
def test_byref_pointer(self):
# The from_param class method of POINTER(typ) classes accepts what is
# returned by byref(obj), if type(obj) == typ
from ctypes import c_short, c_uint, c_int, c_long, pointer, POINTER, byref
LPINT = POINTER(c_int)
LPINT.from_param(byref(c_int(42)))
self.assertRaises(TypeError, LPINT.from_param, byref(c_short(22)))
if c_int != c_long:
self.assertRaises(TypeError, LPINT.from_param, byref(c_long(22)))
self.assertRaises(TypeError, LPINT.from_param, byref(c_uint(22)))
def test_byref_pointerpointer(self):
# See above
from ctypes import c_short, c_uint, c_int, c_long, pointer, POINTER, byref
LPLPINT = POINTER(POINTER(c_int))
LPLPINT.from_param(byref(pointer(c_int(42))))
self.assertRaises(TypeError, LPLPINT.from_param, byref(pointer(c_short(22))))
if c_int != c_long:
self.assertRaises(TypeError, LPLPINT.from_param, byref(pointer(c_long(22))))
self.assertRaises(TypeError, LPLPINT.from_param, byref(pointer(c_uint(22))))
def test_array_pointers(self):
from ctypes import c_short, c_uint, c_int, c_long, POINTER
INTARRAY = c_int * 3
ia = INTARRAY()
self.assertEqual(len(ia), 3)
self.assertEqual([ia[i] for i in range(3)], [0, 0, 0])
# Pointers are only compatible with arrays containing items of
# the same type!
LPINT = POINTER(c_int)
LPINT.from_param((c_int*3)())
self.assertRaises(TypeError, LPINT.from_param, c_short*3)
self.assertRaises(TypeError, LPINT.from_param, c_long*3)
self.assertRaises(TypeError, LPINT.from_param, c_uint*3)
## def test_performance(self):
## check_perf()
def test_noctypes_argtype(self):
import _ctypes_test
from ctypes import CDLL, c_void_p, ArgumentError
func = CDLL(_ctypes_test.__file__)._testfunc_p_p
func.restype = c_void_p
# TypeError: has no from_param method
self.assertRaises(TypeError, setattr, func, "argtypes", (object,))
class Adapter(object):
def from_param(cls, obj):
return None
func.argtypes = (Adapter(),)
self.assertEqual(func(None), None)
self.assertEqual(func(object()), None)
class Adapter(object):
def from_param(cls, obj):
return obj
func.argtypes = (Adapter(),)
# don't know how to convert parameter 1
self.assertRaises(ArgumentError, func, object())
self.assertEqual(func(c_void_p(42)), 42)
class Adapter(object):
def from_param(cls, obj):
raise ValueError(obj)
func.argtypes = (Adapter(),)
# ArgumentError: argument 1: ValueError: 99
self.assertRaises(ArgumentError, func, 99)
################################################################
if __name__ == '__main__':
unittest.main()
|
bingsyslab/360projection | refs/heads/master | cube.py | 1 | import cv2
import numpy as np
def deg2rad(d):
return float(d) * np.pi / 180
def xrotation(th):
c = np.cos(th)
s = np.sin(th)
return np.array([[1, 0, 0], [0, c, s], [0, -s, c]])
def yrotation(th):
c = np.cos(th)
s = np.sin(th)
return np.array([[c, 0, s], [0, 1, 0], [-s, 0, c]])
class Face:
def __init__(self, descr, img, pv, xprj, yprj, expand_coef, rotate, yaw=0, pitch=0):
self.img = img
self.descr = descr
(self.ih, self.iw, _) = img.shape
self.pv = pv
self.plane_pt = np.copy(pv)
self.k = np.dot(self.plane_pt, self.pv)
self.xprj = xprj
self.yprj = yprj
self.expand_coef = expand_coef
self.rotate = rotate
self.yaw = yaw
self.pitch = pitch
m = np.dot(yrotation(yaw), xrotation(pitch))
self.pv = np.dot(m, self.pv)
self.plane_pt = np.dot(m, self.plane_pt)
self.xprj = np.dot(m, self.xprj)
self.yprj = np.dot(m, self.yprj)
def intersect(self, lv, pr=False):
"""
lv - line vector
pv - plane vector
k - plane constant
plane equation: x \cdot pv = k
finds the vector where dot(lv*t, pv) = k
"""
d = np.dot(lv, self.pv)
if d == 0:
# parallel lines
self.ivmag2 = np.inf
return
t = self.k / d
if t < 0:
# Each ray should intersect with both
# a positive face and negative face.
# We only want the positive face.
self.ivmag2 = np.inf
return
self.iv = lv * t
self.ivmag2 = np.dot(self.iv, self.iv)
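# Worked example (illustrative): with yaw = pitch = 0 the front face keeps
# pv = plane_pt = (0, 0, 1) and k = 1.  A unit view ray lv = (0.6, 0, 0.8)
# gives d = 0.8, t = k / d = 1.25, so the ray meets the z = 1 plane at
# iv = (0.75, 0, 1.0).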
def get_face_prj(self):
a = np.array([[0., 0., 0., -np.dot(self.plane_pt, self.xprj)],
[0., 0., 0., -np.dot(self.plane_pt, self.yprj)],
[0., 0., 0., 1.]])
a[0, :3] = self.xprj
a[1, :3] = self.yprj
return a.T
def get_img_prj(self):
ec = self.expand_coef
ih = self.ih * .5
iw = self.iw * .5
if not self.rotate:
return np.array([[iw / ec, 0., iw],
[0., -ih / ec, ih]]).T
else:
return np.array([[0., iw / ec, iw - 1],
[ih / ec, 0., ih]]).T
def get_isect_pixel(self):
"""
returns the pixel associated with the plane intersecting vector, self.iv
Note that plane faces are 2 by 2 for a sphere of radius 1.
"""
if self.ivmag2 == np.inf:
raise ValueError('no face intersection for this ray')
y = int (np.round((.5 - np.dot(self.iv - self.plane_pt, self.yprj) / self.expand_coef * .5) * self.ih))
if y < 0.: y = 0
if y >= self.ih: y = self.ih - 1
x = int (np.round((.5 + np.dot(self.iv - self.plane_pt, self.xprj) / self.expand_coef * .5) * self.iw))
if x < 0.: x = 0
if x >= self.iw: x = self.iw - 1
if not self.rotate:
return self.img[y, x]
else:
return self.img[x, self.iw - 1 - y]
class Cube:
def __init__(self, img, expand_coef, offcenter_z, yaw, pitch, pl_type=False, is_stereo=True):
[self.InitMono, self.InitStereo][is_stereo](img, expand_coef, offcenter_z, yaw, pitch, pl_type)
def InitStereo(self, img, expand_coef, offcenter_z, yaw, pitch, pl_type):
self.InitMono(img[:, :img.shape[1] / 2], expand_coef, offcenter_z, yaw, pitch, pl_type)
def InitMono(self, img, expand_coef, offcenter_z, yaw, pitch, pl_type):
(h, w, _) = img.shape
self.offcenter_z = offcenter_z
b = 0
ROTATE = 1
NO_ROTATE = 0
a = np.array
if pl_type: # playlist
self.faces = [
Face('left', img[:h / 2, w / 3:w * 2 / 3], a([-1., 0., 0.]), a([ 0., 0., 1.]), a([0., 1., 0.]), expand_coef, NO_ROTATE, yaw, pitch),
Face('front', img[h / 2:, w / 3:w * 2 / 3], a([ 0., 0., 1.]), a([ 1., 0., 0.]), a([0., 1., 0.]), expand_coef, NO_ROTATE, yaw, pitch),
Face('right', img[:h / 2, :w / 3], a([ 1., 0., 0.]), a([ 0., 0., -1.]), a([0., 1., 0.]), expand_coef, NO_ROTATE, yaw, pitch),
Face('top', img[:h / 2, w * 2 / 3:], a([ 0., 1., 0.]), a([ 1., 0., 0.]), a([0., 0., -1.]), expand_coef, NO_ROTATE, yaw, pitch),
Face('back', img[h / 2:, w * 2 / 3:], a([ 0., 0., -1.]), a([-1., 0., 0.]), a([0., 1., 0.]), expand_coef, NO_ROTATE, yaw, pitch),
Face('bottom', img[h / 2:, :w / 3], a([ 0., -1., 0.]), a([ 1., 0., 0.]), a([0., 0., 1.]), expand_coef, NO_ROTATE, yaw, pitch),
]
else: # playlist_rotated_cubemap or playlist_dynamic_streaming
self.faces = [
Face('left', img[b:h / 3 - b, 0:w / 2], a([-1., 0., 0.]), a([ 0., 0., 1.]), a([0., 1., 0.]), expand_coef, ROTATE, yaw, pitch),
Face('front', img[h / 3 + b:h * 2 / 3 - b, 0:w / 2], a([ 0., 0., 1.]), a([ 1., 0., 0.]), a([0., 1., 0.]), expand_coef, ROTATE, yaw, pitch),
Face('right', img[h * 2 / 3 + b:h - b, 0:w / 2], a([ 1., 0., 0.]), a([ 0., 0., -1.]), a([0., 1., 0.]), expand_coef, ROTATE, yaw, pitch),
Face('top', img[b:h / 3 - b, w / 2:], a([ 0., 1., 0.]), a([-1., 0., 0.]), a([0., 0., 1.]), expand_coef, NO_ROTATE, yaw, pitch),
Face('back', img[h / 3 + b:h * 2 / 3 - b, w / 2:w], a([ 0., 0., -1.]), a([-1., 0., 0.]), a([0., 1., 0.]), expand_coef, NO_ROTATE, yaw, pitch),
Face('bottom', img[h * 2 / 3 + b:h - b, w / 2:], a([ 0., -1., 0.]), a([-1., 0., 0.]), a([0., 0., -1.]), expand_coef, NO_ROTATE, yaw, pitch),
]
self.img = img
self.front_face = self.faces[1].pv
self.face_vecs = np.zeros((3, 6))
for i, f in enumerate(self.faces):
self.face_vecs[:, i] = f.pv / f.k
def render_view(self, theta0, phi0, rendered_image, fov_h, fov_v):
"""
given yaw and pitch of head orientation, render view with fov_h * fov_v
"""
m = np.dot(yrotation(phi0), xrotation(theta0))
(height, width, _) = rendered_image.shape
DI = np.ones((height * width, 3), np.int)
trans = np.array([[2.*np.tan(fov_h / 2.) / float(width), 0., -np.tan(fov_h / 2.)],
[0., -2.*np.tan(fov_v / 2.) / float(height), np.tan(fov_v / 2.)]])
xx, yy = np.meshgrid(np.arange(width), np.arange(height))
DI[:, 0] = xx.reshape(height * width)
DI[:, 1] = yy.reshape(height * width)
v = np.ones((height * width, 3), np.float)
v[:, :2] = np.dot(DI, trans.T)
v = np.dot(v, m.T)
v = v / np.linalg.norm(v, ord=2, axis=1)[:, np.newaxis]
v += self.offcenter_z * self.front_face
t_inv = np.dot(v, self.face_vecs)
t_inv_mx = np.argmax(t_inv, 1)
for i, f in enumerate(self.faces):
fvecs = (t_inv_mx == i)
t_inv_i = t_inv[fvecs, i][:, np.newaxis]
if t_inv_i.shape[0] == 0: continue
pts = np.ones((t_inv_i.shape[0], 4), np.float)
pts[:, :3] = v[fvecs, :] / t_inv_i
pts = np.rint(np.dot(pts, np.dot(f.get_face_prj(), f.get_img_prj()))).astype(np.int)
rendered_image[DI[fvecs, 1], DI[fvecs, 0]] = f.img[pts[:, 1], pts[:, 0]]
def cube_to_equi(self, equi_image):
"""
generate an equirectangular image using the (offset) cube
if it is an offset cube, the generated equirectangular will clearly show
that areas where the front cube face is located is more detailed than other areas
"""
(height, width, _) = equi_image.shape
DI = np.ones((height * width, 3), np.int)
xx, yy = np.meshgrid(np.arange(width), np.arange(height))
DI[:, 0] = xx.reshape(height * width)
DI[:, 1] = yy.reshape(height * width)
trans = np.array([[2.*np.pi / float(width), 0., -np.pi],
[0., -np.pi / float(height), .5 * np.pi]])
phi_theta = np.dot(DI, trans.T)
c_theta = np.cos(phi_theta[:, 1])
s_theta = np.sin(phi_theta[:, 1])
c_phi = np.cos(phi_theta[:, 0])
s_phi = np.sin(phi_theta[:, 0])
zero = np.zeros(width * height, np.float)
rot = np.array([
[c_phi, -s_phi * s_theta, s_phi * c_theta],
[zero, c_theta, s_theta],
[-s_phi, -c_phi * s_theta, c_phi * c_theta]
])
rot = np.transpose(rot, (2, 0, 1))
v = np.dot(rot, np.array([0., 0., 1.]).T)
v += self.offcenter_z * self.front_face
t_inv = np.dot(v, self.face_vecs)
t_inv_mx = np.argmax(t_inv, 1)
for i, f in enumerate(self.faces):
fvecs = (t_inv_mx == i)
t_inv_i = t_inv[fvecs, i][:, np.newaxis]
if t_inv_i.shape[0] == 0: continue
pts = np.ones((t_inv_i.shape[0], 4), np.float)
pts[:, :3] = v[fvecs, :] / t_inv_i
pts = np.rint(np.dot(pts, np.dot(f.get_face_prj(), f.get_img_prj()))).astype(np.int)
equi_image[DI[fvecs, 1], DI[fvecs, 0]] = f.img[pts[:, 1], pts[:, 0]]
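# Mapping check (illustrative): with the trans matrix above, pixel (0, 0) maps to
# (phi, theta) = (-pi, pi/2) -- the left edge of the equirectangular image at the
# top row -- while the image centre (width/2, height/2) maps to (0, 0), i.e. the
# forward viewing direction.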
def offaxis_cube_to_equi_np(img, yaw, pitch, expand_coef, offcenter_z):
equi_image = np.zeros((1000, 2000, 3), np.uint8)
Cube(img, expand_coef, offcenter_z, yaw, pitch).cube_to_equi(equi_image)
cv2.imwrite('nnnn_equi_image.jpg', equi_image)
def offaxis_cube_to_render_np(img, theta0, phi0, yaw, pitch, expand_coef, offcenter_z, fov_h, fov_v):
rendered_image = np.zeros((1000, 1000, 3), np.uint8)
Cube(img, expand_coef, offcenter_z, yaw, pitch).render_view(
deg2rad(theta0), deg2rad(phi0), rendered_image, deg2rad(fov_h), deg2rad(fov_v))
cv2.imwrite('rendered_image_%d_%d.bmp' % (theta0, phi0), rendered_image)
if __name__ == '__main__':
img = cv2.imread('../scene_1/scene00181-oculus.jpg')
expand_coef = 1.03125
offcenter_z = -0.7
# assume yaw and pitch of the center of cube's front face are both 0
# in rad
yaw = 0
pitch = 0
# draw offaxis cube onto equirectangular
offaxis_cube_to_equi_np(img, yaw, pitch, expand_coef, offcenter_z)
# field of view
fov_h = 90
fov_v = 90
# viewing angle
for theta0, phi0 in [(0, 0), (-45, 330)]:
offaxis_cube_to_render_np(
img, theta0, phi0, yaw, pitch, expand_coef, offcenter_z, fov_h, fov_v)
|
marcsans/cf-cold-start | refs/heads/master | src/similarity.py | 1 |
# coding: utf-8
# In[1]:
import numpy as np
import pandas as pd
# In[2]:
def similarity(U):
num_users = U.shape[0]
min_dist = 0.01
sim = np.eye(num_users)
for i in range(num_users):
for j in range(i):
s = 1 / (min_dist + np.arccos(np.dot(U[i,:], U[j,:].T)/(np.linalg.norm(U[i,:])*np.linalg.norm(U[j,:]))/np.pi))
sim[i,j] = s
sim[j,i] = s
return sim
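# Example usage (illustrative, not part of the original notebook export): build the
# pairwise similarity matrix for three small user-factor vectors.
if __name__ == '__main__':
    U_demo = np.array([[1.0, 0.0],
                       [0.0, 1.0],
                       [1.0, 1.0]])
    print(similarity(U_demo))  # 3x3 symmetric matrix, diagonal left at 1.0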
# In[ ]:
|
junix/powerline | refs/heads/develop | powerline/listers/vim.py | 18 | # vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
from powerline.theme import requires_segment_info
from powerline.bindings.vim import (current_tabpage, list_tabpages, vim_getbufoption)
try:
import vim
except ImportError:
vim = {}
def tabpage_updated_segment_info(segment_info, tabpage):
segment_info = segment_info.copy()
window = tabpage.window
buffer = window.buffer
segment_info.update(
tabpage=tabpage,
tabnr=tabpage.number,
window=window,
winnr=window.number,
window_id=int(window.vars.get('powerline_window_id', -1)),
buffer=buffer,
bufnr=buffer.number,
)
return segment_info
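# Illustrative shape of the result (comment added for clarity): the returned copy of
# segment_info gains 'tabpage', 'tabnr', 'window', 'winnr', 'window_id', 'buffer'
# and 'bufnr' keys, all referring to the given tab page rather than the current one.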
@requires_segment_info
def tablister(pl, segment_info, **kwargs):
'''List all tab pages in segment_info format
Specifically generates a list of segment info dictionaries with ``window``,
``winnr``, ``window_id``, ``buffer`` and ``bufnr`` keys set to tab-local
ones and additional ``tabpage`` and ``tabnr`` keys.
Adds either ``tab:`` or ``tab_nc:`` prefix to all segment highlight groups.
Works best with vim-7.4 or later: earlier versions miss tabpage object and
thus window objects are not available as well.
'''
cur_tabpage = current_tabpage()
cur_tabnr = cur_tabpage.number
def add_multiplier(tabpage, dct):
dct['priority_multiplier'] = 1 + (0.001 * abs(tabpage.number - cur_tabnr))
return dct
return (
(lambda tabpage, prefix: (
tabpage_updated_segment_info(segment_info, tabpage),
add_multiplier(tabpage, {'highlight_group_prefix': prefix})
))(tabpage, 'tab' if tabpage == cur_tabpage else 'tab_nc')
for tabpage in list_tabpages()
)
def buffer_updated_segment_info(segment_info, buffer):
segment_info = segment_info.copy()
segment_info.update(
window=None,
winnr=None,
window_id=None,
buffer=buffer,
bufnr=buffer.number,
)
return segment_info
@requires_segment_info
def bufferlister(pl, segment_info, show_unlisted=False, **kwargs):
'''List all buffers in segment_info format
Specifically generates a list of segment info dictionaries with ``buffer``
and ``bufnr`` keys set to buffer-specific ones, ``window``, ``winnr`` and
``window_id`` keys set to None.
Adds either ``buf:`` or ``buf_nc:`` prefix to all segment highlight groups.
:param bool show_unlisted:
True if unlisted buffers should be shown as well. Current buffer is
always shown.
'''
cur_buffer = vim.current.buffer
cur_bufnr = cur_buffer.number
def add_multiplier(buffer, dct):
dct['priority_multiplier'] = 1 + (0.001 * abs(buffer.number - cur_bufnr))
return dct
return (
(
buf_segment_info,
add_multiplier(buf_segment_info['buffer'], {'highlight_group_prefix': prefix})
)
for buf_segment_info, prefix in (
(
buffer_updated_segment_info(
segment_info,
buffer
),
('buf' if buffer is cur_buffer else 'buf_nc')
)
for buffer in vim.buffers
) if (
buf_segment_info['buffer'] is cur_buffer
or show_unlisted
or int(vim_getbufoption(buf_segment_info, 'buflisted'))
)
)
|
Avinash-Raj/appengine-django-skeleton | refs/heads/master | lib/django/core/management/commands/createcachetable.py | 342 | from django.conf import settings
from django.core.cache import caches
from django.core.cache.backends.db import BaseDatabaseCache
from django.core.management.base import BaseCommand, CommandError
from django.db import (
DEFAULT_DB_ALIAS, connections, models, router, transaction,
)
from django.db.utils import DatabaseError
from django.utils.encoding import force_text
class Command(BaseCommand):
help = "Creates the tables needed to use the SQL cache backend."
requires_system_checks = False
def add_arguments(self, parser):
parser.add_argument('args', metavar='table_name', nargs='*',
help='Optional table names. Otherwise, settings.CACHES is used to '
'find cache tables.')
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS,
help='Nominates a database onto which the cache tables will be '
'installed. Defaults to the "default" database.')
parser.add_argument('--dry-run', action='store_true', dest='dry_run',
help='Does not create the table, just prints the SQL that would '
'be run.')
def handle(self, *tablenames, **options):
db = options.get('database')
self.verbosity = int(options.get('verbosity'))
dry_run = options.get('dry_run')
if len(tablenames):
# Legacy behavior, tablename specified as argument
for tablename in tablenames:
self.create_table(db, tablename, dry_run)
else:
for cache_alias in settings.CACHES:
cache = caches[cache_alias]
if isinstance(cache, BaseDatabaseCache):
self.create_table(db, cache._table, dry_run)
def create_table(self, database, tablename, dry_run):
cache = BaseDatabaseCache(tablename, {})
if not router.allow_migrate_model(database, cache.cache_model_class):
return
connection = connections[database]
if tablename in connection.introspection.table_names():
if self.verbosity > 0:
self.stdout.write("Cache table '%s' already exists." % tablename)
return
fields = (
# "key" is a reserved word in MySQL, so use "cache_key" instead.
models.CharField(name='cache_key', max_length=255, unique=True, primary_key=True),
models.TextField(name='value'),
models.DateTimeField(name='expires', db_index=True),
)
table_output = []
index_output = []
qn = connection.ops.quote_name
for f in fields:
field_output = [qn(f.name), f.db_type(connection=connection)]
field_output.append("%sNULL" % ("NOT " if not f.null else ""))
if f.primary_key:
field_output.append("PRIMARY KEY")
elif f.unique:
field_output.append("UNIQUE")
if f.db_index:
unique = "UNIQUE " if f.unique else ""
index_output.append("CREATE %sINDEX %s ON %s (%s);" %
(unique, qn('%s_%s' % (tablename, f.name)), qn(tablename),
qn(f.name)))
table_output.append(" ".join(field_output))
full_statement = ["CREATE TABLE %s (" % qn(tablename)]
for i, line in enumerate(table_output):
full_statement.append(' %s%s' % (line, ',' if i < len(table_output) - 1 else ''))
full_statement.append(');')
full_statement = "\n".join(full_statement)
if dry_run:
self.stdout.write(full_statement)
for statement in index_output:
self.stdout.write(statement)
return
with transaction.atomic(using=database,
savepoint=connection.features.can_rollback_ddl):
with connection.cursor() as curs:
try:
curs.execute(full_statement)
except DatabaseError as e:
raise CommandError(
"Cache table '%s' could not be created.\nThe error was: %s." %
(tablename, force_text(e)))
for statement in index_output:
curs.execute(statement)
if self.verbosity > 1:
self.stdout.write("Cache table '%s' created." % tablename)
|
KaranToor/MA450 | refs/heads/master | google-cloud-sdk/lib/surface/genomics/datasets/restore.py | 6 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of gcloud genomics datasets restore.
"""
from googlecloudsdk.api_lib.genomics import genomics_util
from googlecloudsdk.api_lib.genomics.exceptions import GenomicsError
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
class DatasetsRestore(base.UpdateCommand):
"""Restores a deleted dataset.
"""
@staticmethod
def Args(parser):
"""Register flags for this command."""
parser.add_argument('id',
help='The ID of the deleted dataset to be restored.')
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace, All the arguments that were provided to this
command invocation.
Raises:
HttpException: An http error response was received while executing api
request.
Returns:
None
"""
prompt_message = (
'Restoring dataset {0} will restore all objects in '
'the dataset.').format(args.id)
if not console_io.PromptContinue(message=prompt_message):
raise GenomicsError('Restore aborted by user.')
apitools_client = genomics_util.GetGenomicsClient()
genomics_messages = genomics_util.GetGenomicsMessages()
dataset = genomics_messages.GenomicsDatasetsUndeleteRequest(
datasetId=args.id)
result = apitools_client.datasets.Undelete(dataset)
log.RestoredResource('{0}, id: {1}'.format(result.name, result.id),
kind='dataset')
return result
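# Invocation sketch (illustrative, inferred from this file's location under
# surface/genomics/datasets/):
#
#   gcloud genomics datasets restore DATASET_ID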
|
JavML/django | refs/heads/master | tests/gis_tests/test_geoip2.py | 75 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import unittest
from unittest import skipUnless
from django.conf import settings
from django.contrib.gis.geoip2 import HAS_GEOIP2
from django.contrib.gis.geos import HAS_GEOS, GEOSGeometry
from django.test import mock
from django.utils import six
if HAS_GEOIP2:
from django.contrib.gis.geoip2 import GeoIP2, GeoIP2Exception
# Note: Requires both the GeoIP country and city datasets.
# The GEOIP_PATH setting should be the only setting set (the directory
# should contain links or the actual database files 'GeoLite2-City.mmdb' and
# 'GeoLite2-Country.mmdb').
@skipUnless(HAS_GEOIP2 and getattr(settings, "GEOIP_PATH", None),
"GeoIP is required along with the GEOIP_PATH setting.")
class GeoIPTest(unittest.TestCase):
addr = '128.249.1.1'
fqdn = 'tmc.edu'
def test01_init(self):
"GeoIP initialization."
g1 = GeoIP2() # Everything inferred from GeoIP path
path = settings.GEOIP_PATH
g2 = GeoIP2(path, 0) # Passing in data path explicitly.
g3 = GeoIP2.open(path, 0) # MaxMind Python API syntax.
for g in (g1, g2, g3):
self.assertTrue(g._country)
self.assertTrue(g._city)
# Only passing in the location of one database.
city = os.path.join(path, 'GeoLite2-City.mmdb')
cntry = os.path.join(path, 'GeoLite2-Country.mmdb')
g4 = GeoIP2(city, country='')
self.assertIsNone(g4._country)
g5 = GeoIP2(cntry, city='')
self.assertIsNone(g5._city)
# Improper parameters.
bad_params = (23, 'foo', 15.23)
for bad in bad_params:
self.assertRaises(GeoIP2Exception, GeoIP2, cache=bad)
if isinstance(bad, six.string_types):
e = GeoIP2Exception
else:
e = TypeError
self.assertRaises(e, GeoIP2, bad, 0)
def test02_bad_query(self):
"GeoIP query parameter checking."
cntry_g = GeoIP2(city='<foo>')
# No city database available, these calls should fail.
self.assertRaises(GeoIP2Exception, cntry_g.city, 'tmc.edu')
self.assertRaises(GeoIP2Exception, cntry_g.coords, 'tmc.edu')
# Non-string query should raise TypeError
self.assertRaises(TypeError, cntry_g.country_code, 17)
self.assertRaises(TypeError, cntry_g.country_name, GeoIP2)
@mock.patch('socket.gethostbyname')
def test03_country(self, gethostbyname):
"GeoIP country querying methods."
gethostbyname.return_value = '128.249.1.1'
g = GeoIP2(city='<foo>')
for query in (self.fqdn, self.addr):
self.assertEqual(
'US',
g.country_code(query),
'Failed for func country_code and query %s' % query
)
self.assertEqual(
'United States',
g.country_name(query),
'Failed for func country_name and query %s' % query
)
self.assertEqual(
{'country_code': 'US', 'country_name': 'United States'},
g.country(query)
)
@skipUnless(HAS_GEOS, "Geos is required")
@mock.patch('socket.gethostbyname')
def test04_city(self, gethostbyname):
"GeoIP city querying methods."
gethostbyname.return_value = '128.249.1.1'
g = GeoIP2(country='<foo>')
for query in (self.fqdn, self.addr):
# Country queries should still work.
self.assertEqual(
'US',
g.country_code(query),
'Failed for func country_code and query %s' % query
)
self.assertEqual(
'United States',
g.country_name(query),
'Failed for func country_name and query %s' % query
)
self.assertEqual(
{'country_code': 'US', 'country_name': 'United States'},
g.country(query)
)
# City information dictionary.
d = g.city(query)
self.assertEqual('US', d['country_code'])
self.assertEqual('Houston', d['city'])
self.assertEqual('TX', d['region'])
geom = g.geos(query)
self.assertIsInstance(geom, GEOSGeometry)
lon, lat = (-95.4010, 29.7079)
lat_lon = g.lat_lon(query)
lat_lon = (lat_lon[1], lat_lon[0])
for tup in (geom.tuple, g.coords(query), g.lon_lat(query), lat_lon):
self.assertAlmostEqual(lon, tup[0], 4)
self.assertAlmostEqual(lat, tup[1], 4)
@mock.patch('socket.gethostbyname')
def test05_unicode_response(self, gethostbyname):
"GeoIP strings should be properly encoded (#16553)."
gethostbyname.return_value = '194.27.42.76'
g = GeoIP2()
d = g.city("nigde.edu.tr")
self.assertEqual('Niğde', d['city'])
d = g.country('200.26.205.1')
# Some databases have only unaccented countries
self.assertIn(d['country_name'], ('Curaçao', 'Curacao'))
def test06_ipv6_query(self):
"GeoIP can lookup IPv6 addresses."
g = GeoIP2()
d = g.city('2002:81ed:c9a5::81ed:c9a5') # IPv6 address for www.nhm.ku.edu
self.assertEqual('US', d['country_code'])
self.assertEqual('Lawrence', d['city'])
self.assertEqual('KS', d['region'])
def test_repr(self):
path = settings.GEOIP_PATH
g = GeoIP2(path=path)
meta = g._reader.metadata()
version = '%s.%s' % (meta.binary_format_major_version, meta.binary_format_minor_version)
country_path = g._country_file
city_path = g._city_file
expected = '<GeoIP2 [v%(version)s] _country_file="%(country)s", _city_file="%(city)s">' % {
'version': version,
'country': country_path,
'city': city_path,
}
self.assertEqual(repr(g), expected)
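# Illustrative settings fragment (not part of this test module): the suite above only
# needs GEOIP_PATH to point at a directory containing the two MaxMind databases.
#
#   GEOIP_PATH = '/path/to/geoip'   # holds GeoLite2-City.mmdb and GeoLite2-Country.mmdb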
|
diego-d5000/MisValesMd | refs/heads/master | env/lib/python2.7/site-packages/django/utils/regex_helper.py | 1 | """
Functions for reversing a regular expression (used in reverse URL resolving).
Used internally by Django and not intended for external use.
This is not, and is not intended to be, a complete reg-exp decompiler. It
should be good enough for a large class of URLS, however.
"""
from __future__ import unicode_literals
from django.utils import six
from django.utils.six.moves import zip
# Mapping of an escape character to a representative of that class. So, e.g.,
# "\w" is replaced by "x" in a reverse URL. A value of None means to ignore
# this sequence. Any missing key is mapped to itself.
ESCAPE_MAPPINGS = {
"A": None,
"b": None,
"B": None,
"d": "0",
"D": "x",
"s": " ",
"S": "x",
"w": "x",
"W": "!",
"Z": None,
}
class Choice(list):
"""
Used to represent multiple possibilities at this point in a pattern string.
We use a distinguished type, rather than a list, so that the usage in the
code is clear.
"""
class Group(list):
"""
Used to represent a capturing group in the pattern string.
"""
class NonCapture(list):
"""
Used to represent a non-capturing group in the pattern string.
"""
def normalize(pattern):
"""
Given a reg-exp pattern, normalizes it to an iterable of forms that
suffice for reverse matching. This does the following:
(1) For any repeating sections, keeps the minimum number of occurrences
permitted (this means zero for optional groups).
(2) If an optional group includes parameters, include one occurrence of
that group (along with the zero occurrence case from step (1)).
(3) Select the first (essentially an arbitrary) element from any character
class. Select an arbitrary character for any unordered class (e.g. '.'
or '\w') in the pattern.
(5) Ignore comments and any of the reg-exp flags that won't change
what we construct ("iLmsu"). "(?x)" is an error, however.
(6) Raise an error on all other non-capturing (?...) forms (e.g.
look-ahead and look-behind matches) and any disjunctive ('|')
constructs.
Django's URLs for forward resolving are either all positional arguments or
all keyword arguments. That is assumed here, as well. Although reverse
resolving can be done using positional args when keyword args are
specified, the two cannot be mixed in the same reverse() call.
"""
# Do a linear scan to work out the special features of this pattern. The
# idea is that we scan once here and collect all the information we need to
# make future decisions.
result = []
non_capturing_groups = []
consume_next = True
pattern_iter = next_char(iter(pattern))
num_args = 0
# A "while" loop is used here because later on we need to be able to peek
# at the next character and possibly go around without consuming another
# one at the top of the loop.
try:
ch, escaped = next(pattern_iter)
except StopIteration:
return [('', [])]
try:
while True:
if escaped:
result.append(ch)
elif ch == '.':
# Replace "any character" with an arbitrary representative.
result.append(".")
elif ch == '|':
# FIXME: One day we should do this, but not in 1.0.
raise NotImplementedError('Awaiting Implementation')
elif ch == "^":
pass
elif ch == '$':
break
elif ch == ')':
# This can only be the end of a non-capturing group, since all
# other unescaped parentheses are handled by the grouping
# section later (and the full group is handled there).
#
# We regroup everything inside the capturing group so that it
# can be quantified, if necessary.
start = non_capturing_groups.pop()
inner = NonCapture(result[start:])
result = result[:start] + [inner]
elif ch == '[':
# Replace ranges with the first character in the range.
ch, escaped = next(pattern_iter)
result.append(ch)
ch, escaped = next(pattern_iter)
while escaped or ch != ']':
ch, escaped = next(pattern_iter)
elif ch == '(':
# Some kind of group.
ch, escaped = next(pattern_iter)
if ch != '?' or escaped:
# A positional group
name = "_%d" % num_args
num_args += 1
result.append(Group((("%%(%s)s" % name), name)))
walk_to_end(ch, pattern_iter)
else:
ch, escaped = next(pattern_iter)
if ch in "iLmsu#":
# All of these are ignorable. Walk to the end of the
# group.
walk_to_end(ch, pattern_iter)
elif ch == ':':
# Non-capturing group
non_capturing_groups.append(len(result))
elif ch != 'P':
# Anything else, other than a named group, is something
# we cannot reverse.
raise ValueError("Non-reversible reg-exp portion: '(?%s'" % ch)
else:
ch, escaped = next(pattern_iter)
if ch not in ('<', '='):
raise ValueError("Non-reversible reg-exp portion: '(?P%s'" % ch)
# We are in a named capturing group. Extract the name and
# then skip to the end.
if ch == '<':
terminal_char = '>'
# We are in a named backreference.
else:
terminal_char = ')'
name = []
ch, escaped = next(pattern_iter)
while ch != terminal_char:
name.append(ch)
ch, escaped = next(pattern_iter)
param = ''.join(name)
# Named backreferences have already consumed the
# parenthesis.
if terminal_char != ')':
result.append(Group((("%%(%s)s" % param), param)))
walk_to_end(ch, pattern_iter)
else:
result.append(Group((("%%(%s)s" % param), None)))
elif ch in "*?+{":
# Quantifiers affect the previous item in the result list.
count, ch = get_quantifier(ch, pattern_iter)
if ch:
# We had to look ahead, but it wasn't needed to compute the
# quantifier, so use this character next time around the
# main loop.
consume_next = False
if count == 0:
if contains(result[-1], Group):
# If we are quantifying a capturing group (or
# something containing such a group) and the minimum is
# zero, we must also handle the case of one occurrence
# being present. All the quantifiers (except {0,0},
# which we conveniently ignore) that have a 0 minimum
# also allow a single occurrence.
result[-1] = Choice([None, result[-1]])
else:
result.pop()
elif count > 1:
result.extend([result[-1]] * (count - 1))
else:
# Anything else is a literal.
result.append(ch)
if consume_next:
ch, escaped = next(pattern_iter)
else:
consume_next = True
except StopIteration:
pass
except NotImplementedError:
# A case of using the disjunctive form. No results for you!
return [('', [])]
return list(zip(*flatten_result(result)))
def next_char(input_iter):
"""
An iterator that yields the next character from "pattern_iter", respecting
escape sequences. An escaped character is replaced by a representative of
its class (e.g. \w -> "x"). If the escaped character is one that is
skipped, it is not returned (the next character is returned instead).
Yields the next character, along with a boolean indicating whether it is a
raw (unescaped) character or not.
"""
for ch in input_iter:
if ch != '\\':
yield ch, False
continue
ch = next(input_iter)
representative = ESCAPE_MAPPINGS.get(ch, ch)
if representative is None:
continue
yield representative, True
def walk_to_end(ch, input_iter):
"""
The iterator is currently inside a capturing group. We want to walk to the
close of this group, skipping over any nested groups and handling escaped
parentheses correctly.
"""
if ch == '(':
nesting = 1
else:
nesting = 0
for ch, escaped in input_iter:
if escaped:
continue
elif ch == '(':
nesting += 1
elif ch == ')':
if not nesting:
return
nesting -= 1
def get_quantifier(ch, input_iter):
"""
Parse a quantifier from the input, where "ch" is the first character in the
quantifier.
Returns the minimum number of occurrences permitted by the quantifier and
either None or the next character from the input_iter if the next character
is not part of the quantifier.
"""
if ch in '*?+':
try:
ch2, escaped = next(input_iter)
except StopIteration:
ch2 = None
if ch2 == '?':
ch2 = None
if ch == '+':
return 1, ch2
return 0, ch2
quant = []
while ch != '}':
ch, escaped = next(input_iter)
quant.append(ch)
quant = quant[:-1]
values = ''.join(quant).split(',')
# Consume the trailing '?', if necessary.
try:
ch, escaped = next(input_iter)
except StopIteration:
ch = None
if ch == '?':
ch = None
return int(values[0]), ch
def contains(source, inst):
"""
Returns True if the "source" contains an instance of "inst". False,
otherwise.
"""
if isinstance(source, inst):
return True
if isinstance(source, NonCapture):
for elt in source:
if contains(elt, inst):
return True
return False
def flatten_result(source):
"""
Turns the given source sequence into a list of reg-exp possibilities and
their arguments. Returns a list of strings and a list of argument lists.
Each of the two lists will be of the same length.
"""
if source is None:
return [''], [[]]
if isinstance(source, Group):
if source[1] is None:
params = []
else:
params = [source[1]]
return [source[0]], [params]
result = ['']
result_args = [[]]
pos = last = 0
for pos, elt in enumerate(source):
if isinstance(elt, six.string_types):
continue
piece = ''.join(source[last:pos])
if isinstance(elt, Group):
piece += elt[0]
param = elt[1]
else:
param = None
last = pos + 1
for i in range(len(result)):
result[i] += piece
if param:
result_args[i].append(param)
if isinstance(elt, (Choice, NonCapture)):
if isinstance(elt, NonCapture):
elt = [elt]
inner_result, inner_args = [], []
for item in elt:
res, args = flatten_result(item)
inner_result.extend(res)
inner_args.extend(args)
new_result = []
new_args = []
for item, args in zip(result, result_args):
for i_item, i_args in zip(inner_result, inner_args):
new_result.append(item + i_item)
new_args.append(args[:] + i_args)
result = new_result
result_args = new_args
if pos >= last:
piece = ''.join(source[last:])
for i in range(len(result)):
result[i] += piece
return result, result_args
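# Illustrative example (not part of the original module): normalize() reduces a URL
# pattern to template/argument pairs that reverse() can interpolate.
if __name__ == '__main__':
    demo = normalize(r'^articles/(?P<year>\d{4})/(?P<month>\d{2})/$')
    # Expected to be roughly: [('articles/%(year)s/%(month)s/', ['year', 'month'])]
    print(demo)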
|
laurent-george/weboob | refs/heads/master | weboob/capabilities/pricecomparison.py | 7 | # -*- coding: utf-8 -*-
# Copyright(C) 2012 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from .base import Capability, BaseObject, Field, DecimalField, \
StringField
from .date import DateField
__all__ = ['Shop', 'Price', 'Product', 'CapPriceComparison']
class Product(BaseObject):
"""
A product.
"""
name = StringField('Name of product')
class Shop(BaseObject):
"""
A shop where the price is.
"""
name = StringField('Name of shop')
location = StringField('Location of the shop')
info = StringField('Information about the shop')
class Price(BaseObject):
"""
Price.
"""
date = DateField('Date when this price has been published')
cost = DecimalField('Cost of the product in this shop')
currency = StringField('Currency of the price')
message = StringField('Message related to this price')
shop = Field('Shop information', Shop)
product = Field('Product', Product)
class CapPriceComparison(Capability):
"""
Capability for price comparison websites.
"""
def search_products(self, pattern=None):
"""
Search products from a pattern.
:param pattern: pattern to search
:type pattern: str
:rtype: iter[:class:`Product`]
"""
raise NotImplementedError()
def iter_prices(self, product):
"""
Iter prices for a product.
:param product: product to search
:type product: :class:`Product`
:rtype: iter[:class:`Price`]
"""
raise NotImplementedError()
def get_price(self, id):
"""
Get a price from an ID
:param id: ID of price
:type id: str
:rtype: :class:`Price`
"""
raise NotImplementedError()
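# Minimal sketch of a backend implementing this capability (illustrative only; the
# StaticPriceComparison class and its hard-coded data are not part of the original
# module, and it assumes the usual weboob convention that BaseObject subclasses
# accept an object id as their first constructor argument).
from decimal import Decimal

class StaticPriceComparison(CapPriceComparison):
    """Serve a single hard-coded product/price pair, for demonstration."""

    def search_products(self, pattern=None):
        product = Product('demo-product')
        product.name = 'Demo product'
        if pattern is None or pattern.lower() in product.name.lower():
            yield product

    def iter_prices(self, product):
        shop = Shop('demo-shop')
        shop.name = 'Demo shop'
        price = Price('%s.demo-shop' % product.id)
        price.cost = Decimal('1.50')
        price.currency = 'EUR'
        price.shop = shop
        price.product = product
        yield price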
|
newmediamedicine/indivo_server_1_0 | refs/heads/master | utils/datasections/machine_apps.py | 1 | from indivo import models
import importer_utils
class Machine_apps:
machineapp_tags = ('name', 'email', 'consumer_key', 'secret', 'app_type')
def __init__(self, machineapps_node, verbosity):
self.process_machineapps(machineapps_node, verbosity)
def process_machineapps(self, machineapps_node, verbosity):
machine_apps = []
for node in machineapps_node.childNodes:
machine_app = {}
machine_app_name = node.getAttribute(self.machineapp_tags[0])
machine_app[self.machineapp_tags[0]] = machine_app_name
machine_app[self.machineapp_tags[1]] = node.getAttribute(self.machineapp_tags[1])
if verbosity:
print "\tAdding admin app: ", machine_app_name
for tag_name in self.machineapp_tags:
elem_node = node.getElementsByTagName(tag_name)
if elem_node and len(elem_node) > 0 and elem_node[0].firstChild:
machine_app[tag_name] = importer_utils.clean_value(elem_node[0].firstChild.nodeValue)
machine_apps.append(machine_app)
return self.create_machineapps(machine_apps)
def create_machineapps(self, machine_apps):
for machine_app in machine_apps:
models.MachineApp.objects.get_or_create(**machine_app)
return True
|
dracorpg/python-ivi | refs/heads/master | ivi/testequity/testequityf4.py | 6 | """
Python Interchangeable Virtual Instrument Library
Driver for Test Equity Model 140
Copyright (c) 2014 Jeff Wurzbach
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .. import ivi
from .. import ics
class testequityf4(ivi.IviContainer):
"Watlow F4 controller used in TestEquity Enviromental Chambers"
def __init__(self, *args, **kwargs):
self.__dict__.setdefault('_instrument_id', '')
super(testequityf4, self).__init__(*args, **kwargs)
self._add_property('chamber_temperature', self._get_temperature)
self._add_property('chamber_temperature_setpoint', self._get_temperature_setpoint, self._set_temperature_setpoint )
self._add_property('chamber_humidity', self._get_humidity)
self._add_property('chamber_humidity_setpoint', self._get_humidity_setpoint, self._set_humidity_setpoint)
self._add_property('temperature_decimal_config', self._get_temperature_decimal_config)
self._add_property('humidity_decimal_config', self._get_humidity_decimal_config)
self._add_property('part_temperature_decimal_config', self._get_part_temperature_decimal_config)
self._add_property('temperature_unit', self._get_temperature_unit_config)
self._temperature_decimal_config = 1 #default to 500 means 50.0degC
self._humidity_decimal_config = 1 #default to 500 means 50.0%RH
self._part_temperature_decimal_config = 1 #default to 500 means 50.0degC
self._temperature_unit = 1 #default to degC
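# Modbus register map used by this driver (comment added for reference; addresses
# are taken from the methods below): 100 chamber temperature, 104 humidity,
# 108 part temperature, 300 temperature setpoint, 319 humidity setpoint,
# 606/616/626 decimal-point configuration, 901 temperature unit,
# 2000-2060 event outputs 1-7, 2070 compressor state.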
#grab the decimal configurations for the controller and cache them. provide a method to change them if allowed (i.e. if someone changes the default config from TestEquity).
def _get_temperature_decimal_config(self):
if not self._driver_operation_simulate and not self._get_cache_valid():
self._temperature_decimal_config = self._read_register(606)
self._set_cache_valid()
return self._temperature_decimal_config
def _get_humidity_decimal_config(self):
if not self._driver_operation_simulate and not self._get_cache_valid():
self._humidity_decimal_config = self._read_register(616)
self._set_cache_valid()
return self._humidity_decimal_config
def _get_part_temperature_decimal_config(self):
if not self._driver_operation_simulate and not self._get_cache_valid():
self._part_temperature_decimal_config = self._read_register(626)
self._set_cache_valid()
return self._part_temperature_decimal_config
def _set_temperature_decimal_config(self, value):
value = int(value)
if not self._driver_operation_simulate:
self._write_register(606,value)
self._temperature_decimal_config= value
def _set_humidity_decimal_config(self, value):
value = int(value)
if not self._driver_operation_simulate:
self._write_register(616,value)
self._humidity_decimal_config= value
def _set_part_temperature_decimal_config(self, value):
value = int(value)
if not self._driver_operation_simulate:
self._write_register(626,value)
self._part_temperature_decimal_config= value
#Provide ability to read and write the config on the UOM for temperature. Cache results to make sure things work nicely
def _get_temperature_unit_config(self):
if not self._driver_operation_simulate and not self._get_cache_valid():
self._temperature_unit = self._read_register(901)
self._set_cache_valid()
return self._temperature_unit
def _set_temperature_unit_config(self, unit_of_measure="c"):
self.driver_operation.invalidate_all_attributes()
if unit_of_measure=="f":
value = 0
else:
value = 1
value = int(value)
if not self._driver_operation_simulate:
self._write_register(901,value)
self._temperature_unit= value
#_get_temperature(), _get_humidity(), and _get_part_temperature() are not cached so that the reads are accurate.
def _get_temperature(self):
if not self._driver_operation_simulate:
resp=int(self._read_register(100))
if self._temperature_decimal_config==1:
temperature=float(resp)/10
else:
temperature=float(resp)
return temperature
return 0
def _get_humidity(self):
if not self._driver_operation_simulate:
resp=int(self._read_register(104))
if self._humidity_decimal_config==1:
humidity=float(resp)/10
else:
humidity=float(resp)
return humidity
return 0
def _get_part_temperature(self):
if not self._driver_operation_simulate:
resp=int(self._read_register(108))
if self._part_temperature_decimal_config==1:
part_temperature=float(resp)/10
else:
part_temperature=float(resp)
return part_temperature
return 0
#get the compressor state
def _get_compressor_state(self):
if not self._driver_operation_simulate:
resp=int(self._read_register(2070))
return resp
return 0
#get the event 1 register state
def _get_event_one_state(self):
if not self._driver_operation_simulate:
resp=int(self._read_register(2000))
return resp
return 0
#get the event 2 register state
def _get_event_two_state(self):
if not self._driver_operation_simulate:
resp=int(self._read_register(2010))
return resp
return 0
#get the event 3 register state
def _get_event_three_state(self):
if not self._driver_operation_simulate:
resp=int(self._read_register(2020))
return resp
return 0
#get the event 4 register state
def _get_event_four_state(self):
if not self._driver_operation_simulate:
resp=int(self._read_register(2030))
return resp
return 0
#get the event 5 register state
def _get_event_five_state(self):
if not self._driver_operation_simulate:
resp=int(self._read_register(2040))
return resp
return 0
#get the event 6 register state
def _get_event_six_state(self):
if not self._driver_operation_simulate:
resp=int(self._read_register(2050))
return resp
return 0
#get the event 7 register state
def _get_event_seven_state(self):
if not self._driver_operation_simulate:
resp=int(self._read_register(2060))
return resp
return 0
#set the event 1 register state
def _set_event_one_state(self, state):
value=int(bool(state))
if not self._driver_operation_simulate:
self._write_register(2000, value)
#set the event 2 register state
def _set_event_two_state(self, state):
value=int(bool(state))
if not self._driver_operation_simulate:
self._write_register(2010, value)
#set the event 3 register state
def _set_event_three_state(self, state):
value=int(bool(state))
if not self._driver_operation_simulate:
self._write_register(2020, value)
#set the event 4 register state
def _set_event_four_state(self, state):
value=int(bool(state))
if not self._driver_operation_simulate:
self._write_register(2030, value)
#set the event 5 register state
def _set_event_five_state(self, state):
value=int(bool(state))
if not self._driver_operation_simulate:
self._write_register(2040, value)
#set the event 6 register state
def _set_event_six_state(self, state):
value=int(bool(state))
if not self._driver_operation_simulate:
self._write_register(2050, value)
#set the event 7 register state
    def _set_event_seven_state(self, state):
value=int(bool(state))
if not self._driver_operation_simulate:
self._write_register(2060, value)
def _get_temperature_setpoint(self):
resp=int(self._read_register(300))
#print(resp)
#print(self._temperature_decimal_config)
if self._temperature_decimal_config==1:
temperature=float(resp)/10
else:
temperature=float(resp)
return temperature
def _get_humidity_setpoint(self):
resp=int(self._read_register(319))
if self._humidity_decimal_config==1:
humidity=float(resp)/10
else:
humidity=float(resp)
return humidity
def _set_temperature_setpoint(self, value):
if self._temperature_decimal_config==1:
temperature=int(float(value)*10)
else:
temperature=int(value)
if not self._driver_operation_simulate:
self._write_register(300, temperature)
def _set_humidity_setpoint(self, value):
if self._humidity_decimal_config==1:
humidity=int(float(value)*10)
else:
humidity=int(value)
if not self._driver_operation_simulate:
self._write_register(319, humidity) |
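    # Illustrative sketch, not part of the original driver: how the decimal
    # configuration registers scale setpoint values. Assuming an instance
    # `chamber` of this driver with temperature decimals enabled (register 606
    # set to 1), a 25.5 degree setpoint is stored in register 300 as the
    # integer 255 and scaled back down on read:
    #
    #   chamber._set_temperature_decimal_config(1)   # one implied decimal place
    #   chamber._set_temperature_setpoint(25.5)      # writes int(25.5 * 10) == 255
    #   chamber._get_temperature_setpoint()          # returns 255 / 10.0 == 25.5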
shaufi/odoo | refs/heads/8.0 | addons/hr_payroll/wizard/__init__.py | 442 | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
#    $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_payroll_payslips_by_employees
import hr_payroll_contribution_register_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
sniegu/django-surround | refs/heads/master | surround/django/basic/invalid_subdomain_urls.py | 1 | from django.conf.urls import patterns, url
from django.http import Http404
def invalid_host_view(request):
raise Http404('unmatched domain: %s' % request.get_host())
urlpatterns = patterns('portal.views',
url(r'^.*$', invalid_host_view),
)
|
PaulKinlan/cli-caniuse | refs/heads/master | site/app/scripts/bower_components/jsrepl-build/extern/python/closured/lib/python2.7/distutils/command/upload.py | 176 | """distutils.command.upload
Implements the Distutils 'upload' subcommand (upload package to PyPI)."""
import os
import socket
import platform
from urllib2 import urlopen, Request, HTTPError
from base64 import standard_b64encode
import urlparse
import cStringIO as StringIO
from hashlib import md5
from distutils.errors import DistutilsOptionError
from distutils.core import PyPIRCCommand
from distutils.spawn import spawn
from distutils import log
class upload(PyPIRCCommand):
description = "upload binary package to PyPI"
user_options = PyPIRCCommand.user_options + [
('sign', 's',
'sign files to upload using gpg'),
('identity=', 'i', 'GPG identity used to sign files'),
]
boolean_options = PyPIRCCommand.boolean_options + ['sign']
def initialize_options(self):
PyPIRCCommand.initialize_options(self)
self.username = ''
self.password = ''
self.show_response = 0
self.sign = False
self.identity = None
def finalize_options(self):
PyPIRCCommand.finalize_options(self)
if self.identity and not self.sign:
raise DistutilsOptionError(
"Must use --sign for --identity to have meaning"
)
config = self._read_pypirc()
if config != {}:
self.username = config['username']
self.password = config['password']
self.repository = config['repository']
self.realm = config['realm']
# getting the password from the distribution
# if previously set by the register command
if not self.password and self.distribution.password:
self.password = self.distribution.password
def run(self):
if not self.distribution.dist_files:
raise DistutilsOptionError("No dist file created in earlier command")
for command, pyversion, filename in self.distribution.dist_files:
self.upload_file(command, pyversion, filename)
def upload_file(self, command, pyversion, filename):
# Makes sure the repository URL is compliant
schema, netloc, url, params, query, fragments = \
urlparse.urlparse(self.repository)
if params or query or fragments:
raise AssertionError("Incompatible url %s" % self.repository)
if schema not in ('http', 'https'):
raise AssertionError("unsupported schema " + schema)
# Sign if requested
if self.sign:
gpg_args = ["gpg", "--detach-sign", "-a", filename]
if self.identity:
gpg_args[2:2] = ["--local-user", self.identity]
spawn(gpg_args,
dry_run=self.dry_run)
# Fill in the data - send all the meta-data in case we need to
# register a new release
f = open(filename,'rb')
try:
content = f.read()
finally:
f.close()
meta = self.distribution.metadata
data = {
# action
':action': 'file_upload',
            'protcol_version': '1',  # (sic) the misspelled key name distutils has always sent; kept for compatibility
# identify release
'name': meta.get_name(),
'version': meta.get_version(),
# file content
'content': (os.path.basename(filename),content),
'filetype': command,
'pyversion': pyversion,
'md5_digest': md5(content).hexdigest(),
# additional meta-data
'metadata_version' : '1.0',
'summary': meta.get_description(),
'home_page': meta.get_url(),
'author': meta.get_contact(),
'author_email': meta.get_contact_email(),
'license': meta.get_licence(),
'description': meta.get_long_description(),
'keywords': meta.get_keywords(),
'platform': meta.get_platforms(),
'classifiers': meta.get_classifiers(),
'download_url': meta.get_download_url(),
# PEP 314
'provides': meta.get_provides(),
'requires': meta.get_requires(),
'obsoletes': meta.get_obsoletes(),
}
comment = ''
if command == 'bdist_rpm':
dist, version, id = platform.dist()
if dist:
comment = 'built for %s %s' % (dist, version)
elif command == 'bdist_dumb':
comment = 'built for %s' % platform.platform(terse=1)
data['comment'] = comment
if self.sign:
data['gpg_signature'] = (os.path.basename(filename) + ".asc",
open(filename+".asc").read())
# set up the authentication
auth = "Basic " + standard_b64encode(self.username + ":" +
self.password)
# Build up the MIME payload for the POST data
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = '\n--' + boundary
end_boundary = sep_boundary + '--'
body = StringIO.StringIO()
for key, value in data.items():
# handle multiple entries for the same name
if not isinstance(value, list):
value = [value]
for value in value:
if isinstance(value, tuple):
fn = ';filename="%s"' % value[0]
value = value[1]
else:
fn = ""
body.write(sep_boundary)
body.write('\nContent-Disposition: form-data; name="%s"'%key)
body.write(fn)
body.write("\n\n")
body.write(value)
if value and value[-1] == '\r':
body.write('\n') # write an extra newline (lurve Macs)
body.write(end_boundary)
body.write("\n")
body = body.getvalue()
self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)
# build the Request
headers = {'Content-type':
'multipart/form-data; boundary=%s' % boundary,
'Content-length': str(len(body)),
'Authorization': auth}
request = Request(self.repository, data=body,
headers=headers)
# send the data
try:
result = urlopen(request)
status = result.getcode()
reason = result.msg
if self.show_response:
                msg = '\n'.join(('-' * 75, result.read(), '-' * 75))
self.announce(msg, log.INFO)
except socket.error, e:
self.announce(str(e), log.ERROR)
return
except HTTPError, e:
status = e.code
reason = e.msg
if status == 200:
self.announce('Server response (%s): %s' % (status, reason),
log.INFO)
else:
self.announce('Upload failed (%s): %s' % (status, reason),
log.ERROR)
|
dennis-sheil/commandergenius | refs/heads/sdl_android | project/jni/python/src/Lib/test/test_StringIO.py | 55 | # Tests StringIO and cStringIO
import unittest
import StringIO
import cStringIO
import types
from test import test_support
class TestGenericStringIO(unittest.TestCase):
# use a class variable MODULE to define which module is being tested
# Line of data to test as string
_line = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!'
# Constructor to use for the test data (._line is passed to this
# constructor)
constructor = str
def setUp(self):
self._line = self.constructor(self._line)
self._lines = self.constructor((self._line + '\n') * 5)
self._fp = self.MODULE.StringIO(self._lines)
def test_reads(self):
eq = self.assertEqual
self.assertRaises(TypeError, self._fp.seek)
eq(self._fp.read(10), self._line[:10])
eq(self._fp.readline(), self._line[10:] + '\n')
eq(len(self._fp.readlines(60)), 2)
def test_writes(self):
f = self.MODULE.StringIO()
self.assertRaises(TypeError, f.seek)
f.write(self._line[:6])
f.seek(3)
f.write(self._line[20:26])
f.write(self._line[52])
self.assertEqual(f.getvalue(), 'abcuvwxyz!')
def test_writelines(self):
f = self.MODULE.StringIO()
f.writelines([self._line[0], self._line[1], self._line[2]])
f.seek(0)
self.assertEqual(f.getvalue(), 'abc')
def test_writelines_error(self):
def errorGen():
yield 'a'
raise KeyboardInterrupt()
f = self.MODULE.StringIO()
self.assertRaises(KeyboardInterrupt, f.writelines, errorGen())
def test_truncate(self):
eq = self.assertEqual
f = self.MODULE.StringIO()
f.write(self._lines)
f.seek(10)
f.truncate()
eq(f.getvalue(), 'abcdefghij')
f.truncate(5)
eq(f.getvalue(), 'abcde')
f.write('xyz')
eq(f.getvalue(), 'abcdexyz')
self.assertRaises(IOError, f.truncate, -1)
f.close()
self.assertRaises(ValueError, f.write, 'frobnitz')
def test_closed_flag(self):
f = self.MODULE.StringIO()
self.assertEqual(f.closed, False)
f.close()
self.assertEqual(f.closed, True)
f = self.MODULE.StringIO("abc")
self.assertEqual(f.closed, False)
f.close()
self.assertEqual(f.closed, True)
def test_isatty(self):
f = self.MODULE.StringIO()
self.assertRaises(TypeError, f.isatty, None)
self.assertEqual(f.isatty(), False)
f.close()
self.assertRaises(ValueError, f.isatty)
def test_iterator(self):
eq = self.assertEqual
unless = self.failUnless
eq(iter(self._fp), self._fp)
# Does this object support the iteration protocol?
unless(hasattr(self._fp, '__iter__'))
unless(hasattr(self._fp, 'next'))
i = 0
for line in self._fp:
eq(line, self._line + '\n')
i += 1
eq(i, 5)
self._fp.close()
self.assertRaises(ValueError, self._fp.next)
class TestStringIO(TestGenericStringIO):
MODULE = StringIO
def test_unicode(self):
if not test_support.have_unicode: return
# The StringIO module also supports concatenating Unicode
# snippets to larger Unicode strings. This is tested by this
# method. Note that cStringIO does not support this extension.
f = self.MODULE.StringIO()
f.write(self._line[:6])
f.seek(3)
f.write(unicode(self._line[20:26]))
f.write(unicode(self._line[52]))
s = f.getvalue()
self.assertEqual(s, unicode('abcuvwxyz!'))
self.assertEqual(type(s), types.UnicodeType)
class TestcStringIO(TestGenericStringIO):
MODULE = cStringIO
import sys
if sys.platform.startswith('java'):
# Jython doesn't have a buffer object, so we just do a useless
# fake of the buffer tests.
buffer = str
class TestBufferStringIO(TestStringIO):
constructor = buffer
class TestBuffercStringIO(TestcStringIO):
constructor = buffer
def test_main():
test_support.run_unittest(
TestStringIO,
TestcStringIO,
TestBufferStringIO,
TestBuffercStringIO
)
if __name__ == '__main__':
test_main()
|
z0by/django | refs/heads/master | django/contrib/flatpages/migrations/0001_initial.py | 134 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sites', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='FlatPage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('url', models.CharField(max_length=100, verbose_name='URL', db_index=True)),
('title', models.CharField(max_length=200, verbose_name='title')),
('content', models.TextField(verbose_name='content', blank=True)),
('enable_comments', models.BooleanField(default=False, verbose_name='enable comments')),
('template_name', models.CharField(help_text="Example: 'flatpages/contact_page.html'. If this isn't provided, the system will use 'flatpages/default.html'.", max_length=70, verbose_name='template name', blank=True)),
('registration_required', models.BooleanField(default=False, help_text='If this is checked, only logged-in users will be able to view the page.', verbose_name='registration required')),
('sites', models.ManyToManyField(to='sites.Site', verbose_name='sites')),
],
options={
'ordering': ('url',),
'db_table': 'django_flatpage',
'verbose_name': 'flat page',
'verbose_name_plural': 'flat pages',
},
bases=(models.Model,),
),
]
|
bwsblake/lettercounter | refs/heads/master | django-norel-env/lib/python2.7/site-packages/django/utils/timesince.py | 79 | from __future__ import unicode_literals
import datetime
from django.utils.timezone import is_aware, utc
from django.utils.translation import ungettext, ugettext
def timesince(d, now=None, reversed=False):
"""
Takes two datetime objects and returns the time between d and now
as a nicely formatted string, e.g. "10 minutes". If d occurs after now,
then "0 minutes" is returned.
Units used are years, months, weeks, days, hours, and minutes.
Seconds and microseconds are ignored. Up to two adjacent units will be
displayed. For example, "2 weeks, 3 days" and "1 year, 3 months" are
possible outputs, but "2 weeks, 3 hours" and "1 year, 5 days" are not.
Adapted from
http://web.archive.org/web/20060617175230/http://blog.natbat.co.uk/archive/2003/Jun/14/time_since
"""
chunks = (
(60 * 60 * 24 * 365, lambda n: ungettext('year', 'years', n)),
(60 * 60 * 24 * 30, lambda n: ungettext('month', 'months', n)),
(60 * 60 * 24 * 7, lambda n : ungettext('week', 'weeks', n)),
(60 * 60 * 24, lambda n : ungettext('day', 'days', n)),
(60 * 60, lambda n: ungettext('hour', 'hours', n)),
(60, lambda n: ungettext('minute', 'minutes', n))
)
# Convert datetime.date to datetime.datetime for comparison.
if not isinstance(d, datetime.datetime):
d = datetime.datetime(d.year, d.month, d.day)
if now and not isinstance(now, datetime.datetime):
now = datetime.datetime(now.year, now.month, now.day)
if not now:
now = datetime.datetime.now(utc if is_aware(d) else None)
delta = (d - now) if reversed else (now - d)
# ignore microseconds
since = delta.days * 24 * 60 * 60 + delta.seconds
if since <= 0:
# d is in the future compared to now, stop processing.
return '0 ' + ugettext('minutes')
for i, (seconds, name) in enumerate(chunks):
count = since // seconds
if count != 0:
break
s = ugettext('%(number)d %(type)s') % {'number': count, 'type': name(count)}
if i + 1 < len(chunks):
# Now get the second item
seconds2, name2 = chunks[i + 1]
count2 = (since - (seconds * count)) // seconds2
if count2 != 0:
s += ugettext(', %(number)d %(type)s') % {'number': count2, 'type': name2(count2)}
return s
def timeuntil(d, now=None):
"""
Like timesince, but returns a string measuring the time until
the given time.
"""
return timesince(d, now, reversed=True)
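# Illustrative usage, not part of Django itself; output assumes the default
# English locale and naive datetimes:
#
#   import datetime
#   d = datetime.datetime(2012, 1, 1, 12, 0)
#   now = datetime.datetime(2012, 1, 15, 15, 0)
#   timesince(d, now)    # -> "2 weeks"
#   timeuntil(now, d)    # -> "2 weeks" (time remaining until `now`, counted from `d`)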
|
krintoxi/NoobSec-Toolkit | refs/heads/master | NoobSecToolkit /tools/inject/waf/uspses.py | 10 | #!/usr/bin/env python
"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import re
from lib.core.enums import HTTP_HEADER
from lib.core.settings import WAF_ATTACK_VECTORS
__product__ = "USP Secure Entry Server (United Security Providers)"
def detect(get_page):
retval = False
for vector in WAF_ATTACK_VECTORS:
page, headers, code = get_page(get=vector)
retval = re.search(r"Secure Entry Server", headers.get(HTTP_HEADER.SERVER, ""), re.I) is not None
if retval:
break
return retval
|
pforret/python-for-android | refs/heads/master | python-modules/twisted/twisted/conch/checkers.py | 59 | # -*- test-case-name: twisted.conch.test.test_checkers -*-
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Provide L{ICredentialsChecker} implementations to be used in Conch protocols.
"""
import os, base64, binascii, errno
try:
import pwd
except ImportError:
pwd = None
else:
import crypt
try:
# get this from http://www.twistedmatrix.com/users/z3p/files/pyshadow-0.2.tar.gz
import shadow
except:
shadow = None
try:
from twisted.cred import pamauth
except ImportError:
pamauth = None
from zope.interface import implements, providedBy
from twisted.conch import error
from twisted.conch.ssh import keys
from twisted.cred.checkers import ICredentialsChecker
from twisted.cred.credentials import IUsernamePassword, ISSHPrivateKey
from twisted.cred.error import UnauthorizedLogin, UnhandledCredentials
from twisted.internet import defer
from twisted.python import failure, reflect, log
from twisted.python.util import runAsEffectiveUser
from twisted.python.filepath import FilePath
def verifyCryptedPassword(crypted, pw):
if crypted[0] == '$': # md5_crypt encrypted
salt = '$1$' + crypted.split('$')[2]
else:
salt = crypted[:2]
return crypt.crypt(pw, salt) == crypted
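# Illustrative example, not part of Twisted: with traditional DES crypt the
# first two characters of the stored hash are the salt, so re-crypting the
# candidate password with that salt must reproduce the stored value.
#
#   crypted = crypt.crypt('secret', 'ab')        # DES hash, begins with 'ab'
#   verifyCryptedPassword(crypted, 'secret')     # -> True
#   verifyCryptedPassword(crypted, 'wrong')      # -> False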
class UNIXPasswordDatabase:
credentialInterfaces = IUsernamePassword,
implements(ICredentialsChecker)
def requestAvatarId(self, credentials):
if pwd:
try:
cryptedPass = pwd.getpwnam(credentials.username)[1]
except KeyError:
return defer.fail(UnauthorizedLogin("invalid username"))
else:
if cryptedPass not in ['*', 'x'] and \
verifyCryptedPassword(cryptedPass, credentials.password):
return defer.succeed(credentials.username)
if shadow:
gid = os.getegid()
uid = os.geteuid()
os.setegid(0)
os.seteuid(0)
try:
shadowPass = shadow.getspnam(credentials.username)[1]
except KeyError:
os.setegid(gid)
os.seteuid(uid)
return defer.fail(UnauthorizedLogin("invalid username"))
os.setegid(gid)
os.seteuid(uid)
if verifyCryptedPassword(shadowPass, credentials.password):
return defer.succeed(credentials.username)
return defer.fail(UnauthorizedLogin("invalid password"))
return defer.fail(UnauthorizedLogin("unable to verify password"))
class SSHPublicKeyDatabase:
"""
Checker that authenticates SSH public keys, based on public keys listed in
authorized_keys and authorized_keys2 files in user .ssh/ directories.
"""
credentialInterfaces = ISSHPrivateKey,
implements(ICredentialsChecker)
def requestAvatarId(self, credentials):
d = defer.maybeDeferred(self.checkKey, credentials)
d.addCallback(self._cbRequestAvatarId, credentials)
d.addErrback(self._ebRequestAvatarId)
return d
def _cbRequestAvatarId(self, validKey, credentials):
"""
Check whether the credentials themselves are valid, now that we know
if the key matches the user.
@param validKey: A boolean indicating whether or not the public key
matches a key in the user's authorized_keys file.
@param credentials: The credentials offered by the user.
@type credentials: L{ISSHPrivateKey} provider
@raise UnauthorizedLogin: (as a failure) if the key does not match the
user in C{credentials}. Also raised if the user provides an invalid
signature.
@raise ValidPublicKey: (as a failure) if the key matches the user but
the credentials do not include a signature. See
L{error.ValidPublicKey} for more information.
@return: The user's username, if authentication was successful.
"""
if not validKey:
return failure.Failure(UnauthorizedLogin("invalid key"))
if not credentials.signature:
return failure.Failure(error.ValidPublicKey())
else:
try:
pubKey = keys.Key.fromString(credentials.blob)
if pubKey.verify(credentials.signature, credentials.sigData):
return credentials.username
except: # any error should be treated as a failed login
log.err()
return failure.Failure(UnauthorizedLogin('error while verifying key'))
return failure.Failure(UnauthorizedLogin("unable to verify key"))
def getAuthorizedKeysFiles(self, credentials):
"""
Return a list of L{FilePath} instances for I{authorized_keys} files
which might contain information about authorized keys for the given
credentials.
On OpenSSH servers, the default location of the file containing the
list of authorized public keys is
U{$HOME/.ssh/authorized_keys<http://www.openbsd.org/cgi-bin/man.cgi?query=sshd_config>}.
I{$HOME/.ssh/authorized_keys2} is also returned, though it has been
U{deprecated by OpenSSH since
2001<http://marc.info/?m=100508718416162>}.
@return: A list of L{FilePath} instances to files with the authorized keys.
"""
pwent = pwd.getpwnam(credentials.username)
root = FilePath(pwent.pw_dir).child('.ssh')
files = ['authorized_keys', 'authorized_keys2']
return [root.child(f) for f in files]
def checkKey(self, credentials):
"""
Retrieve files containing authorized keys and check against user
credentials.
"""
uid, gid = os.geteuid(), os.getegid()
ouid, ogid = pwd.getpwnam(credentials.username)[2:4]
for filepath in self.getAuthorizedKeysFiles(credentials):
if not filepath.exists():
continue
try:
lines = filepath.open()
except IOError, e:
if e.errno == errno.EACCES:
lines = runAsEffectiveUser(ouid, ogid, filepath.open)
else:
raise
for l in lines:
l2 = l.split()
if len(l2) < 2:
continue
try:
if base64.decodestring(l2[1]) == credentials.blob:
return True
except binascii.Error:
continue
return False
def _ebRequestAvatarId(self, f):
if not f.check(UnauthorizedLogin):
log.msg(f)
return failure.Failure(UnauthorizedLogin("unable to get avatar id"))
return f
class SSHProtocolChecker:
"""
SSHProtocolChecker is a checker that requires multiple authentications
to succeed. To add a checker, call my registerChecker method with
the checker and the interface.
After each successful authenticate, I call my areDone method with the
avatar id. To get a list of the successful credentials for an avatar id,
use C{SSHProcotolChecker.successfulCredentials[avatarId]}. If L{areDone}
returns True, the authentication has succeeded.
"""
implements(ICredentialsChecker)
def __init__(self):
self.checkers = {}
self.successfulCredentials = {}
def get_credentialInterfaces(self):
return self.checkers.keys()
credentialInterfaces = property(get_credentialInterfaces)
def registerChecker(self, checker, *credentialInterfaces):
if not credentialInterfaces:
credentialInterfaces = checker.credentialInterfaces
for credentialInterface in credentialInterfaces:
self.checkers[credentialInterface] = checker
def requestAvatarId(self, credentials):
"""
Part of the L{ICredentialsChecker} interface. Called by a portal with
some credentials to check if they'll authenticate a user. We check the
interfaces that the credentials provide against our list of acceptable
checkers. If one of them matches, we ask that checker to verify the
credentials. If they're valid, we call our L{_cbGoodAuthentication}
method to continue.
@param credentials: the credentials the L{Portal} wants us to verify
"""
ifac = providedBy(credentials)
for i in ifac:
c = self.checkers.get(i)
if c is not None:
d = defer.maybeDeferred(c.requestAvatarId, credentials)
return d.addCallback(self._cbGoodAuthentication,
credentials)
return defer.fail(UnhandledCredentials("No checker for %s" % \
', '.join(map(reflect.qual, ifac))))
def _cbGoodAuthentication(self, avatarId, credentials):
"""
Called if a checker has verified the credentials. We call our
L{areDone} method to see if the whole of the successful authentications
are enough. If they are, we return the avatar ID returned by the first
checker.
"""
if avatarId not in self.successfulCredentials:
self.successfulCredentials[avatarId] = []
self.successfulCredentials[avatarId].append(credentials)
if self.areDone(avatarId):
del self.successfulCredentials[avatarId]
return avatarId
else:
raise error.NotEnoughAuthentication()
def areDone(self, avatarId):
"""
Override to determine if the authentication is finished for a given
avatarId.
@param avatarId: the avatar returned by the first checker. For
this checker to function correctly, all the checkers must
return the same avatar ID.
"""
return True
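# Illustrative wiring sketch, not from Twisted itself; the combination below is
# only an assumption about how a Conch server might assemble its checkers:
#
#   checker = SSHProtocolChecker()
#   checker.registerChecker(SSHPublicKeyDatabase())
#   checker.registerChecker(UNIXPasswordDatabase())
#   # hand `checker` to the server's Portal; override areDone() to require
#   # more than one of the registered credential types to succeed.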
|
Kytoh/authpuppy | refs/heads/master | ap-node-extra-plugin/web/js/lib/build/build.py | 28 | #!/usr/bin/env python
import sys
sys.path.append("../tools")
import mergejs
have_compressor = None
try:
import jsmin
have_compressor = "jsmin"
except ImportError:
try:
import minimize
have_compressor = "minimize"
except Exception, E:
print E
pass
sourceDirectory = "../lib"
configFilename = "full.cfg"
outputFilename = "OpenLayers.js"
if len(sys.argv) > 1:
configFilename = sys.argv[1]
extension = configFilename[-4:]
if extension != ".cfg":
configFilename = sys.argv[1] + ".cfg"
if len(sys.argv) > 2:
outputFilename = sys.argv[2]
print "Merging libraries."
merged = mergejs.run(sourceDirectory, None, configFilename)
if have_compressor == "jsmin":
print "Compressing using jsmin."
minimized = jsmin.jsmin(merged)
elif have_compressor == "minimize":
print "Compressing using minimize."
minimized = minimize.minimize(merged)
else: # fallback
print "Not compressing."
minimized = merged
print "Adding license file."
minimized = file("license.txt").read() + minimized
print "Writing to %s." % outputFilename
file(outputFilename, "w").write(minimized)
print "Done."
|
gangadharkadam/letzfrappe | refs/heads/v5.0 | frappe/patches/v5_0/communication_parent.py | 74 | import frappe
def execute():
frappe.reload_doc("core", "doctype", "communication")
frappe.db.sql("""update tabCommunication set reference_doctype = parenttype, reference_name = parent""")
|
lthurlow/Network-Grapher | refs/heads/master | proj/internal/vuln-scan.py | 1 | #!/usr/bin/python
nessus = "/opt/nessus/bin/nessus"
print nessus
in_file = open("EXTENDED","r")
ip_list = []
count = 0
fcount = 0
out_file = open("TESTX","w")
for line in in_file:
if count % 31 == 0:
out_file.close()
filename = "IPS-"+str(fcount)
out_file = open(filename,"w")
fcount += 1
count = 1
else:
k = (line.strip().split(',')[2]).strip()
if (k[0:3] == "128"):
#print k
ip_list.append(k)
out_file.write(k+"\n")
count += 1
|
j0nathan33/CouchPotatoServer | refs/heads/develop | libs/enzyme/riff.py | 179 | # -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin <[email protected]>
# Copyright 2003-2006 Thomas Schueppel <[email protected]>
# Copyright 2003-2006 Dirk Meyer <[email protected]>
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see <http://www.gnu.org/licenses/>.
__all__ = ['Parser']
import os
import struct
import string
import logging
import time
from exceptions import ParseError
import core
# get logging object
log = logging.getLogger(__name__)
# List of tags
# http://kibus1.narod.ru/frames_eng.htm?sof/abcavi/infotags.htm
# http://www.divx-digest.com/software/avitags_dll.html
# File Format: google for odmlff2.pdf
AVIINFO = {
'INAM': 'title',
'IART': 'artist',
'IPRD': 'product',
'ISFT': 'software',
'ICMT': 'comment',
'ILNG': 'language',
'IKEY': 'keywords',
'IPRT': 'trackno',
'IFRM': 'trackof',
'IPRO': 'producer',
'IWRI': 'writer',
'IGNR': 'genre',
'ICOP': 'copyright'
}
# Taken from libavcodec/mpeg4data.h (pixel_aspect struct)
PIXEL_ASPECT = {
1: (1, 1),
2: (12, 11),
3: (10, 11),
4: (16, 11),
5: (40, 33)
}
class Riff(core.AVContainer):
"""
AVI parser also parsing metadata like title, languages, etc.
"""
table_mapping = { 'AVIINFO' : AVIINFO }
def __init__(self, file):
core.AVContainer.__init__(self)
# read the header
h = file.read(12)
if h[:4] != "RIFF" and h[:4] != 'SDSS':
raise ParseError()
self.has_idx = False
self.header = {}
self.junkStart = None
self.infoStart = None
self.type = h[8:12]
if self.type == 'AVI ':
self.mime = 'video/avi'
elif self.type == 'WAVE':
self.mime = 'audio/wav'
try:
while self._parseRIFFChunk(file):
pass
except IOError:
log.exception(u'error in file, stop parsing')
self._find_subtitles(file.name)
if not self.has_idx and isinstance(self, core.AVContainer):
log.debug(u'WARNING: avi has no index')
self._set('corrupt', True)
def _find_subtitles(self, filename):
"""
Search for subtitle files. Right now only VobSub is supported
"""
base = os.path.splitext(filename)[0]
if os.path.isfile(base + '.idx') and \
(os.path.isfile(base + '.sub') or os.path.isfile(base + '.rar')):
file = open(base + '.idx')
if file.readline().find('VobSub index file') > 0:
for line in file.readlines():
if line.find('id') == 0:
sub = core.Subtitle()
sub.language = line[4:6]
sub.trackno = base + '.idx' # Maybe not?
self.subtitles.append(sub)
file.close()
def _parseAVIH(self, t):
retval = {}
v = struct.unpack('<IIIIIIIIIIIIII', t[0:56])
(retval['dwMicroSecPerFrame'],
retval['dwMaxBytesPerSec'],
retval['dwPaddingGranularity'],
retval['dwFlags'],
retval['dwTotalFrames'],
retval['dwInitialFrames'],
retval['dwStreams'],
retval['dwSuggestedBufferSize'],
retval['dwWidth'],
retval['dwHeight'],
retval['dwScale'],
retval['dwRate'],
retval['dwStart'],
retval['dwLength']) = v
if retval['dwMicroSecPerFrame'] == 0:
log.warning(u'ERROR: Corrupt AVI')
raise ParseError()
return retval
def _parseSTRH(self, t):
retval = {}
retval['fccType'] = t[0:4]
log.debug(u'_parseSTRH(%r) : %d bytes' % (retval['fccType'], len(t)))
if retval['fccType'] != 'auds':
retval['fccHandler'] = t[4:8]
v = struct.unpack('<IHHIIIIIIIII', t[8:52])
(retval['dwFlags'],
retval['wPriority'],
retval['wLanguage'],
retval['dwInitialFrames'],
retval['dwScale'],
retval['dwRate'],
retval['dwStart'],
retval['dwLength'],
retval['dwSuggestedBufferSize'],
retval['dwQuality'],
retval['dwSampleSize'],
retval['rcFrame']) = v
else:
try:
v = struct.unpack('<IHHIIIIIIIII', t[8:52])
(retval['dwFlags'],
retval['wPriority'],
retval['wLanguage'],
retval['dwInitialFrames'],
retval['dwScale'],
retval['dwRate'],
retval['dwStart'],
retval['dwLength'],
retval['dwSuggestedBufferSize'],
retval['dwQuality'],
retval['dwSampleSize'],
retval['rcFrame']) = v
self.delay = float(retval['dwStart']) / \
(float(retval['dwRate']) / retval['dwScale'])
except (KeyError, IndexError, ValueError, ZeroDivisionError):
pass
return retval
def _parseSTRF(self, t, strh):
fccType = strh['fccType']
retval = {}
if fccType == 'auds':
v = struct.unpack('<HHHHHH', t[0:12])
(retval['wFormatTag'],
retval['nChannels'],
retval['nSamplesPerSec'],
retval['nAvgBytesPerSec'],
retval['nBlockAlign'],
retval['nBitsPerSample'],
) = v
ai = core.AudioStream()
ai.samplerate = retval['nSamplesPerSec']
ai.channels = retval['nChannels']
# FIXME: Bitrate calculation is completely wrong.
#ai.samplebits = retval['nBitsPerSample']
#ai.bitrate = retval['nAvgBytesPerSec'] * 8
# TODO: set code if possible
# http://www.stats.uwa.edu.au/Internal/Specs/DXALL/FileSpec/\
# Languages
# ai.language = strh['wLanguage']
ai.codec = retval['wFormatTag']
self.audio.append(ai)
elif fccType == 'vids':
v = struct.unpack('<IIIHH', t[0:16])
(retval['biSize'],
retval['biWidth'],
retval['biHeight'],
retval['biPlanes'],
retval['biBitCount']) = v
v = struct.unpack('IIIII', t[20:40])
(retval['biSizeImage'],
retval['biXPelsPerMeter'],
retval['biYPelsPerMeter'],
retval['biClrUsed'],
retval['biClrImportant']) = v
vi = core.VideoStream()
vi.codec = t[16:20]
vi.width = retval['biWidth']
vi.height = retval['biHeight']
# FIXME: Bitrate calculation is completely wrong.
#vi.bitrate = strh['dwRate']
vi.fps = float(strh['dwRate']) / strh['dwScale']
vi.length = strh['dwLength'] / vi.fps
self.video.append(vi)
return retval
def _parseSTRL(self, t):
retval = {}
size = len(t)
i = 0
while i < len(t) - 8:
key = t[i:i + 4]
sz = struct.unpack('<I', t[i + 4:i + 8])[0]
i += 8
value = t[i:]
if key == 'strh':
retval[key] = self._parseSTRH(value)
elif key == 'strf':
retval[key] = self._parseSTRF(value, retval['strh'])
else:
log.debug(u'_parseSTRL: unsupported stream tag %r', key)
i += sz
return retval, i
def _parseODML(self, t):
retval = {}
size = len(t)
i = 0
key = t[i:i + 4]
sz = struct.unpack('<I', t[i + 4:i + 8])[0]
i += 8
value = t[i:]
if key != 'dmlh':
log.debug(u'_parseODML: Error')
i += sz - 8
return (retval, i)
def _parseVPRP(self, t):
retval = {}
v = struct.unpack('<IIIIIIIIII', t[:4 * 10])
(retval['VideoFormat'],
retval['VideoStandard'],
retval['RefreshRate'],
retval['HTotalIn'],
retval['VTotalIn'],
retval['FrameAspectRatio'],
retval['wPixel'],
retval['hPixel']) = v[1:-1]
# I need an avi with more informations
# enum {FORMAT_UNKNOWN, FORMAT_PAL_SQUARE, FORMAT_PAL_CCIR_601,
# FORMAT_NTSC_SQUARE, FORMAT_NTSC_CCIR_601,...} VIDEO_FORMAT;
# enum {STANDARD_UNKNOWN, STANDARD_PAL, STANDARD_NTSC, STANDARD_SECAM}
# VIDEO_STANDARD;
#
r = retval['FrameAspectRatio']
r = float(r >> 16) / (r & 0xFFFF)
retval['FrameAspectRatio'] = r
if self.video:
map(lambda v: setattr(v, 'aspect', r), self.video)
return (retval, v[0])
def _parseLISTmovi(self, size, file):
"""
Digs into movi list, looking for a Video Object Layer header in an
mpeg4 stream in order to determine aspect ratio.
"""
i = 0
n_dc = 0
done = False
# If the VOL header doesn't appear within 5MB or 5 video chunks,
# give up. The 5MB limit is not likely to apply except in
# pathological cases.
while i < min(1024 * 1024 * 5, size - 8) and n_dc < 5:
data = file.read(8)
if ord(data[0]) == 0:
# Eat leading nulls.
data = data[1:] + file.read(1)
i += 1
key, sz = struct.unpack('<4sI', data)
if key[2:] != 'dc' or sz > 1024 * 500:
# This chunk is not video or is unusually big (> 500KB);
# skip it.
file.seek(sz, 1)
i += 8 + sz
continue
n_dc += 1
# Read video chunk into memory
data = file.read(sz)
#for p in range(0,min(80, sz)):
# print "%02x " % ord(data[p]),
#print "\n\n"
# Look through the picture header for VOL startcode. The basic
# logic for this is taken from libavcodec, h263.c
pos = 0
startcode = 0xff
def bits(v, o, n):
# Returns n bits in v, offset o bits.
return (v & 2 ** n - 1 << (64 - n - o)) >> 64 - n - o
while pos < sz:
startcode = ((startcode << 8) | ord(data[pos])) & 0xffffffff
pos += 1
if startcode & 0xFFFFFF00 != 0x100:
# No startcode found yet
continue
if startcode >= 0x120 and startcode <= 0x12F:
# We have the VOL startcode. Pull 64 bits of it and treat
# as a bitstream
v = struct.unpack(">Q", data[pos : pos + 8])[0]
offset = 10
if bits(v, 9, 1):
# is_ol_id, skip over vo_ver_id and vo_priority
offset += 7
ar_info = bits(v, offset, 4)
if ar_info == 15:
# Extended aspect
num = bits(v, offset + 4, 8)
den = bits(v, offset + 12, 8)
else:
# A standard pixel aspect
num, den = PIXEL_ASPECT.get(ar_info, (0, 0))
# num/den indicates pixel aspect; convert to video aspect,
# so we need frame width and height.
if 0 not in [num, den]:
width, height = self.video[-1].width, self.video[-1].height
self.video[-1].aspect = num / float(den) * width / height
done = True
break
startcode = 0xff
i += 8 + len(data)
if done:
# We have the aspect, no need to continue parsing the movi
# list, so break out of the loop.
break
if i < size:
# Seek past whatever might be remaining of the movi list.
file.seek(size - i, 1)
def _parseLIST(self, t):
retval = {}
i = 0
size = len(t)
while i < size - 8:
# skip zero
if ord(t[i]) == 0: i += 1
key = t[i:i + 4]
sz = 0
if key == 'LIST':
sz = struct.unpack('<I', t[i + 4:i + 8])[0]
i += 8
key = "LIST:" + t[i:i + 4]
value = self._parseLIST(t[i:i + sz])
if key == 'strl':
for k in value.keys():
retval[k] = value[k]
else:
retval[key] = value
i += sz
elif key == 'avih':
sz = struct.unpack('<I', t[i + 4:i + 8])[0]
i += 8
value = self._parseAVIH(t[i:i + sz])
i += sz
retval[key] = value
elif key == 'strl':
i += 4
(value, sz) = self._parseSTRL(t[i:])
key = value['strh']['fccType']
i += sz
retval[key] = value
elif key == 'odml':
i += 4
(value, sz) = self._parseODML(t[i:])
i += sz
elif key == 'vprp':
i += 4
(value, sz) = self._parseVPRP(t[i:])
retval[key] = value
i += sz
elif key == 'JUNK':
sz = struct.unpack('<I', t[i + 4:i + 8])[0]
i += sz + 8
else:
sz = struct.unpack('<I', t[i + 4:i + 8])[0]
i += 8
# in most cases this is some info stuff
if not key in AVIINFO.keys() and key != 'IDIT':
log.debug(u'Unknown Key: %r, len: %d' % (key, sz))
value = t[i:i + sz]
if key == 'ISFT':
# product information
if value.find('\0') > 0:
# works for Casio S500 camera videos
value = value[:value.find('\0')]
value = value.replace('\0', '').lstrip().rstrip()
value = value.replace('\0', '').lstrip().rstrip()
if value:
retval[key] = value
if key in ['IDIT', 'ICRD']:
# Timestamp the video was created. Spec says it
# should be a format like "Wed Jan 02 02:03:55 1990"
# Casio S500 uses "2005/12/24/ 14:11", but I've
# also seen "December 24, 2005"
specs = ('%a %b %d %H:%M:%S %Y', '%Y/%m/%d/ %H:%M', '%B %d, %Y')
for tmspec in specs:
try:
tm = time.strptime(value, tmspec)
# save timestamp as int
self.timestamp = int(time.mktime(tm))
break
except ValueError:
pass
else:
log.debug(u'no support for time format %r', value)
i += sz
return retval
def _parseRIFFChunk(self, file):
h = file.read(8)
if len(h) < 8:
return False
name = h[:4]
size = struct.unpack('<I', h[4:8])[0]
if name == 'LIST':
pos = file.tell() - 8
key = file.read(4)
if key == 'movi' and self.video and not self.video[-1].aspect and \
self.video[-1].width and self.video[-1].height and \
self.video[-1].format in ['DIVX', 'XVID', 'FMP4']: # any others?
# If we don't have the aspect (i.e. it isn't in odml vprp
# header), but we do know the video's dimensions, and
# we're dealing with an mpeg4 stream, try to get the aspect
# from the VOL header in the mpeg4 stream.
self._parseLISTmovi(size - 4, file)
return True
elif size > 80000:
log.debug(u'RIFF LIST %r too long to parse: %r bytes' % (key, size))
t = file.seek(size - 4, 1)
return True
elif size < 5:
log.debug(u'RIFF LIST %r too short: %r bytes' % (key, size))
return True
t = file.read(size - 4)
log.debug(u'parse RIFF LIST %r: %d bytes' % (key, size))
value = self._parseLIST(t)
self.header[key] = value
if key == 'INFO':
self.infoStart = pos
self._appendtable('AVIINFO', value)
elif key == 'MID ':
self._appendtable('AVIMID', value)
elif key == 'hdrl':
# no need to add this info to a table
pass
else:
log.debug(u'Skipping table info %r' % key)
elif name == 'JUNK':
self.junkStart = file.tell() - 8
self.junkSize = size
file.seek(size, 1)
elif name == 'idx1':
self.has_idx = True
log.debug(u'idx1: %r bytes' % size)
# no need to parse this
t = file.seek(size, 1)
elif name == 'RIFF':
log.debug(u'New RIFF chunk, extended avi [%i]' % size)
type = file.read(4)
if type != 'AVIX':
log.debug(u'Second RIFF chunk is %r, not AVIX, skipping', type)
file.seek(size - 4, 1)
# that's it, no new informations should be in AVIX
return False
elif name == 'fmt ' and size <= 50:
# This is a wav file.
data = file.read(size)
fmt = struct.unpack("<HHLLHH", data[:16])
self._set('codec', hex(fmt[0]))
self._set('samplerate', fmt[2])
# fmt[3] is average bytes per second, so we must divide it
# by 125 to get kbits per second
self._set('bitrate', fmt[3] / 125)
# ugly hack: remember original rate in bytes per second
# so that the length can be calculated in next elif block
self._set('byterate', fmt[3])
# Set a dummy fourcc so codec will be resolved in finalize.
self._set('fourcc', 'dummy')
elif name == 'data':
# XXX: this is naive and may not be right. For example if the
# stream is something that supports VBR like mp3, the value
# will be off. The only way to properly deal with this issue
# is to decode part of the stream based on its codec, but
# kaa.metadata doesn't have this capability (yet?)
# ugly hack: use original rate in bytes per second
self._set('length', size / float(self.byterate))
file.seek(size, 1)
elif not name.strip(string.printable + string.whitespace):
# check if name is something usefull at all, maybe it is no
# avi or broken
t = file.seek(size, 1)
log.debug(u'Skipping %r [%i]' % (name, size))
else:
# bad avi
log.debug(u'Bad or broken avi')
return False
return True
Parser = Riff
|
EelcoHoogendoorn/Numpy_arraysetops_EP | refs/heads/master | setup.py | 1 | from setuptools import find_packages, setup
from version import __version__
setup(
name="numpy-indexed",
packages=find_packages(),
version=__version__,
install_requires=['numpy', 'future'],
keywords="numpy group_by set-operations indexing",
description=open("README.rst").readlines()[5],
long_description=open("README.rst").read(),
author="Eelco Hoogendoorn",
author_email="[email protected]",
url="https://github.com/EelcoHoogendoorn/Numpy_arraysetops_EP",
license="Freely Distributable",
platforms='Any',
classifiers=[
"Development Status :: 4 - Beta",
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
"Topic :: Utilities",
'Topic :: Scientific/Engineering',
"License :: {}".format("Freely Distributable"),
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
],
)
|
dmlc/mxnet | refs/heads/master | python/mxnet/gluon/model_zoo/vision/squeezenet.py | 11 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable= arguments-differ
"""SqueezeNet, implemented in Gluon."""
__all__ = ['SqueezeNet', 'squeezenet1_0', 'squeezenet1_1']
import os
from ....context import cpu
from ...block import HybridBlock
from ... import nn
from ...contrib.nn import HybridConcurrent
from .... import base
# Helpers
def _make_fire(squeeze_channels, expand1x1_channels, expand3x3_channels):
out = nn.HybridSequential(prefix='')
out.add(_make_fire_conv(squeeze_channels, 1))
paths = HybridConcurrent(axis=1, prefix='')
paths.add(_make_fire_conv(expand1x1_channels, 1))
paths.add(_make_fire_conv(expand3x3_channels, 3, 1))
out.add(paths)
return out
def _make_fire_conv(channels, kernel_size, padding=0):
out = nn.HybridSequential(prefix='')
out.add(nn.Conv2D(channels, kernel_size, padding=padding))
out.add(nn.Activation('relu'))
return out
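# Note added for clarity (not in the upstream source): a fire module squeezes
# the input with a 1x1 conv, then expands through parallel 1x1 and 3x3 branches
# that are concatenated along the channel axis, so e.g. _make_fire(16, 64, 64)
# yields 64 + 64 = 128 output channels.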
# Net
class SqueezeNet(HybridBlock):
r"""SqueezeNet model from the `"SqueezeNet: AlexNet-level accuracy with 50x fewer parameters
and <0.5MB model size" <https://arxiv.org/abs/1602.07360>`_ paper.
SqueezeNet 1.1 model from the `official SqueezeNet repo
<https://github.com/DeepScale/SqueezeNet/tree/master/SqueezeNet_v1.1>`_.
SqueezeNet 1.1 has 2.4x less computation and slightly fewer parameters
than SqueezeNet 1.0, without sacrificing accuracy.
Parameters
----------
version : str
Version of squeezenet. Options are '1.0', '1.1'.
classes : int, default 1000
Number of classification classes.
"""
def __init__(self, version, classes=1000, **kwargs):
super(SqueezeNet, self).__init__(**kwargs)
assert version in ['1.0', '1.1'], ("Unsupported SqueezeNet version {version}:"
"1.0 or 1.1 expected".format(version=version))
with self.name_scope():
self.features = nn.HybridSequential(prefix='')
if version == '1.0':
self.features.add(nn.Conv2D(96, kernel_size=7, strides=2))
self.features.add(nn.Activation('relu'))
self.features.add(nn.MaxPool2D(pool_size=3, strides=2, ceil_mode=True))
self.features.add(_make_fire(16, 64, 64))
self.features.add(_make_fire(16, 64, 64))
self.features.add(_make_fire(32, 128, 128))
self.features.add(nn.MaxPool2D(pool_size=3, strides=2, ceil_mode=True))
self.features.add(_make_fire(32, 128, 128))
self.features.add(_make_fire(48, 192, 192))
self.features.add(_make_fire(48, 192, 192))
self.features.add(_make_fire(64, 256, 256))
self.features.add(nn.MaxPool2D(pool_size=3, strides=2, ceil_mode=True))
self.features.add(_make_fire(64, 256, 256))
else:
self.features.add(nn.Conv2D(64, kernel_size=3, strides=2))
self.features.add(nn.Activation('relu'))
self.features.add(nn.MaxPool2D(pool_size=3, strides=2, ceil_mode=True))
self.features.add(_make_fire(16, 64, 64))
self.features.add(_make_fire(16, 64, 64))
self.features.add(nn.MaxPool2D(pool_size=3, strides=2, ceil_mode=True))
self.features.add(_make_fire(32, 128, 128))
self.features.add(_make_fire(32, 128, 128))
self.features.add(nn.MaxPool2D(pool_size=3, strides=2, ceil_mode=True))
self.features.add(_make_fire(48, 192, 192))
self.features.add(_make_fire(48, 192, 192))
self.features.add(_make_fire(64, 256, 256))
self.features.add(_make_fire(64, 256, 256))
self.features.add(nn.Dropout(0.5))
self.output = nn.HybridSequential(prefix='')
self.output.add(nn.Conv2D(classes, kernel_size=1))
self.output.add(nn.Activation('relu'))
self.output.add(nn.AvgPool2D(13))
self.output.add(nn.Flatten())
def hybrid_forward(self, F, x):
x = self.features(x)
x = self.output(x)
return x
# Constructor
def get_squeezenet(version, pretrained=False, ctx=cpu(),
root=os.path.join(base.data_dir(), 'models'), **kwargs):
r"""SqueezeNet model from the `"SqueezeNet: AlexNet-level accuracy with 50x fewer parameters
and <0.5MB model size" <https://arxiv.org/abs/1602.07360>`_ paper.
SqueezeNet 1.1 model from the `official SqueezeNet repo
<https://github.com/DeepScale/SqueezeNet/tree/master/SqueezeNet_v1.1>`_.
SqueezeNet 1.1 has 2.4x less computation and slightly fewer parameters
than SqueezeNet 1.0, without sacrificing accuracy.
Parameters
----------
version : str
Version of squeezenet. Options are '1.0', '1.1'.
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default $MXNET_HOME/models
Location for keeping the model parameters.
"""
net = SqueezeNet(version, **kwargs)
if pretrained:
from ..model_store import get_model_file
net.load_parameters(get_model_file('squeezenet%s'%version, root=root), ctx=ctx)
return net
def squeezenet1_0(**kwargs):
r"""SqueezeNet 1.0 model from the `"SqueezeNet: AlexNet-level accuracy with 50x fewer parameters
and <0.5MB model size" <https://arxiv.org/abs/1602.07360>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '$MXNET_HOME/models'
Location for keeping the model parameters.
"""
return get_squeezenet('1.0', **kwargs)
def squeezenet1_1(**kwargs):
r"""SqueezeNet 1.1 model from the `official SqueezeNet repo
<https://github.com/DeepScale/SqueezeNet/tree/master/SqueezeNet_v1.1>`_.
SqueezeNet 1.1 has 2.4x less computation and slightly fewer parameters
than SqueezeNet 1.0, without sacrificing accuracy.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '$MXNET_HOME/models'
Location for keeping the model parameters.
"""
return get_squeezenet('1.1', **kwargs)
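# Illustrative usage sketch, not part of the model zoo source; shapes assume a
# standard 224x224 RGB input batch:
#
#   from mxnet import nd
#   net = squeezenet1_1(pretrained=False)
#   net.initialize()
#   logits = net(nd.random.uniform(shape=(1, 3, 224, 224)))   # shape (1, 1000)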
|
NL66278/odoo | refs/heads/8.0 | addons/website_mail_group/models/mail_group.py | 321 | # -*- coding: utf-8 -*-
from openerp.osv import osv
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval as eval
from openerp.addons.website.models.website import slug
class MailGroup(osv.Model):
_inherit = 'mail.group'
def message_get_email_values(self, cr, uid, id, notif_mail=None, context=None):
res = super(MailGroup, self).message_get_email_values(cr, uid, id, notif_mail=notif_mail, context=context)
group = self.browse(cr, uid, id, context=context)
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
headers = {}
if res.get('headers'):
try:
headers = eval(res['headers'])
except Exception:
pass
headers.update({
'List-Archive': '<%s/groups/%s>' % (base_url, slug(group)),
'List-Subscribe': '<%s/groups>' % (base_url),
'List-Unsubscribe': '<%s/groups?unsubscribe>' % (base_url,),
})
res['headers'] = repr(headers)
return res
class MailMail(osv.Model):
_inherit = 'mail.mail'
def send_get_mail_body(self, cr, uid, mail, partner=None, context=None):
""" Short-circuit parent method for mail groups, replace the default
footer with one appropriate for mailing-lists."""
if mail.model == 'mail.group' and mail.res_id:
# no super() call on purpose, no private links that could be quoted!
group = self.pool['mail.group'].browse(cr, uid, mail.res_id, context=context)
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
vals = {
'maillist': _('Mailing-List'),
'post_to': _('Post to'),
'unsub': _('Unsubscribe'),
'mailto': 'mailto:%s@%s' % (group.alias_name, group.alias_domain),
'group_url': '%s/groups/%s' % (base_url, slug(group)),
'unsub_url': '%s/groups?unsubscribe' % (base_url,),
}
footer = """_______________________________________________
%(maillist)s: %(group_url)s
%(post_to)s: %(mailto)s
%(unsub)s: %(unsub_url)s
""" % vals
body = tools.append_content_to_html(mail.body, footer, container_tag='div')
return body
else:
return super(MailMail, self).send_get_mail_body(cr, uid, mail,
partner=partner,
context=context)
|
stackArmor/security_monkey | refs/heads/develop | security_monkey/watchers/custom/awsinspector.py | 1 | """
.. module: security_monkey.watchers.custom.AwsInspector
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Pritam D. Gautam <[email protected]> @nuage
"""
from datetime import datetime, timedelta
from security_monkey import app
from security_monkey.decorators import iter_account_region, record_exception
from security_monkey.watcher import ChangeItem
from security_monkey.watcher import Watcher
class AwsInspector(Watcher):
index = 'awsinspector'
i_am_singular = 'AWS Inspector Issue'
i_am_plural = 'AWS Inspector Issues'
honor_ephemerals = False
def __init__(self, accounts=None, debug=False):
super(AwsInspector, self).__init__(accounts=accounts, debug=debug)
@record_exception()
def list_findings(self, **kwargs):
from security_monkey.common.sts_connect import connect
response_items = []
inspector = connect(kwargs['account_name'], 'boto3.inspector.client', region=kwargs['region'],
assumed_role=kwargs['assumed_role'])
next_token = None
begin_date = datetime.today() - timedelta(days=90)
while True:
if next_token:
response = self.wrap_aws_rate_limited_call(
inspector.list_findings,
nextToken=next_token,
filter={
'creationTimeRange': {
'beginDate': begin_date,
}
}
)
else:
response = self.wrap_aws_rate_limited_call(
inspector.list_findings,
filter={
'creationTimeRange': {
'beginDate': begin_date,
}
}
)
findings = response.get('findingArns')
if findings:
response = self.wrap_aws_rate_limited_call(
inspector.describe_findings,
findingArns=findings,
)
response_items.extend(response.get('findings'))
if response.get('nextToken'):
next_token = response.get('nextToken')
else:
break
return response_items
def slurp(self):
self.prep_for_slurp()
@iter_account_region(index=self.index, accounts=self.accounts, service_name='inspector')
def slurp_items(**kwargs):
item_list = []
exception_map = {}
kwargs['exception_map'] = exception_map
app.logger.debug("Checking {}/{}/{}".format(self.index,
kwargs['account_name'], kwargs['region']))
findings = self.list_findings(**kwargs)
if findings:
for finding in findings:
name = None
if finding.get('Tags') is not None:
for tag in finding.get('Tags'):
if tag['Key'] == 'Name':
name = tag['Value']
break
if name is None:
name = finding.get('title')
if self.check_ignore_list(name):
continue
config = finding
# Converting Date to String as getting the following error while inserting data.
# sqlalchemy.exc.StatementError: datetime.datetime(2018, 8, 18, 17, 23, 15, 413000, tzinfo=tzlocal())
# is not JSON serializable (original cause: TypeError: datetime.datetime(2018, 8, 18, 17, 23, 15, 413000,
# tzinfo=tzlocal()) is not JSON serializable)
config['createdAt'] = str(config['createdAt'])
config['updatedAt'] = str(config['updatedAt'])
item = InspectorItem(region=kwargs['region'],
account=kwargs['account_name'],
name=name,
arn=config.get('arn'),
config=config)
item_list.append(item)
return item_list, exception_map
return slurp_items()
class InspectorItem(ChangeItem):
def __init__(self, account=None, region='Unknown', name=None, arn=None, config=None, audit_issues=None):
super(InspectorItem, self).__init__(
index=AwsInspector.index,
region=region,
account=account,
name=name,
arn=arn,
audit_issues=audit_issues,
new_config=config if config else {},
)
|
heladio/my-blog | refs/heads/master | pelica-env/lib/python2.7/sre.py | 4 | /usr/lib/python2.7/sre.py |
RobertWWong/WebDev | refs/heads/master | djangoApp/ENV/lib/python3.5/site-packages/django/conf/locale/pl/formats.py | 504 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j E Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j E Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd-m-Y'
SHORT_DATETIME_FORMAT = 'd-m-Y H:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
'%y-%m-%d', # '06-10-25'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
]
DATETIME_INPUT_FORMATS = [
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = ' '
NUMBER_GROUPING = 3
|
anhstudios/swganh | refs/heads/develop | data/scripts/templates/object/mobile/shared_voritor_dasher.py | 2 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_voritor_dasher.iff"
result.attribute_template_id = 9
result.stfName("monster_name","voritor_lizard")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result |
typhainepl/genome3D | refs/heads/master | update_databases/scope_update.py | 1 | #!/usr/bin/env python
import urllib2
import os
import cx_Oracle
import re
import ConfigParser
import sys
import time
sys.path.insert(0,'/Users/typhaine/Desktop/genome3D/config/')
sys.path.insert(0,'/nfs/msd/work2/typhaine/genome3D/config/')
from config import dosql
configdata = ConfigParser.RawConfigParser()
configdata.read([os.path.expanduser('~/Desktop/genome3D/config/db.cfg'), '/nfs/msd/work2/typhaine/genome3D/config/db.cfg'])
#Connexion to PDBE_TEST database
PDBEUSER=configdata.get('Global', 'pdbeUser')
PDBEPASS=configdata.get('Global', 'pdbePass')
PDBEHOST=configdata.get('Global', 'pdbeHost')
pdbeconnection = cx_Oracle.connect(PDBEUSER+'/'+PDBEPASS+'@'+PDBEHOST)
pdbecursor = pdbeconnection.cursor()
# schema='SIFTS_ADMIN'
tables=['SCOPE_DESCRIPTION','SCOPE_COMMENT','SCOPE_HIERARCHY','SCOPE_CLASS']
# REPO='https://scop.berkeley.edu/downloads/parse/'
# VERSION='2.06'
#
# DESC_REPO=REPO+'dir.des.scope.'+VERSION+'-stable.txt'
# CLASS_REPO=REPO+'dir.cla.scope.'+VERSION+'-stable.txt'
# HIERARCHY_REPO=REPO+'dir.hie.scope.'+VERSION+'-stable.txt'
# COMMENTS_REPO=REPO+'dir.com.scope.'+VERSION+'-stable.txt'
# NEW UPDATE OF SCOPE (06/04/2017)
REPO='https://scop.berkeley.edu/downloads/update/'
UPDATE = '-2017-04-06'
VERSION='2.06'
DESC_REPO=REPO+'dir.des.scope.'+VERSION+UPDATE+'.txt'
CLASS_REPO=REPO+'dir.cla.scope.'+VERSION+UPDATE+'.txt'
HIERARCHY_REPO=REPO+'dir.hie.scope.'+VERSION+UPDATE+'.txt'
COMMENTS_REPO=REPO+'dir.com.scope.'+VERSION+UPDATE+'.txt'
TMP='scop_tmp'
t_description='CREATE TABLE SCOPE_DESCRIPTION_NEW ( \
"SUNID" NUMBER(38,0) NOT NULL ENABLE, \
"ENTRY_TYPE" VARCHAR2(2 BYTE), \
"SCCS" VARCHAR2(20 BYTE), \
"SCOP_ID" VARCHAR2(8 BYTE), \
"DESCRIPTION" VARCHAR2(240 BYTE) \
)'
comment='CREATE TABLE SCOPE_COMMENT_NEW ( \
"SUNID" NUMBER(38,0) NOT NULL ENABLE, \
"ORDINAL" NUMBER(38,0) NOT NULL ENABLE, \
"COMMENT_TEXT" VARCHAR2(2000 BYTE) \
)'
hierarchy='CREATE TABLE SCOPE_HIERARCHY_NEW ( \
"SUNID" NUMBER(38,0) NOT NULL ENABLE, \
"PARENT_ID" NUMBER(38,0), \
"CHILDS_IDS" CLOB \
)'
classtable='CREATE TABLE SCOPE_CLASS_NEW ( \
"SCOP_ID" VARCHAR2(8 BYTE), \
"ENTRY" VARCHAR2(4 BYTE), \
"ORDINAL" NUMBER(38,0) NOT NULL ENABLE, \
"AUTH_ASYM_ID" VARCHAR2(4 BYTE), \
"BEG_SEQ" NUMBER(38,0), \
"BEG_INS_CODE" VARCHAR2(1 BYTE), \
"END_SEQ" NUMBER(38,0), \
"END_INS_CODE" VARCHAR2(1 BYTE), \
"SCCS" VARCHAR2(20 BYTE), \
"SUNID" NUMBER(38,0) NOT NULL ENABLE, \
"CLASS_ID" NUMBER(38,0), \
"FOLD_ID" NUMBER(38,0), \
"SUPERFAMILY_ID" NUMBER(38,0), \
"FAMILY_ID" NUMBER(38,0), \
"DOMAIN_ID" NUMBER(38,0) NOT NULL ENABLE, \
"SPECIES_ID" NUMBER(38,0), \
"PROTEIN_ID" NUMBER(38,0) \
)'
def clean_tmp(path):
os.system('rm -Rf '+path)
os.system('mkdir '+path)
def get_filename(url,path):
return path+'/'+url.split('/')[-1]
def download_file(url,path):
req=urllib2.Request(url)
filename=get_filename(url,path)
try:
data=urllib2.urlopen(req)
f=open(filename,'w')
f.write(data.read())
f.close()
except Exception, e:
print e
return filename
### MAIN program ###
#get current date
datestart = time.strftime("%d/%m/%Y at %H:%M:%S")
print "##### SCOPE update started %s #####" %(datestart)
# clean the temporary directory
clean_tmp(TMP)
#drop old tables and create new ones
for t in tables:
if not dosql(pdbecursor,'DROP TABLE '+t+'_NEW'):
pdbecursor.close()
pdbeconnection.close()
sys.exit(-1)
# if not dosql(pdbecursor,'ALTER TABLE '+t+'_NEW rename to '+t+'_OLD'):
# pdbecursor.close()
# pdbeconnection.close()
# sys.exit(-1)
pdbeconnection.commit()
for t in [t_description,comment,hierarchy,classtable]:
if not dosql(pdbecursor,t):
pdbecursor.close()
pdbeconnection.close()
sys.exit(-1)
pdbeconnection.commit()
## description ##
print "insert data into %s_NEW table" % (tables[0])
#download new data
desc=download_file(DESC_REPO,TMP)
fdesc=open(desc)
desc_list=[]
for row in fdesc.readlines():
if row[0]=='#':
continue
sunid,entry_type,sccs,scop_id,description=row.strip().split(None,4)
obj=(sunid,entry_type,sccs,scop_id,description)
desc_list.append(obj)
fdesc.close()
pdbecursor.executemany('INSERT INTO %s VALUES(:1,:2,:3,:4,:5)' % (tables[0]+'_NEW'),desc_list)
pdbeconnection.commit()
## comments ##
print "insert data into %s_NEW table" % (tables[1])
#download new data
comments=download_file(COMMENTS_REPO,TMP)
fcomments=open(comments)
comments_list=[]
for row in fcomments.readlines():
if row[0]=='#':
continue
sunid,ordinal,comment_text=row.strip().split(None,2)
comment_text=comment_text.split('!')
for (comment,ordinal) in zip(comment_text,range(len(comment_text))):
obj=(sunid,ordinal+1,comment.lstrip())
comments_list.append(obj)
fcomments.close()
pdbecursor.executemany('INSERT INTO %s VALUES(:1,:2,:3)' % (tables[1]+'_NEW'),comments_list)
pdbeconnection.commit()
## hierarchy ##
print "insert data into %s_NEW table" % (tables[2])
#download new data
hierarchy=download_file(HIERARCHY_REPO,TMP)
fhierarchy=open(hierarchy)
hierarchy_list=[]
for row in fhierarchy.readlines():
if row[0]=='#':
continue
sunid,parent_id,childs=row.strip().split(None,2)
if parent_id == '-':
parent_id = None
obj=(sunid,parent_id,childs)
hierarchy_list.append(obj)
fhierarchy.close()
inputsizes=[None] * 3
inputsizes[2]=cx_Oracle.CLOB
i = 0
# The database can't cope with the CLOBs (it hangs), so insert them 100 rows at a time...
while i < len(hierarchy_list):
pdbecursor.setinputsizes(*inputsizes)
pdbecursor.executemany('INSERT INTO %s VALUES(:1,:2,:3)' % (tables[2]+'_NEW'),hierarchy_list[i:i+100])
i+=100
pdbeconnection.commit()
## classification ##
print "insert data into %s_NEW table" % (tables[3])
#download new data
classification=download_file(CLASS_REPO,TMP)
fclass=open(classification)
class_list=[]
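# Each data line of the dir.cla file packs the whole classification of one domain.
# Illustrative example (hypothetical line, not copied from the SCOPe release):
#   d1dlwa_ 1dlw A:1-116 a.1.1.1 14982 cl=46456,cf=46457,sf=46458,fa=46459,dm=46460,sp=46461,px=14982
# i.e. scop_id, PDB entry, chain:residue range, sccs, sunid and the seven
# comma-separated hierarchy ids that the loop below splits on '='.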
for row in fclass.readlines():
if row[0]=='#':
continue
scop_id,entry_id,asym_id_range,sccs,sunid,classes=row.strip().split(None,5)
ordinal=1
if ',' in asym_id_range:
chains=asym_id_range.split(',')
else:
chains=[asym_id_range]
for c in chains:
auth_asym_id='-'
begin=None
end=None
beg_ins_code=None
end_ins_code=None
if c != '-':
auth_asym_id=c.split(':')[0]
if '-' in c:
limit = c.split(':')[1]
m = re.match(r"(-?\d+[A-Z]?)-(-?\d+[A-Z]?)",limit)
begin = m.group(1)
end = m.group(2)
if begin[-1].isalpha():
beg_ins_code=begin[-1]
begin=begin[:-1]
if end[-1].isalpha():
end_ins_code=end[-1]
end=end[:-1]
classes_split=classes.split(',')
obj = (scop_id,entry_id,ordinal,auth_asym_id,begin,beg_ins_code,end,end_ins_code,sccs,sunid,
classes_split[0].split('=')[1],
classes_split[1].split('=')[1],
classes_split[2].split('=')[1],
classes_split[3].split('=')[1],
classes_split[4].split('=')[1],
classes_split[5].split('=')[1],
classes_split[6].split('=')[1]
)
class_list.append(obj)
ordinal+=1
fclass.close()
pdbecursor.executemany('INSERT INTO %s VALUES(:1,:2,:3,:4,:5,:6,:7,:8,:9,:10,:11,:12,:13,:14,:15,:16,:17)' % (tables[3]+'_NEW'),class_list)
pdbeconnection.commit()
SQL="drop table " + tables[0] +";\
drop table " + tables[1] +";\
drop table " + tables[2] +";\
drop table " + tables[3] +";\
alter table " + tables[0] + "_NEW rename to " + tables[0] + ";\
alter table " + tables[1] + "_NEW rename to " + tables[1] + ";\
alter table " + tables[2] + "_NEW rename to " + tables[2] + ";\
alter table " + tables[3] + "_NEW rename to " + tables[3] + ";\
commit;"
# CREATE INDEX scop_class_entry_auth ON SCOPE_CLASS(entry,auth_asym_id) tablespace SIFTS_ADMIN_I;\
# CREATE INDEX scop_class_entry_auth_id ON SCOPE_CLASS(entry,auth_asym_id,scop_id) tablespace SIFTS_ADMIN_I;\
# CREATE INDEX scop_class_id ON SCOPE_CLASS(scop_id) tablespace SIFTS_ADMIN_I;\
# CREATE INDEX scop_desc_id ON SCOPE_DESCRIPTION(scop_id) tablespace SIFTS_ADMIN_I;\
# commit;"
for command in SQL.split(';')[:-1]:
if not dosql(pdbecursor,command):
pdbecursor.close()
pdbeconnection.close()
sys.exit(-1)
pdbeconnection.commit()
print "End update SCOPE\n"
# print "Description: %d" % len(desc_list)
# print "Comments: %d" % len(comments_list)
# print "Hierarchy: %d" % len(hierarchy_list)
# print "Class: %d" % len(class_list)
pdbecursor.close()
pdbeconnection.close()
|
minlexx/pyevemon | refs/heads/master | esi_client/models/put_fleets_fleet_id_members_member_id_internal_server_error.py | 1 | # coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online
OpenAPI spec version: 0.4.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class PutFleetsFleetIdMembersMemberIdInternalServerError(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, error=None):
"""
PutFleetsFleetIdMembersMemberIdInternalServerError - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'error': 'str'
}
self.attribute_map = {
'error': 'error'
}
self._error = error
@property
def error(self):
"""
Gets the error of this PutFleetsFleetIdMembersMemberIdInternalServerError.
Internal server error message
:return: The error of this PutFleetsFleetIdMembersMemberIdInternalServerError.
:rtype: str
"""
return self._error
@error.setter
def error(self, error):
"""
Sets the error of this PutFleetsFleetIdMembersMemberIdInternalServerError.
Internal server error message
:param error: The error of this PutFleetsFleetIdMembersMemberIdInternalServerError.
:type: str
"""
self._error = error
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, PutFleetsFleetIdMembersMemberIdInternalServerError):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
DongliangGao/pydec | refs/heads/master | pydec/io/tests/test_arrayio.py | 6 | from pydec.testing import *
from scipy import arange, prod, reshape, rand, random, allclose, rank, zeros
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix
from pydec.io.arrayio import write_array, read_array
#TODO replace with tempfile
filename = '/tmp/pydec_arrayio_testfile.dat'
class TestArrayIO():
def setUp(self):
random.seed(0) #make tests repeatable
def tearDown(self):
import os
os.remove(filename)
def test_dense(self):
sizes = [(2,2),(3,3),(5,1),(1,5)]
sizes += [(2,2,2),(4,3,2),(1,1,5),(1,5,1),(5,1,1)]
for dims in sizes:
mats = [arange(prod(dims)).reshape(dims),rand(*dims)]
for A in mats:
formats = ['binary','ascii']
if rank(A) <= 2: formats.append('basic') #use basic when possible
for format in formats:
write_array(filename,A,format=format)
B = read_array(filename)
assert_almost_equal(A,B,decimal=12)
def test_sparse(self):
sizes = [(2,2),(3,3),(1,10),(10,1),(10,10)]
for dims in sizes:
base_mats = []
base_mats.append((rand(*dims) < 0.5)*rand(*dims)) #random matrix with 50% nnz
base_mats.append(zeros(dims)) #empty matrix
base_mats.append(arange(prod(dims)).reshape(dims))
mats = []
for base_mat in base_mats:
mats.append(csr_matrix(base_mat))
mats.append(csc_matrix(base_mat))
mats.append(coo_matrix(base_mat))
for A in mats:
formats = ['binary','ascii']
for format in formats:
write_array(filename,A,format=format)
B = read_array(filename)
assert_almost_equal(A.todense(),B.todense(),decimal=12)
assert_equal(type(A),type(B))
|
jendap/tensorflow | refs/heads/master | tensorflow/python/ops/signal/dct_ops.py | 10 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Discrete Cosine Transform ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math as _math
from tensorflow.python.framework import dtypes as _dtypes
from tensorflow.python.framework import ops as _ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops as _array_ops
from tensorflow.python.ops import math_ops as _math_ops
from tensorflow.python.ops.signal import fft_ops
from tensorflow.python.util.tf_export import tf_export
def _validate_dct_arguments(input_tensor, dct_type, n, axis, norm):
"""Checks that DCT/IDCT arguments are compatible and well formed."""
if n is not None:
raise NotImplementedError("The DCT length argument is not implemented.")
if axis != -1:
raise NotImplementedError("axis must be -1. Got: %s" % axis)
if dct_type not in (1, 2, 3):
raise ValueError("Only Types I, II and III (I)DCT are supported.")
if dct_type == 1:
if norm == "ortho":
raise ValueError("Normalization is not supported for the Type-I DCT.")
if input_tensor.shape[-1] is not None and input_tensor.shape[-1] < 2:
raise ValueError(
"Type-I DCT requires the dimension to be greater than one.")
if norm not in (None, "ortho"):
raise ValueError(
"Unknown normalization. Expected None or 'ortho', got: %s" % norm)
# TODO(rjryan): Implement `n` and `axis` parameters.
@tf_export("signal.dct", v1=["signal.dct", "spectral.dct"])
def dct(input, type=2, n=None, axis=-1, norm=None, name=None): # pylint: disable=redefined-builtin
"""Computes the 1D [Discrete Cosine Transform (DCT)][dct] of `input`.
Currently only Types I, II and III are supported.
Type I is implemented using a length `2N` padded `tf.spectral.rfft`.
Type II is implemented using a length `2N` padded `tf.spectral.rfft`, as
described here:
https://dsp.stackexchange.com/a/10606.
Type III is a fairly straightforward inverse of Type II
(i.e. using a length `2N` padded `tf.spectral.irfft`).
@compatibility(scipy)
Equivalent to scipy.fftpack.dct for Type-I, Type-II and Type-III DCT.
https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.fftpack.dct.html
@end_compatibility
Args:
input: A `[..., samples]` `float32` `Tensor` containing the signals to
take the DCT of.
type: The DCT type to perform. Must be 1, 2 or 3.
n: For future expansion. The length of the transform. Must be `None`.
axis: For future expansion. The axis to compute the DCT along. Must be `-1`.
norm: The normalization to apply. `None` for no normalization or `'ortho'`
for orthonormal normalization.
name: An optional name for the operation.
Returns:
A `[..., samples]` `float32` `Tensor` containing the DCT of `input`.
Raises:
ValueError: If `type` is not `1`, `2` or `3`, `n` is not `None, `axis` is
not `-1`, or `norm` is not `None` or `'ortho'`.
ValueError: If `type` is `1` and `norm` is `ortho`.
[dct]: https://en.wikipedia.org/wiki/Discrete_cosine_transform
"""
_validate_dct_arguments(input, type, n, axis, norm)
with _ops.name_scope(name, "dct", [input]):
# We use the RFFT to compute the DCT and TensorFlow only supports float32
# for FFTs at the moment.
input = _ops.convert_to_tensor(input, dtype=_dtypes.float32)
axis_dim = (tensor_shape.dimension_value(input.shape[-1])
or _array_ops.shape(input)[-1])
axis_dim_float = _math_ops.to_float(axis_dim)
if type == 1:
dct1_input = _array_ops.concat([input, input[..., -2:0:-1]], axis=-1)
dct1 = _math_ops.real(fft_ops.rfft(dct1_input))
return dct1
if type == 2:
scale = 2.0 * _math_ops.exp(
_math_ops.complex(
0.0, -_math_ops.range(axis_dim_float) * _math.pi * 0.5 /
axis_dim_float))
# TODO(rjryan): Benchmark performance and memory usage of the various
# approaches to computing a DCT via the RFFT.
dct2 = _math_ops.real(
fft_ops.rfft(
input, fft_length=[2 * axis_dim])[..., :axis_dim] * scale)
if norm == "ortho":
n1 = 0.5 * _math_ops.rsqrt(axis_dim_float)
n2 = n1 * _math_ops.sqrt(2.0)
# Use tf.pad to make a vector of [n1, n2, n2, n2, ...].
weights = _array_ops.pad(
_array_ops.expand_dims(n1, 0), [[0, axis_dim - 1]],
constant_values=n2)
dct2 *= weights
return dct2
elif type == 3:
if norm == "ortho":
n1 = _math_ops.sqrt(axis_dim_float)
n2 = n1 * _math_ops.sqrt(0.5)
# Use tf.pad to make a vector of [n1, n2, n2, n2, ...].
weights = _array_ops.pad(
_array_ops.expand_dims(n1, 0), [[0, axis_dim - 1]],
constant_values=n2)
input *= weights
else:
input *= axis_dim_float
scale = 2.0 * _math_ops.exp(
_math_ops.complex(
0.0,
_math_ops.range(axis_dim_float) * _math.pi * 0.5 /
axis_dim_float))
dct3 = _math_ops.real(
fft_ops.irfft(
scale * _math_ops.complex(input, 0.0),
fft_length=[2 * axis_dim]))[..., :axis_dim]
return dct3
# TODO(rjryan): Implement `n` and `axis` parameters.
@tf_export("signal.idct", v1=["signal.idct", "spectral.idct"])
def idct(input, type=2, n=None, axis=-1, norm=None, name=None): # pylint: disable=redefined-builtin
"""Computes the 1D [Inverse Discrete Cosine Transform (DCT)][idct] of `input`.
Currently only Types I, II and III are supported. Type III is the inverse of
Type II, and vice versa.
Note that you must re-normalize by 1/(2n) to obtain an inverse if `norm` is
not `'ortho'`. That is:
`signal == idct(dct(signal)) * 0.5 / signal.shape[-1]`.
When `norm='ortho'`, we have:
`signal == idct(dct(signal, norm='ortho'), norm='ortho')`.
@compatibility(scipy)
Equivalent to scipy.fftpack.idct for Type-I, Type-II and Type-III DCT.
https://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.fftpack.idct.html
@end_compatibility
Args:
input: A `[..., samples]` `float32` `Tensor` containing the signals to take
the DCT of.
type: The IDCT type to perform. Must be 1, 2 or 3.
n: For future expansion. The length of the transform. Must be `None`.
axis: For future expansion. The axis to compute the DCT along. Must be `-1`.
norm: The normalization to apply. `None` for no normalization or `'ortho'`
for orthonormal normalization.
name: An optional name for the operation.
Returns:
A `[..., samples]` `float32` `Tensor` containing the IDCT of `input`.
Raises:
ValueError: If `type` is not `1`, `2` or `3`, `n` is not `None, `axis` is
not `-1`, or `norm` is not `None` or `'ortho'`.
[idct]:
https://en.wikipedia.org/wiki/Discrete_cosine_transform#Inverse_transforms
"""
_validate_dct_arguments(input, type, n, axis, norm)
inverse_type = {1: 1, 2: 3, 3: 2}[type]
return dct(input, type=inverse_type, n=n, axis=axis, norm=norm, name=name)
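if __name__ == "__main__":
  # Illustrative round-trip check, not part of the original module: it exercises
  # the 1/(2N) renormalization described in the `idct` docstring for norm=None.
  # Assumes a TF 1.x graph-mode runtime; the shape, values and tolerance are
  # arbitrary choices made for this demonstration only.
  import numpy as np
  from tensorflow.python.client import session as _session_lib
  _n = 8
  _signal = _ops.convert_to_tensor(np.random.randn(4, _n).astype(np.float32))
  # DCT-II followed by its inverse (Type-III DCT), rescaled by 0.5 / N.
  _roundtrip = idct(dct(_signal)) * 0.5 / _n
  with _session_lib.Session() as _sess:
    _orig, _back = _sess.run([_signal, _roundtrip])
  assert np.allclose(_orig, _back, atol=1e-3)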
|
ryfeus/lambda-packs | refs/heads/master | Sklearn_scipy_numpy/source/scipy/linalg/_cython_wrapper_generators.py | 51 | """
Code generator script to make the Cython BLAS and LAPACK wrappers
from the files "cython_blas_signatures.txt" and
"cython_lapack_signatures.txt" which contain the signatures for
all the BLAS/LAPACK routines that should be included in the wrappers.
"""
from operator import itemgetter
fortran_types = {'int': 'integer',
'c': 'complex',
'd': 'double precision',
's': 'real',
'z': 'complex*16',
'char': 'character',
'bint': 'logical'}
c_types = {'int': 'int',
'c': 'npy_complex64',
'd': 'double',
's': 'float',
'z': 'npy_complex128',
'char': 'char',
'bint': 'int',
'cselect1': '_cselect1',
'cselect2': '_cselect2',
'dselect2': '_dselect2',
'dselect3': '_dselect3',
'sselect2': '_sselect2',
'sselect3': '_sselect3',
'zselect1': '_zselect1',
'zselect2': '_zselect2'}
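# The helper below splits a comma-separated C argument string into parallel
# tuples of types and names. Illustrative example (hypothetical arguments, not
# taken from the signature files): "int *n, d *sx, int *incx" yields the type
# tuple ('int', 'd', 'int') and the name tuple ('n', 'sx', 'incx').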
def arg_names_and_types(args):
return zip(*[arg.split(' *') for arg in args.split(', ')])
pyx_func_template = """
cdef extern from "{header_name}":
void _fortran_{name} "F_FUNC({name}wrp, {upname}WRP)"({ret_type} *out, {fort_args}) nogil
cdef {ret_type} {name}({args}) nogil:
cdef {ret_type} out
_fortran_{name}(&out, {argnames})
return out
"""
npy_types = {'c': 'npy_complex64', 'z': 'npy_complex128',
'cselect1': '_cselect1', 'cselect2': '_cselect2',
'dselect2': '_dselect2', 'dselect3': '_dselect3',
'sselect2': '_sselect2', 'sselect3': '_sselect3',
'zselect1': '_zselect1', 'zselect2': '_zselect2'}
def arg_casts(arg):
if arg in ['npy_complex64', 'npy_complex128', '_cselect1', '_cselect2',
'_dselect2', '_dselect3', '_sselect2', '_sselect3',
'_zselect1', '_zselect2']:
return '<{0}*>'.format(arg)
return ''
def pyx_decl_func(name, ret_type, args, header_name):
argtypes, argnames = arg_names_and_types(args)
# Fix the case where one of the arguments has the same name as the
# abbreviation for the argument type.
    # Otherwise the variable passed as an argument is considered to overwrite
# the previous typedef and Cython compilation fails.
if ret_type in argnames:
argnames = [n if n != ret_type else ret_type + '_' for n in argnames]
argnames = [n if n not in ['lambda', 'in'] else n + '_'
for n in argnames]
args = ', '.join([' *'.join([n, t])
for n, t in zip(argtypes, argnames)])
argtypes = [npy_types.get(t, t) for t in argtypes]
fort_args = ', '.join([' *'.join([n, t])
for n, t in zip(argtypes, argnames)])
argnames = [arg_casts(t) + n for n, t in zip(argnames, argtypes)]
argnames = ', '.join(argnames)
c_ret_type = c_types[ret_type]
args = args.replace('lambda', 'lambda_')
return pyx_func_template.format(name=name, upname=name.upper(), args=args,
fort_args=fort_args, ret_type=ret_type,
c_ret_type=c_ret_type, argnames=argnames,
header_name=header_name)
pyx_sub_template = """cdef extern from "{header_name}":
void _fortran_{name} "F_FUNC({name},{upname})"({fort_args}) nogil
cdef void {name}({args}) nogil:
_fortran_{name}({argnames})
"""
def pyx_decl_sub(name, args, header_name):
argtypes, argnames = arg_names_and_types(args)
argtypes = [npy_types.get(t, t) for t in argtypes]
argnames = [n if n not in ['lambda', 'in'] else n + '_' for n in argnames]
fort_args = ', '.join([' *'.join([n, t])
for n, t in zip(argtypes, argnames)])
argnames = [arg_casts(t) + n for n, t in zip(argnames, argtypes)]
argnames = ', '.join(argnames)
args = args.replace('*lambda,', '*lambda_,').replace('*in,', '*in_,')
return pyx_sub_template.format(name=name, upname=name.upper(),
args=args, fort_args=fort_args,
argnames=argnames, header_name=header_name)
blas_pyx_preamble = '''# cython: boundscheck = False
# cython: wraparound = False
# cython: cdivision = True
"""
BLAS Functions for Cython
=========================
Usable from Cython via::
cimport scipy.linalg.cython_blas
These wrappers do not check for alignment of arrays.
Alignment should be checked before these wrappers are used.
Raw function pointers (Fortran-style pointer arguments):
- {}
"""
# Within scipy, these wrappers can be used via relative or absolute cimport.
# Examples:
# from ..linalg cimport cython_blas
# from scipy.linalg cimport cython_blas
# cimport scipy.linalg.cython_blas as cython_blas
# cimport ..linalg.cython_blas as cython_blas
# Within scipy, if BLAS functions are needed in C/C++/Fortran,
# these wrappers should not be used.
# The original libraries should be linked directly.
cdef extern from "fortran_defs.h":
pass
from numpy cimport npy_complex64, npy_complex128
'''
def make_blas_pyx_preamble(all_sigs):
names = [sig[0] for sig in all_sigs]
return blas_pyx_preamble.format("\n- ".join(names))
lapack_pyx_preamble = '''"""
LAPACK functions for Cython
===========================
Usable from Cython via::
cimport scipy.linalg.cython_lapack
This module provides Cython-level wrappers for all primary routines included
in LAPACK 3.1.0 except for ``zcgesv`` since its interface is not consistent
from LAPACK 3.1.0 to 3.6.0. It also provides some of the
fixed-api auxiliary routines.
These wrappers do not check for alignment of arrays.
Alignment should be checked before these wrappers are used.
Raw function pointers (Fortran-style pointer arguments):
- {}
"""
# Within scipy, these wrappers can be used via relative or absolute cimport.
# Examples:
# from ..linalg cimport cython_lapack
# from scipy.linalg cimport cython_lapack
# cimport scipy.linalg.cython_lapack as cython_lapack
# cimport ..linalg.cython_lapack as cython_lapack
# Within scipy, if LAPACK functions are needed in C/C++/Fortran,
# these wrappers should not be used.
# The original libraries should be linked directly.
cdef extern from "fortran_defs.h":
pass
from numpy cimport npy_complex64, npy_complex128
cdef extern from "_lapack_subroutines.h":
# Function pointer type declarations for
# gees and gges families of functions.
ctypedef bint _cselect1(npy_complex64*)
ctypedef bint _cselect2(npy_complex64*, npy_complex64*)
ctypedef bint _dselect2(d*, d*)
ctypedef bint _dselect3(d*, d*, d*)
ctypedef bint _sselect2(s*, s*)
ctypedef bint _sselect3(s*, s*, s*)
ctypedef bint _zselect1(npy_complex128*)
ctypedef bint _zselect2(npy_complex128*, npy_complex128*)
'''
def make_lapack_pyx_preamble(all_sigs):
names = [sig[0] for sig in all_sigs]
return lapack_pyx_preamble.format("\n- ".join(names))
blas_py_wrappers = """
# Python-accessible wrappers for testing:
cdef inline bint _is_contiguous(double[:,:] a, int axis) nogil:
return (a.strides[axis] == sizeof(a[0,0]) or a.shape[axis] == 1)
cpdef float complex _test_cdotc(float complex[:] cx, float complex[:] cy) nogil:
cdef:
int n = cx.shape[0]
int incx = cx.strides[0] // sizeof(cx[0])
int incy = cy.strides[0] // sizeof(cy[0])
return cdotc(&n, &cx[0], &incx, &cy[0], &incy)
cpdef float complex _test_cdotu(float complex[:] cx, float complex[:] cy) nogil:
cdef:
int n = cx.shape[0]
int incx = cx.strides[0] // sizeof(cx[0])
int incy = cy.strides[0] // sizeof(cy[0])
return cdotu(&n, &cx[0], &incx, &cy[0], &incy)
cpdef double _test_dasum(double[:] dx) nogil:
cdef:
int n = dx.shape[0]
int incx = dx.strides[0] // sizeof(dx[0])
return dasum(&n, &dx[0], &incx)
cpdef double _test_ddot(double[:] dx, double[:] dy) nogil:
cdef:
int n = dx.shape[0]
int incx = dx.strides[0] // sizeof(dx[0])
int incy = dy.strides[0] // sizeof(dy[0])
return ddot(&n, &dx[0], &incx, &dy[0], &incy)
cpdef int _test_dgemm(double alpha, double[:,:] a, double[:,:] b, double beta,
double[:,:] c) nogil except -1:
cdef:
char *transa
char *transb
int m, n, k, lda, ldb, ldc
double *a0=&a[0,0]
double *b0=&b[0,0]
double *c0=&c[0,0]
# In the case that c is C contiguous, swap a and b and
# swap whether or not each of them is transposed.
# This can be done because a.dot(b) = b.T.dot(a.T).T.
if _is_contiguous(c, 1):
if _is_contiguous(a, 1):
transb = 'n'
ldb = (&a[1,0]) - a0 if a.shape[0] > 1 else 1
elif _is_contiguous(a, 0):
transb = 't'
ldb = (&a[0,1]) - a0 if a.shape[1] > 1 else 1
else:
with gil:
raise ValueError("Input 'a' is neither C nor Fortran contiguous.")
if _is_contiguous(b, 1):
transa = 'n'
lda = (&b[1,0]) - b0 if b.shape[0] > 1 else 1
elif _is_contiguous(b, 0):
transa = 't'
lda = (&b[0,1]) - b0 if b.shape[1] > 1 else 1
else:
with gil:
raise ValueError("Input 'b' is neither C nor Fortran contiguous.")
k = b.shape[0]
if k != a.shape[1]:
with gil:
raise ValueError("Shape mismatch in input arrays.")
m = b.shape[1]
n = a.shape[0]
if n != c.shape[0] or m != c.shape[1]:
with gil:
raise ValueError("Output array does not have the correct shape.")
ldc = (&c[1,0]) - c0 if c.shape[0] > 1 else 1
dgemm(transa, transb, &m, &n, &k, &alpha, b0, &lda, a0,
&ldb, &beta, c0, &ldc)
elif _is_contiguous(c, 0):
if _is_contiguous(a, 1):
transa = 't'
lda = (&a[1,0]) - a0 if a.shape[0] > 1 else 1
elif _is_contiguous(a, 0):
transa = 'n'
lda = (&a[0,1]) - a0 if a.shape[1] > 1 else 1
else:
with gil:
raise ValueError("Input 'a' is neither C nor Fortran contiguous.")
if _is_contiguous(b, 1):
transb = 't'
ldb = (&b[1,0]) - b0 if b.shape[0] > 1 else 1
elif _is_contiguous(b, 0):
transb = 'n'
ldb = (&b[0,1]) - b0 if b.shape[1] > 1 else 1
else:
with gil:
raise ValueError("Input 'b' is neither C nor Fortran contiguous.")
m = a.shape[0]
k = a.shape[1]
if k != b.shape[0]:
with gil:
raise ValueError("Shape mismatch in input arrays.")
n = b.shape[1]
if m != c.shape[0] or n != c.shape[1]:
with gil:
raise ValueError("Output array does not have the correct shape.")
ldc = (&c[0,1]) - c0 if c.shape[1] > 1 else 1
dgemm(transa, transb, &m, &n, &k, &alpha, a0, &lda, b0,
&ldb, &beta, c0, &ldc)
else:
with gil:
raise ValueError("Input 'c' is neither C nor Fortran contiguous.")
return 0
cpdef double _test_dnrm2(double[:] x) nogil:
cdef:
int n = x.shape[0]
int incx = x.strides[0] // sizeof(x[0])
return dnrm2(&n, &x[0], &incx)
cpdef double _test_dzasum(double complex[:] zx) nogil:
cdef:
int n = zx.shape[0]
int incx = zx.strides[0] // sizeof(zx[0])
return dzasum(&n, &zx[0], &incx)
cpdef double _test_dznrm2(double complex[:] x) nogil:
cdef:
int n = x.shape[0]
int incx = x.strides[0] // sizeof(x[0])
return dznrm2(&n, &x[0], &incx)
cpdef int _test_icamax(float complex[:] cx) nogil:
cdef:
int n = cx.shape[0]
int incx = cx.strides[0] // sizeof(cx[0])
return icamax(&n, &cx[0], &incx)
cpdef int _test_idamax(double[:] dx) nogil:
cdef:
int n = dx.shape[0]
int incx = dx.strides[0] // sizeof(dx[0])
return idamax(&n, &dx[0], &incx)
cpdef int _test_isamax(float[:] sx) nogil:
cdef:
int n = sx.shape[0]
int incx = sx.strides[0] // sizeof(sx[0])
return isamax(&n, &sx[0], &incx)
cpdef int _test_izamax(double complex[:] zx) nogil:
cdef:
int n = zx.shape[0]
int incx = zx.strides[0] // sizeof(zx[0])
return izamax(&n, &zx[0], &incx)
cpdef float _test_sasum(float[:] sx) nogil:
cdef:
int n = sx.shape[0]
        int incx = sx.strides[0] // sizeof(sx[0])
return sasum(&n, &sx[0], &incx)
cpdef float _test_scasum(float complex[:] cx) nogil:
cdef:
int n = cx.shape[0]
int incx = cx.strides[0] // sizeof(cx[0])
return scasum(&n, &cx[0], &incx)
cpdef float _test_scnrm2(float complex[:] x) nogil:
cdef:
int n = x.shape[0]
int incx = x.strides[0] // sizeof(x[0])
return scnrm2(&n, &x[0], &incx)
cpdef float _test_sdot(float[:] sx, float[:] sy) nogil:
cdef:
int n = sx.shape[0]
int incx = sx.strides[0] // sizeof(sx[0])
int incy = sy.strides[0] // sizeof(sy[0])
return sdot(&n, &sx[0], &incx, &sy[0], &incy)
cpdef float _test_snrm2(float[:] x) nogil:
cdef:
int n = x.shape[0]
        int incx = x.strides[0] // sizeof(x[0])
return snrm2(&n, &x[0], &incx)
cpdef double complex _test_zdotc(double complex[:] zx, double complex[:] zy) nogil:
cdef:
int n = zx.shape[0]
int incx = zx.strides[0] // sizeof(zx[0])
int incy = zy.strides[0] // sizeof(zy[0])
return zdotc(&n, &zx[0], &incx, &zy[0], &incy)
cpdef double complex _test_zdotu(double complex[:] zx, double complex[:] zy) nogil:
cdef:
int n = zx.shape[0]
int incx = zx.strides[0] // sizeof(zx[0])
int incy = zy.strides[0] // sizeof(zy[0])
return zdotu(&n, &zx[0], &incx, &zy[0], &incy)
"""
def generate_blas_pyx(func_sigs, sub_sigs, all_sigs, header_name):
funcs = "\n".join(pyx_decl_func(*(s+(header_name,))) for s in func_sigs)
subs = "\n" + "\n".join(pyx_decl_sub(*(s[::2]+(header_name,)))
for s in sub_sigs)
return make_blas_pyx_preamble(all_sigs) + funcs + subs + blas_py_wrappers
lapack_py_wrappers = """
# Python accessible wrappers for testing:
def _test_dlamch(cmach):
# This conversion is necessary to handle Python 3 strings.
cmach_bytes = bytes(cmach)
# Now that it is a bytes representation, a non-temporary variable
# must be passed as a part of the function call.
cdef char* cmach_char = cmach_bytes
return dlamch(cmach_char)
def _test_slamch(cmach):
# This conversion is necessary to handle Python 3 strings.
cmach_bytes = bytes(cmach)
# Now that it is a bytes representation, a non-temporary variable
# must be passed as a part of the function call.
cdef char* cmach_char = cmach_bytes
return slamch(cmach_char)
"""
def generate_lapack_pyx(func_sigs, sub_sigs, all_sigs, header_name):
funcs = "\n".join(pyx_decl_func(*(s+(header_name,))) for s in func_sigs)
subs = "\n" + "\n".join(pyx_decl_sub(*(s[::2]+(header_name,)))
for s in sub_sigs)
preamble = make_lapack_pyx_preamble(all_sigs)
return preamble + funcs + subs + lapack_py_wrappers
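# Note: the function-pointer style pxd_template defined just below is unused; it
# is immediately overridden by the simpler declaration template that follows it.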
pxd_template = """ctypedef {ret_type} {name}_t({args}) nogil
cdef {name}_t *{name}_f
"""
pxd_template = """cdef {ret_type} {name}({args}) nogil
"""
def pxd_decl(name, ret_type, args):
args = args.replace('lambda', 'lambda_').replace('*in,', '*in_,')
return pxd_template.format(name=name, ret_type=ret_type, args=args)
blas_pxd_preamble = """# Within scipy, these wrappers can be used via relative or absolute cimport.
# Examples:
# from ..linalg cimport cython_blas
# from scipy.linalg cimport cython_blas
# cimport scipy.linalg.cython_blas as cython_blas
# cimport ..linalg.cython_blas as cython_blas
# Within scipy, if BLAS functions are needed in C/C++/Fortran,
# these wrappers should not be used.
# The original libraries should be linked directly.
ctypedef float s
ctypedef double d
ctypedef float complex c
ctypedef double complex z
"""
def generate_blas_pxd(all_sigs):
body = '\n'.join(pxd_decl(*sig) for sig in all_sigs)
return blas_pxd_preamble + body
lapack_pxd_preamble = """# Within scipy, these wrappers can be used via relative or absolute cimport.
# Examples:
# from ..linalg cimport cython_lapack
# from scipy.linalg cimport cython_lapack
# cimport scipy.linalg.cython_lapack as cython_lapack
# cimport ..linalg.cython_lapack as cython_lapack
# Within scipy, if LAPACK functions are needed in C/C++/Fortran,
# these wrappers should not be used.
# The original libraries should be linked directly.
ctypedef float s
ctypedef double d
ctypedef float complex c
ctypedef double complex z
# Function pointer type declarations for
# gees and gges families of functions.
ctypedef bint cselect1(c*)
ctypedef bint cselect2(c*, c*)
ctypedef bint dselect2(d*, d*)
ctypedef bint dselect3(d*, d*, d*)
ctypedef bint sselect2(s*, s*)
ctypedef bint sselect3(s*, s*, s*)
ctypedef bint zselect1(z*)
ctypedef bint zselect2(z*, z*)
"""
def generate_lapack_pxd(all_sigs):
return lapack_pxd_preamble + '\n'.join(pxd_decl(*sig) for sig in all_sigs)
fortran_template = """ subroutine {name}wrp(ret, {argnames})
external {wrapper}
{ret_type} {wrapper}
{ret_type} ret
{argdecls}
ret = {wrapper}({argnames})
end
"""
dims = {'work': '(*)', 'ab': '(ldab,*)', 'a': '(lda,*)', 'dl': '(*)',
'd': '(*)', 'du': '(*)', 'ap': '(*)', 'e': '(*)', 'lld': '(*)'}
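# dims maps selected Fortran argument names to the array dimension suffix they
# need in the generated wrapper declarations, e.g. 'work' becomes 'work(*)' and
# 'a' becomes 'a(lda,*)'; vector names containing x or y (outside the exclusions
# handled below) get an '(n)' suffix instead.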
def process_fortran_name(name, funcname):
if 'inc' in name:
return name
xy_exclusions = ['ladiv', 'lapy2', 'lapy3']
if ('x' in name or 'y' in name) and funcname[1:] not in xy_exclusions:
return name + '(n)'
if name in dims:
return name + dims[name]
return name
def fort_subroutine_wrapper(name, ret_type, args):
if name[0] in ['c', 's'] or name in ['zladiv', 'zdotu', 'zdotc']:
wrapper = 'w' + name
else:
wrapper = name
types, names = arg_names_and_types(args)
argnames = ', '.join(names)
names = [process_fortran_name(n, name) for n in names]
argdecls = '\n '.join('{0} {1}'.format(fortran_types[t], n)
for n, t in zip(names, types))
return fortran_template.format(name=name, wrapper=wrapper,
argnames=argnames, argdecls=argdecls,
ret_type=fortran_types[ret_type])
def generate_fortran(func_sigs):
return "\n".join(fort_subroutine_wrapper(*sig) for sig in func_sigs)
def make_c_args(args):
types, names = arg_names_and_types(args)
types = [c_types[arg] for arg in types]
return ', '.join('{0} *{1}'.format(t, n) for t, n in zip(types, names))
c_func_template = "void F_FUNC({name}wrp, {upname}WRP)({return_type} *ret, {args});\n"
def c_func_decl(name, return_type, args):
args = make_c_args(args)
return_type = c_types[return_type]
return c_func_template.format(name=name, upname=name.upper(),
return_type=return_type, args=args)
c_sub_template = "void F_FUNC({name},{upname})({args});\n"
def c_sub_decl(name, return_type, args):
args = make_c_args(args)
return c_sub_template.format(name=name, upname=name.upper(), args=args)
c_preamble = """#ifndef SCIPY_LINALG_{lib}_FORTRAN_WRAPPERS_H
#define SCIPY_LINALG_{lib}_FORTRAN_WRAPPERS_H
#include "fortran_defs.h"
#include "numpy/arrayobject.h"
"""
lapack_decls = """
typedef int (*_cselect1)(npy_complex64*);
typedef int (*_cselect2)(npy_complex64*, npy_complex64*);
typedef int (*_dselect2)(double*, double*);
typedef int (*_dselect3)(double*, double*, double*);
typedef int (*_sselect2)(float*, float*);
typedef int (*_sselect3)(float*, float*, float*);
typedef int (*_zselect1)(npy_complex128*);
typedef int (*_zselect2)(npy_complex128*, npy_complex128*);
"""
cpp_guard = """
#ifdef __cplusplus
extern "C" {
#endif
"""
c_end = """
#ifdef __cplusplus
}
#endif
#endif
"""
def generate_c_header(func_sigs, sub_sigs, all_sigs, lib_name):
funcs = "".join(c_func_decl(*sig) for sig in func_sigs)
subs = "\n" + "".join(c_sub_decl(*sig) for sig in sub_sigs)
if lib_name == 'LAPACK':
preamble = (c_preamble.format(lib=lib_name) + lapack_decls)
else:
preamble = c_preamble.format(lib=lib_name)
return "".join([preamble, cpp_guard, funcs, subs, c_end])
def split_signature(sig):
name_and_type, args = sig[:-1].split('(')
ret_type, name = name_and_type.split(' ')
return name, ret_type, args
def filter_lines(ls):
ls = [l.strip() for l in ls if l != '\n' and l[0] != '#']
func_sigs = [split_signature(l) for l in ls if l.split(' ')[0] != 'void']
sub_sigs = [split_signature(l) for l in ls if l.split(' ')[0] == 'void']
all_sigs = list(sorted(func_sigs + sub_sigs, key=itemgetter(0)))
return func_sigs, sub_sigs, all_sigs
def make_all(blas_signature_file="cython_blas_signatures.txt",
lapack_signature_file="cython_lapack_signatures.txt",
blas_name="cython_blas",
lapack_name="cython_lapack",
blas_fortran_name="_blas_subroutine_wrappers.f",
lapack_fortran_name="_lapack_subroutine_wrappers.f",
blas_header_name="_blas_subroutines.h",
lapack_header_name="_lapack_subroutines.h"):
comments = ["This file was generated by _cython_wrapper_generators.py.\n",
"Do not edit this file directly.\n"]
ccomment = ''.join(['// ' + line for line in comments]) + '\n'
pyxcomment = ''.join(['# ' + line for line in comments]) + '\n'
fcomment = ''.join(['c ' + line for line in comments]) + '\n'
with open(blas_signature_file, 'r') as f:
blas_sigs = f.readlines()
blas_sigs = filter_lines(blas_sigs)
blas_pyx = generate_blas_pyx(*(blas_sigs + (blas_header_name,)))
with open(blas_name + '.pyx', 'w') as f:
f.write(pyxcomment)
f.write(blas_pyx)
blas_pxd = generate_blas_pxd(blas_sigs[2])
with open(blas_name + '.pxd', 'w') as f:
f.write(pyxcomment)
f.write(blas_pxd)
blas_fortran = generate_fortran(blas_sigs[0])
with open(blas_fortran_name, 'w') as f:
f.write(fcomment)
f.write(blas_fortran)
blas_c_header = generate_c_header(*(blas_sigs + ('BLAS',)))
with open(blas_header_name, 'w') as f:
f.write(ccomment)
f.write(blas_c_header)
with open(lapack_signature_file, 'r') as f:
lapack_sigs = f.readlines()
lapack_sigs = filter_lines(lapack_sigs)
lapack_pyx = generate_lapack_pyx(*(lapack_sigs + (lapack_header_name,)))
with open(lapack_name + '.pyx', 'w') as f:
f.write(pyxcomment)
f.write(lapack_pyx)
lapack_pxd = generate_lapack_pxd(lapack_sigs[2])
with open(lapack_name + '.pxd', 'w') as f:
f.write(pyxcomment)
f.write(lapack_pxd)
lapack_fortran = generate_fortran(lapack_sigs[0])
with open(lapack_fortran_name, 'w') as f:
f.write(fcomment)
f.write(lapack_fortran)
lapack_c_header = generate_c_header(*(lapack_sigs + ('LAPACK',)))
with open(lapack_header_name, 'w') as f:
f.write(ccomment)
f.write(lapack_c_header)
if __name__ == '__main__':
make_all()
|
samedder/azure-cli | refs/heads/master | src/command_modules/azure-cli-storage/azure/cli/command_modules/storage/tests/test_storage_queue_scenarios.py | 3 | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import time
from azure.cli.testsdk import (ScenarioTest, ResourceGroupPreparer, StorageAccountPreparer, api_version_constraint,
JMESPathCheck, JMESPathCheckExists, NoneCheck)
from azure.cli.core.profiles import ResourceType
@api_version_constraint(ResourceType.MGMT_STORAGE, min_api='2016-12-01')
class StorageQueueScenarioTests(ScenarioTest):
@ResourceGroupPreparer()
@StorageAccountPreparer(sku='Standard_RAGRS')
def test_storage_queue_general_scenario(self, resource_group, storage_account):
account_key = self.get_account_key(resource_group, storage_account)
self.set_env('AZURE_STORAGE_ACCOUNT', storage_account)
self.set_env('AZURE_STORAGE_KEY', account_key)
queue = self.create_random_name('queue', 24)
self.cmd('storage queue create -n {} --fail-on-exist --metadata a=b c=d'.format(queue),
checks=JMESPathCheck('created', True))
self.cmd('storage queue exists -n {}'.format(queue),
checks=JMESPathCheck('exists', True))
res = self.cmd('storage queue list').get_output_in_json()
self.assertIn(queue, [x['name'] for x in res], 'The newly created queue is not listed.')
sas = self.cmd('storage queue generate-sas -n {} --permissions r'.format(queue)).output
self.assertIn('sig', sas, 'The sig segment is not in the sas {}'.format(sas))
self.cmd('storage queue metadata show -n {}'.format(queue), checks=[
JMESPathCheck('a', 'b'),
JMESPathCheck('c', 'd')
])
self.cmd('storage queue metadata update -n {} --metadata e=f g=h'.format(queue))
self.cmd('storage queue metadata show -n {}'.format(queue), checks=[
JMESPathCheck('e', 'f'),
JMESPathCheck('g', 'h')
])
# Queue ACL policy
self.cmd('storage queue policy list -q {}'.format(queue), checks=NoneCheck())
start_time = '2016-01-01T00:00Z'
expiry = '2016-05-01T00:00Z'
policy = self.create_random_name('policy', 16)
self.cmd('storage queue policy create -q {} -n {} --permission raup --start {} --expiry {}'
.format(queue, policy, start_time, expiry))
acl = self.cmd('storage queue policy list -q {}'.format(queue)).get_output_in_json()
self.assertIn(policy, acl)
self.assertEqual(1, len(acl))
returned_permissions = self.cmd('storage queue policy show -q {} -n {}'.format(queue, policy), checks=[
JMESPathCheck('start', '2016-01-01T00:00:00+00:00'),
JMESPathCheck('expiry', '2016-05-01T00:00:00+00:00'),
JMESPathCheckExists('permission')
]).get_output_in_json()['permission']
self.assertIn('r', returned_permissions)
self.assertIn('p', returned_permissions)
self.assertIn('a', returned_permissions)
self.assertIn('u', returned_permissions)
self.cmd('storage queue policy update -q {} -n {} --permission ra'.format(queue, policy))
self.cmd('storage queue policy show -q {} -n {}'.format(queue, policy),
checks=JMESPathCheck('permission', 'ra'))
self.cmd('storage queue policy delete -q {} -n {}'.format(queue, policy))
self.cmd('storage queue policy list -q {}'.format(queue), checks=NoneCheck())
# Queue message operation
self.cmd('storage message put -q {} --content "test message"'.format(queue))
self.cmd('storage message peek -q {}'.format(queue),
checks=JMESPathCheck('[0].content', 'test message'))
first_message = self.cmd('storage message get -q {}'.format(queue),
checks=JMESPathCheck('length(@)', 1)).get_output_in_json()[0]
self.cmd('storage message update -q {} --id {} --pop-receipt {} --visibility-timeout 1 '
'--content "new message!"'.format(queue, first_message['id'],
first_message['popReceipt']))
        time.sleep(2)  # wait for the updated message to become visible in the queue again
self.cmd('storage message peek -q {}'.format(queue),
checks=JMESPathCheck('[0].content', 'new message!'))
self.cmd('storage message put -q {} --content "second message"'.format(queue))
self.cmd('storage message put -q {} --content "third message"'.format(queue))
self.cmd('storage message peek -q {} --num-messages 32'.format(queue),
checks=JMESPathCheck('length(@)', 3))
third_message = self.cmd('storage message get -q {}'.format(queue)).get_output_in_json()[0]
self.cmd('storage message delete -q {} --id {} --pop-receipt {}'
.format(queue, third_message['id'], third_message['popReceipt']))
self.cmd('storage message peek -q {} --num-messages 32'.format(queue),
checks=JMESPathCheck('length(@)', 2))
self.cmd('storage message clear -q {}'.format(queue))
self.cmd('storage message peek -q {} --num-messages 32'.format(queue), checks=NoneCheck())
# verify delete operation
self.cmd('storage queue delete -n {} --fail-not-exist'.format(queue),
checks=JMESPathCheck('deleted', True))
self.cmd('storage queue exists -n {}'.format(queue),
checks=JMESPathCheck('exists', False))
# check status of the queue
self.cmd('storage queue stats', checks=JMESPathCheck('geoReplication.status', 'live'))
def get_account_key(self, group, name):
return self.cmd('storage account keys list -n {} -g {} --query "[0].value" -otsv'
.format(name, group)).output
if __name__ == '__main__':
import unittest
unittest.main()
|
MrHamdulay/myjvm | refs/heads/master | klasses/java_lang_Float.py | 1 | from __future__ import absolute_import
import struct
def floatToRawIntBits(klass, vm, method, frame):
float_value = frame.get_local(0)
print 'float', float_value
assert isinstance(float_value, float)
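    # Reinterpret the IEEE-754 single-precision bit pattern as a signed 32-bit
    # integer: pack the float and unpack the same bytes as a long, using the
    # same (big-endian) byte order on both sides so the bits are unchanged.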
int_bits = struct.unpack('>l', struct.pack('>f', float_value))[0]
return int_bits
|
open-power/eCMD | refs/heads/master | ecmd-core/pyapi/gen_apply.py | 2 | #!/usr/bin/env python
from fileinput import input
import re
# Read in a Python module generated by SWIG, extract all parameter names starting with o_
# or io_ and output a list of SWIG %apply statements to map them to output parameters.
o_names = set()
io_names = set()
def output(names, totype, fromtype):
print("%%apply %s { %s };" % (totype, ", ".join(fromtype + name for name in sorted(names))))
for l in input():
for word in re.split(r"\W+", l):
if word.startswith("o_"):
o_names.add(word)
elif word.startswith("io_"):
io_names.add(word)
output(o_names, "int &OUTPUT", "enum SWIGTYPE &")
output(o_names, "int &OUTPUT", "uint32_t &")
output(o_names, "int &OUTPUT", "uint64_t &")
output(o_names, "std::string &OUTPUT", "std::string &")
output(io_names, "int &INOUT", "enum SWIGTYPE &")
output(io_names, "int &INOUT", "uint32_t &")
output(io_names, "int &INOUT", "uint64_t &")
output(io_names, "std::string &INOUT", "std::string &")
|
benoitsteiner/tensorflow-xsmm | refs/heads/master | tensorflow/contrib/training/python/training/evaluation_test.py | 15 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.contrib.training.evaluation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import os
import time
import numpy as np
from tensorflow.contrib.framework.python.ops import variables
from tensorflow.contrib.layers.python.layers import layers
from tensorflow.contrib.losses.python.losses import loss_ops
from tensorflow.contrib.training.python.training import evaluation
from tensorflow.contrib.training.python.training import training
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session as session_lib
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import metrics
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import gfile
from tensorflow.python.platform import test
from tensorflow.python.summary import summary as summary_lib
from tensorflow.python.summary import summary_iterator
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import saver as saver_lib
class CheckpointIteratorTest(test.TestCase):
def testReturnsEmptyIfNoCheckpointsFound(self):
checkpoint_dir = os.path.join(self.get_temp_dir(), 'no_checkpoints_found')
num_found = 0
for _ in evaluation.checkpoints_iterator(checkpoint_dir, timeout=0):
num_found += 1
self.assertEqual(num_found, 0)
def testReturnsSingleCheckpointIfOneCheckpointFound(self):
checkpoint_dir = os.path.join(self.get_temp_dir(), 'one_checkpoint_found')
if not gfile.Exists(checkpoint_dir):
gfile.MakeDirs(checkpoint_dir)
global_step = variables.get_or_create_global_step()
saver = saver_lib.Saver() # Saves the global step.
with self.test_session() as session:
session.run(variables_lib.global_variables_initializer())
save_path = os.path.join(checkpoint_dir, 'model.ckpt')
saver.save(session, save_path, global_step=global_step)
num_found = 0
for _ in evaluation.checkpoints_iterator(checkpoint_dir, timeout=0):
num_found += 1
self.assertEqual(num_found, 1)
def testReturnsSingleCheckpointIfOneShardedCheckpoint(self):
checkpoint_dir = os.path.join(self.get_temp_dir(),
'one_checkpoint_found_sharded')
if not gfile.Exists(checkpoint_dir):
gfile.MakeDirs(checkpoint_dir)
global_step = variables.get_or_create_global_step()
# This will result in 3 different checkpoint shard files.
with ops.device('/cpu:0'):
variables_lib.Variable(10, name='v0')
with ops.device('/cpu:1'):
variables_lib.Variable(20, name='v1')
saver = saver_lib.Saver(sharded=True)
with session_lib.Session(
target='',
config=config_pb2.ConfigProto(device_count={'CPU': 2})) as session:
session.run(variables_lib.global_variables_initializer())
save_path = os.path.join(checkpoint_dir, 'model.ckpt')
saver.save(session, save_path, global_step=global_step)
num_found = 0
for _ in evaluation.checkpoints_iterator(checkpoint_dir, timeout=0):
num_found += 1
self.assertEqual(num_found, 1)
def testTimeoutFn(self):
timeout_fn_calls = [0]
def timeout_fn():
timeout_fn_calls[0] += 1
return timeout_fn_calls[0] > 3
results = list(
evaluation.checkpoints_iterator(
'/non-existent-dir', timeout=0.1, timeout_fn=timeout_fn))
self.assertEqual([], results)
self.assertEqual(4, timeout_fn_calls[0])
class WaitForNewCheckpointTest(test.TestCase):
def testReturnsNoneAfterTimeout(self):
start = time.time()
ret = evaluation.wait_for_new_checkpoint(
'/non-existent-dir', 'foo', timeout=1.0, seconds_to_sleep=0.5)
end = time.time()
self.assertIsNone(ret)
# We've waited one second.
self.assertGreater(end, start + 0.5)
# The timeout kicked in.
self.assertLess(end, start + 1.1)
def logistic_classifier(inputs):
return layers.fully_connected(inputs, 1, activation_fn=math_ops.sigmoid)
class EvaluateOnceTest(test.TestCase):
def setUp(self):
super(EvaluateOnceTest, self).setUp()
# Create an easy training set:
np.random.seed(0)
self._inputs = np.zeros((16, 4))
self._labels = np.random.randint(0, 2, size=(16, 1)).astype(np.float32)
for i in range(16):
j = int(2 * self._labels[i] + np.random.randint(0, 2))
self._inputs[i, j] = 1
def _train_model(self, checkpoint_dir, num_steps):
"""Trains a simple classification model.
Note that the data has been configured such that after around 300 steps,
the model has memorized the dataset (e.g. we can expect %100 accuracy).
Args:
checkpoint_dir: The directory where the checkpoint is written to.
num_steps: The number of steps to train for.
"""
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = logistic_classifier(tf_inputs)
loss = loss_ops.log_loss(tf_predictions, tf_labels)
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = training.create_train_op(loss, optimizer)
loss = training.train(
train_op,
checkpoint_dir,
hooks=[basic_session_run_hooks.StopAtStepHook(num_steps)])
if num_steps >= 300:
assert loss < .015
def testEvaluatePerfectModel(self):
checkpoint_dir = os.path.join(self.get_temp_dir(),
'evaluate_perfect_model_once')
# Train a Model to completion:
self._train_model(checkpoint_dir, num_steps=300)
# Run
inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
labels = constant_op.constant(self._labels, dtype=dtypes.float32)
logits = logistic_classifier(inputs)
predictions = math_ops.round(logits)
accuracy, update_op = metrics.accuracy(
predictions=predictions, labels=labels)
checkpoint_path = evaluation.wait_for_new_checkpoint(checkpoint_dir)
final_ops_values = evaluation.evaluate_once(
checkpoint_path=checkpoint_path,
eval_ops=update_op,
final_ops={'accuracy': accuracy},
hooks=[
evaluation.StopAfterNEvalsHook(1),
])
self.assertTrue(final_ops_values['accuracy'] > .99)
def testEvalOpAndFinalOp(self):
checkpoint_dir = os.path.join(self.get_temp_dir(), 'eval_ops_and_final_ops')
# Train a model for a single step to get a checkpoint.
self._train_model(checkpoint_dir, num_steps=1)
checkpoint_path = evaluation.wait_for_new_checkpoint(checkpoint_dir)
# Create the model so we have something to restore.
inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
logistic_classifier(inputs)
num_evals = 5
final_increment = 9.0
my_var = variables.local_variable(0.0, name='MyVar')
eval_ops = state_ops.assign_add(my_var, 1.0)
final_ops = array_ops.identity(my_var) + final_increment
final_ops_values = evaluation.evaluate_once(
checkpoint_path=checkpoint_path,
eval_ops=eval_ops,
final_ops={'value': final_ops},
hooks=[
evaluation.StopAfterNEvalsHook(num_evals),
])
self.assertEqual(final_ops_values['value'], num_evals + final_increment)
def testOnlyFinalOp(self):
checkpoint_dir = os.path.join(self.get_temp_dir(), 'only_final_ops')
# Train a model for a single step to get a checkpoint.
self._train_model(checkpoint_dir, num_steps=1)
checkpoint_path = evaluation.wait_for_new_checkpoint(checkpoint_dir)
# Create the model so we have something to restore.
inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
logistic_classifier(inputs)
final_increment = 9.0
my_var = variables.local_variable(0.0, name='MyVar')
final_ops = array_ops.identity(my_var) + final_increment
final_ops_values = evaluation.evaluate_once(
checkpoint_path=checkpoint_path, final_ops={'value': final_ops})
self.assertEqual(final_ops_values['value'], final_increment)
class EvaluateRepeatedlyTest(test.TestCase):
def setUp(self):
super(EvaluateRepeatedlyTest, self).setUp()
# Create an easy training set:
np.random.seed(0)
self._inputs = np.zeros((16, 4))
self._labels = np.random.randint(0, 2, size=(16, 1)).astype(np.float32)
for i in range(16):
j = int(2 * self._labels[i] + np.random.randint(0, 2))
self._inputs[i, j] = 1
def _train_model(self, checkpoint_dir, num_steps):
"""Trains a simple classification model.
Note that the data has been configured such that after around 300 steps,
the model has memorized the dataset (e.g. we can expect %100 accuracy).
Args:
checkpoint_dir: The directory where the checkpoint is written to.
num_steps: The number of steps to train for.
"""
with ops.Graph().as_default():
random_seed.set_random_seed(0)
tf_inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
tf_labels = constant_op.constant(self._labels, dtype=dtypes.float32)
tf_predictions = logistic_classifier(tf_inputs)
loss = loss_ops.log_loss(tf_predictions, tf_labels)
optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=1.0)
train_op = training.create_train_op(loss, optimizer)
loss = training.train(
train_op,
checkpoint_dir,
hooks=[basic_session_run_hooks.StopAtStepHook(num_steps)])
def testEvaluatePerfectModel(self):
checkpoint_dir = os.path.join(self.get_temp_dir(),
'evaluate_perfect_model_repeated')
# Train a Model to completion:
self._train_model(checkpoint_dir, num_steps=300)
# Run
inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
labels = constant_op.constant(self._labels, dtype=dtypes.float32)
logits = logistic_classifier(inputs)
predictions = math_ops.round(logits)
accuracy, update_op = metrics.accuracy(
predictions=predictions, labels=labels)
final_values = evaluation.evaluate_repeatedly(
checkpoint_dir=checkpoint_dir,
eval_ops=update_op,
final_ops={'accuracy': accuracy},
hooks=[
evaluation.StopAfterNEvalsHook(1),
],
max_number_of_evaluations=1)
self.assertTrue(final_values['accuracy'] > .99)
def testEvaluationLoopTimeout(self):
checkpoint_dir = os.path.join(self.get_temp_dir(),
'evaluation_loop_timeout')
if not gfile.Exists(checkpoint_dir):
gfile.MakeDirs(checkpoint_dir)
# We need a variable that the saver will try to restore.
variables.get_or_create_global_step()
# Run with placeholders. If we actually try to evaluate this, we'd fail
# since we're not using a feed_dict.
cant_run_op = array_ops.placeholder(dtype=dtypes.float32)
start = time.time()
final_values = evaluation.evaluate_repeatedly(
checkpoint_dir=checkpoint_dir,
eval_ops=cant_run_op,
hooks=[evaluation.StopAfterNEvalsHook(10)],
timeout=6)
end = time.time()
self.assertFalse(final_values)
# Assert that we've waited for the duration of the timeout (minus the sleep
# time).
self.assertGreater(end - start, 5.0)
# Then the timeout kicked in and stops the loop.
self.assertLess(end - start, 7)
def testEvaluationLoopTimeoutWithTimeoutFn(self):
checkpoint_dir = os.path.join(self.get_temp_dir(),
'evaluation_loop_timeout_with_timeout_fn')
# Train a Model to completion:
self._train_model(checkpoint_dir, num_steps=300)
# Run
inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
labels = constant_op.constant(self._labels, dtype=dtypes.float32)
logits = logistic_classifier(inputs)
predictions = math_ops.round(logits)
accuracy, update_op = metrics.accuracy(
predictions=predictions, labels=labels)
timeout_fn_calls = [0]
def timeout_fn():
timeout_fn_calls[0] += 1
return timeout_fn_calls[0] > 3
final_values = evaluation.evaluate_repeatedly(
checkpoint_dir=checkpoint_dir,
eval_ops=update_op,
final_ops={'accuracy': accuracy},
hooks=[
evaluation.StopAfterNEvalsHook(1),
],
eval_interval_secs=1,
max_number_of_evaluations=2,
timeout=0.1,
timeout_fn=timeout_fn)
# We should have evaluated once.
self.assertTrue(final_values['accuracy'] > .99)
    # And the timeout fn should have been called 4 times.
self.assertEqual(4, timeout_fn_calls[0])
def testEvaluateWithEvalFeedDict(self):
# Create a checkpoint.
checkpoint_dir = os.path.join(self.get_temp_dir(),
'evaluate_with_eval_feed_dict')
self._train_model(checkpoint_dir, num_steps=1)
# We need a variable that the saver will try to restore.
variables.get_or_create_global_step()
# Create a variable and an eval op that increments it with a placeholder.
my_var = variables.local_variable(0.0, name='my_var')
increment = array_ops.placeholder(dtype=dtypes.float32)
eval_ops = state_ops.assign_add(my_var, increment)
increment_value = 3
num_evals = 5
expected_value = increment_value * num_evals
final_values = evaluation.evaluate_repeatedly(
checkpoint_dir=checkpoint_dir,
eval_ops=eval_ops,
feed_dict={increment: 3},
final_ops={'my_var': array_ops.identity(my_var)},
hooks=[
evaluation.StopAfterNEvalsHook(num_evals),
],
max_number_of_evaluations=1)
self.assertEqual(final_values['my_var'], expected_value)
def _create_names_to_metrics(self, predictions, labels):
accuracy0, update_op0 = metrics.accuracy(labels, predictions)
accuracy1, update_op1 = metrics.accuracy(labels, predictions + 1)
names_to_values = {'Accuracy': accuracy0, 'Another_accuracy': accuracy1}
names_to_updates = {'Accuracy': update_op0, 'Another_accuracy': update_op1}
return names_to_values, names_to_updates
def _verify_summaries(self, output_dir, names_to_values):
"""Verifies that the given `names_to_values` are found in the summaries.
Args:
output_dir: An existing directory where summaries are found.
names_to_values: A dictionary of strings to values.
"""
# Check that the results were saved. The events file may have additional
    # entries, e.g. the event version stamp, so we have to parse things a bit.
output_filepath = glob.glob(os.path.join(output_dir, '*'))
self.assertEqual(len(output_filepath), 1)
events = summary_iterator.summary_iterator(output_filepath[0])
summaries = [e.summary for e in events if e.summary.value]
values = []
for summary in summaries:
for value in summary.value:
values.append(value)
saved_results = {v.tag: v.simple_value for v in values}
for name in names_to_values:
self.assertAlmostEqual(names_to_values[name], saved_results[name], 5)
def testSummariesAreFlushedToDisk(self):
checkpoint_dir = os.path.join(self.get_temp_dir(), 'summaries_are_flushed')
logdir = os.path.join(self.get_temp_dir(), 'summaries_are_flushed_eval')
if gfile.Exists(logdir):
gfile.DeleteRecursively(logdir)
# Train a Model to completion:
self._train_model(checkpoint_dir, num_steps=300)
# Create the model (which can be restored).
inputs = constant_op.constant(self._inputs, dtype=dtypes.float32)
logistic_classifier(inputs)
names_to_values = {'bread': 3.4, 'cheese': 4.5, 'tomato': 2.0}
for k in names_to_values:
v = names_to_values[k]
summary_lib.scalar(k, v)
evaluation.evaluate_repeatedly(
checkpoint_dir=checkpoint_dir,
hooks=[
evaluation.SummaryAtEndHook(log_dir=logdir),
],
max_number_of_evaluations=1)
self._verify_summaries(logdir, names_to_values)
if __name__ == '__main__':
test.main()
|
servo/rust | refs/heads/master | src/etc/get-snapshot.py | 9 | #!/usr/bin/env python
#
# Copyright 2011-2014 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import os, tarfile, re, shutil, sys
from snapshot import *
def unpack_snapshot(triple, dl_path):
print("opening snapshot " + dl_path)
tar = tarfile.open(dl_path)
kernel = get_kernel(triple)
stagep = os.path.join(triple, "stage0")
# Remove files from prior unpackings, since snapshot rustc may not
# be able to disambiguate between multiple candidate libraries.
# (Leave dirs in place since extracting step still needs them.)
for root, _, files in os.walk(stagep):
for f in files:
print("removing " + os.path.join(root, f))
os.unlink(os.path.join(root, f))
for p in tar.getnames():
        name = p.replace("rust-stage0/", "", 1)
fp = os.path.join(stagep, name)
print("extracting " + p)
tar.extract(p, download_unpack_base)
tp = os.path.join(download_unpack_base, p)
if os.path.isdir(tp) and os.path.exists(fp):
continue
shutil.move(tp, fp)
tar.close()
shutil.rmtree(download_unpack_base)
# Main
# this gets called with one or two arguments:
# The first is the O/S triple.
# The second is an optional path to the snapshot to use.
triple = sys.argv[1]
if len(sys.argv) == 3:
dl_path = sys.argv[2]
else:
# There are no 64-bit Windows snapshots yet, so we'll use 32-bit ones instead, for now
snap_triple = triple if triple != "x86_64-w64-mingw32" else "i686-pc-mingw32"
snap = determine_curr_snapshot(snap_triple)
dl = os.path.join(download_dir_base, snap)
url = download_url_base + "/" + snap
print("determined most recent snapshot: " + snap)
if (not os.path.exists(dl)):
get_url_to_file(url, dl)
if (snap_filename_hash_part(snap) == hash_file(dl)):
print("got download with ok hash")
else:
raise Exception("bad hash on download")
dl_path = os.path.join(download_dir_base, snap)
unpack_snapshot(triple, dl_path)
|
confluenity/uiside | refs/heads/master | app.py | 1 | # -*- coding: utf-8 -*-
# Test PySide-based GUI application
import sys
import traceback
from PySide import QtGui
from PySide import QtCore
from uiside.dialogs.file import OpenFileDialog
from uiside.dialogs.file import SaveFileDialog
from uiside.dialogs.msg import MessageBox
class MainWindow(QtGui.QMainWindow):
def __init__(self):
super(MainWindow, self).__init__()
self.openButton = QtGui.QPushButton('Open file dialog')
self.openMultiButton = QtGui.QPushButton('Open many files dialog')
self.saveButton = QtGui.QPushButton('Save file dialog')
self.msgButton = QtGui.QPushButton('Message Box')
self.resize(480, 240)
self.move(240, 160)
self.setWindowTitle("uiside example")
self.setUpWidgets()
def setUpWidgets(self):
cw = QtGui.QWidget()
self.setCentralWidget(cw)
grid = QtGui.QGridLayout()
grid.setSpacing(10)
grid.setAlignment(QtCore.Qt.AlignTop)
self.openButton.clicked.connect(self.execOpenFileDialog)
grid.addWidget(self.openButton, 0, 0)
self.openMultiButton.clicked.connect(self.execOpenMultiFileDialog)
grid.addWidget(self.openMultiButton, 1, 0)
self.saveButton.clicked.connect(self.execSaveFileDialog)
grid.addWidget(self.saveButton, 0, 1)
self.msgButton.clicked.connect(self.execMessageBox)
grid.addWidget(self.msgButton, 1, 1)
cw.setLayout(grid)
def execOpenFileDialog(self):
dialog = OpenFileDialog(self)
dialog.setFilters([('Video Files', ['*.avi', '*.mp4']), ('All Files', ['*.*'])])
res = dialog.exec_()
if res:
fname, dname, names = res
self.openButton.setText(names[0])
else:
self.openButton.setText('nothing')
print res
def execOpenMultiFileDialog(self):
dialog = OpenFileDialog(self, multi=True)
dialog.setFilters([('Image Files', ['*.png', '*.jpg', '*.gif']), ('All Files', ['*.*'])])
res = dialog.exec_()
if res:
fname, dname, names = res
self.openMultiButton.setText('Open %s files' % len(names))
else:
self.openMultiButton.setText('nothing')
print res
def execSaveFileDialog(self):
dialog = SaveFileDialog(self)
dialog.setShowHidden(True)
res = dialog.exec_()
if res:
fname, dname, names = res
self.saveButton.setText(names[0])
else:
self.saveButton.setText('nothing')
print res
def execMessageBox(self):
title = 'Message Box Example'
text = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.'
buttons = QtGui.QMessageBox.Yes | QtGui.QMessageBox.No
default = QtGui.QMessageBox.No
glyph = QtGui.QMessageBox.Question
box = MessageBox(title, text, 'Do not ask next time', buttons, default, glyph, self)
print box.exec_()
def main():
application = QtGui.QApplication(sys.argv)
application.setOrganizationName('Confluenity')
application.setOrganizationDomain('confluenity.com')
application.setApplicationVersion('0.1.0')
application.setApplicationName('uiside')
mainWindow = MainWindow() # create and show main form
mainWindow.show()
sys.exit(application.exec_())
if __name__ == '__main__':
try:
main()
except SystemExit, code:
pass
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
lines = traceback.extract_tb(exc_traceback)
print "%s %s:" % (exc_type, exc_value)
for src, line, method, snippet in reversed(lines):
print " at %s : %s" % (src, line)
|
Serag8/Bachelor | refs/heads/master | google_appengine/lib/django-1.3/django/contrib/gis/gdal/tests/test_ds.py | 137 | import os
import unittest
from django.contrib.gis.gdal import DataSource, Envelope, OGRGeometry, OGRException, OGRIndexError, GDAL_VERSION
from django.contrib.gis.gdal.field import OFTReal, OFTInteger, OFTString
from django.contrib.gis.geometry.test_data import get_ds_file, TestDS, TEST_DATA
# List of acceptable data sources.
ds_list = (TestDS('test_point', nfeat=5, nfld=3, geom='POINT', gtype=1, driver='ESRI Shapefile',
fields={'dbl' : OFTReal, 'int' : OFTInteger, 'str' : OFTString,},
extent=(-1.35011,0.166623,-0.524093,0.824508), # Got extent from QGIS
srs_wkt='GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]',
field_values={'dbl' : [float(i) for i in range(1, 6)], 'int' : range(1, 6), 'str' : [str(i) for i in range(1, 6)]},
fids=range(5)),
TestDS('test_vrt', ext='vrt', nfeat=3, nfld=3, geom='POINT', gtype='Point25D', driver='VRT',
                  fields={'POINT_X' : OFTString, 'POINT_Y' : OFTString, 'NUM' : OFTString}, # VRT uses CSV, in which all types are OFTString.
extent=(1.0, 2.0, 100.0, 523.5), # Min/Max from CSV
field_values={'POINT_X' : ['1.0', '5.0', '100.0'], 'POINT_Y' : ['2.0', '23.0', '523.5'], 'NUM' : ['5', '17', '23']},
fids=range(1,4)),
TestDS('test_poly', nfeat=3, nfld=3, geom='POLYGON', gtype=3,
driver='ESRI Shapefile',
fields={'float' : OFTReal, 'int' : OFTInteger, 'str' : OFTString,},
extent=(-1.01513,-0.558245,0.161876,0.839637), # Got extent from QGIS
srs_wkt='GEOGCS["GCS_WGS_1984",DATUM["WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]]'),
)
bad_ds = (TestDS('foo'),
)
class DataSourceTest(unittest.TestCase):
def test01_valid_shp(self):
"Testing valid SHP Data Source files."
for source in ds_list:
# Loading up the data source
ds = DataSource(source.ds)
# Making sure the layer count is what's expected (only 1 layer in a SHP file)
self.assertEqual(1, len(ds))
# Making sure GetName works
self.assertEqual(source.ds, ds.name)
# Making sure the driver name matches up
self.assertEqual(source.driver, str(ds.driver))
# Making sure indexing works
try:
ds[len(ds)]
except OGRIndexError:
pass
else:
self.fail('Expected an IndexError!')
def test02_invalid_shp(self):
"Testing invalid SHP files for the Data Source."
for source in bad_ds:
self.assertRaises(OGRException, DataSource, source.ds)
def test03a_layers(self):
"Testing Data Source Layers."
print "\nBEGIN - expecting out of range feature id error; safe to ignore.\n"
for source in ds_list:
ds = DataSource(source.ds)
# Incrementing through each layer, this tests DataSource.__iter__
for layer in ds:
# Making sure we get the number of features we expect
self.assertEqual(len(layer), source.nfeat)
# Making sure we get the number of fields we expect
self.assertEqual(source.nfld, layer.num_fields)
self.assertEqual(source.nfld, len(layer.fields))
                # Testing the layer's extent (an Envelope), and its properties
if source.driver == 'VRT' and (GDAL_VERSION >= (1, 7, 0) and GDAL_VERSION < (1, 7, 3)):
# There's a known GDAL regression with retrieving the extent
# of a VRT layer in versions 1.7.0-1.7.2:
# http://trac.osgeo.org/gdal/ticket/3783
pass
else:
self.assertEqual(True, isinstance(layer.extent, Envelope))
self.assertAlmostEqual(source.extent[0], layer.extent.min_x, 5)
self.assertAlmostEqual(source.extent[1], layer.extent.min_y, 5)
self.assertAlmostEqual(source.extent[2], layer.extent.max_x, 5)
self.assertAlmostEqual(source.extent[3], layer.extent.max_y, 5)
# Now checking the field names.
flds = layer.fields
for f in flds: self.assertEqual(True, f in source.fields)
# Negative FIDs are not allowed.
self.assertRaises(OGRIndexError, layer.__getitem__, -1)
self.assertRaises(OGRIndexError, layer.__getitem__, 50000)
if hasattr(source, 'field_values'):
fld_names = source.field_values.keys()
# Testing `Layer.get_fields` (which uses Layer.__iter__)
for fld_name in fld_names:
self.assertEqual(source.field_values[fld_name], layer.get_fields(fld_name))
# Testing `Layer.__getitem__`.
for i, fid in enumerate(source.fids):
feat = layer[fid]
self.assertEqual(fid, feat.fid)
# Maybe this should be in the test below, but we might as well test
# the feature values here while in this loop.
for fld_name in fld_names:
self.assertEqual(source.field_values[fld_name][i], feat.get(fld_name))
print "\nEND - expecting out of range feature id error; safe to ignore."
def test03b_layer_slice(self):
"Test indexing and slicing on Layers."
# Using the first data-source because the same slice
# can be used for both the layer and the control values.
source = ds_list[0]
ds = DataSource(source.ds)
sl = slice(1, 3)
feats = ds[0][sl]
for fld_name in ds[0].fields:
test_vals = [feat.get(fld_name) for feat in feats]
control_vals = source.field_values[fld_name][sl]
self.assertEqual(control_vals, test_vals)
def test03c_layer_references(self):
"Test to make sure Layer access is still available without the DataSource."
source = ds_list[0]
# See ticket #9448.
def get_layer():
# This DataSource object is not accessible outside this
# scope. However, a reference should still be kept alive
# on the `Layer` returned.
ds = DataSource(source.ds)
return ds[0]
# Making sure we can call OGR routines on the Layer returned.
lyr = get_layer()
self.assertEqual(source.nfeat, len(lyr))
self.assertEqual(source.gtype, lyr.geom_type.num)
def test04_features(self):
"Testing Data Source Features."
for source in ds_list:
ds = DataSource(source.ds)
# Incrementing through each layer
for layer in ds:
# Incrementing through each feature in the layer
for feat in layer:
# Making sure the number of fields, and the geometry type
# are what's expected.
self.assertEqual(source.nfld, len(list(feat)))
self.assertEqual(source.gtype, feat.geom_type)
# Making sure the fields match to an appropriate OFT type.
for k, v in source.fields.items():
# Making sure we get the proper OGR Field instance, using
# a string value index for the feature.
self.assertEqual(True, isinstance(feat[k], v))
# Testing Feature.__iter__
for fld in feat: self.assertEqual(True, fld.name in source.fields.keys())
def test05_geometries(self):
"Testing Geometries from Data Source Features."
for source in ds_list:
ds = DataSource(source.ds)
# Incrementing through each layer and feature.
for layer in ds:
for feat in layer:
g = feat.geom
# Making sure we get the right Geometry name & type
self.assertEqual(source.geom, g.geom_name)
self.assertEqual(source.gtype, g.geom_type)
# Making sure the SpatialReference is as expected.
if hasattr(source, 'srs_wkt'):
self.assertEqual(source.srs_wkt, g.srs.wkt)
def test06_spatial_filter(self):
"Testing the Layer.spatial_filter property."
ds = DataSource(get_ds_file('cities', 'shp'))
lyr = ds[0]
# When not set, it should be None.
self.assertEqual(None, lyr.spatial_filter)
        # Must be set to an OGRGeometry or a 4-tuple.
self.assertRaises(TypeError, lyr._set_spatial_filter, 'foo')
# Setting the spatial filter with a tuple/list with the extent of
# a buffer centering around Pueblo.
self.assertRaises(ValueError, lyr._set_spatial_filter, range(5))
filter_extent = (-105.609252, 37.255001, -103.609252, 39.255001)
lyr.spatial_filter = (-105.609252, 37.255001, -103.609252, 39.255001)
self.assertEqual(OGRGeometry.from_bbox(filter_extent), lyr.spatial_filter)
feats = [feat for feat in lyr]
self.assertEqual(1, len(feats))
self.assertEqual('Pueblo', feats[0].get('Name'))
# Setting the spatial filter with an OGRGeometry for buffer centering
# around Houston.
filter_geom = OGRGeometry('POLYGON((-96.363151 28.763374,-94.363151 28.763374,-94.363151 30.763374,-96.363151 30.763374,-96.363151 28.763374))')
lyr.spatial_filter = filter_geom
self.assertEqual(filter_geom, lyr.spatial_filter)
feats = [feat for feat in lyr]
self.assertEqual(1, len(feats))
self.assertEqual('Houston', feats[0].get('Name'))
# Clearing the spatial filter by setting it to None. Now
# should indicate that there are 3 features in the Layer.
lyr.spatial_filter = None
self.assertEqual(3, len(lyr))
def test07_integer_overflow(self):
"Testing that OFTReal fields, treated as OFTInteger, do not overflow."
# Using *.dbf from Census 2010 TIGER Shapefile for Texas,
# which has land area ('ALAND10') stored in a Real field
# with no precision.
ds = DataSource(os.path.join(TEST_DATA, 'texas.dbf'))
feat = ds[0][0]
# Reference value obtained using `ogrinfo`.
self.assertEqual(676586997978, feat.get('ALAND10'))
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(DataSourceTest))
return s
def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
|
jamesmf/recommenderW2V | refs/heads/master | scripts/keras/keras/initializations.py | 9 | from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
from .utils.theano_utils import sharedX, shared_zeros
def get_fans(shape):
fan_in = shape[0] if len(shape) == 2 else np.prod(shape[1:])
fan_out = shape[1] if len(shape) == 2 else shape[0]
return fan_in, fan_out
def uniform(shape, scale=0.05):
return sharedX(np.random.uniform(low=-scale, high=scale, size=shape))
def normal(shape, scale=0.05):
return sharedX(np.random.randn(*shape) * scale)
def lecun_uniform(shape):
''' Reference: LeCun 98, Efficient Backprop
http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf
'''
fan_in, fan_out = get_fans(shape)
scale = 1./np.sqrt(fan_in)
return uniform(shape, scale)
def glorot_normal(shape):
''' Reference: Glorot & Bengio, AISTATS 2010
'''
fan_in, fan_out = get_fans(shape)
s = np.sqrt(2. / (fan_in + fan_out))
return normal(shape, s)
def glorot_uniform(shape):
fan_in, fan_out = get_fans(shape)
s = np.sqrt(2. / (fan_in + fan_out))
return uniform(shape, s)
def he_normal(shape):
''' Reference: He et al., http://arxiv.org/abs/1502.01852
'''
fan_in, fan_out = get_fans(shape)
s = np.sqrt(2. / fan_in)
return normal(shape, s)
def he_uniform(shape):
fan_in, fan_out = get_fans(shape)
s = np.sqrt(2. / fan_in)
return uniform(shape, s)
def orthogonal(shape, scale=1.1):
''' From Lasagne
'''
flat_shape = (shape[0], np.prod(shape[1:]))
a = np.random.normal(0.0, 1.0, flat_shape)
u, _, v = np.linalg.svd(a, full_matrices=False)
q = u if u.shape == flat_shape else v # pick the one with the correct shape
q = q.reshape(shape)
return sharedX(scale * q[:shape[0], :shape[1]])
def zero(shape):
return shared_zeros(shape)
from .utils.generic_utils import get_from_module
def get(identifier):
return get_from_module(identifier, globals(), 'initialization')
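# Illustrative usage sketch, not part of the library: look up an initialization
# by name via `get` and apply it to a weight shape.  The (784, 128) shape below
# is an arbitrary example value, and the helper is never called on import.
def _example_usage():
    init_fn = get('glorot_uniform')   # resolve the initializer by identifier
    W = init_fn((784, 128))           # shared Theano variable with that shape
    return W.get_value().shape        # -> (784, 128)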
|
galenhz/micropython | refs/heads/master | examples/SDdatalogger/datalogger.py | 98 | # datalogger.py
# Logs the data from the acceleromter to a file on the SD-card
import pyb
# creating objects
accel = pyb.Accel()
blue = pyb.LED(4)
switch = pyb.Switch()
# loop
while True:
# wait for interrupt
# this reduces power consumption while waiting for switch press
pyb.wfi()
# start if switch is pressed
if switch():
pyb.delay(200) # delay avoids detection of multiple presses
blue.on() # blue LED indicates file open
        log = open('/sd/log.csv', 'w') # open file on SD (SD: '/sd/', flash: '/flash/')
# until switch is pressed again
while not switch():
t = pyb.millis() # get time
x, y, z = accel.filtered_xyz() # get acceleration data
log.write('{},{},{},{}\n'.format(t,x,y,z)) # write data to file
# end after switch is pressed again
log.close() # close file
blue.off() # blue LED indicates file closed
pyb.delay(200) # delay avoids detection of multiple presses
|
davidlmorton/spikepy | refs/heads/master | spikepy/developer/file_interpreter.py | 1 | # Copyright (C) 2012 David Morton
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from spikepy.common.trial_manager import Trial
from spikepy.common.strategy import Strategy
class FileInterpreter(object):
'''
This class should be subclassed in order for developers to add a new file
interpreter to spikepy.
There is no need to instantiate (create an object from) the subclass,
spikepy will handle that internally. Therefore it is important to have
    an __init__ method which requires no arguments (aside from 'self' of course).
Methods that subclasses are REQUIRED to implement:
- read_data_file(fullpath)
-- This method recieves only a string representation of the
fullpath to the data file. It is required to return a list of
Trial and or Strategy objects, even if only one was created.
'''
# A list of one or more file extentions this interpreter can open.
extentions = []
# Higher priority means that this FileInterpreter will be tried first
# if spikepy tries more than one FileInterpreter.
priority = 10
def read_data_file(self, fullpath):
raise NotImplementedError
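# --- Illustrative sketch only, not part of spikepy --------------------------
# A minimal subclass following the contract documented above.  The class name,
# the '.txt' extension and the way the Trial object is built are assumptions
# made purely for illustration; consult the Trial class for its real
# constructor arguments before copying this pattern.
class ExampleTextInterpreter(FileInterpreter):
    extentions = ['.txt']
    priority = 5
    def read_data_file(self, fullpath):
        with open(fullpath, 'r') as infile:
            samples = [float(line) for line in infile if line.strip()]
        # Hypothetical construction -- attach `samples` according to Trial's
        # actual API here.
        trial = Trial()
        return [trial]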
|
ssh1/stbgui | refs/heads/master | tests/test_timer.py | 56 | import enigma
import sys
import time
import tests
#enigma.reset()
def test_timer(repeat = 0, timer_start = 3600, timer_length = 1000, sim_length = 86400 * 7):
import NavigationInstance
at = time.time()
t = NavigationInstance.instance.RecordTimer
print t
print "old mwt:", t.MaxWaitTime
t.MaxWaitTime = 86400 * 1000
# hack:
NavigationInstance.instance.SleepTimer.MaxWaitTime = 86400 * 1000
t.processed_timers = [ ]
t.timer_list = [ ]
# generate a timer to test
import xml.etree.cElementTree
import RecordTimer
timer = RecordTimer.createTimer(xml.etree.cElementTree.fromstring(
"""
<timer
begin="%d"
end="%d"
serviceref="1:0:1:6DD2:44D:1:C00000:0:0:0:"
repeated="%d"
name="Test Event Name"
description="Test Event Description"
afterevent="nothing"
eit="56422"
disabled="0"
justplay="0">
</timer>""" % (at + timer_start, at + timer_start + timer_length, repeat)
))
t.record(timer)
# run virtual environment
enigma.run(sim_length)
print "done."
timers = t.processed_timers + t.timer_list
print "start: %s" % (time.ctime(at + 10))
assert len(timers) == 1
for t in timers:
print "begin=%d, end=%d, repeated=%d, state=%d" % (t.begin - at, t.end - at, t.repeated, t.state)
print "begin: %s" % (time.ctime(t.begin))
print "end: %s" % (time.ctime(t.end))
# if repeat, check if the calculated repeated time of day matches the initial time of day
if repeat:
t_initial = time.localtime(at + timer_start)
t_repeated = time.localtime(timers[0].begin)
print t_initial
print t_repeated
if t_initial[3:6] != t_repeated[3:6]:
raise tests.TestError("repeated timer time of day does not match")
import FakeNotifications
#sys.modules["Tools.Notifications"] = FakeNotifications
#sys.modules["Tools.NumericalTextInput.NumericalTextInput"] = FakeNotifications
# required stuff for timer (we try to keep this minimal)
enigma.init_nav()
enigma.init_record_config()
enigma.init_parental_control()
from events import log
import calendar
import os
# we are operating in CET/CEST
os.environ['TZ'] = 'CET'
time.tzset()
#log(test_timer, test_name = "test_timer_repeating", base_time = calendar.timegm((2007, 3, 1, 12, 0, 0)), repeat=0x7f, sim_length = 86400 * 7)
log(test_timer, test_name = "test_timer_repeating_dst_skip", base_time = calendar.timegm((2007, 03, 20, 0, 0, 0)), timer_start = 3600, repeat=0x7f, sim_length = 86400 * 7)
#log(test_timer, test_name = "test_timer_repeating_dst_start", base_time = calendar.timegm((2007, 03, 20, 0, 0, 0)), timer_start = 10000, repeat=0x7f, sim_length = 86400 * 7)
|
eammx/proyectosWeb | refs/heads/master | proyectoPython/env/lib/python3.6/site-packages/werkzeug/__init__.py | 1 | """
werkzeug
~~~~~~~~
Werkzeug is the Swiss Army knife of Python web development.
It provides useful classes and functions for any WSGI application to
make the life of a Python web developer much easier. All of the provided
classes are independent from each other so you can mix it with any other
library.
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
from .serving import run_simple
from .test import Client
from .wrappers import Request
from .wrappers import Response
__version__ = "1.0.1"
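# Illustrative sketch, not part of the package: the objects re-exported above
# are enough to build and serve a tiny WSGI application.  Nothing below runs on
# import; call the helper explicitly to try it (run_simple blocks).
def _example_wsgi_app():  # pragma: no cover
    @Request.application
    def application(request):
        return Response("Hello from %s" % request.path)
    run_simple("localhost", 4000, application)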
|
ar45/django | refs/heads/master | django/contrib/gis/db/backends/postgis/models.py | 396 | """
The GeometryColumns and SpatialRefSys models for the PostGIS backend.
"""
from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class PostGISGeometryColumns(models.Model):
"""
    The 'geometry_columns' table from PostGIS. See the PostGIS
documentation at Ch. 4.3.2.
On PostGIS 2, this is a view.
"""
f_table_catalog = models.CharField(max_length=256)
f_table_schema = models.CharField(max_length=256)
f_table_name = models.CharField(max_length=256)
f_geometry_column = models.CharField(max_length=256)
coord_dimension = models.IntegerField()
srid = models.IntegerField(primary_key=True)
type = models.CharField(max_length=30)
class Meta:
app_label = 'gis'
db_table = 'geometry_columns'
managed = False
@classmethod
def table_name_col(cls):
"""
Returns the name of the metadata column used to store the feature table
name.
"""
return 'f_table_name'
@classmethod
def geom_col_name(cls):
"""
Returns the name of the metadata column used to store the feature
geometry column.
"""
return 'f_geometry_column'
def __str__(self):
return "%s.%s - %dD %s field (SRID: %d)" % \
(self.f_table_name, self.f_geometry_column,
self.coord_dimension, self.type, self.srid)
class PostGISSpatialRefSys(models.Model, SpatialRefSysMixin):
"""
The 'spatial_ref_sys' table from PostGIS. See the PostGIS
    documentation at Ch. 4.2.1.
"""
srid = models.IntegerField(primary_key=True)
auth_name = models.CharField(max_length=256)
auth_srid = models.IntegerField()
srtext = models.CharField(max_length=2048)
proj4text = models.CharField(max_length=2048)
class Meta:
app_label = 'gis'
db_table = 'spatial_ref_sys'
managed = False
@property
def wkt(self):
return self.srtext
@classmethod
def wkt_col(cls):
return 'srtext'
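# Illustrative sketch, not part of Django: because these are unmanaged models
# over PostGIS system tables, they can be queried like any other model.  The
# SRID below (4326, WGS 84) is only an example value and requires a configured
# PostGIS database connection.
def _example_lookup_wkt():
    return PostGISSpatialRefSys.objects.get(srid=4326).wkt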
|
saifrahmed/bokeh | refs/heads/master | bokeh/tests/test_objects.py | 42 | from __future__ import absolute_import
import unittest
from mock import Mock
from six import add_metaclass
from six.moves import xrange
import copy
def large_plot(n):
from bokeh.models import (Plot, PlotContext, LinearAxis, Grid, GlyphRenderer,
ColumnDataSource, DataRange1d, PanTool, WheelZoomTool, BoxZoomTool,
BoxSelectTool, BoxSelectionOverlay, ResizeTool, PreviewSaveTool,
ResetTool)
from bokeh.models.glyphs import Line
context = PlotContext()
objects = set([context])
for i in xrange(n):
source = ColumnDataSource(data=dict(x=[0, i + 1], y=[0, i + 1]))
xdr = DataRange1d()
ydr = DataRange1d()
plot = Plot(x_range=xdr, y_range=ydr)
xaxis = LinearAxis(plot=plot)
yaxis = LinearAxis(plot=plot)
xgrid = Grid(plot=plot, dimension=0)
ygrid = Grid(plot=plot, dimension=1)
tickers = [xaxis.ticker, xaxis.formatter, yaxis.ticker, yaxis.formatter]
glyph = Line(x='x', y='y')
renderer = GlyphRenderer(data_source=source, glyph=glyph)
plot.renderers.append(renderer)
pan = PanTool(plot=plot)
wheel_zoom = WheelZoomTool(plot=plot)
box_zoom = BoxZoomTool(plot=plot)
box_select = BoxSelectTool(plot=plot)
box_selection = BoxSelectionOverlay(tool=box_select)
resize = ResizeTool(plot=plot)
previewsave = PreviewSaveTool(plot=plot)
reset = ResetTool(plot=plot)
tools = [pan, wheel_zoom, box_zoom, box_select, box_selection, resize, previewsave, reset]
plot.tools.append(tools)
context.children.append(plot)
objects |= set([source, xdr, ydr, plot, xaxis, yaxis, xgrid, ygrid, renderer, glyph, plot.tool_events] + tickers + tools)
return context, objects
class TestViewable(unittest.TestCase):
def setUp(self):
from bokeh.plot_object import Viewable
self.viewable = Viewable
self.old_map = copy.copy(self.viewable.model_class_reverse_map)
def tearDown(self):
self.viewable.model_class_reverse_map = self.old_map
def mkclass(self):
@add_metaclass(self.viewable)
class Test_Class():
foo = 1
return Test_Class
def test_metaclassing(self):
tclass = self.mkclass()
self.assertTrue(hasattr(tclass, '__view_model__'))
self.assertRaises(Warning, self.mkclass)
def test_get_class(self):
self.mkclass()
tclass = self.viewable.get_class('Test_Class')
self.assertTrue(hasattr(tclass, 'foo'))
self.assertRaises(KeyError, self.viewable.get_class, 'Imaginary_Class')
class TestCollectPlotObjects(unittest.TestCase):
def test_references_large(self):
context, objects = large_plot(500)
self.assertEqual(set(context.references()), objects)
class TestPlotObject(unittest.TestCase):
def setUp(self):
from bokeh.models import PlotObject
self.pObjectClass = PlotObject
def test_init(self):
oldmethod = self.pObjectClass.setup_events
self.pObjectClass.setup_events = Mock()
testObject = self.pObjectClass(id='test_id', _block_events=True)
self.assertFalse(testObject.setup_events.called)
self.assertEqual(testObject._id, 'test_id')
testObject2 = self.pObjectClass()
self.assertTrue(testObject2.setup_events.called)
self.assertIsNot(testObject2._id, None)
self.pObjectClass.setup_events = oldmethod
def test_ref(self):
testObject = self.pObjectClass(id='test_id')
self.assertEqual({'type': 'PlotObject', 'id': 'test_id'}, testObject.ref)
def test_load_json(self):
cls = self.pObjectClass.get_class("Plot")
obj = cls.load_json({'id': 'test_id', 'min_border': 100})
self.assertEqual(obj._id, 'test_id')
self.assertEqual(obj.title, '')
self.assertEqual(obj.min_border, 100)
obj.load_json({'id': 'test_id', 'title': 'xyz'}, instance=obj)
self.assertEqual(obj._id, 'test_id')
self.assertEqual(obj.title, 'xyz')
self.assertEqual(obj.min_border, 100)
def test_references_by_ref_by_value(self):
from bokeh.properties import HasProps, Instance, Int
class T(self.pObjectClass):
t = Int(0)
class Y(self.pObjectClass):
t1 = Instance(T)
class Z1(HasProps):
t2 = Instance(T)
class Z2(self.pObjectClass):
t2 = Instance(T)
class X1(self.pObjectClass):
y = Instance(Y)
z1 = Instance(Z1)
class X2(self.pObjectClass):
y = Instance(Y)
z2 = Instance(Z2)
t1, t2 = T(t=1), T(t=2)
y = Y(t1=t1)
z1, z2 = Z1(t2=t2), Z2(t2=t2)
x1 = X1(y=y, z1=z1)
x2 = X2(y=y, z2=z2)
self.assertEqual(x1.references(), {t1, y, t2, x1})
self.assertEqual(x2.references(), {t1, y, t2, z2, x2})
def test_references_in_containers(self):
from bokeh.properties import Int, String, Instance, List, Tuple, Dict
# XXX: can't use Y, because of:
#
# Warning: Duplicate __view_model__ declaration of 'Y' for class Y.
# Previous definition: <class 'bokeh.tests.test_objects.Y'>
class U(self.pObjectClass):
a = Int
class V(self.pObjectClass):
u1 = Instance(U)
u2 = List(Instance(U))
u3 = Tuple(Int, Instance(U))
u4 = Dict(String, Instance(U))
u5 = Dict(String, List(Instance(U)))
u1, u2, u3, u4, u5 = U(a=1), U(a=2), U(a=3), U(a=4), U(a=5)
v = V(u1=u1, u2=[u2], u3=(3, u3), u4={"4": u4}, u5={"5": [u5]})
self.assertEqual(v.references(), set([v, u1, u2, u3, u4, u5]))
if __name__ == "__main__":
unittest.main()
|
johnnyLadders/Nathive_CITA | refs/heads/master | utils/docgen/docclass.py | 1 | #!/usr/bin/env python
#coding=utf-8
# Nathive (and this file) is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or newer.
#
# You should have received a copy of the GNU General Public License along with
# this file. If not, see <http://www.gnu.org/licenses/>.
import re
import docgen
class DocClass(object):
def __init__(self, code):
self.code = code
self.name = ''
self.parent = ''
self.title = ''
self.logic = ''
self.docstring = ''
self.methods = []
self.parse()
def parse(self):
definition = self.code[0]
res = re.search('class (.*)\((.*)\):', definition)
self.name = res.group(1)
self.parent = res.group(2)
self.title = 'class %s' % self.name
if self.parent != 'object': self.title += '(%s)' % self.parent
self.code.append('')
methodlines = []
for i, line in enumerate(self.code):
if line.startswith('\tdef '): methodlines.append(i)
for i, methodline in enumerate(methodlines):
start = methodline
if i+1 < len(methodlines): end = methodlines[i+1] - 1
else: end = -1
if start != end: methodcode = self.code[start:end]
else: methodcode = [self.code[start]]
amethod = docgen.DocMethod(methodcode, self.name)
if amethod.name == '__del__': continue
self.methods.append(amethod)
def dump(self):
rst = ''
rst += '\n\n\n%s\n' % self.title
rst += '-' * len(self.title)
for method in self.methods: rst += method.dump()
return rst
|
eromoe/pyspider | refs/heads/master | pyspider/libs/bench.py | 7 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<[email protected]>
# http://binux.me
# Created on 2014-12-08 22:23:10
# rate: 10000000000
# burst: 10000000000
import time
import logging
logger = logging.getLogger('bench')
from six.moves import queue as Queue
from pyspider.scheduler import ThreadBaseScheduler as Scheduler
from pyspider.fetcher.tornado_fetcher import Fetcher
from pyspider.processor import Processor
from pyspider.result import ResultWorker
from pyspider.libs.utils import md5string
def bench_test_taskdb(taskdb):
project_name = '__bench_test__'
task = {
"fetch": {
"fetch_type": "js",
"headers": {
"User-Agent": "BaiDuSpider"
}
},
"process": {
"callback": "detail_page"
},
"project": project_name,
"taskid": "553300d2582154413b4982c00c34a2d5",
"url": "http://www.sciencedirect.com/science/article/pii/S1674200109000704"
}
track = {
"fetch": {
"content": None,
"encoding": "unicode",
"error": None,
"headers": {
"last-modified": "Wed, 04 Mar 2015 09:24:33 GMT"
},
"ok": True,
"redirect_url": None,
"status_code": 200,
"time": 5.543
},
"process": {
"exception": None,
"follows": 4,
"logs": "",
"ok": True,
"result": "{'url': u'",
"time": 0.07105398178100586
}
}
def test_insert(n, start=0):
logger.info("taskdb insert %d", n)
start_time = time.time()
for i in range(n):
task['url'] = 'http://bench.pyspider.org/?l=%d' % (i + start)
task['taskid'] = md5string(task['url'])
task['track'] = {}
taskdb.insert(task['project'], task['taskid'], task)
end_time = time.time()
cost_time = end_time - start_time
logger.info("cost %.2fs, %.2f/s %.2fms",
cost_time, n * 1.0 / cost_time, cost_time / n * 1000)
def test_update(n, start=0):
logger.info("taskdb update %d" % n)
start_time = time.time()
for i in range(n):
task['url'] = 'http://bench.pyspider.org/?l=%d' % (i + start)
task['taskid'] = md5string(task['url'])
task['track'] = track
taskdb.update(task['project'], task['taskid'], task)
end_time = time.time()
cost_time = end_time - start_time
logger.info("cost %.2fs, %.2f/s %.2fms",
cost_time, n * 1.0 / cost_time, cost_time / n * 1000)
request_task_fields = [
'taskid',
'project',
'url',
'status',
'fetch',
'process',
'track',
'lastcrawltime'
]
def test_get(n, start=0, random=True, fields=request_task_fields):
logger.info("taskdb get %d %s" % (n, "randomly" if random else ""))
range_n = list(range(n))
if random:
from random import shuffle
shuffle(range_n)
start_time = time.time()
for i in range_n:
task['url'] = 'http://bench.pyspider.org/?l=%d' % (i + start)
task['taskid'] = md5string(task['url'])
task['track'] = track
taskdb.get_task(task['project'], task['taskid'], fields=fields)
end_time = time.time()
cost_time = end_time - start_time
logger.info("cost %.2fs, %.2f/s %.2fms",
cost_time, n * 1.0 / cost_time, cost_time / n * 1000)
try:
test_insert(1000)
test_update(1000)
test_get(1000)
test_insert(10000, 1000)
test_update(10000, 1000)
test_get(10000, 1000)
except Exception as e:
logger.exception(e)
finally:
taskdb.drop(project_name)
def bench_test_message_queue(queue):
task = {
"fetch": {
"fetch_type": "js",
"headers": {
"User-Agent": "BaiDuSpider"
}
},
"process": {
"callback": "detail_page"
},
"project": "__bench_test__",
"taskid": "553300d2582154413b4982c00c34a2d5",
"url": "http://www.sciencedirect.com/science/article/pii/S1674200109000704"
}
def test_put(n):
logger.info("message queue put %d", n)
start_time = time.time()
for i in range(n):
task['url'] = 'http://bench.pyspider.org/?l=%d' % i
task['taskid'] = md5string(task['url'])
queue.put(task, block=True, timeout=1)
end_time = time.time()
cost_time = end_time - start_time
logger.info("cost %.2fs, %.2f/s %.2fms",
cost_time, n * 1.0 / cost_time, cost_time / n * 1000)
def test_get(n):
logger.info("message queue get %d", n)
start_time = time.time()
for i in range(n):
try:
queue.get(True, 1)
except Queue.Empty:
logger.error('message queue empty while get %d', i)
raise
end_time = time.time()
cost_time = end_time - start_time
logger.info("cost %.2fs, %.2f/s %.2fms",
cost_time, n * 1.0 / cost_time, cost_time / n * 1000)
try:
test_put(1000)
test_get(1000)
test_put(10000)
test_get(10000)
except Exception as e:
logger.exception(e)
finally:
if hasattr(queue, 'channel'):
queue.channel.queue_purge(queue.name)
# clear message queue
try:
while queue.get(False):
continue
except Queue.Empty:
pass
class BenchMixin(object):
"""Report to logger for bench test"""
def _bench_init(self):
self.done_cnt = 0
self.start_time = time.time()
self.last_cnt = 0
self.last_report = 0
def _bench_report(self, name, prefix=0, rjust=0):
self.done_cnt += 1
now = time.time()
if now - self.last_report >= 1:
rps = float(self.done_cnt - self.last_cnt) / (now - self.last_report)
output = ''
if prefix:
output += " " * prefix
output += ("%s %s pages (at %d pages/min)" % (
name, self.done_cnt, rps * 60.0)).rjust(rjust)
logger.info(output)
self.last_cnt = self.done_cnt
self.last_report = now
class BenchScheduler(Scheduler, BenchMixin):
def __init__(self, *args, **kwargs):
super(BenchScheduler, self).__init__(*args, **kwargs)
self._bench_init()
def on_task_status(self, task):
self._bench_report('Crawled')
return super(BenchScheduler, self).on_task_status(task)
class BenchFetcher(Fetcher, BenchMixin):
def __init__(self, *args, **kwargs):
super(BenchFetcher, self).__init__(*args, **kwargs)
self._bench_init()
def on_result(self, type, task, result):
self._bench_report("Fetched", 0, 75)
return super(BenchFetcher, self).on_result(type, task, result)
class BenchProcessor(Processor, BenchMixin):
def __init__(self, *args, **kwargs):
super(BenchProcessor, self).__init__(*args, **kwargs)
self._bench_init()
def on_task(self, task, response):
self._bench_report("Processed", 75)
return super(BenchProcessor, self).on_task(task, response)
class BenchResultWorker(ResultWorker, BenchMixin):
def __init__(self, *args, **kwargs):
super(BenchResultWorker, self).__init__(*args, **kwargs)
self._bench_init()
def on_result(self, task, result):
self._bench_report("Saved", 0, 150)
super(BenchResultWorker, self).on_result(task, result)
from pyspider.libs.base_handler import BaseHandler
class Handler(BaseHandler):
def on_start(self, response):
self.crawl('http://127.0.0.1:5000/bench',
params={'total': response.save.get('total', 10000), 'show': response.save.get('show', 20)},
callback=self.index_page)
def index_page(self, response):
for each in response.doc('a[href^="http://"]').items():
self.crawl(each.attr.href, callback=self.index_page)
return response.url
|
kuri-kustar/pixhawk_plotting_tools | refs/heads/master | px4tools_scripts/px4tools/version.py | 1 |
# THIS FILE IS GENERATED FROM SETUP.PY
short_version = '0.7.7'
version = '0.7.7'
full_version = '0.7.7'
git_revision = 'Unknown'
release = True
if not release:
version = full_version
|
ALPSquid/thebutton-monitor | refs/heads/master | src/examples/limitlessled_example.py | 1 | import ledcontroller
from thebutton import TheButton
import math
class ButtonApp():
def __init__(self):
CONTROLLER_ADDRESS="192.168.1.119"
self.led = ledcontroller.LedController(CONTROLLER_ADDRESS)
self.group = 0 #Limitless LED Bulb Bridge Group of LEDs to use. 0 is all.
# Create a new instance of the button client. Does nothing until start() is called
self.the_button = TheButton()
self.last_lowest = 60.0
def run(self):
# The WebSocketApp loop runs in it's own thread,
# so make sure you call TheButton.close() when you're done with it!
self.the_button.start()
previous_colour='0' # Initialise previous colour
try:
while True:
# Colours are in hexadecimal but the PlayBulb Candle required saturation in front of the value
colour = self.the_button.limitless_colour
# Set the PlayBulbs to the current flair colour
# Resource: Protocols for PlayBulb products (https://github.com/Phhere/Playbulb)
if colour != previous_colour:
self.led.set_color(colour, self.group)
previous_colour=colour
# There's no built-in time persistence, so by default, lowest time is for the current session
if self.the_button.lowest_time < self.last_lowest:
self.led.set_color('white', self.group) # Flash when a new record is set
self.led.set_color(colour, self.group)
self.last_lowest = self.the_button.lowest_time
print("New button record! " + str(math.floor(self.last_lowest)))
previous_colour='0' # Reset previous colour so flashing stops before next colour change
except KeyboardInterrupt:
pass
self.close()
def close(self):
# The Button WebSocketApp runs in it's own thread, so make sure it's closed. This also closes the socket
self.the_button.close()
if __name__ == "__main__":
button_app = ButtonApp()
button_app.run()
|
stevenewey/django | refs/heads/master | django/utils/dateformat.py | 365 | """
PHP date() style date formatting
See http://www.php.net/date for format strings
Usage:
>>> import datetime
>>> d = datetime.datetime.now()
>>> df = DateFormat(d)
>>> print(df.format('jS F Y H:i'))
7th October 2003 11:39
>>>
"""
from __future__ import unicode_literals
import calendar
import datetime
import re
import time
from django.utils import six
from django.utils.dates import (
MONTHS, MONTHS_3, MONTHS_ALT, MONTHS_AP, WEEKDAYS, WEEKDAYS_ABBR,
)
from django.utils.encoding import force_text
from django.utils.timezone import get_default_timezone, is_aware, is_naive
from django.utils.translation import ugettext as _
re_formatchars = re.compile(r'(?<!\\)([aAbBcdDeEfFgGhHiIjlLmMnNoOPrsStTUuwWyYzZ])')
re_escaped = re.compile(r'\\(.)')
class Formatter(object):
def format(self, formatstr):
pieces = []
for i, piece in enumerate(re_formatchars.split(force_text(formatstr))):
if i % 2:
pieces.append(force_text(getattr(self, piece)()))
elif piece:
pieces.append(re_escaped.sub(r'\1', piece))
return ''.join(pieces)
class TimeFormat(Formatter):
def __init__(self, obj):
self.data = obj
self.timezone = None
# We only support timezone when formatting datetime objects,
# not date objects (timezone information not appropriate),
# or time objects (against established django policy).
if isinstance(obj, datetime.datetime):
if is_naive(obj):
self.timezone = get_default_timezone()
else:
self.timezone = obj.tzinfo
def a(self):
"'a.m.' or 'p.m.'"
if self.data.hour > 11:
return _('p.m.')
return _('a.m.')
def A(self):
"'AM' or 'PM'"
if self.data.hour > 11:
return _('PM')
return _('AM')
def B(self):
"Swatch Internet time"
raise NotImplementedError('may be implemented in a future release')
def e(self):
"""
Timezone name.
If timezone information is not available, this method returns
an empty string.
"""
if not self.timezone:
return ""
try:
if hasattr(self.data, 'tzinfo') and self.data.tzinfo:
# Have to use tzinfo.tzname and not datetime.tzname
                # because datetime.tzname does not expect Unicode
return self.data.tzinfo.tzname(self.data) or ""
except NotImplementedError:
pass
return ""
def f(self):
"""
Time, in 12-hour hours and minutes, with minutes left off if they're
zero.
Examples: '1', '1:30', '2:05', '2'
Proprietary extension.
"""
if self.data.minute == 0:
return self.g()
return '%s:%s' % (self.g(), self.i())
def g(self):
"Hour, 12-hour format without leading zeros; i.e. '1' to '12'"
if self.data.hour == 0:
return 12
if self.data.hour > 12:
return self.data.hour - 12
return self.data.hour
def G(self):
"Hour, 24-hour format without leading zeros; i.e. '0' to '23'"
return self.data.hour
def h(self):
"Hour, 12-hour format; i.e. '01' to '12'"
return '%02d' % self.g()
def H(self):
"Hour, 24-hour format; i.e. '00' to '23'"
return '%02d' % self.G()
def i(self):
"Minutes; i.e. '00' to '59'"
return '%02d' % self.data.minute
def O(self):
"""
Difference to Greenwich time in hours; e.g. '+0200', '-0430'.
If timezone information is not available, this method returns
an empty string.
"""
if not self.timezone:
return ""
seconds = self.Z()
sign = '-' if seconds < 0 else '+'
seconds = abs(seconds)
return "%s%02d%02d" % (sign, seconds // 3600, (seconds // 60) % 60)
def P(self):
"""
Time, in 12-hour hours, minutes and 'a.m.'/'p.m.', with minutes left off
if they're zero and the strings 'midnight' and 'noon' if appropriate.
Examples: '1 a.m.', '1:30 p.m.', 'midnight', 'noon', '12:30 p.m.'
Proprietary extension.
"""
if self.data.minute == 0 and self.data.hour == 0:
return _('midnight')
if self.data.minute == 0 and self.data.hour == 12:
return _('noon')
return '%s %s' % (self.f(), self.a())
def s(self):
"Seconds; i.e. '00' to '59'"
return '%02d' % self.data.second
def T(self):
"""
Time zone of this machine; e.g. 'EST' or 'MDT'.
If timezone information is not available, this method returns
an empty string.
"""
if not self.timezone:
return ""
name = self.timezone.tzname(self.data) if self.timezone else None
if name is None:
name = self.format('O')
return six.text_type(name)
def u(self):
"Microseconds; i.e. '000000' to '999999'"
return '%06d' % self.data.microsecond
def Z(self):
"""
Time zone offset in seconds (i.e. '-43200' to '43200'). The offset for
timezones west of UTC is always negative, and for those east of UTC is
always positive.
If timezone information is not available, this method returns
an empty string.
"""
if not self.timezone:
return ""
offset = self.timezone.utcoffset(self.data)
# `offset` is a datetime.timedelta. For negative values (to the west of
# UTC) only days can be negative (days=-1) and seconds are always
# positive. e.g. UTC-1 -> timedelta(days=-1, seconds=82800, microseconds=0)
# Positive offsets have days=0
return offset.days * 86400 + offset.seconds
class DateFormat(TimeFormat):
year_days = [None, 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334]
def b(self):
"Month, textual, 3 letters, lowercase; e.g. 'jan'"
return MONTHS_3[self.data.month]
def c(self):
"""
ISO 8601 Format
Example : '2008-01-02T10:30:00.000123'
"""
return self.data.isoformat()
def d(self):
"Day of the month, 2 digits with leading zeros; i.e. '01' to '31'"
return '%02d' % self.data.day
def D(self):
"Day of the week, textual, 3 letters; e.g. 'Fri'"
return WEEKDAYS_ABBR[self.data.weekday()]
def E(self):
"Alternative month names as required by some locales. Proprietary extension."
return MONTHS_ALT[self.data.month]
def F(self):
"Month, textual, long; e.g. 'January'"
return MONTHS[self.data.month]
def I(self):
"'1' if Daylight Savings Time, '0' otherwise."
if self.timezone and self.timezone.dst(self.data):
return '1'
else:
return '0'
def j(self):
"Day of the month without leading zeros; i.e. '1' to '31'"
return self.data.day
def l(self):
"Day of the week, textual, long; e.g. 'Friday'"
return WEEKDAYS[self.data.weekday()]
def L(self):
"Boolean for whether it is a leap year; i.e. True or False"
return calendar.isleap(self.data.year)
def m(self):
"Month; i.e. '01' to '12'"
return '%02d' % self.data.month
def M(self):
"Month, textual, 3 letters; e.g. 'Jan'"
return MONTHS_3[self.data.month].title()
def n(self):
"Month without leading zeros; i.e. '1' to '12'"
return self.data.month
def N(self):
"Month abbreviation in Associated Press style. Proprietary extension."
return MONTHS_AP[self.data.month]
def o(self):
"ISO 8601 year number matching the ISO week number (W)"
return self.data.isocalendar()[0]
def r(self):
"RFC 2822 formatted date; e.g. 'Thu, 21 Dec 2000 16:01:07 +0200'"
return self.format('D, j M Y H:i:s O')
def S(self):
"English ordinal suffix for the day of the month, 2 characters; i.e. 'st', 'nd', 'rd' or 'th'"
if self.data.day in (11, 12, 13): # Special case
return 'th'
last = self.data.day % 10
if last == 1:
return 'st'
if last == 2:
return 'nd'
if last == 3:
return 'rd'
return 'th'
def t(self):
"Number of days in the given month; i.e. '28' to '31'"
return '%02d' % calendar.monthrange(self.data.year, self.data.month)[1]
def U(self):
"Seconds since the Unix epoch (January 1 1970 00:00:00 GMT)"
if isinstance(self.data, datetime.datetime) and is_aware(self.data):
return int(calendar.timegm(self.data.utctimetuple()))
else:
return int(time.mktime(self.data.timetuple()))
def w(self):
"Day of the week, numeric, i.e. '0' (Sunday) to '6' (Saturday)"
return (self.data.weekday() + 1) % 7
def W(self):
"ISO-8601 week number of year, weeks starting on Monday"
# Algorithm from http://www.personal.ecu.edu/mccartyr/ISOwdALG.txt
week_number = None
jan1_weekday = self.data.replace(month=1, day=1).weekday() + 1
weekday = self.data.weekday() + 1
day_of_year = self.z()
if day_of_year <= (8 - jan1_weekday) and jan1_weekday > 4:
if jan1_weekday == 5 or (jan1_weekday == 6 and calendar.isleap(self.data.year - 1)):
week_number = 53
else:
week_number = 52
else:
if calendar.isleap(self.data.year):
i = 366
else:
i = 365
if (i - day_of_year) < (4 - weekday):
week_number = 1
else:
j = day_of_year + (7 - weekday) + (jan1_weekday - 1)
week_number = j // 7
if jan1_weekday > 4:
week_number -= 1
return week_number
def y(self):
"Year, 2 digits; e.g. '99'"
return six.text_type(self.data.year)[2:]
def Y(self):
"Year, 4 digits; e.g. '1999'"
return self.data.year
def z(self):
"Day of the year; i.e. '0' to '365'"
doy = self.year_days[self.data.month] + self.data.day
if self.L() and self.data.month > 2:
doy += 1
return doy
def format(value, format_string):
"Convenience function"
df = DateFormat(value)
return df.format(format_string)
def time_format(value, format_string):
"Convenience function"
tf = TimeFormat(value)
return tf.format(format_string)
|
phil65/xbmc | refs/heads/master | addons/service.xbmc.versioncheck/lib/aptdeamonhandler.py | 177 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Team-XBMC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import xbmc
from common import *
try:
#import apt
import apt
from aptdaemon import client
from aptdaemon import errors
except:
log('python apt import error')
class AptdeamonHandler:
def __init__(self):
        self.aptclient = client.AptClient()
        self._pwd = ""  # cached password; read and filled in by _getpassword()
def _check_versions(self, package):
if not self._update_cache():
return False, False
try:
trans = self.aptclient.upgrade_packages([package])
#trans = self.aptclient.upgrade_packages("bla")
trans.simulate(reply_handler=self._apttransstarted, error_handler=self._apterrorhandler)
pkg = trans.packages[4][0]
if pkg == package:
                cache = apt.Cache()
cache.open(None)
cache.upgrade()
if cache[pkg].installed:
return cache[pkg].installed.version, cache[pkg].candidate.version
return False, False
except Exception as error:
log("Exception while checking versions: %s" %error)
return False, False
def _update_cache(self):
try:
if self.aptclient.update_cache(wait=True) == "exit-success":
return True
else:
return False
except errors.NotAuthorizedError:
log("You are not allowed to update the cache")
return False
def check_upgrade_available(self, package):
'''returns True if newer package is available in the repositories'''
installed, candidate = self._check_versions(package)
if installed and candidate:
if installed != candidate:
log("Version installed %s" %installed)
log("Version available %s" %candidate)
return True
else:
log("Already on newest version")
elif not installed:
log("No installed package found")
return False
else:
return False
def upgrade_package(self, package):
try:
log("Installing new version")
if self.aptclient.upgrade_packages([package], wait=True) == "exit-success":
log("Upgrade successful")
return True
except Exception as error:
log("Exception during upgrade: %s" %error)
return False
def upgrade_system(self):
try:
log("Upgrading system")
if self.aptclient.upgrade_system(wait=True) == "exit-success":
return True
except Exception as error:
log("Exception during system upgrade: %s" %error)
return False
def _getpassword(self):
if len(self._pwd) == 0:
self._pwd = get_password_from_user()
return self._pwd
def _apttransstarted(self):
pass
def _apterrorhandler(self, error):
log("Apt Error %s" %error) |
mesutcang/kendoku | refs/heads/master | runlab.py | 1 | #!/usr/bin/python2
# -*- encoding: utf-8 -*-
#
# Author: Peter Schüller (2014)
# Adapted from a script posted by Adam Marshall Smith on the potassco mailing list (2014)
#
import sys
import re
import json
import subprocess
import collections
import traceback
def extractExtensions(answerset):
#print(repr(answer_set))
field_pattern = re.compile(r'(\w+)\(f\((\d+),(\d+)\)(?:,([0-9]+|[a-z][a-zA-Z0-9]*|"[^"]*"))?\)')
extensions = collections.defaultdict(lambda: set())
for l in answerset:
try:
args = field_pattern.match(l).groups()
#print "for {} got field pattern match {}".format(l, repr(args))
# first arg = predicate
# second/third arg = coordinates
# rest is taken as string if not None but " are stripped
head = args[0]
rest = [int(args[1]), int(args[2])]
if args[3]:
rest.append(str(args[3]).strip('"'))
#sys.stderr.write(
# "got head {} and rest {}\n".format(repr(head), repr(rest)))
extensions[head].add(tuple(rest))
except:
#sys.stderr.write("exception ignored: "+traceback.format_exc())
pass
return extensions
def render_svg(literals,size=20):
import xmlbuilder
answer_set = extractExtensions(literals)
maxx = max(map(lambda x: x[0], answer_set['field']))
maxy = max(map(lambda x: x[1], answer_set['field']))
svg = xmlbuilder.XMLBuilder(
'svg',
viewBox="0 0 %d %d"%(10*(maxx+1),10*(maxy+1)),
xmlns="http://www.w3.org/2000/svg",
**{'xmlns:xlink':"http://www.w3.org/1999/xlink"})
#with svg.linearGradient(id="grad"):
# svg.stop(offset="0", **{'stop-color': "#f00"})
# svg.stop(offset="1", **{'stop-color': "#ff0"})
#with svg.g():
# for (x,y) in room.values():
# svg.circle(cx=str(5+10*x),cy=str(5+10*y),r="2")
with svg.g():
for (x, y) in answer_set['field']:
x = int(x)
y = int(y)
svg.rect(x=str(10*x - 5),
y=str(10*y - 5),
width=str(10),
height=str(10),
style="stroke: black; stroke-width: 1px; fill:white;")
for (x, y) in answer_set['wall']:
x = int(x)
y = int(y)
svg.rect(x=str(10*x - 5),
y=str(10*y - 5),
width=str(10),
height=str(10),
style="stroke: black; stroke-width: 1px; fill:black;")
x = int(x)
y = int(y)
svg.rect(x=str(5*x - 5),
y=str(5*y - 5),
width=str(10),
height=str(10),
style="stroke: black; stroke-width: 1px; fill:black;")
for (x, y) in answer_set['exit']:
x = int(x)
y = int(y)
svg.circle(cx=str(10*x), cy=str(10*y), r=str(3), style="stroke: red; fill:red; ")
for (x, y) in answer_set['mark']:
x = int(x)
y = int(y)
svg.circle(cx=str(10*x), cy=str(10*y), r=str(2), style="stroke: blue; fill:blue; ")
for (x, y, text) in answer_set['text']:
x = int(x)
y = int(y)
text = str(text)
print("SVG %d %d %s" % (x, y, text))
svg.text(text, x=str(10*x-3), y=str(10*y+3), style="stroke: green; font-size: 9px; ")
#return IPython.display.SVG(data=str(svg))
with file("out.svg","w+") as f:
f.write(str(svg))
import Tkinter as tk
class Window:
def __init__(self,answersets):
self.answersets = answersets
self.selections = range(0,len(self.answersets))
self.selected = 0
self.root = tk.Tk()
self.main = tk.Frame(self.root)
self.main.pack(fill=tk.BOTH, expand=1)
self.canvas = tk.Canvas(self.main, bg="white")
self.canvas.pack(fill=tk.BOTH, expand=1, side=tk.TOP)
self.selector = tk.Scale(self.main, orient=tk.HORIZONTAL, showvalue=0, command=self.select)
self.selector.pack(side=tk.BOTTOM,fill=tk.X)
self.root.bind("<Right>", lambda x:self.go(+1))
self.root.bind("<Left>", lambda x:self.go(-1))
self.root.bind("q", exit) # TODO more graceful quitting
self.items = []
self.updateView()
def select(self,which):
self.selected = int(which)
self.updateView()
def go(self,direction):
self.selected = (self.selected + direction) % len(self.answersets)
self.updateView()
def updateView(self):
self.selector.config(from_=0, to=len(self.answersets)-1)
SIZE=100
FIELD_FILL='#FFF'
WALL_FILL='#444'
MARK_FILL='#A77'
TEXT_FILL='#000'
def fieldRect(x,y,offset=SIZE):
x, y = int(x), int(y)
return (x*SIZE-offset/2, y*SIZE-offset/2, x*SIZE+offset/2, y*SIZE+offset/2)
def fieldRect2(x,y,offset=SIZE):
x, y = int(x), int(y)
return (x*SIZE+9-offset/2, y*SIZE+9-offset/2, x*SIZE-9+offset/2, y*SIZE-9+offset/2)
# delete old items
for i in self.items:
self.canvas.delete(i)
# create new items
self.items = []
ext = extractExtensions(self.answersets[self.selected])
#print repr(ext)
maxx = max(map(lambda x: x[0], ext['field']))
maxy = max(map(lambda x: x[1], ext['field']))
self.root.geometry("{}x{}+0+0".format((maxx+1)*SIZE, (maxy+2)*SIZE))
for (x, y) in ext['field']:
self.items.append( self.canvas.create_rectangle( * fieldRect(x,y), fill=FIELD_FILL) )
for (x, y, text) in ext['color']:
print(text)
self.items.append( self.canvas.create_rectangle( * fieldRect2(x,y), fill=text) ) # x and y coordinates
for (x, y) in ext['wall']:
self.items.append( self.canvas.create_rectangle( * fieldRect(x,y), fill=WALL_FILL) )
for (x, y, sign) in ext['sign']:
fr = fieldRect(x,y)
self.items.append( self.canvas.create_text( (fr[0]+fr[2])/2, (fr[1]+fr[3])/2, anchor=tk.SE, text=str(sign), fill=WALL_FILL, font=("Courier", 33)) )
for (x, y) in ext['mark']:
self.items.append( self.canvas.create_oval( *fieldRect(x,y,10), fill=MARK_FILL) )
for (x, y, text) in ext['text']:
fr = fieldRect(x,y)
#print("TK %d %d %s" % (x, y, repr(text)))
self.items.append( self.canvas.create_text( (fr[0]+fr[2])/2, (fr[1]+fr[3])/2, text=str(text), fill=TEXT_FILL) )
def display_tk(answersets):
w = Window(answersets)
MAXANS=100
clingo = subprocess.Popen(
"clingo --outf=2 laby.lp {maxans}".format(maxans=MAXANS),
shell=True, stdout=subprocess.PIPE)
clingoout, clingoerr = clingo.communicate()
del clingo
clingoout = json.loads(clingoout)
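# Editorial note: with --outf=2, clingo emits JSON shaped roughly like
#   {"Call": [{"Witnesses": [{"Value": ["field(f(1,1))", "wall(f(2,3))", ...]}]}], ...}
# (illustrative, trimmed), which is why the witnesses are read from
# clingoout['Call'][0]['Witnesses'] below.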
#print(repr(clingoout))
#print(repr(clingoout['Call'][0]['Witnesses']))
#print(repr(clingoout['Call'][0]['Witnesses'][0]['Value']))
witnesses = clingoout['Call'][0]['Witnesses']
import random
#render_svg(random.choice(witn)['Value'])
display_tk(map(lambda witness: witness['Value'], witnesses))
tk.mainloop() |
neiudemo1/django | refs/heads/master | tests/cache/liberal_backend.py | 446 | from django.core.cache.backends.locmem import LocMemCache
class LiberalKeyValidationMixin(object):
def validate_key(self, key):
pass
class CacheClass(LiberalKeyValidationMixin, LocMemCache):
pass
|
florian-dacosta/OCB | refs/heads/8.0 | addons/portal/wizard/share_wizard.py | 158 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 OpenERP S.A (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
import logging
_logger = logging.getLogger(__name__)
UID_ROOT = 1
SHARED_DOCS_MENU = "Documents"
SHARED_DOCS_CHILD_MENU = "Shared Documents"
class share_wizard_portal(osv.TransientModel):
"""Inherited share wizard to automatically create appropriate
menus in the selected portal upon sharing with a portal group."""
_inherit = "share.wizard"
def _user_type_selection(self, cr, uid, context=None):
selection = super(share_wizard_portal, self)._user_type_selection(cr, uid, context=context)
selection.extend([('existing',_('Users you already shared with')),
('groups',_('Existing Groups (e.g Portal Groups)'))])
return selection
_columns = {
'user_ids': fields.many2many('res.users', 'share_wizard_res_user_rel', 'share_id', 'user_id', 'Existing users', domain=[('share', '=', True)]),
'group_ids': fields.many2many('res.groups', 'share_wizard_res_group_rel', 'share_id', 'group_id', 'Existing groups', domain=[('share', '=', False)]),
}
def _check_preconditions(self, cr, uid, wizard_data, context=None):
if wizard_data.user_type == 'existing':
self._assert(wizard_data.user_ids,
_('Please select at least one user to share with'),
context=context)
elif wizard_data.user_type == 'groups':
self._assert(wizard_data.group_ids,
_('Please select at least one group to share with'),
context=context)
return super(share_wizard_portal, self)._check_preconditions(cr, uid, wizard_data, context=context)
def _create_or_get_submenu_named(self, cr, uid, parent_menu_id, menu_name, context=None):
if not parent_menu_id:
return
Menus = self.pool.get('ir.ui.menu')
parent_menu = Menus.browse(cr, uid, parent_menu_id) # No context
menu_id = None
max_seq = 10
for child_menu in parent_menu.child_id:
max_seq = max(max_seq, child_menu.sequence)
if child_menu.name == menu_name:
menu_id = child_menu.id
break
if not menu_id:
# not found, create it
menu_id = Menus.create(cr, UID_ROOT,
{'name': menu_name,
'parent_id': parent_menu.id,
'sequence': max_seq + 10, # at the bottom
})
return menu_id
def _sharing_root_menu_id(self, cr, uid, portal, context=None):
"""Create or retrieve root ID of sharing menu in portal menu
:param portal: browse_record of portal, constructed with a context WITHOUT language
"""
parent_menu_id = self._create_or_get_submenu_named(cr, uid, portal.parent_menu_id.id, SHARED_DOCS_MENU, context=context)
if parent_menu_id:
child_menu_id = self._create_or_get_submenu_named(cr, uid, parent_menu_id, SHARED_DOCS_CHILD_MENU, context=context)
return child_menu_id
def _create_shared_data_menu(self, cr, uid, wizard_data, portal, context=None):
"""Create sharing menus in portal menu according to share wizard options.
:param wizard_data: browse_record of share.wizard
:param portal: browse_record of portal, constructed with a context WITHOUT language
"""
root_menu_id = self._sharing_root_menu_id(cr, uid, portal, context=context)
if not root_menu_id:
# no specific parent menu, cannot create the sharing menu at all.
return
# Create the shared action and menu
action_def = self._shared_action_def(cr, uid, wizard_data, context=None)
action_id = self.pool.get('ir.actions.act_window').create(cr, UID_ROOT, action_def)
menu_data = {'name': action_def['name'],
'sequence': 10,
'action': 'ir.actions.act_window,'+str(action_id),
'parent_id': root_menu_id,
'icon': 'STOCK_JUSTIFY_FILL'}
menu_id = self.pool.get('ir.ui.menu').create(cr, UID_ROOT, menu_data)
return menu_id
def _create_share_users_group(self, cr, uid, wizard_data, context=None):
# Override of super() to handle the possibly selected "existing users"
# and "existing groups".
# In both cases, we call super() to create the share group, but when
# sharing with existing groups, we will later delete it, and copy its
# access rights and rules to the selected groups.
super_result = super(share_wizard_portal,self)._create_share_users_group(cr, uid, wizard_data, context=context)
# For sharing with existing groups, we don't create a share group, instead we'll
# alter the rules of the groups so they can see the shared data
if wizard_data.group_ids:
# get the list of portals and the related groups to install their menus.
res_groups = self.pool.get('res.groups')
all_portal_group_ids = res_groups.search(cr, UID_ROOT, [('is_portal', '=', True)])
# populate result lines with the users of each group and
# setup the menu for portal groups
for group in wizard_data.group_ids:
if group.id in all_portal_group_ids:
self._create_shared_data_menu(cr, uid, wizard_data, group.id, context=context)
for user in group.users:
new_line = {'user_id': user.id,
'newly_created': False}
wizard_data.write({'result_line_ids': [(0,0,new_line)]})
elif wizard_data.user_ids:
# must take care of existing users, by adding them to the new group, which is super_result[0],
# and adding the shortcut
selected_user_ids = [x.id for x in wizard_data.user_ids]
self.pool.get('res.users').write(cr, UID_ROOT, selected_user_ids, {'groups_id': [(4, super_result[0])]})
self._setup_action_and_shortcut(cr, uid, wizard_data, selected_user_ids, make_home=False, context=context)
# populate the result lines for existing users too
for user in wizard_data.user_ids:
new_line = { 'user_id': user.id,
'newly_created': False}
wizard_data.write({'result_line_ids': [(0,0,new_line)]})
return super_result
def copy_share_group_access_and_delete(self, cr, wizard_data, share_group_id, context=None):
# In the case of sharing with existing groups, the strategy is to copy
        # access rights and rules from the share group to the selected groups,
        # so that we can then delete the temporary share group.
if not wizard_data.group_ids: return
Groups = self.pool.get('res.groups')
Rules = self.pool.get('ir.rule')
Rights = self.pool.get('ir.model.access')
share_group = Groups.browse(cr, UID_ROOT, share_group_id)
share_rule_ids = [r.id for r in share_group.rule_groups]
for target_group in wizard_data.group_ids:
# Link the rules to the group. This is appropriate because as of
# v6.1, the algorithm for combining them will OR the rules, hence
# extending the visible data.
Rules.write(cr, UID_ROOT, share_rule_ids, {'groups': [(4,target_group.id)]})
_logger.debug("Linked sharing rules from temporary sharing group to group %s", target_group)
# Copy the access rights. This is appropriate too because
# groups have the UNION of all permissions granted by their
# access right lines.
for access_line in share_group.model_access:
Rights.copy(cr, UID_ROOT, access_line.id, default={'group_id': target_group.id})
_logger.debug("Copied access rights from temporary sharing group to group %s", target_group)
# finally, delete it after removing its users
Groups.write(cr, UID_ROOT, [share_group_id], {'users': [(6,0,[])]})
Groups.unlink(cr, UID_ROOT, [share_group_id])
_logger.debug("Deleted temporary sharing group %s", share_group_id)
def _finish_result_lines(self, cr, uid, wizard_data, share_group_id, context=None):
super(share_wizard_portal,self)._finish_result_lines(cr, uid, wizard_data, share_group_id, context=context)
self.copy_share_group_access_and_delete(cr, wizard_data, share_group_id, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
TshepangRas/tshilo-dikotla | refs/heads/develop | td_maternal/admin/maternal_ultrasound_initial_admin.py | 2 | from collections import OrderedDict
from django.contrib import admin
from edc_export.actions import export_as_csv_action
from ..forms import MaternalUltraSoundInitialForm
from ..models import MaternalUltraSoundInitial
from .base_maternal_model_admin import BaseMaternalModelAdmin
class MaternalUltraSoundInitialAdmin(BaseMaternalModelAdmin):
form = MaternalUltraSoundInitialForm
fields = ('maternal_visit',
'report_datetime',
'number_of_gestations',
'bpd',
'hc',
'ac',
'fl',
# 'hl',
'ga_by_lmp',
'ga_by_ultrasound_wks',
'ga_by_ultrasound_days',
'ga_confirmed',
'est_fetal_weight',
'est_edd_ultrasound',
'edd_confirmed',
# 'lateral_ventricle',
# 'cerebellum',
# 'cistema_magna',
# 'malformations',
'amniotic_fluid_volume')
readonly_fields = ('edd_confirmed', 'ga_confirmed', 'ga_by_lmp')
radio_fields = {'number_of_gestations': admin.VERTICAL,
'amniotic_fluid_volume': admin.VERTICAL,}
list_display = ('report_datetime', 'number_of_gestations', 'ga_confrimation_method', 'edd_confirmed',
'ga_confirmed', 'ga_by_lmp')
list_filter = ('report_datetime', 'number_of_gestations', 'ga_confrimation_method')
# filter_horizontal = ('malformations',)
admin.site.register(MaternalUltraSoundInitial, MaternalUltraSoundInitialAdmin)
|
mars-knowsnothing/amos-bot | refs/heads/master | src/Lib/site-packages/wheel/egg2wheel.py | 471 | #!/usr/bin/env python
import os.path
import re
import sys
import tempfile
import zipfile
import wheel.bdist_wheel
import shutil
import distutils.dist
from distutils.archive_util import make_archive
from argparse import ArgumentParser
from glob import iglob
egg_info_re = re.compile(r'''(?P<name>.+?)-(?P<ver>.+?)
(-(?P<pyver>.+?))?(-(?P<arch>.+?))?.egg''', re.VERBOSE)
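# Illustrative example (editorial note, not from the original source): an egg
# named "nose-1.3.0-py2.7.egg" would parse as name='nose', ver='1.3.0',
# pyver='py2.7', arch=None.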
def egg2wheel(egg_path, dest_dir):
egg_info = egg_info_re.match(os.path.basename(egg_path)).groupdict()
dir = tempfile.mkdtemp(suffix="_e2w")
if os.path.isfile(egg_path):
# assume we have a bdist_egg otherwise
egg = zipfile.ZipFile(egg_path)
egg.extractall(dir)
else:
# support buildout-style installed eggs directories
for pth in os.listdir(egg_path):
src = os.path.join(egg_path, pth)
if os.path.isfile(src):
shutil.copy2(src, dir)
else:
shutil.copytree(src, os.path.join(dir, pth))
dist_info = "%s-%s" % (egg_info['name'], egg_info['ver'])
abi = 'none'
pyver = egg_info['pyver'].replace('.', '')
arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')
if arch != 'any':
# assume all binary eggs are for CPython
pyver = 'cp' + pyver[2:]
wheel_name = '-'.join((
dist_info,
pyver,
abi,
arch
))
bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
bw.root_is_purelib = egg_info['arch'] is None
dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
bw.egg2dist(os.path.join(dir, 'EGG-INFO'),
dist_info_dir)
bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
bw.write_record(dir, dist_info_dir)
filename = make_archive(os.path.join(dest_dir, wheel_name), 'zip', root_dir=dir)
os.rename(filename, filename[:-3] + 'whl')
shutil.rmtree(dir)
def main():
parser = ArgumentParser()
parser.add_argument('eggs', nargs='*', help="Eggs to convert")
parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
help="Directory to store wheels (default %(default)s)")
parser.add_argument('--verbose', '-v', action='store_true')
args = parser.parse_args()
for pat in args.eggs:
for egg in iglob(pat):
if args.verbose:
sys.stdout.write("{0}... ".format(egg))
egg2wheel(egg, args.dest_dir)
if args.verbose:
sys.stdout.write("OK\n")
if __name__ == "__main__":
main()
|
go38/anki | refs/heads/master | aqt/customstudy.py | 18 | # Copyright: Damien Elmes <[email protected]>
# -*- coding: utf-8 -*-
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
from aqt.qt import *
import aqt
from aqt.utils import showInfo, showWarning
from anki.consts import *
RADIO_NEW = 1
RADIO_REV = 2
RADIO_FORGOT = 3
RADIO_AHEAD = 4
RADIO_PREVIEW = 5
RADIO_CRAM = 6
TYPE_NEW = 0
TYPE_DUE = 1
TYPE_ALL = 2
class CustomStudy(QDialog):
def __init__(self, mw):
QDialog.__init__(self, mw)
self.mw = mw
self.deck = self.mw.col.decks.current()
self.form = f = aqt.forms.customstudy.Ui_Dialog()
f.setupUi(self)
self.setWindowModality(Qt.WindowModal)
self.setupSignals()
f.radio1.click()
self.exec_()
def setupSignals(self):
f = self.form; c = self.connect; s = SIGNAL("clicked()")
c(f.radio1, s, lambda: self.onRadioChange(1))
c(f.radio2, s, lambda: self.onRadioChange(2))
c(f.radio3, s, lambda: self.onRadioChange(3))
c(f.radio4, s, lambda: self.onRadioChange(4))
c(f.radio5, s, lambda: self.onRadioChange(5))
c(f.radio6, s, lambda: self.onRadioChange(6))
def onRadioChange(self, idx):
f = self.form; sp = f.spin
smin = 1; smax = DYN_MAX_SIZE; sval = 1
post = _("cards")
tit = ""
spShow = True
typeShow = False
ok = _("OK")
def plus(num):
if num == 1000:
num = "1000+"
return "<b>"+str(num)+"</b>"
if idx == RADIO_NEW:
new = self.mw.col.sched.totalNewForCurrentDeck()
self.deck['newToday']
tit = _("New cards in deck: %s") % plus(new)
pre = _("Increase today's new card limit by")
sval = min(new, self.deck.get('extendNew', 10))
smax = new
elif idx == RADIO_REV:
rev = self.mw.col.sched.totalRevForCurrentDeck()
tit = _("Reviews due in deck: %s") % plus(rev)
pre = _("Increase today's review limit by")
sval = min(rev, self.deck.get('extendRev', 10))
elif idx == RADIO_FORGOT:
pre = _("Review cards forgotten in last")
post = _("days")
smax = 30
elif idx == RADIO_AHEAD:
pre = _("Review ahead by")
post = _("days")
elif idx == RADIO_PREVIEW:
pre = _("Preview new cards added in the last")
post = _("days")
sval = 1
elif idx == RADIO_CRAM:
pre = _("Select")
post = _("cards from the deck")
#tit = _("After pressing OK, you can choose which tags to include.")
ok = _("Choose Tags")
sval = 100
typeShow = True
sp.setVisible(spShow)
f.cardType.setVisible(typeShow)
f.title.setText(tit)
f.title.setVisible(not not tit)
f.spin.setMinimum(smin)
f.spin.setMaximum(smax)
f.spin.setValue(sval)
f.preSpin.setText(pre)
f.postSpin.setText(post)
f.buttonBox.button(QDialogButtonBox.Ok).setText(ok)
self.radioIdx = idx
def accept(self):
f = self.form; i = self.radioIdx; spin = f.spin.value()
if i == RADIO_NEW:
self.deck['extendNew'] = spin
self.mw.col.decks.save(self.deck)
self.mw.col.sched.extendLimits(spin, 0)
self.mw.reset()
return QDialog.accept(self)
elif i == RADIO_REV:
self.deck['extendRev'] = spin
self.mw.col.decks.save(self.deck)
self.mw.col.sched.extendLimits(0, spin)
self.mw.reset()
return QDialog.accept(self)
elif i == RADIO_CRAM:
tags = self._getTags()
# the rest create a filtered deck
cur = self.mw.col.decks.byName(_("Custom Study Session"))
if cur:
if not cur['dyn']:
showInfo("Please rename the existing Custom Study deck first.")
return QDialog.accept(self)
else:
# safe to empty
self.mw.col.sched.emptyDyn(cur['id'])
# reuse; don't delete as it may have children
dyn = cur
self.mw.col.decks.select(cur['id'])
else:
did = self.mw.col.decks.newDyn(_("Custom Study Session"))
dyn = self.mw.col.decks.get(did)
# and then set various options
if i == RADIO_FORGOT:
dyn['delays'] = [1]
dyn['terms'][0] = ['rated:%d:1' % spin, DYN_MAX_SIZE, DYN_RANDOM]
dyn['resched'] = False
elif i == RADIO_AHEAD:
dyn['delays'] = None
dyn['terms'][0] = ['prop:due<=%d' % spin, DYN_MAX_SIZE, DYN_DUE]
dyn['resched'] = True
elif i == RADIO_PREVIEW:
dyn['delays'] = None
dyn['terms'][0] = ['is:new added:%s'%spin, DYN_MAX_SIZE, DYN_OLDEST]
dyn['resched'] = False
elif i == RADIO_CRAM:
dyn['delays'] = None
type = f.cardType.currentRow()
if type == TYPE_NEW:
terms = "is:new "
ord = DYN_ADDED
dyn['resched'] = True
elif type == TYPE_DUE:
terms = "is:due "
ord = DYN_DUE
dyn['resched'] = True
else:
terms = ""
ord = DYN_RANDOM
dyn['resched'] = False
dyn['terms'][0] = [(terms+tags).strip(), spin, ord]
# add deck limit
dyn['terms'][0][0] = "deck:\"%s\" %s " % (self.deck['name'], dyn['terms'][0][0])
# generate cards
if not self.mw.col.sched.rebuildDyn():
return showWarning(_("No cards matched the criteria you provided."))
self.mw.moveToState("overview")
QDialog.accept(self)
def _getTags(self):
from aqt.taglimit import TagLimit
t = TagLimit(self.mw, self)
return t.tags
|
ygol/odoo | refs/heads/8.0 | addons/l10n_fr/wizard/fr_report_compute_resultant.py | 374 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2008 JAILLET Simon - CrysaLEAD - www.crysalead.fr
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
from openerp.osv import fields, osv
class account_cdr_report(osv.osv_memory):
_name = 'account.cdr.report'
_description = 'Account CDR Report'
def _get_defaults(self, cr, uid, context=None):
fiscalyear_id = self.pool.get('account.fiscalyear').find(cr, uid)
return fiscalyear_id
_columns = {
'fiscalyear_id': fields.many2one('account.fiscalyear', 'Fiscal Year', required=True),
}
_defaults = {
'fiscalyear_id': _get_defaults
}
def print_cdr_report(self, cr, uid, ids, context=None):
active_ids = context.get('active_ids', [])
data = {}
data['form'] = {}
data['ids'] = active_ids
data['form']['fiscalyear_id'] = self.browse(cr, uid, ids)[0].fiscalyear_id.id
return self.pool['report'].get_action(
cr, uid, ids, 'l10n_fr.report_l10nfrresultat', data=data, context=context
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
osmfj/sotmjp-website | refs/heads/master | sotmjp/proposals/apps.py | 2 | from __future__ import unicode_literals
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class ProposalsConfig(AppConfig):
name = "sotmjp.proposals"
verbose_name = _("Proposals")
|
block8437/python-gui-builder | refs/heads/master | loader.py | 1 | import os, gui
import xml.etree.ElementTree as ET
from Tkinter import *
import tkMessageBox
class NonexistantFileError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class InvalidGUILineError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
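# Editorial sketch of the .gui XML this loader expects, inferred from
# loadToMem() below; the concrete attribute values are made up:
#
#   <window size="400 300" name="Custom Window">
#       <button text="Quit" position="10 10" side="top" function="on_quit"/>
#   </window>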
class GUILoader():
def __init__(self, gui_file,script_file=None):
self.functions = {}
self.script_file = script_file
        if self.script_file and os.path.exists(self.script_file):
exec open(self.script_file).read()
self.loaded = False
self.elements = {"buttons": [],
"text-input": [],
"image": []}
self.window_size = (0,0)
self.window_name = "Custom Window"
if not os.path.exists(gui_file):
raise NonexistantFileError("GUI File doesn't exist")
self.dom_tree = ET.parse(gui_file)
def loadToMem(self):
root = self.dom_tree.getroot()
if root.tag != "window":
raise InvalidGUILineError("First object must be a window object")
window_attribs = root.attrib
try:
window_size = window_attribs['size']
window_size = window_size.split(' ')
self.window_size = (int(window_size[0]),int(window_size[1]))
except:
raise InvalidGUILineError("Window size not specified")
try:
self.window_name = window_attribs['name']
except:
pass
for button in root.iter('button'):
attrib = button.attrib
butt = [attrib["text"],(int(attrib["position"].split(' ')[0]),int(attrib["position"].split(' ')[1])), attrib["side"]]
try:
alert = attrib["alert"]
except:
try:
butt.append(self.functions[attrib["function"]])
except:
pass
self.elements["buttons"].append(butt)
self.loaded = True
def startGUI(self):
if self.loaded == False:
return False
else:
root = Tk()
self.app = gui.BuiltApplication(master=root, loader=self)
self.app.mainloop()
root.destroy()
if __name__ == "__main__":
    loader = GUILoader("test.gui", "test.guiscript")
    loader.loadToMem()
    loader.startGUI()
twobob/buildroot-kindle | refs/heads/master | output/build/host-python-2.7.2/Lib/stringprep.py | 278 | # This file is generated by mkstringprep.py. DO NOT EDIT.
"""Library that exposes various tables found in the StringPrep RFC 3454.
There are two kinds of tables: sets, for which a member test is provided,
and mappings, for which a mapping function is provided.
"""
from unicodedata import ucd_3_2_0 as unicodedata
assert unicodedata.unidata_version == '3.2.0'
def in_table_a1(code):
if unicodedata.category(code) != 'Cn': return False
c = ord(code)
if 0xFDD0 <= c < 0xFDF0: return False
return (c & 0xFFFF) not in (0xFFFE, 0xFFFF)
b1_set = set([173, 847, 6150, 6155, 6156, 6157, 8203, 8204, 8205, 8288, 65279] + range(65024,65040))
def in_table_b1(code):
return ord(code) in b1_set
b3_exceptions = {
0xb5:u'\u03bc', 0xdf:u'ss', 0x130:u'i\u0307', 0x149:u'\u02bcn',
0x17f:u's', 0x1f0:u'j\u030c', 0x345:u'\u03b9', 0x37a:u' \u03b9',
0x390:u'\u03b9\u0308\u0301', 0x3b0:u'\u03c5\u0308\u0301', 0x3c2:u'\u03c3', 0x3d0:u'\u03b2',
0x3d1:u'\u03b8', 0x3d2:u'\u03c5', 0x3d3:u'\u03cd', 0x3d4:u'\u03cb',
0x3d5:u'\u03c6', 0x3d6:u'\u03c0', 0x3f0:u'\u03ba', 0x3f1:u'\u03c1',
0x3f2:u'\u03c3', 0x3f5:u'\u03b5', 0x587:u'\u0565\u0582', 0x1e96:u'h\u0331',
0x1e97:u't\u0308', 0x1e98:u'w\u030a', 0x1e99:u'y\u030a', 0x1e9a:u'a\u02be',
0x1e9b:u'\u1e61', 0x1f50:u'\u03c5\u0313', 0x1f52:u'\u03c5\u0313\u0300', 0x1f54:u'\u03c5\u0313\u0301',
0x1f56:u'\u03c5\u0313\u0342', 0x1f80:u'\u1f00\u03b9', 0x1f81:u'\u1f01\u03b9', 0x1f82:u'\u1f02\u03b9',
0x1f83:u'\u1f03\u03b9', 0x1f84:u'\u1f04\u03b9', 0x1f85:u'\u1f05\u03b9', 0x1f86:u'\u1f06\u03b9',
0x1f87:u'\u1f07\u03b9', 0x1f88:u'\u1f00\u03b9', 0x1f89:u'\u1f01\u03b9', 0x1f8a:u'\u1f02\u03b9',
0x1f8b:u'\u1f03\u03b9', 0x1f8c:u'\u1f04\u03b9', 0x1f8d:u'\u1f05\u03b9', 0x1f8e:u'\u1f06\u03b9',
0x1f8f:u'\u1f07\u03b9', 0x1f90:u'\u1f20\u03b9', 0x1f91:u'\u1f21\u03b9', 0x1f92:u'\u1f22\u03b9',
0x1f93:u'\u1f23\u03b9', 0x1f94:u'\u1f24\u03b9', 0x1f95:u'\u1f25\u03b9', 0x1f96:u'\u1f26\u03b9',
0x1f97:u'\u1f27\u03b9', 0x1f98:u'\u1f20\u03b9', 0x1f99:u'\u1f21\u03b9', 0x1f9a:u'\u1f22\u03b9',
0x1f9b:u'\u1f23\u03b9', 0x1f9c:u'\u1f24\u03b9', 0x1f9d:u'\u1f25\u03b9', 0x1f9e:u'\u1f26\u03b9',
0x1f9f:u'\u1f27\u03b9', 0x1fa0:u'\u1f60\u03b9', 0x1fa1:u'\u1f61\u03b9', 0x1fa2:u'\u1f62\u03b9',
0x1fa3:u'\u1f63\u03b9', 0x1fa4:u'\u1f64\u03b9', 0x1fa5:u'\u1f65\u03b9', 0x1fa6:u'\u1f66\u03b9',
0x1fa7:u'\u1f67\u03b9', 0x1fa8:u'\u1f60\u03b9', 0x1fa9:u'\u1f61\u03b9', 0x1faa:u'\u1f62\u03b9',
0x1fab:u'\u1f63\u03b9', 0x1fac:u'\u1f64\u03b9', 0x1fad:u'\u1f65\u03b9', 0x1fae:u'\u1f66\u03b9',
0x1faf:u'\u1f67\u03b9', 0x1fb2:u'\u1f70\u03b9', 0x1fb3:u'\u03b1\u03b9', 0x1fb4:u'\u03ac\u03b9',
0x1fb6:u'\u03b1\u0342', 0x1fb7:u'\u03b1\u0342\u03b9', 0x1fbc:u'\u03b1\u03b9', 0x1fbe:u'\u03b9',
0x1fc2:u'\u1f74\u03b9', 0x1fc3:u'\u03b7\u03b9', 0x1fc4:u'\u03ae\u03b9', 0x1fc6:u'\u03b7\u0342',
0x1fc7:u'\u03b7\u0342\u03b9', 0x1fcc:u'\u03b7\u03b9', 0x1fd2:u'\u03b9\u0308\u0300', 0x1fd3:u'\u03b9\u0308\u0301',
0x1fd6:u'\u03b9\u0342', 0x1fd7:u'\u03b9\u0308\u0342', 0x1fe2:u'\u03c5\u0308\u0300', 0x1fe3:u'\u03c5\u0308\u0301',
0x1fe4:u'\u03c1\u0313', 0x1fe6:u'\u03c5\u0342', 0x1fe7:u'\u03c5\u0308\u0342', 0x1ff2:u'\u1f7c\u03b9',
0x1ff3:u'\u03c9\u03b9', 0x1ff4:u'\u03ce\u03b9', 0x1ff6:u'\u03c9\u0342', 0x1ff7:u'\u03c9\u0342\u03b9',
0x1ffc:u'\u03c9\u03b9', 0x20a8:u'rs', 0x2102:u'c', 0x2103:u'\xb0c',
0x2107:u'\u025b', 0x2109:u'\xb0f', 0x210b:u'h', 0x210c:u'h',
0x210d:u'h', 0x2110:u'i', 0x2111:u'i', 0x2112:u'l',
0x2115:u'n', 0x2116:u'no', 0x2119:u'p', 0x211a:u'q',
0x211b:u'r', 0x211c:u'r', 0x211d:u'r', 0x2120:u'sm',
0x2121:u'tel', 0x2122:u'tm', 0x2124:u'z', 0x2128:u'z',
0x212c:u'b', 0x212d:u'c', 0x2130:u'e', 0x2131:u'f',
0x2133:u'm', 0x213e:u'\u03b3', 0x213f:u'\u03c0', 0x2145:u'd',
0x3371:u'hpa', 0x3373:u'au', 0x3375:u'ov', 0x3380:u'pa',
0x3381:u'na', 0x3382:u'\u03bca', 0x3383:u'ma', 0x3384:u'ka',
0x3385:u'kb', 0x3386:u'mb', 0x3387:u'gb', 0x338a:u'pf',
0x338b:u'nf', 0x338c:u'\u03bcf', 0x3390:u'hz', 0x3391:u'khz',
0x3392:u'mhz', 0x3393:u'ghz', 0x3394:u'thz', 0x33a9:u'pa',
0x33aa:u'kpa', 0x33ab:u'mpa', 0x33ac:u'gpa', 0x33b4:u'pv',
0x33b5:u'nv', 0x33b6:u'\u03bcv', 0x33b7:u'mv', 0x33b8:u'kv',
0x33b9:u'mv', 0x33ba:u'pw', 0x33bb:u'nw', 0x33bc:u'\u03bcw',
0x33bd:u'mw', 0x33be:u'kw', 0x33bf:u'mw', 0x33c0:u'k\u03c9',
0x33c1:u'm\u03c9', 0x33c3:u'bq', 0x33c6:u'c\u2215kg', 0x33c7:u'co.',
0x33c8:u'db', 0x33c9:u'gy', 0x33cb:u'hp', 0x33cd:u'kk',
0x33ce:u'km', 0x33d7:u'ph', 0x33d9:u'ppm', 0x33da:u'pr',
0x33dc:u'sv', 0x33dd:u'wb', 0xfb00:u'ff', 0xfb01:u'fi',
0xfb02:u'fl', 0xfb03:u'ffi', 0xfb04:u'ffl', 0xfb05:u'st',
0xfb06:u'st', 0xfb13:u'\u0574\u0576', 0xfb14:u'\u0574\u0565', 0xfb15:u'\u0574\u056b',
0xfb16:u'\u057e\u0576', 0xfb17:u'\u0574\u056d', 0x1d400:u'a', 0x1d401:u'b',
0x1d402:u'c', 0x1d403:u'd', 0x1d404:u'e', 0x1d405:u'f',
0x1d406:u'g', 0x1d407:u'h', 0x1d408:u'i', 0x1d409:u'j',
0x1d40a:u'k', 0x1d40b:u'l', 0x1d40c:u'm', 0x1d40d:u'n',
0x1d40e:u'o', 0x1d40f:u'p', 0x1d410:u'q', 0x1d411:u'r',
0x1d412:u's', 0x1d413:u't', 0x1d414:u'u', 0x1d415:u'v',
0x1d416:u'w', 0x1d417:u'x', 0x1d418:u'y', 0x1d419:u'z',
0x1d434:u'a', 0x1d435:u'b', 0x1d436:u'c', 0x1d437:u'd',
0x1d438:u'e', 0x1d439:u'f', 0x1d43a:u'g', 0x1d43b:u'h',
0x1d43c:u'i', 0x1d43d:u'j', 0x1d43e:u'k', 0x1d43f:u'l',
0x1d440:u'm', 0x1d441:u'n', 0x1d442:u'o', 0x1d443:u'p',
0x1d444:u'q', 0x1d445:u'r', 0x1d446:u's', 0x1d447:u't',
0x1d448:u'u', 0x1d449:u'v', 0x1d44a:u'w', 0x1d44b:u'x',
0x1d44c:u'y', 0x1d44d:u'z', 0x1d468:u'a', 0x1d469:u'b',
0x1d46a:u'c', 0x1d46b:u'd', 0x1d46c:u'e', 0x1d46d:u'f',
0x1d46e:u'g', 0x1d46f:u'h', 0x1d470:u'i', 0x1d471:u'j',
0x1d472:u'k', 0x1d473:u'l', 0x1d474:u'm', 0x1d475:u'n',
0x1d476:u'o', 0x1d477:u'p', 0x1d478:u'q', 0x1d479:u'r',
0x1d47a:u's', 0x1d47b:u't', 0x1d47c:u'u', 0x1d47d:u'v',
0x1d47e:u'w', 0x1d47f:u'x', 0x1d480:u'y', 0x1d481:u'z',
0x1d49c:u'a', 0x1d49e:u'c', 0x1d49f:u'd', 0x1d4a2:u'g',
0x1d4a5:u'j', 0x1d4a6:u'k', 0x1d4a9:u'n', 0x1d4aa:u'o',
0x1d4ab:u'p', 0x1d4ac:u'q', 0x1d4ae:u's', 0x1d4af:u't',
0x1d4b0:u'u', 0x1d4b1:u'v', 0x1d4b2:u'w', 0x1d4b3:u'x',
0x1d4b4:u'y', 0x1d4b5:u'z', 0x1d4d0:u'a', 0x1d4d1:u'b',
0x1d4d2:u'c', 0x1d4d3:u'd', 0x1d4d4:u'e', 0x1d4d5:u'f',
0x1d4d6:u'g', 0x1d4d7:u'h', 0x1d4d8:u'i', 0x1d4d9:u'j',
0x1d4da:u'k', 0x1d4db:u'l', 0x1d4dc:u'm', 0x1d4dd:u'n',
0x1d4de:u'o', 0x1d4df:u'p', 0x1d4e0:u'q', 0x1d4e1:u'r',
0x1d4e2:u's', 0x1d4e3:u't', 0x1d4e4:u'u', 0x1d4e5:u'v',
0x1d4e6:u'w', 0x1d4e7:u'x', 0x1d4e8:u'y', 0x1d4e9:u'z',
0x1d504:u'a', 0x1d505:u'b', 0x1d507:u'd', 0x1d508:u'e',
0x1d509:u'f', 0x1d50a:u'g', 0x1d50d:u'j', 0x1d50e:u'k',
0x1d50f:u'l', 0x1d510:u'm', 0x1d511:u'n', 0x1d512:u'o',
0x1d513:u'p', 0x1d514:u'q', 0x1d516:u's', 0x1d517:u't',
0x1d518:u'u', 0x1d519:u'v', 0x1d51a:u'w', 0x1d51b:u'x',
0x1d51c:u'y', 0x1d538:u'a', 0x1d539:u'b', 0x1d53b:u'd',
0x1d53c:u'e', 0x1d53d:u'f', 0x1d53e:u'g', 0x1d540:u'i',
0x1d541:u'j', 0x1d542:u'k', 0x1d543:u'l', 0x1d544:u'm',
0x1d546:u'o', 0x1d54a:u's', 0x1d54b:u't', 0x1d54c:u'u',
0x1d54d:u'v', 0x1d54e:u'w', 0x1d54f:u'x', 0x1d550:u'y',
0x1d56c:u'a', 0x1d56d:u'b', 0x1d56e:u'c', 0x1d56f:u'd',
0x1d570:u'e', 0x1d571:u'f', 0x1d572:u'g', 0x1d573:u'h',
0x1d574:u'i', 0x1d575:u'j', 0x1d576:u'k', 0x1d577:u'l',
0x1d578:u'm', 0x1d579:u'n', 0x1d57a:u'o', 0x1d57b:u'p',
0x1d57c:u'q', 0x1d57d:u'r', 0x1d57e:u's', 0x1d57f:u't',
0x1d580:u'u', 0x1d581:u'v', 0x1d582:u'w', 0x1d583:u'x',
0x1d584:u'y', 0x1d585:u'z', 0x1d5a0:u'a', 0x1d5a1:u'b',
0x1d5a2:u'c', 0x1d5a3:u'd', 0x1d5a4:u'e', 0x1d5a5:u'f',
0x1d5a6:u'g', 0x1d5a7:u'h', 0x1d5a8:u'i', 0x1d5a9:u'j',
0x1d5aa:u'k', 0x1d5ab:u'l', 0x1d5ac:u'm', 0x1d5ad:u'n',
0x1d5ae:u'o', 0x1d5af:u'p', 0x1d5b0:u'q', 0x1d5b1:u'r',
0x1d5b2:u's', 0x1d5b3:u't', 0x1d5b4:u'u', 0x1d5b5:u'v',
0x1d5b6:u'w', 0x1d5b7:u'x', 0x1d5b8:u'y', 0x1d5b9:u'z',
0x1d5d4:u'a', 0x1d5d5:u'b', 0x1d5d6:u'c', 0x1d5d7:u'd',
0x1d5d8:u'e', 0x1d5d9:u'f', 0x1d5da:u'g', 0x1d5db:u'h',
0x1d5dc:u'i', 0x1d5dd:u'j', 0x1d5de:u'k', 0x1d5df:u'l',
0x1d5e0:u'm', 0x1d5e1:u'n', 0x1d5e2:u'o', 0x1d5e3:u'p',
0x1d5e4:u'q', 0x1d5e5:u'r', 0x1d5e6:u's', 0x1d5e7:u't',
0x1d5e8:u'u', 0x1d5e9:u'v', 0x1d5ea:u'w', 0x1d5eb:u'x',
0x1d5ec:u'y', 0x1d5ed:u'z', 0x1d608:u'a', 0x1d609:u'b',
0x1d60a:u'c', 0x1d60b:u'd', 0x1d60c:u'e', 0x1d60d:u'f',
0x1d60e:u'g', 0x1d60f:u'h', 0x1d610:u'i', 0x1d611:u'j',
0x1d612:u'k', 0x1d613:u'l', 0x1d614:u'm', 0x1d615:u'n',
0x1d616:u'o', 0x1d617:u'p', 0x1d618:u'q', 0x1d619:u'r',
0x1d61a:u's', 0x1d61b:u't', 0x1d61c:u'u', 0x1d61d:u'v',
0x1d61e:u'w', 0x1d61f:u'x', 0x1d620:u'y', 0x1d621:u'z',
0x1d63c:u'a', 0x1d63d:u'b', 0x1d63e:u'c', 0x1d63f:u'd',
0x1d640:u'e', 0x1d641:u'f', 0x1d642:u'g', 0x1d643:u'h',
0x1d644:u'i', 0x1d645:u'j', 0x1d646:u'k', 0x1d647:u'l',
0x1d648:u'm', 0x1d649:u'n', 0x1d64a:u'o', 0x1d64b:u'p',
0x1d64c:u'q', 0x1d64d:u'r', 0x1d64e:u's', 0x1d64f:u't',
0x1d650:u'u', 0x1d651:u'v', 0x1d652:u'w', 0x1d653:u'x',
0x1d654:u'y', 0x1d655:u'z', 0x1d670:u'a', 0x1d671:u'b',
0x1d672:u'c', 0x1d673:u'd', 0x1d674:u'e', 0x1d675:u'f',
0x1d676:u'g', 0x1d677:u'h', 0x1d678:u'i', 0x1d679:u'j',
0x1d67a:u'k', 0x1d67b:u'l', 0x1d67c:u'm', 0x1d67d:u'n',
0x1d67e:u'o', 0x1d67f:u'p', 0x1d680:u'q', 0x1d681:u'r',
0x1d682:u's', 0x1d683:u't', 0x1d684:u'u', 0x1d685:u'v',
0x1d686:u'w', 0x1d687:u'x', 0x1d688:u'y', 0x1d689:u'z',
0x1d6a8:u'\u03b1', 0x1d6a9:u'\u03b2', 0x1d6aa:u'\u03b3', 0x1d6ab:u'\u03b4',
0x1d6ac:u'\u03b5', 0x1d6ad:u'\u03b6', 0x1d6ae:u'\u03b7', 0x1d6af:u'\u03b8',
0x1d6b0:u'\u03b9', 0x1d6b1:u'\u03ba', 0x1d6b2:u'\u03bb', 0x1d6b3:u'\u03bc',
0x1d6b4:u'\u03bd', 0x1d6b5:u'\u03be', 0x1d6b6:u'\u03bf', 0x1d6b7:u'\u03c0',
0x1d6b8:u'\u03c1', 0x1d6b9:u'\u03b8', 0x1d6ba:u'\u03c3', 0x1d6bb:u'\u03c4',
0x1d6bc:u'\u03c5', 0x1d6bd:u'\u03c6', 0x1d6be:u'\u03c7', 0x1d6bf:u'\u03c8',
0x1d6c0:u'\u03c9', 0x1d6d3:u'\u03c3', 0x1d6e2:u'\u03b1', 0x1d6e3:u'\u03b2',
0x1d6e4:u'\u03b3', 0x1d6e5:u'\u03b4', 0x1d6e6:u'\u03b5', 0x1d6e7:u'\u03b6',
0x1d6e8:u'\u03b7', 0x1d6e9:u'\u03b8', 0x1d6ea:u'\u03b9', 0x1d6eb:u'\u03ba',
0x1d6ec:u'\u03bb', 0x1d6ed:u'\u03bc', 0x1d6ee:u'\u03bd', 0x1d6ef:u'\u03be',
0x1d6f0:u'\u03bf', 0x1d6f1:u'\u03c0', 0x1d6f2:u'\u03c1', 0x1d6f3:u'\u03b8',
0x1d6f4:u'\u03c3', 0x1d6f5:u'\u03c4', 0x1d6f6:u'\u03c5', 0x1d6f7:u'\u03c6',
0x1d6f8:u'\u03c7', 0x1d6f9:u'\u03c8', 0x1d6fa:u'\u03c9', 0x1d70d:u'\u03c3',
0x1d71c:u'\u03b1', 0x1d71d:u'\u03b2', 0x1d71e:u'\u03b3', 0x1d71f:u'\u03b4',
0x1d720:u'\u03b5', 0x1d721:u'\u03b6', 0x1d722:u'\u03b7', 0x1d723:u'\u03b8',
0x1d724:u'\u03b9', 0x1d725:u'\u03ba', 0x1d726:u'\u03bb', 0x1d727:u'\u03bc',
0x1d728:u'\u03bd', 0x1d729:u'\u03be', 0x1d72a:u'\u03bf', 0x1d72b:u'\u03c0',
0x1d72c:u'\u03c1', 0x1d72d:u'\u03b8', 0x1d72e:u'\u03c3', 0x1d72f:u'\u03c4',
0x1d730:u'\u03c5', 0x1d731:u'\u03c6', 0x1d732:u'\u03c7', 0x1d733:u'\u03c8',
0x1d734:u'\u03c9', 0x1d747:u'\u03c3', 0x1d756:u'\u03b1', 0x1d757:u'\u03b2',
0x1d758:u'\u03b3', 0x1d759:u'\u03b4', 0x1d75a:u'\u03b5', 0x1d75b:u'\u03b6',
0x1d75c:u'\u03b7', 0x1d75d:u'\u03b8', 0x1d75e:u'\u03b9', 0x1d75f:u'\u03ba',
0x1d760:u'\u03bb', 0x1d761:u'\u03bc', 0x1d762:u'\u03bd', 0x1d763:u'\u03be',
0x1d764:u'\u03bf', 0x1d765:u'\u03c0', 0x1d766:u'\u03c1', 0x1d767:u'\u03b8',
0x1d768:u'\u03c3', 0x1d769:u'\u03c4', 0x1d76a:u'\u03c5', 0x1d76b:u'\u03c6',
0x1d76c:u'\u03c7', 0x1d76d:u'\u03c8', 0x1d76e:u'\u03c9', 0x1d781:u'\u03c3',
0x1d790:u'\u03b1', 0x1d791:u'\u03b2', 0x1d792:u'\u03b3', 0x1d793:u'\u03b4',
0x1d794:u'\u03b5', 0x1d795:u'\u03b6', 0x1d796:u'\u03b7', 0x1d797:u'\u03b8',
0x1d798:u'\u03b9', 0x1d799:u'\u03ba', 0x1d79a:u'\u03bb', 0x1d79b:u'\u03bc',
0x1d79c:u'\u03bd', 0x1d79d:u'\u03be', 0x1d79e:u'\u03bf', 0x1d79f:u'\u03c0',
0x1d7a0:u'\u03c1', 0x1d7a1:u'\u03b8', 0x1d7a2:u'\u03c3', 0x1d7a3:u'\u03c4',
0x1d7a4:u'\u03c5', 0x1d7a5:u'\u03c6', 0x1d7a6:u'\u03c7', 0x1d7a7:u'\u03c8',
0x1d7a8:u'\u03c9', 0x1d7bb:u'\u03c3', }
def map_table_b3(code):
r = b3_exceptions.get(ord(code))
if r is not None: return r
return code.lower()
def map_table_b2(a):
al = map_table_b3(a)
b = unicodedata.normalize("NFKC", al)
bl = u"".join([map_table_b3(ch) for ch in b])
c = unicodedata.normalize("NFKC", bl)
if b != c:
return c
else:
return al
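# For example (editorial note): map_table_b2(u'\xdf') == u'ss', while characters
# outside table B.3 are simply lower-cased, e.g. map_table_b2(u'A') == u'a'.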
def in_table_c11(code):
return code == u" "
def in_table_c12(code):
return unicodedata.category(code) == "Zs" and code != u" "
def in_table_c11_c12(code):
return unicodedata.category(code) == "Zs"
def in_table_c21(code):
return ord(code) < 128 and unicodedata.category(code) == "Cc"
c22_specials = set([1757, 1807, 6158, 8204, 8205, 8232, 8233, 65279] + range(8288,8292) + range(8298,8304) + range(65529,65533) + range(119155,119163))
def in_table_c22(code):
c = ord(code)
if c < 128: return False
if unicodedata.category(code) == "Cc": return True
return c in c22_specials
def in_table_c21_c22(code):
return unicodedata.category(code) == "Cc" or \
ord(code) in c22_specials
def in_table_c3(code):
return unicodedata.category(code) == "Co"
def in_table_c4(code):
c = ord(code)
if c < 0xFDD0: return False
if c < 0xFDF0: return True
return (ord(code) & 0xFFFF) in (0xFFFE, 0xFFFF)
def in_table_c5(code):
return unicodedata.category(code) == "Cs"
c6_set = set(range(65529,65534))
def in_table_c6(code):
return ord(code) in c6_set
c7_set = set(range(12272,12284))
def in_table_c7(code):
return ord(code) in c7_set
c8_set = set([832, 833, 8206, 8207] + range(8234,8239) + range(8298,8304))
def in_table_c8(code):
return ord(code) in c8_set
c9_set = set([917505] + range(917536,917632))
def in_table_c9(code):
return ord(code) in c9_set
def in_table_d1(code):
return unicodedata.bidirectional(code) in ("R","AL")
def in_table_d2(code):
return unicodedata.bidirectional(code) == "L"
|
transt/cloud-init-0.7.5 | refs/heads/master | cloudinit/config/cc_disk_setup.py | 6 | # vi: ts=4 expandtab
#
# Copyright (C) 2009-2010 Canonical Ltd.
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
#
# Author: Ben Howard <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from cloudinit.settings import PER_INSTANCE
from cloudinit import util
import logging
import os
import shlex
frequency = PER_INSTANCE
# Define the commands to use
UDEVADM_CMD = util.which('udevadm')
SFDISK_CMD = util.which("sfdisk")
LSBLK_CMD = util.which("lsblk")
BLKID_CMD = util.which("blkid")
BLKDEV_CMD = util.which("blockdev")
WIPEFS_CMD = util.which("wipefs")
LOG = logging.getLogger(__name__)
def handle(_name, cfg, cloud, log, _args):
"""
See doc/examples/cloud-config_disk-setup.txt for documentation on the
format.
"""
disk_setup = cfg.get("disk_setup")
if isinstance(disk_setup, dict):
update_disk_setup_devices(disk_setup, cloud.device_name_to_device)
log.debug("Partitioning disks: %s", str(disk_setup))
for disk, definition in disk_setup.items():
if not isinstance(definition, dict):
log.warn("Invalid disk definition for %s" % disk)
continue
try:
log.debug("Creating new partition table/disk")
util.log_time(logfunc=LOG.debug,
msg="Creating partition on %s" % disk,
func=mkpart, args=(disk, definition))
except Exception as e:
util.logexc(LOG, "Failed partitioning operation\n%s" % e)
fs_setup = cfg.get("fs_setup")
if isinstance(fs_setup, list):
log.debug("setting up filesystems: %s", str(fs_setup))
update_fs_setup_devices(fs_setup, cloud.device_name_to_device)
for definition in fs_setup:
if not isinstance(definition, dict):
log.warn("Invalid file system definition: %s" % definition)
continue
try:
log.debug("Creating new filesystem.")
device = definition.get('device')
util.log_time(logfunc=LOG.debug,
msg="Creating fs for %s" % device,
func=mkfs, args=(definition,))
except Exception as e:
util.logexc(LOG, "Failed during filesystem operation\n%s" % e)
def update_disk_setup_devices(disk_setup, tformer):
# update 'disk_setup' dictionary anywhere were a device may occur
# update it with the response from 'tformer'
for origname in disk_setup.keys():
transformed = tformer(origname)
if transformed is None or transformed == origname:
continue
if transformed in disk_setup:
LOG.info("Replacing %s in disk_setup for translation of %s",
origname, transformed)
del disk_setup[transformed]
disk_setup[transformed] = disk_setup[origname]
disk_setup[transformed]['_origname'] = origname
del disk_setup[origname]
LOG.debug("updated disk_setup device entry '%s' to '%s'",
origname, transformed)
def update_fs_setup_devices(disk_setup, tformer):
# update 'fs_setup' dictionary anywhere were a device may occur
# update it with the response from 'tformer'
for definition in disk_setup:
if not isinstance(definition, dict):
LOG.warn("entry in disk_setup not a dict: %s", definition)
continue
origname = definition.get('device')
if origname is None:
continue
(dev, part) = util.expand_dotted_devname(origname)
tformed = tformer(dev)
if tformed is not None:
dev = tformed
LOG.debug("%s is mapped to disk=%s part=%s",
origname, tformed, part)
definition['_origname'] = origname
definition['device'] = tformed
if part and 'partition' in definition:
definition['_partition'] = definition['partition']
definition['partition'] = part
def value_splitter(values, start=None):
"""
Returns the key/value pairs of output sent as string
like: FOO='BAR' HOME='127.0.0.1'
"""
_values = shlex.split(values)
if start:
_values = _values[start:]
for key, value in [x.split('=') for x in _values]:
yield key, value
def enumerate_disk(device, nodeps=False):
"""
Enumerate the elements of a child device.
Parameters:
device: the kernel device name
nodeps <BOOL>: don't enumerate children devices
Return a dict describing the disk:
type: the entry type, i.e disk or part
fstype: the filesystem type, if it exists
label: file system label, if it exists
name: the device name, i.e. sda
"""
lsblk_cmd = [LSBLK_CMD, '--pairs', '--out', 'NAME,TYPE,FSTYPE,LABEL',
device]
if nodeps:
lsblk_cmd.append('--nodeps')
info = None
try:
info, _err = util.subp(lsblk_cmd)
except Exception as e:
raise Exception("Failed during disk check for %s\n%s" % (device, e))
parts = [x for x in (info.strip()).splitlines() if len(x.split()) > 0]
for part in parts:
d = {'name': None,
'type': None,
'fstype': None,
'label': None,
}
for key, value in value_splitter(part):
d[key.lower()] = value
yield d
def device_type(device):
"""
Return the device type of the device by calling lsblk.
"""
for d in enumerate_disk(device, nodeps=True):
if "type" in d:
return d["type"].lower()
return None
def is_device_valid(name, partition=False):
"""
Check if the device is a valid device.
"""
d_type = ""
try:
d_type = device_type(name)
except:
LOG.warn("Query against device %s failed" % name)
return False
if partition and d_type == 'part':
return True
elif not partition and d_type == 'disk':
return True
return False
def check_fs(device):
"""
Check if the device has a filesystem on it
Output of blkid is generally something like:
/dev/sda: LABEL="Backup500G" UUID="..." TYPE="ext4"
    Return values are label, type, uuid
"""
out, label, fs_type, uuid = None, None, None, None
blkid_cmd = [BLKID_CMD, '-c', '/dev/null', device]
try:
out, _err = util.subp(blkid_cmd, rcs=[0, 2])
except Exception as e:
raise Exception("Failed during disk check for %s\n%s" % (device, e))
if out:
if len(out.splitlines()) == 1:
for key, value in value_splitter(out, start=1):
if key.lower() == 'label':
label = value
elif key.lower() == 'type':
fs_type = value
elif key.lower() == 'uuid':
uuid = value
return label, fs_type, uuid
def is_filesystem(device):
"""
Returns true if the device has a file system.
"""
_, fs_type, _ = check_fs(device)
return fs_type
def find_device_node(device, fs_type=None, label=None, valid_targets=None,
label_match=True, replace_fs=None):
"""
    Find a device that either matches the spec or, failing that, the first
    suitable free device.
    The return value is (<device>, <bool>) where the device is the
device to use and the bool is whether the device matches the
fs_type and label.
Note: This works with GPT partition tables!
"""
# label of None is same as no label
if label is None:
label = ""
if not valid_targets:
valid_targets = ['disk', 'part']
raw_device_used = False
for d in enumerate_disk(device):
if d['fstype'] == replace_fs and label_match is False:
# We found a device where we want to replace the FS
return ('/dev/%s' % d['name'], False)
if (d['fstype'] == fs_type and
((label_match and d['label'] == label) or not label_match)):
# If we find a matching device, we return that
return ('/dev/%s' % d['name'], True)
if d['type'] in valid_targets:
if d['type'] != 'disk' or d['fstype']:
raw_device_used = True
if d['type'] == 'disk':
# Skip the raw disk, its the default
pass
elif not d['fstype']:
return ('/dev/%s' % d['name'], False)
if not raw_device_used:
return (device, False)
LOG.warn("Failed to find device during available device search.")
return (None, False)
def is_disk_used(device):
"""
    Check if the device is currently used. Returns true if the device
    has either a file system or a partition entry on it.
"""
    # If the child count is higher than 1, then there are child nodes
    # such as partition or device mapper nodes
    use_count = [x for x in enumerate_disk(device)]
    if len(use_count) > 1:
return True
# If we see a file system, then its used
_, check_fstype, _ = check_fs(device)
if check_fstype:
return True
return False
def get_hdd_size(device):
"""
Returns the hard disk size.
This works with any disk type, including GPT.
"""
size_cmd = [SFDISK_CMD, '--show-size', device]
size = None
try:
size, _err = util.subp(size_cmd)
except Exception as e:
raise Exception("Failed to get %s size\n%s" % (device, e))
return int(size.strip())
def get_dyn_func(*args):
"""
Call the appropriate function.
The first value is the template for function name
The second value is the template replacement
The remain values are passed to the function
For example: get_dyn_func("foo_%s", 'bar', 1, 2, 3,)
would call "foo_bar" with args of 1, 2, 3
"""
if len(args) < 2:
raise Exception("Unable to determine dynamic funcation name")
func_name = (args[0] % args[1])
func_args = args[2:]
try:
if func_args:
return globals()[func_name](*func_args)
else:
return globals()[func_name]
except KeyError:
raise Exception("No such function %s to call!" % func_name)
def check_partition_mbr_layout(device, layout):
"""
Returns true if the partition layout matches the one on the disk
Layout should be a list of values. At this time, this only
verifies that the number of partitions and their labels is correct.
"""
read_parttbl(device)
prt_cmd = [SFDISK_CMD, "-l", device]
try:
out, _err = util.subp(prt_cmd, data="%s\n" % layout)
except Exception as e:
raise Exception("Error running partition command on %s\n%s" % (
device, e))
found_layout = []
for line in out.splitlines():
_line = line.split()
if len(_line) == 0:
continue
if device in _line[0]:
# We don't understand extended partitions yet
if _line[-1].lower() in ['extended', 'empty']:
continue
# Find the partition types
type_label = None
for x in sorted(range(1, len(_line)), reverse=True):
if _line[x].isdigit() and _line[x] != '/':
type_label = _line[x]
break
found_layout.append(type_label)
if isinstance(layout, bool):
# if we are using auto partitioning, or "True" be happy
# if a single partition exists.
if layout and len(found_layout) >= 1:
return True
return False
else:
if len(found_layout) != len(layout):
return False
else:
# This just makes sure that the number of requested
# partitions and the type labels are right
for x in range(1, len(layout) + 1):
if isinstance(layout[x - 1], tuple):
                    _, part_type = layout[x - 1]
                    if int(found_layout[x - 1]) != int(part_type):
return False
return True
return False
def check_partition_layout(table_type, device, layout):
"""
    See if the partition layout matches.
    This is a future proofing function. In order
to add support for other disk layout schemes, add a
function called check_partition_%s_layout
"""
return get_dyn_func("check_partition_%s_layout", table_type, device,
layout)
def get_partition_mbr_layout(size, layout):
"""
Calculate the layout of the partition table. Partition sizes
are defined as percentage values or a tuple of percentage and
partition type.
For example:
        [ 33, [66, 82] ]
Defines the first partition to be a size of 1/3 the disk,
while the remaining 2/3's will be of type Linux Swap.
"""
if not isinstance(layout, list) and isinstance(layout, bool):
# Create a single partition
return "0,"
if ((len(layout) == 0 and isinstance(layout, list)) or
not isinstance(layout, list)):
raise Exception("Partition layout is invalid")
last_part_num = len(layout)
if last_part_num > 4:
raise Exception("Only simply partitioning is allowed.")
part_definition = []
part_num = 0
for part in layout:
part_type = 83 # Default to Linux
percent = part
part_num += 1
if isinstance(part, list):
if len(part) != 2:
raise Exception("Partition was incorrectly defined: %s" % part)
percent, part_type = part
part_size = int((float(size) * (float(percent) / 100)) / 1024)
if part_num == last_part_num:
part_definition.append(",,%s" % part_type)
else:
part_definition.append(",%s,%s" % (part_size, part_type))
sfdisk_definition = "\n".join(part_definition)
if len(part_definition) > 4:
raise Exception("Calculated partition definition is too big\n%s" %
sfdisk_definition)
return sfdisk_definition
def purge_disk_ptable(device):
# wipe the first and last megabyte of a disk (or file)
# gpt stores partition table both at front and at end.
null = '\0' # pylint: disable=W1401
start_len = 1024 * 1024
end_len = 1024 * 1024
with open(device, "rb+") as fp:
fp.write(null * (start_len))
fp.seek(-end_len, os.SEEK_END)
fp.write(null * end_len)
fp.flush()
read_parttbl(device)
def purge_disk(device):
"""
    Remove partition table entries
"""
# wipe any file systems first
for d in enumerate_disk(device):
if d['type'] not in ["disk", "crypt"]:
wipefs_cmd = [WIPEFS_CMD, "--all", "/dev/%s" % d['name']]
try:
LOG.info("Purging filesystem on /dev/%s" % d['name'])
util.subp(wipefs_cmd)
except Exception:
raise Exception("Failed FS purge of /dev/%s" % d['name'])
purge_disk_ptable(device)
def get_partition_layout(table_type, size, layout):
"""
Call the appropriate function for creating the table
definition. Returns the table definition
This is a future proofing function. To add support for
other layouts, simply add a "get_partition_%s_layout"
function.
"""
return get_dyn_func("get_partition_%s_layout", table_type, size, layout)
def read_parttbl(device):
"""
    Re-read the partition table via 'blockdev --rereadpt', settling udev
    events before and after so the kernel's view stays consistent.
"""
blkdev_cmd = [BLKDEV_CMD, '--rereadpt', device]
udev_cmd = [UDEVADM_CMD, 'settle']
try:
util.subp(udev_cmd)
util.subp(blkdev_cmd)
util.subp(udev_cmd)
except Exception as e:
util.logexc(LOG, "Failed reading the partition table %s" % e)
def exec_mkpart_mbr(device, layout):
"""
Break out of mbr partition to allow for future partition
types, i.e. gpt
"""
# Create the partitions
prt_cmd = [SFDISK_CMD, "--Linux", "-uM", device]
try:
util.subp(prt_cmd, data="%s\n" % layout)
except Exception as e:
raise Exception("Failed to partition device %s\n%s" % (device, e))
read_parttbl(device)
def exec_mkpart(table_type, device, layout):
"""
Fetches the function for creating the table type.
    This allows us to dynamically find which function to call.
    Parameters:
table_type: type of partition table to use
device: the device to work on
layout: layout definition specific to partition table
"""
return get_dyn_func("exec_mkpart_%s", table_type, device, layout)
def mkpart(device, definition):
"""
Creates the partition table.
Parameters:
definition: dictionary describing how to create the partition.
The following are supported values in the dict:
overwrite: Should the partition table be created regardless
                           of any pre-existing data?
layout: the layout of the partition table
table_type: Which partition table to use, defaults to MBR
device: the device to work on.
"""
LOG.debug("Checking values for %s definition" % device)
overwrite = definition.get('overwrite', False)
layout = definition.get('layout', False)
table_type = definition.get('table_type', 'mbr')
# Check if the default device is a partition or not
LOG.debug("Checking against default devices")
if (isinstance(layout, bool) and not layout) or not layout:
LOG.debug("Device is not to be partitioned, skipping")
return # Device is not to be partitioned
# This prevents you from overwriting the device
LOG.debug("Checking if device %s is a valid device", device)
if not is_device_valid(device):
raise Exception("Device %s is not a disk device!", device)
# Remove the partition table entries
if isinstance(layout, str) and layout.lower() == "remove":
LOG.debug("Instructed to remove partition table entries")
purge_disk(device)
return
LOG.debug("Checking if device layout matches")
if check_partition_layout(table_type, device, layout):
LOG.debug("Device partitioning layout matches")
return True
LOG.debug("Checking if device is safe to partition")
if not overwrite and (is_disk_used(device) or is_filesystem(device)):
LOG.debug("Skipping partitioning on configured device %s" % device)
return
LOG.debug("Checking for device size")
device_size = get_hdd_size(device)
LOG.debug("Calculating partition layout")
part_definition = get_partition_layout(table_type, device_size, layout)
LOG.debug(" Layout is: %s" % part_definition)
LOG.debug("Creating partition table on %s", device)
exec_mkpart(table_type, device, part_definition)
LOG.debug("Partition table created for %s", device)
def lookup_force_flag(fs):
"""
A force flag might be -F or -F, this look it up
"""
flags = {'ext': '-F',
'btrfs': '-f',
'xfs': '-f',
'reiserfs': '-f',
}
if 'ext' in fs.lower():
fs = 'ext'
if fs.lower() in flags:
return flags[fs]
LOG.warn("Force flag for %s is unknown." % fs)
return ''
def mkfs(fs_cfg):
"""
Create a file system on the device.
label: defines the label to use on the device
fs_cfg: defines how the filesystem is to look
The following values are required generally:
device: which device or cloud defined default_device
filesystem: which file system type
overwrite: indiscriminately create the file system
partition: when device does not define a partition,
setting this to a number will mean
device + partition. When set to 'auto', the
first free device or the first device which
matches both label and type will be used.
'any' means the first filesystem that matches
on the device.
When 'cmd' is provided then no other parameter is required.
"""
label = fs_cfg.get('label')
device = fs_cfg.get('device')
partition = str(fs_cfg.get('partition', 'any'))
fs_type = fs_cfg.get('filesystem')
fs_cmd = fs_cfg.get('cmd', [])
fs_opts = fs_cfg.get('extra_opts', [])
fs_replace = fs_cfg.get('replace_fs', False)
overwrite = fs_cfg.get('overwrite', False)
# This allows you to define the default ephemeral or swap
LOG.debug("Checking %s against default devices", device)
if not partition or partition.isdigit():
# Handle manual definition of partition
if partition.isdigit():
device = "%s%s" % (device, partition)
LOG.debug("Manual request of partition %s for %s",
partition, device)
# Check to see if the fs already exists
LOG.debug("Checking device %s", device)
check_label, check_fstype, _ = check_fs(device)
LOG.debug("Device %s has %s %s", device, check_label, check_fstype)
if check_label == label and check_fstype == fs_type:
LOG.debug("Existing file system found at %s", device)
if not overwrite:
LOG.debug("Device %s has required file system", device)
return
else:
LOG.warn("Destroying filesystem on %s", device)
else:
LOG.debug("Device %s is cleared for formating", device)
elif partition and str(partition).lower() in ('auto', 'any'):
# For auto devices, we match if the filesystem does exist
odevice = device
LOG.debug("Identifying device to create %s filesytem on", label)
# any mean pick the first match on the device with matching fs_type
label_match = True
if partition.lower() == 'any':
label_match = False
device, reuse = find_device_node(device, fs_type=fs_type, label=label,
label_match=label_match,
replace_fs=fs_replace)
LOG.debug("Automatic device for %s identified as %s", odevice, device)
if reuse:
LOG.debug("Found filesystem match, skipping formating.")
return
if not reuse and fs_replace and device:
LOG.debug("Replacing file system on %s as instructed." % device)
if not device:
LOG.debug("No device aviable that matches request. "
"Skipping fs creation for %s", fs_cfg)
return
elif not partition or str(partition).lower() == 'none':
LOG.debug("Using the raw device to place filesystem %s on" % label)
else:
LOG.debug("Error in device identification handling.")
return
LOG.debug("File system %s will be created on %s", label, device)
# Make sure the device is defined
if not device:
LOG.warn("Device is not known: %s", device)
return
# Check that we can create the FS
if not (fs_type or fs_cmd):
raise Exception("No way to create filesystem '%s'. fs_type or fs_cmd "
"must be set.", label)
# Create the commands
if fs_cmd:
fs_cmd = fs_cfg['cmd'] % {'label': label,
'filesystem': fs_type,
'device': device,
}
else:
# Find the mkfs command
mkfs_cmd = util.which("mkfs.%s" % fs_type)
if not mkfs_cmd:
mkfs_cmd = util.which("mk%s" % fs_type)
if not mkfs_cmd:
LOG.warn("Cannot create fstype '%s'. No mkfs.%s command", fs_type,
fs_type)
return
fs_cmd = [mkfs_cmd, device]
if label:
fs_cmd.extend(["-L", label])
# Force flag for file systems that support it; fs_cmd was just built above,
# so only gate on overwrite or formatting a whole disk
if overwrite or device_type(device) == "disk":
    fs_cmd.append(lookup_force_flag(fs_type))
# Add the extra FS options
if fs_opts:
fs_cmd.extend(fs_opts)
LOG.debug("Creating file system %s on %s", label, device)
LOG.debug(" Using cmd: %s", " ".join(fs_cmd))
try:
util.subp(fs_cmd)
except Exception as e:
raise Exception("Failed to exec of '%s':\n%s" % (fs_cmd, e))
|
GabrielNicolasAvellaneda/kafka | refs/heads/trunk | system_test/system_test_env.py | 116 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#!/usr/bin/env python
# ===================================
# system_test_env.py
# ===================================
import copy
import json
import os
import sys
from utils import system_test_utils
class SystemTestEnv():
# private:
_cwdFullPath = os.getcwd()
_thisScriptFullPathName = os.path.realpath(__file__)
_thisScriptBaseDir = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0])))
# public:
SYSTEM_TEST_BASE_DIR = os.path.abspath(_thisScriptBaseDir)
SYSTEM_TEST_UTIL_DIR = os.path.abspath(SYSTEM_TEST_BASE_DIR + "/utils")
SYSTEM_TEST_SUITE_SUFFIX = "_testsuite"
SYSTEM_TEST_CASE_PREFIX = "testcase_"
SYSTEM_TEST_MODULE_EXT = ".py"
CLUSTER_CONFIG_FILENAME = "cluster_config.json"
CLUSTER_CONFIG_PATHNAME = os.path.abspath(SYSTEM_TEST_BASE_DIR + "/" + CLUSTER_CONFIG_FILENAME)
METRICS_FILENAME = "metrics.json"
METRICS_PATHNAME = os.path.abspath(SYSTEM_TEST_BASE_DIR + "/" + METRICS_FILENAME)
TESTCASE_TO_RUN_FILENAME = "testcase_to_run.json"
TESTCASE_TO_RUN_PATHNAME = os.path.abspath(SYSTEM_TEST_BASE_DIR + "/" + TESTCASE_TO_RUN_FILENAME)
TESTCASE_TO_SKIP_FILENAME = "testcase_to_skip.json"
TESTCASE_TO_SKIP_PATHNAME = os.path.abspath(SYSTEM_TEST_BASE_DIR + "/" + TESTCASE_TO_SKIP_FILENAME)
clusterEntityConfigDictList = [] # cluster entity config for current level
clusterEntityConfigDictListInSystemTestLevel = [] # cluster entity config defined in system level
clusterEntityConfigDictListLastFoundInTestSuite = [] # cluster entity config last found in testsuite level
clusterEntityConfigDictListLastFoundInTestCase = [] # cluster entity config last found in testcase level
systemTestResultsList = []
testCaseToRunListDict = {}
testCaseToSkipListDict = {}
printTestDescriptionsOnly = False
doNotValidateRemoteHost = False
def __init__(self):
"Create an object with this system test session environment"
# load the system level cluster config
system_test_utils.load_cluster_config(self.CLUSTER_CONFIG_PATHNAME, self.clusterEntityConfigDictList)
# save the system level cluster config
self.clusterEntityConfigDictListInSystemTestLevel = copy.deepcopy(self.clusterEntityConfigDictList)
# retrieve testcases to run from testcase_to_run.json
try:
testcaseToRunFileContent = open(self.TESTCASE_TO_RUN_PATHNAME, "r").read()
testcaseToRunData = json.loads(testcaseToRunFileContent)
for testClassName, caseList in testcaseToRunData.items():
self.testCaseToRunListDict[testClassName] = caseList
except:
pass
# retrieve testcases to skip from testcase_to_skip.json
try:
testcaseToSkipFileContent = open(self.TESTCASE_TO_SKIP_PATHNAME, "r").read()
testcaseToSkipData = json.loads(testcaseToSkipFileContent)
for testClassName, caseList in testcaseToSkipData.items():
self.testCaseToSkipListDict[testClassName] = caseList
except:
pass
def isTestCaseToSkip(self, testClassName, testcaseDirName):
testCaseToRunList = {}
testCaseToSkipList = {}
try:
testCaseToRunList = self.testCaseToRunListDict[testClassName]
except:
# no 'testClassName' found => no need to run any cases for this test class
return True
try:
testCaseToSkipList = self.testCaseToSkipListDict[testClassName]
except:
pass
# if testCaseToRunList has elements, it takes precedence:
if len(testCaseToRunList) > 0:
#print "#### testClassName => ", testClassName
#print "#### testCaseToRunList => ", testCaseToRunList
#print "#### testcaseDirName => ", testcaseDirName
if not testcaseDirName in testCaseToRunList:
#self.log_message("Skipping : " + testcaseDirName)
return True
elif len(testCaseToSkipList) > 0:
#print "#### testClassName => ", testClassName
#print "#### testCaseToSkipList => ", testCaseToSkipList
#print "#### testcaseDirName => ", testcaseDirName
if testcaseDirName in testCaseToSkipList:
#self.log_message("Skipping : " + testcaseDirName)
return True
return False
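# Example of the precedence rules above (hypothetical suite / case names, for
# illustration only):
#
#   testCaseToRunListDict  = {"replication_testsuite": ["testcase_0001"]}
#   testCaseToSkipListDict = {"replication_testsuite": ["testcase_0002"]}
#
#   isTestCaseToSkip("replication_testsuite", "testcase_0001")   # -> False (in run list)
#   isTestCaseToSkip("replication_testsuite", "testcase_0002")   # -> True  (not in run list)
#   isTestCaseToSkip("mirror_maker_testsuite", "testcase_0001")  # -> True  (class not listed to run)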
def getSystemTestEnvDict(self):
envDict = {}
envDict["system_test_base_dir"] = self.SYSTEM_TEST_BASE_DIR
envDict["system_test_util_dir"] = self.SYSTEM_TEST_UTIL_DIR
envDict["cluster_config_pathname"] = self.CLUSTER_CONFIG_PATHNAME
envDict["system_test_suite_suffix"] = self.SYSTEM_TEST_SUITE_SUFFIX
envDict["system_test_case_prefix"] = self.SYSTEM_TEST_CASE_PREFIX
envDict["system_test_module_ext"] = self.SYSTEM_TEST_MODULE_EXT
envDict["cluster_config_pathname"] = self.CLUSTER_CONFIG_PATHNAME
envDict["cluster_entity_config_dict_list"] = self.clusterEntityConfigDictList
envDict["system_test_results_list"] = self.systemTestResultsList
return envDict
|
caelan/stripstream | refs/heads/master | robotics/openrave/transforms.py | 1 | from openravepy import poseTransformPoints, matrixFromPose, matrixFromQuat, matrixFromAxisAngle, rotationMatrixFromQuat, quatFromAxisAngle, poseFromMatrix, axisAngleFromRotationMatrix, quatFromRotationMatrix, quatMult, quatInverse, quatRotateDirection, quatSlerp, RaveGetAffineDOFValuesFromTransform, DOFAffine, transformLookat
import numpy as np
from math import sin, cos
def norm(vector, order=2):
return np.linalg.norm(vector, ord=order)
def length(vector):
return np.linalg.norm(vector)
def length2(vector):
return np.dot(vector, vector)
def normalize(vector):
return 1. / length(vector) * vector
def unit_x():
return np.array((1, 0, 0))
def unit_y():
return np.array((0, 1, 0))
def unit_z():
return np.array((0, 0, 1))
def unit_point():
return np.zeros(3)
def unit_quat():
return np.array((1, 0, 0, 0))
def unit_rot():
return np.identity(3)
def unit_pose():
return np.array((1, 0, 0, 0, 0, 0, 0))
def unit_trans():
return np.identity(4)
def trans_transform_point(trans, point):
return trans.dot(np.concatenate([point, [1]]).T)[:3]
def trans_transform_points(trans, points):
return np.dot(trans[:3, :3], points) + np.tile(trans[:3, 3].T, (points.shape[1], 1)).T
def pose_transform_point(pose, point):
return poseTransformPoints(pose, np.array([point]))[0]
def quat_transform_point(quat, point):
return pose_transform_point(pose_from_quat_point(quat, unit_point()), point)
def rot_transform_point(rot, point):
return rot.dot(point)
def quat_from_pose(pose):
return pose[:4]
def point_from_pose(pose):
return pose[4:]
def pose_from_quat_point(quat, point):
return np.concatenate([quat, point])
def trans_from_quat_point(quat, point):
return trans_from_pose(pose_from_quat_point(quat, point))
def trans_from_pose(pose):
return matrixFromPose(pose)
def pose_from_trans(trans):
return poseFromMatrix(trans)
def trans_from_point(x, y, z):
return trans_from_quat_point(unit_quat(), np.array([x, y, z]))
def trans_from_quat(quat):
return matrixFromQuat(quat)
def trans_from_axis_angle(x_angle, y_angle, z_angle):
return trans_from_quat(quat_from_axis_angle(x_angle, y_angle, z_angle))
def trans_from_rot_point(rot, point):
trans = unit_trans()
trans[:3, :3] = rot
trans[:3, 3] = point
return trans
def trans_from_rot(rot):
return trans_from_rot_point(rot, unit_point())
def rot_from_trans(trans):
return trans[:3, :3]
def quat_from_trans(trans):
return quat_from_rot(rot_from_trans(trans))
def point_from_trans(trans):
return trans[:3, 3]
def quat_from_axis_angle(x_angle, y_angle, z_angle):
return quatFromAxisAngle(np.array((x_angle, y_angle, z_angle)))
def rot_from_axis_angle(x_angle, y_angle, z_angle):
return matrixFromAxisAngle(np.array((x_angle, y_angle, z_angle)))
def axis_angle_from_rot(rot):
return axisAngleFromRotationMatrix(rot)
def quat_from_rot(rot):
return quatFromRotationMatrix(rot)
def rot_from_quat(quat):
return rotationMatrixFromQuat(quat)
def quat_from_angle_vector(angle, vector):
return np.concatenate([[cos(angle / 2)], sin(angle / 2) * normalize(np.array(vector))])
def rot_from_angle_vector(angle, vector):
return rot_from_quat(quat_from_angle_vector(angle, vector))
def trans_dot(*trans):
return np.dot(*trans)
def trans_inv(trans):
return np.linalg.inv(trans)
def quat_dot(*quats):
return reduce(quatMult, quats)
def quat_inv(quat):
return quatInverse(quat)
def quat_look_at(vector1, vector2=None):
if vector2 is None:
vector2 = vector1
vector1 = unit_x()
return quatRotateDirection(vector1, vector2)
def rot_look_at(vector1, vector2=None):
return rot_from_quat(quat_look_at(vector1, vector2))
def camera_look_at(point, look_point=unit_point()):
return transformLookat(np.array(look_point) - np.array(point), np.array(point), -unit_z())
def quat_interpolate(quat1, quat2, t=.5):
return quatSlerp(quat1, quat2, t, True)
def pose_interpolate(pose1, pose2, t=.5):
# Keep translation on the same convention as quatSlerp: t=0 -> pose1, t=1 -> pose2
return pose_from_quat_point(quat_interpolate(quat_from_pose(pose1), quat_from_pose(pose2), t),
                            (1 - t) * point_from_pose(pose1) + t * point_from_pose(pose2))
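# Minimal sketch of interpolating between two poses (illustrative only; the
# numbers are made up):
#
#   pose_a = pose_from_quat_point(unit_quat(), np.array([0., 0., 0.]))
#   pose_b = pose_from_quat_point(quat_from_z_rot(np.pi / 2), np.array([1., 0., 0.]))
#   pose_mid = pose_interpolate(pose_a, pose_b, t=.5)  # halfway rotation and translation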
def vector_trans(trans, vector):
return trans_from_pose(pose_from_trans(trans) + np.concatenate([np.zeros((4,)), vector]))
def quat_from_z_rot(theta):
return quat_from_axis_angle(0, 0, theta)
def base_values_from_trans(trans):
return RaveGetAffineDOFValuesFromTransform(trans, DOFAffine.X | DOFAffine.Y | DOFAffine.RotationAxis, [0, 0, 1])
def xyzt_from_trans(trans):
return RaveGetAffineDOFValuesFromTransform(trans, DOFAffine.X | DOFAffine.Y | DOFAffine.Z | DOFAffine.RotationAxis, [0, 0, 1])
def base_values_from_pose(pose):
return base_values_from_trans(trans_from_pose(pose))
def is_upright(trans):
return abs(abs(trans[2, 2]) - 1) < 1e-6
def pose_from_base_values(base_values, z=0.0):
x, y, theta = base_values
return pose_from_quat_point(quat_from_z_rot(theta), [x, y, z])
def trans_from_base_values(base_values, z=0.0):
return trans_from_pose(pose_from_base_values(base_values, z=z))
def trans2D_from_trans(trans):
return trans_from_base_values(base_values_from_trans(trans))
def point_from_full_config(config):
return config[-7:-4]
def quat_from_full_config(config):
return config[-4:]
def pose_from_full_config(config):
return np.concatenate([quat_from_full_config(config), point_from_full_config(config)])
def trans_from_full_config(config):
return trans_from_pose(pose_from_full_config(config))
def base_values_from_full_config(config):
return base_values_from_pose(pose_from_full_config(config))
def arm_from_full_config(arm, config):
return config[arm.GetArmIndices()]
def arm_and_base_from_full_config(arm, config):
return np.concatenate([arm_from_full_config(arm, config), base_values_from_full_config(config)])
def full_config_from_pose(pose, config):
new_config = config.copy()
new_config[-7:-4] = pose[-3:]
new_config[-4:] = pose[:4]
return new_config
def full_config_from_trans(trans, config):
return full_config_from_pose(pose_from_trans(trans), config)
def full_config_from_base_values(base_values, config):
_, _, z = point_from_full_config(config)
return full_config_from_pose(pose_from_base_values(base_values, z=z), config)
def get_trans(body):
return body.GetTransform()
def get_pose(body):
return pose_from_trans(get_trans(body))
def get_point(body):
return point_from_trans(get_trans(body))
def get_quat(body):
return quat_from_pose(get_pose(body))
def get_config(body, joint_indices=None):
if joint_indices is None:
return body.GetDOFValues()
return body.GetDOFValues(indices=joint_indices)
def get_active_config(body):
return body.GetActiveDOFValues()
def get_active_indices(body):
return body.GetActiveDOFIndices()
def get_full_config(body, dof_indices=None):
if dof_indices is None:
return body.GetConfigurationValues()
return body.GetConfigurationValues()[dof_indices]
def set_trans(body, trans):
body.SetTransform(trans)
def set_pose(body, pose):
set_trans(body, trans_from_pose(pose))
def set_xy(body, x, y):
point = get_point(body)
set_point(body, np.array([x, y, point[2]]))
def set_point(body, point):
set_pose(body, pose_from_quat_point(get_quat(body), point))
def set_quat(body, quat):
set_pose(body, pose_from_quat_point(quat, get_point(body)))
def set_config(body, config, joint_indices=None):
if joint_indices is None:
body.SetDOFValues(config)
else:
body.SetDOFValues(config, joint_indices)
def set_active_config(body, config):
body.SetActiveDOFValues(config)
def set_active_indices(body, indices):
body.SetActiveDOFs(indices)
def set_full_config(body, config, dof_indices=None):
if dof_indices is None:
body.SetConfigurationValues(config)
else:
full_config = get_full_config(body)
full_config[dof_indices] = config
body.SetConfigurationValues(full_config)
def set_base_values(body, base_values):
trans = get_trans(body)
trans[:3, :3] = rot_from_quat(quat_from_z_rot(base_values[-1]))
trans[:2, 3] = base_values[:2]
set_trans(body, trans)
def set_manipulator_values(manipulator, values):
set_config(manipulator.GetRobot(), values, manipulator.GetArmIndices())
def object_trans_from_manip_trans(manip_trans, grasp):
return np.dot(manip_trans, grasp)
def manip_trans_from_object_trans(object_trans, grasp):
return np.linalg.solve(grasp.T, object_trans.T).T
def compute_grasp(manip_trans, object_trans):
return np.linalg.solve(manip_trans, object_trans)
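# Sketch of the relationship between the three grasp helpers above (this follows
# from the definitions; the transform values themselves are arbitrary):
#
#   grasp = compute_grasp(manip_trans, object_trans)
#   # then, up to numerical error:
#   #   object_trans_from_manip_trans(manip_trans, grasp) ~= object_trans
#   #   manip_trans_from_object_trans(object_trans, grasp) ~= manip_trans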
|
RuudBurger/CouchPotatoServer | refs/heads/master | libs/synchronousdeluge/exceptions.py | 159 | __all__ = ["DelugeRPCError"]
class DelugeRPCError(Exception):
def __init__(self, name, msg, traceback):
self.name = name
self.msg = msg
self.traceback = traceback
def __str__(self):
return "{0}: {1}: {2}".format(self.__class__.__name__, self.name, self.msg)
|
ProfessionalIT/maxigenios-website | refs/heads/master | sdk/google_appengine/google/appengine/tools/devappserver2/endpoints/endpoints_server_test.py | 8 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for the endpoints_server module."""
import httplib
import json
import logging
import unittest
import google
import mox
from google.appengine.tools.devappserver2 import dispatcher
from google.appengine.tools.devappserver2.endpoints import api_config_manager
from google.appengine.tools.devappserver2.endpoints import api_request
from google.appengine.tools.devappserver2.endpoints import discovery_api_proxy
from google.appengine.tools.devappserver2.endpoints import endpoints_server
from google.appengine.tools.devappserver2.endpoints import errors
from google.appengine.tools.devappserver2.endpoints import test_utils
class JsonMatches(mox.Comparator):
"""A Mox comparator to compare a string of a JSON object to a JSON object."""
def __init__(self, json_object):
"""Constructor.
Args:
json_object: The JSON object to compare against.
"""
self._json_object = json_object
def equals(self, json_string):
"""Check if the given object matches our json object.
This converts json_string from a string to a JSON object, then compares it
against our json object.
Args:
json_string: A string containing a JSON object to be compared against.
Returns:
True if the object matches, False if not.
"""
other_json = json.loads(json_string)
return self._json_object == other_json
def __repr__(self):
return '<JsonMatches %r>' % self._json_object
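# Illustrative sketch of how this comparator is used when recording a mox
# expectation (mirrors the calls made in assert_dispatch_to_spi below; the
# method path and body shown here are made up):
#
#   mock_dispatcher.add_request(
#       'POST', '/_ah/spi/MyApi.method', headers,
#       JsonMatches({'gid': 'X'}), source_ip).AndReturn(response)
#
# The recorded call then matches any request body that parses to {'gid': 'X'},
# regardless of key order or whitespace in the JSON string.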
class DevAppserverEndpointsServerTest(test_utils.TestsWithStartResponse):
def setUp(self):
"""Set up a dev Endpoints server."""
super(DevAppserverEndpointsServerTest, self).setUp()
self.mox = mox.Mox()
self.config_manager = api_config_manager.ApiConfigManager()
self.mock_dispatcher = self.mox.CreateMock(dispatcher.Dispatcher)
self.server = endpoints_server.EndpointsDispatcher(self.mock_dispatcher,
self.config_manager)
def tearDown(self):
self.mox.UnsetStubs()
def prepare_dispatch(self, config):
# The dispatch call will make a call to get_api_configs, making a
# dispatcher request. Set up that request.
request_method = 'POST'
request_path = '/_ah/spi/BackendService.getApiConfigs'
request_headers = [('Content-Type', 'application/json')]
request_body = '{}'
response_body = json.dumps({'items': [config]})
self.mock_dispatcher.add_request(
request_method, request_path, request_headers, request_body,
endpoints_server._SERVER_SOURCE_IP).AndReturn(
dispatcher.ResponseTuple('200 OK',
[('Content-Type', 'application/json'),
('Content-Length',
str(len(response_body)))],
response_body))
def assert_dispatch_to_spi(self, request, config, spi_path,
expected_spi_body_json=None):
"""Assert that dispatching a request to the SPI works.
Mock out the dispatcher.add_request and handle_spi_response, and use these
to ensure that the correct request is being sent to the back end when
Dispatch is called.
Args:
request: An ApiRequest, the request to dispatch.
config: A dict containing the API configuration.
spi_path: A string containing the relative path to the SPI.
expected_spi_body_json: If not None, this is a JSON object containing
the mock response sent by the back end. If None, this will create an
empty response.
"""
self.prepare_dispatch(config)
spi_headers = [('Content-Type', 'application/json')]
spi_body_json = expected_spi_body_json or {}
spi_response = dispatcher.ResponseTuple('200 OK', [], 'Test')
self.mock_dispatcher.add_request(
'POST', spi_path, spi_headers, JsonMatches(spi_body_json),
request.source_ip).AndReturn(spi_response)
self.mox.StubOutWithMock(self.server, 'handle_spi_response')
self.server.handle_spi_response(
mox.IsA(api_request.ApiRequest), mox.IsA(api_request.ApiRequest),
spi_response, mox.IsA(dict), self.start_response).AndReturn('Test')
# Run the test.
self.mox.ReplayAll()
response = self.server.dispatch(request, self.start_response)
self.mox.VerifyAll()
self.assertEqual('Test', response)
def test_dispatch_invalid_path(self):
config = json.dumps({
'name': 'guestbook_api',
'version': 'v1',
'methods': {
'guestbook.get': {
'httpMethod': 'GET',
'path': 'greetings/{gid}',
'rosyMethod': 'MyApi.greetings_get'
}
}
})
request = test_utils.build_request('/_ah/api/foo')
self.prepare_dispatch(config)
self.mox.ReplayAll()
response = self.server.dispatch(request, self.start_response)
self.mox.VerifyAll()
self.assert_http_match(response, 404,
[('Content-Type', 'text/plain'),
('Content-Length', '9')],
'Not Found')
def test_dispatch_invalid_enum(self):
config = json.dumps({
'name': 'guestbook_api',
'version': 'v1',
'methods': {
'guestbook.get': {
'httpMethod': 'GET',
'path': 'greetings/{gid}',
'rosyMethod': 'MyApi.greetings_get',
'request': {
'body': 'empty',
'parameters': {'gid': {'enum': {'X': {'backendValue': 'X'}},
'type': 'string'
}
}
}
}
}
})
request = test_utils.build_request(
'/_ah/api/guestbook_api/v1/greetings/invalid_enum')
self.prepare_dispatch(config)
self.mox.ReplayAll()
response = self.server.dispatch(request, self.start_response)
self.mox.VerifyAll()
logging.warning('Config %s', self.server.config_manager.configs)
self.assertEqual(self.response_status, '400 Bad Request')
body = ''.join(response)
body_json = json.loads(body)
self.assertEqual(1, len(body_json['error']['errors']))
self.assertEqual('gid', body_json['error']['errors'][0]['location'])
self.assertEqual('invalidParameter',
body_json['error']['errors'][0]['reason'])
def test_dispatch_spi_error(self):
"""Check the error response if the SPI returns an error."""
config = json.dumps({
'name': 'guestbook_api',
'version': 'v1',
'methods': {
'guestbook.get': {
'httpMethod': 'GET',
'path': 'greetings/{gid}',
'rosyMethod': 'MyApi.greetings_get'
}
}
})
request = test_utils.build_request('/_ah/api/foo')
self.prepare_dispatch(config)
self.mox.StubOutWithMock(self.server, 'call_spi')
# The application chose to throw a 404 error.
response = dispatcher.ResponseTuple('404 Not Found', [],
('{"state": "APPLICATION_ERROR",'
' "error_message": "Test error"}'))
self.server.call_spi(request, mox.IgnoreArg()).AndRaise(
errors.BackendError(response))
self.mox.ReplayAll()
response = self.server.dispatch(request, self.start_response)
self.mox.VerifyAll()
expected_response = (
'{\n'
' "error": {\n'
' "code": 404, \n'
' "errors": [\n'
' {\n'
' "domain": "global", \n'
' "message": "Test error", \n'
' "reason": "notFound"\n'
' }\n'
' ], \n'
' "message": "Test error"\n'
' }\n'
'}')
response = ''.join(response)
self.assert_http_match(response, '404 Not Found',
[('Content-Length', '%d' % len(expected_response)),
('Content-Type', 'application/json')],
expected_response)
def test_dispatch_rpc_error(self):
"""Test than an RPC call that returns an error is handled properly."""
config = json.dumps({
'name': 'guestbook_api',
'version': 'v1',
'methods': {
'guestbook.get': {
'httpMethod': 'GET',
'path': 'greetings/{gid}',
'rosyMethod': 'MyApi.greetings_get'
}
}
})
request = test_utils.build_request(
'/_ah/api/rpc',
'{"method": "foo.bar", "apiVersion": "X", "id": "gapiRpc"}')
self.prepare_dispatch(config)
self.mox.StubOutWithMock(self.server, 'call_spi')
# The application chose to throw a 404 error.
response = dispatcher.ResponseTuple('404 Not Found', [],
('{"state": "APPLICATION_ERROR",'
' "error_message": "Test error"}'))
self.server.call_spi(request, mox.IgnoreArg()).AndRaise(
errors.BackendError(response))
self.mox.ReplayAll()
response = self.server.dispatch(request, self.start_response)
self.mox.VerifyAll()
expected_response = {'error': {'code': 404,
'message': 'Test error',
'data': [{
'domain': 'global',
'reason': 'notFound',
'message': 'Test error',
}]
},
'id': 'gapiRpc'
}
response = ''.join(response)
self.assertEqual('200 OK', self.response_status)
self.assertEqual(expected_response, json.loads(response))
def test_dispatch_json_rpc(self):
config = json.dumps({
'name': 'guestbook_api',
'version': 'X',
'methods': {
'foo.bar': {
'httpMethod': 'GET',
'path': 'greetings/{gid}',
'rosyMethod': 'baz.bim'
}
}
})
request = test_utils.build_request(
'/_ah/api/rpc',
'{"method": "foo.bar", "apiVersion": "X"}')
self.assert_dispatch_to_spi(request, config,
'/_ah/spi/baz.bim')
def test_dispatch_rest(self):
config = json.dumps({
'name': 'myapi',
'version': 'v1',
'methods': {
'bar': {
'httpMethod': 'GET',
'path': 'foo/{id}',
'rosyMethod': 'baz.bim'
}
}
})
request = test_utils.build_request('/_ah/api/myapi/v1/foo/testId')
self.assert_dispatch_to_spi(request, config,
'/_ah/spi/baz.bim',
{'id': 'testId'})
def test_explorer_redirect(self):
request = test_utils.build_request('/_ah/api/explorer')
response = self.server.dispatch(request, self.start_response)
self.assert_http_match(response, 302,
[('Content-Length', '0'),
('Location', ('http://apis-explorer.appspot.com/'
'apis-explorer/?base='
'http://localhost:42/_ah/api'))],
'')
def test_static_existing_file(self):
relative_url = '/_ah/api/static/proxy.html'
# Set up mocks for the call to DiscoveryApiProxy.get_static_file.
discovery_api = self.mox.CreateMock(
discovery_api_proxy.DiscoveryApiProxy)
self.mox.StubOutWithMock(discovery_api_proxy, 'DiscoveryApiProxy')
discovery_api_proxy.DiscoveryApiProxy().AndReturn(discovery_api)
static_response = self.mox.CreateMock(httplib.HTTPResponse)
static_response.status = 200
static_response.reason = 'OK'
static_response.getheader('Content-Type').AndReturn('test/type')
test_body = 'test body'
discovery_api.get_static_file(relative_url).AndReturn(
(static_response, test_body))
# Make sure the dispatch works as expected.
request = test_utils.build_request(relative_url)
self.mox.ReplayAll()
response = self.server.dispatch(request, self.start_response)
self.mox.VerifyAll()
response = ''.join(response)
self.assert_http_match(response, '200 OK',
[('Content-Length', '%d' % len(test_body)),
('Content-Type', 'test/type')],
test_body)
def test_static_non_existing_file(self):
relative_url = '/_ah/api/static/blah.html'
# Set up mocks for the call to DiscoveryApiProxy.get_static_file.
discovery_api = self.mox.CreateMock(
discovery_api_proxy.DiscoveryApiProxy)
self.mox.StubOutWithMock(discovery_api_proxy, 'DiscoveryApiProxy')
discovery_api_proxy.DiscoveryApiProxy().AndReturn(discovery_api)
static_response = self.mox.CreateMock(httplib.HTTPResponse)
static_response.status = 404
static_response.reason = 'Not Found'
static_response.getheaders().AndReturn([('Content-Type', 'test/type')])
test_body = 'No Body'
discovery_api.get_static_file(relative_url).AndReturn(
(static_response, test_body))
# Make sure the dispatch works as expected.
request = test_utils.build_request(relative_url)
self.mox.ReplayAll()
response = self.server.dispatch(request, self.start_response)
self.mox.VerifyAll()
response = ''.join(response)
self.assert_http_match(response, '404 Not Found',
[('Content-Length', '%d' % len(test_body)),
('Content-Type', 'test/type')],
test_body)
def test_handle_non_json_spi_response(self):
orig_request = test_utils.build_request('/_ah/api/fake/path')
spi_request = orig_request.copy()
spi_response = dispatcher.ResponseTuple(
200, [('Content-type', 'text/plain')],
'This is an invalid response.')
response = self.server.handle_spi_response(orig_request, spi_request,
spi_response, {},
self.start_response)
error_json = {'error': {'message':
'Non-JSON reply: This is an invalid response.'}}
body = json.dumps(error_json)
self.assert_http_match(response, '500',
[('Content-Type', 'application/json'),
('Content-Length', '%d' % len(body))],
body)
def test_handle_non_json_spi_response_cors(self):
"""Test that an error response still handles CORS headers."""
server_response = dispatcher.ResponseTuple(
'200 OK', [('Content-type', 'text/plain')],
'This is an invalid response.')
response = self.check_cors([('origin', 'test.com')], True, 'test.com',
server_response=server_response)
self.assertEqual(
{'error': {'message': 'Non-JSON reply: This is an invalid response.'}},
json.loads(response))
def check_cors(self, request_headers, expect_response, expected_origin=None,
expected_allow_headers=None, server_response=None):
"""Check that CORS headers are handled correctly.
Args:
request_headers: A list of (header, value), to be used as headers in the
request.
expect_response: A boolean, whether or not CORS headers are expected in
the response.
expected_origin: A string or None. If this is a string, this is the value
that's expected in the response's allow origin header. This can be
None if expect_response is False.
expected_allow_headers: A string or None. If this is a string, this is
the value that's expected in the response's allow headers header. If
this is None, then the response shouldn't have any allow headers
headers.
server_response: A dispatcher.ResponseTuple or None. The backend's
response, to be wrapped and returned as the server's response. If
this is None, a generic response will be generated.
Returns:
A string containing the body of the response that would be sent.
"""
orig_request = test_utils.build_request('/_ah/api/fake/path',
http_headers=request_headers)
spi_request = orig_request.copy()
if server_response is None:
server_response = dispatcher.ResponseTuple(
'200 OK', [('Content-type', 'application/json')], '{}')
response = self.server.handle_spi_response(orig_request, spi_request,
server_response, {},
self.start_response)
headers = dict(self.response_headers)
if expect_response:
self.assertIn(endpoints_server._CORS_HEADER_ALLOW_ORIGIN, headers)
self.assertEqual(
headers[endpoints_server._CORS_HEADER_ALLOW_ORIGIN],
expected_origin)
self.assertIn(endpoints_server._CORS_HEADER_ALLOW_METHODS, headers)
self.assertEqual(set(headers[
endpoints_server._CORS_HEADER_ALLOW_METHODS].split(',')),
endpoints_server._CORS_ALLOWED_METHODS)
if expected_allow_headers is not None:
self.assertIn(endpoints_server._CORS_HEADER_ALLOW_HEADERS,
headers)
self.assertEqual(
headers[endpoints_server._CORS_HEADER_ALLOW_HEADERS],
expected_allow_headers)
else:
self.assertNotIn(endpoints_server._CORS_HEADER_ALLOW_HEADERS,
headers)
else:
self.assertNotIn(endpoints_server._CORS_HEADER_ALLOW_ORIGIN,
headers)
self.assertNotIn(endpoints_server._CORS_HEADER_ALLOW_METHODS,
headers)
self.assertNotIn(endpoints_server._CORS_HEADER_ALLOW_HEADERS,
headers)
return ''.join(response)
def test_handle_cors(self):
"""Test CORS support on a regular request."""
self.check_cors([('origin', 'test.com')], True, 'test.com')
def test_handle_cors_preflight(self):
"""Test a CORS preflight request."""
self.check_cors([('origin', 'http://example.com'),
('Access-control-request-method', 'GET')], True,
'http://example.com')
def test_handle_cors_preflight_invalid(self):
"""Test a CORS preflight request for an unaccepted OPTIONS request."""
self.check_cors([('origin', 'http://example.com'),
('Access-control-request-method', 'OPTIONS')], False)
def test_handle_cors_preflight_request_headers(self):
"""Test a CORS preflight request."""
self.check_cors([('origin', 'http://example.com'),
('Access-control-request-method', 'GET'),
('Access-Control-Request-Headers', 'Date,Expires')], True,
'http://example.com', 'Date,Expires')
def test_lily_uses_python_method_name(self):
"""Verify Lily protocol correctly uses python method name.
This test verifies the fix to http://b/7189819
"""
config = json.dumps({
'name': 'guestbook_api',
'version': 'X',
'methods': {
'author.greeting.info.get': {
'httpMethod': 'GET',
'path': 'authors/{aid}/greetings/{gid}/infos/{iid}',
'rosyMethod': 'InfoService.get'
}
}
})
request = test_utils.build_request(
'/_ah/api/rpc',
'{"method": "author.greeting.info.get", "apiVersion": "X"}')
self.assert_dispatch_to_spi(request, config,
'/_ah/spi/InfoService.get',
{})
def test_handle_spi_response_json_rpc(self):
"""Verify headers transformed, JsonRpc response transformed, written."""
orig_request = test_utils.build_request(
'/_ah/api/rpc', '{"method": "foo.bar", "apiVersion": "X"}')
self.assertTrue(orig_request.is_rpc())
orig_request.request_id = 'Z'
spi_request = orig_request.copy()
spi_response = dispatcher.ResponseTuple('200 OK', [('a', 'b')],
'{"some": "response"}')
response = self.server.handle_spi_response(orig_request, spi_request,
spi_response, {},
self.start_response)
response = ''.join(response) # Merge response iterator into single body.
self.assertEqual(self.response_status, '200 OK')
self.assertIn(('a', 'b'), self.response_headers)
self.assertEqual({'id': 'Z', 'result': {'some': 'response'}},
json.loads(response))
def test_handle_spi_response_batch_json_rpc(self):
"""Verify that batch requests have an appropriate batch response."""
orig_request = test_utils.build_request(
'/_ah/api/rpc', '[{"method": "foo.bar", "apiVersion": "X"}]')
self.assertTrue(orig_request.is_batch())
self.assertTrue(orig_request.is_rpc())
orig_request.request_id = 'Z'
spi_request = orig_request.copy()
spi_response = dispatcher.ResponseTuple('200 OK', [('a', 'b')],
'{"some": "response"}')
response = self.server.handle_spi_response(orig_request, spi_request,
spi_response, {},
self.start_response)
response = ''.join(response) # Merge response iterator into single body.
self.assertEqual(self.response_status, '200 OK')
self.assertIn(('a', 'b'), self.response_headers)
self.assertEqual([{'id': 'Z', 'result': {'some': 'response'}}],
json.loads(response))
def test_handle_spi_response_rest(self):
orig_request = test_utils.build_request('/_ah/api/test', '{}')
spi_request = orig_request.copy()
body = json.dumps({'some': 'response'}, indent=1)
spi_response = dispatcher.ResponseTuple('200 OK', [('a', 'b')], body)
response = self.server.handle_spi_response(orig_request, spi_request,
spi_response, {},
self.start_response)
self.assert_http_match(response, '200 OK',
[('a', 'b'),
('Content-Length', '%d' % len(body))],
body)
def test_transform_rest_response(self):
"""Verify the response is reformatted correctly."""
orig_response = '{"sample": "test", "value1": {"value2": 2}}'
expected_response = ('{\n'
' "sample": "test", \n'
' "value1": {\n'
' "value2": 2\n'
' }\n'
'}')
self.assertEqual(expected_response,
self.server.transform_rest_response(orig_response))
def test_transform_json_rpc_response_batch(self):
"""Verify request_id inserted into the body, and body into body.result."""
orig_request = test_utils.build_request(
'/_ah/api/rpc', '[{"params": {"sample": "body"}, "id": "42"}]')
request = orig_request.copy()
request.request_id = '42'
orig_response = '{"sample": "body"}'
response = self.server.transform_jsonrpc_response(request, orig_response)
self.assertEqual([{'result': {'sample': 'body'}, 'id': '42'}],
json.loads(response))
def test_lookup_rpc_method_no_body(self):
orig_request = test_utils.build_request('/_ah/api/rpc', '')
self.assertEqual(None, self.server.lookup_rpc_method(orig_request))
def test_lookup_rpc_method(self):
self.mox.StubOutWithMock(self.server.config_manager, 'lookup_rpc_method')
self.server.config_manager.lookup_rpc_method('foo', 'v1').AndReturn('bar')
self.mox.ReplayAll()
orig_request = test_utils.build_request(
'/_ah/api/rpc', '{"method": "foo", "apiVersion": "v1"}')
self.assertEqual('bar', self.server.lookup_rpc_method(orig_request))
self.mox.VerifyAll()
def test_verify_response(self):
response = dispatcher.ResponseTuple('200', [('Content-Type', 'a')], '')
# Expected response
self.assertEqual(True, self.server.verify_response(response, 200, 'a'))
# Any content type accepted
self.assertEqual(True, self.server.verify_response(response, 200, None))
# Status code mismatch
self.assertEqual(False, self.server.verify_response(response, 400, 'a'))
# Content type mismatch
self.assertEqual(False, self.server.verify_response(response, 200, 'b'))
response = dispatcher.ResponseTuple('200', [('Content-Length', '10')], '')
# Any content type accepted
self.assertEqual(True, self.server.verify_response(response, 200, None))
# Specified content type not matched
self.assertEqual(False, self.server.verify_response(response, 200, 'a'))
def test_check_empty_response(self):
"""Test that check_empty_response returns 204 for an empty response."""
orig_request = test_utils.build_request('/_ah/api/test', '{}')
method_config = {'response': {'body': 'empty'}}
empty_response = self.server.check_empty_response(orig_request,
method_config,
self.start_response)
self.assert_http_match(empty_response, 204, [('Content-Length', '0')], '')
def test_check_non_empty_response(self):
"""Test that check_empty_response returns None for a non-empty response."""
orig_request = test_utils.build_request('/_ah/api/test', '{}')
method_config = {'response': {'body': 'autoTemplate(backendResponse)'}}
empty_response = self.server.check_empty_response(orig_request,
method_config,
self.start_response)
self.assertIsNone(empty_response)
self.assertIsNone(self.response_status)
self.assertIsNone(self.response_headers)
self.assertIsNone(self.response_exc_info)
class TransformRequestTests(unittest.TestCase):
"""Tests that only hit the request transformation functions."""
def setUp(self):
"""Set up a dev Endpoints server."""
super(TransformRequestTests, self).setUp()
self.mox = mox.Mox()
self.config_manager = api_config_manager.ApiConfigManager()
self.mock_dispatcher = self.mox.CreateMock(dispatcher.Dispatcher)
self.server = endpoints_server.EndpointsDispatcher(self.mock_dispatcher,
self.config_manager)
def tearDown(self):
self.mox.UnsetStubs()
def test_transform_request(self):
"""Verify path is method name after a request is transformed."""
request = test_utils.build_request('/_ah/api/test/{gid}',
'{"sample": "body"}')
method_config = {'rosyMethod': 'GuestbookApi.greetings_get'}
new_request = self.server.transform_request(request, {'gid': 'X'},
method_config)
self.assertEqual({'sample': 'body', 'gid': 'X'},
json.loads(new_request.body))
self.assertEqual('GuestbookApi.greetings_get', new_request.path)
def test_transform_json_rpc_request(self):
"""Verify request_id is extracted and body is scoped to body.params."""
orig_request = test_utils.build_request(
'/_ah/api/rpc', '{"params": {"sample": "body"}, "id": "42"}')
new_request = self.server.transform_jsonrpc_request(orig_request)
self.assertEqual({'sample': 'body'},
json.loads(new_request.body))
self.assertEqual('42', new_request.request_id)
def _try_transform_rest_request(self, path_parameters, query_parameters,
body_json, expected, method_params=None):
"""Takes body, query and path values from a rest request for testing.
Args:
path_parameters: A dict containing the parameters parsed from the path.
For example if the request came through /a/b for the template /a/{x}
then we'd have {'x': 'b'}.
query_parameters: A dict containing the parameters parsed from the query
string.
body_json: A dict with the JSON object from the request body.
expected: A dict with the expected JSON body after being transformed.
method_params: Optional dictionary specifying the parameter configuration
associated with the method.
"""
method_params = method_params or {}
test_request = test_utils.build_request('/_ah/api/test')
test_request.body_json = body_json
test_request.body = json.dumps(body_json)
test_request.parameters = query_parameters
transformed_request = self.server.transform_rest_request(test_request,
path_parameters,
method_params)
self.assertEqual(expected, transformed_request.body_json)
self.assertEqual(transformed_request.body_json,
json.loads(transformed_request.body))
# Path only
def test_transform_rest_request_path_only(self):
path_parameters = {'gid': 'X'}
query_parameters = {}
body_object = {}
expected = {'gid': 'X'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_path_only_message_field(self):
path_parameters = {'gid.val': 'X'}
query_parameters = {}
body_object = {}
expected = {'gid': {'val': 'X'}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_path_only_enum(self):
query_parameters = {}
body_object = {}
enum_descriptor = {'X': {'backendValue': 'X'}}
method_params = {'gid': {'enum': enum_descriptor}}
# Good enum
path_parameters = {'gid': 'X'}
expected = {'gid': 'X'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
# Bad enum
path_parameters = {'gid': 'Y'}
expected = {'gid': 'Y'}
try:
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
self.fail('Bad enum should have caused failure.')
except errors.EnumRejectionError as error:
self.assertEqual(error.parameter_name, 'gid')
# Query only
def test_transform_rest_request_query_only(self):
path_parameters = {}
query_parameters = {'foo': ['bar']}
body_object = {}
expected = {'foo': 'bar'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_query_only_message_field(self):
path_parameters = {}
query_parameters = {'gid.val': ['X']}
body_object = {}
expected = {'gid': {'val': 'X'}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_query_only_multiple_values_not_repeated(self):
path_parameters = {}
query_parameters = {'foo': ['bar', 'baz']}
body_object = {}
expected = {'foo': 'bar'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_query_only_multiple_values_repeated(self):
path_parameters = {}
query_parameters = {'foo': ['bar', 'baz']}
body_object = {}
method_params = {'foo': {'repeated': True}}
expected = {'foo': ['bar', 'baz']}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
def test_transform_rest_request_query_only_enum(self):
path_parameters = {}
body_object = {}
enum_descriptor = {'X': {'backendValue': 'X'}}
method_params = {'gid': {'enum': enum_descriptor}}
# Good enum
query_parameters = {'gid': ['X']}
expected = {'gid': 'X'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
# Bad enum
query_parameters = {'gid': ['Y']}
expected = {'gid': 'Y'}
try:
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
self.fail('Bad enum should have caused failure.')
except errors.EnumRejectionError as error:
self.assertEqual(error.parameter_name, 'gid')
def test_transform_rest_request_query_only_repeated_enum(self):
path_parameters = {}
body_object = {}
enum_descriptor = {'X': {'backendValue': 'X'}, 'Y': {'backendValue': 'Y'}}
method_params = {'gid': {'enum': enum_descriptor, 'repeated': True}}
# Good enum
query_parameters = {'gid': ['X', 'Y']}
expected = {'gid': ['X', 'Y']}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
# Bad enum
query_parameters = {'gid': ['X', 'Y', 'Z']}
expected = {'gid': ['X', 'Y', 'Z']}
try:
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
self.fail('Bad enum should have caused failure.')
except errors.EnumRejectionError as error:
self.assertEqual(error.parameter_name, 'gid[2]')
# Body only
def test_transform_rest_request_body_only(self):
path_parameters = {}
query_parameters = {}
body_object = {'sample': 'body'}
expected = {'sample': 'body'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_body_only_any_old_value(self):
path_parameters = {}
query_parameters = {}
body_object = {'sample': {'body': ['can', 'be', 'anything']}}
expected = {'sample': {'body': ['can', 'be', 'anything']}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_body_only_message_field(self):
path_parameters = {}
query_parameters = {}
body_object = {'gid': {'val': 'X'}}
expected = {'gid': {'val': 'X'}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_body_only_enum(self):
path_parameters = {}
query_parameters = {}
enum_descriptor = {'X': {'backendValue': 'X'}}
method_params = {'gid': {'enum': enum_descriptor}}
# Good enum
body_object = {'gid': 'X'}
expected = {'gid': 'X'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
# Bad enum
body_object = {'gid': 'Y'}
expected = {'gid': 'Y'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
# Path and query only
def test_transform_rest_request_path_query_no_collision(self):
path_parameters = {'a': 'b'}
query_parameters = {'c': ['d']}
body_object = {}
expected = {'a': 'b', 'c': 'd'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_path_query_collision(self):
path_parameters = {'a': 'b'}
query_parameters = {'a': ['d']}
body_object = {}
expected = {'a': 'd'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_path_query_collision_in_repeated_param(self):
path_parameters = {'a': 'b'}
query_parameters = {'a': ['d', 'c']}
body_object = {}
expected = {'a': ['d', 'c', 'b']}
method_params = {'a': {'repeated': True}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
# Path and body only
def test_transform_rest_request_path_body_no_collision(self):
path_parameters = {'a': 'b'}
query_parameters = {}
body_object = {'c': 'd'}
expected = {'a': 'b', 'c': 'd'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_path_body_collision(self):
path_parameters = {'a': 'b'}
query_parameters = {}
body_object = {'a': 'd'}
expected = {'a': 'd'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_path_body_collision_in_repeated_param(self):
path_parameters = {'a': 'b'}
query_parameters = {}
body_object = {'a': ['d']}
expected = {'a': ['d']}
method_params = {'a': {'repeated': True}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
def test_transform_rest_request_path_body_message_field_cooperative(self):
path_parameters = {'gid.val1': 'X'}
query_parameters = {}
body_object = {'gid': {'val2': 'Y'}}
expected = {'gid': {'val1': 'X', 'val2': 'Y'}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_path_body_message_field_collision(self):
path_parameters = {'gid.val': 'X'}
query_parameters = {}
body_object = {'gid': {'val': 'Y'}}
expected = {'gid': {'val': 'Y'}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
# Query and body only
def test_transform_rest_request_query_body_no_collision(self):
path_parameters = {}
query_parameters = {'a': ['b']}
body_object = {'c': 'd'}
expected = {'a': 'b', 'c': 'd'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_query_body_collision(self):
path_parameters = {}
query_parameters = {'a': ['b']}
body_object = {'a': 'd'}
expected = {'a': 'd'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_query_body_collision_in_repeated_param(self):
path_parameters = {}
query_parameters = {'a': ['b']}
body_object = {'a': ['d']}
expected = {'a': ['d']}
method_params = {'a': {'repeated': True}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
def test_transform_rest_request_query_body_message_field_cooperative(self):
path_parameters = {}
query_parameters = {'gid.val1': ['X']}
body_object = {'gid': {'val2': 'Y'}}
expected = {'gid': {'val1': 'X', 'val2': 'Y'}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_query_body_message_field_collision(self):
path_parameters = {}
query_parameters = {'gid.val': ['X']}
body_object = {'gid': {'val': 'Y'}}
expected = {'gid': {'val': 'Y'}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
# Path, body and query
def test_transform_rest_request_path_query_body_no_collision(self):
path_parameters = {'a': 'b'}
query_parameters = {'c': ['d']}
body_object = {'e': 'f'}
expected = {'a': 'b', 'c': 'd', 'e': 'f'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_path_query_body_collision(self):
path_parameters = {'a': 'b'}
query_parameters = {'a': ['d']}
body_object = {'a': 'f'}
expected = {'a': 'f'}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected)
def test_transform_rest_request_unknown_parameters(self):
path_parameters = {'a': 'b'}
query_parameters = {'c': ['d']}
body_object = {'e': 'f'}
expected = {'a': 'b', 'c': 'd', 'e': 'f'}
method_params = {'X': {}, 'Y': {}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
# Other tests.
def test_type_conversions(self):
"""Verify that type conversion matches prod."""
path_parameters = {'int32_val': '1', 'uint32_val': '2',
'int64_val': '3', 'uint64_val': '4',
'true_bool_val': 'true', 'false_bool_val': 'FALSE'}
query_parameters = {'float_val': ['5.25'], 'double_val': ['6.5']}
body_object = {'int_body_val': '7'}
expected = {'int32_val': 1,
'uint32_val': 2,
'int64_val': '3',
'uint64_val': '4',
'true_bool_val': True,
'false_bool_val': False,
'float_val': 5.25,
'double_val': 6.5,
'int_body_val': '7'}
method_params = {'int32_val': {'type': 'int32'},
'uint32_val': {'type': 'uint32'},
'int64_val': {'type': 'int64'},
'uint64_val': {'type': 'uint64'},
'true_bool_val': {'type': 'boolean'},
'false_bool_val': {'type': 'boolean'},
'float_val': {'type': 'float'},
'double_val': {'type': 'double'},
'int_body_val': {'type': 'int32'}}
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected, method_params)
def test_invalid_conversions(self):
"""Verify that invalid parameter values for basic types raise errors."""
for type_name in ('int32', 'uint32', 'boolean', 'float', 'double'):
param_name = '%s_val' % type_name
path_parameters = {param_name: 'invalid'}
query_parameters = {}
body_object = {}
expected = {}
method_params = {param_name: {'type': type_name}}
try:
self._try_transform_rest_request(path_parameters, query_parameters,
body_object, expected,
method_params=method_params)
self.fail('Bad %s value should have caused failure.' % type_name)
except errors.BasicTypeParameterError as error:
self.assertEqual(error.parameter_name, param_name)
if __name__ == '__main__':
unittest.main()
|
dmanev/ArchExtractor | refs/heads/master | ArchExtractor/tests/testgen/Base/SwComponent/PortInterface/DataElement_test.py | 1 | # auto-generated test file
import unittest
import umlgen.Base.SwComponent.PortInterface.DataElement
# Start of user code imports
from Datatype.DataType import DataType
# End of user code
class DataElementTest(unittest.TestCase):
def setUp(self):
# self._testInstance = umlgen.Base.SwComponent.PortInterface.DataElement.DataElement()
# Start of user code setUp
self._testInstance = umlgen.Base.SwComponent.PortInterface.DataElement.DataElement()
# End of user code
pass
def tearDown(self):
# Start of user code tearDown
# End of user code
pass
def test_get_itsDataType(self):
# Start of user code get_itsDataType
# End of user code
pass
def test_set_itsDataType(self):
# Start of user code set_itsDataType
self._testInstance.setItsDataType(DataType())
self.assertIsInstance(self._testInstance.getItsDataType(), DataType)
# End of user code
pass
if __name__ == '__main__':
unittest.main()
|
pacificIT/mopidy | refs/heads/develop | mopidy/internal/__init__.py | 190 | from __future__ import absolute_import, unicode_literals
|
kpdyer/regex2dfa | refs/heads/master | third_party/re2/re2/testing/unicode_test.py | 325 | #!/usr/bin/python2.4
#
# Copyright 2008 The RE2 Authors. All Rights Reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Unittest for the util/regexp/re2/unicode.py module."""
import os
import StringIO
from google3.pyglib import flags
from google3.testing.pybase import googletest
from google3.util.regexp.re2 import unicode
_UNICODE_DIR = os.path.join(flags.FLAGS.test_srcdir, "google3", "third_party",
"unicode", "ucd-5.1.0")
class ConvertTest(googletest.TestCase):
"""Test the conversion functions."""
def testUInt(self):
self.assertEquals(0x0000, unicode._UInt("0000"))
self.assertEquals(0x263A, unicode._UInt("263A"))
self.assertEquals(0x10FFFF, unicode._UInt("10FFFF"))
self.assertRaises(unicode.InputError, unicode._UInt, "263")
self.assertRaises(unicode.InputError, unicode._UInt, "263AAAA")
self.assertRaises(unicode.InputError, unicode._UInt, "110000")
def testURange(self):
self.assertEquals([1, 2, 3], unicode._URange("0001..0003"))
self.assertEquals([1], unicode._URange("0001"))
self.assertRaises(unicode.InputError, unicode._URange, "0001..0003..0005")
self.assertRaises(unicode.InputError, unicode._URange, "0003..0001")
self.assertRaises(unicode.InputError, unicode._URange, "0001..0001")
def testUStr(self):
self.assertEquals("0x263A", unicode._UStr(0x263a))
self.assertEquals("0x10FFFF", unicode._UStr(0x10FFFF))
self.assertRaises(unicode.InputError, unicode._UStr, 0x110000)
self.assertRaises(unicode.InputError, unicode._UStr, -1)
_UNICODE_TABLE = """# Commented line, should be ignored.
# The next line is blank and should be ignored.
0041;Capital A;Line 1
0061..007A;Lowercase;Line 2
1F00;<Greek, First>;Ignored
1FFE;<Greek, Last>;Line 3
10FFFF;Runemax;Line 4
0000;Zero;Line 5
"""
_BAD_TABLE1 = """
111111;Not a code point;
"""
_BAD_TABLE2 = """
0000;<Zero, First>;Missing <Zero, Last>
"""
_BAD_TABLE3 = """
0010..0001;Bad range;
"""
class AbortError(Exception):
"""Function should not have been called."""
def Abort():
raise AbortError("Abort")
def StringTable(s, n, f):
unicode.ReadUnicodeTable(StringIO.StringIO(s), n, f)
class ReadUnicodeTableTest(googletest.TestCase):
"""Test the ReadUnicodeTable function."""
def testSimpleTable(self):
ncall = [0] # can't assign to ordinary int in DoLine
def DoLine(codes, fields):
self.assertEquals(3, len(fields))
ncall[0] += 1
self.assertEquals("Line %d" % (ncall[0],), fields[2])
if ncall[0] == 1:
self.assertEquals([0x0041], codes)
self.assertEquals("0041", fields[0])
self.assertEquals("Capital A", fields[1])
elif ncall[0] == 2:
self.assertEquals(range(0x0061, 0x007A + 1), codes)
self.assertEquals("0061..007A", fields[0])
self.assertEquals("Lowercase", fields[1])
elif ncall[0] == 3:
self.assertEquals(range(0x1F00, 0x1FFE + 1), codes)
self.assertEquals("1F00..1FFE", fields[0])
self.assertEquals("Greek", fields[1])
elif ncall[0] == 4:
self.assertEquals([0x10FFFF], codes)
self.assertEquals("10FFFF", fields[0])
self.assertEquals("Runemax", fields[1])
elif ncall[0] == 5:
self.assertEquals([0x0000], codes)
self.assertEquals("0000", fields[0])
self.assertEquals("Zero", fields[1])
StringTable(_UNICODE_TABLE, 3, DoLine)
self.assertEquals(5, ncall[0])
def testErrorTables(self):
self.assertRaises(unicode.InputError, StringTable, _UNICODE_TABLE, 4, Abort)
self.assertRaises(unicode.InputError, StringTable, _UNICODE_TABLE, 2, Abort)
self.assertRaises(unicode.InputError, StringTable, _BAD_TABLE1, 3, Abort)
self.assertRaises(unicode.InputError, StringTable, _BAD_TABLE2, 3, Abort)
self.assertRaises(unicode.InputError, StringTable, _BAD_TABLE3, 3, Abort)
class ParseContinueTest(googletest.TestCase):
"""Test the ParseContinue function."""
def testParseContinue(self):
self.assertEquals(("Private Use", "First"),
unicode._ParseContinue("<Private Use, First>"))
self.assertEquals(("Private Use", "Last"),
unicode._ParseContinue("<Private Use, Last>"))
self.assertEquals(("<Private Use, Blah>", None),
unicode._ParseContinue("<Private Use, Blah>"))
class CaseGroupsTest(googletest.TestCase):
"""Test the CaseGroups function (and the CaseFoldingReader)."""
def FindGroup(self, c):
if type(c) == str:
c = ord(c)
for g in self.groups:
if c in g:
return g
return None
def testCaseGroups(self):
self.groups = unicode.CaseGroups(unicode_dir=_UNICODE_DIR)
self.assertEquals([ord("A"), ord("a")], self.FindGroup("a"))
self.assertEquals(None, self.FindGroup("0"))
class ScriptsTest(googletest.TestCase):
"""Test the Scripts function (and the ScriptsReader)."""
def FindScript(self, c):
if type(c) == str:
c = ord(c)
for script, codes in self.scripts.items():
for code in codes:
if c == code:
return script
return None
def testScripts(self):
self.scripts = unicode.Scripts(unicode_dir=_UNICODE_DIR)
self.assertEquals("Latin", self.FindScript("a"))
self.assertEquals("Common", self.FindScript("0"))
self.assertEquals(None, self.FindScript(0xFFFE))
class CategoriesTest(googletest.TestCase):
"""Test the Categories function (and the UnicodeDataReader)."""
def FindCategory(self, c):
if type(c) == str:
c = ord(c)
short = None
for category, codes in self.categories.items():
for code in codes:
if code == c:
# prefer category Nd over N
if len(category) > 1:
return category
if short == None:
short = category
return short
def testCategories(self):
self.categories = unicode.Categories(unicode_dir=_UNICODE_DIR)
self.assertEquals("Ll", self.FindCategory("a"))
self.assertEquals("Nd", self.FindCategory("0"))
self.assertEquals("Lo", self.FindCategory(0xAD00)) # in First, Last range
self.assertEquals(None, self.FindCategory(0xFFFE))
self.assertEquals("Lo", self.FindCategory(0x8B5A))
self.assertEquals("Lo", self.FindCategory(0x6C38))
self.assertEquals("Lo", self.FindCategory(0x92D2))
self.assertTrue(ord("a") in self.categories["L"])
self.assertTrue(ord("0") in self.categories["N"])
self.assertTrue(0x8B5A in self.categories["L"])
self.assertTrue(0x6C38 in self.categories["L"])
self.assertTrue(0x92D2 in self.categories["L"])
def main():
googletest.main()
if __name__ == "__main__":
main()
|
pfhayes/boto | refs/heads/develop | boto/s3/keyfile.py | 203 | # Copyright 2013 Google Inc.
# Copyright 2011, Nexenta Systems Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Wrapper class to expose a Key being read via a partial implementation of the
Python file interface. The only functions supported are those needed for seeking
in a Key open for reading.
"""
import os
from boto.exception import StorageResponseError
class KeyFile():
def __init__(self, key):
self.key = key
self.key.open_read()
self.location = 0
self.closed = False
self.softspace = -1 # Not implemented.
self.mode = 'r'
self.encoding = 'Undefined in KeyFile'
self.errors = 'Undefined in KeyFile'
self.newlines = 'Undefined in KeyFile'
self.name = key.name
def tell(self):
if self.location is None:
raise ValueError("I/O operation on closed file")
return self.location
def seek(self, pos, whence=os.SEEK_SET):
self.key.close(fast=True)
if whence == os.SEEK_END:
# We need special handling for this case because sending an HTTP range GET
# with EOF for the range start would cause an invalid range error. Instead
# we position to one before EOF (plus pos) and then read one byte to
# position at EOF.
if self.key.size == 0:
# Don't try to seek with an empty key.
return
pos = self.key.size + pos - 1
if pos < 0:
raise IOError("Invalid argument")
self.key.open_read(headers={"Range": "bytes=%d-" % pos})
self.key.read(1)
self.location = pos + 1
return
if whence == os.SEEK_SET:
if pos < 0:
raise IOError("Invalid argument")
elif whence == os.SEEK_CUR:
pos += self.location
else:
raise IOError('Invalid whence param (%d) passed to seek' % whence)
try:
self.key.open_read(headers={"Range": "bytes=%d-" % pos})
except StorageResponseError as e:
# 416 Invalid Range means that the given starting byte was past the end
# of file. We catch this because the Python file interface allows silently
# seeking past the end of the file.
if e.status != 416:
raise
self.location = pos
def read(self, size):
self.location += size
return self.key.read(size)
def close(self):
self.key.close()
self.location = None
self.closed = True
def isatty(self):
return False
# Non-file interface, useful for code that wants to dig into underlying Key
# state.
def getkey(self):
return self.key
# Unimplemented interfaces below here.
def write(self, buf):
raise NotImplementedError('write not implemented in KeyFile')
def fileno(self):
raise NotImplementedError('fileno not implemented in KeyFile')
def flush(self):
raise NotImplementedError('flush not implemented in KeyFile')
def next(self):
raise NotImplementedError('next not implemented in KeyFile')
def readinto(self):
raise NotImplementedError('readinto not implemented in KeyFile')
def readline(self):
raise NotImplementedError('readline not implemented in KeyFile')
def readlines(self):
raise NotImplementedError('readlines not implemented in KeyFile')
def truncate(self):
raise NotImplementedError('truncate not implemented in KeyFile')
def writelines(self):
raise NotImplementedError('writelines not implemented in KeyFile')
def xreadlines(self):
raise NotImplementedError('xreadlines not implemented in KeyFile')
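# A minimal usage sketch (illustrative only; the bucket and key names below are
# hypothetical, a live S3 connection with valid credentials would be required,
# and the object is assumed to be at least a dozen bytes long, so this is left
# as a comment rather than executable code):
#
#   from boto.s3.connection import S3Connection
#
#   key = S3Connection().get_bucket('example-bucket').get_key('example-key')
#   kf = KeyFile(key)
#   kf.seek(-10, os.SEEK_END)   # position ten bytes before EOF
#   tail = kf.read(10)          # read the final ten bytes
#   kf.close()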
|
songmonit/CTTMSONLINE_V8 | refs/heads/master | addons/account/report/account_invoice_report.py | 224 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
import openerp.addons.decimal_precision as dp
from openerp.osv import fields,osv
class account_invoice_report(osv.osv):
_name = "account.invoice.report"
_description = "Invoices Statistics"
_auto = False
_rec_name = 'date'
def _compute_amounts_in_user_currency(self, cr, uid, ids, field_names, args, context=None):
"""Compute the amounts in the currency of the user
"""
if context is None:
context={}
currency_obj = self.pool.get('res.currency')
currency_rate_obj = self.pool.get('res.currency.rate')
user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
user_currency_id = user.company_id.currency_id.id
currency_rate_id = currency_rate_obj.search(
cr, uid, [
('rate', '=', 1),
'|',
('currency_id.company_id', '=', user.company_id.id),
('currency_id.company_id', '=', False)
], limit=1, context=context)[0]
base_currency_id = currency_rate_obj.browse(cr, uid, currency_rate_id, context=context).currency_id.id
res = {}
ctx = context.copy()
for item in self.browse(cr, uid, ids, context=context):
ctx['date'] = item.date
price_total = currency_obj.compute(cr, uid, base_currency_id, user_currency_id, item.price_total, context=ctx)
price_average = currency_obj.compute(cr, uid, base_currency_id, user_currency_id, item.price_average, context=ctx)
residual = currency_obj.compute(cr, uid, base_currency_id, user_currency_id, item.residual, context=ctx)
res[item.id] = {
'user_currency_price_total': price_total,
'user_currency_price_average': price_average,
'user_currency_residual': residual,
}
return res
_columns = {
'date': fields.date('Date', readonly=True),
'product_id': fields.many2one('product.product', 'Product', readonly=True),
'product_qty':fields.float('Product Quantity', readonly=True),
'uom_name': fields.char('Reference Unit of Measure', size=128, readonly=True),
'payment_term': fields.many2one('account.payment.term', 'Payment Term', readonly=True),
'period_id': fields.many2one('account.period', 'Force Period', domain=[('state','<>','done')], readonly=True),
'fiscal_position': fields.many2one('account.fiscal.position', 'Fiscal Position', readonly=True),
'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
'categ_id': fields.many2one('product.category','Category of Product', readonly=True),
'journal_id': fields.many2one('account.journal', 'Journal', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
'commercial_partner_id': fields.many2one('res.partner', 'Partner Company', help="Commercial Entity"),
'company_id': fields.many2one('res.company', 'Company', readonly=True),
'user_id': fields.many2one('res.users', 'Salesperson', readonly=True),
'price_total': fields.float('Total Without Tax', readonly=True),
'user_currency_price_total': fields.function(_compute_amounts_in_user_currency, string="Total Without Tax", type='float', digits_compute=dp.get_precision('Account'), multi="_compute_amounts"),
'price_average': fields.float('Average Price', readonly=True, group_operator="avg"),
'user_currency_price_average': fields.function(_compute_amounts_in_user_currency, string="Average Price", type='float', digits_compute=dp.get_precision('Account'), multi="_compute_amounts"),
'currency_rate': fields.float('Currency Rate', readonly=True),
'nbr': fields.integer('# of Invoices', readonly=True), # TDE FIXME master: rename into nbr_lines
'type': fields.selection([
('out_invoice','Customer Invoice'),
('in_invoice','Supplier Invoice'),
('out_refund','Customer Refund'),
('in_refund','Supplier Refund'),
],'Type', readonly=True),
'state': fields.selection([
('draft','Draft'),
('proforma','Pro-forma'),
('proforma2','Pro-forma'),
('open','Open'),
('paid','Done'),
('cancel','Cancelled')
], 'Invoice Status', readonly=True),
'date_due': fields.date('Due Date', readonly=True),
'account_id': fields.many2one('account.account', 'Account',readonly=True),
'account_line_id': fields.many2one('account.account', 'Account Line',readonly=True),
'partner_bank_id': fields.many2one('res.partner.bank', 'Bank Account',readonly=True),
'residual': fields.float('Total Residual', readonly=True),
'user_currency_residual': fields.function(_compute_amounts_in_user_currency, string="Total Residual", type='float', digits_compute=dp.get_precision('Account'), multi="_compute_amounts"),
'country_id': fields.many2one('res.country', 'Country of the Partner Company'),
}
_order = 'date desc'
_depends = {
'account.invoice': [
'account_id', 'amount_total', 'commercial_partner_id', 'company_id',
'currency_id', 'date_due', 'date_invoice', 'fiscal_position',
'journal_id', 'partner_bank_id', 'partner_id', 'payment_term',
'period_id', 'residual', 'state', 'type', 'user_id',
],
'account.invoice.line': [
'account_id', 'invoice_id', 'price_subtotal', 'product_id',
'quantity', 'uos_id',
],
'product.product': ['product_tmpl_id'],
'product.template': ['categ_id'],
'product.uom': ['category_id', 'factor', 'name', 'uom_type'],
'res.currency.rate': ['currency_id', 'name'],
'res.partner': ['country_id'],
}
def _select(self):
select_str = """
SELECT sub.id, sub.date, sub.product_id, sub.partner_id, sub.country_id,
sub.payment_term, sub.period_id, sub.uom_name, sub.currency_id, sub.journal_id,
sub.fiscal_position, sub.user_id, sub.company_id, sub.nbr, sub.type, sub.state,
sub.categ_id, sub.date_due, sub.account_id, sub.account_line_id, sub.partner_bank_id,
sub.product_qty, sub.price_total / cr.rate as price_total, sub.price_average /cr.rate as price_average,
cr.rate as currency_rate, sub.residual / cr.rate as residual, sub.commercial_partner_id as commercial_partner_id
"""
return select_str
def _sub_select(self):
select_str = """
SELECT min(ail.id) AS id,
ai.date_invoice AS date,
ail.product_id, ai.partner_id, ai.payment_term, ai.period_id,
u2.name AS uom_name,
ai.currency_id, ai.journal_id, ai.fiscal_position, ai.user_id, ai.company_id,
count(ail.*) AS nbr,
ai.type, ai.state, pt.categ_id, ai.date_due, ai.account_id, ail.account_id AS account_line_id,
ai.partner_bank_id,
SUM(CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN (- ail.quantity) / u.factor * u2.factor
ELSE ail.quantity / u.factor * u2.factor
END) AS product_qty,
SUM(CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN - ail.price_subtotal
ELSE ail.price_subtotal
END) AS price_total,
CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN SUM(- ail.price_subtotal)
ELSE SUM(ail.price_subtotal)
END / CASE
WHEN SUM(ail.quantity / u.factor * u2.factor) <> 0::numeric
THEN CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN SUM((- ail.quantity) / u.factor * u2.factor)
ELSE SUM(ail.quantity / u.factor * u2.factor)
END
ELSE 1::numeric
END AS price_average,
CASE
WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
THEN - ai.residual
ELSE ai.residual
END / (SELECT count(*) FROM account_invoice_line l where invoice_id = ai.id) *
count(*) AS residual,
ai.commercial_partner_id as commercial_partner_id,
partner.country_id
"""
return select_str
def _from(self):
from_str = """
FROM account_invoice_line ail
JOIN account_invoice ai ON ai.id = ail.invoice_id
JOIN res_partner partner ON ai.commercial_partner_id = partner.id
LEFT JOIN product_product pr ON pr.id = ail.product_id
left JOIN product_template pt ON pt.id = pr.product_tmpl_id
LEFT JOIN product_uom u ON u.id = ail.uos_id
LEFT JOIN product_uom u2 ON u2.id = pt.uom_id
"""
return from_str
def _group_by(self):
group_by_str = """
GROUP BY ail.product_id, ai.date_invoice, ai.id,
ai.partner_id, ai.payment_term, ai.period_id, u2.name, u2.id, ai.currency_id, ai.journal_id,
ai.fiscal_position, ai.user_id, ai.company_id, ai.type, ai.state, pt.categ_id,
ai.date_due, ai.account_id, ail.account_id, ai.partner_bank_id, ai.residual,
ai.amount_total, ai.commercial_partner_id, partner.country_id
"""
return group_by_str
def init(self, cr):
# self._table = account_invoice_report
tools.drop_view_if_exists(cr, self._table)
cr.execute("""CREATE or REPLACE VIEW %s as (
WITH currency_rate (currency_id, rate, date_start, date_end) AS (
SELECT r.currency_id, r.rate, r.name AS date_start,
(SELECT name FROM res_currency_rate r2
WHERE r2.name > r.name AND
r2.currency_id = r.currency_id
ORDER BY r2.name ASC
LIMIT 1) AS date_end
FROM res_currency_rate r
)
%s
FROM (
%s %s %s
) AS sub
JOIN currency_rate cr ON
(cr.currency_id = sub.currency_id AND
cr.date_start <= COALESCE(sub.date, NOW()) AND
(cr.date_end IS NULL OR cr.date_end > COALESCE(sub.date, NOW())))
)""" % (
self._table,
self._select(), self._sub_select(), self._from(), self._group_by()))
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
malayaleecoder/servo | refs/heads/master | python/mach/mach/mixin/logging.py | 131 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, unicode_literals
import logging
class LoggingMixin(object):
"""Provides functionality to control logging."""
def populate_logger(self, name=None):
"""Ensure this class instance has a logger associated with it.
Users of this mixin that call log() will need to ensure self._logger is
a logging.Logger instance before they call log(). This function ensures
self._logger is defined by populating it if it isn't.
"""
if hasattr(self, '_logger'):
return
if name is None:
name = '.'.join([self.__module__, self.__class__.__name__])
self._logger = logging.getLogger(name)
def log(self, level, action, params, format_str):
"""Log a structured log event.
A structured log event consists of a logging level, a string action, a
dictionary of attributes, and a formatting string.
The logging level is one of the logging.* constants, such as
logging.INFO.
The action string is essentially the enumeration of the event. Each
different type of logged event should have a different action.
The params dict is the metadata constituting the logged event.
The formatting string is used to convert the structured message back to
human-readable format. Conversion back to human-readable form is
performed by calling format() on this string, feeding into it the dict
of attributes constituting the event.
Example Usage
-------------
self.log(logging.DEBUG, 'login', {'username': 'johndoe'},
'User login: {username}')
"""
self._logger.log(level, format_str,
extra={'action': action, 'params': params})
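# The log() docstring above explains that the human-readable form is rebuilt by
# calling format() on the formatting string with the params dict. A
# hypothetical consumer of the 'action'/'params' extras (the formatter name is
# an assumption, not part of mach) might look like:
#
#     class StructuredFormatter(logging.Formatter):
#         def format(self, record):
#             # record.getMessage() is the format_str; record.params carries
#             # the structured metadata attached via extra= in log().
#             return record.getMessage().format(**record.params)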
|
sparkslabs/kamaelia_ | refs/heads/master | Sketches/AB/backup/AB-Dev/Kamaelia/Apps/Whiteboard/SmartBoard.py | 3 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
#
import Axon
from Axon.Ipc import producerFinished, shutdownMicroprocess
import time
try:
import usb.core
import usb.util
except Exception, e:
print("SMART Board controls require PyUSB")
class SmartBoard(Axon.Component.component):
colours = { "black" : (0,0,0),
"red" : (192,0,0),
"green" : (0,192,0),
"blue": (0,0,255),
}
Outboxes = { "colour" : "colour selected",
"erase" : "eraser selected",
"toTicker" : "data to ticker",
}
def __init__(self):
super(SmartBoard,self).__init__()
def main(self):
yield 1
if(1): #try
dev = usb.core.find(idVendor=0x0b8c,idProduct=0x0001)
interface = dev.get_interface_altsetting()
if dev.is_kernel_driver_active(interface.bInterfaceNumber):
print("Detaching kernel driver")
dev.detach_kernel_driver(interface.bInterfaceNumber)
dev.set_configuration(1)
for cfg in dev:
print("Config")
print cfg.bConfigurationValue
for i in cfg:
print("Interface")
print i.bInterfaceNumber
for e in i:
print ("Endpoint")
print e.bEndpointAddress
print usb.util.endpoint_direction(e.bEndpointAddress)
# get an endpoint instance
epin = usb.util.find_descriptor(
dev.get_interface_altsetting(), # first interface
# match the first IN endpoint
custom_match = \
lambda e: \
usb.util.endpoint_direction(e.bEndpointAddress) == \
usb.util.ENDPOINT_IN
)
assert epin is not None
# get an endpoint instance
epout = usb.util.find_descriptor(
dev.get_interface_altsetting(), # first interface
# match the first OUT endpoint
custom_match = \
lambda e: \
usb.util.endpoint_direction(e.bEndpointAddress) == \
usb.util.ENDPOINT_OUT
)
assert epout is not None
print(epin)
print(epout)
# write the data
epout.write([0xd2,0x02,0x04,0x00,0xd4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00])
epout.write([0xd2,0x02,0x10,0x10,0xd0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00])
epout.write([0xd2,0x02,0x04,0x00,0xd4,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00])
epout.write([0xc3,0x80,0x01,0x00,0x03,0x41,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00])
print(epin.read(32))
epout.write([0xd2,0x02,0x80,0x80,0xd0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00])
print(epin.read(32))
print(epin.read(32))
epout.write([0xf0,0x0e,0xfe,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00])
print(epin.read(32))
epout.write([0xf0,0x0e,0xfe,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00])
print(epin.read(32))
epout.write([0xf0,0x0e,0xfe,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00])
print(epin.read(32))
if dev is None:
self.send(chr(0) + "CLRTKR", "toTicker")
self.send("SMART Board not detected", "toTicker")
else:
self.send(chr(0) + "CLRTKR", "toTicker")
self.send("SMART Board initialised", "toTicker")
datain = [0xe1,0x05,0x10,0x00] # Example
recval = datain[2]
if (recval == 0x00):
# No tool selected
self.send(self.colours["black"],"colour")
self.send(chr(0) + "CLRTKR", "toTicker")
self.send("SMART: No tools selected, assuming black pen", "toTicker")
elif (recval == 0x01):
# Blue pen
self.send(self.colours["blue"],"colour")
self.send(chr(0) + "CLRTKR", "toTicker")
self.send("SMART: Blue pen selected", "toTicker")
elif (recval == 0x02):
# Green pen
self.send(self.colours["green"],"colour")
self.send(chr(0) + "CLRTKR", "toTicker")
self.send("SMART: Green pen selected", "toTicker")
elif (recval == 0x04):
# Eraser
self.send("erase","erase")
self.send(chr(0) + "CLRTKR", "toTicker")
self.send("SMART: Eraser selected", "toTicker")
elif (recval == 0x08):
# Red pen
self.send(self.colours["red"],"colour")
self.send(chr(0) + "CLRTKR", "toTicker")
self.send("SMART: Red pen selected", "toTicker")
elif (recval == 0x10):
# Black pen
self.send(self.colours["black"],"colour")
self.send(chr(0) + "CLRTKR", "toTicker")
self.send("SMART: Black pen selected", "toTicker")
#except Exception, e:
# pass
|
paulfitz/phantomjs | refs/heads/master | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/workspace.py | 189 | # Copyright (c) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# A home for file logic which should sit above FileSystem, but
# below more complicated objects.
import logging
import zipfile
from webkitpy.common.system.executive import ScriptError
_log = logging.getLogger(__name__)
class Workspace(object):
def __init__(self, filesystem, executive):
self._filesystem = filesystem
self._executive = executive # FIXME: Remove if create_zip is moved to python.
def find_unused_filename(self, directory, name, extension, search_limit=100):
for count in range(search_limit):
if count:
target_name = "%s-%s.%s" % (name, count, extension)
else:
target_name = "%s.%s" % (name, extension)
target_path = self._filesystem.join(directory, target_name)
if not self._filesystem.exists(target_path):
return target_path
# If we can't find an unused name in search_limit tries, just give up.
return None
def create_zip(self, zip_path, source_path, zip_class=zipfile.ZipFile):
# It's possible to create zips with Python:
# zip_file = ZipFile(zip_path, 'w')
# for root, dirs, files in os.walk(source_path):
# for path in files:
# absolute_path = os.path.join(root, path)
# zip_file.write(os.path.relpath(path, source_path))
# However, getting the paths, encoding and compression correct could be non-trivial.
# So, for now we depend on the environment having "zip" installed (likely fails on Win32)
try:
self._executive.run_command(['zip', '-9', '-r', zip_path, '.'], cwd=source_path)
except ScriptError, e:
_log.error("Workspace.create_zip failed in %s:\n%s" % (source_path, e.message_with_output()))
return None
return zip_class(zip_path)
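# The comment block inside create_zip sketches how the archive could be built
# in pure Python instead of shelling out to "zip". A hypothetical stand-alone
# version of that idea (not used by Workspace, and ignoring the encoding and
# compression caveats noted above) might look like:
#
#   import os
#   from zipfile import ZipFile, ZIP_DEFLATED
#
#   def create_zip_pure_python(zip_path, source_path):
#       with ZipFile(zip_path, 'w', ZIP_DEFLATED) as zip_file:
#           for root, dirs, files in os.walk(source_path):
#               for name in files:
#                   absolute_path = os.path.join(root, name)
#                   zip_file.write(absolute_path,
#                                  os.path.relpath(absolute_path, source_path))
#       return ZipFile(zip_path)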
|
daniarherikurniawan/Chameleon512 | refs/heads/master | src/contrib/hod/hodlib/GridServices/__init__.py | 182 | #Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
from mapred import MapReduce, MapReduceExternal
from hdfs import Hdfs, HdfsExternal
|
hanzorama/magenta | refs/heads/master | magenta/music/music21_to_note_sequence_io_test.py | 1 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for consistency between PrettyMusic21 and NoteSequence proto."""
import os
# internal imports
import music21
import tensorflow as tf
from magenta.music import pretty_music21
from magenta.music.music21_to_note_sequence_io import _MUSIC21_TO_NOTE_SEQUENCE_MODE
from magenta.music.music21_to_note_sequence_io import _PRETTY_MUSIC21_TO_NOTE_SEQUENCE_KEY_NAME
from magenta.music.music21_to_note_sequence_io import music21_to_sequence_proto
from magenta.music.music21_to_note_sequence_io import pretty_music21_to_sequence_proto
from magenta.protobuf import music_pb2
class Music21ScoretoNoteSequenceTest(tf.test.TestCase):
def setUp(self):
"""Get the file path to the test MusicXML file."""
fname = 'bach-one_phrase-4_voices.xml'
self.source_fpath = os.path.join(tf.resource_loader.get_data_files_path(),
'testdata', fname)
def testMusic21ToSequenceFromMusicXML(self):
"""Test consistency between pretty_music21 and NoteSequence store of XML."""
parser = music21.musicxml.xmlToM21.MusicXMLImporter()
music21_score = parser.scoreFromFile(self.source_fpath)
simple_score = pretty_music21.PrettyMusic21(
music21_score, os.path.basename(self.source_fpath))
sequence_proto = music21_to_sequence_proto(
music21_score, os.path.basename(self.source_fpath))
self.CompareNoteSequenceAndMusic21Score(sequence_proto, simple_score)
def testPrettyMusic21ToSequenceFromMusicXML(self):
"""Test consistency between pretty_music21 and NoteSequence store of XML."""
parser = music21.musicxml.xmlToM21.MusicXMLImporter()
music21_score = parser.scoreFromFile(self.source_fpath)
simple_score = pretty_music21.PrettyMusic21(
music21_score, os.path.basename(self.source_fpath))
sequence_proto = pretty_music21_to_sequence_proto(
simple_score, os.path.basename(self.source_fpath))
self.CompareNoteSequenceAndMusic21Score(sequence_proto, simple_score)
def testPrettyMusic21ToSequenceFromMusicXMLWithSourceFnamePassedToFormer(
self):
"""Test consistency between pretty_music21 and NoteSequence store of XML."""
parser = music21.musicxml.xmlToM21.MusicXMLImporter()
music21_score = parser.scoreFromFile(self.source_fpath)
simple_score = pretty_music21.PrettyMusic21(
music21_score, os.path.basename(self.source_fpath))
sequence_proto = pretty_music21_to_sequence_proto(simple_score)
self.assertEqual(sequence_proto.filename, simple_score.filename)
def CompareNoteSequenceAndMusic21Score(self, sequence_proto, score):
"""Compares a NoteSequence proto to a PrettyMusic21 object.
Args:
sequence_proto: A tensorflow.magenta.Sequence proto.
score: A pretty_music21.PrettyMusic21 object.
"""
# Test score info.
self.assertEqual(sequence_proto.source_info.parser,
music_pb2.NoteSequence.SourceInfo.MUSIC21)
self.assertEqual(sequence_proto.filename, score.filename)
# Test time signature changes.
self.assertEqual(
len(score.time_signature_changes), len(sequence_proto.time_signatures))
for score_time, sequence_time in zip(score.time_signature_changes,
sequence_proto.time_signatures):
self.assertEqual(score_time.numerator, sequence_time.numerator)
self.assertEqual(score_time.denominator, sequence_time.denominator)
self.assertAlmostEqual(score_time.time, sequence_time.time)
# Test key signature changes.
self.assertEqual(
len(score.key_signature_changes), len(sequence_proto.key_signatures))
for score_key, sequence_key in zip(score.key_signature_changes,
sequence_proto.key_signatures):
key_pitch_idx = _PRETTY_MUSIC21_TO_NOTE_SEQUENCE_KEY_NAME.values().index(
sequence_key.key)
self.assertEqual(
score_key.key.upper(),
_PRETTY_MUSIC21_TO_NOTE_SEQUENCE_KEY_NAME.keys()[key_pitch_idx])
key_mode_idx = _MUSIC21_TO_NOTE_SEQUENCE_MODE.values().index(
sequence_key.mode)
self.assertEqual(score_key.mode,
_MUSIC21_TO_NOTE_SEQUENCE_MODE.keys()[key_mode_idx])
self.assertAlmostEqual(score_key.time, sequence_key.time)
# Test tempos.
self.assertEqual(len(score.tempo_changes), len(sequence_proto.tempos))
for score_tempo, sequence_tempo in zip(score.tempo_changes,
sequence_proto.tempos):
self.assertAlmostEqual(score_tempo.qpm, sequence_tempo.qpm)
self.assertAlmostEqual(score_tempo.time, sequence_tempo.time)
# Test part info.
self.assertEqual(len(score.part_infos), len(sequence_proto.part_infos))
for score_part_infos, sequence_part_infos in zip(
score.part_infos, sequence_proto.part_infos):
self.assertEqual(score_part_infos.index, sequence_part_infos.part)
self.assertEqual(score_part_infos.name, sequence_part_infos.name)
# Test parts and notes.
for score_note, sequence_note in zip(score.sorted_notes,
sequence_proto.notes):
self.assertAlmostEqual(score_note.pitch_midi, sequence_note.pitch)
self.assertAlmostEqual(score_note.start_time, sequence_note.start_time)
self.assertAlmostEqual(score_note.end_time, sequence_note.end_time)
self.assertEqual(score_note.part_index, sequence_note.part)
if __name__ == '__main__':
tf.test.main()
|
gary-pickens/HouseMonitor | refs/heads/master | housemonitor/lib/moduleloader.py | 1 | '''
Created on Sep 17, 2012
@author: Gary
'''
import imp
import os
import sys
from sets import Set
import re
import inspect
from housemonitor.lib.base import Base
from housemonitor.configuration.xmlconfiguration import XmlConfiguration
from housemonitor.lib.getdatetime import GetDateTime
class ModuleLoader( Base ):
'''
classdocs
'''
    # TODO: put directories in configuration file
# A list of directories to search through for module that will be
# instantiated.
directories = ['steps']
# A list of instantiated classes that have been loaded
instances = []
def __init__( self ):
'''
Constructor
'''
super( ModuleLoader, self ).__init__()
@property
def logger_name( self ):
""" Set the logger level. This needs to be added to house_monitoring_logging.conf"""
return 'lib'
def file_name( self ):
return __name__
def load( self, data ):
'''
        Walk through the search directories and load all the *py* modules found there
'''
        # use a set to ensure that no module is loaded twice
names = Set()
for directory in self.directories:
for root, dirs, files in os.walk( directory ):
for filename in files:
# Get just the file name - no extension
name, ext = os.path.splitext( filename )
if ( ext.lower() == ".py" and
not name.lower().endswith( "_test" ) ):
# and to names which will prevent doubles
names.add( name )
self.logger.error( "name = {} ext = {}".format( name, ext ) )
for name in names:
f = None
filename = None
description = None
package = None
continue_processing = True
try:
f, filename, description = imp.find_module( name, \
self.directories )
except ImportError as er:
self.logger.error( "error finding {}: error is {}" \
.format( name, er ) )
continue_processing = False
# Check if we failed. If so then jump to the beginning
# and start next file
if ( not continue_processing ):
continue
try:
package = imp.load_module( name, f, filename, description )
except ImportError as ex:
self.logger.error( "error importing {}: error is {}".\
format( name, ex ) )
continue_processing = False
finally:
if f:
self.close_file( f )
# Check if we failed. If so then jump to the beginning
# and start next file
if ( not continue_processing ):
continue
try:
# pprint(inspect.getmembers(package))
# pprint(inspect.getsourcelines(package.instantuate_me))
instance = package.instantuate_me( data )
if ( None != instance ):
self.instances.append( instance )
self.logger.info( "Class {} instantiated".format( name ) )
except AttributeError as err:
self.logger.error( \
"The function \"instantiate_me\" was not found in {}: {}"\
.format( name, err ) )
def close_file( self, f ): # pragma: no cover
# Module added for unit test
f.close() # pragma: no cover
def get_class_name( self, package ): # pragma: no cover
# Module added for unit test
return package.instantiate_me() # pragma: no cover
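# For reference, load() expects every module found under the search
# directories to expose a module-level factory named "instantuate_me" that
# receives the shared data object and returns an instance (or None). A
# hypothetical step module satisfying that contract (all names here are
# illustrative) could look like:
#
#     # steps/example_step.py
#     class ExampleStep( object ):
#         def __init__( self, data ):
#             self.data = data
#
#     def instantuate_me( data ):
#         return ExampleStep( data )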
|
dcroc16/skunk_works | refs/heads/master | google_appengine/lib/django-1.3/tests/regressiontests/backends/models.py | 55 | from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db import connection
class Square(models.Model):
root = models.IntegerField()
square = models.PositiveIntegerField()
def __unicode__(self):
return "%s ** 2 == %s" % (self.root, self.square)
class Person(models.Model):
first_name = models.CharField(max_length=20)
last_name = models.CharField(max_length=20)
def __unicode__(self):
return u'%s %s' % (self.first_name, self.last_name)
class SchoolClass(models.Model):
year = models.PositiveIntegerField()
day = models.CharField(max_length=9, blank=True)
last_updated = models.DateTimeField()
# Unfortunately, the following model breaks MySQL hard.
# Until #13711 is fixed, this test can't be run under MySQL.
if connection.features.supports_long_model_names:
class VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ(models.Model):
class Meta:
# We need to use a short actual table name or
# we hit issue #8548 which we're not testing!
verbose_name = 'model_with_long_table_name'
primary_key_is_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz = models.AutoField(primary_key=True)
charfield_is_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz = models.CharField(max_length=100)
m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz = models.ManyToManyField(Person,blank=True)
class Tag(models.Model):
name = models.CharField(max_length=30)
content_type = models.ForeignKey(ContentType, related_name='backend_tags')
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
class Post(models.Model):
name = models.CharField(max_length=30)
text = models.TextField()
tags = generic.GenericRelation('Tag')
class Meta:
db_table = 'CaseSensitive_Post'
class Reporter(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
def __unicode__(self):
return u"%s %s" % (self.first_name, self.last_name)
class Article(models.Model):
headline = models.CharField(max_length=100)
pub_date = models.DateField()
reporter = models.ForeignKey(Reporter)
def __unicode__(self):
return self.headline
|
jsoref/django | refs/heads/master | django/contrib/gis/geos/linestring.py | 8 | from django.contrib.gis.geos import prototypes as capi
from django.contrib.gis.geos.coordseq import GEOSCoordSeq
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.geometry import (
GEOSGeometry, ProjectInterpolateMixin,
)
from django.contrib.gis.geos.point import Point
from django.contrib.gis.shortcuts import numpy
from django.utils.six.moves import range
class LineString(ProjectInterpolateMixin, GEOSGeometry):
_init_func = capi.create_linestring
_minlength = 2
has_cs = True
def __init__(self, *args, **kwargs):
"""
Initializes on the given sequence -- may take lists, tuples, NumPy arrays
of X,Y pairs, or Point objects. If Point objects are used, ownership is
_not_ transferred to the LineString object.
Examples:
ls = LineString((1, 1), (2, 2))
ls = LineString([(1, 1), (2, 2)])
ls = LineString(array([(1, 1), (2, 2)]))
ls = LineString(Point(1, 1), Point(2, 2))
"""
# If only one argument provided, set the coords array appropriately
if len(args) == 1:
coords = args[0]
else:
coords = args
if not (isinstance(coords, (tuple, list)) or numpy and isinstance(coords, numpy.ndarray)):
raise TypeError('Invalid initialization input for LineStrings.')
ncoords = len(coords)
if ncoords < self._minlength:
raise ValueError(
'%s requires at least %d points, got %s.' % (
self.__class__.__name__,
self._minlength,
ncoords,
)
)
if isinstance(coords, (tuple, list)):
# Getting the number of coords and the number of dimensions -- which
# must stay the same, e.g., no LineString((1, 2), (1, 2, 3)).
ndim = None
# Incrementing through each of the coordinates and verifying
for coord in coords:
if not isinstance(coord, (tuple, list, Point)):
raise TypeError('Each coordinate should be a sequence (list or tuple)')
if ndim is None:
ndim = len(coord)
self._checkdim(ndim)
elif len(coord) != ndim:
raise TypeError('Dimension mismatch.')
numpy_coords = False
else:
shape = coords.shape # Using numpy's shape.
if len(shape) != 2:
raise TypeError('Too many dimensions.')
self._checkdim(shape[1])
ndim = shape[1]
numpy_coords = True
# Creating a coordinate sequence object because it is easier to
# set the points using GEOSCoordSeq.__setitem__().
cs = GEOSCoordSeq(capi.create_cs(ncoords, ndim), z=bool(ndim == 3))
for i in range(ncoords):
if numpy_coords:
cs[i] = coords[i, :]
elif isinstance(coords[i], Point):
cs[i] = coords[i].tuple
else:
cs[i] = coords[i]
# If SRID was passed in with the keyword arguments
srid = kwargs.get('srid')
# Calling the base geometry initialization with the returned pointer
# from the function.
super(LineString, self).__init__(self._init_func(cs.ptr), srid=srid)
def __iter__(self):
"Allows iteration over this LineString."
for i in range(len(self)):
yield self[i]
def __len__(self):
"Returns the number of points in this LineString."
return len(self._cs)
def _get_single_external(self, index):
return self._cs[index]
_get_single_internal = _get_single_external
def _set_list(self, length, items):
ndim = self._cs.dims
hasz = self._cs.hasz # I don't understand why these are different
# create a new coordinate sequence and populate accordingly
cs = GEOSCoordSeq(capi.create_cs(length, ndim), z=hasz)
for i, c in enumerate(items):
cs[i] = c
ptr = self._init_func(cs.ptr)
if ptr:
capi.destroy_geom(self.ptr)
self.ptr = ptr
self._post_init(self.srid)
else:
# can this happen?
raise GEOSException('Geometry resulting from slice deletion was invalid.')
def _set_single(self, index, value):
self._checkindex(index)
self._cs[index] = value
def _checkdim(self, dim):
if dim not in (2, 3):
raise TypeError('Dimension mismatch.')
# #### Sequence Properties ####
@property
def tuple(self):
"Returns a tuple version of the geometry from the coordinate sequence."
return self._cs.tuple
coords = tuple
def _listarr(self, func):
"""
Internal routine that returns a sequence (list) corresponding with
the given function. Will return a numpy array if possible.
"""
lst = [func(i) for i in range(len(self))]
if numpy:
return numpy.array(lst) # ARRRR!
else:
return lst
@property
def array(self):
"Returns a numpy array for the LineString."
return self._listarr(self._cs.__getitem__)
@property
def merged(self):
"Returns the line merge of this LineString."
return self._topology(capi.geos_linemerge(self.ptr))
@property
def x(self):
"Returns a list or numpy array of the X variable."
return self._listarr(self._cs.getX)
@property
def y(self):
"Returns a list or numpy array of the Y variable."
return self._listarr(self._cs.getY)
@property
def z(self):
"Returns a list or numpy array of the Z variable."
if not self.hasz:
return None
else:
return self._listarr(self._cs.getZ)
# LinearRings are LineStrings used within Polygons.
class LinearRing(LineString):
_minlength = 4
_init_func = capi.create_linearring
|
SnappleCap/oh-mainline | refs/heads/master | vendor/packages/kombu/funtests/tests/test_couchdb.py | 22 | from nose import SkipTest
from funtests import transport
class test_couchdb(transport.TransportCase):
transport = 'couchdb'
prefix = 'couchdb'
event_loop_max = 100
def before_connect(self):
try:
import couchdb # noqa
except ImportError:
raise SkipTest('couchdb not installed')
def after_connect(self, connection):
connection.channel().client
|
mariecpereira/IA369Z | refs/heads/master | deliver/ia870/iagrain.py | 2 | # -*- encoding: utf-8 -*-
# Module iagrain
import numpy as np
from string import upper
def iagrain(fr, f, measurement, option="image"):
measurement = upper(measurement)
option = upper(option)
    if fr.ndim == 1: fr = fr[np.newaxis,:]
n = fr.max()
if option == 'DATA': y = np.empty((n,),np.float)
else : y = np.zeros(fr.shape)
if measurement == 'MAX':
for i in range(1,n+1):
val = f[fr==i].max()
if option == 'DATA': y[i-1] = val
else : y[fr==i] = val
elif measurement == 'MIN':
for i in range(1,n+1):
val = f[fr==i].min()
if option == 'DATA': y[i-1] = val
else : y[fr==i] = val
elif measurement == 'SUM':
for i in range(1,n+1):
val = f[fr==i].sum()
if option == 'DATA': y[i-1] = val
else : y[fr==i] = val
elif measurement == 'MEAN':
for i in range(1,n+1):
val = f[fr==i].mean()
if option == 'DATA': y[i-1] = val
else : y[fr==i] = val
elif measurement == 'STD':
for i in range(1,n+1):
            v = f[fr==i]
if len(v) < 2: val = 0
else : val = v.std()
if option == 'DATA': y[i-1] = val
else : y[fr==i] = val
elif measurement == 'STD1':
print "'STD1' is not implemented"
else:
print "Measurement should be 'MAX', 'MIN', 'MEAN', 'SUM', 'STD', 'STD1'."
return y
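# Illustrative usage sketch (not part of the original module): given a label
# image fr and a gray-scale image f, iagrain reports a per-region statistic
# either as one value per label ('data') or painted back onto each region
# ('image').
#
#     fr = np.array([[1, 1, 2],
#                    [1, 2, 2]])
#     f  = np.array([[3, 5, 7],
#                    [4, 8, 9]])
#     iagrain(fr, f, 'max', 'data')    # -> array([ 5.,  9.])
#     iagrain(fr, f, 'mean', 'image')  # each pixel replaced by its region mean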
|
showgood/YCM_windows | refs/heads/master | python/ycm/completers/all/identifier_completer.py | 4 | #!/usr/bin/env python
#
# Copyright (C) 2011, 2012 Google Inc.
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
import os
import logging
import ycm_core
from collections import defaultdict
from ycm.completers.general_completer import GeneralCompleter
# from ycm.completers.general import syntax_parse
from ycm import utils
from ycm.utils import ToUtf8IfNeeded
from ycm.server import responses
MAX_IDENTIFIER_COMPLETIONS_RETURNED = 10
SYNTAX_FILENAME = 'YCM_PLACEHOLDER_FOR_SYNTAX'
class IdentifierCompleter( GeneralCompleter ):
def __init__( self, user_options ):
super( IdentifierCompleter, self ).__init__( user_options )
self._completer = ycm_core.IdentifierCompleter()
self._tags_file_last_mtime = defaultdict( int )
self._logger = logging.getLogger( __name__ )
def ShouldUseNow( self, request_data ):
return self.QueryLengthAboveMinThreshold( request_data )
def ComputeCandidates( self, request_data ):
if not self.ShouldUseNow( request_data ):
return []
completions = self._completer.CandidatesForQueryAndType(
ToUtf8IfNeeded( utils.SanitizeQuery( request_data[ 'query' ] ) ),
ToUtf8IfNeeded( request_data[ 'filetypes' ][ 0 ] ) )
completions = completions[ : MAX_IDENTIFIER_COMPLETIONS_RETURNED ]
completions = _RemoveSmallCandidates(
completions, self.user_options[ 'min_num_identifier_candidate_chars' ] )
return [ responses.BuildCompletionData( x ) for x in completions ]
def AddIdentifier( self, identifier, request_data ):
filetype = request_data[ 'filetypes' ][ 0 ]
filepath = request_data[ 'filepath' ]
if not filetype or not filepath or not identifier:
return
vector = ycm_core.StringVec()
vector.append( ToUtf8IfNeeded( identifier ) )
self._logger.info( 'Adding ONE buffer identifier for file: %s', filepath )
self._completer.AddIdentifiersToDatabase( vector,
ToUtf8IfNeeded( filetype ),
ToUtf8IfNeeded( filepath ) )
def AddPreviousIdentifier( self, request_data ):
self.AddIdentifier(
_PreviousIdentifier(
self.user_options[ 'min_num_of_chars_for_completion' ],
request_data ),
request_data )
def AddIdentifierUnderCursor( self, request_data ):
cursor_identifier = _GetCursorIdentifier( request_data )
if not cursor_identifier:
return
self.AddIdentifier( cursor_identifier, request_data )
def AddBufferIdentifiers( self, request_data ):
filetype = request_data[ 'filetypes' ][ 0 ]
filepath = request_data[ 'filepath' ]
collect_from_comments_and_strings = bool( self.user_options[
'collect_identifiers_from_comments_and_strings' ] )
if not filetype or not filepath:
return
text = request_data[ 'file_data' ][ filepath ][ 'contents' ]
self._logger.info( 'Adding buffer identifiers for file: %s', filepath )
self._completer.AddIdentifiersToDatabaseFromBuffer(
ToUtf8IfNeeded( text ),
ToUtf8IfNeeded( filetype ),
ToUtf8IfNeeded( filepath ),
collect_from_comments_and_strings )
def AddIdentifiersFromTagFiles( self, tag_files ):
absolute_paths_to_tag_files = ycm_core.StringVec()
for tag_file in tag_files:
try:
current_mtime = os.path.getmtime( tag_file )
except:
continue
last_mtime = self._tags_file_last_mtime[ tag_file ]
# We don't want to repeatedly process the same file over and over; we only
# process if it's changed since the last time we looked at it
if current_mtime <= last_mtime:
continue
self._tags_file_last_mtime[ tag_file ] = current_mtime
absolute_paths_to_tag_files.append( ToUtf8IfNeeded( tag_file ) )
if not absolute_paths_to_tag_files:
return
self._completer.AddIdentifiersToDatabaseFromTagFiles(
absolute_paths_to_tag_files )
def AddIdentifiersFromSyntax( self, keyword_list, filetypes ):
keyword_vector = ycm_core.StringVec()
for keyword in keyword_list:
keyword_vector.append( ToUtf8IfNeeded( keyword ) )
filepath = SYNTAX_FILENAME + filetypes[ 0 ]
self._completer.AddIdentifiersToDatabase( keyword_vector,
ToUtf8IfNeeded( filetypes[ 0 ] ),
ToUtf8IfNeeded( filepath ) )
def OnFileReadyToParse( self, request_data ):
self.AddBufferIdentifiers( request_data )
if 'tag_files' in request_data:
self.AddIdentifiersFromTagFiles( request_data[ 'tag_files' ] )
if 'syntax_keywords' in request_data:
self.AddIdentifiersFromSyntax( request_data[ 'syntax_keywords' ],
request_data[ 'filetypes' ] )
def OnInsertLeave( self, request_data ):
self.AddIdentifierUnderCursor( request_data )
def OnCurrentIdentifierFinished( self, request_data ):
self.AddPreviousIdentifier( request_data )
def _PreviousIdentifier( min_num_completion_start_chars, request_data ):
line_num = request_data[ 'line_num' ]
column_num = request_data[ 'column_num' ]
filepath = request_data[ 'filepath' ]
contents_per_line = (
request_data[ 'file_data' ][ filepath ][ 'contents' ].split( '\n' ) )
line = contents_per_line[ line_num ]
end_column = column_num
while end_column > 0 and not utils.IsIdentifierChar( line[ end_column - 1 ] ):
end_column -= 1
# Look at the previous line if we reached the end of the current one
if end_column == 0:
try:
line = contents_per_line[ line_num - 1 ]
except:
return ""
end_column = len( line )
while end_column > 0 and not utils.IsIdentifierChar(
line[ end_column - 1 ] ):
end_column -= 1
start_column = end_column
while start_column > 0 and utils.IsIdentifierChar( line[ start_column - 1 ] ):
start_column -= 1
if end_column - start_column < min_num_completion_start_chars:
return ""
return line[ start_column : end_column ]
def _RemoveSmallCandidates( candidates, min_num_candidate_size_chars ):
if min_num_candidate_size_chars == 0:
return candidates
return [ x for x in candidates if len( x ) >= min_num_candidate_size_chars ]
# This is meant to behave like 'expand("<cword")' in Vim, thus starting at the
# cursor column and returning the "cursor word". If the cursor is not on a valid
# character, it searches forward until a valid identifier is found.
def _GetCursorIdentifier( request_data ):
def FindFirstValidChar( line, column ):
current_column = column
while not utils.IsIdentifierChar( line[ current_column ] ):
current_column += 1
return current_column
def FindIdentifierStart( line, valid_char_column ):
identifier_start = valid_char_column
while identifier_start > 0 and utils.IsIdentifierChar( line[
identifier_start - 1 ] ):
identifier_start -= 1
return identifier_start
def FindIdentifierEnd( line, valid_char_column ):
identifier_end = valid_char_column
while identifier_end < len( line ) - 1 and utils.IsIdentifierChar( line[
identifier_end + 1 ] ):
identifier_end += 1
return identifier_end + 1
column_num = request_data[ 'column_num' ]
line = request_data[ 'line_value' ]
try:
valid_char_column = FindFirstValidChar( line, column_num )
return line[ FindIdentifierStart( line, valid_char_column ) :
FindIdentifierEnd( line, valid_char_column ) ]
except:
return ''
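# Illustrative note (added comment, not part of the original module): if the
# cursor in "foo = bar_baz" sits on the "=" or the space after it,
# _GetCursorIdentifier searches forward to the next identifier character and
# returns "bar_baz", mirroring the expand("<cword>") behaviour described in the
# comment above.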
|
googledatalab/pydatalab | refs/heads/master | google/datalab/utils/facets/generic_feature_statistics_generator.py | 4 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code for generating the feature_statistics proto from generic data.
The proto is used as input for the Overview visualization.
"""
import warnings
from .base_generic_feature_statistics_generator import BaseGenericFeatureStatisticsGenerator
from . import feature_statistics_pb2 as fs
class GenericFeatureStatisticsGenerator(BaseGenericFeatureStatisticsGenerator):
"""Generator of stats proto from generic data."""
def __init__(self):
BaseGenericFeatureStatisticsGenerator.__init__(
self, fs.FeatureNameStatistics, fs.DatasetFeatureStatisticsList,
fs.Histogram)
def ProtoFromDataFrames(dataframes):
"""Creates a feature statistics proto from a set of pandas dataframes.
Args:
    dataframes: A list of dicts describing the tables for each dataset in the
        proto. Each entry contains a 'table' field holding the dataframe of
        the data and a 'name' field identifying the dataset in the proto.
Returns:
The feature statistics proto for the provided tables.
"""
warnings.warn(
'Use GenericFeatureStatisticsGenerator class method instead.',
DeprecationWarning)
return GenericFeatureStatisticsGenerator().ProtoFromDataFrames(dataframes)
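# A minimal usage sketch (illustrative; the column names are hypothetical):
#
#   import pandas as pd
#
#   train_df = pd.DataFrame({'age': [22, 35, 58], 'income': [10.0, 20.5, 30.1]})
#   proto = GenericFeatureStatisticsGenerator().ProtoFromDataFrames(
#       [{'name': 'train', 'table': train_df}])
#   # proto is a DatasetFeatureStatisticsList message that can feed the
#   # Facets Overview visualization.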
|
turbomanage/training-data-analyst | refs/heads/master | courses/machine_learning/deepdive2/structured/solutions/serving/application/lib/werkzeug/useragents.py | 7 | # -*- coding: utf-8 -*-
"""
werkzeug.useragents
~~~~~~~~~~~~~~~~~~~
This module provides a helper to inspect user agent strings. This module
is far from complete but should work for most of the currently available
browsers.
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
import re
class UserAgentParser(object):
"""A simple user agent parser. Used by the `UserAgent`."""
platforms = (
("cros", "chromeos"),
("iphone|ios", "iphone"),
("ipad", "ipad"),
(r"darwin|mac|os\s*x", "macos"),
("win", "windows"),
(r"android", "android"),
("netbsd", "netbsd"),
("openbsd", "openbsd"),
("freebsd", "freebsd"),
("dragonfly", "dragonflybsd"),
("(sun|i86)os", "solaris"),
(r"x11|lin(\b|ux)?", "linux"),
(r"nintendo\s+wii", "wii"),
("irix", "irix"),
("hp-?ux", "hpux"),
("aix", "aix"),
("sco|unix_sv", "sco"),
("bsd", "bsd"),
("amiga", "amiga"),
("blackberry|playbook", "blackberry"),
("symbian", "symbian"),
)
browsers = (
("googlebot", "google"),
("msnbot", "msn"),
("yahoo", "yahoo"),
("ask jeeves", "ask"),
(r"aol|america\s+online\s+browser", "aol"),
("opera", "opera"),
("edge", "edge"),
("chrome|crios", "chrome"),
("seamonkey", "seamonkey"),
("firefox|firebird|phoenix|iceweasel", "firefox"),
("galeon", "galeon"),
("safari|version", "safari"),
("webkit", "webkit"),
("camino", "camino"),
("konqueror", "konqueror"),
("k-meleon", "kmeleon"),
("netscape", "netscape"),
(r"msie|microsoft\s+internet\s+explorer|trident/.+? rv:", "msie"),
("lynx", "lynx"),
("links", "links"),
("Baiduspider", "baidu"),
("bingbot", "bing"),
("mozilla", "mozilla"),
)
_browser_version_re = r"(?:%s)[/\sa-z(]*(\d+[.\da-z]+)?"
_language_re = re.compile(
r"(?:;\s*|\s+)(\b\w{2}\b(?:-\b\w{2}\b)?)\s*;|"
r"(?:\(|\[|;)\s*(\b\w{2}\b(?:-\b\w{2}\b)?)\s*(?:\]|\)|;)"
)
def __init__(self):
self.platforms = [(b, re.compile(a, re.I)) for a, b in self.platforms]
self.browsers = [
(b, re.compile(self._browser_version_re % a, re.I))
for a, b in self.browsers
]
def __call__(self, user_agent):
for platform, regex in self.platforms: # noqa: B007
match = regex.search(user_agent)
if match is not None:
break
else:
platform = None
for browser, regex in self.browsers: # noqa: B007
match = regex.search(user_agent)
if match is not None:
version = match.group(1)
break
else:
browser = version = None
match = self._language_re.search(user_agent)
if match is not None:
language = match.group(1) or match.group(2)
else:
language = None
return platform, browser, version, language
class UserAgent(object):
"""Represents a user agent. Pass it a WSGI environment or a user agent
string and you can inspect some of the details from the user agent
string via the attributes. The following attributes exist:
.. attribute:: string
the raw user agent string
.. attribute:: platform
the browser platform. The following platforms are currently
recognized:
- `aix`
- `amiga`
- `android`
- `blackberry`
- `bsd`
- `chromeos`
- `dragonflybsd`
- `freebsd`
- `hpux`
- `ipad`
- `iphone`
- `irix`
- `linux`
- `macos`
- `netbsd`
- `openbsd`
- `sco`
- `solaris`
- `symbian`
- `wii`
- `windows`
.. attribute:: browser
the name of the browser. The following browsers are currently
recognized:
- `aol` *
- `ask` *
- `baidu` *
- `bing` *
- `camino`
- `chrome`
- `edge`
- `firefox`
- `galeon`
- `google` *
- `kmeleon`
- `konqueror`
- `links`
- `lynx`
- `mozilla`
- `msie`
- `msn`
- `netscape`
- `opera`
- `safari`
- `seamonkey`
- `webkit`
- `yahoo` *
(Browsers marked with a star (``*``) are crawlers.)
.. attribute:: version
the version of the browser
.. attribute:: language
the language of the browser
"""
_parser = UserAgentParser()
def __init__(self, environ_or_string):
if isinstance(environ_or_string, dict):
environ_or_string = environ_or_string.get("HTTP_USER_AGENT", "")
self.string = environ_or_string
self.platform, self.browser, self.version, self.language = self._parser(
environ_or_string
)
def to_header(self):
return self.string
def __str__(self):
return self.string
def __nonzero__(self):
return bool(self.browser)
__bool__ = __nonzero__
def __repr__(self):
return "<%s %r/%s>" % (self.__class__.__name__, self.browser, self.version)
from werkzeug import _DeprecatedImportModule
_DeprecatedImportModule(
__name__, {".wrappers.user_agent": ["UserAgentMixin"]}, "Werkzeug 1.0"
)
del _DeprecatedImportModule
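# A short illustrative sketch (not part of the module): UserAgent accepts
# either a WSGI environ dict or a raw user agent string.
#
#     ua = UserAgent(
#         "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 "
#         "(KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36")
#     ua.platform  # 'linux'
#     ua.browser   # 'chrome'
#     ua.version   # '70.0.3538.77'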
|
Azure/azure-sdk-for-python | refs/heads/sync-eng/common-js-nightly-docs-2-1768-ForTestPipeline | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_06_01/aio/operations/_top_level_domains_operations.py | 1 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class TopLevelDomainsOperations:
"""TopLevelDomainsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.web.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs: Any
) -> AsyncIterable["_models.TopLevelDomainCollection"]:
"""Get all top-level domains supported for registration.
Description for Get all top-level domains supported for registration.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either TopLevelDomainCollection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_06_01.models.TopLevelDomainCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.TopLevelDomainCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('TopLevelDomainCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/topLevelDomains'} # type: ignore
async def get(
self,
name: str,
**kwargs: Any
) -> "_models.TopLevelDomain":
"""Get details of a top-level domain.
Description for Get details of a top-level domain.
:param name: Name of the top-level domain.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: TopLevelDomain, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2020_06_01.models.TopLevelDomain
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.TopLevelDomain"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('TopLevelDomain', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/topLevelDomains/{name}'} # type: ignore
def list_agreements(
self,
name: str,
agreement_option: "_models.TopLevelDomainAgreementOption",
**kwargs: Any
) -> AsyncIterable["_models.TldLegalAgreementCollection"]:
"""Gets all legal agreements that user needs to accept before purchasing a domain.
Description for Gets all legal agreements that user needs to accept before purchasing a domain.
:param name: Name of the top-level domain.
:type name: str
:param agreement_option: Domain agreement options.
:type agreement_option: ~azure.mgmt.web.v2020_06_01.models.TopLevelDomainAgreementOption
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either TldLegalAgreementCollection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2020_06_01.models.TldLegalAgreementCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.TldLegalAgreementCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
content_type = "application/json"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_agreements.metadata['url'] # type: ignore
path_format_arguments = {
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(agreement_option, 'TopLevelDomainAgreementOption')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(agreement_option, 'TopLevelDomainAgreementOption')
body_content_kwargs['content'] = body_content
request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('TldLegalAgreementCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_agreements.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DomainRegistration/topLevelDomains/{name}/listAgreements'} # type: ignore
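# --- Illustrative usage sketch (not part of the generated code above). The client
# class name, import paths and credential type below are assumptions based on how
# generated azure-mgmt packages are usually consumed; verify them against the
# installed SDK before relying on this.
async def _example_list_top_level_domains(subscription_id: str) -> None:
    from azure.identity.aio import DefaultAzureCredential  # assumed, separate package
    from azure.mgmt.web.v2020_06_01.aio import WebSiteManagementClient  # assumed client class
    async with DefaultAzureCredential() as credential:
        async with WebSiteManagementClient(credential, subscription_id) as client:
            # client.top_level_domains is expected to be the TopLevelDomainsOperations
            # instance defined above; list() returns an AsyncItemPaged to iterate over.
            async for tld in client.top_level_domains.list():
                print(tld.name)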
|
jopohl/urh | refs/heads/master | src/urh/ui/ElidedLabel.py | 1 | from PyQt5.QtCore import QSize, Qt
from PyQt5.QtGui import QFontMetrics
from PyQt5.QtWidgets import QLabel
class ElidedLabel(QLabel):
def __init__(self, parent=None):
super().__init__(parent)
self.full_text = ""
def __set_elided_text(self):
fm = QFontMetrics(self.font())
super().setText(fm.elidedText(self.full_text, Qt.ElideRight, self.width()))
self.setToolTip(self.full_text)
def setText(self, text: str):
self.full_text = text
self.__set_elided_text()
def resizeEvent(self, event) -> None:
super().resizeEvent(event)
self.__set_elided_text()
def minimumSizeHint(self) -> QSize:
return QSize(0, super().minimumSizeHint().height())
|
vikas1885/test1 | refs/heads/master | cms/djangoapps/contentstore/views/tests/utils.py | 198 | """
Utilities for view tests.
"""
import json
from contentstore.tests.utils import CourseTestCase
from contentstore.views.helpers import xblock_studio_url
from xmodule.modulestore.tests.factories import ItemFactory
class StudioPageTestCase(CourseTestCase):
"""
Base class for all tests of Studio pages.
"""
def setUp(self):
super(StudioPageTestCase, self).setUp()
self.chapter = ItemFactory.create(parent_location=self.course.location,
category='chapter', display_name="Week 1")
self.sequential = ItemFactory.create(parent_location=self.chapter.location,
category='sequential', display_name="Lesson 1")
def get_page_html(self, xblock):
"""
Returns the HTML for the page representing the xblock.
"""
url = xblock_studio_url(xblock)
self.assertIsNotNone(url)
resp = self.client.get_html(url)
self.assertEqual(resp.status_code, 200)
return resp.content
def get_preview_html(self, xblock, view_name):
"""
Returns the HTML for the xblock when shown within a unit or container page.
"""
preview_url = '/xblock/{usage_key}/{view_name}'.format(usage_key=xblock.location, view_name=view_name)
resp = self.client.get_json(preview_url)
self.assertEqual(resp.status_code, 200)
resp_content = json.loads(resp.content)
return resp_content['html']
def validate_preview_html(self, xblock, view_name, can_add=True):
"""
Verify that the specified xblock's preview has the expected HTML elements.
"""
html = self.get_preview_html(xblock, view_name)
self.validate_html_for_add_buttons(html, can_add)
# Verify drag handles always appear.
drag_handle_html = '<span data-tooltip="Drag to reorder" class="drag-handle action"></span>'
self.assertIn(drag_handle_html, html)
        # Verify that the edit, delete and duplicate action buttons are present.
expected_button_html = [
'<a href="#" class="edit-button action-button">',
'<a href="#" data-tooltip="Delete" class="delete-button action-button">',
'<a href="#" data-tooltip="Duplicate" class="duplicate-button action-button">'
]
for button_html in expected_button_html:
self.assertIn(button_html, html)
def validate_html_for_add_buttons(self, html, can_add=True):
"""
Validate that the specified HTML has the appropriate add actions for the current publish state.
"""
        # Verify that the add button appears only when adding is allowed.
add_button_html = '<div class="add-xblock-component new-component-item adding"></div>'
if can_add:
self.assertIn(add_button_html, html)
else:
self.assertNotIn(add_button_html, html)
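# --- Illustrative sketch (not part of the original module): one way a concrete test
# case might build on the helpers above. The 'vertical' category and the display
# name are arbitrary assumptions for demonstration purposes.
class ExampleUnitPageTestCase(StudioPageTestCase):
    """Hypothetical test case using the StudioPageTestCase helpers."""
    def test_unit_page_renders(self):
        unit = ItemFactory.create(parent_location=self.sequential.location,
                                  category='vertical', display_name="Unit 1")
        html = self.get_page_html(unit)
        self.assertIn("Unit 1", html)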
|
mesosphere-mergebot/mergebot-test-dcos | refs/heads/master | packages/adminrouter/extra/src/test-harness/modules/mocker/endpoints/reflectors.py | 13 | # Copyright (C) Mesosphere, Inc. See LICENSE file for details.
"""All the code relevant for reflecting mocker, both Unix Socket and TCP/IP based"""
import logging
from mocker.endpoints.basehandler import BaseHTTPRequestHandler
from mocker.endpoints.generic import TcpIpHttpEndpoint, UnixSocketHTTPEndpoint
# pylint: disable=C0103
log = logging.getLogger(__name__)
# pylint: disable=R0903
class ReflectingHTTPRequestHandler(BaseHTTPRequestHandler):
"""A request hander class implementing sending back all the headers/request
parameters,etc... back to the client.
"""
def _calculate_response(self, base_path, url_args, body_args=None):
"""Gather all the request data into single dict and prepare it for
sending it to the client for inspection, irrespective of the request
URI.
Please refer to the description of the BaseHTTPRequestHandler class
method with the same name for details on the arguments and return value
of this method.
"""
return self._reflect_request(base_path, url_args, body_args)
# pylint: disable=R0903,C0103
class ReflectingTcpIpEndpoint(TcpIpHttpEndpoint):
"""ReflectingTcpIpEndpoint is just a plain TCP/IP endpoint with a
request handler that pushes back request data to the client."""
def __init__(self, port, ip='', keyfile=None, certfile=None):
super().__init__(ReflectingHTTPRequestHandler, port, ip, keyfile, certfile)
# pylint: disable=R0903
class ReflectingUnixSocketEndpoint(UnixSocketHTTPEndpoint):
"""ReflectingUnixSocketEndpoint is just a plain Unix Socket endpoint with a
request handler that pushes back request data to the client."""
def __init__(self, path, keyfile=None, certfile=None):
super().__init__(ReflectingHTTPRequestHandler, path, keyfile, certfile)
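# --- Illustrative sketch (not part of the original module): constructing the
# reflecting endpoints defined above. The constructor arguments come from the
# signatures in this file; how an endpoint is started and stopped depends on the
# generic base classes, so that part is deliberately left out.
# tcp_reflector = ReflectingTcpIpEndpoint(port=8080)
# tls_reflector = ReflectingTcpIpEndpoint(port=8443, keyfile='/path/key.pem',
#                                         certfile='/path/cert.pem')
# unix_reflector = ReflectingUnixSocketEndpoint(path='/tmp/reflector.sock')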
|
darkleons/BE | refs/heads/master | addons/website_event_sale/__init__.py | 1577 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import controllers
import models
|
fuhongliang/erpnext | refs/heads/develop | erpnext/hr/doctype/job_opening/test_job_opening.py | 87 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
# test_records = frappe.get_test_records('Job Opening')
class TestJobOpening(unittest.TestCase):
pass
|
opennetworkinglab/onos | refs/heads/master | tools/test/topos/sol.py | 43 | #!/usr/bin/python
import sys, solar
topo = solar.Solar(cips=sys.argv[1:])
topo.run()
|
tangyiyong/odoo | refs/heads/8.0 | setup/win32/win32_service.py | 362 | # -*- coding: utf-8 -*-
import servicemanager
import win32api
import win32process
import win32service
import win32serviceutil
import subprocess
import sys
from os.path import dirname, join, split
execfile(join(dirname(__file__), '..', 'server', 'openerp', 'release.py'))
class OdooService(win32serviceutil.ServiceFramework):
_svc_name_ = nt_service_name
_svc_display_name_ = "%s %s" % (nt_service_name, serie)
def __init__(self, args):
win32serviceutil.ServiceFramework.__init__(self, args)
self.odooprocess = None # Reference to the server's process
def SvcStop(self):
# Before we do anything, tell the SCM we are starting the stop process.
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
# Stop the running Odoo: say it's a normal exit
win32api.TerminateProcess(int(self.odooprocess._handle), 0)
servicemanager.LogInfoMsg("Odoo stopped correctly")
def SvcDoRun(self):
# We start Odoo as an independent process, but we keep its handle
service_dir = dirname(sys.argv[0])
server_dir = split(service_dir)[0]
server_path = join(server_dir, 'server', 'openerp-server.exe')
self.odooprocess = subprocess.Popen(
[server_path], cwd=server_dir, creationflags=win32process.CREATE_NO_WINDOW
)
servicemanager.LogInfoMsg('Odoo up and running')
# exit with same exit code as Odoo process
sys.exit(self.odooprocess.wait())
def option_handler(opts):
# configure the service to auto restart on failures...
subprocess.call([
'sc', 'failure', nt_service_name, 'reset=', '0', 'actions=', 'restart/0/restart/0/restart/0'
])
if __name__ == '__main__':
win32serviceutil.HandleCommandLine(OdooService, customOptionHandler=option_handler)
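# --- Illustrative note (not part of the original file): HandleCommandLine gives this
# script the standard pywin32 service command set, so from an elevated command prompt
# it is typically driven along these lines (exact paths and service names may differ):
#   python win32_service.py install   # register the Windows service
#   python win32_service.py start     # SvcDoRun launches openerp-server.exe
#   python win32_service.py stop      # SvcStop terminates the child process
#   python win32_service.py remove    # unregister the service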
|
nvoron23/hue | refs/heads/master | desktop/core/ext-py/python-openid-2.2.5/openid/test/test_negotiation.py | 74 |
import unittest
from support import CatchLogs
from openid.message import Message, OPENID2_NS, OPENID1_NS, OPENID_NS
from openid import association
from openid.consumer.consumer import GenericConsumer, ServerError
from openid.consumer.discover import OpenIDServiceEndpoint, OPENID_2_0_TYPE
class ErrorRaisingConsumer(GenericConsumer):
"""
    A consumer whose _requestAssociation will return predefined results
instead of trying to actually perform association requests.
"""
# The list of objects to be returned by successive calls to
    # _requestAssociation. Each call will pop the first element from
# this list and return it to _negotiateAssociation. If the
# element is a Message object, it will be wrapped in a ServerError
# exception. Otherwise it will be returned as-is.
return_messages = []
def _requestAssociation(self, endpoint, assoc_type, session_type):
m = self.return_messages.pop(0)
if isinstance(m, Message):
raise ServerError.fromMessage(m)
else:
return m
class TestOpenID2SessionNegotiation(unittest.TestCase, CatchLogs):
"""
Test the session type negotiation behavior of an OpenID 2
consumer.
"""
def setUp(self):
CatchLogs.setUp(self)
self.consumer = ErrorRaisingConsumer(store=None)
self.endpoint = OpenIDServiceEndpoint()
self.endpoint.type_uris = [OPENID_2_0_TYPE]
self.endpoint.server_url = 'bogus'
def testBadResponse(self):
"""
Test the case where the response to an associate request is a
server error or is otherwise undecipherable.
"""
self.consumer.return_messages = [Message(self.endpoint.preferredNamespace())]
self.assertEqual(self.consumer._negotiateAssociation(self.endpoint), None)
self.failUnlessLogMatches('Server error when requesting an association')
def testEmptyAssocType(self):
"""
Test the case where the association type (assoc_type) returned
in an unsupported-type response is absent.
"""
msg = Message(self.endpoint.preferredNamespace())
msg.setArg(OPENID_NS, 'error', 'Unsupported type')
msg.setArg(OPENID_NS, 'error_code', 'unsupported-type')
# not set: msg.delArg(OPENID_NS, 'assoc_type')
msg.setArg(OPENID_NS, 'session_type', 'new-session-type')
self.consumer.return_messages = [msg]
self.assertEqual(self.consumer._negotiateAssociation(self.endpoint), None)
self.failUnlessLogMatches('Unsupported association type',
'Server responded with unsupported association ' +
'session but did not supply a fallback.')
def testEmptySessionType(self):
"""
Test the case where the session type (session_type) returned
in an unsupported-type response is absent.
"""
msg = Message(self.endpoint.preferredNamespace())
msg.setArg(OPENID_NS, 'error', 'Unsupported type')
msg.setArg(OPENID_NS, 'error_code', 'unsupported-type')
msg.setArg(OPENID_NS, 'assoc_type', 'new-assoc-type')
# not set: msg.setArg(OPENID_NS, 'session_type', None)
self.consumer.return_messages = [msg]
self.assertEqual(self.consumer._negotiateAssociation(self.endpoint), None)
self.failUnlessLogMatches('Unsupported association type',
'Server responded with unsupported association ' +
'session but did not supply a fallback.')
def testNotAllowed(self):
"""
Test the case where an unsupported-type response specifies a
preferred (assoc_type, session_type) combination that is not
allowed by the consumer's SessionNegotiator.
"""
allowed_types = []
negotiator = association.SessionNegotiator(allowed_types)
self.consumer.negotiator = negotiator
msg = Message(self.endpoint.preferredNamespace())
msg.setArg(OPENID_NS, 'error', 'Unsupported type')
msg.setArg(OPENID_NS, 'error_code', 'unsupported-type')
msg.setArg(OPENID_NS, 'assoc_type', 'not-allowed')
msg.setArg(OPENID_NS, 'session_type', 'not-allowed')
self.consumer.return_messages = [msg]
self.assertEqual(self.consumer._negotiateAssociation(self.endpoint), None)
self.failUnlessLogMatches('Unsupported association type',
'Server sent unsupported session/association type:')
def testUnsupportedWithRetry(self):
"""
Test the case where an unsupported-type response triggers a
retry to get an association with the new preferred type.
"""
msg = Message(self.endpoint.preferredNamespace())
msg.setArg(OPENID_NS, 'error', 'Unsupported type')
msg.setArg(OPENID_NS, 'error_code', 'unsupported-type')
msg.setArg(OPENID_NS, 'assoc_type', 'HMAC-SHA1')
msg.setArg(OPENID_NS, 'session_type', 'DH-SHA1')
assoc = association.Association(
'handle', 'secret', 'issued', 10000, 'HMAC-SHA1')
self.consumer.return_messages = [msg, assoc]
self.failUnless(self.consumer._negotiateAssociation(self.endpoint) is assoc)
self.failUnlessLogMatches('Unsupported association type')
def testUnsupportedWithRetryAndFail(self):
"""
        Test the case where an unsupported-type response triggers a
retry, but the retry fails and None is returned instead.
"""
msg = Message(self.endpoint.preferredNamespace())
msg.setArg(OPENID_NS, 'error', 'Unsupported type')
msg.setArg(OPENID_NS, 'error_code', 'unsupported-type')
msg.setArg(OPENID_NS, 'assoc_type', 'HMAC-SHA1')
msg.setArg(OPENID_NS, 'session_type', 'DH-SHA1')
self.consumer.return_messages = [msg,
Message(self.endpoint.preferredNamespace())]
self.failUnlessEqual(self.consumer._negotiateAssociation(self.endpoint), None)
self.failUnlessLogMatches('Unsupported association type',
'Server %s refused' % (self.endpoint.server_url))
def testValid(self):
"""
Test the valid case, wherein an association is returned on the
first attempt to get one.
"""
assoc = association.Association(
'handle', 'secret', 'issued', 10000, 'HMAC-SHA1')
self.consumer.return_messages = [assoc]
self.failUnless(self.consumer._negotiateAssociation(self.endpoint) is assoc)
self.failUnlessLogEmpty()
class TestOpenID1SessionNegotiation(unittest.TestCase, CatchLogs):
"""
Tests for the OpenID 1 consumer association session behavior. See
the docs for TestOpenID2SessionNegotiation. Notice that this
class is not a subclass of the OpenID 2 tests. Instead, it uses
many of the same inputs but inspects the log messages logged with
oidutil.log. See the calls to self.failUnlessLogMatches. Some of
these tests pass openid2-style messages to the openid 1
association processing logic to be sure it ignores the extra data.
"""
def setUp(self):
CatchLogs.setUp(self)
self.consumer = ErrorRaisingConsumer(store=None)
self.endpoint = OpenIDServiceEndpoint()
self.endpoint.type_uris = [OPENID1_NS]
self.endpoint.server_url = 'bogus'
def testBadResponse(self):
self.consumer.return_messages = [Message(self.endpoint.preferredNamespace())]
self.assertEqual(self.consumer._negotiateAssociation(self.endpoint), None)
self.failUnlessLogMatches('Server error when requesting an association')
def testEmptyAssocType(self):
msg = Message(self.endpoint.preferredNamespace())
msg.setArg(OPENID_NS, 'error', 'Unsupported type')
msg.setArg(OPENID_NS, 'error_code', 'unsupported-type')
# not set: msg.setArg(OPENID_NS, 'assoc_type', None)
msg.setArg(OPENID_NS, 'session_type', 'new-session-type')
self.consumer.return_messages = [msg]
self.assertEqual(self.consumer._negotiateAssociation(self.endpoint), None)
self.failUnlessLogMatches('Server error when requesting an association')
def testEmptySessionType(self):
msg = Message(self.endpoint.preferredNamespace())
msg.setArg(OPENID_NS, 'error', 'Unsupported type')
msg.setArg(OPENID_NS, 'error_code', 'unsupported-type')
msg.setArg(OPENID_NS, 'assoc_type', 'new-assoc-type')
# not set: msg.setArg(OPENID_NS, 'session_type', None)
self.consumer.return_messages = [msg]
self.assertEqual(self.consumer._negotiateAssociation(self.endpoint), None)
self.failUnlessLogMatches('Server error when requesting an association')
def testNotAllowed(self):
allowed_types = []
negotiator = association.SessionNegotiator(allowed_types)
self.consumer.negotiator = negotiator
msg = Message(self.endpoint.preferredNamespace())
msg.setArg(OPENID_NS, 'error', 'Unsupported type')
msg.setArg(OPENID_NS, 'error_code', 'unsupported-type')
msg.setArg(OPENID_NS, 'assoc_type', 'not-allowed')
msg.setArg(OPENID_NS, 'session_type', 'not-allowed')
self.consumer.return_messages = [msg]
self.assertEqual(self.consumer._negotiateAssociation(self.endpoint), None)
self.failUnlessLogMatches('Server error when requesting an association')
def testUnsupportedWithRetry(self):
msg = Message(self.endpoint.preferredNamespace())
msg.setArg(OPENID_NS, 'error', 'Unsupported type')
msg.setArg(OPENID_NS, 'error_code', 'unsupported-type')
msg.setArg(OPENID_NS, 'assoc_type', 'HMAC-SHA1')
msg.setArg(OPENID_NS, 'session_type', 'DH-SHA1')
assoc = association.Association(
'handle', 'secret', 'issued', 10000, 'HMAC-SHA1')
self.consumer.return_messages = [msg, assoc]
self.failUnless(self.consumer._negotiateAssociation(self.endpoint) is None)
self.failUnlessLogMatches('Server error when requesting an association')
def testValid(self):
assoc = association.Association(
'handle', 'secret', 'issued', 10000, 'HMAC-SHA1')
self.consumer.return_messages = [assoc]
self.failUnless(self.consumer._negotiateAssociation(self.endpoint) is assoc)
self.failUnlessLogEmpty()
class TestNegotiatorBehaviors(unittest.TestCase, CatchLogs):
def setUp(self):
self.allowed_types = [
('HMAC-SHA1', 'no-encryption'),
('HMAC-SHA256', 'no-encryption'),
]
self.n = association.SessionNegotiator(self.allowed_types)
def testAddAllowedTypeNoSessionTypes(self):
self.assertRaises(ValueError, self.n.addAllowedType, 'invalid')
def testAddAllowedTypeBadSessionType(self):
self.assertRaises(ValueError, self.n.addAllowedType, 'assoc1', 'invalid')
def testAddAllowedTypeContents(self):
assoc_type = 'HMAC-SHA1'
self.failUnless(self.n.addAllowedType(assoc_type) is None)
for typ in association.getSessionTypes(assoc_type):
self.failUnless((assoc_type, typ) in self.n.allowed_types)
if __name__ == '__main__':
unittest.main()
|
saurabh6790/test-med-app | refs/heads/master | patches/february_2013/p04_remove_old_doctypes.py | 30 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import webnotes, os
def execute():
webnotes.delete_doc("DocType", "Product")
webnotes.delete_doc("DocType", "Test")
webnotes.delete_doc("Module Def", "Test")
os.system("rm -rf app/test")
os.system("rm -rf app/website/doctype/product")
|