repo_name (stringlengths 5-92) | path (stringlengths 4-232) | copies (stringclasses 19 values) | size (stringlengths 4-7) | content (stringlengths 721-1.04M) | license (stringclasses 15 values) | hash (int64 -9,223,277,421,539,062,000-9,223,102,107B) | line_mean (float64 6.51-99.9) | line_max (int64 15-997) | alpha_frac (float64 0.25-0.97) | autogenerated (bool, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|
atzengin/OCC | oc-utils/python/modtool/code_generator.py | 1 | 2298 | #
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
""" A code generator (needed by ModToolAdd) """
from templates import Templates
import Cheetah.Template
from util_functions import str_to_fancyc_comment
from util_functions import str_to_python_comment
from util_functions import strip_default_values
from util_functions import strip_arg_types
from util_functions import strip_arg_types_occ
class GRMTemplate(Cheetah.Template.Template):
""" An extended template class """
def __init__(self, src, searchList):
self.grtypelist = {
'sync': 'sync_block',
'sink': 'sync_block',
'source': 'sync_block',
'decimator': 'sync_decimator',
'interpolator': 'sync_interpolator',
'general': 'block',
'tagged_stream': 'tagged_stream_block',
'hier': 'hier_block2',
'noblock': ''}
searchList['str_to_fancyc_comment'] = str_to_fancyc_comment
searchList['str_to_python_comment'] = str_to_python_comment
searchList['strip_default_values'] = strip_default_values
searchList['strip_arg_types'] = strip_arg_types
searchList['strip_arg_types_occ'] = strip_arg_types_occ
Cheetah.Template.Template.__init__(self, src, searchList=searchList)
self.grblocktype = self.grtypelist[searchList['blocktype']]
def get_template(tpl_id, **kwargs):
""" Return the template given by tpl_id, parsed through Cheetah """
return str(GRMTemplate(Templates[tpl_id], searchList=kwargs))
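# Illustrative usage (hypothetical template id and kwargs; the real ids live
# in templates.Templates):
#
#   code = get_template('block_cpp', blocktype='sync', modname='howto')
#
# Keyword arguments become Cheetah searchList entries, so a template can
# reference $modname, $blocktype, plus the helper functions registered above.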
| gpl-3.0 | -1,651,171,289,484,127,200 | 41.555556 | 76 | 0.681027 | false |
mostaphaRoudsari/Honeybee | src/Honeybee_AskMe.py | 1 | 1992 | #
# Honeybee: A Plugin for Environmental Analysis (GPL) started by Mostapha Sadeghipour Roudsari
#
# This file is part of Honeybee.
#
# Copyright (c) 2013-2020, Mostapha Sadeghipour Roudsari <[email protected]>
# Honeybee is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Honeybee is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Honeybee; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>
"""
Use this component to get basic information on Honeybee Objects, whether they are HBSrfs or HBZones.
-
Provided by Honeybee 0.0.66
Args:
_HBObjects: Any valid Honeybee object.
Returns:
readMe!: Information about the Honeybee object. Connect to a panel to visualize.
"""
ghenv.Component.Name = "Honeybee_AskMe"
ghenv.Component.NickName = 'askMe'
ghenv.Component.Message = 'VER 0.0.66\nJUL_07_2020'
ghenv.Component.IconDisplayMode = ghenv.Component.IconDisplayMode.application
ghenv.Component.Category = "HB-Legacy"
ghenv.Component.SubCategory = "00 | Honeybee"
#compatibleHBVersion = VER 0.0.56\nJUL_24_2017
#compatibleLBVersion = VER 0.0.59\nFEB_01_2015
try: ghenv.Component.AdditionalHelpFromDocStrings = "1"
except: pass
import scriptcontext as sc
try:
# call the objects from the lib
hb_hive = sc.sticky["honeybee_Hive"]()
HBObjectsFromHive = hb_hive.visualizeFromHoneybeeHive(_HBObjects)
for HBO in HBObjectsFromHive:
print HBO
except Exception, e:
print "Honeybee has no idea what this object is! Vviiiiiiz!"
pass
| gpl-3.0 | -3,768,216,618,678,705,700 | 35.218182 | 100 | 0.744478 | false |
wdv4758h/ZipPy | edu.uci.python.benchmark/src/benchmarks/euler31-timed.py | 1 | 1614 | #runas solve()
#unittest.skip recursive generator
#pythran export solve()
# 01/08/14 modified for benchmarking by Wei Zhang
import sys, time
COINS = [1, 2, 5, 10, 20, 50, 100, 200]
# test
def _sum(iterable):
sum = None
for i in iterable:
if sum is None:
sum = i
else:
sum += i
return sum
def balance(pattern):
return _sum(COINS[x]*pattern[x] for x in range(0, len(pattern)))
def gen(pattern, coinnum, num):
coin = COINS[coinnum]
for p in range(0, num//coin + 1):
newpat = pattern[:coinnum] + (p,)
bal = balance(newpat)
if bal > num:
return
elif bal == num:
yield newpat
elif coinnum < len(COINS)-1:
for pat in gen(newpat, coinnum+1, num):
yield pat
def solve(total):
'''
In England the currency is made up of pound, P, and pence, p, and there are eight coins in general circulation:
1p, 2p, 5p, 10p, 20p, 50p, P1 (100p) and P2 (200p).
It is possible to make P2 in the following way:
1 P1 + 1 50p + 2 20p + 1 5p + 1 2p + 3 1p
How many different ways can P2 be made using any number of coins?
'''
return _sum(1 for pat in gen((), 0, total))
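# Worked example: solve(5) == 4, since 5p can be made as
# {5}, {2+2+1}, {2+1+1+1} and {1+1+1+1+1}.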
def measure():
input = int(sys.argv[1]) # 200
for i in range(3):
solve(input)
print("Start timing...")
start = time.time()
result = solve(input)
print('total number of different ways: ', result)
duration = "%.3f\n" % (time.time() - start)
print("euler31: " + duration)
# warm up
for i in range(2000): # 300
solve(40)
measure()
| bsd-3-clause | 7,452,297,059,408,330,000 | 23.830769 | 115 | 0.576208 | false |
longde123/MultiversePlatform | lib/IPCE/Lib/ctypes.py | 1 | 5974 | # Copyright (c) 2006 Seo Sanghyeon
# 2006-06-08 sanxiyn Created
# 2006-06-11 sanxiyn Implemented .value on primitive types
# 2006-11-02 sanxiyn Support for multiple signatures
__all__ = [
'c_int', 'c_float', 'c_double', 'c_char_p', 'c_void_p',
'LibraryLoader', 'CDLL', 'cdll',
'byref', 'sizeof'
]
# --------------------------------------------------------------------
# Dynamic module definition
from System import AppDomain
from System.Reflection import AssemblyName
from System.Reflection.Emit import AssemblyBuilderAccess
def pinvoke_module():
domain = AppDomain.CurrentDomain
name = AssemblyName('pinvoke')
flag = AssemblyBuilderAccess.Run
assembly = domain.DefineDynamicAssembly(name, flag)
module = assembly.DefineDynamicModule('pinvoke')
return module
# --------------------------------------------------------------------
# General interface
class pinvoke_value:
type = None
value = None
def get_type(obj):
if isinstance(obj, pinvoke_value):
return obj.type
else:
return type(obj)
def get_value(obj):
if isinstance(obj, pinvoke_value):
return obj.value
else:
return obj
# --------------------------------------------------------------------
# Primitive types
from System import Single, Double, IntPtr
class pinvoke_primitive(pinvoke_value):
def __init__(self, value=None):
if value is None:
value = self.type()
if not isinstance(value, self.type):
expected = self.type.__name__
given = value.__class__.__name__
msg = "%s expected instead of %s" % (expected, given)
raise TypeError(msg)
self.value = value
def __repr__(self):
clsname = self.__class__.__name__
return "%s(%r)" % (clsname, self.value)
class c_int(pinvoke_primitive):
type = int
class c_float(pinvoke_primitive):
type = Single
class c_double(pinvoke_primitive):
type = Double
class c_char_p(pinvoke_primitive):
type = str
class c_void_p(pinvoke_primitive):
type = IntPtr
# --------------------------------------------------------------------
# Reference
from System import Type
class pinvoke_reference(pinvoke_value):
def __init__(self, obj):
self.obj = obj
self.type = Type.MakeByRefType(obj.type)
self.value = obj.value
def __repr__(self):
return "byref(%r)" % (self.obj,)
def byref(obj):
if not isinstance(obj, pinvoke_value):
raise TypeError("byref() argument must be a ctypes instance")
ref = pinvoke_reference(obj)
return ref
# --------------------------------------------------------------------
# Utility
from System.Runtime.InteropServices import Marshal
def sizeof(obj):
return Marshal.SizeOf(obj.type)
# --------------------------------------------------------------------
# Dynamic P/Invoke
from System import Array
from System.Reflection import CallingConventions, MethodAttributes
from System.Runtime.InteropServices import CallingConvention, CharSet
from IronPython.Runtime.Calls import BuiltinFunction, FunctionType
class pinvoke_method:
pinvoke_attributes = (
MethodAttributes.Public |
MethodAttributes.Static |
MethodAttributes.PinvokeImpl
)
calling_convention = None
return_type = None
def __init__(self, dll, entry):
self.dll = dll
self.entry = entry
self.restype = None
self.argtypes = None
self.func = None
self.signatures = set()
def create(self, restype, argtypes):
dll = self.dll
entry = self.entry
attributes = self.pinvoke_attributes
cc = self.calling_convention
clr_argtypes = Array[Type](argtypes)
module = pinvoke_module()
module.DefinePInvokeMethod(
entry, dll, attributes, CallingConventions.Standard,
restype, clr_argtypes, cc, CharSet.Ansi)
module.CreateGlobalFunctions()
method = module.GetMethod(entry)
self.func = BuiltinFunction.MakeOrAdd(
self.func, entry, method, FunctionType.Function)
self.signatures.add((restype, argtypes))
def __call__(self, *args):
if self.restype:
restype = self.restype.type
else:
restype = self.return_type.type
if self.argtypes:
argtypes = [argtype.type for argtype in self.argtypes]
else:
argtypes = [get_type(arg) for arg in args]
argtypes = tuple(argtypes)
if (restype, argtypes) not in self.signatures:
self.create(restype, argtypes)
args = [get_value(arg) for arg in args]
result = self.func(*args)
return result
# --------------------------------------------------------------------
# Function loader
def is_special_name(name):
return name.startswith('__') and name.endswith('__')
class pinvoke_dll:
method_class = None
def __init__(self, name):
self.name = name
def __repr__(self):
clsname = self.__class__.__name__
return "<%s '%s'>" % (clsname, self.name)
def __getattr__(self, name):
if is_special_name(name):
raise AttributeError(name)
method = self.method_class(self.name, name)
setattr(self, name, method)
return method
class CDLL(pinvoke_dll):
class method_class(pinvoke_method):
calling_convention = CallingConvention.Cdecl
return_type = c_int
# --------------------------------------------------------------------
# Library loader
class LibraryLoader(object):
def __init__(self, dlltype):
self.dlltype = dlltype
def __getattr__(self, name):
if is_special_name(name):
raise AttributeError(name)
dll = self.dlltype(name)
setattr(self, name, dll)
return dll
def LoadLibrary(self, name):
return self.dlltype(name)
cdll = LibraryLoader(CDLL)
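# Illustrative usage of this IronPython ctypes shim (assumes a C library named
# "msvcrt" exposing abs() is loadable on the host; the P/Invoke signature is
# inferred from the call unless restype/argtypes are set explicitly):
#
#   libc = cdll.msvcrt
#   print libc.abs(c_int(-5))   # -> 5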
| mit | -2,151,948,963,422,639,000 | 25.789238 | 70 | 0.571142 | false |
bm5w/second_dataS | test_quicksort.py | 1 | 1531 | import pytest
from quicksort import quicksort
def test_quicksort_simple():
input = [1, 3, 2]
assert quicksort(input) == [1, 2, 3]
def test_quicksort():
input = [54, 26, 93, 17, 71, 31, 44, 55, 20]
assert quicksort(input) == [17, 20, 26, 31, 44, 54, 55, 71, 93]
def test_quicksort_duplicate():
input = [54, 26, 93, 17, 71, 31, 44, 55, 20, 20]
assert quicksort(input) == [17, 20, 20, 26, 31, 44, 54, 55, 71, 93]
def test_quicksort_more_duplicates():
input = [54, 26, 93, 17, 71, 31, 44, 55, 20, 20, 20, 20]
assert quicksort(input) == [17, 20, 20, 20, 20, 26, 31, 44, 54, 55, 71, 93]
def test_quicksort_large():
input = range(10000)
assert quicksort(input) == range(10000)
def test_quicksort_big_floats():
input = [x*0.01 for x in range(0, 1000)]
assert quicksort(input) == [x*0.01 for x in range(0, 1000)]
def test_wrong_type():
input = 'x'
with pytest.raises(TypeError):
quicksort(input)
def test_quicksort_big_reverse():
input = range(1000)[::-1]
assert quicksort(input) == range(1000)
def test_quicksort_big_increase_decrease():
input = range(500)+range(500)[::-1]
expected = range(500)*2
expected.sort()
assert quicksort(input) == expected
def test_quicksort_duplicates():
input = (range(100)+range(100))[::-1]
expected = range(100)+range(100)
expected.sort()
assert quicksort(input) == expected
def test_quicksort_all_duplicates():
input = [100]*20
assert quicksort(input) == [100]*20
| mit | -4,190,833,842,785,767,400 | 20.56338 | 79 | 0.613978 | false |
avsm/lifedb-server | client/python/lifedb/tests/client.py | 1 | 5909 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import doctest
import os
import unittest
import StringIO
import time
import tempfile
from lifedb import client
import httplib2
httplib2.debuglevel = 0
class BaseTestCase(unittest.TestCase):
def setUp(self):
self.uri = os.environ.get('LIFEDB_URI', client.DEFAULT_BASE_URI)
self.username = os.environ.get('LIFEDB_TEST_USERNAME', 'foo')
self.password = os.environ.get('LIFEDB_TEST_PASSWORD', 'bar')
def tearDown(self):
pass
class LoginOKTestCase(BaseTestCase):
def setUp(self):
BaseTestCase.setUp(self)
self.server = client.Server(self.username, self.password, uri=self.uri)
class LoginBadTestCase(BaseTestCase):
def setUp(self):
BaseTestCase.setUp(self)
self.server = client.Server("BADUSERNAME", "BADPASSWD", uri=self.uri)
class BasicPassTestCase(LoginOKTestCase):
def test_logged_in_ping(self):
data = self.server.ping()
self.assertEquals(data, "pong")
class BasicFailTestCase(LoginBadTestCase):
def test_not_logged_in_ping(self):
self.assertRaises(client.ResourceForbidden, self.server.ping)
class TasksPassTestCase(LoginOKTestCase):
def test_task_create(self):
self.server.task_create("foo","Dummy","single","dummy", args={'WANTSLEEP':'100'})
tasks = self.server.task_list()
self.assert_('foo' in tasks)
self.destroy_and_check("foo")
def destroy_and_check(self, name):
self.server.task_destroy(name)
tasks = self.server.task_list ()
self.assert_('foo' not in tasks)
def long_test_task_periodic_create(self):
period=3
tmp = tempfile.NamedTemporaryFile()
args = { 'TMPFILELOC' : tmp.name }
cmd="echo foo >> %s" % tmp.name
self.server.task_create("bar","Dummy","periodic","",period=period, args=args)
tasks = self.server.task_list()
self.assert_('bar' in tasks)
time.sleep(period*4+1)
self.destroy_and_check("bar")
f = open(tmp.name, 'r')
lines = map(str.strip, f.readlines())
f.close()
tmp.close()
print lines
self.assertEquals(lines,['foo','foo','foo','foo'])
def test_task_constant_create(self):
self.server.task_create("foo","Dummy","constant","",args={'WANTSLEEP':'100'})
tasks = self.server.task_list()
self.assert_('foo' in tasks)
self.assertEquals(tasks['foo']['info']['mode'], 'constant')
self.destroy_and_check('foo')
def test_task_get(self):
self.server.task_create("xxx", "Dummy", "single", "", args={'WANTSLEEP':'50'})
task = self.server.task_get("xxx")
self.assertEquals(task['info']['plugin'], 'Dummy')
self.destroy_and_check('xxx')
def test_task_negative_get(self):
self.assertRaises(client.ResourceNotFound, self.server.task_get, "nonexistent")
def test_task_create_invalid(self):
self.assertRaises(client.ServerError, self.server.task_create,
'invalid', 'xxx', 'yyy', '')
def long_test_task_overload(self):
max_tasks = 10
for t in range(max_tasks):
self.server.task_create("foo%d" % t, "Dummy", "single","", args={'WANTSLEEP':'100000'})
for t in range(5):
self.assertRaises(client.ServerError, self.server.task_create,
"bar", "Dummy", "single", "")
for t in range(max_tasks):
self.server.task_destroy("foo%d" % t)
def very_long_test_task_fd_leak(self):
for t in range(2000):
self.server.task_create("foo", "Dummy", "single", "", args={'WANTSLEEP':'100'})
self.server.task_create("bar", "Dummy", "single", "", args={'WANTSLEEP':'100'})
self.server.task_destroy("foo")
self.server.task_destroy("bar")
class TasksFailTestCase(LoginBadTestCase):
def test_task_create_not_logged_in(self):
self.assertRaises(client.ResourceForbidden, self.server.task_create,
"foo", "Dummy", "single", "")
def test_task_get_not_logged_in(self):
self.assertRaises(client.ResourceForbidden, self.server.task_get, "nonexistent")
class PasswordPassTestCase(LoginOKTestCase):
def test_passwd_create(self):
username="notsecret"
password="verysecret"
service="arandomwebsite"
self.server.password_create(service, username, password)
rpass = self.server.password_get(service, username)
self.assertEqual(rpass, password)
self.server.password_delete(service, username)
self.assertRaises(client.ResourceNotFound, self.server.password_get, username, password)
class PasswordFailTestCase(LoginBadTestCase):
def test_passwd_create(self):
self.assertRaises(client.ResourceForbidden, self.server.password_create, 'x','x','x')
self.assertRaises(client.ResourceForbidden, self.server.password_get, 'x', 'x')
self.assertRaises(client.ResourceForbidden, self.server.password_delete, 'x', 'x')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(PasswordPassTestCase, 'test'))
suite.addTest(unittest.makeSuite(PasswordFailTestCase, 'test'))
suite.addTest(unittest.makeSuite(BasicPassTestCase, 'test'))
suite.addTest(unittest.makeSuite(BasicFailTestCase, 'test'))
suite.addTest(unittest.makeSuite(TasksPassTestCase, 'test'))
suite.addTest(unittest.makeSuite(TasksFailTestCase, 'test'))
suite.addTest(unittest.makeSuite(TasksPassTestCase, 'long_test'))
#suite.addTest(unittest.makeSuite(TasksPassTestCase, 'very_long_test'))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| gpl-2.0 | 5,814,957,335,077,162,000 | 37.875 | 100 | 0.651379 | false |
bergolho1337/URI-Online-Judge | Basicos/Python/1061/main.py | 1 | 1292 | # -*- coding: utf-8 -*-
def converteString (dia, hora):
evento = []
    # Parse the day
num = dia[4:6]
evento.append(int(num))
    # Parse the hour
num = hora[0:2]
evento.append(int(num))
    # Parse the minutes
num = hora[5:7]
evento.append(int(num))
    # Parse the seconds
num = hora[10:12]
evento.append(int(num))
return evento
def calculaDuracao (inicio, fim):
inicio_seg = (inicio[0]*86400)+(inicio[1]*3600)+(inicio[2]*60)+(inicio[3])
fim_seg = (fim[0]*86400)+(fim[1]*3600)+(fim[2]*60)+(fim[3])
duracao_seg = fim_seg - inicio_seg
dias = duracao_seg / 86400
duracao_seg = duracao_seg - (dias*86400)
horas = duracao_seg / 3600
duracao_seg = duracao_seg - (horas*3600)
minutos = duracao_seg / 60
duracao_seg = duracao_seg - (minutos*60)
segundos = duracao_seg
return dias, horas, minutos, segundos
dia_inicio = raw_input()
hora_inicio = raw_input()
dia_fim = raw_input()
hora_fim = raw_input()
evento_inicio = converteString(dia_inicio,hora_inicio)
evento_fim = converteString(dia_fim,hora_fim)
dias, horas, minutos, segundos = calculaDuracao(evento_inicio,evento_fim)
print("%d dia(s)" % dias)
print("%d hora(s)" % horas)
print("%d minuto(s)" % minutos)
print("%d segundo(s)" % segundos) | gpl-2.0 | -1,258,978,114,268,866,600 | 25.387755 | 78 | 0.629257 | false |
google/material-design-icons | update/venv/lib/python3.9/site-packages/fontTools/varLib/plot.py | 5 | 4153 | """Visualize DesignSpaceDocument and resulting VariationModel."""
from fontTools.varLib.models import VariationModel, supportScalar
from fontTools.designspaceLib import DesignSpaceDocument
from matplotlib import pyplot
from mpl_toolkits.mplot3d import axes3d
from itertools import cycle
import math
import logging
import sys
log = logging.getLogger(__name__)
def stops(support, count=10):
a,b,c = support
return [a + (b - a) * i / count for i in range(count)] + \
[b + (c - b) * i / count for i in range(count)] + \
[c]
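# For example, stops((-1.0, 0.0, 1.0), count=2) returns
# [-1.0, -0.5, 0.0, 0.5, 1.0]: `count` samples per segment plus the endpoint.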
def _plotLocationsDots(locations, axes, subplot, **kwargs):
for loc, color in zip(locations, cycle(pyplot.cm.Set1.colors)):
if len(axes) == 1:
subplot.plot(
[loc.get(axes[0], 0)],
[1.],
'o',
color=color,
**kwargs
)
elif len(axes) == 2:
subplot.plot(
[loc.get(axes[0], 0)],
[loc.get(axes[1], 0)],
[1.],
'o',
color=color,
**kwargs
)
else:
raise AssertionError(len(axes))
def plotLocations(locations, fig, names=None, **kwargs):
n = len(locations)
cols = math.ceil(n**.5)
rows = math.ceil(n / cols)
if names is None:
names = [None] * len(locations)
model = VariationModel(locations)
names = [names[model.reverseMapping[i]] for i in range(len(names))]
axes = sorted(locations[0].keys())
if len(axes) == 1:
_plotLocations2D(
model, axes[0], fig, cols, rows, names=names, **kwargs
)
elif len(axes) == 2:
_plotLocations3D(
model, axes, fig, cols, rows, names=names, **kwargs
)
else:
raise ValueError("Only 1 or 2 axes are supported")
def _plotLocations2D(model, axis, fig, cols, rows, names, **kwargs):
subplot = fig.add_subplot(111)
for i, (support, color, name) in enumerate(
zip(model.supports, cycle(pyplot.cm.Set1.colors), cycle(names))
):
if name is not None:
subplot.set_title(name)
subplot.set_xlabel(axis)
pyplot.xlim(-1.,+1.)
Xs = support.get(axis, (-1.,0.,+1.))
X, Y = [], []
for x in stops(Xs):
y = supportScalar({axis:x}, support)
X.append(x)
Y.append(y)
subplot.plot(X, Y, color=color, **kwargs)
_plotLocationsDots(model.locations, [axis], subplot)
def _plotLocations3D(model, axes, fig, rows, cols, names, **kwargs):
ax1, ax2 = axes
axis3D = fig.add_subplot(111, projection='3d')
for i, (support, color, name) in enumerate(
zip(model.supports, cycle(pyplot.cm.Set1.colors), cycle(names))
):
if name is not None:
axis3D.set_title(name)
axis3D.set_xlabel(ax1)
axis3D.set_ylabel(ax2)
pyplot.xlim(-1.,+1.)
pyplot.ylim(-1.,+1.)
Xs = support.get(ax1, (-1.,0.,+1.))
Ys = support.get(ax2, (-1.,0.,+1.))
for x in stops(Xs):
X, Y, Z = [], [], []
for y in Ys:
z = supportScalar({ax1:x, ax2:y}, support)
X.append(x)
Y.append(y)
Z.append(z)
axis3D.plot(X, Y, Z, color=color, **kwargs)
for y in stops(Ys):
X, Y, Z = [], [], []
for x in Xs:
z = supportScalar({ax1:x, ax2:y}, support)
X.append(x)
Y.append(y)
Z.append(z)
axis3D.plot(X, Y, Z, color=color, **kwargs)
_plotLocationsDots(model.locations, [ax1, ax2], axis3D)
def plotDocument(doc, fig, **kwargs):
doc.normalize()
locations = [s.location for s in doc.sources]
names = [s.name for s in doc.sources]
plotLocations(locations, fig, names, **kwargs)
def main(args=None):
from fontTools import configLogger
if args is None:
args = sys.argv[1:]
# configure the library logger (for >= WARNING)
configLogger()
# comment this out to enable debug messages from logger
# log.setLevel(logging.DEBUG)
if len(args) < 1:
print("usage: fonttools varLib.plot source.designspace", file=sys.stderr)
print(" or")
print("usage: fonttools varLib.plot location1 location2 ...", file=sys.stderr)
sys.exit(1)
fig = pyplot.figure()
fig.set_tight_layout(True)
if len(args) == 1 and args[0].endswith('.designspace'):
doc = DesignSpaceDocument()
doc.read(args[0])
plotDocument(doc, fig)
else:
axes = [chr(c) for c in range(ord('A'), ord('Z')+1)]
locs = [dict(zip(axes, (float(v) for v in s.split(',')))) for s in args]
plotLocations(locs, fig)
pyplot.show()
if __name__ == '__main__':
import sys
sys.exit(main())
| apache-2.0 | -5,397,906,847,771,724,000 | 23.868263 | 80 | 0.642427 | false |
inonit/django-chemtrails | tests/testapp/migrations/0005_guild.py | 1 | 1061 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-10 13:14
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('testapp', '0004_book_view_book_permission'),
]
operations = [
migrations.CreateModel(
name='Guild',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('contact', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='guild_contacts', to='testapp.Author')),
('members', models.ManyToManyField(related_name='guild_set', to='testapp.Author', verbose_name='members')),
],
),
migrations.AddField(
model_name='author',
name='guilds',
field=models.ManyToManyField(blank=True, to='testapp.Guild'),
),
]
| mit | 6,104,085,431,111,180,000 | 34.366667 | 144 | 0.600377 | false |
alfred82santa/tarrabme2 | src/orgs/models.py | 1 | 1856 | from django.db import models
from common.models import CommonModel, AbstractContact, AbstractAddress
from django.contrib.auth.models import Group
from imagekit.models import ProcessedImageField, ImageSpecField
from imagekit.processors import ResizeToFill
class Organization(CommonModel):
name = models.CharField(max_length=100, unique=True)
commercial_name = models.CharField(max_length=150, unique=True)
prefix = models.CharField(max_length=6, unique=True)
active = models.BooleanField('active', default=True)
logo = ProcessedImageField(
upload_to="logos",
processors=[ResizeToFill(400, 400)],
)
logo_thumbnail = ImageSpecField(source='logo',
processors=[ResizeToFill(50, 50)],)
def logo_thumbnail_img(self):
return '<img src="%s"/>' % self.logo_thumbnail.url
logo_thumbnail_img.allow_tags = True
logo_thumbnail_img.short_description = ''
class Meta:
pass
def __unicode__(self):
return self.name
class Contact(AbstractContact):
organization = models.ForeignKey(Organization, blank=False,
null=False, related_name="contacts_list"
)
class BillingAccount(AbstractAddress):
fiscal_number = models.CharField(max_length=126, unique=True)
payment_method = models.CharField(max_length=126, unique=True)
payment_data = models.CharField(max_length=126, unique=True)
organization = models.ForeignKey(Organization, blank=False,
null=False, related_name="contacts"
)
class OrganizationRole(Group):
organization = models.ForeignKey(Organization, blank=False,
null=False, related_name="roles"
)
| gpl-3.0 | -9,086,944,784,570,360,000 | 34.692308 | 77 | 0.634698 | false |
anneline/Bika-LIMS | bika/lims/utils/__init__.py | 1 | 13899 | from time import time
from AccessControl import ModuleSecurityInfo, allow_module
from bika.lims import logger
from bika.lims.browser import BrowserView
from DateTime import DateTime
from email import Encoders
from email.MIMEBase import MIMEBase
from plone.memoize import ram
from plone.registry.interfaces import IRegistry
from Products.Archetypes.public import DisplayList
from Products.CMFCore.utils import getToolByName
from Products.CMFPlone.utils import safe_unicode
from zope.component import queryUtility
from zope.i18n import translate
from zope.i18n.locales import locales
import App
import Globals
import os
import re
import urllib2
ModuleSecurityInfo('email.Utils').declarePublic('formataddr')
allow_module('csv')
def to_utf8(text):
if text is None:
text = ''
return safe_unicode(text).encode('utf-8')
def to_unicode(text):
if text is None:
text = ''
return safe_unicode(text)
def t(i18n_msg):
"""Safely translate and convert to UTF8, any zope i18n msgid returned from
a bikaMessageFactory _
"""
return to_utf8(translate(i18n_msg))
# Wrapper for PortalTransport's sendmail - don't know why there sendmail
# method is marked private
ModuleSecurityInfo('Products.bika.utils').declarePublic('sendmail')
# Protected( Publish, 'sendmail')
def sendmail(portal, from_addr, to_addrs, msg):
mailspool = portal.portal_mailspool
mailspool.sendmail(from_addr, to_addrs, msg)
class js_log(BrowserView):
def __call__(self, message):
"""Javascript sends a string for us to place into the log.
"""
self.logger.info(message)
class js_err(BrowserView):
def __call__(self, message):
"""Javascript sends a string for us to place into the error log
"""
        self.logger.error(message)
ModuleSecurityInfo('Products.bika.utils').declarePublic('printfile')
def printfile(portal, from_addr, to_addrs, msg):
""" set the path, then the cmd 'lpr filepath'
temp_path = 'C:/Zope2/Products/Bika/version.txt'
os.system('lpr "%s"' %temp_path)
"""
pass
def _cache_key_getUsers(method, context, roles=[], allow_empty=True):
key = time() // (60 * 60), roles, allow_empty
return key
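# The time() // (60 * 60) component rotates the key hourly, so cached results
# expire after at most an hour even if the roles stay the same.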
@ram.cache(_cache_key_getUsers)
def getUsers(context, roles, allow_empty=True):
""" Present a DisplayList containing users in the specified
list of roles
"""
mtool = getToolByName(context, 'portal_membership')
pairs = allow_empty and [['', '']] or []
users = mtool.searchForMembers(roles=roles)
for user in users:
uid = user.getId()
fullname = user.getProperty('fullname')
if not fullname:
fullname = uid
pairs.append((uid, fullname))
pairs.sort(lambda x, y: cmp(x[1], y[1]))
return DisplayList(pairs)
def isActive(obj):
""" Check if obj is inactive or cancelled.
"""
wf = getToolByName(obj, 'portal_workflow')
if (hasattr(obj, 'inactive_state') and obj.inactive_state == 'inactive') or \
wf.getInfoFor(obj, 'inactive_state', 'active') == 'inactive':
return False
    if (hasattr(obj, 'cancellation_state') and obj.cancellation_state == 'cancelled') or \
wf.getInfoFor(obj, 'cancellation_state', 'active') == 'cancelled':
return False
return True
def formatDateQuery(context, date_id):
""" Obtain and reformat the from and to dates
into a date query construct
"""
from_date = context.REQUEST.get('%s_fromdate' % date_id, None)
if from_date:
from_date = from_date + ' 00:00'
to_date = context.REQUEST.get('%s_todate' % date_id, None)
if to_date:
to_date = to_date + ' 23:59'
date_query = {}
if from_date and to_date:
date_query = {'query': [from_date, to_date],
'range': 'min:max'}
elif from_date or to_date:
date_query = {'query': from_date or to_date,
'range': from_date and 'min' or 'max'}
return date_query
def formatDateParms(context, date_id):
""" Obtain and reformat the from and to dates
into a printable date parameter construct
"""
from_date = context.REQUEST.get('%s_fromdate' % date_id, None)
to_date = context.REQUEST.get('%s_todate' % date_id, None)
date_parms = {}
if from_date and to_date:
date_parms = 'from %s to %s' % (from_date, to_date)
elif from_date:
date_parms = 'from %s' % (from_date)
elif to_date:
date_parms = 'to %s' % (to_date)
return date_parms
def formatDuration(context, totminutes):
""" Format a time period in a usable manner: eg. 3h24m
"""
mins = totminutes % 60
hours = (totminutes - mins) / 60
if mins:
mins_str = '%sm' % mins
else:
mins_str = ''
if hours:
hours_str = '%sh' % hours
else:
hours_str = ''
return '%s%s' % (hours_str, mins_str)
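# e.g. formatDuration(context, 204) == '3h24m' and formatDuration(context, 45) == '45m'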
def formatDecimalMark(value, decimalmark='.'):
""" Dummy method to replace decimal mark from an input string.
Assumes that 'value' uses '.' as decimal mark and ',' as
thousand mark.
"""
rawval = value
if decimalmark == ',':
rawval = rawval.replace('.', '[comma]')
rawval = rawval.replace(',', '.')
rawval = rawval.replace('[comma]', ',')
return rawval
# encode_header function copied from roundup's rfc2822 package.
hqre = re.compile(r'^[A-z0-9!"#$%%&\'()*+,-./:;<=>?@\[\]^_`{|}~ ]+$')
ModuleSecurityInfo('Products.bika.utils').declarePublic('encode_header')
def encode_header(header, charset='utf-8'):
""" Will encode in quoted-printable encoding only if header
contains non latin characters
"""
# Return empty headers unchanged
if not header:
return header
# return plain header if it does not contain non-ascii characters
if hqre.match(header):
return header
quoted = ''
# max_encoded = 76 - len(charset) - 7
for c in header:
# Space may be represented as _ instead of =20 for readability
if c == ' ':
quoted += '_'
# These characters can be included verbatim
elif hqre.match(c):
quoted += c
# Otherwise, replace with hex value like =E2
else:
quoted += "=%02X" % ord(c)
return '=?%s?q?%s?=' % (charset, quoted)
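# For example, encode_header('To') comes back unchanged, while a header with
# non-Latin bytes is returned quoted-printable encoded as '=?utf-8?q?...?='.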
def zero_fill(matchobj):
return matchobj.group().zfill(8)
num_sort_regex = re.compile('\d+')
ModuleSecurityInfo('Products.bika.utils').declarePublic('sortable_title')
def sortable_title(portal, title):
"""Convert title to sortable title
"""
if not title:
return ''
def_charset = portal.plone_utils.getSiteEncoding()
sortabletitle = title.lower().strip()
# Replace numbers with zero filled numbers
sortabletitle = num_sort_regex.sub(zero_fill, sortabletitle)
# Truncate to prevent bloat
for charset in [def_charset, 'latin-1', 'utf-8']:
try:
sortabletitle = safe_unicode(sortabletitle, charset)[:30]
sortabletitle = sortabletitle.encode(def_charset or 'utf-8')
break
except UnicodeError:
pass
except TypeError:
# If we get a TypeError if we already have a unicode string
sortabletitle = sortabletitle[:30]
break
return sortabletitle
def logged_in_client(context, member=None):
if not member:
membership_tool = getToolByName(context, 'portal_membership')
member = membership_tool.getAuthenticatedMember()
client = None
groups_tool = context.portal_groups
member_groups = [groups_tool.getGroupById(group.id).getGroupName()
for group in groups_tool.getGroupsByUserId(member.id)]
if 'Clients' in member_groups:
for obj in context.clients.objectValues("Client"):
if member.id in obj.users_with_local_role('Owner'):
client = obj
return client
def changeWorkflowState(content, wf_id, state_id, acquire_permissions=False,
portal_workflow=None, **kw):
"""Change the workflow state of an object
@param content: Content obj which state will be changed
@param state_id: name of the state to put on content
@param acquire_permissions: True->All permissions unchecked and on riles and
acquired
False->Applies new state security map
@param portal_workflow: Provide workflow tool (optimisation) if known
@param kw: change the values of same name of the state mapping
@return: None
"""
if portal_workflow is None:
portal_workflow = getToolByName(content, 'portal_workflow')
# Might raise IndexError if no workflow is associated to this type
found_wf = 0
for wf_def in portal_workflow.getWorkflowsFor(content):
if wf_id == wf_def.getId():
found_wf = 1
break
if not found_wf:
logger.error("%s: Cannot find workflow id %s" % (content, wf_id))
wf_state = {
'action': None,
'actor': None,
'comments': "Setting state to %s" % state_id,
'review_state': state_id,
'time': DateTime(),
}
# Updating wf_state from keyword args
for k in kw.keys():
# Remove unknown items
if k not in wf_state:
del kw[k]
if 'review_state' in kw:
del kw['review_state']
wf_state.update(kw)
portal_workflow.setStatusOf(wf_id, content, wf_state)
if acquire_permissions:
# Acquire all permissions
for permission in content.possible_permissions():
content.manage_permission(permission, acquire=1)
else:
# Setting new state permissions
wf_def.updateRoleMappingsFor(content)
# Map changes to the catalogs
content.reindexObject(idxs=['allowedRolesAndUsers', 'review_state'])
return
def tmpID():
import binascii
return binascii.hexlify(os.urandom(16))
def isnumber(s):
try:
float(s)
return True
except ValueError:
return False
def createPdf(htmlreport, outfile=None, css=None):
debug_mode = App.config.getConfiguration().debug_mode
# XXX css must be a local file - urllib fails under robotframework tests.
css_def = ''
if css:
if css.startswith("http://") or css.startswith("https://"):
# Download css file in temp dir
u = urllib2.urlopen(css)
_cssfile = Globals.INSTANCE_HOME + '/var/' + tmpID() + '.css'
localFile = open(_cssfile, 'w')
localFile.write(u.read())
localFile.close()
else:
_cssfile = css
cssfile = open(_cssfile, 'r')
css_def = cssfile.read()
if not outfile:
outfile = Globals.INSTANCE_HOME + "/var/" + tmpID() + ".pdf"
from weasyprint import HTML, CSS
if css:
HTML(string=htmlreport, encoding='utf-8').write_pdf(outfile,
stylesheets=[CSS(string=css_def)])
else:
HTML(string=htmlreport, encoding='utf-8').write_pdf(outfile)
if debug_mode:
htmlfilepath = Globals.INSTANCE_HOME + "/var/" + tmpID() + ".html"
htmlfile = open(htmlfilepath, 'w')
htmlfile.write(htmlreport)
htmlfile.close()
    return open(outfile, 'r').read()
def attachPdf(mimemultipart, pdfreport, filename=None):
part = MIMEBase('application', "application/pdf")
part.add_header('Content-Disposition',
'attachment; filename="%s.pdf"' % (filename or tmpID()))
part.set_payload(pdfreport)
Encoders.encode_base64(part)
mimemultipart.attach(part)
def get_invoice_item_description(obj):
if obj.portal_type == 'AnalysisRequest':
sample = obj.getSample()
samplepoint = sample.getSamplePoint()
samplepoint = samplepoint and samplepoint.Title() or ''
sampletype = sample.getSampleType()
sampletype = sampletype and sampletype.Title() or ''
description = sampletype + ' ' + samplepoint
elif obj.portal_type == 'SupplyOrder':
products = obj.folderlistingFolderContents()
products = [o.getProduct().Title() for o in products]
description = ', '.join(products)
return description
def currency_format(context, locale):
locale = locales.getLocale(locale)
currency = context.bika_setup.getCurrency()
symbol = locale.numbers.currencies[currency].symbol
def format(val):
return '%s %0.2f' % (symbol, val)
return format
def getHiddenAttributesForClass(classname):
try:
registry = queryUtility(IRegistry)
hiddenattributes = registry.get('bika.lims.hiddenattributes', ())
if hiddenattributes is not None:
for alist in hiddenattributes:
if alist[0] == classname:
return alist[1:]
except:
logger.warning(
'Probem accessing optionally hidden attributes in registry')
return []
def isAttributeHidden(classname, fieldname):
try:
registry = queryUtility(IRegistry)
hiddenattributes = registry.get('bika.lims.hiddenattributes', ())
if hiddenattributes is not None:
for alist in hiddenattributes:
if alist[0] == classname:
return fieldname in alist[1:]
except:
logger.warning(
'Probem accessing optionally hidden attributes in registry')
return False
def dicts_to_dict(dictionaries, key_subfieldname):
"""Convert a list of dictionaries into a dictionary of dictionaries.
key_subfieldname must exist in each Record's subfields and have a value,
which will be used as the key for the new dictionary. If a key is duplicated,
the earlier value will be overwritten.
"""
result = {}
for d in dictionaries:
result[d[key_subfieldname]] = d
return result
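# For example (hypothetical records):
#   dicts_to_dict([{'id': 'a', 'v': 1}, {'id': 'b', 'v': 2}], 'id')
#   == {'a': {'id': 'a', 'v': 1}, 'b': {'id': 'b', 'v': 2}}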
| agpl-3.0 | -5,184,133,056,224,156,000 | 29.818182 | 86 | 0.630189 | false |
zmarvel/slowboy | slowboy/util.py | 1 | 1753 |
import abc
from collections import namedtuple
Op = namedtuple('Op', ['function', 'cycles', 'description'])
class ClockListener(metaclass=abc.ABCMeta):
@abc.abstractmethod
def notify(self, clock: int, cycles: int):
"""Notify the listener that the clock has advanced.
:param clock: The new value of the CPU clock.
:param cycles: The number of cycles that have passed since the last
notification."""
pass
def uint8toBCD(uint8):
"""Convert an 8-bit unsigned integer to binary-coded decimal."""
d1 = uint8 // 10
d0 = uint8 % 10
return (d1 << 4) | d0
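# e.g. uint8toBCD(42) == 0x42: tens digit in the high nibble, ones in the low.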
def sub_s8(x, y):
"""Subtract two 8-bit integers stored in two's complement."""
return (x + twoscompl8(y)) & 0xff
def sub_s16(x, y):
"""Subtract two 16-bit integers stored in two's complement."""
return (x + twoscompl16(y)) & 0xffff
def add_s8(x, y):
"""Add two 8-bit integers stored in two's complement."""
return (x + y) & 0xff
def add_s16(x, y):
"""Add two 16-bit integers stored in two's complement."""
return (x + y) & 0xffff
def twoscompl8(x):
"""Returns the reciprocal of 8-bit x in two's complement."""
return ((x ^ 0xff) + 1) & 0xff
def twoscompl16(x):
"""Returns the reciprocal of 16-bit x in two's complement."""
return ((x ^ 0xffff) + 1) & 0xffff
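# Examples: twoscompl8(0x01) == 0xff, and sub_s8(0x05, 0x03) == 0x02
# (5 - 3 wrapped to 8 bits).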
def hexdump(bytes, line_len, start=0):
line = []
j = 0
for b in bytes:
s = '{:02x}'.format(b)
if j % line_len == 0 and j > 0:
yield '{:04x}: {}'.format(start+j-line_len, ' '.join(line))
line = []
j += 1
line.append(s)
    yield '{:04x}: {}'.format(start+j-len(line), ' '.join(line))
def print_lines(it):
for line in it:
print(line)
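# Example (rom_bytes is a hypothetical buffer):
#   print_lines(hexdump(rom_bytes, 16, start=0x100))
# prints rows of 16 hex bytes, each row prefixed with its offset.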
| mit | 8,089,310,398,789,706,000 | 24.405797 | 75 | 0.590416 | false |
koreiklein/fantasia | ui/render/gl/distances.py | 1 | 1104 | # Copyright (C) 2013 Korei Klein <[email protected]>
# Constants for gl rendering of basic are collected here.
from ui.render.gl import colors
epsilon = 0.0001
divider_spacing = 15.0
notThickness = 22.0
notShiftThickness = notThickness + 21.0
# Amount by which to shift the value contained inside a Not.
notShiftOffset = [notShiftThickness + 5, notShiftThickness, 0.0]
quantifier_variables_spacing = 100.0
variable_binding_spacing = 20.0
quantifier_before_divider_spacing = 10.0
quantifier_after_divider_spacing = 55.0
infixSpacing = 88.0
applySpacing = 16.0
productVariableHorizontalSpacing = 0.0
productVariableBorder = 10.0
symbolBackgroundBorderWidth = 30.0
variableBackgroundBorderWidth = 30.0
holdsSpacing = 60.0
iffSpacing = 35.0
exponential_border_width = 40.0
min_unit_divider_length = 100.0
min_intersect_divider_length = 250.0
unit_width = 20.0
quantifier_divider_width = 20.0
conjunctiveDividerWidth = 20.0
def capLengthOfDividerByLength(length):
return min(35.0, length / 7.0)
inject_spacing = 8.0
before_dot_spacing = 8.0
after_dot_spacing = 8.0
dotWidth = 15.0
| gpl-2.0 | -6,752,107,023,383,802,000 | 19.444444 | 64 | 0.764493 | false |
shakfu/start-vm | default/bin/normalize.py | 1 | 1259 | #!/usr/bin/env python
import hashlib
import os
import sys
from datetime import datetime
HASH = hashlib.md5(str(datetime.now()).encode('utf-8')).hexdigest()
def normalize(path, file_func=None, dir_func=None):
''' recursive normalization of directory and file names
applies the following changes to directory and filenames:
- lowercasing
- converts spaces to '-'
'''
norm_func = lambda x: x.lower().replace(' ', '-')
if not file_func:
file_func = norm_func
if not dir_func:
dir_func = norm_func
for root, dirs, files in os.walk(path, topdown=False):
for name in files:
f = os.path.join(root, name)
print(file_func(f))
for name in dirs:
d = os.path.join(root, name)
#print(dir_func(d))
def norm_func(path):
entry = os.path.basename(path)
parent = os.path.dirname(path)
entry_norm = entry.lower().replace(' ', '-')
p = os.path.join(parent, entry_norm)+HASH
os.rename(path, p)
    new = p[:-len(HASH)]  # remove the trailing hash suffix
os.rename(p, new)
return new
def norm_path(path=None):
if not path:
path = sys.argv[1]
normalize(path, norm_func)
#normalize(path, None, norm_func)
if __name__ == '__main__':
norm_path()
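# Typical invocation (hypothetical path): `python normalize.py ~/Downloads`
# lowercases every file name under the tree and replaces spaces with '-'.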
| mit | -1,232,118,386,050,333,700 | 23.211538 | 65 | 0.597299 | false |
luci/luci-py | appengine/components/components/auth/change_log_test.py | 2 | 45674 | #!/usr/bin/env vpython
# Copyright 2014 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
import datetime
import sys
import unittest
from test_support import test_env
test_env.setup_test_env()
from google.appengine.ext import ndb
from components import utils
from components.auth import change_log
from components.auth import model
from components.auth.proto import realms_pb2
from components.auth.proto import security_config_pb2
from test_support import test_case
class MakeInitialSnapshotTest(test_case.TestCase):
"""Tests for ensure_initial_snapshot function."""
def test_works(self):
# Initial entities. Don't call 'record_revision' to imitate "old"
# application without history related code.
@ndb.transactional
def make_auth_db():
model.AuthGlobalConfig(key=model.root_key()).put()
model.AuthIPWhitelistAssignments(
key=model.ip_whitelist_assignments_key()).put()
model.AuthGroup(key=model.group_key('A group')).put()
model.AuthIPWhitelist(key=model.ip_whitelist_key('A whitelist')).put()
model.replicate_auth_db()
make_auth_db()
# Bump auth_db once more to avoid hitting trivial case of "processing first
# revision ever".
auth_db_rev = ndb.transaction(model.replicate_auth_db)
self.assertEqual(2, auth_db_rev)
# Now do the work.
change_log.ensure_initial_snapshot(auth_db_rev)
# Generated new AuthDB rev with updated entities.
self.assertEqual(3, model.get_auth_db_revision())
# Check all *History entities exist now.
p = model.historical_revision_key(3)
self.assertIsNotNone(
ndb.Key('AuthGlobalConfigHistory', 'root', parent=p).get())
self.assertIsNotNone(
ndb.Key(
'AuthIPWhitelistAssignmentsHistory', 'default', parent=p).get())
self.assertIsNotNone(ndb.Key('AuthGroupHistory', 'A group', parent=p).get())
self.assertIsNotNone(
ndb.Key('AuthIPWhitelistHistory', 'A whitelist', parent=p).get())
# Call again, should be noop (marker is set).
change_log.ensure_initial_snapshot(3)
self.assertEqual(3, model.get_auth_db_revision())
ident = lambda x: model.Identity.from_bytes('user:' + x)
glob = lambda x: model.IdentityGlob.from_bytes('user:' + x)
def make_group(name, comment, **kwargs):
group = model.AuthGroup(key=model.group_key(name), **kwargs)
group.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment=comment)
group.put()
def make_ip_whitelist(name, comment, **kwargs):
wl = model.AuthIPWhitelist(key=model.ip_whitelist_key(name), **kwargs)
wl.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment=comment)
wl.put()
def security_config(regexps):
msg = security_config_pb2.SecurityConfig(internal_service_regexp=regexps)
return msg.SerializeToString()
class GenerateChangesTest(test_case.TestCase):
"""Tests for generate_changes function."""
def setUp(self):
super(GenerateChangesTest, self).setUp()
self.mock(change_log, 'enqueue_process_change_task', lambda _: None)
self.mock_now(datetime.datetime(2015, 1, 2, 3, 4, 5))
def auth_db_transaction(self, callback):
"""Imitates AuthDB change and subsequent 'process-change' task.
Returns parent entity of entity subgroup with all generated changes.
"""
@ndb.transactional
def run():
callback()
return model.replicate_auth_db()
auth_db_rev = run()
change_log.process_change(auth_db_rev)
return change_log.change_log_revision_key(auth_db_rev)
def grab_all(self, ancestor):
"""Returns dicts with all entities under given ancestor."""
entities = {}
def cb(key):
# Skip AuthDBLogRev itself, it's not interesting.
if key == ancestor:
return
as_str = []
k = key
while k and k != ancestor:
as_str.append('%s:%s' % (k.kind(), k.id()))
k = k.parent()
entities['/'.join(as_str)] = {
prop: val for prop, val in key.get().to_dict().items() if val
}
ndb.Query(ancestor=ancestor).map(cb, keys_only=True)
return entities
def test_works(self):
# Touch all kinds of entities at once. More thorough tests for per-entity
# changes are below.
def touch_all():
make_group(
name='A group',
members=[ident('[email protected]'), ident('[email protected]')],
description='Blah',
comment='New group')
make_ip_whitelist(
name='An IP whitelist',
subnets=['127.0.0.1/32'],
description='Bluh',
comment='New IP whitelist')
a = model.AuthIPWhitelistAssignments(
key=model.ip_whitelist_assignments_key(),
assignments=[
model.AuthIPWhitelistAssignments.Assignment(
identity=ident('[email protected]'),
ip_whitelist='An IP whitelist')
])
a.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='New assignment')
a.put()
c = model.AuthGlobalConfig(
key=model.root_key(),
oauth_client_id='client_id',
oauth_client_secret='client_secret',
oauth_additional_client_ids=['1', '2'])
c.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='Config change')
c.put()
r = model.AuthRealmsGlobals(
key=model.realms_globals_key(),
permissions=[realms_pb2.Permission(name='luci.dev.p1')])
r.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='New permission')
r.put()
p = model.AuthProjectRealms(
key=model.project_realms_key('proj1'),
realms=realms_pb2.Realms(api_version=1234),
config_rev='config_rev',
perms_rev='prems_rev')
p.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='New project')
p.put()
changes = self.grab_all(self.auth_db_transaction(touch_all))
self.assertEqual({
'AuthDBChange:AuthGlobalConfig$root!7000': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_CONF_OAUTH_CLIENT_CHANGED,
'class_': [u'AuthDBChange', u'AuthDBConfigChange'],
'comment': u'Config change',
'oauth_client_id': u'client_id',
'oauth_client_secret': u'client_secret',
'target': u'AuthGlobalConfig$root',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGlobalConfig$root!7100': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_CONF_CLIENT_IDS_ADDED,
'class_': [u'AuthDBChange', u'AuthDBConfigChange'],
'comment': u'Config change',
'oauth_additional_client_ids': [u'1', u'2'],
'target': u'AuthGlobalConfig$root',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1000': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_CREATED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'New group',
'description': u'Blah',
'owners': u'administrators',
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1200': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_MEMBERS_ADDED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'New group',
'members': [
model.Identity(kind='user', name='[email protected]'),
model.Identity(kind='user', name='[email protected]'),
],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthIPWhitelist$An IP whitelist!3000': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_IPWL_CREATED,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistChange'],
'comment': u'New IP whitelist',
'description': u'Bluh',
'target': u'AuthIPWhitelist$An IP whitelist',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthIPWhitelist$An IP whitelist!3200': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_IPWL_SUBNETS_ADDED,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistChange'],
'comment': u'New IP whitelist',
'subnets': [u'127.0.0.1/32'],
'target': u'AuthIPWhitelist$An IP whitelist',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthIPWhitelistAssignments'
'$default$user:[email protected]!5000': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_IPWLASSIGN_SET,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistAssignmentChange'],
'comment': u'New assignment',
'identity': model.Identity(kind='user', name='[email protected]'),
'ip_whitelist': u'An IP whitelist',
'target': u'AuthIPWhitelistAssignments$default$user:[email protected]',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]')
},
'AuthDBChange:AuthProjectRealms$proj1!10000': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_PROJECT_REALMS_CREATED,
'class_': [u'AuthDBChange', u'AuthProjectRealmsChange'],
'comment': u'New project',
'config_rev_new': u'config_rev',
'perms_rev_new': u'prems_rev',
'target': u'AuthProjectRealms$proj1',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]')
},
'AuthDBChange:AuthRealmsGlobals$globals!9000': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_REALMS_GLOBALS_CHANGED,
'class_': [u'AuthDBChange', u'AuthRealmsGlobalsChange'],
'comment': u'New permission',
'permissions_added': [u'luci.dev.p1'],
'target': u'AuthRealmsGlobals$globals',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]')
},
}, changes)
def test_groups_diff(self):
def create():
make_group(
name='A group',
members=[ident('[email protected]'), ident('[email protected]')],
globs=[glob('*@example.com'), glob('*@other.com')],
nested=['A', 'B'],
description='Blah',
comment='New group')
changes = self.grab_all(self.auth_db_transaction(create))
self.assertEqual({
'AuthDBChange:AuthGroup$A group!1000': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_CREATED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'New group',
'description': u'Blah',
'owners': u'administrators',
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1200': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_MEMBERS_ADDED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'New group',
'members': [
model.Identity(kind='user', name='[email protected]'),
model.Identity(kind='user', name='[email protected]'),
],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1400': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_GLOBS_ADDED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'New group',
'globs': [
model.IdentityGlob(kind='user', pattern='*@example.com'),
model.IdentityGlob(kind='user', pattern='*@other.com'),
],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1600': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_NESTED_ADDED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'New group',
'nested': [u'A', u'B'],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
def modify():
g = model.group_key('A group').get()
g.members = [ident('[email protected]'), ident('[email protected]')]
g.globs = [glob('*@example.com'), glob('*@blah.com')]
g.nested = ['A', 'C']
g.description = 'Another blah'
g.owners = 'another-owners'
g.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='Changed')
g.put()
changes = self.grab_all(self.auth_db_transaction(modify))
self.assertEqual({
'AuthDBChange:AuthGroup$A group!1100': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_DESCRIPTION_CHANGED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'Changed',
'description': u'Another blah',
'old_description': u'Blah',
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1150': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_OWNERS_CHANGED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'Changed',
'old_owners': u'administrators',
'owners': u'another-owners',
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1200': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_MEMBERS_ADDED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'Changed',
'members': [model.Identity(kind='user', name='[email protected]')],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1300': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_MEMBERS_REMOVED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'Changed',
'members': [model.Identity(kind='user', name='[email protected]')],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1400': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_GLOBS_ADDED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'Changed',
'globs': [model.IdentityGlob(kind='user', pattern='*@blah.com')],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1500': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_GLOBS_REMOVED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'Changed',
'globs': [model.IdentityGlob(kind='user', pattern='*@other.com')],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1600': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_NESTED_ADDED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'Changed',
'nested': [u'C'],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1700': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_NESTED_REMOVED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'Changed',
'nested': [u'B'],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
def delete():
g = model.group_key('A group').get()
g.record_deletion(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='Deleted')
g.key.delete()
changes = self.grab_all(self.auth_db_transaction(delete))
self.assertEqual({
'AuthDBChange:AuthGroup$A group!1300': {
'app_version': u'v1a',
'auth_db_rev': 3,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_MEMBERS_REMOVED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'Deleted',
'members': [
model.Identity(kind='user', name='[email protected]'),
model.Identity(kind='user', name='[email protected]'),
],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1500': {
'app_version': u'v1a',
'auth_db_rev': 3,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_GLOBS_REMOVED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'Deleted',
'globs': [
model.IdentityGlob(kind='user', pattern='*@example.com'),
model.IdentityGlob(kind='user', pattern='*@blah.com'),
],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1700': {
'app_version': u'v1a',
'auth_db_rev': 3,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_NESTED_REMOVED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'Deleted',
'nested': [u'A', u'C'],
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGroup$A group!1800': {
'app_version': u'v1a',
'auth_db_rev': 3,
'change_type': change_log.AuthDBChange.CHANGE_GROUP_DELETED,
'class_': [u'AuthDBChange', u'AuthDBGroupChange'],
'comment': u'Deleted',
'old_description': u'Another blah',
'old_owners': u'another-owners',
'target': u'AuthGroup$A group',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
def test_ip_whitelists_diff(self):
def create():
make_ip_whitelist(
name='A list',
subnets=['127.0.0.1/32', '127.0.0.2/32'],
description='Blah',
comment='New list')
changes = self.grab_all(self.auth_db_transaction(create))
self.assertEqual({
'AuthDBChange:AuthIPWhitelist$A list!3000': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_IPWL_CREATED,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistChange'],
'comment': u'New list',
'description': u'Blah',
'target': u'AuthIPWhitelist$A list',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthIPWhitelist$A list!3200': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_IPWL_SUBNETS_ADDED,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistChange'],
'comment': u'New list',
'subnets': [u'127.0.0.1/32', u'127.0.0.2/32'],
'target': u'AuthIPWhitelist$A list',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
def modify():
l = model.ip_whitelist_key('A list').get()
l.subnets = ['127.0.0.1/32', '127.0.0.3/32']
l.description = 'Another blah'
l.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='Changed')
l.put()
changes = self.grab_all(self.auth_db_transaction(modify))
self.assertEqual({
'AuthDBChange:AuthIPWhitelist$A list!3100': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_IPWL_DESCRIPTION_CHANGED,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistChange'],
'comment': u'Changed',
'description': u'Another blah',
'old_description': u'Blah',
'target': u'AuthIPWhitelist$A list',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthIPWhitelist$A list!3200': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_IPWL_SUBNETS_ADDED,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistChange'],
'comment': u'Changed',
'subnets': [u'127.0.0.3/32'],
'target': u'AuthIPWhitelist$A list',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthIPWhitelist$A list!3300': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_IPWL_SUBNETS_REMOVED,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistChange'],
'comment': u'Changed',
'subnets': [u'127.0.0.2/32'],
'target': u'AuthIPWhitelist$A list',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
def delete():
l = model.ip_whitelist_key('A list').get()
l.record_deletion(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='Deleted')
l.key.delete()
changes = self.grab_all(self.auth_db_transaction(delete))
self.assertEqual({
'AuthDBChange:AuthIPWhitelist$A list!3300': {
'app_version': u'v1a',
'auth_db_rev': 3,
'change_type': change_log.AuthDBChange.CHANGE_IPWL_SUBNETS_REMOVED,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistChange'],
'comment': u'Deleted',
'subnets': [u'127.0.0.1/32', u'127.0.0.3/32'],
'target': u'AuthIPWhitelist$A list',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthIPWhitelist$A list!3400': {
'app_version': u'v1a',
'auth_db_rev': 3,
'change_type': change_log.AuthDBChange.CHANGE_IPWL_DELETED,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistChange'],
'comment': u'Deleted',
'old_description': u'Another blah',
'target': u'AuthIPWhitelist$A list',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
def test_ip_wl_assignments_diff(self):
def create():
a = model.AuthIPWhitelistAssignments(
key=model.ip_whitelist_assignments_key(),
assignments=[
model.AuthIPWhitelistAssignments.Assignment(
identity=ident('[email protected]'),
ip_whitelist='An IP whitelist'),
model.AuthIPWhitelistAssignments.Assignment(
identity=ident('[email protected]'),
ip_whitelist='Another IP whitelist'),
])
a.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='New assignment')
a.put()
changes = self.grab_all(self.auth_db_transaction(create))
self.assertEqual({
'AuthDBChange:AuthIPWhitelistAssignments$'
'default$user:[email protected]!5000': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_IPWLASSIGN_SET,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistAssignmentChange'],
'comment': u'New assignment',
'identity': model.Identity(kind='user', name='[email protected]'),
'ip_whitelist': u'An IP whitelist',
'target': u'AuthIPWhitelistAssignments$default$user:[email protected]',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthIPWhitelistAssignments$'
'default$user:[email protected]!5000': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_IPWLASSIGN_SET,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistAssignmentChange'],
'comment': u'New assignment',
'identity': model.Identity(kind='user', name='[email protected]'),
'ip_whitelist': u'Another IP whitelist',
'target': u'AuthIPWhitelistAssignments$default$user:[email protected]',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
def change():
a = model.ip_whitelist_assignments_key().get()
      a.assignments = [
model.AuthIPWhitelistAssignments.Assignment(
identity=ident('[email protected]'),
ip_whitelist='Another IP whitelist'),
model.AuthIPWhitelistAssignments.Assignment(
identity=ident('[email protected]'),
ip_whitelist='IP whitelist'),
]
a.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='change')
a.put()
changes = self.grab_all(self.auth_db_transaction(change))
self.assertEqual({
'AuthDBChange:AuthIPWhitelistAssignments$'
'default$user:[email protected]!5000': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_IPWLASSIGN_SET,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistAssignmentChange'],
'comment': u'change',
'identity': model.Identity(kind='user', name='[email protected]'),
'ip_whitelist': u'Another IP whitelist',
'target': u'AuthIPWhitelistAssignments$default$user:[email protected]',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthIPWhitelistAssignments$'
'default$user:[email protected]!5100': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_IPWLASSIGN_UNSET,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistAssignmentChange'],
'comment': u'change',
'identity': model.Identity(kind='user', name='[email protected]'),
'ip_whitelist': u'Another IP whitelist',
'target': u'AuthIPWhitelistAssignments$default$user:[email protected]',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthIPWhitelistAssignments$'
'default$user:[email protected]!5000': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_IPWLASSIGN_SET,
'class_': [u'AuthDBChange', u'AuthDBIPWhitelistAssignmentChange'],
'comment': u'change',
'identity': model.Identity(kind='user', name='[email protected]'),
'ip_whitelist': u'IP whitelist',
'target': u'AuthIPWhitelistAssignments$default$user:[email protected]',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
def test_global_config_diff(self):
def create():
c = model.AuthGlobalConfig(
key=model.root_key(),
oauth_client_id='client_id',
oauth_client_secret='client_secret',
oauth_additional_client_ids=['1', '2'])
c.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='Config change')
c.put()
changes = self.grab_all(self.auth_db_transaction(create))
self.assertEqual({
'AuthDBChange:AuthGlobalConfig$root!7000': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_CONF_OAUTH_CLIENT_CHANGED,
'class_': [u'AuthDBChange', u'AuthDBConfigChange'],
'comment': u'Config change',
'oauth_client_id': u'client_id',
'oauth_client_secret': u'client_secret',
'target': u'AuthGlobalConfig$root',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGlobalConfig$root!7100': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_CONF_CLIENT_IDS_ADDED,
'class_': [u'AuthDBChange', u'AuthDBConfigChange'],
'comment': u'Config change',
'oauth_additional_client_ids': [u'1', u'2'],
'target': u'AuthGlobalConfig$root',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
def modify():
c = model.root_key().get()
c.oauth_additional_client_ids = ['1', '3']
c.token_server_url = 'https://token-server'
c.security_config = security_config(['hi'])
c.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='Config change')
c.put()
changes = self.grab_all(self.auth_db_transaction(modify))
self.assertEqual({
'AuthDBChange:AuthGlobalConfig$root!7100': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_CONF_CLIENT_IDS_ADDED,
'class_': [u'AuthDBChange', u'AuthDBConfigChange'],
'comment': u'Config change',
'oauth_additional_client_ids': [u'3'],
'target': u'AuthGlobalConfig$root',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGlobalConfig$root!7200': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_CONF_CLIENT_IDS_REMOVED,
'class_': [u'AuthDBChange', u'AuthDBConfigChange'],
'comment': u'Config change',
'oauth_additional_client_ids': [u'2'],
'target': u'AuthGlobalConfig$root',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGlobalConfig$root!7300': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type':
change_log.AuthDBChange.CHANGE_CONF_TOKEN_SERVER_URL_CHANGED,
'class_': [u'AuthDBChange', u'AuthDBConfigChange'],
'comment': u'Config change',
'target': u'AuthGlobalConfig$root',
'token_server_url_new': u'https://token-server',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthGlobalConfig$root!7400': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type':
change_log.AuthDBChange.CHANGE_CONF_SECURITY_CONFIG_CHANGED,
'class_': [u'AuthDBChange', u'AuthDBConfigChange'],
'comment': u'Config change',
'security_config_new': security_config(['hi']),
'target': u'AuthGlobalConfig$root',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
def test_realms_globals_diff(self):
def create():
c = model.AuthRealmsGlobals(
key=model.realms_globals_key(),
permissions=[
realms_pb2.Permission(name='luci.dev.p1'),
realms_pb2.Permission(name='luci.dev.p2'),
realms_pb2.Permission(name='luci.dev.p3'),
])
c.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='New realms config')
c.put()
self.auth_db_transaction(create)
def modify():
ent = model.realms_globals_key().get()
ent.permissions = [
realms_pb2.Permission(name='luci.dev.p1'),
realms_pb2.Permission(name='luci.dev.p3'),
realms_pb2.Permission(name='luci.dev.p4'),
]
ent.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='Realms config change')
ent.put()
changes = self.grab_all(self.auth_db_transaction(modify))
self.assertEqual({
'AuthDBChange:AuthRealmsGlobals$globals!9000': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type':
change_log.AuthDBChange.CHANGE_REALMS_GLOBALS_CHANGED,
'class_': [u'AuthDBChange', u'AuthRealmsGlobalsChange'],
'comment': u'Realms config change',
'permissions_added': [u'luci.dev.p4'],
'permissions_removed': [u'luci.dev.p2'],
'target': u'AuthRealmsGlobals$globals',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
def test_project_realms_diff(self):
# Note: in reality Realms.api_version is fixed. We change it in this test
# since it is the simplest field to change.
def create():
p = model.AuthProjectRealms(
key=model.project_realms_key('proj1'),
realms=realms_pb2.Realms(api_version=123),
config_rev='config_rev1',
perms_rev='perms_rev1')
p.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='Created')
p.put()
changes = self.grab_all(self.auth_db_transaction(create))
self.assertEqual({
'AuthDBChange:AuthProjectRealms$proj1!10000': {
'app_version': u'v1a',
'auth_db_rev': 1,
'change_type': change_log.AuthDBChange.CHANGE_PROJECT_REALMS_CREATED,
'class_': [u'AuthDBChange', u'AuthProjectRealmsChange'],
'comment': u'Created',
'config_rev_new': u'config_rev1',
'perms_rev_new': u'perms_rev1',
'target': u'AuthProjectRealms$proj1',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
def update(api_version, config_rev, perms_rev):
p = model.project_realms_key('proj1').get()
p.realms = realms_pb2.Realms(api_version=api_version)
p.config_rev = config_rev
p.perms_rev = perms_rev
p.record_revision(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='Updated')
p.put()
# Update everything.
changes = self.grab_all(self.auth_db_transaction(
lambda: update(1234, 'config_rev2', 'perms_rev2')))
self.assertEqual({
'AuthDBChange:AuthProjectRealms$proj1!10100': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type': change_log.AuthDBChange.CHANGE_PROJECT_REALMS_CHANGED,
'class_': [u'AuthDBChange', u'AuthProjectRealmsChange'],
'comment': u'Updated',
'config_rev_new': u'config_rev2',
'config_rev_old': u'config_rev1',
'target': u'AuthProjectRealms$proj1',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
'AuthDBChange:AuthProjectRealms$proj1!10200': {
'app_version': u'v1a',
'auth_db_rev': 2,
'change_type':
change_log.AuthDBChange.CHANGE_PROJECT_REALMS_REEVALUATED,
'class_': [u'AuthDBChange', u'AuthProjectRealmsChange'],
'comment': u'Updated',
'perms_rev_new': u'perms_rev2',
'perms_rev_old': u'perms_rev1',
'target': u'AuthProjectRealms$proj1',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
# Update realms_pb2.Realms, but do not change revisions.
changes = self.grab_all(self.auth_db_transaction(
lambda: update(12345, 'config_rev2', 'perms_rev2')))
self.assertEqual({
'AuthDBChange:AuthProjectRealms$proj1!10100': {
'app_version': u'v1a',
'auth_db_rev': 3,
'change_type': change_log.AuthDBChange.CHANGE_PROJECT_REALMS_CHANGED,
'class_': [u'AuthDBChange', u'AuthProjectRealmsChange'],
'comment': u'Updated',
'config_rev_new': u'config_rev2',
'config_rev_old': u'config_rev2',
'target': u'AuthProjectRealms$proj1',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
# Update revisions, but don't actually touch realms.
changes = self.grab_all(self.auth_db_transaction(
lambda: update(12345, 'config_rev3', 'perms_rev3')))
self.assertEqual({}, changes)
def delete():
p = model.project_realms_key('proj1').get()
p.record_deletion(
modified_by=ident('[email protected]'),
modified_ts=utils.utcnow(),
comment='Deleted')
p.key.delete()
changes = self.grab_all(self.auth_db_transaction(delete))
self.assertEqual({
'AuthDBChange:AuthProjectRealms$proj1!10300': {
'app_version': u'v1a',
'auth_db_rev': 5,
'change_type': change_log.AuthDBChange.CHANGE_PROJECT_REALMS_REMOVED,
'class_': [u'AuthDBChange', u'AuthProjectRealmsChange'],
'comment': u'Deleted',
'config_rev_old': u'config_rev3',
'perms_rev_old': u'perms_rev3',
'target': u'AuthProjectRealms$proj1',
'when': datetime.datetime(2015, 1, 2, 3, 4, 5),
'who': model.Identity(kind='user', name='[email protected]'),
},
}, changes)
class AuthDBChangeTest(test_case.TestCase):
# Test to_jsonish for AuthDBGroupChange and AuthDBIPWhitelistAssignmentChange,
# the rest are trivial.
def test_group_change_to_jsonish(self):
c = change_log.AuthDBGroupChange(
change_type=change_log.AuthDBChange.CHANGE_GROUP_MEMBERS_ADDED,
target='AuthGroup$abc',
auth_db_rev=123,
who=ident('[email protected]'),
when=datetime.datetime(2015, 1, 2, 3, 4, 5),
comment='A comment',
app_version='v123',
description='abc',
members=[ident('[email protected]')],
globs=[glob('*@a.com')],
nested=['A'],
owners='abc',
old_owners='def')
self.assertEqual({
'app_version': 'v123',
'auth_db_rev': 123,
'change_type': 'GROUP_MEMBERS_ADDED',
'comment': 'A comment',
'description': 'abc',
'globs': ['user:*@a.com'],
'members': ['user:[email protected]'],
'nested': ['A'],
'old_description': None,
'old_owners': 'def',
'owners': 'abc',
'target': 'AuthGroup$abc',
'when': 1420167845000000,
'who': 'user:[email protected]',
}, c.to_jsonish())
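  # Note: 1420167845000000 is datetime(2015, 1, 2, 3, 4, 5) expressed as
  # microseconds since the Unix epoch, which is how to_jsonish encodes 'when'.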
def test_wl_assignment_to_jsonish(self):
c = change_log.AuthDBIPWhitelistAssignmentChange(
change_type=change_log.AuthDBChange.CHANGE_GROUP_MEMBERS_ADDED,
target='AuthIPWhitelistAssignments$default',
auth_db_rev=123,
who=ident('[email protected]'),
when=datetime.datetime(2015, 1, 2, 3, 4, 5),
comment='A comment',
app_version='v123',
identity=ident('[email protected]'),
ip_whitelist='whitelist')
self.assertEqual({
'app_version': 'v123',
'auth_db_rev': 123,
'change_type': 'GROUP_MEMBERS_ADDED',
'comment': 'A comment',
'identity': 'user:[email protected]',
'ip_whitelist': 'whitelist',
'target': 'AuthIPWhitelistAssignments$default',
'when': 1420167845000000,
'who': 'user:[email protected]',
}, c.to_jsonish())
def test_security_config_change_to_jsonish(self):
c = change_log.AuthDBConfigChange(
change_type=change_log.AuthDBChange.CHANGE_CONF_SECURITY_CONFIG_CHANGED,
target='AuthGlobalConfig$default',
auth_db_rev=123,
who=ident('[email protected]'),
when=datetime.datetime(2015, 1, 2, 3, 4, 5),
comment='A comment',
app_version='v123',
security_config_old=None,
security_config_new=security_config(['hi']))
self.assertEqual({
'app_version': 'v123',
'auth_db_rev': 123,
'change_type': 'CONF_SECURITY_CONFIG_CHANGED',
'comment': 'A comment',
'oauth_additional_client_ids': [],
'oauth_client_id': None,
'oauth_client_secret': None,
'security_config_new': {'internal_service_regexp': [u'hi']},
'security_config_old': None,
'target': 'AuthGlobalConfig$default',
'token_server_url_new': None,
'token_server_url_old': None,
'when': 1420167845000000,
'who': 'user:[email protected]',
}, c.to_jsonish())
class ChangeLogQueryTest(test_case.TestCase):
  # Some indexes are required, but the component can't declare them itself,
  # so skip the index.yaml check.
SKIP_INDEX_YAML_CHECK = True
def test_is_changle_log_indexed(self):
self.assertTrue(change_log.is_changle_log_indexed())
def test_make_change_log_query(self):
def mk_ch(tp, rev, target):
ch = change_log.AuthDBChange(
change_type=getattr(change_log.AuthDBChange, 'CHANGE_%s' % tp),
auth_db_rev=rev,
target=target)
ch.key = change_log.make_change_key(ch)
ch.put()
def key(c):
return '%s/%s' % (c.key.parent().id(), c.key.id())
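    # Rendered keys look like '<auth_db_rev>/<target>!<suffix>', where the
    # numeric suffix is derived from the change type by make_change_key
    # (e.g. GROUP_CREATED -> 1000, GROUP_MEMBERS_ADDED -> 1200 below).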
mk_ch('GROUP_CREATED', 1, 'AuthGroup$abc')
mk_ch('GROUP_MEMBERS_ADDED', 1, 'AuthGroup$abc')
mk_ch('GROUP_CREATED', 1, 'AuthGroup$another')
mk_ch('GROUP_DELETED', 2, 'AuthGroup$abc')
mk_ch('GROUP_MEMBERS_ADDED', 2, 'AuthGroup$another')
    # All. Most recent first. Largest event types first.
q = change_log.make_change_log_query()
self.assertEqual([
'2/AuthGroup$another!1200',
'2/AuthGroup$abc!1800',
'1/AuthGroup$another!1000',
'1/AuthGroup$abc!1200',
'1/AuthGroup$abc!1000',
], map(key, q.fetch()))
# Single revision only.
q = change_log.make_change_log_query(auth_db_rev=1)
self.assertEqual([
'1/AuthGroup$another!1000',
'1/AuthGroup$abc!1200',
'1/AuthGroup$abc!1000',
], map(key, q.fetch()))
# Single target only.
q = change_log.make_change_log_query(target='AuthGroup$another')
self.assertEqual([
'2/AuthGroup$another!1200',
'1/AuthGroup$another!1000',
], map(key, q.fetch()))
# Single revision and single target.
q = change_log.make_change_log_query(
auth_db_rev=1, target='AuthGroup$another')
self.assertEqual(['1/AuthGroup$another!1000'], map(key, q.fetch()))
if __name__ == '__main__':
if '-v' in sys.argv:
unittest.TestCase.maxDiff = None
unittest.main()
| apache-2.0 | -6,992,933,365,418,927,000 | 38.238832 | 80 | 0.593073 | false |
HydrelioxGitHub/PiDDL | ZTPAGE.py | 1 | 2847 | # coding: utf-8
from urllib2 import urlopen
import urllib2
import bs4 as BeautifulSoup
class ZTPage:
def __init__(self, url):
self.url = url
self.update()
def update(self):
self.update_content()
self.parse_type()
self.parse_infos()
self.parse_links()
def update_content(self):
req = urllib2.Request(self.url, headers={'User-Agent': "Magic Browser"})
html = urlopen(req).read()
soup = BeautifulSoup.BeautifulSoup(html, "html5lib")
self.content = soup.find('div', class_="maincont")
def parse_type(self):
if "series" in self.url:
self.type = "Show"
if "films" in self.url:
self.type = "Movie"
def parse_links(self):
liste = {}
host = 'error'
html = self.content.find('div', class_="contentl").find_all(["span", "a"])
for elem in html:
if ('span' == elem.name) and (unicode(elem.string) != 'None'):
host = elem.string
liste[host] = {}
if elem.name == 'a':
elem.string = elem.string.replace("Episode", '').replace('Final', '').strip()
episode_number = int(elem.string)
liste[host][episode_number] = elem.attrs['href']
self.links = liste
def parse_infos(self):
# Retreive Title
title = self.content.find('div', class_="titrearticles").h1.string
if self.type == "Show":
title = title.split("-")
self.title = title[0].strip()
            # Retrieve Season for TV Shows
self.season = int(title[1].replace("Saison", "").replace('[Complete]', '').strip())
if self.type == "Movie":
self.title = title.strip()
# Retreive Language, Format, Codec ...
info = self.content.find('div', class_="corps").div.span.span.b.strong.string
first_part = info.split('|')[0]
second_part = info.split('|')[1]
self.language = first_part.split(' ')[1].strip()
self.currentEpisode = first_part.split(' ')[0].strip()
self.currentEpisode = self.currentEpisode.replace('[', '')
self.currentEpisode = int(self.currentEpisode.split('/')[0])
        # Encoding workaround: the label on the page is "Qualité", but the
        # accented character arrives mangled, so strip "Qualit" and then the
        # leftover byte.
        quality = second_part.replace("Qualit", '').strip()
        quality = quality[1:]
self.quality = quality.strip()
def get_available_hosts(self):
return self.links.keys()
def get_tvshow_link(self, host, episodenumber):
alllinks = self.links[host]
link = alllinks[episodenumber]
return link
def print_report(self):
print self.url
print self.title
print self.season
print self.quality
print self.language
print self.currentEpisode
print self.links
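# Example usage (the URL below is illustrative only, not a real page):
#   page = ZTPage("http://www.example.com/series/1234-some-show.html")
#   page.print_report()
#   if page.type == "Show" and page.get_available_hosts():
#       link = page.get_tvshow_link(page.get_available_hosts()[0], 1)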
| gpl-2.0 | 8,344,752,789,403,362,000 | 32.892857 | 95 | 0.556024 | false |
darknao/piOClock | ssd1351.py | 1 | 13500 | #!/bin/env python
# -*- coding: UTF-8 -*-
# ----------------------------------------------------------------------
# ssd1351.py from https://github.com/guyc/py-gaugette
# ported by Jason Porritt,
# and reworked by darknao,
# based on original work by Guy Carpenter for display.py
#
# This library works with
# Adafruit's 128x96 SPI color OLED http://www.adafruit.com/products/1673
#
# The code is based heavily on Adafruit's Arduino library
# https://github.com/adafruit/Adafruit_SSD1351
# written by Limor Fried/Ladyada for Adafruit Industries.
#
# It has the following dependencies:
# wiringpi2 for GPIO
# spidev for SPI
# PIL for easy drawing capabilities
# numpy for fast RGB888 to RGB565 conversion
# ----------------------------------------------------------------------
# NEED HEAVY CLEANING !
import wiringpi2
import spidev
import time
import sys
from PIL import Image, ImageDraw, ImageFont
import logging
import numpy as np
import tools
class SSD1351:
# SSD1351 Commands
EXTERNAL_VCC = 0x1
SWITCH_CAP_VCC = 0x2
MEMORY_MODE_HORIZ = 0x00
MEMORY_MODE_VERT = 0x01
CMD_SETCOLUMN = 0x15
CMD_SETROW = 0x75
CMD_WRITERAM = 0x5C
CMD_READRAM = 0x5D
CMD_SETREMAP = 0xA0
CMD_STARTLINE = 0xA1
CMD_DISPLAYOFFSET = 0xA2
CMD_DISPLAYALLOFF = 0xA4
CMD_DISPLAYALLON = 0xA5
CMD_NORMALDISPLAY = 0xA6
CMD_INVERTDISPLAY = 0xA7
CMD_FUNCTIONSELECT = 0xAB
CMD_DISPLAYOFF = 0xAE
CMD_DISPLAYON = 0xAF
CMD_PRECHARGE = 0xB1
CMD_DISPLAYENHANCE = 0xB2
CMD_CLOCKDIV = 0xB3
CMD_SETVSL = 0xB4
CMD_SETGPIO = 0xB5
CMD_PRECHARGE2 = 0xB6
CMD_SETGRAY = 0xB8
CMD_USELUT = 0xB9
CMD_PRECHARGELEVEL = 0xBB
CMD_VCOMH = 0xBE
CMD_CONTRASTABC = 0xC1
CMD_CONTRASTMASTER = 0xC7
CMD_MUXRATIO = 0xCA
CMD_COMMANDLOCK = 0xFD
CMD_HORIZSCROLL = 0x96
CMD_STOPSCROLL = 0x9E
CMD_STARTSCROLL = 0x9F
# Device name will be /dev/spidev-{bus}.{device}
    # dc_pin is the data/command pin. This line is HIGH for data, LOW for command.
# We will keep d/c low and bump it high only for commands with data
# reset is normally HIGH, and pulled LOW to reset the display
def __init__(self, bus=0, device=0, dc_pin="P9_15", reset_pin="P9_13", rows=128, cols=128):
self.cols = cols
self.rows = rows
self.dc_pin = dc_pin
self.reset_pin = reset_pin
# SPI
self.spi = spidev.SpiDev(bus, device)
self.spi.max_speed_hz = 16000000 # 16Mhz
# GPIO
self.gpio = wiringpi2.GPIO(wiringpi2.GPIO.WPI_MODE_PINS)
self.gpio.pinMode(self.reset_pin, self.gpio.OUTPUT)
self.gpio.digitalWrite(self.reset_pin, self.gpio.HIGH)
self.gpio.pinMode(self.dc_pin, self.gpio.OUTPUT)
self.gpio.digitalWrite(self.dc_pin, self.gpio.LOW)
# Drawing tools
self.im = Image.new("RGB", (cols, rows), 'black')
self.draw = ImageDraw.Draw(self.im)
# logging
self.log = logging.getLogger(self.__class__.__name__)
self.log.setLevel(logging.INFO)
self.contrast = 15
def reset(self):
self.gpio.digitalWrite(self.reset_pin, self.gpio.LOW)
time.sleep(0.010) # 10ms
self.gpio.digitalWrite(self.reset_pin, self.gpio.HIGH)
def command(self, cmd, cmddata=None):
# already low
# self.gpio.digitalWrite(self.dc_pin, self.gpio.LOW)
if type(cmd) == list:
self.spi.writebytes(cmd)
else:
self.spi.writebytes([cmd])
if cmddata is not None:
if type(cmddata) == list:
self.data(cmddata)
else:
self.data([cmddata])
def data(self, bytes):
self.gpio.digitalWrite(self.dc_pin, self.gpio.HIGH)
max_xfer = 1024
start = 0
remaining = len(bytes)
        while remaining > 0:
count = remaining if remaining <= max_xfer else max_xfer
remaining -= count
self.spi.writebytes(bytes[start:start+count])
start += count
self.gpio.digitalWrite(self.dc_pin, self.gpio.LOW)
def begin(self, vcc_state = SWITCH_CAP_VCC):
time.sleep(0.001) # 1ms
self.reset()
self.command(self.CMD_COMMANDLOCK, 0x12)
self.command(self.CMD_COMMANDLOCK, 0xB1)
self.command(self.CMD_DISPLAYOFF)
self.command(self.CMD_CLOCKDIV, 0xF1)
# support for 128x128 line mode
self.command(self.CMD_MUXRATIO, 127)
self.command(self.CMD_SETREMAP, 0x74)
self.command(self.CMD_SETCOLUMN, [0x00, self.cols-1])
self.command(self.CMD_SETROW, [0x00, self.rows-1])
# TODO Support 96-row display
self.command(self.CMD_STARTLINE, 96)
self.command(self.CMD_DISPLAYOFFSET, 0x00)
self.command(self.CMD_SETGPIO, 0x00)
self.command(self.CMD_FUNCTIONSELECT, 0x01)
self.command(self.CMD_PRECHARGE, 0x32)
self.command(self.CMD_VCOMH, 0x05)
self.command(self.CMD_NORMALDISPLAY)
self.set_contrast(200) # c8 -> 200
self.set_master_contrast(10)
self.command(self.CMD_SETVSL, [0xA0, 0xB5, 0x55])
self.command(self.CMD_PRECHARGE2, 0x01)
self.command(self.CMD_DISPLAYON)
def set_master_contrast(self, level):
# 0 to 15
level &= 0x0F
self.command(self.CMD_CONTRASTMASTER, level)
def set_contrast(self, level):
# 0 to 255
level &= 0xFF
self.command(self.CMD_CONTRASTABC, [level, level, level])
self.contrast = level
def invert_display(self):
self.command(self.CMD_INVERTDISPLAY)
def normal_display(self):
self.command(self.CMD_NORMALDISPLAY)
def scale(self, x, inLow, inHigh, outLow, outHigh):
return ((x - inLow) / float(inHigh) * outHigh) + outLow
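    # Note: this linear scale assumes inLow == 0 and outLow == 0, which holds
    # for every call site in this file (e.g. scaling 0..255 down to 0..31).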
def encode_color(self, color):
red = (color >> 16) & 0xFF
green = (color >> 8) & 0xFF
blue = color & 0xFF
redScaled = int(self.scale(red, 0, 0xFF, 0, 0x1F))
greenScaled = int(self.scale(green, 0, 0xFF, 0, 0x3F))
blueScaled = int(self.scale(blue, 0, 0xFF, 0, 0x1F))
return (((redScaled << 6) | greenScaled) << 5) | blueScaled
def color565(self, r, g, b):
# 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 0
# r r r r r g g g g g g b b b b b
# r = 31 g = 63 b = 31
redScaled = int(self.scale(r, 0, 0xFF, 0, 0x1F))
greenScaled = int(self.scale(g, 0, 0xFF, 0, 0x3F))
blueScaled = int(self.scale(b, 0, 0xFF, 0, 0x1F))
return (((redScaled << 6) | greenScaled) << 5) | blueScaled
def goTo(self, x, y):
if x >= self.cols or y >= self.rows:
return
# set x and y coordinate
self.command(self.CMD_SETCOLUMN, [x, self.cols-1])
self.command(self.CMD_SETROW, [y, self.rows-1])
self.command(self.CMD_WRITERAM)
def drawPixel(self, x, y, color):
if x >= self.cols or y >= self.rows:
return
if x < 0 or y < 0:
return
color = self.encode_color(color)
# set location
self.goTo(x, y)
self.data([color >> 8, color & 0xFF])
def clear(self):
"""Clear display buffer"""
self.im = Image.new("RGB", (self.cols, self.rows), 'black')
self.draw = ImageDraw.Draw(self.im)
def text_center(self, string, color, font=None, size=10):
if font is None:
font = ImageFont.truetype("/usr/share/fonts/truetype/droid/DroidSansMono.ttf", size)
text_size = self.draw.textsize(string, font=font)
text_x = max((self.cols-text_size[0])/2, 0)
text_y = max((self.rows-text_size[1])/2, 0)
self.draw_text(text_x, text_y, string, color, font=font, size=size)
return text_x, text_y
def text_center_y(self, text_y, string, color, font=None, size=10):
if font is None:
font = ImageFont.truetype("/usr/share/fonts/truetype/droid/DroidSansMono.ttf", size)
text_size = self.draw.textsize(string, font=font)
text_x = max((self.cols-text_size[0])/2, 0)
self.draw_text(text_x, text_y, string, color, font=font, size=size)
return text_x, text_y
def draw_text(self, x, y, string, color, font=None, size=10):
if font is None:
font = ImageFont.truetype("/usr/share/fonts/truetype/droid/DroidSansMono.ttf", size)
self.draw.text((x, y), string, font=font, fill=color)
return self.draw.textsize(string, font=font)
def fillScreen(self, fillcolor):
self.rawFillRect(0, 0, self.cols, self.rows, fillcolor)
def rawFillRect(self, x, y, w, h, fillcolor):
self.log.debug("fillScreen start")
# Bounds check
if (x >= self.cols) or (y >= self.rows):
return
# Y bounds check
if y+h > self.rows:
h = self.rows - y - 1
# X bounds check
if x+w > self.cols:
w = self.cols - x - 1
self.setDisplay(x, y, x+(w-1), y+(h-1))
color = self.encode_color(fillcolor)
self.data([color >> 8, color & 0xFF] * w*h)
self.log.debug("fillScreen end")
def setDisplay(self, startx, starty, endx, endy):
if startx >= self.cols or starty >= self.rows:
return
# Y bounds check
if endx > self.cols - 1:
endx = self.cols - 1
# X bounds check
if endy > self.rows - 1:
endy = self.rows - 1
# set x and y coordinate
# print "x:%d y:%d endx:%d endy:%d" % (startx, starty, endx, endy)
self.command(self.CMD_SETCOLUMN, [startx, endx])
self.command(self.CMD_SETROW, [starty, endy])
self.command(self.CMD_WRITERAM)
def im2list(self):
"""Convert PIL RGB888 Image to SSD1351 RAM buffer"""
image = np.array(self.im).reshape(-1, 3)
image[:,0] *= 0.121
image[:,1] *= 0.247
image[:,2] *= 0.121
d = np.left_shift(image, [11, 5, 0]).sum(axis=1)
        data = np.dstack(((d >> 8) & 0xff, d & 0xff)).flatten()
return data.tolist()
def display(self, x=0, y=0, w=None, h=None):
"""Send display buffer to the device"""
self.log.debug("disp in")
if h is None:
h = self.rows
if w is None:
w = self.cols
x = max(x, 0)
y = max(y, 0)
w = min(w, self.cols)
h = min(h, self.rows)
if w-x < 0:
return
self.log.debug("set display")
self.setDisplay(x, y, w-1, h-1)
self.log.debug("set display end")
data = []
start = y * self.cols + x
end = h * self.cols + w
self.log.debug("get data")
self.data(self.im2list())
self.log.debug("disp out")
@tools.timed
def dump_disp(self):
"""Dump display buffer on screen,
        for debugging purposes"""
image = np.array(self.im).reshape(-1, 3)
for r in range(0, self.rows,2):
txt = [None,] * self.cols
start = r*self.cols
end = start + self.cols * 2
line = image[start:end]
for c in range(len(line)):
idx = c % self.cols
if line[c].sum() > 0:
if txt[idx] is None:
txt[idx] = '▀'
elif txt[idx] == '▀':
txt[idx] = '█'
else:
txt[idx] = '▄'
else:
if txt[idx] is None:
txt[idx] = ' '
print ''.join(txt) + '║'
@tools.timed
def dump_disp2(self):
#image = list(self.im.convert("I").getdata())
image = np.array(self.im)
for row, r in enumerate(image):
if row % 2 == 0:
txt = [None,] * self.cols
for idx, c in enumerate(r):
if c.sum() > 0:
if txt[idx] is None:
txt[idx] = '▀'
elif txt[idx] == '▀':
txt[idx] = '█'
else:
txt[idx] = '▄'
else:
if txt[idx] is None:
txt[idx] = ' '
print ''.join(txt) + '║'
if __name__ == '__main__':
import datetime
import time
import ssd1351
import random
from PIL import ImageFont
import psutil
import logging
import os
log = logging.getLogger("clock")
logging.basicConfig(
format='%(asctime)-23s - %(levelname)-7s - %(name)s - %(message)s')
log.setLevel(logging.INFO)
RESET_PIN = 15
DC_PIN = 16
    led = ssd1351.SSD1351(reset_pin=RESET_PIN, dc_pin=DC_PIN, rows=96)
led.begin()
led.fillScreen(0)
color = 0x000000
bands = 10
color_step = 0xFF / bands
color_width = led.cols / 3
for x in range(0, led.rows, led.rows/bands):
led.rawFillRect(0, x, color_width, bands, color&0xff0000)
led.rawFillRect(color_width, x, color_width*2, bands, color&0xff00)
led.rawFillRect(color_width*2, x, color_width*3, bands, color&0xff)
color = (color + (color_step << 16) + (color_step << 8) + (color_step)) & 0xFFFFFF
| gpl-3.0 | 4,144,339,368,829,440,500 | 32.039216 | 96 | 0.548368 | false |
bnx05/pytest-selenium | test_parameters.py | 1 | 2603 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
import time
from selenium import webdriver
sample_email_address = "[email protected]"
sample_password = "Password123"
email_addresses = ["invalid_email", "another_invalid_email@", "not_another_invalid_email@blah"]
passwords = ["weak_password", "generic_password", "bleep_password"]
browser = webdriver.Firefox()
browser.maximize_window()
# this test checks the maxlength attribute of the login and password fields
@pytest.mark.parametrize("field_name, maxlength", [
("login", "75"),
("password", "128"),
])
def test_assert_field_maxlength(field_name, maxlength):
browser.get("https://start.engagespark.com/sign-in/")
time.sleep(5)
    assert browser.find_element_by_name(field_name).get_attribute("maxlength") == maxlength
# this test asserts the string length of values entered in the login and
# password fields
@pytest.mark.parametrize("field_name, sample_string, string_length", [
("login", sample_email_address, 20),
("password", sample_password, 11),
])
def test_assert_email_and_password_length(field_name, sample_string, string_length):
browser.get("https://start.engagespark.com/sign-in/")
time.sleep(5)
browser.find_element_by_name(field_name).click()
browser.find_element_by_name(field_name).send_keys(sample_string)
assert len(browser.find_element_by_name(field_name).get_attribute("value")) == string_length
# this test checks if the login button is enabled after entering different
# combinations of invalid values in the email and password fields
@pytest.mark.parametrize("email", email_addresses)
@pytest.mark.parametrize("password", passwords)
def test_assert_login_button_enabled(email, password):
browser.get("https://start.engagespark.com/sign-in/")
time.sleep(5)
browser.find_element_by_name("login").click()
browser.find_element_by_name("login").send_keys(email)
browser.find_element_by_name("password").click()
browser.find_element_by_name("password").send_keys(password)
assert browser.find_element_by_xpath("//button[contains(text(), 'Login')]").is_enabled()
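# Note: the two stacked parametrize decorators above expand this test into
# 3 x 3 = 9 cases, one per (email, password) combination.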
# this test checks if the values entered into the email field contain '@'
@pytest.mark.parametrize("email", [
"[email protected]",
"[email protected]",
"blah",
])
def test_assert_valid_email_entry(email):
browser.get("https://start.engagespark.com/sign-in/")
time.sleep(5)
browser.find_element_by_name("login").click()
browser.find_element_by_name("login").send_keys(email)
assert "@" in browser.find_element_by_name("login").get_attribute("value")
| mit | -6,535,209,611,040,000,000 | 36.724638 | 96 | 0.717633 | false |
DonaldTrumpHasTinyHands/tiny_hands_pac | documents_gallery/models.py | 1 | 4091 | from django.db import models
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailcore.models import Page
from wagtail.wagtailadmin.edit_handlers import FieldPanel, MultiFieldPanel
from wagtail.wagtaildocs.models import Document
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
from wagtail.wagtailsearch import index
from modelcluster.fields import ParentalKey
from modelcluster.tags import ClusterTaggableManager
from taggit.models import TaggedItemBase, Tag
class DocumentsIndexPage(Page):
"""
    This is the index page for the Documents Gallery. It contains links to the
    Gallery pages. Each Gallery page displays its documents according to the
    tags defined on it.
"""
intro = RichTextField(blank=True)
search_fields = Page.search_fields + (
index.SearchField('intro'),
)
feed_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
@property
def children(self):
return self.get_children().live()
def get_context(self, request):
# Get list of live Gallery pages that are descendants of this page
pages = DocumentsPage.objects.live().descendant_of(self)
# Update template context
context = super(DocumentsIndexPage, self).get_context(request)
context['pages'] = pages
return context
class Meta:
verbose_name = "Documents Index Page"
DocumentsIndexPage.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('intro', classname="full")
]
DocumentsIndexPage.promote_panels = [
MultiFieldPanel(Page.promote_panels, "SEO and metadata fields"),
ImageChooserPanel('feed_image'),
]
class DocumentsPageTag(TaggedItemBase):
content_object = ParentalKey('documents_gallery.DocumentsPage', related_name='tagged_items')
class DocumentsPage(Page):
"""
    This is the Documents page. It takes the tag names you have assigned to
    your documents and fetches the matching document objects, so your document
    gallery is built from the tags you define.
"""
tags = ClusterTaggableManager(through=DocumentsPageTag, blank=True)
feed_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
@property
def gallery_index(self):
        # Find closest ancestor which is a Documents index
        return self.get_ancestors().type(DocumentsIndexPage).last()
def get_context(self, request):
# Get tags and convert them into list so we can iterate over them
tags = self.tags.values_list('name', flat=True)
# Creating empty Queryset from Wagtail Document model
documents = Document.objects.none()
        # Populate the empty documents queryset with the documents of every
        # tag in the tags list.
        if tags:
            for tag in tags:
                documents = documents | Document.objects.filter(tags__name=tag)
# Pagination
page = request.GET.get('page')
paginator = Paginator(documents, 25) # Show 25 documents per page
try:
documents = paginator.page(page)
except PageNotAnInteger:
documents = paginator.page(1)
except EmptyPage:
documents = paginator.page(paginator.num_pages)
# Update template context
context = super(DocumentsPage, self).get_context(request)
context['documents'] = documents
return context
class Meta:
verbose_name = "Documents Page"
DocumentsPage.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('tags'),
]
DocumentsPage.promote_panels = [
MultiFieldPanel(Page.promote_panels, "SEO and metadata fields"),
ImageChooserPanel('feed_image'),
] | mit | 3,715,756,117,222,630,000 | 29.088235 | 104 | 0.676363 | false |
mrocklin/into | into/backends/sql_csv.py | 1 | 2811 |
from ..regex import RegexDispatcher
from ..append import append
from .csv import CSV
import os
import datashape
import sqlalchemy
import subprocess
copy_command = RegexDispatcher('copy_command')
execute_copy = RegexDispatcher('execute_copy')
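# Both dispatchers pick an implementation by matching the SQLAlchemy dialect
# name (e.g. 'postgresql', 'mysql', 'sqlite') against the regex supplied to
# register(); the priority keyword (used on execute_copy_all below) orders the
# candidates. Semantics assumed from the RegexDispatcher import above.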
@copy_command.register('.*sqlite')
def copy_sqlite(dialect, tbl, csv):
abspath = os.path.abspath(csv.path)
tblname = tbl.name
dbpath = str(tbl.bind.url).split('///')[-1]
statement = """
(echo '.mode csv'; echo '.import {abspath} {tblname}';) | sqlite3 {dbpath}
"""
return statement.format(**locals())
@execute_copy.register('sqlite')
def execute_copy_sqlite(dialect, engine, statement):
ps = subprocess.Popen(statement, shell=True, stdout=subprocess.PIPE)
return ps.stdout.read()
@copy_command.register('postgresql')
def copy_postgres(dialect, tbl, csv):
abspath = os.path.abspath(csv.path)
tblname = tbl.name
format_str = 'csv'
delimiter = csv.dialect.get('delimiter', ',')
na_value = ''
quotechar = csv.dialect.get('quotechar', '"')
escapechar = csv.dialect.get('escapechar', '\\')
header = not not csv.has_header
encoding = csv.encoding or 'utf-8'
statement = """
COPY {tblname} FROM '{abspath}'
(FORMAT {format_str},
DELIMITER E'{delimiter}',
NULL '{na_value}',
QUOTE '{quotechar}',
ESCAPE '{escapechar}',
HEADER {header},
ENCODING '{encoding}');"""
return statement.format(**locals())
@copy_command.register('mysql.*')
def copy_mysql(dialect, tbl, csv):
mysql_local = ''
abspath = os.path.abspath(csv.path)
tblname = tbl.name
delimiter = csv.dialect.get('delimiter', ',')
quotechar = csv.dialect.get('quotechar', '"')
escapechar = csv.dialect.get('escapechar', '\\')
lineterminator = csv.dialect.get('lineterminator', r'\n\r')
skiprows = 1 if csv.has_header else 0
encoding = csv.encoding or 'utf-8'
statement = u"""
LOAD DATA {mysql_local} INFILE '{abspath}'
INTO TABLE {tblname}
CHARACTER SET {encoding}
FIELDS
TERMINATED BY '{delimiter}'
ENCLOSED BY '{quotechar}'
ESCAPED BY '{escapechar}'
LINES TERMINATED by '{lineterminator}'
IGNORE {skiprows} LINES;
"""
return statement.format(**locals())
@execute_copy.register('.*', priority=9)
def execute_copy_all(dialect, engine, statement):
conn = engine.raw_connection()
cursor = conn.cursor()
cursor.execute(statement)
conn.commit()
@append.register(sqlalchemy.Table, CSV)
def append_csv_to_sql_table(tbl, csv, **kwargs):
statement = copy_command(tbl.bind.dialect.name, tbl, csv)
execute_copy(tbl.bind.dialect.name, tbl.bind, statement)
return tbl
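# Minimal usage sketch (names are illustrative, not part of this module):
#   engine = sqlalchemy.create_engine('postgresql://localhost/db')
#   tbl = sqlalchemy.Table('t', sqlalchemy.MetaData(bind=engine), autoload=True)
#   append(tbl, CSV('data.csv'))  # bulk-loads the file via the dialect's COPY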
| bsd-3-clause | -432,463,007,566,880,260 | 27.683673 | 82 | 0.630736 | false |
blenderben/lolstatbot | lolstatbot.py | 1 | 22559 | # League of Legends Statistics Chat Bot
# A chat bot written in Python that provides match statistics right to your Twitch chat.
# 2015 Benjamin Chu - https://github.com/blenderben
import socket # imports module allowing connection to IRC
import threading # imports module allowing timing functions
import requests # imports module allowing requests
import json
import time
import calendar # imports module allowing epoch time
import ConfigParser # imports module allowing reading of .ini files
import os # for relative pathing
import string # for string manipulation
# from routes import API_ROUTES
class API_ROUTES:
# summoner-v1.4 - get summoner id data
summoner_url = 'https://{region}.api.pvp.net/api/lol/{region}/v1.4/summoner/by-name/{summonername}?api_key={key}'
# summoner-v1.4 - summoner mastery data
summonermastery_url = 'https://{region}.api.pvp.net/api/lol/{region}/v1.4/summoner/{summonerid}/masteries?api_key={key}'
# league-v2.5 - summoner league data
summonerleague_url = 'https://{region}.api.pvp.net/api/lol/{region}/v2.5/league/by-summoner/{summonerid}/entry?api_key={key}'
# lol-static-data-v1.2 - static champion data
championstaticdata_url = 'https://global.api.pvp.net/api/lol/static-data/{region}/v1.2/champion/{championid}?champData=all&api_key={key}'
# lol-static-data-v1.2 - static rune data
runestaticdata_url = 'https://global.api.pvp.net/api/lol/static-data/{region}/v1.2/rune/{runeid}?runeData=all&api_key={key}'
# lol-static-data-v1.2 - static mastery data
masterystaticdata_url = 'https://global.api.pvp.net/api/lol/static-data/{region}/v1.2/mastery/{masteryid}?masteryData=all&api_key={key}'
# lol-static-data-v1.2 - static spell data
spellstaticdata_url = 'https://global.api.pvp.net/api/lol/static-data/{region}/v1.2/summoner-spell/{spellid}?api_key={key}'
# current-game-v1.0 - current game data
current_url = 'https://{region}.api.pvp.net/observer-mode/rest/consumer/getSpectatorGameInfo/{region_upper}1/{summonerid}?api_key={key}'
# game-v1.3 - historic game data
last_url = 'https://{region}.api.pvp.net/api/lol/{region}/v1.3/game/by-summoner/{summonerid}/recent?api_key={key}'
# op.gg
opgg_url = 'http://{region}.op.gg/summoner/userName={summonername}'
opgg_masteries_url = 'http://{region}.op.gg/summoner/mastery/userName={summonername}'
opgg_runes_url = 'http://{region}.op.gg/summoner/rune/userName={summonername}'
opgg_matches_url = 'http://{region}.op.gg/summoner/matches/userName={summonername}'
opgg_leagues_url = 'http://{region}.op.gg/summoner/league/userName={summonername}'
opgg_champions_url = 'http://{region}.op.gg/summoner/champions/userName={summonername}'
# LoLNexus
lolnexus_url = 'http://www.lolnexus.com/{region}/search?name={summonername}&server={region}'
# LoLKing
lolking_url = 'http://www.lolking.net/summoner/{region}/{summonerid}'
# LoLSkill
lolskill_url = 'http://www.lolskill.net/summoner/{region}/{summonername}'
# ====== READ CONFIG ======
Config = ConfigParser.ConfigParser()
Config.read(os.path.dirname(os.path.abspath(__file__)) + '/config.ini')
def ConfigSectionMap(section):
temp_dict = {}
options = Config.options(section)
for option in options:
try:
temp_dict[option] = Config.get(section, option)
if temp_dict[option] == -1:
                print('skip: %s' % option)
except:
print('exception on %s!' % option)
temp_dict[option] = None
return temp_dict
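# config.ini is expected to provide the keys read below (placeholder values):
#   [settings]
#   botowner = ...
#   nick = ...
#   channel = ...
#   server = ...
#   port = 6667
#   oauth = ...
#   api = ...
#   summonername = ...
#   region = ...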
# ====== CONNECTION INFO ======
# Set variables for connection
botOwner = ConfigSectionMap('settings')['botowner']
nick = ConfigSectionMap('settings')['nick']
channel = '#' + ConfigSectionMap('settings')['channel']
server = ConfigSectionMap('settings')['server']
port = int(ConfigSectionMap('settings')['port'])
password = ConfigSectionMap('settings')['oauth']
# ====== RIOT API PRELIM DATA ======
api_key = ConfigSectionMap('settings')['api']
# Riot API Information
summonerName = ConfigSectionMap('settings')['summonername'].lower()
summonerName = summonerName.replace(" ", "")
region = ConfigSectionMap('settings')['region']
summoner_url = API_ROUTES.summoner_url.format(region=region, summonername=summonerName, key=api_key)
# Initial Data Load // Get Summoner ID and Level
summonerName_dict = requests.get(summoner_url).json()
summonerID = str(summonerName_dict[summonerName]['id'])
summonerLevel = str(summonerName_dict[summonerName]['summonerLevel'])
# ====== RIOT API FUNCTIONS ======
def about(ircname):
return 'Hello ' + ircname + '! I am a League of Legends statistics chat bot. My creator is blenderben [ https://github.com/blenderben/LoLStatBot ].'\
+ ' I am currently assigned to summoner ' + summonerName.upper() + ' [ID:' + getSummonerID() + '].'
def getCommands():
return 'Available commands: ['\
+ ' !about, !summoner, !league, !last, !current, !runes, !mastery, !opgg, !lolnexus, !lolking, !lolskill ]'
def getSummonerInfo():
return summonerName.upper() + ' is summoner level ' + getSummonerLevel() + ', playing in Region: ' + region.upper() + ' // ' + opgg('')
def opgg(details):
if details == 'runes':
return API_ROUTES.opgg_runes_url.format(region=region, summonername=summonerName)
elif details == 'masteries':
return API_ROUTES.opgg_masteries_url.format(region=region, summonername=summonerName)
elif details == 'matches':
return API_ROUTES.opgg_matches_url.format(region=region, summonername=summonerName)
elif details == 'leagues':
return API_ROUTES.opgg_leagues_url.format(region=region, summonername=summonerName)
elif details == 'champions':
return API_ROUTES.opgg_champions_url.format(region=region, summonername=summonerName)
else:
return API_ROUTES.opgg_url.format(region=region, summonername=summonerName)
def lolnexus():
return API_ROUTES.lolnexus_url.format(region=region, summonername=summonerName)
def lolking(details):
if details == 'runes':
return API_ROUTES.lolking_url.format(region=region, summonerid=summonerID) + '#runes'
elif details == 'masteries':
return API_ROUTES.lolking_url.format(region=region, summonerid=summonerID) + '#masteries'
elif details == 'matches':
return API_ROUTES.lolking_url.format(region=region, summonerid=summonerID) + '#matches'
elif details == 'rankedstats':
return API_ROUTES.lolking_url.format(region=region, summonerid=summonerID) + '#ranked-stats'
elif details == 'leagues':
return API_ROUTES.lolking_url.format(region=region, summonerid=summonerID) + '#leagues'
else:
return API_ROUTES.lolking_url.format(region=region, summonerid=summonerID)
def lolskill(details):
if details == 'runes':
return API_ROUTES.lolskill_url.format(region=region.upper(), summonername=summonerName) + '/runes'
elif details == 'masteries':
return API_ROUTES.lolskill_url.format(region=region.upper(), summonername=summonerName) + '/masteries'
elif details == 'matches':
return API_ROUTES.lolskill_url.format(region=region.upper(), summonername=summonerName) + '/matches'
elif details == 'stats':
return API_ROUTES.lolskill_url.format(region=region.upper(), summonername=summonerName) + '/stats'
elif details == 'champions':
return API_ROUTES.lolskill_url.format(region=region.upper(), summonername=summonerName) + '/champions'
else:
return API_ROUTES.lolskill_url.format(region=region.upper(), summonername=summonerName)
def getTeamColor(teamid):
if teamid == 100:
return 'Blue Team'
elif teamid == 200:
return 'Purple Team'
else:
return 'No Team'
def getWinLoss(win):
if win == True:
return 'WON'
elif win == False:
return 'LOST'
else:
return 'TIED'
def getTimePlayed(time):
if time > 3600:
hours = time / 3600
minutes = time % 3600 / 60
seconds = time % 3600 % 60
if hours > 1:
return str(hours) + ' hours & ' + str(minutes) + ' minutes & ' + str(seconds) + ' seconds'
else:
return str(hours) + ' hour & ' + str(minutes) + ' minutes & ' + str(seconds) + ' seconds'
elif time > 60:
minutes = time / 60
seconds = time % 60
return str(minutes) + ' minutes & ' + str(seconds) + ' seconds'
else:
return str(time) + ' seconds'
def getKDA(kills, deaths, assists):
if deaths < 1:
return 'PERFECT'
else:
        kda = (float(kills) + float(assists)) / float(deaths)
kda = round(kda, 2)
return str(kda) + ':1'
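# Example: getKDA(5, 2, 7) -> (5 + 7) / 2 = 6.0 -> '6.0:1'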
def getChampionbyID(championid):
tempDict = requests.get(API_ROUTES.championstaticdata_url.format(region=region, championid=int(championid), key=api_key)).json()
name = tempDict['name'] + " " + tempDict['title']
return name
def getSpellbyID(spellid):
tempDict = requests.get(API_ROUTES.spellstaticdata_url.format(region=region, spellid=int(spellid), key=api_key)).json()
spellName = tempDict['name']
return spellName
# Refresh / Get Summoner ID
def getSummonerID():
global summonerID
try:
tempDict = requests.get(summoner_url).json()
summonerID = str(tempDict[summonerName]['id'])
return summonerID
except:
print 'Riot API Down'
return 1
# Refresh / Get Summoner Level
def getSummonerLevel():
global summonerLevel
tempDict = requests.get(summoner_url).json()
summonerLevel = str(tempDict[summonerName]['summonerLevel'])
return summonerLevel
def getWinRatio(win, loss):
total = float(win) + float(loss)
ratio = win / total
ratioPercent = round(ratio * 100, 1)
return str(ratioPercent) + '%'
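# Example: getWinRatio(30, 20) -> 30 / 50 = '60.0%'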
def getStats():
    # Placeholder: eventually compute statistics (avg kills, etc.) here;
    # for now, return the LoLSkill stats page.
return lolskill('stats')
def getSummonerMastery():
tempDict = requests.get(API_ROUTES.summonermastery_url.format(region=region, summonerid=summonerID, key=api_key)).json()
i = 0
masteryIDList = []
masteryRank = []
for pages in tempDict[summonerID]['pages']:
if bool(pages.get('current')) == True:
pageName = tempDict[summonerID]['pages'][i]['name']
for mastery in tempDict[summonerID]['pages'][i]['masteries']:
masteryIDList.append(mastery.get('id'))
masteryRank.append(mastery.get('rank'))
else:
i += 1
return getCurrentMastery(masteryIDList, masteryRank) + ' // Mastery Name: ' + pageName
def getLeagueInfo():
try:
tempDict = requests.get(API_ROUTES.summonerleague_url.format(region=region, summonerid=summonerID, key=api_key)).json()
LEAGUE_TIER = string.capwords(tempDict[summonerID][0]['tier'])
LEAGUE_QUEUE = tempDict[summonerID][0]['queue'].replace('_', ' ')
LEAGUE_DIVISION = tempDict[summonerID][0]['entries'][0]['division']
LEAGUE_WINS = tempDict[summonerID][0]['entries'][0]['wins']
LEAGUE_LOSSES = tempDict[summonerID][0]['entries'][0]['losses']
LEAGUE_POINTS = tempDict[summonerID][0]['entries'][0]['leaguePoints']
# LEAGUE_ISVETERAN = tempDict[summonerID][0]['entries'][0]['isHotStreak']
# LEAGUE_ISHOTSTREAK = tempDict[summonerID][0]['entries'][0]['isVeteran']
# LEAGUE_ISFRESHBLOOD = tempDict[summonerID][0]['entries'][0]['isFreshBlood']
# LEAGUE_ISINACTIVE = tempDict[summonerID][0]['entries'][0]['isInactive']
return summonerName.upper() + ' is ' + LEAGUE_TIER + ' ' + LEAGUE_DIVISION + ' in ' + LEAGUE_QUEUE\
+ ' // ' + str(LEAGUE_WINS) + 'W / ' + str(LEAGUE_LOSSES) + 'L (Win Ratio ' + getWinRatio(LEAGUE_WINS, LEAGUE_LOSSES) + ')'\
+ ' // LP: ' + str(LEAGUE_POINTS)\
+ ' // ' + lolking('leagues')
except:
return 'Summoner ' + summonerName.upper() + ' has not played any Ranked Solo 5x5 matches'\
+ ' // ' + lolking('leagues')
# Get Current Match Stats
def getCurrent(details):
try:
current_api_url = API_ROUTES.current_url.format(region=region, region_upper=region.upper(), summonerid=summonerID, key=api_key)
tempDict = requests.get(current_api_url).json()
CURRENT_GAMEMODE = tempDict['gameMode']
CURRENT_GAMELENGTH = tempDict['gameLength']
CURRENT_GAMETYPE = tempDict['gameType'].replace('_', ' ')
CURRENT_TIME = calendar.timegm(time.gmtime())
CURRENT_EPOCHTIME = tempDict['gameStartTime'] / 1000
if CURRENT_EPOCHTIME <= 0:
CURRENT_TIMEDIFF = 0
else:
CURRENT_TIMEDIFF = CURRENT_TIME - CURRENT_EPOCHTIME
if CURRENT_TIMEDIFF < 0:
CURRENT_TIMEDIFF = 0
runeIDList = []
runeCount = []
masteryIDList = []
masteryRank = []
i = 0
for participant in tempDict['participants']:
if int(summonerID) == int(participant.get('summonerId')):
CURRENT_TEAM = participant.get('teamId')
CURRENT_CHAMPION = participant.get('championId')
CURRENT_SPELL1 = participant.get('spell1Id')
CURRENT_SPELL2 = participant.get('spell2Id')
for rune in tempDict['participants'][i]['runes']:
runeIDList.append(rune.get('runeId'))
runeCount.append(rune.get('count'))
for mastery in tempDict['participants'][i]['masteries']:
masteryIDList.append(mastery.get('masteryId'))
masteryRank.append(mastery.get('rank'))
else:
i += 1
runeCountOutput = ''
runeBonusOutput = ''
for x in range(len(runeIDList)):
runeCountOutput += ' [' + getCurrentRuneTotal(runeIDList[x], runeCount[x]) + '] '
runeBonusOutput += ' [' + getCurrentRuneBonusTotal(runeIDList[x], runeCount[x]) + '] '
masteryOutput = getCurrentMastery(masteryIDList, masteryRank)
if details == 'runes':
return 'Current Runes: ' + runeCountOutput\
+ ' // Rune Bonuses: ' + runeBonusOutput\
+ ' // ' + lolskill('runes')
elif details == 'masteries':
return 'Current Mastery Distribution: ' + masteryOutput\
+ ' // ' + lolskill('masteries')
else:
return summonerName.upper()\
+ ' is currently playing ' + CURRENT_GAMEMODE + ' ' + CURRENT_GAMETYPE\
+ ' with ' + getChampionbyID(CURRENT_CHAMPION)\
+ ' on the ' + getTeamColor(CURRENT_TEAM)\
+ ' // Elapsed Time: ' + getTimePlayed(CURRENT_TIMEDIFF)\
+ ' // Spells Chosen: ' + getSpellbyID(CURRENT_SPELL1) + ' & ' + getSpellbyID(CURRENT_SPELL2)\
+ ' // Mastery Distribution: ' + masteryOutput\
+ ' // Rune Bonuses: ' + runeBonusOutput\
+ ' // ' + lolnexus()
except:
if details == 'runes':
return 'Summoner ' + summonerName.upper() + ' needs to currently be in a game for current Rune data to display'\
+ ' // ' + lolking('runes')
elif details == 'masteries':
return 'Current Mastery Distribution: ' + getSummonerMastery() + ' // ' + lolskill('masteries')
else:
return 'The summoner ' + summonerName.upper() + ' is not currently in a game.'
def getCurrentMastery(masteryidlist, masteryrank):
offense = 0
defense = 0
utility = 0
for x in range(len(masteryidlist)):
masteryID = masteryidlist[x]
tempDict = requests.get(API_ROUTES.masterystaticdata_url.format(region=region, masteryid=masteryID, key=api_key)).json()
masteryTree = tempDict['masteryTree']
ranks = int(masteryrank[x])
if masteryTree == 'Offense':
offense += ranks
elif masteryTree == 'Defense':
defense += ranks
else:
utility += ranks
return '(' + str(offense) + '/' + str(defense) + '/' + str(utility) + ')'
def getCurrentRuneTotal(runeid, count):
tempDict = requests.get(API_ROUTES.runestaticdata_url.format(region=region, runeid=runeid, key=api_key)).json()
runeName = tempDict['name']
return str(count) + 'x ' + runeName
def getCurrentRuneBonusTotal(runeid, count):
tempDict = requests.get(API_ROUTES.runestaticdata_url.format(region=region, runeid=runeid, key=api_key)).json()
runeBonus = tempDict['description']
try:
runeBonus.split('/')[1]
except IndexError:
# Single Bonus
value = runeBonus.split()[0]
value = value.replace('+', '').replace('%', '').replace('-', '')
valueCount = float(value) * float(count)
valueCount = round(valueCount, 2)
description = tempDict['description'].split(' (', 1)[0]
description = string.capwords(description)
description = description.replace(value, str(valueCount))
return description
else:
# Hybrid Bonus
value = runeBonus.split()[0]
value = value.replace('+', '').replace('%', '').replace('-', '')
valueCount = float(value) * float(count)
valueCount = round(valueCount, 2)
firstDescription = runeBonus.split('/')[0].strip()
firstDescription = firstDescription.split(' (', 1)[0]
firstDescription = string.capwords(firstDescription)
firstDescription = firstDescription.replace(value, str(valueCount))
value = runeBonus.split('/')[1].strip()
if value.split()[1] == 'sec.':
return firstDescription + ' / 5 Sec.'
else:
value = value.split()[0]
value = value.replace('+', '').replace('%', '').replace('-', '')
valueCount = float(value) * float(count)
valueCount = round(valueCount, 2)
secondDescription = runeBonus.split('/')[1].strip()
secondDescription = secondDescription.split(' (', 1)[0]
secondDescription = string.capwords(secondDescription)
secondDescription = secondDescription.replace(value, str(valueCount))
return firstDescription + ' / ' + secondDescription
# Get Last Match Stats
def getLast():
tempDict = requests.get(API_ROUTES.last_url.format(region=region, summonerid=summonerID, key=api_key)).json()
LAST_GAMEID = tempDict['games'][0]['gameId']
# LAST_GAMEMODE = tempDict['games'][0]['gameMode']
LAST_SUBTYPE = tempDict['games'][0]['subType'].replace('_', ' ')
LAST_GAMETYPE = tempDict['games'][0]['gameType'].replace('_GAME', '')
LAST_TIMEPLAYED = tempDict['games'][0]['stats']['timePlayed']
LAST_WIN = tempDict['games'][0]['stats']['win']
LAST_GOLDSPENT = tempDict['games'][0]['stats']['goldSpent']
LAST_GOLDEARNED = tempDict['games'][0]['stats']['goldEarned']
LAST_CHAMPION_ID = str(tempDict['games'][0]['championId'])
LAST_IPEARNED = str(tempDict['games'][0]['ipEarned'])
LAST_LEVEL = str(tempDict['games'][0]['stats']['level'])
LAST_SPELL1 = tempDict['games'][0]['spell1']
LAST_SPELL2 = tempDict['games'][0]['spell2']
LAST_CHAMPIONSKILLED = str(tempDict['games'][0]['stats'].get('championsKilled', 0))
LAST_NUMDEATHS = str(tempDict['games'][0]['stats'].get('numDeaths' , 0))
LAST_ASSISTS = str(tempDict['games'][0]['stats'].get('assists', 0))
LAST_TOTALDAMAGECHAMPIONS = str(tempDict['games'][0]['stats']['totalDamageDealtToChampions'])
LAST_MINIONSKILLED = str(tempDict['games'][0]['stats']['minionsKilled'])
LAST_WARDSPLACED = str(tempDict['games'][0]['stats'].get('wardPlaced', 0))
output = summonerName.upper() + ' ' + getWinLoss(LAST_WIN)\
+ ' the last ' + LAST_GAMETYPE + ' ' + LAST_SUBTYPE\
+ ' GAME using ' + getChampionbyID(LAST_CHAMPION_ID)\
+ ' // The game took ' + getTimePlayed(LAST_TIMEPLAYED)\
+ ' // ' + getKDA(LAST_CHAMPIONSKILLED, LAST_NUMDEATHS, LAST_ASSISTS) + ' KDA (' + LAST_CHAMPIONSKILLED + '/' + LAST_NUMDEATHS + '/' + LAST_ASSISTS + ')'\
+ ' // ' + getSpellbyID(LAST_SPELL1) + ' & ' + getSpellbyID(LAST_SPELL2) + ' spells were chosen'\
+ ' // ' + LAST_TOTALDAMAGECHAMPIONS + ' damage was dealt to champions'\
+ ' // ' + LAST_MINIONSKILLED + ' minions were killed'\
+ ' // ' + LAST_WARDSPLACED + ' wards were placed'\
+ ' // Spent ' + str(round(float(LAST_GOLDSPENT) / float(LAST_GOLDEARNED)*100, 1)) + '% of Gold earned [' + str(LAST_GOLDSPENT) + '/' + str(LAST_GOLDEARNED) + ']'\
+ ' // ' + LAST_IPEARNED + ' IP was earned'
# add Official League Match history here
return output
# ====== IRC FUNCTIONS ======
# Extract Nickname
def getNick(data):
nick = data.split('!')[0]
nick = nick.replace(':', ' ')
nick = nick.replace(' ', '')
nick = nick.strip(' \t\n\r')
return nick
def getMessage(data):
if data.find('PRIVMSG'):
try:
message = data.split(channel, 1)[1][2:]
return message
except IndexError:
return 'Index Error'
except:
return 'No message'
else:
return 'Not a message'
# ====== TIMER FUNCTIONS ======
def printit():
threading.Timer(60.0, printit).start()
print "Hello World"
# ===============================
# queue = 13 #sets variable for anti-spam queue functionality
# Connect to server
print '\nConnecting to: ' + server + ' over port ' + str(port)
irc = socket.socket()
irc.connect((server, port))
# Send variables for connection to Twitch chat
irc.send('PASS ' + password + '\r\n')
irc.send('USER ' + nick + ' 0 * :' + botOwner + '\r\n')
irc.send('NICK ' + nick + '\r\n')
irc.send('JOIN ' + channel + '\r\n')
printit()
# Main Program Loop
while True:
ircdata = irc.recv(4096) # gets output from IRC server
ircuser = ircdata.split(':')[1]
ircuser = ircuser.split('!')[0] # determines the sender of the messages
# Check messages for any banned words against banned.txt list
f = open(os.path.dirname(os.path.abspath(__file__)) + '/banned.txt', 'r')
banned = f.readlines()
message = getMessage(ircdata).lower().strip(' \t\n\r')
for i in range(len(banned)):
if message.find(banned[i].strip(' \t\n\r')) != -1:
irc.send('PRIVMSG ' + channel + ' :' + getNick(ircdata) + ', banned words are not allowed. A timeout has been issued.' + '\r\n')
# irc.send('PRIVMSG ' + channel + ' :\/timeout ' + getNick(ircdata) + ' 5\r\n')
break
else:
pass
print 'DEBUG: ' + ircdata.strip(' \t\n\r')
print 'USER: ' + getNick(ircdata).strip(' \t\n\r')
print 'MESSAGE: ' + getMessage(ircdata).strip(' \t\n\r')
print '======================='
# About
if ircdata.find(':!about') != -1:
irc.send('PRIVMSG ' + channel + ' :' + about(getNick(ircdata)) + '\r\n')
# Commands
if ircdata.find(':!commands') != -1:
irc.send('PRIVMSG ' + channel + ' :' + getCommands() + '\r\n')
# Last
if ircdata.find(':!last') != -1:
irc.send('PRIVMSG ' + channel + ' :' + getLast() + '\r\n')
# Current
if ircdata.find(':!current') != -1:
irc.send('PRIVMSG ' + channel + ' :' + getCurrent('games') + '\r\n')
# Current Runes
if ircdata.find(':!runes') != -1 or ircdata.find(':!rune') != -1:
irc.send('PRIVMSG ' + channel + ' :' + getCurrent('runes') + '\r\n')
# Current Mastery
if ircdata.find(':!mastery') != -1 or ircdata.find(':!masteries') != -1:
irc.send('PRIVMSG ' + channel + ' :' + getCurrent('masteries') + '\r\n')
# Basic Summoner Data
if ircdata.find(':!summoner') != -1:
irc.send('PRIVMSG ' + channel + ' :' + getSummonerInfo() + '\r\n')
# Seaonal League Rank Data
if ircdata.find(':!league') != -1:
irc.send('PRIVMSG ' + channel + ' :' + getLeagueInfo() + '\r\n')
# Stats
if ircdata.find(':!stats') != -1:
irc.send('PRIVMSG ' + channel + ' :' + getStats() + '\r\n')
# Return op.gg
if ircdata.find(':!opgg') != -1:
irc.send('PRIVMSG ' + channel + ' :' + opgg('') + '\r\n')
# Return lolnexus
if ircdata.find(':!lolnexus') != -1:
irc.send('PRIVMSG ' + channel + ' :' + lolnexus() + '\r\n')
# Return lolking
if ircdata.find(':!lolking') != -1:
irc.send('PRIVMSG ' + channel + ' :' + lolking('') + '\r\n')
# Return lolskill
if ircdata.find(':!lolskill') != -1:
irc.send('PRIVMSG ' + channel + ' :' + lolskill('') + '\r\n')
# Keep Alive
if ircdata.find('PING') != -1:
irc.send('PONG ' + ircdata.split()[1] + '\r\n')
| mit | 362,198,703,597,034,500 | 37.894828 | 164 | 0.675074 | false |
pacpac1992/mymockup | src/widgets/tab.py | 1 | 3754 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import wx
import wx.lib.ogl as ogl
class Tab_dialog(wx.Dialog):
def __init__(self, parent, title):
super(Tab_dialog, self).__init__(parent, title=title,size=(410,220))
self.parent = parent
self.nombre = wx.TextCtrl(self,-1, pos=(10,10), size=(200,30),style=wx.TE_PROCESS_ENTER)
wx.StaticText(self,-1,'Activo: ',pos=(10,55))
self.lbl_selection = wx.StaticText(self,-1,'',(60, 55),(150, -1))
btn = wx.Button(self,-1,'Aceptar',pos=(10,100))
self.listBox = wx.ListBox(self, -1, (220, 10), (90, 170), [], wx.LB_SINGLE)
up = wx.Button(self,-1,'Arriba',pos=(320,10))
down = wx.Button(self,-1,'Abajo',pos=(320,50))
delete = wx.Button(self,-1,'Eliminar',pos=(320,90))
btn.Bind(wx.EVT_BUTTON,self.crear_tabs)
up.Bind(wx.EVT_BUTTON,self.up)
down.Bind(wx.EVT_BUTTON,self.down)
delete.Bind(wx.EVT_BUTTON,self.delete)
self.nombre.Bind(wx.EVT_TEXT_ENTER, self.add_list)
self.Bind(wx.EVT_LISTBOX, self.onListBox, self.listBox)
def crear_tabs(self,evt):
if self.lbl_selection.GetLabel() != '':
lista = {}
for i in range(0,self.listBox.GetCount()):
lista[i] = self.listBox.GetString(i)
self.parent.draw_tab(None,self.lbl_selection.GetLabel(),lista,False)
self.Destroy()
else:
wx.MessageBox("Seleccione un item", "Message" ,wx.OK | wx.ICON_ERROR)
def add_list(self,evt):
n = self.nombre.GetValue()
self.listBox.Append(n)
self.nombre.SetValue('')
def up(self,evt):
n = self.listBox.GetCount()
r = 0
for i in range(0,n):
if self.listBox.GetString(i) == self.listBox.GetStringSelection():
r = i
dato = self.listBox.GetStringSelection()
if r != 0:
r = r - 1
d = self.listBox.GetString(r)
self.listBox.SetString(r,dato)
self.listBox.SetString(r+1,d)
def down(self,evt):
try:
n = self.listBox.GetCount()
r = 0
for i in range(0,n):
if self.listBox.GetString(i) == self.listBox.GetStringSelection():
r = i
dato = self.listBox.GetStringSelection()
if r <= (n-1):
r = r + 1
d = self.listBox.GetString(r)
self.listBox.SetString(r,dato)
self.listBox.SetString(r-1,d)
except Exception as e:
print(e)
def delete(self,evt):
n = self.listBox.GetCount()
r = 0
for i in range(0,n):
if self.listBox.GetString(i) == self.listBox.GetStringSelection():
r = i
self.listBox.Delete(r)
def onListBox(self,evt):
self.lbl_selection.SetLabel(evt.GetEventObject().GetStringSelection())
class Tab(ogl.DrawnShape):
def __init__(self,lista,active):
ogl.DrawnShape.__init__(self)
n = len(lista)
self.diccionario = lista
i = self.buscarElemento(lista,active)
r = (int(n) * 70 + ((int(n)-1))*4)+50
self.calculate_size(r)
self.tabs(n,r,i)
self.labels(n,r)
self.CalculateSize()
def calculate_size(self,r):
w = r/2
self.SetDrawnPen(wx.BLACK_PEN)
self.SetDrawnBrush(wx.WHITE_BRUSH)
return self.DrawPolygon([(w, 100), (-w,100),(-w,-70),(w,-70),(w,100)])
def tabs(self,n,r,i):
w = r / 2
cp4 = 0
for x in range(0,n):
sp = 70
self.SetDrawnPen(wx.BLACK_PEN)
if x == i:
self.SetDrawnBrush(wx.Brush(wx.Colour(240, 240, 240)))
else:
self.SetDrawnBrush(wx.Brush(wx.Colour(155, 155, 155)))
self.DrawPolygon([((-w + cp4),-70),((-w + cp4),-100),(((-w+cp4)+sp),-100),(((-w+cp4)+sp),-70)])
cp4 = cp4 + 74
def labels(self,items,r):
w = r / 2
ran = 0
for x in xrange(0,items):
self.SetDrawnTextColour(wx.BLACK)
self.SetDrawnFont(wx.Font(10, wx.SWISS, wx.NORMAL, wx.NORMAL))
name = self.diccionario[x]
self.DrawText(str(name), (-w+ran+10, -90))
ran = ran + 74
def buscarElemento(self,lista, elemento):
for i in range(0,len(lista)):
if(lista[i] == elemento):
return i | mit | 5,063,618,514,684,932,000 | 23.86755 | 98 | 0.633724 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_08_01/operations/_vpn_site_link_connections_operations.py | 1 | 5309 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VpnSiteLinkConnectionsOperations(object):
"""VpnSiteLinkConnectionsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
resource_group_name, # type: str
gateway_name, # type: str
connection_name, # type: str
link_connection_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.VpnSiteLinkConnection"
"""Retrieves the details of a vpn site link connection.
:param resource_group_name: The resource group name of the VpnGateway.
:type resource_group_name: str
:param gateway_name: The name of the gateway.
:type gateway_name: str
:param connection_name: The name of the vpn connection.
:type connection_name: str
:param link_connection_name: The name of the vpn connection.
:type link_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VpnSiteLinkConnection, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_08_01.models.VpnSiteLinkConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VpnSiteLinkConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'gatewayName': self._serialize.url("gateway_name", gateway_name, 'str'),
'connectionName': self._serialize.url("connection_name", connection_name, 'str'),
'linkConnectionName': self._serialize.url("link_connection_name", link_connection_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VpnSiteLinkConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnGateways/{gatewayName}/vpnConnections/{connectionName}/vpnLinkConnections/{linkConnectionName}'} # type: ignore
| mit | -8,327,537,274,828,321,000 | 46.401786 | 238 | 0.667922 | false |
caesar2164/edx-platform | lms/djangoapps/lti_provider/tests/test_tasks.py | 1 | 4437 | """
Tests for the LTI outcome service handlers, both in outcomes.py and in tasks.py
"""
import unittest
import ddt
from django.test import TestCase
from mock import patch, MagicMock
from student.tests.factories import UserFactory
from lti_provider.models import GradedAssignment, LtiConsumer, OutcomeService
import lti_provider.tasks as tasks
from opaque_keys.edx.locator import CourseLocator, BlockUsageLocator
class BaseOutcomeTest(TestCase):
"""
Super type for tests of both the leaf and composite outcome celery tasks.
"""
def setUp(self):
super(BaseOutcomeTest, self).setUp()
self.course_key = CourseLocator(
org='some_org',
course='some_course',
run='some_run'
)
self.usage_key = BlockUsageLocator(
course_key=self.course_key,
block_type='problem',
block_id='block_id'
)
self.user = UserFactory.create()
self.consumer = LtiConsumer(
consumer_name='Lti Consumer Name',
consumer_key='consumer_key',
consumer_secret='consumer_secret',
instance_guid='tool_instance_guid'
)
self.consumer.save()
outcome = OutcomeService(
lis_outcome_service_url='http://example.com/service_url',
lti_consumer=self.consumer
)
outcome.save()
self.assignment = GradedAssignment(
user=self.user,
course_key=self.course_key,
usage_key=self.usage_key,
outcome_service=outcome,
lis_result_sourcedid='sourcedid',
version_number=1,
)
self.assignment.save()
self.send_score_update_mock = self.setup_patch(
'lti_provider.outcomes.send_score_update', None
)
def setup_patch(self, function_name, return_value):
"""
Patch a method with a given return value, and return the mock
"""
mock = MagicMock(return_value=return_value)
new_patch = patch(function_name, new=mock)
new_patch.start()
self.addCleanup(new_patch.stop)
return mock
@ddt.ddt
class SendLeafOutcomeTest(BaseOutcomeTest):
"""
Tests for the send_leaf_outcome method in tasks.py
"""
@ddt.data(
(2.0, 2.0, 1.0),
(2.0, 0.0, 0.0),
(1, 2, 0.5),
)
@ddt.unpack
def test_outcome_with_score(self, earned, possible, expected):
tasks.send_leaf_outcome(
self.assignment.id,
earned,
possible
)
self.send_score_update_mock.assert_called_once_with(self.assignment, expected)
@ddt.ddt
class SendCompositeOutcomeTest(BaseOutcomeTest):
"""
Tests for the send_composite_outcome method in tasks.py
"""
def setUp(self):
super(SendCompositeOutcomeTest, self).setUp()
self.descriptor = MagicMock()
self.descriptor.location = BlockUsageLocator(
course_key=self.course_key,
block_type='problem',
block_id='problem',
)
self.course_grade = MagicMock()
self.course_grade_mock = self.setup_patch(
'lti_provider.tasks.CourseGradeFactory.create', self.course_grade
)
self.module_store = MagicMock()
self.module_store.get_item = MagicMock(return_value=self.descriptor)
self.check_result_mock = self.setup_patch(
'lti_provider.tasks.modulestore',
self.module_store
)
@ddt.data(
(2.0, 2.0, 1.0),
(2.0, 0.0, 0.0),
(1, 2, 0.5),
)
@ddt.unpack
@unittest.skip('until it always passes on Jenkins')
def test_outcome_with_score_score(self, earned, possible, expected):
"""
TODO: Figure out why this was failing on Jenkins
"""
self.course_grade.score_for_module = MagicMock(return_value=(earned, possible))
tasks.send_composite_outcome(
self.user.id, unicode(self.course_key), self.assignment.id, 1
)
self.send_score_update_mock.assert_called_once_with(self.assignment, expected)
def test_outcome_with_outdated_version(self):
self.assignment.version_number = 2
self.assignment.save()
tasks.send_composite_outcome(
self.user.id, unicode(self.course_key), self.assignment.id, 1
)
self.assertEqual(self.course_grade_mock.call_count, 0)
| agpl-3.0 | 1,854,020,058,025,303,800 | 31.386861 | 87 | 0.608294 | false |
vegeclic/django-regularcom | blog/migrations/0001_initial.py | 1 | 14127 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'TaggedItem'
db.create_table('blog_taggeditem', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('tag', self.gf('django.db.models.fields.SlugField')(max_length=50)),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'], related_name='blog_tags')),
('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
))
db.send_create_signal('blog', ['TaggedItem'])
# Adding model 'CategoryTranslation'
db.create_table('blog_category_translation', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=200)),
('language_code', self.gf('django.db.models.fields.CharField')(max_length=15, db_index=True)),
('master', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['blog.Category'], related_name='translations', null=True)),
))
db.send_create_signal('blog', ['CategoryTranslation'])
# Adding unique constraint on 'CategoryTranslation', fields ['language_code', 'master']
db.create_unique('blog_category_translation', ['language_code', 'master_id'])
# Adding model 'Category'
db.create_table('blog_category', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('date_created', self.gf('django.db.models.fields.DateTimeField')(blank=True, auto_now_add=True)),
('date_last_modified', self.gf('django.db.models.fields.DateTimeField')(blank=True, auto_now=True)),
))
db.send_create_signal('blog', ['Category'])
# Adding model 'ArticleTranslation'
db.create_table('blog_article_translation', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=200)),
('body', self.gf('django.db.models.fields.TextField')()),
('language_code', self.gf('django.db.models.fields.CharField')(max_length=15, db_index=True)),
('master', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['blog.Article'], related_name='translations', null=True)),
))
db.send_create_signal('blog', ['ArticleTranslation'])
# Adding unique constraint on 'ArticleTranslation', fields ['language_code', 'master']
db.create_unique('blog_article_translation', ['language_code', 'master_id'])
# Adding model 'Article'
db.create_table('blog_article', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('slug', self.gf('django.db.models.fields.SlugField')(max_length=200)),
('enabled', self.gf('django.db.models.fields.BooleanField')(default=True)),
('date_created', self.gf('django.db.models.fields.DateTimeField')(blank=True, auto_now_add=True)),
('date_last_modified', self.gf('django.db.models.fields.DateTimeField')(blank=True, auto_now=True)),
('main_image', self.gf('django.db.models.fields.related.OneToOneField')(blank=True, to=orm['common.Image'], related_name='blog_article_main_image', unique=True, null=True)),
('title_image', self.gf('django.db.models.fields.related.OneToOneField')(blank=True, to=orm['common.Image'], related_name='blog_article_title_image', unique=True, null=True)),
('thumb_image', self.gf('django.db.models.fields.related.OneToOneField')(blank=True, to=orm['common.Image'], related_name='blog_article_thumb_image', unique=True, null=True)),
))
db.send_create_signal('blog', ['Article'])
# Adding M2M table for field authors on 'Article'
m2m_table_name = db.shorten_name('blog_article_authors')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('article', models.ForeignKey(orm['blog.article'], null=False)),
('author', models.ForeignKey(orm['accounts.author'], null=False))
))
db.create_unique(m2m_table_name, ['article_id', 'author_id'])
# Adding M2M table for field categories on 'Article'
m2m_table_name = db.shorten_name('blog_article_categories')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('article', models.ForeignKey(orm['blog.article'], null=False)),
('category', models.ForeignKey(orm['blog.category'], null=False))
))
db.create_unique(m2m_table_name, ['article_id', 'category_id'])
# Adding model 'Comment'
db.create_table('blog_comment', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('participant', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, to=orm['accounts.Account'], null=True)),
('article', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['blog.Article'])),
('body', self.gf('django.db.models.fields.TextField')()),
('date_created', self.gf('django.db.models.fields.DateTimeField')(blank=True, auto_now_add=True)),
('date_last_modified', self.gf('django.db.models.fields.DateTimeField')(blank=True, auto_now=True)),
))
db.send_create_signal('blog', ['Comment'])
def backwards(self, orm):
# Removing unique constraint on 'ArticleTranslation', fields ['language_code', 'master']
db.delete_unique('blog_article_translation', ['language_code', 'master_id'])
# Removing unique constraint on 'CategoryTranslation', fields ['language_code', 'master']
db.delete_unique('blog_category_translation', ['language_code', 'master_id'])
# Deleting model 'TaggedItem'
db.delete_table('blog_taggeditem')
# Deleting model 'CategoryTranslation'
db.delete_table('blog_category_translation')
# Deleting model 'Category'
db.delete_table('blog_category')
# Deleting model 'ArticleTranslation'
db.delete_table('blog_article_translation')
# Deleting model 'Article'
db.delete_table('blog_article')
# Removing M2M table for field authors on 'Article'
db.delete_table(db.shorten_name('blog_article_authors'))
# Removing M2M table for field categories on 'Article'
db.delete_table(db.shorten_name('blog_article_categories'))
# Deleting model 'Comment'
db.delete_table('blog_comment')
models = {
'accounts.account': {
'Meta': {'object_name': 'Account'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'date_last_modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '255', 'unique': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'accounts.author': {
'Meta': {'object_name': 'Author'},
'account': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['accounts.Account']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_image': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'to': "orm['common.Image']", 'related_name': "'+'", 'unique': 'True', 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'blank': 'True', 'max_length': '30'})
},
'blog.article': {
'Meta': {'object_name': 'Article'},
'authors': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['accounts.Author']", 'symmetrical': 'False', 'related_name': "'blog_article_authors'"}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'to': "orm['blog.Category']", 'symmetrical': 'False', 'related_name': "'blog_article_categories'", 'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'date_last_modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_image': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'to': "orm['common.Image']", 'related_name': "'blog_article_main_image'", 'unique': 'True', 'null': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '200'}),
'thumb_image': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'to': "orm['common.Image']", 'related_name': "'blog_article_thumb_image'", 'unique': 'True', 'null': 'True'}),
'title_image': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'to': "orm['common.Image']", 'related_name': "'blog_article_title_image'", 'unique': 'True', 'null': 'True'})
},
'blog.articletranslation': {
'Meta': {'object_name': 'ArticleTranslation', 'unique_together': "[('language_code', 'master')]", 'db_table': "'blog_article_translation'"},
'body': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blog.Article']", 'related_name': "'translations'", 'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'blog.category': {
'Meta': {'object_name': 'Category'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'date_last_modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'blog.categorytranslation': {
'Meta': {'object_name': 'CategoryTranslation', 'unique_together': "[('language_code', 'master')]", 'db_table': "'blog_category_translation'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blog.Category']", 'related_name': "'translations'", 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'blog.comment': {
'Meta': {'object_name': 'Comment'},
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blog.Article']"}),
'body': ('django.db.models.fields.TextField', [], {}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now_add': 'True'}),
'date_last_modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True', 'auto_now': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'participant': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'to': "orm['accounts.Account']", 'null': 'True'})
},
'blog.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'related_name': "'blog_tags'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'tag': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
'common.image': {
'Meta': {'object_name': 'Image'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'related_name': "'+'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '200'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'db_table': "'django_content_type'", 'object_name': 'ContentType'},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['blog'] | agpl-3.0 | -4,488,898,404,940,486,700 | 65.328638 | 216 | 0.590784 | false |
Ultimaker/Cura | cura/XRayPass.py | 1 | 1577 | # Copyright (c) 2018 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
import os.path
from UM.Resources import Resources
from UM.Application import Application
from UM.PluginRegistry import PluginRegistry
from UM.View.RenderPass import RenderPass
from UM.View.RenderBatch import RenderBatch
from UM.View.GL.OpenGL import OpenGL
from cura.Scene.CuraSceneNode import CuraSceneNode
from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
class XRayPass(RenderPass):
def __init__(self, width, height):
super().__init__("xray", width, height)
self._shader = None
self._gl = OpenGL.getInstance().getBindingsObject()
self._scene = Application.getInstance().getController().getScene()
def render(self):
if not self._shader:
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "xray.shader"))
batch = RenderBatch(self._shader, type = RenderBatch.RenderType.NoType, backface_cull = False, blend_mode = RenderBatch.BlendMode.Additive)
for node in DepthFirstIterator(self._scene.getRoot()):
if isinstance(node, CuraSceneNode) and node.getMeshData() and node.isVisible():
batch.addItem(node.getWorldTransformation(copy = False), node.getMeshData(), normal_transformation=node.getCachedNormalMatrix())
self.bind()
self._gl.glDisable(self._gl.GL_DEPTH_TEST)
batch.render(self._scene.getActiveCamera())
self._gl.glEnable(self._gl.GL_DEPTH_TEST)
self.release()
| lgpl-3.0 | -1,777,091,487,277,248,500 | 38.425 | 147 | 0.715916 | false |
the-nick-of-time/DnD | DnD/modules/resourceModule.py | 1 | 2471 | import tkinter as tk
from typing import Union
import lib.components as gui
import lib.resourceLib as res
import lib.settingsLib as settings
class ResourceDisplay(gui.Section):
"""Displays a resource like sorcery points or Hit Dice."""
def __init__(self, container: Union[tk.BaseWidget, tk.Tk], resource: res.Resource,
lockMax=False, **kwargs):
super().__init__(container, **kwargs)
self.resource = resource
self.numbers = tk.Frame(self.f)
self.current = gui.NumericEntry(self.numbers, self.resource.number, self.set_current,
width=5)
self.max = gui.NumericEntry(self.numbers, self.resource.maxnumber, self.set_max,
width=5)
if lockMax:
self.max.disable()
self.value = tk.Label(self.numbers, text='*' + str(self.resource.value))
self.buttonFrame = tk.Frame(self.f)
self.use = tk.Button(self.buttonFrame, text='-', command=self.increment)
self.regain = tk.Button(self.buttonFrame, text='+', command=self.decrement)
self.display = tk.Label(self.buttonFrame, width=3)
self.reset_ = tk.Button(self.buttonFrame, text='Reset', command=self.reset)
self._draw()
def _draw(self):
tk.Label(self.f, text=self.resource.name).grid(row=0, column=0)
self.numbers.grid(row=1, column=0)
self.current.grid(1, 0)
tk.Label(self.numbers, text='/').grid(row=1, column=1)
self.max.grid(1, 2)
self.value.grid(row=1, column=4)
self.buttonFrame.grid(row=2, column=0, columnspan=3)
self.display.grid(row=0, column=0)
self.regain.grid(row=0, column=1)
self.use.grid(row=0, column=2)
self.reset_.grid(row=0, column=3)
def update_view(self):
self.max.set(self.resource.maxnumber)
self.current.set(self.resource.number)
def set_current(self, value):
self.resource.number = value
def set_max(self, value):
self.resource.maxnumber = value
def increment(self):
self.resource.regain(1)
self.update_view()
def decrement(self):
val = self.resource.use(1)
self.display.config(text=str(val))
self.update_view()
def reset(self):
self.resource.reset()
self.update_view()
def rest(self, which: settings.RestLength):
self.resource.rest(which)
self.update_view()
| gpl-2.0 | 771,843,469,293,149,600 | 34.811594 | 93 | 0.613112 | false |
lptorres/noah-inasafe | web_api/third_party/simplejson/decoder.py | 1 | 14670 | """Implementation of JSONDecoder
"""
from __future__ import absolute_import
import re
import sys
import struct
from .compat import fromhex, b, u, text_type, binary_type, PY3, unichr
from .scanner import make_scanner, JSONDecodeError
def _import_c_scanstring():
try:
from ._speedups import scanstring
return scanstring
except ImportError:
return None
c_scanstring = _import_c_scanstring()
# NOTE (3.1.0): JSONDecodeError may still be imported from this module for
# compatibility, but it was never in the __all__
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
_BYTES = fromhex('7FF80000000000007FF0000000000000')
# The struct module in Python 2.4 would get frexp() out of range here
# when an endian is specified in the format string. Fixed in Python 2.5+
if sys.byteorder != 'big':
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
nan, inf = struct.unpack('dd', _BYTES)
return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
_CONSTANTS = {
'-Infinity': NegInf,
'Infinity': PosInf,
'NaN': NaN,
}
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
BACKSLASH = {
'"': u('"'), '\\': u('\u005c'), '/': u('/'),
'b': u('\b'), 'f': u('\f'), 'n': u('\n'), 'r': u('\r'), 't': u('\t'),
}
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True,
_b=BACKSLASH, _m=STRINGCHUNK.match, _join=u('').join,
_PY3=PY3, _maxunicode=sys.maxunicode):
"""Scan the string s for a JSON string. End is the index of the
character in s after the quote that started the JSON string.
Unescapes all valid JSON string escape sequences and raises ValueError
on attempt to decode an invalid string. If strict is False then literal
control characters are allowed in the string.
Returns a tuple of the decoded string and the index of the character in s
after the end quote."""
if encoding is None:
encoding = DEFAULT_ENCODING
chunks = []
_append = chunks.append
begin = end - 1
while 1:
chunk = _m(s, end)
if chunk is None:
raise JSONDecodeError(
"Unterminated string starting at", s, begin)
end = chunk.end()
content, terminator = chunk.groups()
# Content is contains zero or more unescaped string characters
if content:
if not _PY3 and not isinstance(content, text_type):
content = text_type(content, encoding)
_append(content)
# Terminator is the end of string, a literal control character,
# or a backslash denoting that an escape sequence follows
if terminator == '"':
break
elif terminator != '\\':
if strict:
msg = "Invalid control character %r at"
raise JSONDecodeError(msg, s, end)
else:
_append(terminator)
continue
try:
esc = s[end]
except IndexError:
raise JSONDecodeError(
"Unterminated string starting at", s, begin)
# If not a unicode escape sequence, must be in the lookup table
if esc != 'u':
try:
char = _b[esc]
except KeyError:
msg = "Invalid \\X escape sequence %r"
raise JSONDecodeError(msg, s, end)
end += 1
else:
# Unicode escape sequence
msg = "Invalid \\uXXXX escape sequence"
esc = s[end + 1:end + 5]
escX = esc[1:2]
if len(esc) != 4 or escX == 'x' or escX == 'X':
raise JSONDecodeError(msg, s, end - 1)
try:
uni = int(esc, 16)
except ValueError:
raise JSONDecodeError(msg, s, end - 1)
end += 5
# Check for surrogate pair on UCS-4 systems
# Note that this will join high/low surrogate pairs
# but will also pass unpaired surrogates through
if (_maxunicode > 65535 and
uni & 0xfc00 == 0xd800 and
s[end:end + 2] == '\\u'):
esc2 = s[end + 2:end + 6]
escX = esc2[1:2]
if len(esc2) == 4 and not (escX == 'x' or escX == 'X'):
try:
uni2 = int(esc2, 16)
except ValueError:
raise JSONDecodeError(msg, s, end)
if uni2 & 0xfc00 == 0xdc00:
uni = 0x10000 + (((uni - 0xd800) << 10) |
(uni2 - 0xdc00))
end += 6
char = unichr(uni)
# Append the unescaped character
_append(char)
return _join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject(state, encoding, strict, scan_once, object_hook,
object_pairs_hook, memo=None,
_w=WHITESPACE.match, _ws=WHITESPACE_STR):
(s, end) = state
# Backwards compatibility
if memo is None:
memo = {}
memo_get = memo.setdefault
pairs = []
# Use a slice to prevent IndexError from being raised, the following
# check will raise a more specific ValueError if the string is empty
nextchar = s[end:end + 1]
# Normally we expect nextchar == '"'
if nextchar != '"':
if nextchar in _ws:
end = _w(s, end).end()
nextchar = s[end:end + 1]
# Trivial empty object
if nextchar == '}':
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end + 1
pairs = {}
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end + 1
elif nextchar != '"':
raise JSONDecodeError(
"Expecting property name enclosed in double quotes",
s, end)
end += 1
while True:
key, end = scanstring(s, end, encoding, strict)
key = memo_get(key, key)
# To skip some function call overhead we optimize the fast paths where
# the JSON key separator is ": " or just ":".
if s[end:end + 1] != ':':
end = _w(s, end).end()
if s[end:end + 1] != ':':
raise JSONDecodeError("Expecting ':' delimiter", s, end)
end += 1
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
value, end = scan_once(s, end)
pairs.append((key, value))
try:
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar == '}':
break
elif nextchar != ',':
raise JSONDecodeError("Expecting ',' delimiter or '}'", s, end - 1)
try:
nextchar = s[end]
if nextchar in _ws:
end += 1
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar != '"':
raise JSONDecodeError(
"Expecting property name enclosed in double quotes",
s, end - 1)
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end
pairs = dict(pairs)
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end
def JSONArray(state, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
(s, end) = state
values = []
nextchar = s[end:end + 1]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end:end + 1]
# Look-ahead for trivial empty array
if nextchar == ']':
return values, end + 1
elif nextchar == '':
raise JSONDecodeError("Expecting value or ']'", s, end)
_append = values.append
while True:
value, end = scan_once(s, end)
_append(value)
nextchar = s[end:end + 1]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end:end + 1]
end += 1
if nextchar == ']':
break
elif nextchar != ',':
raise JSONDecodeError("Expecting ',' delimiter or ']'", s, end - 1)
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
return values, end
class JSONDecoder(object):
"""Simple JSON <http://json.org> decoder
Performs the following translations in decoding by default:
+---------------+-------------------+
| JSON | Python |
+===============+===================+
| object | dict |
+---------------+-------------------+
| array | list |
+---------------+-------------------+
| string | unicode |
+---------------+-------------------+
| number (int) | int, long |
+---------------+-------------------+
| number (real) | float |
+---------------+-------------------+
| true | True |
+---------------+-------------------+
| false | False |
+---------------+-------------------+
| null | None |
+---------------+-------------------+
It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
their corresponding ``float`` values, which is outside the JSON spec.
"""
def __init__(self, encoding=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, strict=True,
object_pairs_hook=None):
"""
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
*strict* controls the parser's behavior when it encounters an
invalid control character in a string. The default setting of
``True`` means that unescaped control characters are parse errors, if
``False`` then control characters will be allowed in strings.
"""
if encoding is None:
encoding = DEFAULT_ENCODING
self.encoding = encoding
self.object_hook = object_hook
self.object_pairs_hook = object_pairs_hook
self.parse_float = parse_float or float
self.parse_int = parse_int or int
self.parse_constant = parse_constant or _CONSTANTS.__getitem__
self.strict = strict
self.parse_object = JSONObject
self.parse_array = JSONArray
self.parse_string = scanstring
self.memo = {}
self.scan_once = make_scanner(self)
def decode(self, s, _w=WHITESPACE.match, _PY3=PY3):
"""Return the Python representation of ``s`` (a ``str`` or ``unicode``
instance containing a JSON document)
"""
if _PY3 and isinstance(s, binary_type):
s = s.decode(self.encoding)
obj, end = self.raw_decode(s)
end = _w(s, end).end()
if end != len(s):
raise JSONDecodeError("Extra data", s, end, len(s))
return obj
def raw_decode(self, s, idx=0, _w=WHITESPACE.match, _PY3=PY3):
"""Decode a JSON document from ``s`` (a ``str`` or ``unicode``
beginning with a JSON document) and return a 2-tuple of the Python
representation and the index in ``s`` where the document ended.
Optionally, ``idx`` can be used to specify an offset in ``s`` where
the JSON document begins.
This can be used to decode a JSON document from a string that may
have extraneous data at the end.
"""
if _PY3 and not isinstance(s, text_type):
raise TypeError("Input string must be text, not bytes")
return self.scan_once(s, idx=_w(s, idx).end())
| gpl-3.0 | 8,479,624,750,722,993,000 | 35.712082 | 79 | 0.519632 | false |
miaoski/bsideslv-plc-home | hmi.py | 1 | 1699 | # -*- coding: utf8 -*-
# This trivial HMI is decoupled from ModBus server
import gevent
from flask import Flask, render_template
from flask_sockets import Sockets
from pymodbus.client.sync import ModbusTcpClient
from time import sleep
import sys
app = Flask(__name__)
sockets = Sockets(app)
try:
myip = sys.argv[1]
except IndexError:
print 'Usage python hmi.py 192.168.42.1'
sys.exit(1)
client = ModbusTcpClient(myip)
def read_di(num = 20):
rr = client.read_discrete_inputs(1, num).bits[:num]
di = ['1' if x else '0' for x in rr]
return di
def read_co(num = 20):
rr = client.read_coils(1, num).bits[:num]
di = ['1' if x else '0' for x in rr]
return di
def read_ir(num = 5):
rr = client.read_input_registers(1, num).registers[:num]
di = map(str, rr)
return di
def read_hr(num = 5):
rr = client.read_holding_registers(1, num).registers[:num]
di = map(str, rr)
return di
@sockets.route('/data')
def read_data(ws):
while not ws.closed:
try:
di = read_di()
co = read_co()
ir = read_ir()
hr = read_hr()
except:
print 'Exception. Wait for next run.'
gevent.sleep(1)
continue
ws.send('\n'.join((','.join(di), ','.join(co), ','.join(ir), ','.join(hr))))
gevent.sleep(0.3)
print "Connection Closed!!!", reason
@app.route('/')
def homepage():
return render_template('hmi.html')
# main
if __name__ == "__main__":
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler
server = pywsgi.WSGIServer((myip, 8000), app, handler_class=WebSocketHandler)
server.serve_forever()
| gpl-2.0 | 4,138,471,727,828,687,000 | 23.623188 | 84 | 0.608005 | false |
houshengbo/nova_vmware_compute_driver | nova/virt/hyperv/vmops.py | 1 | 25971 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Management class for basic VM operations.
"""
import os
import uuid
from nova.api.metadata import base as instance_metadata
from nova import exception
from nova.openstack.common import cfg
from nova.openstack.common import lockutils
from nova.openstack.common import log as logging
from nova import utils
from nova.virt import configdrive
from nova.virt.hyperv import baseops
from nova.virt.hyperv import constants
from nova.virt.hyperv import vmutils
LOG = logging.getLogger(__name__)
hyperv_opts = [
cfg.StrOpt('vswitch_name',
default=None,
help='Default vSwitch Name, '
'if none provided first external is used'),
cfg.BoolOpt('limit_cpu_features',
default=False,
help='Required for live migration among '
'hosts with different CPU features'),
cfg.BoolOpt('config_drive_inject_password',
default=False,
help='Sets the admin password in the config drive image'),
cfg.StrOpt('qemu_img_cmd',
default="qemu-img.exe",
help='qemu-img is used to convert between '
'different image types'),
cfg.BoolOpt('config_drive_cdrom',
default=False,
help='Attaches the Config Drive image as a cdrom drive '
'instead of a disk drive')
]
CONF = cfg.CONF
CONF.register_opts(hyperv_opts)
CONF.import_opt('use_cow_images', 'nova.config')
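# Minimal usage sketch (assumes a VolumeOps instance from
# nova.virt.hyperv.volumeops and the usual nova request objects):
#
#   ops = VMOps(volumeops)
#   ops.spawn(context, instance, image_meta, injected_files=[],
#             admin_password=None, network_info=network_info)
#   info = ops.get_info(instance)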
class VMOps(baseops.BaseOps):
def __init__(self, volumeops):
super(VMOps, self).__init__()
self._vmutils = vmutils.VMUtils()
self._volumeops = volumeops
def list_instances(self):
""" Return the names of all the instances known to Hyper-V. """
vms = [v.ElementName
for v in self._conn.Msvm_ComputerSystem(['ElementName'],
Caption="Virtual Machine")]
return vms
def get_info(self, instance):
"""Get information about the VM"""
LOG.debug(_("get_info called for instance"), instance=instance)
return self._get_info(instance['name'])
def _get_info(self, instance_name):
vm = self._vmutils.lookup(self._conn, instance_name)
if vm is None:
raise exception.InstanceNotFound(instance=instance_name)
vm = self._conn.Msvm_ComputerSystem(
ElementName=instance_name)[0]
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
vmsettings = vm.associators(
wmi_association_class='Msvm_SettingsDefineState',
wmi_result_class='Msvm_VirtualSystemSettingData')
settings_paths = [v.path_() for v in vmsettings]
        # See http://msdn.microsoft.com/en-us/library/cc160706%28VS.85%29.aspx
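        # The returned tuple carries the list of Msvm_SummaryInformation
        # objects at index 1, one entry per requested settings path.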
summary_info = vs_man_svc.GetSummaryInformation(
[constants.VM_SUMMARY_NUM_PROCS,
constants.VM_SUMMARY_ENABLED_STATE,
constants.VM_SUMMARY_MEMORY_USAGE,
constants.VM_SUMMARY_UPTIME],
settings_paths)[1]
info = summary_info[0]
LOG.debug(_("hyperv vm state: %s"), info.EnabledState)
state = constants.HYPERV_POWER_STATE[info.EnabledState]
memusage = str(info.MemoryUsage)
numprocs = str(info.NumberOfProcessors)
uptime = str(info.UpTime)
LOG.debug(_("Got Info for vm %(instance_name)s: state=%(state)d,"
" mem=%(memusage)s, num_cpu=%(numprocs)s,"
" uptime=%(uptime)s"), locals())
return {'state': state,
'max_mem': info.MemoryUsage,
'mem': info.MemoryUsage,
'num_cpu': info.NumberOfProcessors,
'cpu_time': info.UpTime}
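    # spawn() flow: fetch and cache the root VHD (unless booting from a
    # volume), define the VM, attach the root disk or boot volume, add a
    # SCSI controller for later volume attachments, create one NIC per VIF,
    # optionally build a config drive, then power the VM on.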
def spawn(self, context, instance, image_meta, injected_files,
admin_password, network_info, block_device_info=None):
""" Create a new VM and start it."""
vm = self._vmutils.lookup(self._conn, instance['name'])
if vm is not None:
raise exception.InstanceExists(name=instance['name'])
ebs_root = self._volumeops.volume_in_mapping(
self._volumeops.get_default_root_device(),
block_device_info)
        # If this is not a boot-from-volume spawn
        if not ebs_root:
            # Fetch the image, assuming it is a VHD file.
vhdfile = self._vmutils.get_vhd_path(instance['name'])
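            # Cache the image on first use; with use_cow_images the instance
            # disk becomes a differencing (copy-on-write) VHD backed by the
            # cached parent image.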
try:
self._cache_image(fn=self._vmutils.fetch_image,
context=context,
target=vhdfile,
fname=instance['image_ref'],
image_id=instance['image_ref'],
user=instance['user_id'],
project=instance['project_id'],
cow=CONF.use_cow_images)
            except Exception as exn:
                LOG.exception(_('cache image failed: %s'), exn)
                self.destroy(instance)
                raise exn
try:
self._create_vm(instance)
if not ebs_root:
self._attach_ide_drive(instance['name'], vhdfile, 0, 0,
constants.IDE_DISK)
else:
self._volumeops.attach_boot_volume(block_device_info,
instance['name'])
            # A SCSI controller for volume connections is created
self._create_scsi_controller(instance['name'])
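            # Create one synthetic NIC per VIF; the MAC address is passed
            # without colon separators.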
for vif in network_info:
mac_address = vif['address'].replace(':', '')
self._create_nic(instance['name'], mac_address)
if configdrive.required_by(instance):
self._create_config_drive(instance, injected_files,
admin_password)
LOG.debug(_('Starting VM %s '), instance['name'])
self._set_vm_state(instance['name'], 'Enabled')
LOG.info(_('Started VM %s '), instance['name'])
except Exception as exn:
LOG.exception(_('spawn vm failed: %s'), exn)
self.destroy(instance)
raise exn
def _create_config_drive(self, instance, injected_files, admin_password):
if CONF.config_drive_format != 'iso9660':
vmutils.HyperVException(_('Invalid config_drive_format "%s"') %
CONF.config_drive_format)
LOG.info(_('Using config drive'), instance=instance)
extra_md = {}
if admin_password and CONF.config_drive_inject_password:
extra_md['admin_pass'] = admin_password
inst_md = instance_metadata.InstanceMetadata(instance,
content=injected_files, extra_md=extra_md)
instance_path = self._vmutils.get_instance_path(
instance['name'])
configdrive_path_iso = os.path.join(instance_path, 'configdrive.iso')
LOG.info(_('Creating config drive at %(path)s'),
{'path': configdrive_path_iso}, instance=instance)
with configdrive.config_drive_helper(instance_md=inst_md) as cdb:
try:
cdb.make_drive(configdrive_path_iso)
            except exception.ProcessExecutionError as e:
LOG.error(_('Creating config drive failed with error: %s'),
e, instance=instance)
raise
if not CONF.config_drive_cdrom:
drive_type = constants.IDE_DISK
configdrive_path = os.path.join(instance_path,
'configdrive.vhd')
utils.execute(CONF.qemu_img_cmd,
'convert',
'-f',
'raw',
'-O',
'vpc',
configdrive_path_iso,
configdrive_path,
attempts=1)
os.remove(configdrive_path_iso)
else:
drive_type = constants.IDE_DVD
configdrive_path = configdrive_path_iso
self._attach_ide_drive(instance['name'], configdrive_path, 1, 0,
drive_type)
def _create_vm(self, instance):
"""Create a VM but don't start it. """
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
vs_gs_data = self._conn.Msvm_VirtualSystemGlobalSettingData.new()
vs_gs_data.ElementName = instance["name"]
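        #DefineVirtualSystem returns a 3-tuple; slice off the first element
        #and keep only the job and the return value.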
(job, ret_val) = vs_man_svc.DefineVirtualSystem(
[], None, vs_gs_data.GetText_(1))[1:]
if ret_val == constants.WMI_JOB_STATUS_STARTED:
success = self._vmutils.check_job_status(job)
else:
success = (ret_val == 0)
if not success:
raise vmutils.HyperVException(_('Failed to create VM %s') %
instance["name"])
LOG.debug(_('Created VM %s...'), instance["name"])
vm = self._conn.Msvm_ComputerSystem(ElementName=instance["name"])[0]
vmsettings = vm.associators(
wmi_result_class='Msvm_VirtualSystemSettingData')
vmsetting = [s for s in vmsettings
if s.SettingType == 3][0] # avoid snapshots
memsetting = vmsetting.associators(
wmi_result_class='Msvm_MemorySettingData')[0]
#No Dynamic Memory, so reservation, limit and quantity are identical.
mem = long(str(instance['memory_mb']))
memsetting.VirtualQuantity = mem
memsetting.Reservation = mem
memsetting.Limit = mem
(job, ret_val) = vs_man_svc.ModifyVirtualSystemResources(
vm.path_(), [memsetting.GetText_(1)])
LOG.debug(_('Set memory for vm %s...'), instance["name"])
procsetting = vmsetting.associators(
wmi_result_class='Msvm_ProcessorSettingData')[0]
vcpus = long(instance['vcpus'])
procsetting.VirtualQuantity = vcpus
procsetting.Reservation = vcpus
procsetting.Limit = 100000 # static assignment to 100%
if CONF.limit_cpu_features:
procsetting.LimitProcessorFeatures = True
(job, ret_val) = vs_man_svc.ModifyVirtualSystemResources(
vm.path_(), [procsetting.GetText_(1)])
LOG.debug(_('Set vcpus for vm %s...'), instance["name"])
def _create_scsi_controller(self, vm_name):
""" Create an iscsi controller ready to mount volumes """
LOG.debug(_('Creating a scsi controller for %(vm_name)s for volume '
'attaching') % locals())
vms = self._conn.MSVM_ComputerSystem(ElementName=vm_name)
vm = vms[0]
scsicontrldefault = self._conn.query(
"SELECT * FROM Msvm_ResourceAllocationSettingData \
WHERE ResourceSubType = 'Microsoft Synthetic SCSI Controller'\
AND InstanceID LIKE '%Default%'")[0]
if scsicontrldefault is None:
raise vmutils.HyperVException(_('Controller not found'))
scsicontrl = self._vmutils.clone_wmi_obj(self._conn,
'Msvm_ResourceAllocationSettingData', scsicontrldefault)
scsicontrl.VirtualSystemIdentifiers = ['{' + str(uuid.uuid4()) + '}']
scsiresource = self._vmutils.add_virt_resource(self._conn,
scsicontrl, vm)
if scsiresource is None:
raise vmutils.HyperVException(
_('Failed to add scsi controller to VM %s') %
vm_name)
def _get_ide_controller(self, vm, ctrller_addr):
#Find the IDE controller for the vm.
vmsettings = vm.associators(
wmi_result_class='Msvm_VirtualSystemSettingData')
rasds = vmsettings[0].associators(
wmi_result_class='MSVM_ResourceAllocationSettingData')
ctrller = [r for r in rasds
if r.ResourceSubType == 'Microsoft Emulated IDE Controller'
and r.Address == str(ctrller_addr)]
return ctrller
def _attach_ide_drive(self, vm_name, path, ctrller_addr, drive_addr,
drive_type=constants.IDE_DISK):
"""Create an IDE drive and attach it to the vm"""
LOG.debug(_('Creating disk for %(vm_name)s by attaching'
' disk file %(path)s') % locals())
vms = self._conn.MSVM_ComputerSystem(ElementName=vm_name)
vm = vms[0]
ctrller = self._get_ide_controller(vm, ctrller_addr)
if drive_type == constants.IDE_DISK:
resSubType = 'Microsoft Synthetic Disk Drive'
elif drive_type == constants.IDE_DVD:
resSubType = 'Microsoft Synthetic DVD Drive'
#Find the default disk drive object for the vm and clone it.
drivedflt = self._conn.query(
"SELECT * FROM Msvm_ResourceAllocationSettingData \
WHERE ResourceSubType LIKE '%(resSubType)s'\
AND InstanceID LIKE '%%Default%%'" % locals())[0]
drive = self._vmutils.clone_wmi_obj(self._conn,
'Msvm_ResourceAllocationSettingData', drivedflt)
#Set the IDE ctrller as parent.
drive.Parent = ctrller[0].path_()
drive.Address = drive_addr
#Add the cloned disk drive object to the vm.
new_resources = self._vmutils.add_virt_resource(self._conn,
drive, vm)
if new_resources is None:
raise vmutils.HyperVException(
_('Failed to add drive to VM %s') %
vm_name)
drive_path = new_resources[0]
LOG.debug(_('New %(drive_type)s drive path is %(drive_path)s') %
locals())
if drive_type == constants.IDE_DISK:
resSubType = 'Microsoft Virtual Hard Disk'
elif drive_type == constants.IDE_DVD:
resSubType = 'Microsoft Virtual CD/DVD Disk'
#Find the default VHD disk object.
drivedefault = self._conn.query(
"SELECT * FROM Msvm_ResourceAllocationSettingData \
WHERE ResourceSubType LIKE '%(resSubType)s' AND \
InstanceID LIKE '%%Default%%' " % locals())[0]
#Clone the default and point it to the image file.
res = self._vmutils.clone_wmi_obj(self._conn,
'Msvm_ResourceAllocationSettingData', drivedefault)
#Set the new drive as the parent.
res.Parent = drive_path
res.Connection = [path]
#Add the new vhd object as a virtual hard disk to the vm.
new_resources = self._vmutils.add_virt_resource(self._conn, res, vm)
if new_resources is None:
raise vmutils.HyperVException(
_('Failed to add %(drive_type)s image to VM %(vm_name)s') %
locals())
LOG.info(_('Created drive type %(drive_type)s for %(vm_name)s') %
locals())
def _create_nic(self, vm_name, mac):
"""Create a (synthetic) nic and attach it to the vm"""
LOG.debug(_('Creating nic for %s '), vm_name)
#Find the vswitch that is connected to the physical nic.
vms = self._conn.Msvm_ComputerSystem(ElementName=vm_name)
extswitch = self._find_external_network()
if extswitch is None:
raise vmutils.HyperVException(_('Cannot find vSwitch'))
vm = vms[0]
switch_svc = self._conn.Msvm_VirtualSwitchManagementService()[0]
#Find the default nic and clone it to create a new nic for the vm.
#Use Msvm_SyntheticEthernetPortSettingData for Windows or Linux with
#Linux Integration Components installed.
syntheticnics_data = self._conn.Msvm_SyntheticEthernetPortSettingData()
default_nic_data = [n for n in syntheticnics_data
if n.InstanceID.rfind('Default') > 0]
new_nic_data = self._vmutils.clone_wmi_obj(self._conn,
'Msvm_SyntheticEthernetPortSettingData',
default_nic_data[0])
#Create a port on the vswitch.
(new_port, ret_val) = switch_svc.CreateSwitchPort(
Name=str(uuid.uuid4()),
FriendlyName=vm_name,
ScopeOfResidence="",
VirtualSwitch=extswitch.path_())
if ret_val != 0:
LOG.error(_('Failed creating a port on the external vswitch'))
raise vmutils.HyperVException(_('Failed creating port for %s') %
vm_name)
ext_path = extswitch.path_()
LOG.debug(_("Created switch port %(vm_name)s on switch %(ext_path)s")
% locals())
#Connect the new nic to the new port.
new_nic_data.Connection = [new_port]
new_nic_data.ElementName = vm_name + ' nic'
new_nic_data.Address = mac
new_nic_data.StaticMacAddress = 'True'
new_nic_data.VirtualSystemIdentifiers = ['{' + str(uuid.uuid4()) + '}']
#Add the new nic to the vm.
new_resources = self._vmutils.add_virt_resource(self._conn,
new_nic_data, vm)
if new_resources is None:
raise vmutils.HyperVException(_('Failed to add nic to VM %s') %
vm_name)
LOG.info(_("Created nic for %s "), vm_name)
def _find_external_network(self):
"""Find the vswitch that is connected to the physical nic.
Assumes only one physical nic on the host
"""
        #If there are no physical nics connected to networks, return.
        if CONF.vswitch_name:
            LOG.debug(_("Attempting to bind NIC to %s ")
                      % CONF.vswitch_name)
            bound = self._conn.Msvm_VirtualSwitch(
                ElementName=CONF.vswitch_name)
        else:
            LOG.debug(_("No vSwitch specified, attaching to default"))
            bound = self._conn.Msvm_ExternalEthernetPort(IsBound='TRUE')
if len(bound) == 0:
return None
if CONF.vswitch_name:
return self._conn.Msvm_VirtualSwitch(
ElementName=CONF.vswitch_name)[0]\
.associators(wmi_result_class='Msvm_SwitchPort')[0]\
.associators(wmi_result_class='Msvm_VirtualSwitch')[0]
else:
return self._conn.Msvm_ExternalEthernetPort(IsBound='TRUE')\
.associators(wmi_result_class='Msvm_SwitchPort')[0]\
.associators(wmi_result_class='Msvm_VirtualSwitch')[0]
def reboot(self, instance, network_info, reboot_type):
"""Reboot the specified instance."""
vm = self._vmutils.lookup(self._conn, instance['name'])
if vm is None:
raise exception.InstanceNotFound(instance_id=instance["id"])
self._set_vm_state(instance['name'], 'Reboot')
def destroy(self, instance, network_info=None, cleanup=True):
"""Destroy the VM. Also destroy the associated VHD disk files"""
LOG.debug(_("Got request to destroy vm %s"), instance['name'])
vm = self._vmutils.lookup(self._conn, instance['name'])
if vm is None:
return
vm = self._conn.Msvm_ComputerSystem(ElementName=instance['name'])[0]
vs_man_svc = self._conn.Msvm_VirtualSystemManagementService()[0]
#Stop the VM first.
self._set_vm_state(instance['name'], 'Disabled')
vmsettings = vm.associators(
wmi_result_class='Msvm_VirtualSystemSettingData')
rasds = vmsettings[0].associators(
wmi_result_class='MSVM_ResourceAllocationSettingData')
disks = [r for r in rasds
if r.ResourceSubType == 'Microsoft Virtual Hard Disk']
disk_files = []
volumes = [r for r in rasds
if r.ResourceSubType == 'Microsoft Physical Disk Drive']
volumes_drives_list = []
#collect the volumes information before destroying the VM.
for volume in volumes:
hostResources = volume.HostResource
drive_path = hostResources[0]
#Appending the Msvm_Disk path
volumes_drives_list.append(drive_path)
#Collect disk file information before destroying the VM.
for disk in disks:
disk_files.extend([c for c in disk.Connection])
#Nuke the VM. Does not destroy disks.
        (job, ret_val) = vs_man_svc.DestroyVirtualSystem(vm.path_())
        success = False
        if ret_val == constants.WMI_JOB_STATUS_STARTED:
            success = self._vmutils.check_job_status(job)
        elif ret_val == 0:
            success = True
if not success:
raise vmutils.HyperVException(_('Failed to destroy vm %s') %
instance['name'])
#Disconnect volumes
for volume_drive in volumes_drives_list:
self._volumeops.disconnect_volume(volume_drive)
#Delete associated vhd disk files.
for disk in disk_files:
vhdfile = self._conn_cimv2.query(
"Select * from CIM_DataFile where Name = '" +
disk.replace("'", "''") + "'")[0]
LOG.debug(_("Del: disk %(vhdfile)s vm %(name)s")
% {'vhdfile': vhdfile, 'name': instance['name']})
vhdfile.Delete()
def pause(self, instance):
"""Pause VM instance."""
LOG.debug(_("Pause instance"), instance=instance)
self._set_vm_state(instance["name"], 'Paused')
def unpause(self, instance):
"""Unpause paused VM instance."""
LOG.debug(_("Unpause instance"), instance=instance)
self._set_vm_state(instance["name"], 'Enabled')
def suspend(self, instance):
"""Suspend the specified instance."""
LOG.debug(_("Suspend instance"), instance=instance)
self._set_vm_state(instance["name"], 'Suspended')
def resume(self, instance):
"""Resume the suspended VM instance."""
LOG.debug(_("Resume instance"), instance=instance)
self._set_vm_state(instance["name"], 'Enabled')
def power_off(self, instance):
"""Power off the specified instance."""
LOG.debug(_("Power off instance"), instance=instance)
self._set_vm_state(instance["name"], 'Disabled')
def power_on(self, instance):
"""Power on the specified instance"""
LOG.debug(_("Power on instance"), instance=instance)
self._set_vm_state(instance["name"], 'Enabled')
def _set_vm_state(self, vm_name, req_state):
"""Set the desired state of the VM"""
vms = self._conn.Msvm_ComputerSystem(ElementName=vm_name)
if len(vms) == 0:
return False
(job, ret_val) = vms[0].RequestStateChange(
constants.REQ_POWER_STATE[req_state])
success = False
if ret_val == constants.WMI_JOB_STATUS_STARTED:
success = self._vmutils.check_job_status(job)
elif ret_val == 0:
success = True
elif ret_val == 32775:
#Invalid state for current operation. Typically means it is
#already in the state requested
success = True
if success:
LOG.info(_("Successfully changed vm state of %(vm_name)s"
" to %(req_state)s") % locals())
else:
msg = _("Failed to change vm state of %(vm_name)s"
" to %(req_state)s") % locals()
LOG.error(msg)
raise vmutils.HyperVException(msg)
def _cache_image(self, fn, target, fname, cow=False, Size=None,
*args, **kwargs):
"""Wrapper for a method that creates an image that caches the image.
This wrapper will save the image into a common store and create a
copy for use by the hypervisor.
The underlying method should specify a kwarg of target representing
where the image will be saved.
fname is used as the filename of the base image. The filename needs
to be unique to a given image.
If cow is True, it will make a CoW image instead of a copy.
"""
@lockutils.synchronized(fname, 'nova-')
def call_if_not_exists(path, fn, *args, **kwargs):
if not os.path.exists(path):
fn(target=path, *args, **kwargs)
if not os.path.exists(target):
LOG.debug(_("use_cow_image:%s"), cow)
if cow:
base = self._vmutils.get_base_vhd_path(fname)
call_if_not_exists(base, fn, *args, **kwargs)
image_service = self._conn.query(
"Select * from Msvm_ImageManagementService")[0]
(job, ret_val) = \
image_service.CreateDifferencingVirtualHardDisk(
Path=target, ParentPath=base)
LOG.debug(
"Creating difference disk: JobID=%s, Source=%s, Target=%s",
job, base, target)
if ret_val == constants.WMI_JOB_STATUS_STARTED:
success = self._vmutils.check_job_status(job)
else:
success = (ret_val == 0)
if not success:
raise vmutils.HyperVException(
_('Failed to create Difference Disk from '
'%(base)s to %(target)s') % locals())
else:
call_if_not_exists(target, fn, *args, **kwargs)
| apache-2.0 | -4,034,157,555,488,214,000 | 42.141196 | 79 | 0.576643 | false |
edx/course-discovery | course_discovery/apps/course_metadata/tests/test_emails.py | 1 | 14576 | import datetime
import re
from django.conf import settings
from django.contrib.auth.models import Group
from django.core import mail
from django.test import TestCase
from opaque_keys.edx.keys import CourseKey
from testfixtures import LogCapture, StringComparison
from course_discovery.apps.core.tests.factories import UserFactory
from course_discovery.apps.course_metadata import emails
from course_discovery.apps.course_metadata.models import CourseEditor
from course_discovery.apps.course_metadata.tests.factories import (
CourseEditorFactory, CourseRunFactory, OrganizationFactory
)
from course_discovery.apps.publisher.choices import InternalUserRole
from course_discovery.apps.publisher.constants import LEGAL_TEAM_GROUP_NAME
from course_discovery.apps.publisher.tests.factories import (
GroupFactory, OrganizationExtensionFactory, OrganizationUserRoleFactory, UserAttributeFactory
)
class EmailTests(TestCase):
def setUp(self):
super().setUp()
self.org = OrganizationFactory(name='MyOrg', key='myorg')
self.course_run = CourseRunFactory(draft=True, title_override='MyCourse')
self.course = self.course_run.course
self.course.authoring_organizations.add(self.org)
self.partner = self.course.partner
self.group = GroupFactory()
self.pc = self.make_user(email='[email protected]')
self.editor = self.make_user(groups=[self.group])
self.editor2 = self.make_user(groups=[self.group])
self.non_editor = self.make_user(groups=[self.group])
self.legal = self.make_user(groups=[Group.objects.get(name=LEGAL_TEAM_GROUP_NAME)])
CourseEditorFactory(user=self.editor, course=self.course)
CourseEditorFactory(user=self.editor2, course=self.course)
OrganizationExtensionFactory(group=self.group, organization=self.org)
OrganizationUserRoleFactory(user=self.pc, organization=self.org, role=InternalUserRole.ProjectCoordinator)
self.publisher_url = f'{self.partner.publisher_url}courses/{self.course_run.course.uuid}'
self.studio_url = f'{self.partner.studio_url}course/{self.course_run.key}'
self.admin_url = 'https://{}/admin/course_metadata/courserun/{}/change/'.format(
self.partner.site.domain, self.course_run.id
)
self.run_num = CourseKey.from_string(self.course_run.key).run
@staticmethod
def make_user(groups=None, **kwargs):
user = UserFactory(**kwargs)
UserAttributeFactory(user=user, enable_email_notification=True)
if groups:
user.groups.set(groups)
return user
def assertEmailContains(self, subject=None, to_users=None, both_regexes=None, text_regexes=None,
html_regexes=None, index=0):
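        """
        Assert that the outbox email at `index` matches the given subject, recipients and regexes.
        """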
email = mail.outbox[index]
if to_users is not None:
assert set(email.to) == {u.email for u in to_users}
if subject is not None:
self.assertRegex(str(email.subject), subject)
assert len(email.alternatives) == 1
assert email.alternatives[0][1] == 'text/html'
text = email.body
html = email.alternatives[0][0]
for regex in both_regexes or []:
self.assertRegex(text, regex)
self.assertRegex(html, regex)
for regex in text_regexes or []:
self.assertRegex(text, regex)
for regex in html_regexes or []:
self.assertRegex(html, regex)
def assertEmailDoesNotContain(self, both_regexes=None, text_regexes=None, html_regexes=None, index=0):
email = mail.outbox[index]
text = email.body
html = email.alternatives[0][0]
for regex in both_regexes or []:
self.assertNotRegex(text, regex)
self.assertNotRegex(html, regex)
for regex in text_regexes or []:
self.assertNotRegex(text, regex)
for regex in html_regexes or []:
self.assertNotRegex(html, regex)
def assertEmailSent(self, function, subject=None, to_users=None, both_regexes=None, text_regexes=None,
html_regexes=None, index=0, total=1):
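        """
        Call `function` with the course run, then check the outbox size and email contents.
        """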
function(self.course_run)
assert len(mail.outbox) == total
self.assertEmailContains(subject=subject, to_users=to_users, both_regexes=both_regexes,
text_regexes=text_regexes, html_regexes=html_regexes, index=index)
def assertEmailNotSent(self, function, reason):
with LogCapture(emails.logger.name) as log_capture:
function(self.course_run)
assert len(mail.outbox) == 0
if reason:
log_capture.check(
(
emails.logger.name,
'INFO',
StringComparison('Not sending notification email for template course_metadata/email/.* because ' +
reason),
)
)
def test_send_email_for_legal_review(self):
"""
Verify that send_email_for_legal_review's happy path works as expected
"""
self.assertEmailSent(
emails.send_email_for_legal_review,
f'^Legal review requested: {self.course_run.title}$',
[self.legal],
both_regexes=[
'Dear legal team,',
'MyOrg has submitted MyCourse for review.',
'Note: This email address is unable to receive replies.',
],
html_regexes=[
'<a href="%s">View this course run in Publisher</a> to determine OFAC status.' % self.publisher_url,
'For questions or comments, please contact '
'<a href="mailto:[email protected]">the Project Coordinator</a>.',
],
text_regexes=[
'%s\nView this course run in Publisher above to determine OFAC status.' % self.publisher_url,
'For questions or comments, please contact the Project Coordinator at [email protected].',
],
)
def test_send_email_for_internal_review(self):
"""
Verify that send_email_for_internal_review's happy path works as expected
"""
restricted_url = self.partner.lms_admin_url.rstrip('/') + '/embargo/restrictedcourse/'
self.assertEmailSent(
emails.send_email_for_internal_review,
f'^Review requested: {self.course_run.key} - {self.course_run.title}$',
[self.pc],
both_regexes=[
'Dear %s,' % self.pc.full_name,
'MyOrg has submitted %s for review.' % self.course_run.key,
],
html_regexes=[
'<a href="%s">View this course run in Publisher</a> to review the changes and mark it as reviewed.' %
self.publisher_url,
'This is a good time to <a href="%s">review this course run in Studio</a>.' % self.studio_url,
'Visit the <a href="%s">restricted course admin page</a> to set embargo rules for this course, '
'as needed.' % restricted_url,
],
text_regexes=[
'\n\nPublisher page: %s\n' % self.publisher_url,
'\n\nStudio page: %s\n' % self.studio_url,
'\n\nRestricted Course admin: %s\n' % restricted_url,
],
)
def test_send_email_for_reviewed(self):
"""
Verify that send_email_for_reviewed's happy path works as expected
"""
self.assertEmailSent(
emails.send_email_for_reviewed,
f'^Review complete: {self.course_run.title}$',
[self.editor, self.editor2],
both_regexes=[
'Dear course team,',
'The course run about page is now published.',
'Note: This email address is unable to receive replies.',
],
html_regexes=[
'The <a href="%s">%s course run</a> of %s has been reviewed and approved by %s.' %
(self.publisher_url, self.run_num, self.course_run.title, settings.PLATFORM_NAME),
'For questions or comments, please contact '
'<a href="mailto:[email protected]">your Project Coordinator</a>.',
],
text_regexes=[
'The %s course run of %s has been reviewed and approved by %s.' %
(self.run_num, self.course_run.title, settings.PLATFORM_NAME),
'\n\nView the course run in Publisher: %s\n' % self.publisher_url,
'For questions or comments, please contact your Project Coordinator at [email protected].',
],
)
def test_send_email_for_go_live(self):
"""
Verify that send_email_for_go_live's happy path works as expected
"""
kwargs = {
'both_regexes': [
'The About page for the %s course run of %s has been published.' %
(self.run_num, self.course_run.title),
'No further action is necessary.',
],
'html_regexes': [
'<a href="%s">View this About page.</a>' % self.course_run.marketing_url,
'For questions or comments, please contact '
'<a href="mailto:[email protected]">your Project Coordinator</a>.',
],
'text_regexes': [
'\n\nView this About page. %s\n' % self.course_run.marketing_url,
'For questions or comments, please contact your Project Coordinator at [email protected].',
],
}
self.assertEmailSent(
emails.send_email_for_go_live,
f'^Published: {self.course_run.title}$',
[self.editor, self.editor2],
total=2,
**kwargs,
)
self.assertEmailContains(
subject=f'^Published: {self.course_run.key} - {self.course_run.title}$',
to_users=[self.pc],
index=1,
**kwargs,
)
def test_no_project_coordinator(self):
"""
Verify that no email is sent and a message is logged if no PC is defined
"""
self.pc.delete()
self.assertEmailNotSent(
emails.send_email_for_internal_review,
'no project coordinator is defined for organization myorg'
)
def test_no_organization(self):
"""
Verify that no email is sent and a message is logged if no org is defined
"""
self.org.delete()
self.assertEmailNotSent(
emails.send_email_for_internal_review,
'no organization is defined for course %s' % self.course_run.course.key
)
def test_no_publisher_url(self):
"""
Verify that no email is sent and a message is logged if the publisher_url is missing
"""
self.partner.publisher_url = None
self.partner.save()
self.assertEmailNotSent(
emails.send_email_for_internal_review,
'no publisher URL is defined for partner %s' % self.partner.short_code
)
def test_no_studio_url(self):
"""
Verify that no email is sent and a message is logged if the studio_url is missing
"""
self.partner.studio_url = None
self.partner.save()
self.assertEmailNotSent(
emails.send_email_for_internal_review,
'no studio URL is defined for partner %s' % self.partner.short_code
)
def test_no_lms_admin_url(self):
"""
Verify that no link is provided to the restricted course admin if we don't have lms_admin_url
"""
self.partner.lms_admin_url = None
self.partner.save()
self.assertEmailSent(emails.send_email_for_internal_review)
self.assertEmailDoesNotContain(
both_regexes=[
re.compile('restricted', re.IGNORECASE),
],
)
def test_no_editors(self):
"""
Verify that no reviewed email is sent if no editors exist
"""
self.editor.delete()
self.editor2.delete()
self.non_editor.delete()
self.assertEmailNotSent(emails.send_email_for_reviewed, None)
def test_respect_for_no_email_flag(self):
"""
Verify that no email is sent if the user requests it
"""
self.editor.attributes.enable_email_notification = False
self.editor.attributes.save()
self.assertEmailSent(emails.send_email_for_reviewed, to_users=[self.editor2])
def test_emails_all_org_users_if_no_editors(self):
"""
Verify that we send email to all org users if no editors exist
"""
CourseEditor.objects.all().delete()
self.assertEmailSent(emails.send_email_for_reviewed, to_users=[self.editor, self.editor2, self.non_editor])
def test_reviewed_go_live_date_in_future(self):
"""
Verify that we mention when the course run will go live, if it's in the future
"""
self.course_run.go_live_date = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(days=10)
self.assertEmailSent(
emails.send_email_for_reviewed,
both_regexes=[
'The course run about page will be published on %s' % self.course_run.go_live_date.strftime('%x'),
],
)
def test_reviewed_go_live_date_in_past(self):
"""
Verify that we mention when the course run is now live, if we missed the go live date
"""
self.course_run.go_live_date = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=10)
self.assertEmailSent(
emails.send_email_for_reviewed,
both_regexes=[
'The course run about page is now published.',
],
)
def test_comment_email_sent(self):
comment = 'This is a test comment'
emails.send_email_for_comment({
'user': {
'username': self.editor.username,
'email': self.editor.email,
'first_name': self.editor.first_name,
'last_name': self.editor.last_name,
},
'comment': comment,
'created': datetime.datetime.now(datetime.timezone.utc).isoformat(),
}, self.course, self.editor)
assert len(mail.outbox) == 1
self.assertEmailContains(
both_regexes=[
f'{self.editor.username} made the following comment on',
comment
],
)
| agpl-3.0 | 4,705,774,580,766,771,000 | 39.94382 | 118 | 0.593235 | false |
mganeva/mantid | qt/applications/workbench/workbench/widgets/plotselector/presenter.py | 1 | 15293 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
# This file is part of the mantid workbench.
#
#
from __future__ import absolute_import, print_function
import os
import re
from .model import PlotSelectorModel
from .view import PlotSelectorView, Column
class PlotSelectorPresenter(object):
"""
Presenter for the plot selector widget. This class can be
responsible for the creation of the model and view, passing in
the GlobalFigureManager as an argument, or the presenter and view
can be passed as arguments (only intended for testing).
"""
def __init__(self, global_figure_manager, view=None, model=None):
"""
Initialise the presenter, creating the view and model, and
setting the initial plot list
:param global_figure_manager: The GlobalFigureManager class
:param view: Optional - a view to use instead of letting the
class create one (intended for testing)
:param model: Optional - a model to use instead of letting
the class create one (intended for testing)
"""
# Create model and view, or accept mocked versions
if view is None:
self.view = PlotSelectorView(self)
else:
self.view = view
if model is None:
self.model = PlotSelectorModel(self, global_figure_manager)
else:
self.model = model
# Make sure the plot list is up to date
self.update_plot_list()
def get_plot_name_from_number(self, plot_number):
return self.model.get_plot_name_from_number(plot_number)
# ------------------------ Plot Updates ------------------------
def update_plot_list(self):
"""
Updates the plot list in the model and the view. Filter text
is applied to the updated selection if required.
"""
plot_list = self.model.get_plot_list()
self.view.set_plot_list(plot_list)
def append_to_plot_list(self, plot_number):
"""
Appends the plot name to the end of the plot list
:param plot_number: The unique number in GlobalFigureManager
"""
self.view.append_to_plot_list(plot_number)
self.view.set_visibility_icon(plot_number, self.model.is_visible(plot_number))
def remove_from_plot_list(self, plot_number):
"""
Removes the plot name from the plot list
:param plot_number: The unique number in GlobalFigureManager
"""
self.view.remove_from_plot_list(plot_number)
def rename_in_plot_list(self, plot_number, new_name):
"""
Replaces a name in the plot list
:param plot_number: The unique number in GlobalFigureManager
:param new_name: The new name for the plot
"""
self.view.rename_in_plot_list(plot_number, new_name)
# ----------------------- Plot Filtering ------------------------
def filter_text_changed(self):
"""
Called by the view when the filter text is changed (e.g. by
typing or clearing the text)
"""
if self.view.get_filter_text():
self.view.filter_plot_list()
else:
self.view.unhide_all_plots()
def is_shown_by_filter(self, plot_number):
"""
:param plot_number: The unique number in GlobalFigureManager
:return: True if shown, or False if filtered out
"""
filter_text = self.view.get_filter_text()
plot_name = self.get_plot_name_from_number(plot_number)
return filter_text.lower() in plot_name.lower()
# ------------------------ Plot Showing ------------------------
def show_single_selected(self):
"""
When a list item is double clicked the view calls this method
to bring the selected plot to the front
"""
plot_number = self.view.get_currently_selected_plot_number()
self._make_plot_active(plot_number)
def show_multiple_selected(self):
"""
Shows multiple selected plots, e.g. from pressing the 'Show'
button with multiple selected plots
"""
selected_plots = self.view.get_all_selected_plot_numbers()
for plot_number in selected_plots:
self._make_plot_active(plot_number)
def _make_plot_active(self, plot_number):
"""
Make the plot with the given name active - bring it to the
front and make it the choice for overplotting
:param plot_number: The unique number in GlobalFigureManager
"""
try:
self.model.show_plot(plot_number)
except ValueError as e:
print(e)
def set_active_font(self, plot_number):
"""
Set the icon for the active plot to be colored
:param plot_number: The unique number in GlobalFigureManager
"""
active_plot_number = self.view.active_plot_number
if active_plot_number > 0:
try:
self.view.set_active_font(active_plot_number, False)
except TypeError:
pass
# The last active plot could have been closed
# already, so there is nothing to do
self.view.set_active_font(plot_number, True)
self.view.active_plot_number = plot_number
# ------------------------ Plot Hiding -------------------------
def hide_selected_plots(self):
"""
Hide all plots that are selected in the view
"""
selected_plots = self.view.get_all_selected_plot_numbers()
for plot_number in selected_plots:
self._hide_plot(plot_number)
def _hide_plot(self, plot_number):
"""
Hides a single plot
"""
try:
self.model.hide_plot(plot_number)
except ValueError as e:
print(e)
def toggle_plot_visibility(self, plot_number):
"""
Toggles a plot between hidden and shown
:param plot_number: The unique number in GlobalFigureManager
"""
if self.model.is_visible(plot_number):
self._hide_plot(plot_number)
else:
self._make_plot_active(plot_number)
self.update_visibility_icon(plot_number)
def update_visibility_icon(self, plot_number):
"""
Updates the icon to indicate a plot as hidden or visible
:param plot_number: The unique number in GlobalFigureManager
"""
try:
is_visible = self.model.is_visible(plot_number)
self.view.set_visibility_icon(plot_number, is_visible)
except ValueError:
# There is a chance the plot was closed, which calls an
# update to this method. If we can not get the visibility
# status it is safe to assume the plot has been closed.
pass
# ------------------------ Plot Renaming ------------------------
def rename_figure(self, plot_number, new_name):
"""
Replaces a name in the plot list
:param plot_number: The unique number in GlobalFigureManager
:param new_name: The new plot name
"""
try:
self.model.rename_figure(plot_number, new_name)
except ValueError as e:
# We need to undo the rename in the view
self.view.rename_in_plot_list(plot_number, new_name)
print(e)
# ------------------------ Plot Closing -------------------------
def close_action_called(self):
"""
This is called by the view when closing plots is requested
(e.g. pressing close or delete).
"""
selected_plots = self.view.get_all_selected_plot_numbers()
self._close_plots(selected_plots)
def close_single_plot(self, plot_number):
"""
This is used to close plots when a close action is called
that does not refer to the selected plot(s)
:param plot_number: The unique number in GlobalFigureManager
"""
self._close_plots([plot_number])
def _close_plots(self, list_of_plot_numbers):
"""
Accepts a list of plot names to close
:param list_of_plots: A list of strings containing plot names
"""
for plot_number in list_of_plot_numbers:
try:
self.model.close_plot(plot_number)
except ValueError as e:
print(e)
# ----------------------- Plot Sorting --------------------------
def set_sort_order(self, is_ascending):
"""
Sets the sort order in the view
:param is_ascending: If true ascending order, else descending
"""
self.view.set_sort_order(is_ascending)
def set_sort_type(self, sort_type):
"""
Sets the sort order in the view
:param sort_type: A Column enum with the column to sort on
"""
self.view.set_sort_type(sort_type)
self.update_last_active_order()
def update_last_active_order(self):
"""
Update the sort keys in the view. This is only required when
changes to the last shown order occur in the model, when
renaming the key is set already
"""
if self.view.sort_type() == Column.LastActive:
self._set_last_active_order()
def _set_last_active_order(self):
"""
Set the last shown order in the view. This checks the sorting
currently set and then sets the sort keys to the appropriate
values
"""
last_active_values = self.model.last_active_values()
self.view.set_last_active_values(last_active_values)
def get_initial_last_active_value(self, plot_number):
"""
Gets the initial last active value for a plot just added, in
this case it is assumed to not have been shown
:param plot_number: The unique number in GlobalFigureManager
:return: A string with the last active value
"""
return '_' + self.model.get_plot_name_from_number(plot_number)
def get_renamed_last_active_value(self, plot_number, old_last_active_value):
"""
Gets the initial last active value for a plot that was
renamed. If the plot had a numeric value, i.e. has been shown
this is retained, else it is set
:param plot_number: The unique number in GlobalFigureManager
:param old_last_active_value: The previous last active value
"""
if old_last_active_value.isdigit():
return old_last_active_value
else:
return self.get_initial_last_active_value(plot_number)
# ---------------------- Plot Exporting -------------------------
def export_plots_called(self, extension):
"""
Export plots called from the view, then a single or multiple
plots exported depending on the number currently selected
:param extension: The file extension as a string including
a '.', for example '.png' (must be a type
supported by matplotlib)
"""
plot_numbers = self.view.get_all_selected_plot_numbers()
if len(plot_numbers) == 1:
self._export_single_plot(plot_numbers[0], extension)
elif len(plot_numbers) > 1:
self._export_multiple_plots(plot_numbers, extension)
def _export_single_plot(self, plot_number, extension):
"""
Called when a single plot is selected to export - prompts for
a filename then tries to save the plot
:param plot_number: The unique number in GlobalFigureManager
:param extension: The file extension as a string including
a '.', for example '.png' (must be a type
supported by matplotlib)
"""
absolute_path = self.view.get_file_name_for_saving(extension)
        if not absolute_path.endswith(extension):
absolute_path += extension
try:
self.model.export_plot(plot_number, absolute_path)
except ValueError as e:
print(e)
def _export_multiple_plots(self, plot_numbers, extension):
"""
Export all selected plots in the plot_numbers list, first
prompting for a save directory then sanitising plot names to
unique, usable file names
:param plot_numbers: A list of plot numbers to export
:param extension: The file extension as a string including
a '.', for example '.png' (must be a type
supported by matplotlib)
"""
dir_name = self.view.get_directory_name_for_saving()
# A temporary dictionary holding plot numbers as keys, plot
# names as values
plots = {}
for plot_number in plot_numbers:
plot_name = self.model.get_plot_name_from_number(plot_number)
plot_name = self._replace_special_characters(plot_name)
if plot_name in plots.values():
plot_name = self._make_unique_name(plot_name, plots)
plots[plot_number] = plot_name
self._export_plot(plot_number, plot_name, dir_name, extension)
def _replace_special_characters(self, string):
"""
Removes any characters that are not valid in file names
across all operating systems ('/' for Linux/Mac), more for
Windows
:param string: The string to replace characters in
:return: The string with special characters replace by '-'
"""
return re.sub(r'[<>:"/|\\?*]', r'-', string)
def _make_unique_name(self, name, dictionary):
"""
Given a name and a dictionary, make a unique name that does
not already exist in the dictionary values by appending
' (1)', ' (2)', ' (3)' etc. to the end of the name
:param name: A string with the non-unique name
:param dictionary: A dictionary with string values
:return : The unique plot name
"""
i = 1
while True:
plot_name_attempt = name + ' ({})'.format(str(i))
if plot_name_attempt not in dictionary.values():
break
i += 1
return plot_name_attempt
def _export_plot(self, plot_number, plot_name, dir_name, extension):
"""
Given a plot number, plot name, directory and extension
construct the absolute path name and call the model to save
the figure
:param plot_number: The unique number in GlobalFigureManager
:param plot_name: The name to use for saving
:param dir_name: The directory to save to
:param extension: The file extension as a string including
a '.', for example '.png' (must be a type
supported by matplotlib)
"""
if dir_name:
filename = os.path.join(dir_name, plot_name + extension)
try:
self.model.export_plot(plot_number, filename)
except ValueError as e:
print(e)
| gpl-3.0 | 4,486,501,413,745,917,000 | 36.760494 | 86 | 0.589028 | false |
DailyActie/Surrogate-Model | surrogate/sampling/samLatinHypercube.py | 1 | 8477 | # MIT License
#
# Copyright (c) 2016 Daily Actie
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Author: Quan Pan <[email protected]>
# License: MIT License
# Create: 2016-12-02
import numpy as np
def samLatinHypercube(n, samples=None, criterion=None, iterations=None):
"""Generate a latin-hypercube design
:param n: The number of factors to generate samples for
:param samples: The number of samples to generate for each factor (Default: n)
:param criterion: Allowable values are "center" or "c", "maximin" or "m",
"centermaximin" or "cm", and "correlation" or "corr". If no value
given, the design is simply randomized.
    :param iterations: The number of iterations in the maximin and correlation algorithms
        (Default: 5).
    :returns: A samples-by-n design matrix that has been normalized so factor values
        are uniformly spaced between zero and one.
This code was originally published by the following individuals for use with
Scilab:
- Copyright (C) 2012 - 2013 - Michael Baudin
- Copyright (C) 2012 - Maria Christopoulou
- Copyright (C) 2010 - 2011 - INRIA - Michael Baudin
- Copyright (C) 2009 - Yann Collette
- Copyright (C) 2009 - CEA - Jean-Marc Martinez
web: forge.scilab.org/index.php/p/scidoe/sourcetree/master/macros
Much thanks goes to these individuals. It has been converted to Python by
Abraham Lee.
:Example:
A 3-factor design (defaults to 3 samples):
>>> samLatinHypercube(3)
array([[ 0.40069325, 0.08118402, 0.69763298],
[ 0.19524568, 0.41383587, 0.29947106],
[ 0.85341601, 0.75460699, 0.360024 ]])
A 4-factor design with 6 samples:
>>> samLatinHypercube(4, samples=6)
array([[ 0.27226812, 0.02811327, 0.62792445, 0.91988196],
[ 0.76945538, 0.43501682, 0.01107457, 0.09583358],
[ 0.45702981, 0.76073773, 0.90245401, 0.18773015],
[ 0.99342115, 0.85814198, 0.16996665, 0.65069309],
[ 0.63092013, 0.22148567, 0.33616859, 0.36332478],
[ 0.05276917, 0.5819198 , 0.67194243, 0.78703262]])
A 2-factor design with 5 centered samples:
>>> samLatinHypercube(2, samples=5, criterion='center')
array([[ 0.3, 0.5],
[ 0.7, 0.9],
[ 0.1, 0.3],
[ 0.9, 0.1],
[ 0.5, 0.7]])
A 3-factor design with 4 samples where the minimum distance between
all samples has been maximized:
>>> samLatinHypercube(3, samples=4, criterion='maximin')
array([[ 0.02642564, 0.55576963, 0.50261649],
[ 0.51606589, 0.88933259, 0.34040838],
[ 0.98431735, 0.0380364 , 0.01621717],
[ 0.40414671, 0.33339132, 0.84845707]])
A 4-factor design with 5 samples where the samples are as uncorrelated
as possible (within 10 iterations):
>>> samLatinHypercube(4, samples=5, criterion='correlate', iterations=10)
"""
H = None
if samples is None:
samples = n
if criterion is not None:
assert criterion.lower() in ('center', 'c', 'maximin', 'm',
'centermaximin', 'cm', 'correlation',
'corr'), 'Invalid value for "criterion": {}'.format(criterion)
else:
H = _lhsclassic(n, samples)
if criterion is None:
criterion = 'center'
if iterations is None:
iterations = 5
if H is None:
if criterion.lower() in ('center', 'c'):
H = _lhscentered(n, samples)
elif criterion.lower() in ('maximin', 'm'):
H = _lhsmaximin(n, samples, iterations, 'maximin')
elif criterion.lower() in ('centermaximin', 'cm'):
H = _lhsmaximin(n, samples, iterations, 'centermaximin')
elif criterion.lower() in ('correlate', 'corr'):
H = _lhscorrelate(n, samples, iterations)
return H
################################################################################
def _lhsclassic(n, samples):
# Generate the intervals
cut = np.linspace(0, 1, samples + 1)
# Fill points uniformly in each interval
u = np.random.rand(samples, n)
a = cut[:samples]
b = cut[1:samples + 1]
rdpoints = np.zeros_like(u)
for j in range(n):
rdpoints[:, j] = u[:, j] * (b - a) + a
# Make the random pairings
H = np.zeros_like(rdpoints)
for j in range(n):
order = np.random.permutation(range(samples))
H[:, j] = rdpoints[order, j]
return H
################################################################################
def _lhscentered(n, samples):
# Generate the intervals
cut = np.linspace(0, 1, samples + 1)
# Fill points uniformly in each interval
u = np.random.rand(samples, n)
a = cut[:samples]
b = cut[1:samples + 1]
_center = (a + b) / 2
# Make the random pairings
H = np.zeros_like(u)
for j in range(n):
H[:, j] = np.random.permutation(_center)
return H
################################################################################
def _lhsmaximin(n, samples, iterations, lhstype):
maxdist = 0
# Maximize the minimum distance between points
for i in range(iterations):
if lhstype == 'maximin':
Hcandidate = _lhsclassic(n, samples)
else:
Hcandidate = _lhscentered(n, samples)
d = _pdist(Hcandidate)
if maxdist < np.min(d):
maxdist = np.min(d)
H = Hcandidate.copy()
return H
################################################################################
def _lhscorrelate(n, samples, iterations):
mincorr = np.inf
# Minimize the components correlation coefficients
for i in range(iterations):
# Generate a random LHS
Hcandidate = _lhsclassic(n, samples)
        # Correlate the factors (columns) rather than the sample points (rows)
        R = np.corrcoef(Hcandidate, rowvar=False)
if np.max(np.abs(R[R != 1])) < mincorr:
mincorr = np.max(np.abs(R - np.eye(R.shape[0])))
print('new candidate solution found with max,abs corrcoef = {}'.format(mincorr))
H = Hcandidate.copy()
return H
################################################################################
def _pdist(x):
"""Calculate the pair-wise point distances of a matrix
:param x: An m-by-n array of scalars, where there are m points in n dimensions.
:type x: 2d-array
:returns: d array
A 1-by-b array of scalars, where b = m*(m - 1)/2. This array contains
all the pair-wise point distances, arranged in the order (1, 0),
(2, 0), ..., (m-1, 0), (2, 1), ..., (m-1, 1), ..., (m-1, m-2).
:Example:
>>> x = np.array([[0.1629447, 0.8616334],
... [0.5811584, 0.3826752],
... [0.2270954, 0.4442068],
... [0.7670017, 0.7264718],
... [0.8253975, 0.1937736]])
>>> _pdist(x)
array([ 0.6358488, 0.4223272, 0.6189940, 0.9406808, 0.3593699,
0.3908118, 0.3087661, 0.6092392, 0.6486001, 0.5358894])
"""
x = np.atleast_2d(x)
assert len(x.shape) == 2, 'Input array must be 2d-dimensional'
m, n = x.shape
if m < 2:
return []
d = []
for i in range(m - 1):
for j in range(i + 1, m):
d.append((sum((x[j, :] - x[i, :]) ** 2)) ** 0.5)
return np.array(d)
| mit | 210,019,104,960,525,800 | 33.044177 | 99 | 0.572018 | false |
pyannote/pyannote-parser | tests/test_repere.py | 1 | 2075 | #!/usr/bin/env python
# encoding: utf-8
# The MIT License (MIT)
# Copyright (c) 2014-2015 CNRS
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# AUTHORS
# Hervé BREDIN - http://herve.niderb.fr
from __future__ import print_function
import pytest
from pyannote.core import Segment
from pyannote.parser import REPEREParser
import tempfile
import os
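# Each REPERE annotation line is: <uri> <start> <end> <modality> <label>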
SAMPLE_ANNOTATION = """uri1 1.0 3.5 speech alice
uri1 3.0 7.5 speech barbara
uri1 6.0 9.0 speech chris
"""
@pytest.fixture
def sample_annotation(request):
_, filename = tempfile.mkstemp()
with open(filename, 'w') as f:
f.write(SAMPLE_ANNOTATION)
def delete():
os.remove(filename)
request.addfinalizer(delete)
return filename
def test_load_annotation(sample_annotation):
parser = REPEREParser()
annotations = parser.read(sample_annotation)
speech1 = annotations(uri="uri1", modality="speech")
assert list(speech1.itertracks(label=True)) == [
(Segment(1, 3.5), 0, 'alice'),
(Segment(3, 7.5), 1, 'barbara'),
(Segment(6, 9), 2, 'chris')]
| mit | 1,854,699,619,890,575,400 | 31.920635 | 79 | 0.729508 | false |
Rdbaker/Mealbound | tests/models/test_transactions.py | 1 | 4543 | """Test the Transaction models."""
from unittest.mock import patch
import pytest
from ceraon.models.transactions import Transaction
@pytest.mark.usefixtures('db')
class TestTransaction:
"""Transaction tests."""
def test_get_by_id(self, meal, host, guest):
"""Get Transaction by id."""
transaction = Transaction(payer=guest, amount=meal.price, payee=host,
meal=meal)
transaction.save()
retrieved = Transaction.find(transaction.id)
assert retrieved == transaction
@patch('ceraon.models.transactions.stripe')
def test_charge_returns_true_without_error(self, stripe_mock, transaction):
"""Test that charge() returns True if no stripe error is raised."""
assert transaction.charge() is True
@patch('ceraon.models.transactions.stripe')
def test_successful_charge_sets_property(self, stripe_mock, transaction):
"""Test that charge() sets transaction_went_through to True."""
transaction.charge()
assert transaction.transaction_went_through is True
@patch('ceraon.models.transactions.stripe')
def test_failed_charge_returns_false(self, stripe_mock, transaction):
"""Test that charge() returns false if stripe throws an error."""
stripe_mock.Charge.create.side_effect = RuntimeError('failed charge')
assert transaction.charge() is False
@patch('ceraon.models.transactions.stripe')
def test_failed_charge_doesnt_set_attribute(self, stripe_mock, transaction):
"""Test that a failed charge() doesn't set transaction_went_through."""
stripe_mock.Charge.create.side_effect = RuntimeError('failed charge')
transaction.charge()
assert transaction.transaction_went_through is False
def test_cancel_sets_canceled(self, transaction):
"""Test that calling cancel() sets the canceled property."""
transaction.cancel()
assert transaction.canceled is True
@patch('ceraon.models.transactions.stripe')
def test_set_stripe_source_on_user_no_stripe_id(self, stripe_mock, user):
"""Test that setting the stripe customer ID works."""
customer_id = 'this is the stripe customer id'
stripe_mock.Customer.create.return_value.id = customer_id
Transaction.set_stripe_source_on_user(user=user, token='some token')
assert user.stripe_customer_id == customer_id
@patch('ceraon.models.transactions.stripe')
def test_set_stripe_source_on_user_returns_true(self, stripe_mock, user):
"""Test that setting the stripe customer ID returns True."""
customer_id = 'this is the stripe customer id'
stripe_mock.Customer.create.return_value.id = customer_id
assert Transaction.set_stripe_source_on_user(
user=user, token='some token') is True
@patch('ceraon.models.transactions.stripe')
def test_set_stripe_source_on_user_existing_id(self, stripe_mock, user):
"""Test that resetting the stripe customer ID works."""
customer_id = 'this is the stripe customer id'
assert user.stripe_customer_id is None
user.stripe_customer_id = customer_id
assert Transaction.set_stripe_source_on_user(
user=user, token='some token') is True
stripe_mock.Customer.retrieve.assert_called_once()
@patch('ceraon.models.transactions.stripe')
def test_set_stripe_source_on_user_fail(self, stripe_mock, user):
"""Test that a stripe failure returns false."""
stripe_mock.Customer.create.side_effect = RuntimeError('stripe error')
assert Transaction.set_stripe_source_on_user(
user=user, token='some token') is False
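    # The expected cut is 10% of the transaction amount with a $0.50 minimum,
    # as encoded by the parametrized cases below.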
@pytest.mark.parametrize('amount,expected', [
(5.00, 0.5),
(5.05, 0.505),
(4.00, 0.5),
(90.00, 9),
(42.10, 4.21),
(2.50, 0.5)
])
def test_operational_overhead_cut(self, transaction, amount, expected):
"""Test that the operational_overhead_cost is as expected."""
transaction.amount = amount
assert transaction.operational_overhead_cut == expected
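    # The takehome amount is the transaction amount minus the overhead cut above.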
@pytest.mark.parametrize('amount,expected', [
(5.00, 4.5),
(5.05, 4.545),
(4.00, 3.5),
(90.00, 81),
(42.10, 37.89),
(2.50, 2)
])
def test_takehome_amount(self, transaction, amount, expected):
"""Test that the takehome_amount is as expected."""
transaction.amount = amount
assert transaction.takehome_amount == expected
| bsd-3-clause | 5,709,859,063,645,114,000 | 41.064815 | 80 | 0.657495 | false |
xkmato/casepro | casepro/urls.py | 1 | 1472 | from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.views import static
from casepro.backend import get_backend
from casepro.utils.views import PartialTemplate
urlpatterns = [
url(r'', include('casepro.cases.urls')),
url(r'', include('casepro.contacts.urls')),
url(r'', include('casepro.msg_board.urls')),
url(r'', include('casepro.msgs.urls')),
url(r'', include('casepro.rules.urls')),
url(r'', include('casepro.profiles.urls')),
url(r'', include('casepro.orgs_ext.urls')),
url(r'^pods/', include('casepro.pods.urls')),
url(r'^stats/', include('casepro.statistics.urls')),
url(r'^users/', include('dash.users.urls')),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^comments/', include('django_comments.urls')),
url(r'^partials/(?P<template>[a-z0-9\-_]+)\.html$', PartialTemplate.as_view(), name='utils.partial_template')
]
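# The configured backend may contribute additional URL patterns of its own.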
backend_urls = get_backend().get_url_patterns() or []
urlpatterns += backend_urls
if settings.DEBUG: # pragma: no cover
try:
import debug_toolbar
urlpatterns.append(url(r'^__debug__/', include(debug_toolbar.urls)))
except ImportError:
pass
    urlpatterns = [
        url(r'^media/(?P<path>.*)$', static.serve, {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
        url(r'', include('django.contrib.staticfiles.urls'))
    ] + urlpatterns
| bsd-3-clause | 4,833,736,216,706,264,000 | 36.74359 | 113 | 0.657609 | false |
wangkua1/sportvu | sportvu/detection_from_raw_pred.py | 1 | 3391 | """detection_from_raw_pred.py
* not super useful: a simple script that plots a) raw predictions, b) ground-truth PNRs, c) detector output
at a single setting
Usage:
detection_from_raw_pred.py <fold_index> <f_data_config> <f_model_config> <f_detect_config> --train
Arguments:
Example:
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import sys
import os
from tqdm import tqdm
from docopt import docopt
import yaml
import gc
import matplotlib.pylab as plt
import cPickle as pkl
##
from sportvu.data.dataset import BaseDataset
from sportvu.detect.running_window_p import RunWindowP
from sportvu.detect.nms import NMS
from sportvu.detect.utils import smooth_1D_array
arguments = docopt(__doc__)
print ("...Docopt... ")
print(arguments)
print ("............\n")
f_data_config = arguments['<f_data_config>']
f_model_config = arguments['<f_model_config>']
f_detect_config = arguments['<f_detect_config>']
if arguments['--train']:
dataset = BaseDataset(f_data_config, fold_index=int(arguments['<fold_index>']), load_raw=True)
# pre_trained = arguments['<pre_trained>']
data_config = yaml.load(open(f_data_config, 'rb'))
model_config = yaml.load(open(f_model_config, 'rb'))
model_name = os.path.basename(f_model_config).split('.')[0]
data_name = os.path.basename(f_data_config).split('.')[0]
exp_name = '%s-X-%s' % (model_name, data_name)
detect_config = yaml.load(open(f_detect_config, 'rb'))
detector = eval(detect_config['class'])(detect_config)
plot_folder = os.path.join('./plots', exp_name)
if not os.path.exists(plot_folder):
raise Exception('Run test.py first to get raw predictions')
def label_in_cand(cand, labels):
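    # each cand is a (high, low) gameclock pair; return True if any label
    # falls strictly inside the interval (cand[1], cand[0])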
for l in labels:
if l > cand[1] and l < cand[0]:
return True
return False
plt.figure()
if arguments['--train']:
split = 'train'
else:
split = 'val'
all_pred_f = [s for s in os.listdir(os.path.join(plot_folder, 'pkl'))
              if '.pkl' in s and split in s and 'meta' not in s]
if arguments['--train']:
annotations = []
for _, f in tqdm(enumerate(all_pred_f)):
ind = int(f.split('.')[0].split('-')[1])
gameclocks, pnr_probs, labels = pkl.load(open(os.path.join(plot_folder,'pkl/%s-%i.pkl'%(split,ind)), 'rb'))
meta = pkl.load( open(
os.path.join(plot_folder, 'pkl/%s-meta-%i.pkl' %(split, ind)), 'rb'))
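        # detect() returns the candidate windows, an optional curve to overlay
        # on the plot, and the index into gameclocks for each candidate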
cands, mp, frame_indices = detector.detect(pnr_probs, gameclocks, True)
print (cands)
plt.plot(gameclocks, pnr_probs, '-')
if mp is not None:
plt.plot(gameclocks, mp, '-')
plt.plot(np.array(labels), np.ones((len(labels))), '.')
for ind, cand in enumerate(cands):
cand_x = np.arange(cand[1], cand[0], .1)
plt.plot(cand_x, np.ones((len(cand_x))) * .95, '-' )
## if FP, record annotations
if arguments['--train'] and not label_in_cand(cand, labels):
anno = {'gameid':meta[1], 'gameclock':gameclocks[frame_indices[ind]],
'eid':meta[0], 'quarter':dataset.games[meta[1]]['events'][meta[0]]['quarter']}
annotations.append(anno)
plt.ylim([0,1])
plt.title('Game: %s, Event: %i'%(meta[1], meta[0]))
plt.savefig(os.path.join(plot_folder, '%s-%s-%i.png' %(detect_config['class'], split, ind)))
plt.clf()
if arguments['--train']:  # annotations only exists in --train mode; guard against a NameError
    pkl.dump(annotations, open(os.path.join(plot_folder, 'pkl/hard-negative-examples.pkl'), 'wb')) | mit | 2,246,855,696,715,281,700 | 36.274725 | 111 | 0.648186 | false |
ptisserand/ansible | lib/ansible/modules/cloud/amazon/cloudfront_distribution.py | 1 | 85955 | #!/usr/bin/python
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cloudfront_distribution
short_description: create, update and delete AWS CloudFront distributions.
description:
- Allows for easy creation, updating and deletion of CloudFront distributions.
requirements:
- boto3 >= 1.0.0
- python >= 2.6
version_added: "2.5"
author:
- Willem van Ketwich (@wilvk)
- Will Thames (@willthames)
extends_documentation_fragment:
- aws
- ec2
options:
state:
description:
- The desired state of the distribution
present - creates a new distribution or updates an existing distribution.
absent - deletes an existing distribution.
choices: ['present', 'absent']
default: 'present'
distribution_id:
description:
- The id of the cloudfront distribution. This parameter can be used in place of I(alias) or I(caller_reference) and is used in conjunction with I(e_tag).
e_tag:
description:
- A unique identifier of a modified or existing distribution. Used in conjunction with I(distribution_id).
Is determined automatically if not specified.
caller_reference:
description:
- A unique identifier for creating and updating cloudfront distributions. Each caller reference must be unique across all distributions. e.g. a caller
reference used in a web distribution cannot be reused in a streaming distribution. This parameter can be used instead of I(distribution_id)
to reference an existing distribution. If not specified, this defaults to a datetime stamp of the format
'YYYY-MM-DDTHH:MM:SS.ffffff'.
tags:
description:
- Should be input as a dict() of key-value pairs.
Note that numeric keys or values must be wrapped in quotes, e.g. Priority: '1'
purge_tags:
description:
- Specifies whether existing tags will be removed before adding new tags. When I(purge_tags=yes), existing tags are removed and I(tags) are added, if
specified. If no tags are specified, it removes all existing tags for the distribution. When I(purge_tags=no), existing tags are kept and I(tags)
are added, if specified.
default: 'no'
choices: ['yes', 'no']
alias:
description:
- The name of an alias (CNAME) that is used in a distribution. This is used to effectively reference a distribution by its alias as an alias can only
be used by one distribution per AWS account. This variable avoids having to provide the I(distribution_id) as well as
the I(e_tag), or I(caller_reference) of an existing distribution.
aliases:
description:
- A I(list[]) of domain name aliases (CNAMEs) as strings to be used for the distribution. Each alias must be unique across all distribution for the AWS
account.
purge_aliases:
description:
- Specifies whether existing aliases will be removed before adding new aliases. When I(purge_aliases=yes), existing aliases are removed and I(aliases)
are added.
default: 'no'
choices: ['yes', 'no']
default_root_object:
description:
- A config element that specifies the path to request when the user requests the origin. e.g. if specified as 'index.html', this maps to
www.example.com/index.html when www.example.com is called by the user. This prevents the entire distribution origin from being exposed at the root.
default_origin_domain_name:
description:
- The domain name to use for an origin if no I(origins) have been specified. Should only be used on a first run of generating a distribution and not on
subsequent runs. Should not be used in conjunction with I(distribution_id), I(caller_reference) or I(alias).
default_origin_path:
description:
- The default origin path to specify for an origin if no I(origins) have been specified. Defaults to empty if not specified.
origins:
description:
- A config element that is a I(list[]) of complex origin objects to be specified for the distribution. Used for creating and updating distributions.
Each origin item comprises the attributes
I(id)
I(domain_name) (defaults to default_origin_domain_name if not specified)
I(origin_path) (defaults to default_origin_path if not specified)
I(custom_headers[])
I(header_name)
I(header_value)
I(s3_origin_access_identity_enabled)
I(custom_origin_config)
I(http_port)
I(https_port)
I(origin_protocol_policy)
I(origin_ssl_protocols[])
I(origin_read_timeout)
I(origin_keepalive_timeout)
purge_origins:
description: Whether to remove any origins that aren't listed in I(origins)
default: false
default_cache_behavior:
description:
- A config element that is a complex object specifying the default cache behavior of the distribution. If not specified, the I(target_origin_id) is
defined as the I(target_origin_id) of the first valid I(cache_behavior) in I(cache_behaviors) with defaults.
The default cache behavior comprises the attributes
I(target_origin_id)
I(forwarded_values)
I(query_string)
I(cookies)
I(forward)
I(whitelisted_names)
I(headers[])
I(query_string_cache_keys[])
I(trusted_signers)
I(enabled)
I(items[])
I(viewer_protocol_policy)
I(min_ttl)
I(allowed_methods)
I(items[])
I(cached_methods[])
I(smooth_streaming)
I(default_ttl)
I(max_ttl)
I(compress)
I(lambda_function_associations[])
I(lambda_function_arn)
I(event_type)
cache_behaviors:
description:
- A config element that is a I(list[]) of complex cache behavior objects to be specified for the distribution. The order
of the list is preserved across runs unless C(purge_cache_behavior) is enabled.
Each cache behavior comprises the attributes
I(path_pattern)
I(target_origin_id)
I(forwarded_values)
I(query_string)
I(cookies)
I(forward)
I(whitelisted_names)
I(headers[])
I(query_string_cache_keys[])
I(trusted_signers)
I(enabled)
I(items[])
I(viewer_protocol_policy)
I(min_ttl)
I(allowed_methods)
I(items[])
I(cached_methods[])
I(smooth_streaming)
I(default_ttl)
I(max_ttl)
I(compress)
I(lambda_function_associations[])
purge_cache_behaviors:
description: Whether to remove any cache behaviors that aren't listed in I(cache_behaviors). This switch
also allows the reordering of cache_behaviors.
default: false
custom_error_responses:
description:
- A config element that is a I(list[]) of complex custom error responses to be specified for the distribution. This attribute configures custom http
error messages returned to the user.
Each custom error response object comprises the attributes
I(error_code)
I(response_page_path)
I(response_code)
I(error_caching_min_ttl)
purge_custom_error_responses:
description: Whether to remove any custom error responses that aren't listed in I(custom_error_responses)
default: false
comment:
description:
- A comment that describes the cloudfront distribution. If not specified, it defaults to a
generic message that it has been created with Ansible, and a datetime stamp.
logging:
description:
- A config element that is a complex object that defines logging for the distribution.
The logging object comprises the attributes
I(enabled)
I(include_cookies)
I(bucket)
I(prefix)
price_class:
description:
- A string that specifies the pricing class of the distribution. As per
U(https://aws.amazon.com/cloudfront/pricing/)
I(price_class=PriceClass_100) consists of the areas
United States
Canada
Europe
I(price_class=PriceClass_200) consists of the areas
United States
Canada
Europe
Hong Kong, Philippines, S. Korea, Singapore & Taiwan
Japan
India
I(price_class=PriceClass_All) consists of the areas
United States
Canada
Europe
Hong Kong, Philippines, S. Korea, Singapore & Taiwan
Japan
India
South America
Australia
choices: ['PriceClass_100', 'PriceClass_200', 'PriceClass_All']
default: aws defaults this to 'PriceClass_All'
enabled:
description:
- A boolean value that specifies whether the distribution is enabled or disabled.
default: 'yes'
choices: ['yes', 'no']
viewer_certificate:
description:
- A config element that is a complex object that specifies the encryption details of the distribution.
Comprises the following attributes
I(cloudfront_default_certificate)
I(iam_certificate_id)
I(acm_certificate_arn)
I(ssl_support_method)
I(minimum_protocol_version)
I(certificate)
I(certificate_source)
restrictions:
description:
- A config element that is a complex object that describes how a distribution should restrict its content.
The restriction object comprises the following attributes
I(geo_restriction)
I(restriction_type)
I(items[])
web_acl_id:
description:
- The id of a Web Application Firewall (WAF) Access Control List (ACL).
http_version:
description:
- The version of the http protocol to use for the distribution.
choices: [ 'http1.1', 'http2' ]
default: aws defaults this to 'http2'
ipv6_enabled:
description:
- Determines whether IPv6 support is enabled or not.
choices: ['yes', 'no']
default: 'no'
wait:
description:
- Specifies whether the module waits until the distribution has completed processing the creation or update.
choices: ['yes', 'no']
default: 'no'
wait_timeout:
description:
- Specifies the duration in seconds to wait for a timeout of a cloudfront create or update. Defaults to 1800 seconds (30 minutes).
default: 1800
'''
EXAMPLES = '''
# create a basic distribution with defaults and tags
- cloudfront_distribution:
state: present
default_origin_domain_name: www.my-cloudfront-origin.com
tags:
Name: example distribution
Project: example project
Priority: '1'
# update a distribution comment by distribution_id
- cloudfront_distribution:
state: present
distribution_id: E1RP5A2MJ8073O
comment: modified by ansible cloudfront.py
# update a distribution comment by caller_reference
- cloudfront_distribution:
state: present
caller_reference: my cloudfront distribution 001
comment: modified by ansible cloudfront.py
# update a distribution's aliases and comment using the distribution_id as a reference
- cloudfront_distribution:
state: present
distribution_id: E1RP5A2MJ8073O
comment: modified by cloudfront.py again
aliases: [ 'www.my-distribution-source.com', 'zzz.aaa.io' ]
# update a distribution's aliases and comment using an alias as a reference
- cloudfront_distribution:
state: present
caller_reference: my test distribution
comment: modified by cloudfront.py again
aliases:
- www.my-distribution-source.com
- zzz.aaa.io
# update a distribution's comment and aliases and tags and remove existing tags
- cloudfront_distribution:
state: present
distribution_id: E15BU8SDCGSG57
comment: modified by cloudfront.py again
aliases:
- tested.com
tags:
Project: distribution 1.2
purge_tags: yes
# create a distribution with an origin, logging and default cache behavior
- cloudfront_distribution:
state: present
caller_reference: unique test distribution id
origins:
- id: 'my test origin-000111'
domain_name: www.example.com
origin_path: /production
custom_headers:
- header_name: MyCustomHeaderName
header_value: MyCustomHeaderValue
default_cache_behavior:
target_origin_id: 'my test origin-000111'
forwarded_values:
query_string: true
cookies:
forward: all
headers:
- '*'
viewer_protocol_policy: allow-all
smooth_streaming: true
compress: true
allowed_methods:
items:
- GET
- HEAD
cached_methods:
- GET
- HEAD
logging:
enabled: true
include_cookies: false
bucket: mylogbucket.s3.amazonaws.com
prefix: myprefix/
enabled: false
comment: this is a cloudfront distribution with logging
# delete a distribution
- cloudfront_distribution:
state: absent
caller_reference: replaceable distribution
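# create a distribution restricted to a set of countries (an illustrative
# sketch; the domain name and country codes below are placeholders)
- cloudfront_distribution:
    state: present
    default_origin_domain_name: www.my-cloudfront-origin.com
    restrictions:
      geo_restriction:
        restriction_type: whitelist
        items:
          - US
          - CA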
'''
RETURN = '''
active_trusted_signers:
description: Key pair IDs that CloudFront is aware of for each trusted signer
returned: always
type: complex
contains:
enabled:
description: Whether trusted signers are in use
returned: always
type: bool
sample: false
quantity:
description: Number of trusted signers
returned: always
type: int
sample: 1
items:
description: List of trusted signers
returned: when there are trusted signers
type: list
sample:
- key_pair_id
aliases:
description: Aliases that refer to the distribution
returned: always
type: complex
contains:
items:
description: List of aliases
returned: always
type: list
sample:
- test.example.com
quantity:
description: Number of aliases
returned: always
type: int
sample: 1
arn:
description: Amazon Resource Name of the distribution
returned: always
type: string
sample: arn:aws:cloudfront::123456789012:distribution/E1234ABCDEFGHI
cache_behaviors:
description: Cloudfront cache behaviors
returned: always
type: complex
contains:
items:
description: List of cache behaviors
returned: always
type: complex
contains:
allowed_methods:
description: Methods allowed by the cache behavior
returned: always
type: complex
contains:
cached_methods:
description: Methods cached by the cache behavior
returned: always
type: complex
contains:
items:
description: List of cached methods
returned: always
type: list
sample:
- HEAD
- GET
quantity:
description: Count of cached methods
returned: always
type: int
sample: 2
items:
description: List of methods allowed by the cache behavior
returned: always
type: list
sample:
- HEAD
- GET
quantity:
description: Count of methods allowed by the cache behavior
returned: always
type: int
sample: 2
compress:
description: Whether compression is turned on for the cache behavior
returned: always
type: bool
sample: false
default_ttl:
description: Default Time to Live of the cache behavior
returned: always
type: int
sample: 86400
forwarded_values:
description: Values forwarded to the origin for this cache behavior
returned: always
type: complex
contains:
cookies:
description: Cookies to forward to the origin
returned: always
type: complex
contains:
forward:
description: Which cookies to forward to the origin for this cache behavior
returned: always
type: string
sample: none
whitelisted_names:
description: The names of the cookies to forward to the origin for this cache behavior
returned: when I(forward) is C(whitelist)
type: complex
contains:
quantity:
description: Count of cookies to forward
returned: always
type: int
sample: 1
items:
description: List of cookies to forward
returned: when list is not empty
type: list
sample: my_cookie
headers:
description: Which headers are used to vary on cache retrievals
returned: always
type: complex
contains:
quantity:
description: Count of headers to vary on
returned: always
type: int
sample: 1
items:
description: List of headers to vary on
returned: when list is not empty
type: list
sample:
- Host
query_string:
description: Whether the query string is used in cache lookups
returned: always
type: bool
sample: false
query_string_cache_keys:
description: Which query string keys to use in cache lookups
returned: always
type: complex
contains:
quantity:
description: Count of query string cache keys to use in cache lookups
returned: always
type: int
sample: 1
items:
description: List of query string cache keys to use in cache lookups
returned: when list is not empty
type: list
sample:
lambda_function_associations:
description: Lambda function associations for a cache behavior
returned: always
type: complex
contains:
quantity:
description: Count of lambda function associations
returned: always
type: int
sample: 1
items:
description: List of lambda function associations
returned: when list is not empty
type: list
sample:
- lambda_function_arn: arn:aws:lambda:123456789012:us-east-1/lambda/lambda-function
event_type: viewer-response
max_ttl:
description: Maximum Time to Live
returned: always
type: int
sample: 31536000
min_ttl:
description: Minimum Time to Live
returned: always
type: int
sample: 0
path_pattern:
description: Path pattern that determines this cache behavior
returned: always
type: string
sample: /path/to/files/*
smooth_streaming:
description: Whether smooth streaming is enabled
returned: always
type: bool
sample: false
target_origin_id:
description: Id of origin reference by this cache behavior
returned: always
type: string
sample: origin_abcd
trusted_signers:
description: Trusted signers
returned: always
type: complex
contains:
enabled:
description: Whether trusted signers are enabled for this cache behavior
returned: always
type: bool
sample: false
quantity:
description: Count of trusted signers
returned: always
type: int
sample: 1
viewer_protocol_policy:
description: Policy of how to handle http/https
returned: always
type: string
sample: redirect-to-https
quantity:
description: Count of cache behaviors
returned: always
type: int
sample: 1
caller_reference:
description: Idempotency reference given when creating cloudfront distribution
returned: always
type: string
sample: '1484796016700'
comment:
description: Any comments you want to include about the distribution
returned: always
type: string
sample: 'my first cloudfront distribution'
custom_error_responses:
description: Custom error responses to use for error handling
returned: always
type: complex
contains:
items:
description: List of custom error responses
returned: always
type: complex
contains:
error_caching_min_ttl:
description: Minimum time to cache this error response
returned: always
type: int
sample: 300
error_code:
description: Origin response code that triggers this error response
returned: always
type: int
sample: 500
response_code:
description: Response code to return to the requester
returned: always
type: string
sample: '500'
response_page_path:
description: Path that contains the error page to display
returned: always
type: string
sample: /errors/5xx.html
quantity:
description: Count of custom error response items
returned: always
type: int
sample: 1
default_cache_behavior:
description: Default cache behavior
returned: always
type: complex
contains:
allowed_methods:
description: Methods allowed by the cache behavior
returned: always
type: complex
contains:
cached_methods:
description: Methods cached by the cache behavior
returned: always
type: complex
contains:
items:
description: List of cached methods
returned: always
type: list
sample:
- HEAD
- GET
quantity:
description: Count of cached methods
returned: always
type: int
sample: 2
items:
description: List of methods allowed by the cache behavior
returned: always
type: list
sample:
- HEAD
- GET
quantity:
description: Count of methods allowed by the cache behavior
returned: always
type: int
sample: 2
compress:
description: Whether compression is turned on for the cache behavior
returned: always
type: bool
sample: false
default_ttl:
description: Default Time to Live of the cache behavior
returned: always
type: int
sample: 86400
forwarded_values:
description: Values forwarded to the origin for this cache behavior
returned: always
type: complex
contains:
cookies:
description: Cookies to forward to the origin
returned: always
type: complex
contains:
forward:
description: Which cookies to forward to the origin for this cache behavior
returned: always
type: string
sample: none
whitelisted_names:
description: The names of the cookies to forward to the origin for this cache behavior
returned: when I(forward) is C(whitelist)
type: complex
contains:
quantity:
description: Count of cookies to forward
returned: always
type: int
sample: 1
items:
description: List of cookies to forward
returned: when list is not empty
type: list
sample: my_cookie
headers:
description: Which headers are used to vary on cache retrievals
returned: always
type: complex
contains:
quantity:
description: Count of headers to vary on
returned: always
type: int
sample: 1
items:
description: List of headers to vary on
returned: when list is not empty
type: list
sample:
- Host
query_string:
description: Whether the query string is used in cache lookups
returned: always
type: bool
sample: false
query_string_cache_keys:
description: Which query string keys to use in cache lookups
returned: always
type: complex
contains:
quantity:
description: Count of query string cache keys to use in cache lookups
returned: always
type: int
sample: 1
items:
description: List of query string cache keys to use in cache lookups
returned: when list is not empty
type: list
sample:
lambda_function_associations:
description: Lambda function associations for a cache behavior
returned: always
type: complex
contains:
quantity:
description: Count of lambda function associations
returned: always
type: int
sample: 1
items:
description: List of lambda function associations
returned: when list is not empty
type: list
sample:
- lambda_function_arn: arn:aws:lambda:123456789012:us-east-1/lambda/lambda-function
event_type: viewer-response
max_ttl:
description: Maximum Time to Live
returned: always
type: int
sample: 31536000
min_ttl:
description: Minimum Time to Live
returned: always
type: int
sample: 0
path_pattern:
description: Path pattern that determines this cache behavior
returned: always
type: string
sample: /path/to/files/*
smooth_streaming:
description: Whether smooth streaming is enabled
returned: always
type: bool
sample: false
target_origin_id:
description: Id of origin reference by this cache behavior
returned: always
type: string
sample: origin_abcd
trusted_signers:
description: Trusted signers
returned: always
type: complex
contains:
enabled:
description: Whether trusted signers are enabled for this cache behavior
returned: always
type: bool
sample: false
quantity:
description: Count of trusted signers
returned: always
type: int
sample: 1
viewer_protocol_policy:
description: Policy of how to handle http/https
returned: always
type: string
sample: redirect-to-https
default_root_object:
description: The object that you want CloudFront to request from your origin (for example, index.html)
when a viewer requests the root URL for your distribution
returned: always
type: string
sample: ''
diff:
description: Difference between previous configuration and new configuration
returned: always
type: dict
sample: {}
domain_name:
description: Domain name of cloudfront distribution
returned: always
type: string
sample: d1vz8pzgurxosf.cloudfront.net
enabled:
description: Whether the cloudfront distribution is enabled or not
returned: always
type: bool
sample: true
http_version:
description: Version of HTTP supported by the distribution
returned: always
type: string
sample: http2
id:
description: Cloudfront distribution ID
returned: always
type: string
sample: E123456ABCDEFG
in_progress_invalidation_batches:
description: The number of invalidation batches currently in progress
returned: always
type: int
sample: 0
is_ipv6_enabled:
description: Whether IPv6 is enabled
returned: always
type: bool
sample: true
last_modified_time:
description: Date and time distribution was last modified
returned: always
type: string
sample: '2017-10-13T01:51:12.656000+00:00'
logging:
description: Logging information
returned: always
type: complex
contains:
bucket:
description: S3 bucket logging destination
returned: always
type: string
sample: logs-example-com.s3.amazonaws.com
enabled:
description: Whether logging is enabled
returned: always
type: bool
sample: true
include_cookies:
description: Whether to log cookies
returned: always
type: bool
sample: false
prefix:
description: Prefix added to logging object names
returned: always
type: string
sample: cloudfront/test
origins:
description: Origins in the cloudfront distribution
returned: always
type: complex
contains:
items:
description: List of origins
returned: always
type: complex
contains:
custom_headers:
description: Custom headers passed to the origin
returned: always
type: complex
contains:
quantity:
description: Count of headers
returned: always
type: int
sample: 1
custom_origin_config:
description: Configuration of the origin
returned: always
type: complex
contains:
http_port:
description: Port on which HTTP is listening
returned: always
type: int
sample: 80
https_port:
description: Port on which HTTPS is listening
returned: always
type: int
sample: 443
origin_keepalive_timeout:
description: Keep-alive timeout
returned: always
type: int
sample: 5
origin_protocol_policy:
description: Policy of which protocols are supported
returned: always
type: string
sample: https-only
origin_read_timeout:
description: Timeout for reads to the origin
returned: always
type: int
sample: 30
origin_ssl_protocols:
description: SSL protocols allowed by the origin
returned: always
type: complex
contains:
items:
description: List of SSL protocols
returned: always
type: list
sample:
- TLSv1
- TLSv1.1
- TLSv1.2
quantity:
description: Count of SSL protocols
returned: always
type: int
sample: 3
domain_name:
description: Domain name of the origin
returned: always
type: string
sample: test-origin.example.com
id:
description: ID of the origin
returned: always
type: string
sample: test-origin.example.com
origin_path:
description: Subdirectory to prefix the request from the S3 or HTTP origin
returned: always
type: string
sample: ''
quantity:
description: Count of origins
returned: always
type: int
sample: 1
price_class:
description: Price class of cloudfront distribution
returned: always
type: string
sample: PriceClass_All
restrictions:
description: Restrictions in use by Cloudfront
returned: always
type: complex
contains:
geo_restriction:
description: Controls the countries in which your content is distributed.
returned: always
type: complex
contains:
quantity:
description: Count of restrictions
returned: always
type: int
sample: 1
items:
description: List of country codes allowed or disallowed
returned: always
type: list
sample: xy
restriction_type:
description: Type of restriction
returned: always
type: string
sample: blacklist
status:
description: Status of the cloudfront distribution
returned: always
type: string
sample: InProgress
tags:
description: Distribution tags
returned: always
type: dict
sample:
Hello: World
viewer_certificate:
description: Certificate used by cloudfront distribution
returned: always
type: complex
contains:
acm_certificate_arn:
description: ARN of ACM certificate
returned: when certificate comes from ACM
type: string
sample: arn:aws:acm:us-east-1:123456789012:certificate/abcd1234-1234-1234-abcd-123456abcdef
certificate:
description: Reference to certificate
returned: always
type: string
sample: arn:aws:acm:us-east-1:123456789012:certificate/abcd1234-1234-1234-abcd-123456abcdef
certificate_source:
description: Where certificate comes from
returned: always
type: string
sample: acm
minimum_protocol_version:
description: Minimum SSL/TLS protocol supported by this distribution
returned: always
type: string
sample: TLSv1
ssl_support_method:
description: Support for pre-SNI browsers or not
returned: always
type: string
sample: sni-only
web_acl_id:
description: ID of Web Access Control List (from WAF service)
returned: always
type: string
sample: abcd1234-1234-abcd-abcd-abcd12345678
'''
from ansible.module_utils._text import to_text, to_native
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.aws.cloudfront_facts import CloudFrontFactsServiceManager
from ansible.module_utils.ec2 import get_aws_connection_info
from ansible.module_utils.ec2 import ec2_argument_spec, boto3_conn, compare_aws_tags
from ansible.module_utils.ec2 import camel_dict_to_snake_dict, ansible_dict_to_boto3_tag_list
from ansible.module_utils.ec2 import snake_dict_to_camel_dict, boto3_tag_list_to_ansible_dict
import datetime
try:
from collections import OrderedDict
except ImportError:
try:
from ordereddict import OrderedDict
except ImportError:
pass # caught by AnsibleAWSModule (as python 2.6 + boto3 => ordereddict is installed)
try:
import botocore
except ImportError:
pass
def change_dict_key_name(dictionary, old_key, new_key):
if old_key in dictionary:
dictionary[new_key] = dictionary.get(old_key)
dictionary.pop(old_key, None)
return dictionary
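# Quick illustration of the helper above (comments only, not executed):
#   change_dict_key_name({'http_port': 80}, 'http_port', 'h_t_t_p_port')
#   -> {'h_t_t_p_port': 80}
# This is how snake_case module parameters are mapped to the oddly segmented
# key names that snake_dict_to_camel_dict needs to produce e.g. 'HTTPPort'.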
def merge_validation_into_config(config, validated_node, node_name):
if validated_node is not None:
if isinstance(validated_node, dict):
config_node = config.get(node_name)
if config_node is not None:
config_node_items = list(config_node.items())
else:
config_node_items = []
config[node_name] = dict(config_node_items + list(validated_node.items()))
if isinstance(validated_node, list):
config[node_name] = list(set(config.get(node_name) + validated_node))
return config
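# Illustration (comments only): validated sub-dicts are merged over the existing
# config, overwriting matching keys and keeping the rest, e.g.
#   merge_validation_into_config({'logging': {'enabled': False, 'prefix': 'p/'}},
#                                {'enabled': True}, 'logging')
#   -> {'logging': {'enabled': True, 'prefix': 'p/'}}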
def ansible_list_to_cloudfront_list(list_items=None, include_quantity=True):
if list_items is None:
list_items = []
if not isinstance(list_items, list):
raise ValueError('Expected a list, got a {0} with value {1}'.format(type(list_items).__name__, str(list_items)))
result = {}
if include_quantity:
result['quantity'] = len(list_items)
if len(list_items) > 0:
result['items'] = list_items
return result
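# Illustration (comments only): CloudFront list structures carry an explicit
# quantity alongside the items, e.g.
#   ansible_list_to_cloudfront_list(['example.com', 'www.example.com'])
#   -> {'quantity': 2, 'items': ['example.com', 'www.example.com']}
#   ansible_list_to_cloudfront_list([]) -> {'quantity': 0}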
def recursive_diff(dict1, dict2):
left = dict((k, v) for (k, v) in dict1.items() if k not in dict2)
right = dict((k, v) for (k, v) in dict2.items() if k not in dict1)
for k in (set(dict1.keys()) & set(dict2.keys())):
if isinstance(dict1[k], dict) and isinstance(dict2[k], dict):
result = recursive_diff(dict1[k], dict2[k])
if result:
left[k] = result[0]
right[k] = result[1]
elif dict1[k] != dict2[k]:
left[k] = dict1[k]
right[k] = dict2[k]
if left or right:
return left, right
else:
return None
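# Illustration (comments only): recursive_diff returns a (left, right) pair of
# nested dicts containing only the keys whose values differ, or None when the
# inputs are equal, e.g.
#   recursive_diff({'a': 1, 'b': {'c': 2}}, {'a': 1, 'b': {'c': 3}})
#   -> ({'b': {'c': 2}}, {'b': {'c': 3}})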
def create_distribution(client, module, config, tags):
try:
if not tags:
return client.create_distribution(DistributionConfig=config)['Distribution']
else:
distribution_config_with_tags = {
'DistributionConfig': config,
'Tags': {
'Items': tags
}
}
return client.create_distribution_with_tags(DistributionConfigWithTags=distribution_config_with_tags)['Distribution']
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Error creating distribution")
def delete_distribution(client, module, distribution):
try:
return client.delete_distribution(Id=distribution['Distribution']['Id'], IfMatch=distribution['ETag'])
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Error deleting distribution %s" % to_native(distribution['Distribution']))
def update_distribution(client, module, config, distribution_id, e_tag):
try:
return client.update_distribution(DistributionConfig=config, Id=distribution_id, IfMatch=e_tag)['Distribution']
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Error updating distribution to %s" % to_native(config))
def tag_resource(client, module, arn, tags):
try:
return client.tag_resource(Resource=arn, Tags=dict(Items=tags))
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Error tagging resource")
def untag_resource(client, module, arn, tag_keys):
try:
return client.untag_resource(Resource=arn, TagKeys=dict(Items=tag_keys))
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Error untagging resource")
def list_tags_for_resource(client, module, arn):
try:
response = client.list_tags_for_resource(Resource=arn)
return boto3_tag_list_to_ansible_dict(response.get('Tags').get('Items'))
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
module.fail_json_aws(e, msg="Error listing tags for resource")
def update_tags(client, module, existing_tags, valid_tags, purge_tags, arn):
changed = False
to_add, to_remove = compare_aws_tags(existing_tags, valid_tags, purge_tags)
if to_remove:
untag_resource(client, module, arn, to_remove)
changed = True
if to_add:
tag_resource(client, module, arn, ansible_dict_to_boto3_tag_list(to_add))
changed = True
return changed
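# Illustration (comments only), assuming existing tags {'Name': 'old', 'Env': 'dev'}
# and desired tags {'Name': 'new'} with purge_tags=True:
#   compare_aws_tags(...) -> to_add={'Name': 'new'}, to_remove=['Env']
# so update_tags() untags 'Env', re-tags 'Name' and reports changed=True.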
class CloudFrontValidationManager(object):
"""
Manages Cloudfront validations
"""
def __init__(self, module):
self.__cloudfront_facts_mgr = CloudFrontFactsServiceManager(module)
self.module = module
self.__default_distribution_enabled = True
self.__default_http_port = 80
self.__default_https_port = 443
self.__default_ipv6_enabled = False
self.__default_origin_ssl_protocols = [
'TLSv1',
'TLSv1.1',
'TLSv1.2'
]
self.__default_custom_origin_protocol_policy = 'match-viewer'
self.__default_custom_origin_read_timeout = 30
self.__default_custom_origin_keepalive_timeout = 5
self.__default_datetime_string = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%S.%f')
self.__default_cache_behavior_min_ttl = 0
self.__default_cache_behavior_max_ttl = 31536000
self.__default_cache_behavior_default_ttl = 86400
self.__default_cache_behavior_compress = False
self.__default_cache_behavior_viewer_protocol_policy = 'allow-all'
self.__default_cache_behavior_smooth_streaming = False
self.__default_cache_behavior_forwarded_values_forward_cookies = 'none'
self.__default_cache_behavior_forwarded_values_query_string = True
self.__default_trusted_signers_enabled = False
self.__valid_price_classes = set([
'PriceClass_100',
'PriceClass_200',
'PriceClass_All'
])
self.__valid_origin_protocol_policies = set([
'http-only',
'match-viewer',
'https-only'
])
self.__valid_origin_ssl_protocols = set([
'SSLv3',
'TLSv1',
'TLSv1.1',
'TLSv1.2'
])
self.__valid_cookie_forwarding = set([
'none',
'whitelist',
'all'
])
self.__valid_viewer_protocol_policies = set([
'allow-all',
'https-only',
'redirect-to-https'
])
self.__valid_methods = set([
'GET',
'HEAD',
'POST',
'PUT',
'PATCH',
'OPTIONS',
'DELETE'
])
self.__valid_methods_cached_methods = [
set([
'GET',
'HEAD'
]),
set([
'GET',
'HEAD',
'OPTIONS'
])
]
self.__valid_methods_allowed_methods = [
self.__valid_methods_cached_methods[0],
self.__valid_methods_cached_methods[1],
self.__valid_methods
]
self.__valid_lambda_function_association_event_types = set([
'viewer-request',
'viewer-response',
'origin-request',
'origin-response'
])
self.__valid_viewer_certificate_ssl_support_methods = set([
'sni-only',
'vip'
])
self.__valid_viewer_certificate_minimum_protocol_versions = set([
'SSLv3',
'TLSv1'
])
self.__valid_viewer_certificate_certificate_sources = set([
'cloudfront',
'iam',
'acm'
])
self.__valid_http_versions = set([
'http1.1',
'http2'
])
self.__s3_bucket_domain_identifier = '.s3.amazonaws.com'
def add_missing_key(self, dict_object, key_to_set, value_to_set):
if key_to_set not in dict_object and value_to_set is not None:
dict_object[key_to_set] = value_to_set
return dict_object
def add_key_else_change_dict_key(self, dict_object, old_key, new_key, value_to_set):
if old_key not in dict_object and value_to_set is not None:
dict_object[new_key] = value_to_set
else:
dict_object = change_dict_key_name(dict_object, old_key, new_key)
return dict_object
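# Illustration (comments only): the default is only used when the user omitted
# the key; otherwise the user's key is renamed in place, e.g.
#   add_key_else_change_dict_key({}, 'http_port', 'h_t_t_p_port', 80)
#   -> {'h_t_t_p_port': 80}
#   add_key_else_change_dict_key({'http_port': 8080}, 'http_port', 'h_t_t_p_port', 80)
#   -> {'h_t_t_p_port': 8080}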
def add_key_else_validate(self, dict_object, key_name, attribute_name, value_to_set, valid_values, to_aws_list=False):
if key_name in dict_object:
self.validate_attribute_with_allowed_values(value_to_set, attribute_name, valid_values)
else:
if to_aws_list:
dict_object[key_name] = ansible_list_to_cloudfront_list(value_to_set)
elif value_to_set is not None:
dict_object[key_name] = value_to_set
return dict_object
def validate_logging(self, logging):
try:
if logging is None:
return None
valid_logging = {}
if logging and not set(['enabled', 'include_cookies', 'bucket', 'prefix']).issubset(logging):
self.module.fail_json(msg="The logging parameters enabled, include_cookies, bucket and prefix must be specified.")
valid_logging['include_cookies'] = logging.get('include_cookies')
valid_logging['enabled'] = logging.get('enabled')
valid_logging['bucket'] = logging.get('bucket')
valid_logging['prefix'] = logging.get('prefix')
return valid_logging
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating distribution logging")
def validate_is_list(self, list_to_validate, list_name):
if not isinstance(list_to_validate, list):
self.module.fail_json(msg='%s is of type %s. Must be a list.' % (list_name, type(list_to_validate).__name__))
def validate_required_key(self, key_name, full_key_name, dict_object):
if key_name not in dict_object:
self.module.fail_json(msg="%s must be specified." % full_key_name)
def validate_origins(self, client, config, origins, default_origin_domain_name,
default_origin_path, create_distribution, purge_origins=False):
try:
if origins is None:
if default_origin_domain_name is None and not create_distribution:
if purge_origins:
return None
else:
return ansible_list_to_cloudfront_list(config)
if default_origin_domain_name is not None:
origins = [{
'domain_name': default_origin_domain_name,
'origin_path': default_origin_path or ''
}]
else:
origins = []
self.validate_is_list(origins, 'origins')
if not origins and default_origin_domain_name is None and create_distribution:
self.module.fail_json(msg="Both origins[] and default_origin_domain_name have not been specified. Please specify at least one.")
all_origins = OrderedDict()
new_domains = list()
for origin in config:
all_origins[origin.get('domain_name')] = origin
for origin in origins:
origin = self.validate_origin(client, all_origins.get(origin.get('domain_name'), {}), origin, default_origin_path)
all_origins[origin['domain_name']] = origin
new_domains.append(origin['domain_name'])
if purge_origins:
for domain in list(all_origins.keys()):
if domain not in new_domains:
del(all_origins[domain])
return ansible_list_to_cloudfront_list(list(all_origins.values()))
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating distribution origins")
def validate_s3_origin_configuration(self, client, existing_config, origin):
if origin['s3_origin_access_identity_enabled'] and existing_config.get('s3_origin_config', {}).get('origin_access_identity'):
return existing_config['s3_origin_config']['origin_access_identity']
if not origin['s3_origin_access_identity_enabled']:
return None
try:
comment = "access-identity-by-ansible-%s-%s" % (origin.get('domain_name'), self.__default_datetime_string)
cfoai_config = dict(CloudFrontOriginAccessIdentityConfig=dict(CallerReference=self.__default_datetime_string,
Comment=comment))
oai = client.create_cloud_front_origin_access_identity(**cfoai_config)['CloudFrontOriginAccessIdentity']['Id']
except Exception as e:
self.module.fail_json_aws(e, msg="Couldn't create Origin Access Identity for id %s" % origin['id'])
return "origin-access-identity/cloudfront/%s" % oai
def validate_origin(self, client, existing_config, origin, default_origin_path):
try:
origin = self.add_missing_key(origin, 'origin_path', existing_config.get('origin_path', default_origin_path or ''))
self.validate_required_key('origin_path', 'origins[].origin_path', origin)
origin = self.add_missing_key(origin, 'id', existing_config.get('id', self.__default_datetime_string))
if 'custom_headers' in origin and len(origin.get('custom_headers')) > 0:
for custom_header in origin.get('custom_headers'):
if 'header_name' not in custom_header or 'header_value' not in custom_header:
self.module.fail_json(msg="Both origins[].custom_headers.header_name and origins[].custom_headers.header_value must be specified.")
origin['custom_headers'] = ansible_list_to_cloudfront_list(origin.get('custom_headers'))
else:
origin['custom_headers'] = ansible_list_to_cloudfront_list()
if self.__s3_bucket_domain_identifier in origin.get('domain_name').lower():
if origin.get("s3_origin_access_identity_enabled") is not None:
s3_origin_config = self.validate_s3_origin_configuration(client, existing_config, origin)
if s3_origin_config:
oai = s3_origin_config
else:
oai = ""
origin["s3_origin_config"] = dict(origin_access_identity=oai)
del(origin["s3_origin_access_identity_enabled"])
if 'custom_origin_config' in origin:
self.module.fail_json(msg="s3_origin_access_identity_enabled and custom_origin_config are mutually exclusive")
else:
origin = self.add_missing_key(origin, 'custom_origin_config', existing_config.get('custom_origin_config', {}))
custom_origin_config = origin.get('custom_origin_config')
custom_origin_config = self.add_key_else_validate(custom_origin_config, 'origin_protocol_policy',
'origins[].custom_origin_config.origin_protocol_policy',
self.__default_custom_origin_protocol_policy, self.__valid_origin_protocol_policies)
custom_origin_config = self.add_missing_key(custom_origin_config, 'origin_read_timeout', self.__default_custom_origin_read_timeout)
custom_origin_config = self.add_missing_key(custom_origin_config, 'origin_keepalive_timeout', self.__default_custom_origin_keepalive_timeout)
custom_origin_config = self.add_key_else_change_dict_key(custom_origin_config, 'http_port', 'h_t_t_p_port', self.__default_http_port)
custom_origin_config = self.add_key_else_change_dict_key(custom_origin_config, 'https_port', 'h_t_t_p_s_port', self.__default_https_port)
if custom_origin_config.get('origin_ssl_protocols', {}).get('items'):
custom_origin_config['origin_ssl_protocols'] = custom_origin_config['origin_ssl_protocols']['items']
if custom_origin_config.get('origin_ssl_protocols'):
self.validate_attribute_list_with_allowed_list(custom_origin_config['origin_ssl_protocols'], 'origins[].origin_ssl_protocols',
self.__valid_origin_ssl_protocols)
else:
custom_origin_config['origin_ssl_protocols'] = self.__default_origin_ssl_protocols
custom_origin_config['origin_ssl_protocols'] = ansible_list_to_cloudfront_list(custom_origin_config['origin_ssl_protocols'])
return origin
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
self.module.fail_json_aws(e, msg="Error validating distribution origin")
def validate_cache_behaviors(self, config, cache_behaviors, valid_origins, purge_cache_behaviors=False):
try:
if cache_behaviors is None and valid_origins is not None and purge_cache_behaviors is False:
return ansible_list_to_cloudfront_list(config)
all_cache_behaviors = OrderedDict()
# cache behaviors are order dependent so we don't preserve the existing ordering when purge_cache_behaviors
# is true (if purge_cache_behaviors is not true, we can't really know the full new order)
if not purge_cache_behaviors:
for behavior in config:
all_cache_behaviors[behavior['path_pattern']] = behavior
for cache_behavior in cache_behaviors:
valid_cache_behavior = self.validate_cache_behavior(all_cache_behaviors.get(cache_behavior.get('path_pattern'), {}),
cache_behavior, valid_origins)
all_cache_behaviors[cache_behavior['path_pattern']] = valid_cache_behavior
if purge_cache_behaviors:
for path_pattern in set(all_cache_behaviors.keys()) - set([cb['path_pattern'] for cb in cache_behaviors]):
del(all_cache_behaviors[path_pattern])
return ansible_list_to_cloudfront_list(list(all_cache_behaviors.values()))
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating distribution cache behaviors")
def validate_cache_behavior(self, config, cache_behavior, valid_origins, is_default_cache=False):
if is_default_cache and cache_behavior is None:
cache_behavior = {}
if cache_behavior is None and valid_origins is not None:
return config
cache_behavior = self.validate_cache_behavior_first_level_keys(config, cache_behavior, valid_origins, is_default_cache)
cache_behavior = self.validate_forwarded_values(config, cache_behavior.get('forwarded_values'), cache_behavior)
cache_behavior = self.validate_allowed_methods(config, cache_behavior.get('allowed_methods'), cache_behavior)
cache_behavior = self.validate_lambda_function_associations(config, cache_behavior.get('lambda_function_associations'), cache_behavior)
cache_behavior = self.validate_trusted_signers(config, cache_behavior.get('trusted_signers'), cache_behavior)
return cache_behavior
def validate_cache_behavior_first_level_keys(self, config, cache_behavior, valid_origins, is_default_cache):
try:
cache_behavior = self.add_key_else_change_dict_key(cache_behavior, 'min_ttl', 'min_t_t_l',
config.get('min_t_t_l', self.__default_cache_behavior_min_ttl))
cache_behavior = self.add_key_else_change_dict_key(cache_behavior, 'max_ttl', 'max_t_t_l',
config.get('max_t_t_l', self.__default_cache_behavior_max_ttl))
cache_behavior = self.add_key_else_change_dict_key(cache_behavior, 'default_ttl', 'default_t_t_l',
config.get('default_t_t_l', self.__default_cache_behavior_default_ttl))
cache_behavior = self.add_missing_key(cache_behavior, 'compress', config.get('compress', self.__default_cache_behavior_compress))
target_origin_id = cache_behavior.get('target_origin_id', config.get('target_origin_id'))
if not target_origin_id:
target_origin_id = self.get_first_origin_id_for_default_cache_behavior(valid_origins)
if target_origin_id not in [origin['id'] for origin in valid_origins.get('items', [])]:
if is_default_cache:
cache_behavior_name = 'Default cache behavior'
else:
cache_behavior_name = 'Cache behavior for path %s' % cache_behavior['path_pattern']
self.module.fail_json(msg="%s has target_origin_id pointing to an origin that does not exist." %
cache_behavior_name)
cache_behavior['target_origin_id'] = target_origin_id
cache_behavior = self.add_key_else_validate(cache_behavior, 'viewer_protocol_policy', 'cache_behavior.viewer_protocol_policy',
config.get('viewer_protocol_policy',
self.__default_cache_behavior_viewer_protocol_policy),
self.__valid_viewer_protocol_policies)
cache_behavior = self.add_missing_key(cache_behavior, 'smooth_streaming',
config.get('smooth_streaming', self.__default_cache_behavior_smooth_streaming))
return cache_behavior
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating distribution cache behavior first level keys")
def validate_forwarded_values(self, config, forwarded_values, cache_behavior):
try:
if not forwarded_values:
forwarded_values = dict()
existing_config = config.get('forwarded_values', {})
headers = forwarded_values.get('headers', existing_config.get('headers', {}).get('items'))
if headers:
headers.sort()
forwarded_values['headers'] = ansible_list_to_cloudfront_list(headers)
if 'cookies' not in forwarded_values:
forward = existing_config.get('cookies', {}).get('forward', self.__default_cache_behavior_forwarded_values_forward_cookies)
forwarded_values['cookies'] = {'forward': forward}
else:
existing_whitelist = existing_config.get('cookies', {}).get('whitelisted_names', {}).get('items')
whitelist = forwarded_values.get('cookies').get('whitelisted_names', existing_whitelist)
if whitelist:
self.validate_is_list(whitelist, 'forwarded_values.whitelisted_names')
forwarded_values['cookies']['whitelisted_names'] = ansible_list_to_cloudfront_list(whitelist)
cookie_forwarding = forwarded_values.get('cookies').get('forward', existing_config.get('cookies', {}).get('forward'))
self.validate_attribute_with_allowed_values(cookie_forwarding, 'cache_behavior.forwarded_values.cookies.forward',
self.__valid_cookie_forwarding)
forwarded_values['cookies']['forward'] = cookie_forwarding
query_string_cache_keys = forwarded_values.get('query_string_cache_keys', existing_config.get('query_string_cache_keys', {}).get('items', []))
self.validate_is_list(query_string_cache_keys, 'forwarded_values.query_string_cache_keys')
forwarded_values['query_string_cache_keys'] = ansible_list_to_cloudfront_list(query_string_cache_keys)
forwarded_values = self.add_missing_key(forwarded_values, 'query_string',
existing_config.get('query_string', self.__default_cache_behavior_forwarded_values_query_string))
cache_behavior['forwarded_values'] = forwarded_values
return cache_behavior
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating forwarded values")
def validate_lambda_function_associations(self, config, lambda_function_associations, cache_behavior):
try:
if lambda_function_associations is not None:
self.validate_is_list(lambda_function_associations, 'lambda_function_associations')
for association in lambda_function_associations:
association = change_dict_key_name(association, 'lambda_function_arn', 'lambda_function_a_r_n')
self.validate_attribute_with_allowed_values(association.get('event_type'), 'cache_behaviors[].lambda_function_associations.event_type',
self.__valid_lambda_function_association_event_types)
cache_behavior['lambda_function_associations'] = ansible_list_to_cloudfront_list(lambda_function_associations)
else:
if 'lambda_function_associations' in config:
cache_behavior['lambda_function_associations'] = config.get('lambda_function_associations')
else:
cache_behavior['lambda_function_associations'] = ansible_list_to_cloudfront_list([])
return cache_behavior
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating lambda function associations")
def validate_allowed_methods(self, config, allowed_methods, cache_behavior):
try:
if allowed_methods is not None:
self.validate_required_key('items', 'cache_behavior.allowed_methods.items[]', allowed_methods)
temp_allowed_items = allowed_methods.get('items')
self.validate_is_list(temp_allowed_items, 'cache_behavior.allowed_methods.items')
self.validate_attribute_list_with_allowed_list(temp_allowed_items, 'cache_behavior.allowed_methods.items[]',
self.__valid_methods_allowed_methods)
cached_items = allowed_methods.get('cached_methods')
if 'cached_methods' in allowed_methods:
self.validate_is_list(cached_items, 'cache_behavior.allowed_methods.cached_methods')
self.validate_attribute_list_with_allowed_list(cached_items, 'cache_behavior.allowed_items.cached_methods[]',
self.__valid_methods_cached_methods)
# we don't care if the order of how cloudfront stores the methods differs - preserving existing
# order reduces likelihood of making unnecessary changes
if 'allowed_methods' in config and set(config['allowed_methods']['items']) == set(temp_allowed_items):
cache_behavior['allowed_methods'] = config['allowed_methods']
else:
cache_behavior['allowed_methods'] = ansible_list_to_cloudfront_list(temp_allowed_items)
if cached_items and set(cached_items) == set(config.get('allowed_methods', {}).get('cached_methods', {}).get('items', [])):
cache_behavior['allowed_methods']['cached_methods'] = config['allowed_methods']['cached_methods']
else:
cache_behavior['allowed_methods']['cached_methods'] = ansible_list_to_cloudfront_list(cached_items)
else:
if 'allowed_methods' in config:
cache_behavior['allowed_methods'] = config.get('allowed_methods')
return cache_behavior
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating allowed methods")
def validate_trusted_signers(self, config, trusted_signers, cache_behavior):
try:
if trusted_signers is None:
trusted_signers = {}
if 'items' in trusted_signers:
valid_trusted_signers = ansible_list_to_cloudfront_list(trusted_signers.get('items'))
else:
valid_trusted_signers = dict(quantity=config.get('quantity', 0))
if 'items' in config:
valid_trusted_signers = dict(items=config['items'], quantity=config['quantity'])  # keep quantity so the AWS API payload stays valid
valid_trusted_signers['enabled'] = trusted_signers.get('enabled', config.get('enabled', self.__default_trusted_signers_enabled))
cache_behavior['trusted_signers'] = valid_trusted_signers
return cache_behavior
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating trusted signers")
def validate_viewer_certificate(self, viewer_certificate):
try:
if viewer_certificate is None:
return None
if viewer_certificate.get('cloudfront_default_certificate') and viewer_certificate.get('ssl_support_method') is not None:
self.module.fail_json(msg="viewer_certificate.ssl_support_method should not be specified with viewer_certificate_cloudfront_default" +
"_certificate set to true.")
self.validate_attribute_with_allowed_values(viewer_certificate.get('ssl_support_method'), 'viewer_certificate.ssl_support_method',
self.__valid_viewer_certificate_ssl_support_methods)
self.validate_attribute_with_allowed_values(viewer_certificate.get('minimum_protocol_version'), 'viewer_certificate.minimum_protocol_version',
self.__valid_viewer_certificate_minimum_protocol_versions)
self.validate_attribute_with_allowed_values(viewer_certificate.get('certificate_source'), 'viewer_certificate.certificate_source',
self.__valid_viewer_certificate_certificate_sources)
viewer_certificate = change_dict_key_name(viewer_certificate, 'cloudfront_default_certificate', 'cloud_front_default_certificate')
viewer_certificate = change_dict_key_name(viewer_certificate, 'ssl_support_method', 's_s_l_support_method')
viewer_certificate = change_dict_key_name(viewer_certificate, 'iam_certificate_id', 'i_a_m_certificate_id')
viewer_certificate = change_dict_key_name(viewer_certificate, 'acm_certificate_arn', 'a_c_m_certificate_arn')
return viewer_certificate
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating viewer certificate")
def validate_custom_error_responses(self, config, custom_error_responses, purge_custom_error_responses):
try:
if custom_error_responses is None and not purge_custom_error_responses:
return ansible_list_to_cloudfront_list(config)
self.validate_is_list(custom_error_responses, 'custom_error_responses')
result = list()
existing_responses = dict((response['error_code'], response) for response in config)  # seed from the existing config so unmatched responses survive when not purging
for custom_error_response in custom_error_responses:
self.validate_required_key('error_code', 'custom_error_responses[].error_code', custom_error_response)
custom_error_response = change_dict_key_name(custom_error_response, 'error_caching_min_ttl', 'error_caching_min_t_t_l')
if 'response_code' in custom_error_response:
custom_error_response['response_code'] = str(custom_error_response['response_code'])
if custom_error_response['error_code'] in existing_responses:
del(existing_responses[custom_error_response['error_code']])
result.append(custom_error_response)
if not purge_custom_error_responses:
result.extend(existing_responses.values())
return ansible_list_to_cloudfront_list(result)
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating custom error responses")
def validate_restrictions(self, config, restrictions, purge_restrictions=False):
try:
if restrictions is None:
if purge_restrictions:
return None
else:
return config
self.validate_required_key('geo_restriction', 'restrictions.geo_restriction', restrictions)
geo_restriction = restrictions.get('geo_restriction')
self.validate_required_key('restriction_type', 'restrictions.geo_restriction.restriction_type', geo_restriction)
existing_restrictions = config.get('geo_restriction', {}).get('items', [])
geo_restriction_items = geo_restriction.get('items') or []
if not purge_restrictions:
geo_restriction_items.extend([rest for rest in existing_restrictions if
rest not in geo_restriction_items])
valid_restrictions = ansible_list_to_cloudfront_list(geo_restriction_items)
valid_restrictions['restriction_type'] = geo_restriction.get('restriction_type')
return valid_restrictions
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating restrictions")
def validate_distribution_config_parameters(self, config, default_root_object, ipv6_enabled, http_version, web_acl_id):
try:
config['default_root_object'] = default_root_object or config.get('default_root_object', '')
config['is_i_p_v_6_enabled'] = ipv6_enabled if ipv6_enabled is not None else config.get('is_i_p_v_6_enabled', self.__default_ipv6_enabled)
if http_version is not None or config.get('http_version'):
self.validate_attribute_with_allowed_values(http_version, 'http_version', self.__valid_http_versions)
config['http_version'] = http_version or config.get('http_version')
if web_acl_id or config.get('web_a_c_l_id'):
config['web_a_c_l_id'] = web_acl_id or config.get('web_a_c_l_id')
return config
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating distribution config parameters")
def validate_common_distribution_parameters(self, config, enabled, aliases, logging, price_class, purge_aliases=False):
try:
if config is None:
config = {}
if aliases is not None:
if not purge_aliases:
aliases.extend([alias for alias in config.get('aliases', {}).get('items', [])
if alias not in aliases])
config['aliases'] = ansible_list_to_cloudfront_list(aliases)
if logging is not None:
config['logging'] = self.validate_logging(logging)
config['enabled'] = enabled if enabled is not None else config.get('enabled', self.__default_distribution_enabled)  # plain 'or' would make enabled=False impossible
if price_class is not None:
self.validate_attribute_with_allowed_values(price_class, 'price_class', self.__valid_price_classes)
config['price_class'] = price_class
return config
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating common distribution parameters")
def validate_comment(self, config, comment):
config['comment'] = comment or config.get('comment', "Distribution created by Ansible with datetime stamp " + self.__default_datetime_string)
return config
def validate_caller_reference(self, caller_reference):
return caller_reference or self.__default_datetime_string
def get_first_origin_id_for_default_cache_behavior(self, valid_origins):
try:
if valid_origins is not None:
valid_origins_list = valid_origins.get('items')
if valid_origins_list is not None and isinstance(valid_origins_list, list) and len(valid_origins_list) > 0:
return str(valid_origins_list[0].get('id'))
self.module.fail_json(msg="There are no valid origins from which to specify a target_origin_id for the default_cache_behavior configuration.")
except Exception as e:
self.module.fail_json_aws(e, msg="Error getting first origin_id for default cache behavior")
def validate_attribute_list_with_allowed_list(self, attribute_list, attribute_list_name, allowed_list):
try:
self.validate_is_list(attribute_list, attribute_list_name)
if (isinstance(allowed_list, list) and set(attribute_list) not in allowed_list or
isinstance(allowed_list, set) and not set(allowed_list).issuperset(attribute_list)):
self.module.fail_json(msg='The attribute list {0} must be one of [{1}]'.format(attribute_list_name, ' '.join(str(a) for a in allowed_list)))
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating attribute list with allowed value list")
def validate_attribute_with_allowed_values(self, attribute, attribute_name, allowed_list):
if attribute is not None and attribute not in allowed_list:
self.module.fail_json(msg='The attribute {0} must be one of [{1}]'.format(attribute_name, ' '.join(str(a) for a in allowed_list)))
def validate_distribution_from_caller_reference(self, caller_reference):
try:
distributions = self.__cloudfront_facts_mgr.list_distributions(False)
distribution_name = 'Distribution'
distribution_config_name = 'DistributionConfig'
distribution_ids = [dist.get('Id') for dist in distributions]
for distribution_id in distribution_ids:
config = self.__cloudfront_facts_mgr.get_distribution(distribution_id)
distribution = config.get(distribution_name)
if distribution is not None:
distribution_config = distribution.get(distribution_config_name)
if distribution_config is not None and distribution_config.get('CallerReference') == caller_reference:
distribution['DistributionConfig'] = distribution_config
return distribution
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating distribution from caller reference")
def validate_distribution_from_aliases_caller_reference(self, distribution_id, aliases, caller_reference):
try:
if caller_reference is not None:
return self.validate_distribution_from_caller_reference(caller_reference)
else:
if aliases:
distribution_id = self.validate_distribution_id_from_alias(aliases)
if distribution_id:
return self.__cloudfront_facts_mgr.get_distribution(distribution_id)
return None
except Exception as e:
self.module.fail_json_aws(e, msg="Error validating distribution_id from alias, aliases and caller reference")
def validate_distribution_id_from_alias(self, aliases):
distributions = self.__cloudfront_facts_mgr.list_distributions(False)
if distributions:
for distribution in distributions:
distribution_aliases = distribution.get('Aliases', {}).get('Items', [])
if set(aliases) & set(distribution_aliases):
return distribution['Id']
return None
def wait_until_processed(self, client, wait_timeout, distribution_id, caller_reference):
if distribution_id is None:
distribution_id = self.validate_distribution_id_from_caller_reference(caller_reference=caller_reference)
try:
waiter = client.get_waiter('distribution_deployed')
attempts = 1 + int(wait_timeout / 60)
waiter.wait(Id=distribution_id, WaiterConfig={'MaxAttempts': attempts})
except botocore.exceptions.WaiterError as e:
self.module.fail_json(msg="Timeout waiting for cloudfront action. Waited for {0} seconds before timeout. "
"Error: {1}".format(to_text(wait_timeout), to_native(e)))
except (botocore.exceptions.ClientError, botocore.exceptions.BotoCoreError) as e:
self.module.fail_json_aws(e, msg="Error getting distribution {0}".format(distribution_id))
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state=dict(choices=['present', 'absent'], default='present'),
caller_reference=dict(),
comment=dict(),
distribution_id=dict(),
e_tag=dict(),
tags=dict(type='dict', default={}),
purge_tags=dict(type='bool', default=False),
alias=dict(),
aliases=dict(type='list', default=[]),
purge_aliases=dict(type='bool', default=False),
default_root_object=dict(),
origins=dict(type='list'),
purge_origins=dict(type='bool', default=False),
default_cache_behavior=dict(type='dict'),
cache_behaviors=dict(type='list'),
purge_cache_behaviors=dict(type='bool', default=False),
custom_error_responses=dict(type='list'),
purge_custom_error_responses=dict(type='bool', default=False),
logging=dict(type='dict'),
price_class=dict(),
enabled=dict(type='bool'),
viewer_certificate=dict(type='dict'),
restrictions=dict(type='dict'),
web_acl_id=dict(),
http_version=dict(),
ipv6_enabled=dict(type='bool'),
default_origin_domain_name=dict(),
default_origin_path=dict(),
wait=dict(default=False, type='bool'),
wait_timeout=dict(default=1800, type='int')
))
result = {}
changed = True
module = AnsibleAWSModule(
argument_spec=argument_spec,
supports_check_mode=False,
mutually_exclusive=[
['distribution_id', 'alias'],
['default_origin_domain_name', 'distribution_id'],
['default_origin_domain_name', 'alias'],
]
)
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
client = boto3_conn(module, conn_type='client', resource='cloudfront', region=region, endpoint=ec2_url, **aws_connect_kwargs)
validation_mgr = CloudFrontValidationManager(module)
state = module.params.get('state')
caller_reference = module.params.get('caller_reference')
comment = module.params.get('comment')
e_tag = module.params.get('e_tag')
tags = module.params.get('tags')
purge_tags = module.params.get('purge_tags')
distribution_id = module.params.get('distribution_id')
alias = module.params.get('alias')
aliases = module.params.get('aliases')
purge_aliases = module.params.get('purge_aliases')
default_root_object = module.params.get('default_root_object')
origins = module.params.get('origins')
purge_origins = module.params.get('purge_origins')
default_cache_behavior = module.params.get('default_cache_behavior')
cache_behaviors = module.params.get('cache_behaviors')
purge_cache_behaviors = module.params.get('purge_cache_behaviors')
custom_error_responses = module.params.get('custom_error_responses')
purge_custom_error_responses = module.params.get('purge_custom_error_responses')
logging = module.params.get('logging')
price_class = module.params.get('price_class')
enabled = module.params.get('enabled')
viewer_certificate = module.params.get('viewer_certificate')
restrictions = module.params.get('restrictions')
purge_restrictions = module.params.get('purge_restrictions')
web_acl_id = module.params.get('web_acl_id')
http_version = module.params.get('http_version')
ipv6_enabled = module.params.get('ipv6_enabled')
default_origin_domain_name = module.params.get('default_origin_domain_name')
default_origin_path = module.params.get('default_origin_path')
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
if alias and alias not in aliases:
aliases.append(alias)
distribution = validation_mgr.validate_distribution_from_aliases_caller_reference(distribution_id, aliases, caller_reference)
update = state == 'present' and distribution
create = state == 'present' and not distribution
delete = state == 'absent' and distribution
if not (update or create or delete):
module.exit_json(changed=False)
if update or delete:
config = distribution['Distribution']['DistributionConfig']
e_tag = distribution['ETag']
distribution_id = distribution['Distribution']['Id']
else:
config = dict()
if update:
config = camel_dict_to_snake_dict(config, reversible=True)
if create or update:
config = validation_mgr.validate_common_distribution_parameters(config, enabled, aliases, logging, price_class, purge_aliases)
config = validation_mgr.validate_distribution_config_parameters(config, default_root_object, ipv6_enabled, http_version, web_acl_id)
config['origins'] = validation_mgr.validate_origins(client, config.get('origins', {}).get('items', []), origins, default_origin_domain_name,
default_origin_path, create, purge_origins)
config['cache_behaviors'] = validation_mgr.validate_cache_behaviors(config.get('cache_behaviors', {}).get('items', []),
cache_behaviors, config['origins'], purge_cache_behaviors)
config['default_cache_behavior'] = validation_mgr.validate_cache_behavior(config.get('default_cache_behavior', {}),
default_cache_behavior, config['origins'], True)
config['custom_error_responses'] = validation_mgr.validate_custom_error_responses(config.get('custom_error_responses', {}).get('items', []),
custom_error_responses, purge_custom_error_responses)
valid_restrictions = validation_mgr.validate_restrictions(config.get('restrictions', {}), restrictions, purge_restrictions)
if valid_restrictions:
config['restrictions'] = valid_restrictions
valid_viewer_certificate = validation_mgr.validate_viewer_certificate(viewer_certificate)
config = merge_validation_into_config(config, valid_viewer_certificate, 'viewer_certificate')
config = validation_mgr.validate_comment(config, comment)
config = snake_dict_to_camel_dict(config, capitalize_first=True)
if create:
config['CallerReference'] = validation_mgr.validate_caller_reference(caller_reference)
result = create_distribution(client, module, config, ansible_dict_to_boto3_tag_list(tags))
result = camel_dict_to_snake_dict(result)
result['tags'] = list_tags_for_resource(client, module, result['arn'])
if delete:
if config['Enabled']:
config['Enabled'] = False
result = update_distribution(client, module, config, distribution_id, e_tag)
validation_mgr.wait_until_processed(client, wait_timeout, distribution_id, config.get('CallerReference'))
distribution = validation_mgr.validate_distribution_from_aliases_caller_reference(distribution_id, aliases, caller_reference)
# e_tag = distribution['ETag']
result = delete_distribution(client, module, distribution)
if update:
changed = config != distribution['Distribution']['DistributionConfig']
if changed:
result = update_distribution(client, module, config, distribution_id, e_tag)
else:
result = distribution['Distribution']
existing_tags = list_tags_for_resource(client, module, result['ARN'])
distribution['Distribution']['DistributionConfig']['tags'] = existing_tags
changed |= update_tags(client, module, existing_tags, tags, purge_tags, result['ARN'])
result = camel_dict_to_snake_dict(result)
result['distribution_config']['tags'] = config['tags'] = list_tags_for_resource(client, module, result['arn'])
result['diff'] = dict()
diff = recursive_diff(distribution['Distribution']['DistributionConfig'], config)
if diff:
result['diff']['before'] = diff[0]
result['diff']['after'] = diff[1]
if wait and (create or update):
validation_mgr.wait_until_processed(client, wait_timeout, distribution_id, config.get('CallerReference'))
if 'distribution_config' in result:
result.update(result['distribution_config'])
del(result['distribution_config'])
module.exit_json(changed=changed, **result)
if __name__ == '__main__':
main()
| gpl-3.0 | 7,483,290,099,521,737,000 | 42.302267 | 159 | 0.610645 | false |
40223232/w16b_test | wsgi.py | 1 | 27797 | #@+leo-ver=5-thin
#@+node:2014fall.20141212095015.1775: * @file wsgi.py
# coding=utf-8
# 上面的程式內容編碼必須在程式的第一或者第二行才會有作用
################# (1) 模組導入區
# 導入 cherrypy 模組, 為了在 OpenShift 平台上使用 cherrypy 模組, 必須透過 setup.py 安裝
#@@language python
#@@tabwidth -4
#@+<<declarations>>
#@+node:2014fall.20141212095015.1776: ** <<declarations>> (wsgi)
import cherrypy
# 導入 Python 內建的 os 模組, 因為 os 模組為 Python 內建, 所以無需透過 setup.py 安裝
import os
# 導入 random 模組
import random
# 導入 gear 模組
import gear
################# (2) 廣域變數設定區
# 確定程式檔案所在目錄, 在 Windows 下有最後的反斜線
_curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
# 設定在雲端與近端的資料儲存目錄
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
# 表示程式在雲端執行
download_root_dir = os.environ['OPENSHIFT_DATA_DIR']
data_dir = os.environ['OPENSHIFT_DATA_DIR']
else:
# 表示程式在近端執行
download_root_dir = _curdir + "/local_data/"
data_dir = _curdir + "/local_data/"
'''以下為近端 input() 與 for 迴圈應用的程式碼, 若要將程式送到 OpenShift 執行, 除了採用 CherryPy 網際框架外, 還要轉為 html 列印
# 利用 input() 取得的資料型別為字串
toprint = input("要印甚麼內容?")
# 若要將 input() 取得的字串轉為整數使用, 必須利用 int() 轉換
repeat_no = int(input("重複列印幾次?"))
for i in range(repeat_no):
print(toprint)
'''
#@-<<declarations>>
#@+others
#@+node:2014fall.20141212095015.1777: ** class Hello
################# (3) 程式類別定義區
# 以下改用 CherryPy 網際框架程式架構
# 以下為 Hello 類別的設計內容, 其中的 object 使用, 表示 Hello 類別繼承 object 的所有特性, 包括方法與屬性設計
class Hello(object):
# Hello 類別的啟動設定
_cp_config = {
'tools.encode.encoding': 'utf-8',
'tools.sessions.on' : True,
'tools.sessions.storage_type' : 'file',
#'tools.sessions.locking' : 'explicit',
# session 以檔案儲存, 而且位於 data_dir 下的 tmp 目錄
'tools.sessions.storage_path' : data_dir+'/tmp',
# session 有效時間設為 60 分鐘
'tools.sessions.timeout' : 60
}
#@+others
#@+node:2014fall.20141212095015.2004: *3* __init__
def __init__(self):
# 配合透過案例啟始建立所需的目錄
if not os.path.isdir(data_dir+'/tmp'):
os.mkdir(data_dir+'/tmp')
if not os.path.isdir(data_dir+"/downloads"):
os.mkdir(data_dir+"/downloads")
if not os.path.isdir(data_dir+"/images"):
os.mkdir(data_dir+"/images")
#@+node:2014fall.20141212095015.1778: *3* index_orig
# 以 @ 開頭的 cherrypy.expose 為 decorator, 用來表示隨後的成員方法, 可以直接讓使用者以 URL 連結執行
@cherrypy.expose
# index 方法為 CherryPy 各類別成員方法中的內建(default)方法, 當使用者執行時未指定方法, 系統將會優先執行 index 方法
# 有 self 的方法為類別中的成員方法, Python 程式透過此一 self 在各成員方法間傳遞物件內容
def index_orig(self, toprint="Hello World!"):
return toprint
#@+node:2014fall.20141212095015.1779: *3* hello
@cherrypy.expose
def hello(self, toprint="Hello World!"):
return toprint
#@+node:2014fall.20141215194146.1791: *3* index
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def index(self):
outstring = '''
<!DOCTYPE html>
<html>
<head>
40223232
</head>
<body>
<br /><a href ="index">index</a><br />
</body>
</html>
'''
return outstring
#@+node:2015.20150330144929.1713: *3* twoDgear
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def twoDgear(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=POST action=do2Dgear>
齒數:<input type=text name=N><br />
模數:<input type=text name=M><br />
壓力角:<input type=text name=P><br />
<input type=submit value=send>
</form>
</body>
</html>
'''
return outstring
#@+node:2015.20150331094055.1733: *3* threeDgear
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def threeDgear(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=POST action=do3Dgear>
齒數:<input type=text name=N><br />
模數:<input type=text name=M><br />
壓力角:<input type=text name=P><br />
<input type=submit value=send>
</form>
</body>
</html>
'''
return outstring
#@+node:2015.20150330144929.1762: *3* do2Dgear
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def do2Dgear(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
import math
# 畫布指定在名稱為 plotarea 的 canvas 上
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 用紅色畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
'''
outstring += '''
ctx.moveTo('''+str(N)+","+str(M)+")"
outstring += '''
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()
# 用藍色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()
# 用綠色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()
# 用黑色畫一個圓
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
return outstring
#@+node:2015.20150331094055.1735: *3* do3Dgear
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def do3Dgear(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
import math
# 畫布指定在名稱為 plotarea 的 canvas 上
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 用紅色畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
'''
outstring += '''
ctx.moveTo('''+str(N)+","+str(M)+")"
outstring += '''
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()
# 用藍色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()
# 用綠色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()
# 用黑色畫一個圓
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
return outstring
#@+node:2015.20150330144929.1765: *3* mygeartest
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def mygeartest(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
def create_line(x1, y1, x2, y2, width=3, fill="red"):
ctx.beginPath()
ctx.lineWidth = width
ctx.moveTo(x1, y1)
ctx.lineTo(x2, y2)
ctx.strokeStyle = fill
ctx.stroke()
# 導入數學函式後, 圓周率為 pi
# deg 為角度轉為徑度的轉換因子
deg = pi/180.
#
# 以下分別為正齒輪繪圖與主 tkinter 畫布繪圖
#
# 定義一個繪正齒輪的繪圖函式
# midx 為齒輪圓心 x 座標
# midy 為齒輪圓心 y 座標
# rp 為節圓半徑, n 為齒數
def 齒輪(midx, midy, rp, n, 顏色):
# 將角度轉換因子設為全域變數
global deg
# 齒輪漸開線分成 15 線段繪製
imax = 15
# 在輸入的畫布上繪製直線, 由圓心到節圓 y 軸頂點畫一直線
create_line(midx, midy, midx, midy-rp)
# 畫出 rp 圓, 畫圓函式尚未定義
#create_oval(midx-rp, midy-rp, midx+rp, midy+rp, width=2)
# a 為模數 (代表公制中齒的大小), 模數為節圓直徑(稱為節徑)除以齒數
# 模數也就是齒冠大小
a=2*rp/n
# d 為齒根大小, 為模數的 1.157 或 1.25倍, 這裡採 1.25 倍
d=2.5*rp/n
# ra 為齒輪的外圍半徑
ra=rp+a
print("ra:", ra)
# 畫出 ra 圓, 畫圓函式尚未定義
#create_oval(midx-ra, midy-ra, midx+ra, midy+ra, width=1)
# rb 則為齒輪的基圓半徑
# 基圓為漸開線長齒之基準圓
rb=rp*cos(20*deg)
print("rp:", rp)
print("rb:", rb)
# 畫出 rb 圓 (基圓), 畫圓函式尚未定義
#create_oval(midx-rb, midy-rb, midx+rb, midy+rb, width=1)
# rd 為齒根圓半徑
rd=rp-d
# 當 rd 大於 rb 時
print("rd:", rd)
# 畫出 rd 圓 (齒根圓), 畫圓函式尚未定義
#create_oval(midx-rd, midy-rd, midx+rd, midy+rd, width=1)
# dr 則為基圓到齒頂圓半徑分成 imax 段後的每段半徑增量大小
# 將圓弧分成 imax 段來繪製漸開線
dr=(ra-rb)/imax
# tan(20*deg)-20*deg 為漸開線函數
sigma=pi/(2*n)+tan(20*deg)-20*deg
for j in range(n):
ang=-2.*j*pi/n+sigma
ang2=2.*j*pi/n+sigma
lxd=midx+rd*sin(ang2-2.*pi/n)
lyd=midy-rd*cos(ang2-2.*pi/n)
#for(i=0;i<=imax;i++):
for i in range(imax+1):
r=rb+i*dr
theta=sqrt((r*r)/(rb*rb)-1.)
alpha=theta-atan(theta)
xpt=r*sin(alpha-ang)
ypt=r*cos(alpha-ang)
xd=rd*sin(-ang)
yd=rd*cos(-ang)
# i=0 時, 繪線起點由齒根圓上的點, 作為起點
if(i==0):
last_x = midx+xd
last_y = midy-yd
# 由左側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
# 最後一點, 則為齒頂圓
if(i==imax):
lfx=midx+xpt
lfy=midy-ypt
last_x = midx+xpt
last_y = midy-ypt
# the line from last end of dedendum point to the recent
# end of dedendum point
# lxd 為齒根圓上的左側 x 座標, lyd 則為 y 座標
# 下列為齒根圓上用來近似圓弧的直線
create_line((lxd),(lyd),(midx+xd),(midy-yd),fill=顏色)
#for(i=0;i<=imax;i++):
for i in range(imax+1):
r=rb+i*dr
theta=sqrt((r*r)/(rb*rb)-1.)
alpha=theta-atan(theta)
xpt=r*sin(ang2-alpha)
ypt=r*cos(ang2-alpha)
xd=rd*sin(ang2)
yd=rd*cos(ang2)
# i=0 時, 繪線起點由齒根圓上的點, 作為起點
if(i==0):
last_x = midx+xd
last_y = midy-yd
# 由右側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
# 最後一點, 則為齒頂圓
if(i==imax):
rfx=midx+xpt
rfy=midy-ypt
last_x = midx+xpt
last_y = midy-ypt
# lfx 為齒頂圓上的左側 x 座標, lfy 則為 y 座標
# 下列為齒頂圓上用來近似圓弧的直線
create_line(lfx,lfy,rfx,rfy,fill=顏色)
齒輪(400,400,300,41,"blue")
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
</body>
</html>
'''
return outstring
#@+node:amd.20150415215023.1: *3* mygeartest2
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def mygeartest2(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 請注意, 這裡導入位於 Lib/site-packages 目錄下的 spur.py 檔案
import spur
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 以下利用 spur.py 程式進行繪圖, 接下來的協同設計運算必須要配合使用者的需求進行設計運算與繪圖
# 其中並將工作分配給其他組員建立類似 spur.py 的相關零件繪圖模組
# midx, midy 為齒輪圓心座標, rp 為節圓半徑, n 為齒數, pa 為壓力角, color 為線的顏色
# Gear(midx, midy, rp, n=20, pa=20, color="black"):
# 模數決定齒的尺寸大小, 囓合齒輪組必須有相同的模數與壓力角
# 壓力角 pa 單位為角度
pa = 20
# m 為模數
m = 20
# 第1齒輪齒數
n_g1 = 17
# 第2齒輪齒數
n_g2 = 11
# 第3齒輪齒數
n_g3 = 13
# 計算兩齒輪的節圓半徑
rp_g1 = m*n_g1/2
rp_g2 = m*n_g2/2
rp_g3 = m*n_g3/2
# 繪圖第1齒輪的圓心座標
x_g1 = 400
y_g1 = 400
# 第2齒輪的圓心座標, 假設排列成水平, 表示各齒輪圓心 y 座標相同
x_g2 = x_g1 + rp_g1 + rp_g2
y_g2 = y_g1
# 第3齒輪的圓心座標
x_g3 = x_g1 + rp_g1 + 2*rp_g2 + rp_g3
y_g3 = y_g1
# 將第1齒輪順時鐘轉 90 度
# 使用 ctx.save() 與 ctx.restore() 以確保各齒輪以相對座標進行旋轉繪圖
ctx.save()
# translate to the origin of second gear
ctx.translate(x_g1, y_g1)
# rotate to engage
ctx.rotate(pi/2)
# put it back
ctx.translate(-x_g1, -y_g1)
spur.Spur(ctx).Gear(x_g1, y_g1, rp_g1, n_g1, pa, "blue")
ctx.restore()
# 將第2齒輪逆時鐘轉 90 度之後, 再多轉一齒, 以便與第1齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(x_g2, y_g2)
# rotate to engage
ctx.rotate(-pi/2-pi/n_g2)
# put it back
ctx.translate(-x_g2, -y_g2)
spur.Spur(ctx).Gear(x_g2, y_g2, rp_g2, n_g2, pa, "black")
ctx.restore()
# 將第3齒輪逆時鐘轉 90 度之後, 再往回轉第2齒輪定位帶動轉角, 然後再逆時鐘多轉一齒, 以便與第2齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(x_g3, y_g3)
# rotate to engage
# pi+pi/n_g2 為第2齒輪從順時鐘轉 90 度之後, 必須配合目前的標記線所作的齒輪 2 轉動角度, 要轉換到齒輪3 的轉動角度
# 必須乘上兩齒輪齒數的比例, 若齒輪2 大, 則齒輪3 會轉動較快
# 第1個 -pi/2 為將原先垂直的第3齒輪定位線逆時鐘旋轉 90 度
# -pi/n_g3 則是第3齒與第2齒定位線重合後, 必須再逆時鐘多轉一齒的轉角, 以便進行囓合
# (pi+pi/n_g2)*n_g2/n_g3 則是第2齒原定位線為順時鐘轉動 90 度,
# 但是第2齒輪為了與第1齒輪囓合, 已經距離定位線, 多轉了 180 度, 再加上第2齒輪的一齒角度, 因為要帶動第3齒輪定位,
# 這個修正角度必須要再配合第2齒與第3齒的轉速比加以轉換成第3齒輪的轉角, 因此乘上 n_g2/n_g3
ctx.rotate(-pi/2-pi/n_g3+(pi+pi/n_g2)*n_g2/n_g3)
# put it back
ctx.translate(-x_g3, -y_g3)
spur.Spur(ctx).Gear(x_g3, y_g3, rp_g3, n_g3, pa, "red")
ctx.restore()
# 按照上面三個正齒輪的囓合轉角運算, 隨後的傳動齒輪轉角便可依此類推, 完成6個齒輪的囓合繪圖
</script>
<canvas id="plotarea" width="1200" height="1200"></canvas>
</body>
</html>
'''
return outstring
#@+node:2015.20150331094055.1737: *3* my3Dgeartest
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def my3Dgeartest(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
def create_line(x1, y1, x2, y2, width=3, fill="red"):
ctx.beginPath()
ctx.lineWidth = width
ctx.moveTo(x1, y1)
ctx.lineTo(x2, y2)
ctx.strokeStyle = fill
ctx.stroke()
# 導入數學函式後, 圓周率為 pi
# deg 為角度轉為徑度的轉換因子
deg = pi/180.
#
# 以下分別為正齒輪繪圖與主 tkinter 畫布繪圖
#
# 定義一個繪正齒輪的繪圖函式
# midx 為齒輪圓心 x 座標
# midy 為齒輪圓心 y 座標
# rp 為節圓半徑, n 為齒數
def gear(midx, midy, rp, n, 顏色):
# 將角度轉換因子設為全域變數
global deg
# 齒輪漸開線分成 15 線段繪製
imax = 15
# 在輸入的畫布上繪製直線, 由圓心到節圓 y 軸頂點畫一直線
create_line(midx, midy, midx, midy-rp)
# 畫出 rp 圓, 畫圓函式尚未定義
#create_oval(midx-rp, midy-rp, midx+rp, midy+rp, width=2)
# a 為模數 (代表公制中齒的大小), 模數為節圓直徑(稱為節徑)除以齒數
# 模數也就是齒冠大小
a=2*rp/n
# d 為齒根大小, 為模數的 1.157 或 1.25倍, 這裡採 1.25 倍
d=2.5*rp/n
# ra 為齒輪的外圍半徑
ra=rp+a
print("ra:", ra)
# 畫出 ra 圓, 畫圓函式尚未定義
#create_oval(midx-ra, midy-ra, midx+ra, midy+ra, width=1)
# rb 則為齒輪的基圓半徑
# 基圓為漸開線長齒之基準圓
rb=rp*cos(20*deg)
print("rp:", rp)
print("rb:", rb)
# 畫出 rb 圓 (基圓), 畫圓函式尚未定義
#create_oval(midx-rb, midy-rb, midx+rb, midy+rb, width=1)
# rd 為齒根圓半徑
rd=rp-d
# 當 rd 大於 rb 時
print("rd:", rd)
# 畫出 rd 圓 (齒根圓), 畫圓函式尚未定義
#create_oval(midx-rd, midy-rd, midx+rd, midy+rd, width=1)
# dr 則為基圓到齒頂圓半徑分成 imax 段後的每段半徑增量大小
# 將圓弧分成 imax 段來繪製漸開線
dr=(ra-rb)/imax
# tan(20*deg)-20*deg 為漸開線函數
sigma=pi/(2*n)+tan(20*deg)-20*deg
for j in range(n):
ang=-2.*j*pi/n+sigma
ang2=2.*j*pi/n+sigma
lxd=midx+rd*sin(ang2-2.*pi/n)
lyd=midy-rd*cos(ang2-2.*pi/n)
#for(i=0;i<=imax;i++):
for i in range(imax+1):
r=rb+i*dr
theta=sqrt((r*r)/(rb*rb)-1.)
alpha=theta-atan(theta)
xpt=r*sin(alpha-ang)
ypt=r*cos(alpha-ang)
xd=rd*sin(-ang)
yd=rd*cos(-ang)
# i=0 時, 繪線起點由齒根圓上的點, 作為起點
if(i==0):
last_x = midx+xd
last_y = midy-yd
# 由左側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
# 最後一點, 則為齒頂圓
if(i==imax):
lfx=midx+xpt
lfy=midy-ypt
last_x = midx+xpt
last_y = midy-ypt
# the line from last end of dedendum point to the recent
# end of dedendum point
# lxd 為齒根圓上的左側 x 座標, lyd 則為 y 座標
# 下列為齒根圓上用來近似圓弧的直線
create_line((lxd),(lyd),(midx+xd),(midy-yd),fill=顏色)
#for(i=0;i<=imax;i++):
for i in range(imax+1):
r=rb+i*dr
theta=sqrt((r*r)/(rb*rb)-1.)
alpha=theta-atan(theta)
xpt=r*sin(ang2-alpha)
ypt=r*cos(ang2-alpha)
xd=rd*sin(ang2)
yd=rd*cos(ang2)
# i=0 時, 繪線起點由齒根圓上的點, 作為起點
if(i==0):
last_x = midx+xd
last_y = midy-yd
# 由右側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
# 最後一點, 則為齒頂圓
if(i==imax):
rfx=midx+xpt
rfy=midy-ypt
last_x = midx+xpt
last_y = midy-ypt
# lfx 為齒頂圓上的左側 x 座標, lfy 則為 y 座標
# 下列為齒頂圓上用來近似圓弧的直線
create_line(lfx,lfy,rfx,rfy,fill=顏色)
gear(400,400,300,41,"blue")
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
</body>
</html>
'''
return outstring
#@+node:2014fall.20141215194146.1793: *3* doCheck
@cherrypy.expose
def doCheck(self, guess=None):
# 假如使用者直接執行 doCheck, 則設法轉回根方法
if guess is None:
raise cherrypy.HTTPRedirect("/")
# 從 session 取出 answer 對應資料, 且處理直接執行 doCheck 時無法取 session 值情況
try:
theanswer = int(cherrypy.session.get('answer'))
except:
raise cherrypy.HTTPRedirect("/")
# 經由表單所取得的 guess 資料型別為 string
try:
theguess = int(guess)
except:
return "error " + self.guessform()
# 每執行 doCheck 一次,次數增量一次
cherrypy.session['count'] += 1
# 答案與所猜數字進行比對
if theanswer < theguess:
return "big " + self.guessform()
elif theanswer > theguess:
return "small " + self.guessform()
else:
# 已經猜對, 從 session 取出累計猜測次數
thecount = cherrypy.session.get('count')
return "exact: <a href=''>再猜</a>"
#@+node:2014fall.20141215194146.1789: *3* guessform
def guessform(self):
# 印出讓使用者輸入的超文件表單
outstring = str(cherrypy.session.get('answer')) + "/" + str(cherrypy.session.get('count')) + '''<form method=POST action=doCheck>
請輸入您所猜的整數:<input type=text name=guess><br />
<input type=submit value=send>
</form>'''
return outstring
#@-others
#@-others
################# (4) 程式啟動區
# 配合程式檔案所在目錄設定靜態目錄或靜態檔案
application_conf = {'/static':{
'tools.staticdir.on': True,
# 程式執行目錄下, 必須自行建立 static 目錄
'tools.staticdir.dir': _curdir+"/static"},
'/downloads':{
'tools.staticdir.on': True,
'tools.staticdir.dir': data_dir+"/downloads"},
'/images':{
'tools.staticdir.on': True,
'tools.staticdir.dir': data_dir+"/images"}
}
root = Hello()
root.gear = gear.Gear()
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
# 表示在 OpenSfhit 執行
application = cherrypy.Application(root, config=application_conf)
else:
# 表示在近端執行
cherrypy.quickstart(root, config=application_conf)
#@-leo
| gpl-3.0 | 421,274,124,816,849,400 | 29.535904 | 137 | 0.554152 | false |
DailyActie/Surrogate-Model | 00-courses/operators_as _functions.py | 1 | 1783 | # MIT License
#
# Copyright (c) 2016 Daily Actie
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Author: Quan Pan <[email protected]>
# License: MIT License
# Create: 2016-12-02
"""
Links:
https://docs.python.org/2/library/operator.html
operator.
lt(a, b) __lt__(a, b)
le(a, b) __le__(a, b)
eq(a, b) __eq__(a, b)
ne(a, b) __ne__(a, b)
ge(a, b) __ge__(a, b) a >= b
gt(a, b) __gt__(a, b) a > b
mul(a, b) __mul__(a, b) Return a * b, for a and b numbers.
"""
# encoding: utf-8
# module __builtin__
# from (built-in)
# by generator 1.138
from __future__ import print_function
"""
Built-in functions, exceptions, and other objects.
Noteworthy: None is the `nil' object; Ellipsis represents `...' in slices.
"""
| mit | 2,027,015,203,623,779,800 | 33.288462 | 80 | 0.692653 | false |
Unode/firefox_decrypt | tests/simpletap/__init__.py | 1 | 3764 | """
Test Anything Protocol extension to Python's unit testing framework
This module contains TAPTestRunner and TAPTestResult which are used to produce
a test report in a TAP compatible format. All remaining functionality comes
from Python's own unittest module.
The core of the tests does not need any change and is purely unittest code.
The sole difference is in the __name__ == "__main__" section.
Simple usage:
import unittest
class IntegerArithmeticTestCase(unittest.TestCase):
def testAdd(self): # test method names begin 'test*'
"test adding values"
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
"test multiplying values"
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
def testFail(self):
"a failing test"
self.assertEqual(0, 1)
@unittest.expectedFailure
def testExpectFail(self):
"we saw this coming"
self.assertEqual(0, 1)
@unittest.skipIf(True, "Skipping this one")
def testSkip(self):
"pending a fix"
self.assertEqual(0, 1)
def testError(self):
"oops something went wrong"
no_such_variable + 1 # Oops!
if __name__ == "__main__":
from simpletap import TAPTestRunner
unittest.main(testRunner=TAPTestRunner())
When saved in a file called ``test.py`` and executed would produce:
1..6
ok 1 - test.py: test adding values
not ok 2 - test.py: oops something went wrong
# ERROR: NameError on file test.py line 30 in testError: 'no_such_variable + 1 # Oops!':
# global name 'no_such_variable' is not defined
skip 3 - test.py: we saw this coming
# EXPECTED_FAILURE: AssertionError on file test.py line 21 in testExpectFail: 'self.assertEqual(0, 1)':
# 0 != 1
not ok 4 - test.py: a failing test
# FAIL: AssertionError on file test.py line 16 in testFail: 'self.assertEqual(0, 1)':
# 0 != 1
ok 5 - test.py: test multiplying values
skip 6 - test.py: pending a fix
# SKIP:
# Skipping this one
You can also launch simpletap directly from the command line in much the
same way you do with unittest:
python3 -m simpletap test.IntegerArithmeticTestCase
For more information refer to the unittest documentation:
http://docs.python.org/library/unittest.html
Copyright (c) 2014-2016 Renato Alves <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
https://opensource.org/licenses/MIT
"""
from .result import TAPTestResult
from .runner import TAPTestRunner
from .version import __version__ # noqa
__all__ = ['TAPTestResult', 'TAPTestRunner']
| gpl-3.0 | 7,653,470,926,933,786,000 | 34.847619 | 107 | 0.688895 | false |
iwm911/plaso | plaso/parsers/plist_plugins/softwareupdate_test.py | 1 | 2465 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the Software Update plist plugin."""
import unittest
# pylint: disable=unused-import
from plaso.formatters import plist as plist_formatter
from plaso.lib import event
from plaso.parsers import plist
from plaso.parsers.plist_plugins import softwareupdate
from plaso.parsers.plist_plugins import test_lib
class SoftwareUpdatePluginTest(test_lib.PlistPluginTestCase):
"""Tests for the SoftwareUpdate plist plugin."""
def setUp(self):
"""Sets up the needed objects used throughout the test."""
self._plugin = softwareupdate.SoftwareUpdatePlugin(None)
self._parser = plist.PlistParser(event.PreprocessObject(), None)
def testProcess(self):
"""Tests the Process function."""
plist_name = u'com.apple.SoftwareUpdate.plist'
test_file = self._GetTestFilePath([plist_name])
events = self._ParsePlistFileWithPlugin(
self._parser, self._plugin, test_file, plist_name)
event_objects = self._GetEventObjects(events)
self.assertEquals(len(event_objects), 2)
event_object = event_objects[0]
self.assertEqual(event_object.key, u'')
self.assertEqual(event_object.root, u'/')
expected_desc = u'Last Mac OS X 10.9.1 (13B42) full update.'
self.assertEqual(event_object.desc, expected_desc)
expected_string = u'// {}'.format(expected_desc)
self._TestGetMessageStrings(
event_object, expected_string, expected_string)
event_object = event_objects[1]
self.assertEqual(event_object.key, u'')
self.assertEqual(event_object.root, u'/')
expected_desc = (u'Last Mac OS 10.9.1 (13B42) partially '
u'udpate, pending 1: RAWCameraUpdate5.03(031-2664).')
self.assertEqual(event_object.desc, expected_desc)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -6,657,414,589,122,883,000 | 36.923077 | 74 | 0.721704 | false |
NeerajM999/recap-python | LearnPython/data_structures/binary_tree.py | 1 | 1761 | class Node:
def __init__(self, value):
self.value = value
self.left = None
self.right = None
class BinaryTree(object):
def __init__(self, root_val):
self.root = Node(root_val)
def preorder_traversal(self, start, traversal):
""" Root -> left -> right """
if start:
traversal += (str(start.value) + "-")
traversal = self.preorder_traversal(start.left, traversal)
traversal = self.preorder_traversal(start.right, traversal)
return traversal
def inorder_traversal(self, start, traversal):
""" left -> root -> right """
if start:
traversal = self.inorder_traversal(start.left, traversal)
traversal += (str(start.value) + "-")
traversal = self.inorder_traversal(start.right, traversal)
return traversal
def postorder_traversal(self, start, traversal):
""" left -> right -> root """
if start:
traversal = self.postorder_traversal(start.left, traversal)
traversal = self.postorder_traversal(start.right, traversal)
traversal += (str(start.value) + "-")
return traversal
if __name__ == "__main__":
"""
1
/ \
2 3
/ \ / \
4 5 6 7
"""
tree = BinaryTree(1)
tree.root.left = Node(2)
tree.root.right = Node(3)
tree.root.left.left = Node(4)
tree.root.left.right = Node(5)
tree.root.right.left = Node(6)
tree.root.right.right = Node(7)
print("preorder-traversal: ", tree.preorder_traversal(tree.root, ""))
print("inorder-traversal: ", tree.inorder_traversal(tree.root, ""))
print("postorder-traversal: ", tree.postorder_traversal(tree.root, "")) | gpl-3.0 | 1,749,836,215,305,637,600 | 27.885246 | 75 | 0.571266 | false |
kret0s/gnuhealth-live | tryton/server/trytond-3.8.3/trytond/model/fields/one2one.py | 1 | 2080 | # This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
from types import NoneType
from trytond.model.fields.field import Field
from trytond.model.fields.many2many import Many2Many
from trytond.pool import Pool
class One2One(Many2Many):
'''
Define one2one field (``int``).
'''
_type = 'one2one'
def get(self, ids, model, name, values=None):
'''
Return target record.
:param ids: a list of ids
:param model: a string with the name of the model
:param name: a string with the name of the field
:param values: a dictionary with the read values
:return: a dictionary with ids as key and target id as value
'''
res = super(One2One, self).get(ids, model, name, values=values)
for i, vals in res.iteritems():
res[i] = vals[0] if vals else None
return res
def set(self, Model, name, ids, value, *args):
'''
Set the values.
'''
pool = Pool()
Relation = pool.get(self.relation_name)
to_delete = []
to_create = []
args = iter((ids, value) + args)
for ids, value in zip(args, args):
relations = Relation.search([
(self.origin, 'in', ids),
])
to_delete.extend(relations)
if value:
for record_id in ids:
to_create.append({
self.origin: record_id,
self.target: value,
})
if to_delete:
Relation.delete(to_delete)
if to_create:
Relation.create(to_create)
def __set__(self, inst, value):
Target = self.get_target()
if isinstance(value, dict):
value = Target(*value)
elif isinstance(value, (int, long)):
value = Target(value)
assert isinstance(value, (Target, NoneType))
Field.__set__(self, inst, value)
| gpl-3.0 | 4,749,240,343,853,297,000 | 32.015873 | 72 | 0.546635 | false |
Metonimie/Beaglebone | programs/server.py | 1 | 3147 | #!/usr/bin/env python
"""
A very simple server in python
used to control gpio pins on the beaglebone black.
The server listens for POST requests on port
6410. It has no security at all, which means
that it accepts post-data from everyone.
Send a GET request::
curl http://localhost
Send a POST request::
curl -d "foo=bar&bin=baz" http://localhost
Usage:
nohup python3 server.py &
"""
# TODO: Add basic security
# TODO: Use dictionary for gpio name : file
import http.server
import urllib
PORT = 6410
gpio_path = "/sys/class/gpio/"
# If the param name is in here then we handle the value.
authorized_gpio = ["gpio60"]
class Server(http.server.BaseHTTPRequestHandler):
def prepare_response(self, code):
"""
Prepares the response that will be send back to the requester,
along with the code.
"""
self.send_response(code)
self.send_header("Content-type", "text/html")
self.send_header("Access-Control-Allow-Origin", "*")
self.end_headers()
def handle_gpio(self, key, value):
"""
Very basic gpio handling, converts the value into
an int and then it writes it to the file.
"""
try:
clean_value = int(value)
with open("{}{}/value".format(gpio_path, key), mode="w") as file:
file.write(str(clean_value))
return False
except ValueError as e:
print(e)
except Exception as e:
print("Exception: {}".format(e))
return True
def unsupported(self):
self.wfile.write("Go Away!\n".encode())
def do_GET(self):
self.unsupported()
def do_HEAD(self):
self.unsupported()
def do_POST(self):
"""
Handles the post request.
If error is True then the handling has failed or the request is
invalid
"""
error = False
try:
# The length of the request, in bytes.
length = int(self.headers['content-length'])
# Dictionary containing keys and values from the request.
postvars = urllib.parse.parse_qs(self.rfile.read(length))
for key, value in postvars.items():
clean_key = key.decode()
clean_value = value[0].decode()
print("Received: " + clean_key + " : " + clean_value)
if clean_key in authorized_gpio:
error = self.handle_gpio(clean_key, clean_value)
else:
error = True
except Exception as e:
print(e)
error = True
response = None
if not error:
self.prepare_response(200)
response = "Operation authorized.\n"
else:
self.prepare_response(403)
response = "Go away!\n"
# Write response to the client.
self.wfile.write(response.encode())
if __name__ == "__main__":
server_address = ('', PORT)
httpd = http.server.HTTPServer(server_address, Server)
print('Starting server')
httpd.serve_forever()
| gpl-3.0 | 3,281,151,376,845,631,500 | 28.688679 | 77 | 0.571973 | false |
arbrandes/edx-configuration | playbooks/roles/supervisor/files/pre_supervisor_checks.py | 1 | 12593 | import argparse
import boto.ec2
from boto.utils import get_instance_metadata, get_instance_identity
from boto.exception import AWSConnectionError
import hipchat
import os
import subprocess
import traceback
import socket
import time
# Services that should be checked for migrations.
MIGRATION_COMMANDS = {
'lms': "/edx/bin/edxapp-migrate-lms --noinput --list",
'cms': "/edx/bin/edxapp-migrate-cms --noinput --list",
'xqueue': ". {env_file}; sudo -E -u xqueue {python} {code_dir}/manage.py showmigrations",
'ecommerce': ". {env_file}; sudo -E -u ecommerce {python} {code_dir}/manage.py showmigrations",
'insights': ". {env_file}; sudo -E -u insights {python} {code_dir}/manage.py showmigrations",
'analytics_api': ". {env_file}; sudo -E -u analytics_api {python} {code_dir}/manage.py showmigrations",
'credentials': ". {env_file}; sudo -E -u credentials {python} {code_dir}/manage.py showmigrations",
'discovery': ". {env_file}; sudo -E -u discovery {python} {code_dir}/manage.py showmigrations",
}
HIPCHAT_USER = "PreSupervisor"
# Max amount of time to wait for tags to be applied.
MAX_BACKOFF = 120
INITIAL_BACKOFF = 1
REGION = get_instance_identity()['document']['region']
def services_for_instance(instance_id):
"""
Get the list of all services named by the services tag in this
instance's tags.
"""
ec2 = boto.ec2.connect_to_region(REGION)
reservations = ec2.get_all_instances(instance_ids=[instance_id])
for reservation in reservations:
for instance in reservation.instances:
if instance.id == instance_id:
try:
services = instance.tags['services'].split(',')
except KeyError as ke:
msg = "Tag named 'services' not found on this instance({})".format(instance_id)
raise Exception(msg)
for service in services:
yield service
def edp_for_instance(instance_id):
ec2 = boto.ec2.connect_to_region(REGION)
reservations = ec2.get_all_instances(instance_ids=[instance_id])
for reservation in reservations:
for instance in reservation.instances:
if instance.id == instance_id:
try:
environment = instance.tags['environment']
deployment = instance.tags['deployment']
play = instance.tags['play']
except KeyError as ke:
msg = "{} tag not found on this instance({})".format(ke.message, instance_id)
raise Exception(msg)
return (environment, deployment, play)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Enable all services that are in the services tag of this ec2 instance.")
parser.add_argument("-a","--available",
help="The location of the available services.")
parser.add_argument("-e","--enabled",
help="The location of the enabled services.")
migration_args = parser.add_argument_group("edxapp_migrations",
"Args for running edxapp migration checks.")
migration_args.add_argument("--edxapp-code-dir",
help="Location of the edx-platform code.")
migration_args.add_argument("--edxapp-python",
help="Path to python to use for executing migration check.")
migration_args.add_argument("--edxapp-env",
help="Location of the edxapp environment file.")
xq_migration_args = parser.add_argument_group("xqueue_migrations",
"Args for running xqueue migration checks.")
xq_migration_args.add_argument("--xqueue-code-dir",
help="Location of the xqueue code.")
xq_migration_args.add_argument("--xqueue-python",
help="Path to python to use for executing migration check.")
migration_args.add_argument("--xqueue-env",
help="Location of the xqueue environment file.")
ecom_migration_args = parser.add_argument_group("ecommerce_migrations",
"Args for running ecommerce migration checks.")
ecom_migration_args.add_argument("--ecommerce-python",
help="Path to python to use for executing migration check.")
ecom_migration_args.add_argument("--ecommerce-env",
help="Location of the ecommerce environment file.")
ecom_migration_args.add_argument("--ecommerce-code-dir",
help="Location of the ecommerce code.")
credentials_migration_args = parser.add_argument_group("credentials_migrations",
"Args for running credentials migration checks.")
credentials_migration_args.add_argument("--credentials-python",
help="Path to python to use for executing migration check.")
credentials_migration_args.add_argument("--credentials-env",
help="Location of the credentials environment file.")
credentials_migration_args.add_argument("--credentials-code-dir",
help="Location of the credentials code.")
discovery_migration_args = parser.add_argument_group("discovery_migrations",
"Args for running discovery migration checks.")
discovery_migration_args.add_argument("--discovery-python",
help="Path to python to use for executing migration check.")
discovery_migration_args.add_argument("--discovery-env",
help="Location of the discovery environment file.")
discovery_migration_args.add_argument("--discovery-code-dir",
help="Location of the discovery code.")
insights_migration_args = parser.add_argument_group("insights_migrations",
"Args for running insights migration checks.")
insights_migration_args.add_argument("--insights-python",
help="Path to python to use for executing migration check.")
insights_migration_args.add_argument("--insights-env",
help="Location of the insights environment file.")
insights_migration_args.add_argument("--insights-code-dir",
help="Location of the insights code.")
analyticsapi_migration_args = parser.add_argument_group("analytics_api_migrations",
"Args for running analytics_api migration checks.")
analyticsapi_migration_args.add_argument("--analytics-api-python",
help="Path to python to use for executing migration check.")
analyticsapi_migration_args.add_argument("--analytics-api-env",
help="Location of the analytics_api environment file.")
analyticsapi_migration_args.add_argument("--analytics-api-code-dir",
help="Location of the analytics_api code.")
hipchat_args = parser.add_argument_group("hipchat",
"Args for hipchat notification.")
hipchat_args.add_argument("-c","--hipchat-api-key",
help="Hipchat token if you want to receive notifications via hipchat.")
hipchat_args.add_argument("-r","--hipchat-room",
help="Room to send messages to.")
args = parser.parse_args()
report = []
prefix = None
notify = None
try:
if args.hipchat_api_key:
hc = hipchat.HipChat(token=args.hipchat_api_key)
notify = lambda message: hc.message_room(room_id=args.hipchat_room,
message_from=HIPCHAT_USER, message=message)
except Exception as e:
print("Failed to initialize hipchat, {}".format(e))
traceback.print_exc()
instance_id = get_instance_metadata()['instance-id']
prefix = instance_id
ec2 = boto.ec2.connect_to_region(REGION)
reservations = ec2.get_all_instances(instance_ids=[instance_id])
instance = reservations[0].instances[0]
if instance.instance_profile['arn'].endswith('/abbey'):
print("Running an abbey build. Not starting any services.")
# Needs to exit with 1 instead of 0 to prevent
# services from starting.
exit(1)
time_left = MAX_BACKOFF
backoff = INITIAL_BACKOFF
environment = None
deployment = None
play = None
while time_left > 0:
try:
environment, deployment, play = edp_for_instance(instance_id)
prefix = "{environment}-{deployment}-{play}-{instance_id}".format(
environment=environment,
deployment=deployment,
play=play,
instance_id=instance_id)
break
except Exception as e:
print("Failed to get EDP for {}: {}".format(instance_id, str(e)))
# With the time limit being 2 minutes we will
# try 5 times before giving up.
time.sleep(backoff)
time_left -= backoff
backoff = backoff * 2
if environment is None or deployment is None or play is None:
msg = "Unable to retrieve environment, deployment, or play tag."
print(msg)
if notify:
notify("{} : {}".format(prefix, msg))
exit(0)
#get the hostname of the sandbox
hostname = socket.gethostname()
try:
#get the list of the volumes, that are attached to the instance
volumes = ec2.get_all_volumes(filters={'attachment.instance-id': instance_id})
for volume in volumes:
volume.add_tags({"hostname": hostname,
"environment": environment,
"deployment": deployment,
"cluster": play,
"instance-id": instance_id,
"created": volume.create_time })
except Exception as e:
msg = "Failed to tag volumes associated with {}: {}".format(instance_id, str(e))
print(msg)
if notify:
notify(msg)
try:
for service in services_for_instance(instance_id):
if service in MIGRATION_COMMANDS:
services = {
"lms": {'python': args.edxapp_python, 'env_file': args.edxapp_env, 'code_dir': args.edxapp_code_dir},
"cms": {'python': args.edxapp_python, 'env_file': args.edxapp_env, 'code_dir': args.edxapp_code_dir},
"ecommerce": {'python': args.ecommerce_python, 'env_file': args.ecommerce_env, 'code_dir': args.ecommerce_code_dir},
"credentials": {'python': args.credentials_python, 'env_file': args.credentials_env, 'code_dir': args.credentials_code_dir},
"discovery": {'python': args.discovery_python, 'env_file': args.discovery_env, 'code_dir': args.discovery_code_dir},
"insights": {'python': args.insights_python, 'env_file': args.insights_env, 'code_dir': args.insights_code_dir},
"analytics_api": {'python': args.analytics_api_python, 'env_file': args.analytics_api_env, 'code_dir': args.analytics_api_code_dir},
"xqueue": {'python': args.xqueue_python, 'env_file': args.xqueue_env, 'code_dir': args.xqueue_code_dir},
}
if service in services and all(arg!=None for arg in services[service].values()) and service in MIGRATION_COMMANDS:
serv_vars = services[service]
cmd = MIGRATION_COMMANDS[service].format(**serv_vars)
if os.path.exists(serv_vars['code_dir']):
os.chdir(serv_vars['code_dir'])
# Run migration check command.
output = subprocess.check_output(cmd, shell=True, )
if '[ ]' in output:
raise Exception("Migrations have not been run for {}".format(service))
# Link to available service.
available_file = os.path.join(args.available, "{}.conf".format(service))
link_location = os.path.join(args.enabled, "{}.conf".format(service))
if os.path.exists(available_file):
subprocess.call("sudo -u supervisor ln -sf {} {}".format(available_file, link_location), shell=True)
report.append("Enabling service: {}".format(service))
else:
raise Exception("No conf available for service: {}".format(link_location))
except AWSConnectionError as ae:
msg = "{}: ERROR : {}".format(prefix, ae)
if notify:
notify(msg)
notify(traceback.format_exc())
raise ae
except Exception as e:
msg = "{}: ERROR : {}".format(prefix, e)
print(msg)
if notify:
notify(msg)
traceback.print_exc()
raise e
else:
msg = "{}: {}".format(prefix, " | ".join(report))
print(msg)
if notify:
notify(msg)
| agpl-3.0 | -2,843,304,026,179,480,000 | 45.640741 | 152 | 0.615262 | false |
cyanogen/uchroma | uchroma/traits.py | 1 | 11759 | #
# uchroma - Copyright (C) 2021 Stefanie Kondik
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, version 3.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# pylint: disable=protected-access, invalid-name, no-member
import enum
import importlib
import sys
from argparse import ArgumentParser
from collections.abc import Iterable
from traitlets import CaselessStrEnum, Container, Dict, Enum, Int, HasTraits, \
List, TraitType, Undefined, UseEnum
from frozendict import frozendict
from uchroma.color import to_color
from uchroma.util import ArgsDict
class ColorTrait(TraitType):
"""
A traitlet which encapsulates a grapefruit.Color and performs
type coercion as needed.
"""
info_text = "a color"
allow_none = True
default_value = 'black'
def __init__(self, *args, **kwargs):
super(ColorTrait, self).__init__(*args, **kwargs)
def validate(self, obj, value):
try:
if value is not None:
value = to_color(value)
        except Exception:
self.error(obj, value)
return value
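# Example (illustrative sketch, not part of the public API): assigning
# strings to a ColorTrait coerces them through to_color(), and None is
# accepted because allow_none is True. The "Frame" class is hypothetical.
#
#     class Frame(HasTraits):
#         background = ColorTrait(default_value='black')
#
#     frame = Frame()
#     frame.background = 'red'   # coerced to a grapefruit Color
#     frame.background = None    # allowed, allow_none is True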
class ColorSchemeTrait(List):
"""
A list of ColorTraits which comprise a scheme
"""
info_text = 'a list of colors'
def __init__(self, trait=ColorTrait(), default_value=(),
minlen=0, maxlen=sys.maxsize, **kwargs):
super(ColorSchemeTrait, self).__init__(trait=trait, default_value=default_value,
minlen=minlen, maxlen=maxlen, **kwargs)
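# Illustrative sketch (hypothetical class name): a ColorSchemeTrait
# holds a list of colors, each element coerced by the inner ColorTrait.
#
#     class Scheme(HasTraits):
#         palette = ColorSchemeTrait()
#
#     scheme = Scheme()
#     scheme.palette = ['red', '#00ff00']   # both entries become Colors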
class ColorPresetTrait(UseEnum):
"""
A trait which represents a group of color schemes defined
as a Python Enum.
"""
info_text = 'a predefined color scheme'
def __init__(self, enum_class, default_value=None, **kwargs):
super(ColorPresetTrait, self).__init__(enum_class, default_value=default_value, **kwargs)
class WriteOnceMixin(object):
"""
Mixin for traits which cannot be changed after an initial
value has been set.
"""
write_once = True
def validate(self, obj, value):
if self.name not in obj._trait_values or \
obj._trait_values[self.name] == self.default_value:
return super().validate(obj, value)
self.error(obj, value)
class WriteOnceInt(WriteOnceMixin, Int):
"""
Subclass of Int which may only be written once
"""
pass
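# Illustrative sketch: a write-once trait accepts the first assignment
# (or re-assignment of its default) and rejects later changes with a
# TraitError. The "Device" class is hypothetical.
#
#     class Device(HasTraits):
#         device_index = WriteOnceInt()
#
#     dev = Device()
#     dev.device_index = 3    # first write succeeds
#     dev.device_index = 4    # raises TraitError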
class FrozenDict(WriteOnceMixin, Dict):
"""
Subclass of Dict which converts the value to a frozendict on
the first setting.
"""
def validate(self, obj, value):
return frozendict(super().validate(obj, value))
class UseEnumCaseless(UseEnum):
"""
Subclass of UseEnum which allows selection of values using
case insensitive strings
"""
def select_by_name(self, value, default=Undefined):
if value.startswith(self.name_prefix):
# -- SUPPORT SCOPED-NAMES, like: "Color.red" => "red"
value = value.replace(self.name_prefix, "", 1)
        keys = [x.lower() for x in self.enum_class.__members__.keys()]
        if value.lower() not in keys:
            # a missing name would make list.index() raise ValueError,
            # so fall back to the requested default instead
            return default
        idx = keys.index(value.lower())
        return self.enum_class[list(self.enum_class.__members__.keys())[idx]]
class WriteOnceUseEnumCaseless(WriteOnceMixin, UseEnumCaseless):
"""
Subclass of UseEnumCaseless which may only be written once.
"""
pass
class DefaultCaselessStrEnum(CaselessStrEnum):
"""
Extension of CaselessStrEnum which handles default values better
"""
def validate(self, obj, value):
if self.default_value and (value is None or value == ''):
value = self.default_value
return super().validate(obj, value)
def is_trait_writable(trait: TraitType) -> bool:
"""
Test if a trait is writable
:param trait: the trait to be tested
:return: True if the trait is writable
"""
if trait.read_only:
return False
if hasattr(trait, 'write_once') and trait.write_once:
return False
return True
def trait_as_dict(trait: TraitType) -> dict:
"""
Convert a trait to a dict for sending over D-Bus or the like
:param trait: the trait to be converted
:return: dict representing this trait
"""
cls = trait.__class__
tdict = {}
for k, v in vars(trait).items():
if k.startswith('__') or k == 'this_class':
continue
if hasattr(cls, k) and getattr(cls, k) == v:
continue
if isinstance(v, Iterable) and len(v) == 0:
continue
if k.startswith('_'):
tdict[k[1:]] = v
else:
tdict[k] = v
if isinstance(trait, UseEnum):
cls = CaselessStrEnum
tdict['values'] = tuple(trait.enum_class.__members__.keys())
if 'enum_class' in tdict:
del tdict['enum_class']
for k, v in tdict.items():
if isinstance(v, TraitType):
tdict[k] = trait_as_dict(v)
if isinstance(v, enum.Enum):
tdict[k] = v.name
if isinstance(v, type):
tdict[k] = '%s.%s' % (v.__module__, v.__name__)
tdict['__class__'] = (cls.__module__, cls.__name__)
return tdict
def class_traits_as_dict(obj: HasTraits, values: dict=None) -> dict:
"""
Create a dict which represents all traits of the given object.
This dict itself can be inspected in a generic API, or it
may be converted back to a (stub) instance of HasTraits. This
facilitates the sending of configurable object properties over
an interface such as D-Bus.
:param obj: an instance of HasTraits
:param value: optional dict of trait values (pulled from obj by default)
:return: dict representing all traits in obj
"""
cls_dt = {}
if isinstance(obj, type) and hasattr(obj, 'class_traits'):
traits = obj.class_traits()
elif isinstance(obj, dict):
traits = obj
elif isinstance(obj, HasTraits):
traits = obj.traits()
values = obj._trait_values
else:
raise TypeError("Object does not support traits")
for k, v in traits.items():
dt = trait_as_dict(v)
if dt is None:
continue
if values is not None and k in values:
dt['__value__'] = values[k]
cls_dt[k] = dt
return cls_dt
def dict_as_trait(obj: dict) -> TraitType:
"""
Create a trait from a dict (trait_as_dict).
"""
if '__class__' not in obj:
raise ValueError("No module and class attribute present")
tobj = obj.copy()
module_name, trait_class = tobj.pop('__class__')
module = importlib.import_module(module_name)
if not hasattr(module, trait_class):
raise TypeError("Unknown class: %s" % trait_class)
cls = getattr(module, trait_class)
if 'trait' in tobj:
tobj['trait'] = dict_as_trait(tobj.pop('trait'))
metadata = {}
if 'metadata' in tobj:
metadata.update(tobj.pop('metadata'))
if issubclass(cls, Enum):
trait = cls(tobj.pop('values'), **tobj)
else:
trait = cls(**tobj)
for k in list(metadata.keys()):
if k in ('name', 'default_args', 'default_kwargs'):
setattr(trait, k, metadata.pop(k))
trait.metadata = metadata
return trait
def dict_as_class_traits(obj: dict) -> HasTraits:
"""
Convert a dict of unpacked traits to a HasTraits instance.
Useful for remote parameter inspection and validation.
:param obj: dict of unpacked traits
:return: the stub HasTraits instance
"""
if not isinstance(obj, dict):
raise TypeError("Object must be a dict (was: %s)" % obj)
traits = {}
values = {}
for k, v in obj.items():
if '__value__' in v:
values[k] = v.pop('__value__')
trait = dict_as_trait(v)
if trait is None:
continue
traits[k] = trait
    stub = HasTraits()
    stub.add_traits(**traits)
    for k, v in values.items():
        setattr(stub, k, v)
    return stub
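# Illustrative round trip (sketch, names made up): pack a HasTraits class for
# transport and rebuild a stub on the receiving side, as a D-Bus consumer of
# this module would:
#
#     class Remote(HasTraits):
#         brightness = Int(80).tag(config=True)
#
#     stub = dict_as_class_traits(class_traits_as_dict(Remote()))
#     stub.brightness  # -> 80, validated locally without the real object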
def get_args_dict(obj: HasTraits, incl_all=False):
"""
Return a dict of user-configurable traits for an object
:param obj: an instance of HasTraits
:param incl_all: If all items should be included, regardless of RO status
:return: dict of arguments
"""
argsdict = ArgsDict()
for k in sorted(obj._trait_values.keys()):
v = obj._trait_values[k]
trait = obj.traits()[k]
if incl_all or (not trait.get_metadata('hidden') and is_trait_writable(trait)):
argsdict[k] = v
return argsdict
def add_traits_to_argparse(obj: HasTraits, parser: ArgumentParser,
prefix: str=None):
"""
Add all traits from the given object to the argparse context.
:param obj: an instance of HasTraits
:param parser: argparse parser
:param prefix: string to prefix keys with
"""
for key, trait in obj.traits().items():
if trait.get_metadata('config') is not True:
continue
argname = '--%s' % key
if prefix is not None:
argname = '--%s.%s' % (prefix, key)
if isinstance(trait, Container):
parser.add_argument(argname, nargs='+', help=trait.info_text)
elif isinstance(trait, Enum):
parser.add_argument(argname, type=str.lower,
choices=[x.lower() for x in trait.values],
help=trait.info_text)
else:
argtype = str
if hasattr(trait, 'default_value'):
argtype = type(trait.default_value)
parser.add_argument(argname, type=argtype, help=trait.info_text)
def apply_from_argparse(args, traits=None, target: HasTraits=None) -> dict:
"""
Applies arguments added via add_traits_to_argparse to
a target object which implements HasTraits. If a target
is not known, a dict of traits may be passed instead.
Will throw TraitError if validation fails.
:param args: Parsed args from argparse
:param traits: Dictionary of traits (optional)
:param target: Target object (optional)
:return: Dict of the arguments which actually changed
"""
# apply the traits to an empty object, which will run
# the validators on the client
    if isinstance(traits, HasTraits):
        traits = traits.traits()
    if traits is not None:
        traits = traits.copy()
        for k, v in traits.items():
            if not isinstance(v, TraitType):
                if isinstance(v, dict):
                    traits[k] = dict_as_trait(v)
                else:
                    raise TypeError("A dict or trait object must be supplied")
if target is None:
if traits is None:
raise ValueError("Either traits or target must be specified")
target = HasTraits()
target.add_traits(**traits)
# determine what should actually be changed
argkeys = [k for k, v in vars(args).items() if v is not None]
intersect = set(target.traits().keys()).intersection(set(argkeys))
# apply the argparse flags to the target object
for key in intersect:
if target.traits()[key].get_metadata('config') is not True:
raise ValueError("Trait is not marked as configurable: %s" % key)
setattr(target, key, getattr(args, key))
# if all validators passed, return a dict of the changed args
changed = {}
for key in intersect:
changed[key] = target._trait_values[key]
return changed
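# End-to-end sketch (illustrative): expose a trait on the command line and
# validate the parsed value. _Opts and the 'speed' trait are made-up names.
def _example_argparse_bridge():
    class _Opts(HasTraits):
        speed = Int(5).tag(config=True)
    parser = ArgumentParser()
    opts = _Opts()
    add_traits_to_argparse(opts, parser)
    args = parser.parse_args(['--speed', '9'])
    return apply_from_argparse(args, target=opts)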
# ---------------------------------------------------------------------------
# File: pyamf/adapters/_django_db_models_base.py (repo: cardmagic/PyAMF)
# ---------------------------------------------------------------------------
# Copyright (c) 2007-2009 The PyAMF Project.
# See LICENSE.txt for details.
"""
`django.db.models` adapter module.
:see: `Django Project <http://www.djangoproject.com>`_
:since: 0.4.1
"""
from django.db.models.base import Model
from django.db.models import fields
from django.db.models.fields import related, files
import datetime
import pyamf
from pyamf.util import imports
class DjangoReferenceCollection(dict):
"""
This helper class holds a dict of klass to pk/objects loaded from the
underlying db.
:since: 0.5
"""
def _getClass(self, klass):
if klass not in self.keys():
self[klass] = {}
return self[klass]
def getClassKey(self, klass, key):
"""
Return an instance based on klass/key.
If an instance cannot be found then `KeyError` is raised.
:param klass: The class of the instance.
:param key: The primary_key of the instance.
:return: The instance linked to the `klass`/`key`.
:rtype: Instance of `klass`.
"""
d = self._getClass(klass)
return d[key]
def addClassKey(self, klass, key, obj):
"""
Adds an object to the collection, based on klass and key.
:param klass: The class of the object.
:param key: The datastore key of the object.
:param obj: The loaded instance from the datastore.
"""
d = self._getClass(klass)
d[key] = obj
class DjangoClassAlias(pyamf.ClassAlias):
def getCustomProperties(self):
self.fields = {}
self.relations = {}
self.columns = []
self.meta = self.klass._meta
for name in self.meta.get_all_field_names():
x = self.meta.get_field_by_name(name)[0]
if isinstance(x, files.FileField):
self.readonly_attrs.update([name])
if isinstance(x, related.RelatedObject):
continue
if not isinstance(x, related.ForeignKey):
self.fields[name] = x
else:
self.relations[name] = x
for k, v in self.klass.__dict__.iteritems():
if isinstance(v, related.ReverseManyRelatedObjectsDescriptor):
self.fields[k] = v.field
parent_fields = []
for field in self.meta.parents.values():
parent_fields.append(field.attname)
del self.relations[field.name]
self.exclude_attrs.update(parent_fields)
props = self.fields.keys()
self.encodable_properties.update(props)
self.decodable_properties.update(props)
def _compile_base_class(self, klass):
if klass is Model:
return
pyamf.ClassAlias._compile_base_class(self, klass)
def _encodeValue(self, field, value):
if value is fields.NOT_PROVIDED:
return pyamf.Undefined
if value is None:
return value
# deal with dates ..
if isinstance(field, fields.DateTimeField):
return value
elif isinstance(field, fields.DateField):
return datetime.datetime(value.year, value.month, value.day, 0, 0, 0)
elif isinstance(field, fields.TimeField):
return datetime.datetime(1970, 1, 1,
value.hour, value.minute, value.second, value.microsecond)
elif isinstance(value, files.FieldFile):
return value.name
return value
def _decodeValue(self, field, value):
if value is pyamf.Undefined:
return fields.NOT_PROVIDED
if isinstance(field, fields.AutoField) and value == 0:
return None
elif isinstance(field, fields.DateTimeField):
# deal with dates
return value
elif isinstance(field, fields.DateField):
if not value:
return None
return datetime.date(value.year, value.month, value.day)
elif isinstance(field, fields.TimeField):
if not value:
return None
return datetime.time(value.hour, value.minute, value.second, value.microsecond)
return value
def getEncodableAttributes(self, obj, **kwargs):
attrs = pyamf.ClassAlias.getEncodableAttributes(self, obj, **kwargs)
if not attrs:
attrs = {}
for name, prop in self.fields.iteritems():
if name not in attrs.keys():
continue
if isinstance(prop, related.ManyToManyField):
attrs[name] = [x for x in getattr(obj, name).all()]
else:
attrs[name] = self._encodeValue(prop, getattr(obj, name))
keys = attrs.keys()
for key in keys:
if key.startswith('_'):
del attrs[key]
for name, relation in self.relations.iteritems():
if '_%s_cache' % name in obj.__dict__:
attrs[name] = getattr(obj, name)
del attrs[relation.column]
if not attrs:
attrs = None
return attrs
def getDecodableAttributes(self, obj, attrs, **kwargs):
attrs = pyamf.ClassAlias.getDecodableAttributes(self, obj, attrs, **kwargs)
for n in self.decodable_properties:
if n in self.relations:
continue
f = self.fields[n]
attrs[f.attname] = self._decodeValue(f, attrs[n])
        # The primary key of a django object must always be set first for
        # relationships with other model objects to work properly, and
        # dict.iteritems() does not guarantee order.
        #
        # django also forces the use of only one attribute as primary key,
        # so our obj._meta.pk.attname check is sufficient.
try:
setattr(obj, obj._meta.pk.attname, attrs[obj._meta.pk.attname])
del attrs[obj._meta.pk.attname]
except KeyError:
pass
return attrs
def getDjangoObjects(context):
"""
Returns a reference to the `django_objects` on the context. If it doesn't
exist then it is created.
:param context: The context to load the `django_objects` index from.
:type context: Instance of :class:`pyamf.BaseContext`
:return: The `django_objects` index reference.
:rtype: Instance of :class:`DjangoReferenceCollection`
:since: 0.5
"""
if not hasattr(context, 'django_objects'):
context.django_objects = DjangoReferenceCollection()
return context.django_objects
def writeDjangoObject(self, obj, *args, **kwargs):
"""
The Django ORM creates new instances of objects for each db request.
This is a problem for PyAMF as it uses the id(obj) of the object to do
reference checking.
We could just ignore the problem, but the objects are conceptually the
same so the effort should be made to attempt to resolve references for a
given object graph.
We create a new map on the encoder context object which contains a dict of
C{object.__class__: {key1: object1, key2: object2, .., keyn: objectn}}. We
use the primary key to do the reference checking.
:since: 0.5
"""
if not isinstance(obj, Model):
self.writeNonDjangoObject(obj, *args, **kwargs)
return
context = self.context
kls = obj.__class__
s = obj.pk
if s is None:
self.writeNonDjangoObject(obj, *args, **kwargs)
return
django_objects = getDjangoObjects(context)
try:
referenced_object = django_objects.getClassKey(kls, s)
except KeyError:
referenced_object = obj
django_objects.addClassKey(kls, s, obj)
self.writeNonDjangoObject(referenced_object, *args, **kwargs)
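# Illustrative sketch (not exercised by PyAMF itself): the per-class/per-pk
# map that makes the reference resolution above work. The key and payload
# values are made up; any model class and primary key can be used.
def _example_reference_collection():
    refs = DjangoReferenceCollection()
    refs.addClassKey(Model, 42, 'first instance seen for pk 42')
    try:
        return refs.getClassKey(Model, 42)
    except KeyError:
        return None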
def install_django_reference_model_hook(mod):
"""
Called when :module:`pyamf.amf0` or :module:`pyamf.amf3` are imported. Attaches the
:func:`writeDjangoObject` method to the `Encoder` class in that module.
:param mod: The module imported.
:since: 0.4.1
"""
if not hasattr(mod.Encoder, 'writeNonDjangoObject'):
mod.Encoder.writeNonDjangoObject = mod.Encoder.writeObject
mod.Encoder.writeObject = writeDjangoObject
# initialise the module here: hook into pyamf
pyamf.register_alias_type(DjangoClassAlias, Model)
# hook the L{writeDjangobject} method to the Encoder class on import
imports.when_imported('pyamf.amf0', install_django_reference_model_hook)
imports.when_imported('pyamf.amf3', install_django_reference_model_hook)
# ---------------------------------------------------------------------------
# File: pebbles/tests/test_docker_driver.py
# (repo: CSC-IT-Center-for-Science/pouta-blueprints)
# ---------------------------------------------------------------------------
import json
import logging
import docker.errors
import pebbles.drivers.provisioning.docker_driver as docker_driver
from pebbles.tests.base import BaseTestCase
from pebbles.drivers.provisioning.docker_driver import NAMESPACE_CPU, NAMESPACE_GPU
from pebbles.drivers.provisioning.docker_driver import DD_STATE_ACTIVE, DD_STATE_INACTIVE, DD_STATE_SPAWNED, DD_STATE_REMOVED, KEY_PREFIX_POOL, KEY_CONFIG
import mock
from sys import version_info
import docker.utils
import time
if version_info.major == 2:
import __builtin__ as builtins
else:
import builtins
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
namespace_values = [NAMESPACE_CPU, NAMESPACE_GPU]
# decorator for overriding open
def mock_open_context(func):
def inner(*args, **kwargs):
with mock.patch.object(builtins, 'open', mock.mock_open(read_data='1234123412341234')):
return func(*args, **kwargs)
return inner
# decorator for raising RuntimeError if in failure mode
def raise_on_failure_mode(func):
def inner(*args, **kwargs):
if args[0].failure_mode:
raise RuntimeError('In failure mode')
return func(*args, **kwargs)
return inner
class MockResponse(object):
def __init__(self, status_code):
self.status_code = status_code
class OpenStackServiceMock(object):
def __init__(self, config):
self.spawn_count = 0
self.servers = []
self.failure_mode = False
@raise_on_failure_mode
def provision_instance(self, display_name, image_name, flavor_name,
public_key, extra_sec_groups=None,
master_sg_name=None, allocate_public_ip=True,
root_volume_size=0, data_volume_size=0, data_volume_type=None,
nics=None,
userdata=None
):
self.spawn_count += 1
res = dict(
server_id='%s' % self.spawn_count
)
res['address_data'] = dict(
private_ip='192.168.1.%d' % self.spawn_count,
public_ip=None,
)
if allocate_public_ip:
res['address_data']['public_ip'] = '172.16.0.%d' % self.spawn_count
if flavor_name.startswith("gpu"):
res['namespace'] = "DockerDriverGpu"
else:
res['namespace'] = "DockerDriver"
self.servers.append(res)
return res
@raise_on_failure_mode
def deprovision_instance(self, instance_id, name=None, delete_attached_volumes=False):
self.servers = [x for x in self.servers if str(x['server_id']) != str(instance_id)]
return {}
@raise_on_failure_mode
def upload_key(self, key_name, public_key):
pass
@raise_on_failure_mode
def delete_key(self, key_name):
pass
# noinspection PyUnusedLocal
class DockerClientMock(object):
def __init__(self):
self._containers = []
self.spawn_count = 0
self.failure_mode = False
@raise_on_failure_mode
def pull(self, image):
pass
@raise_on_failure_mode
def containers(self):
return self._containers[:]
def create_host_config(self, *args, **kwargs):
return {}
@raise_on_failure_mode
def create_container(self, name, **kwargs):
self.spawn_count += 1
container = dict(
Id='%s' % self.spawn_count,
Name=name,
Labels=dict(slots='1')
)
self._containers.append(container)
return container
@raise_on_failure_mode
def start(self, container_id, **kwargs):
pass
@raise_on_failure_mode
def remove_container(self, name, **kwargs):
matches = [x for x in self._containers if x['Name'] == name]
if len(matches) == 1:
container = matches[0]
self._containers.remove(container)
elif len(matches) == 0:
response = MockResponse(status_code=404)
raise docker.errors.APIError("foo", response=response, explanation='')
else:
raise RuntimeError('More than one container with same name detected')
@raise_on_failure_mode
def port(self, *args):
return [{'HostPort': 32768 + self.spawn_count % 32768}]
def load_image(self, *args):
pass
class PBClientMock(object):
def __init__(self):
self.instance_data = {}
config = dict(
memory_limit='512m',
environment_vars=''
)
self.blueprint_data = {
'bp-01': dict(
id='bp-01',
name='test blueprint 01',
config=config
)
}
self.blueprint_data['bp-01']['full_config'] = dict(
docker_image='csc/test_image',
internal_port=8888,
consumed_slots=1,
memory_limit=config['memory_limit'],
environment_vars=config['environment_vars']
)
self.namespaced_records = [{
'namespace': 'DockerDriver',
'key': 'backend_config',
'value': dict(
DD_HOST_IMAGE='CentOS-7',
DD_MAX_HOSTS=4,
DD_SHUTDOWN_MODE=False,
DD_FREE_SLOT_TARGET=4,
DD_HOST_FLAVOR_NAME_SMALL='standard.tiny',
DD_HOST_FLAVOR_SLOTS_SMALL=4,
DD_HOST_FLAVOR_NAME_LARGE='standard.xlarge',
DD_HOST_FLAVOR_SLOTS_LARGE=16,
DD_HOST_MASTER_SG='pb_server',
DD_HOST_EXTRA_SGS='',
DD_HOST_ROOT_VOLUME_SIZE=0,
DD_HOST_DATA_VOLUME_FACTOR=4,
DD_HOST_DATA_VOLUME_DEVICE='/dev/vdc',
DD_HOST_DATA_VOLUME_TYPE='',
),
'updated_ts': 0},
{
'namespace': 'DockerDriverGpu',
'key': 'backend_config',
'value': dict(
DD_HOST_IMAGE='CentOS-7-Cuda',
DD_MAX_HOSTS=2,
DD_SHUTDOWN_MODE=False,
DD_FREE_SLOT_TARGET=4,
DD_HOST_FLAVOR_NAME_SMALL='gpu.1.1gpu',
DD_HOST_FLAVOR_SLOTS_SMALL=210,
DD_HOST_FLAVOR_NAME_LARGE='gpu.1.1gpu',
DD_HOST_FLAVOR_SLOTS_LARGE=210,
DD_HOST_MASTER_SG='pb_server',
DD_HOST_EXTRA_SGS='',
DD_HOST_ROOT_VOLUME_SIZE=0,
DD_HOST_DATA_VOLUME_FACTOR=4,
DD_HOST_DATA_VOLUME_DEVICE='/dev/vdc',
DD_HOST_DATA_VOLUME_TYPE='',
),
'updated_ts': 0}]
def add_instance_data(self, instance_id):
self.instance_data[instance_id] = dict(
id='%s' % instance_id,
name='pb-%s' % instance_id,
state='starting',
blueprint_id='bp-01',
)
def get_instance_description(self, instance_id):
return self.instance_data[instance_id]
def get_blueprint_description(self, blueprint_id):
return self.blueprint_data[blueprint_id]
def do_instance_patch(self, instance_id, payload):
data = self.instance_data[instance_id]
data.update(payload)
if 'instance_data' in data.keys() and isinstance(data['instance_data'], str):
data['instance_data'] = json.loads(data['instance_data'])
def _filter_namespaced_records(self, namespace, key=None):
filters = [lambda x: x['namespace'] == namespace]
if key:
filters.append(lambda x: x['key'].startswith(key)) # mocking the 'like' SQL operator
filtered_record = filter(
lambda record: all(f(record) for f in filters),
self.namespaced_records
)
return list(filtered_record)
def get_namespaced_keyvalues(self, payload=None):
filtered_records = self._filter_namespaced_records(payload['namespace'], payload['key'])
return filtered_records
def get_namespaced_keyvalue(self, namespace, key):
filtered_record = self._filter_namespaced_records(namespace, key)[0]
return filtered_record
def create_or_modify_namespaced_keyvalue(self, namespace, key, payload):
if not self._filter_namespaced_records(namespace, key):
payload['updated_ts'] = time.time()
self.namespaced_records.append(payload)
else:
filtered_record = self._filter_namespaced_records(namespace, key)
filtered_record[0]['value'] = payload['value']
filtered_record[0]['updated_ts'] = time.time()
def delete_namespaced_keyvalue(self, namespace, key):
filtered_record = self._filter_namespaced_records(namespace, key)
if filtered_record:
self.namespaced_records.remove(filtered_record[0])
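# Quick illustration (not used by the tests below): the namespaced key/value
# round trip offered by PBClientMock. The key name 'pool_demo' is made up.
def _example_pbclient_roundtrip():
    pbc = PBClientMock()
    payload = {'namespace': NAMESPACE_CPU, 'key': 'pool_demo',
               'value': {'id': 'demo-host'}}
    pbc.create_or_modify_namespaced_keyvalue(NAMESPACE_CPU, 'pool_demo', payload)
    record = pbc.get_namespaced_keyvalue(NAMESPACE_CPU, 'pool_demo')
    pbc.delete_namespaced_keyvalue(NAMESPACE_CPU, 'pool_demo')
    return record['value']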
# noinspection PyUnusedLocal
class DockerDriverAccessMock(object):
def __init__(self, config):
self.json_data = {}
self.oss_mock = OpenStackServiceMock(config)
self.dc_mocks = {}
self.pbc_mock = PBClientMock()
self.shutdown_mode = False
self.failure_mode = False
def _filter_records(self, servers_value, namespace_value):
filters = [lambda x: x['namespace'] == namespace_value]
filtered_record = filter(
lambda record: all(f(record) for f in filters),
servers_value
)
        # need to convert to a list, because in python3 filter returns a
        # generator instead of a list as in python2. This should not consume
        # much more memory, because filtered_record is not big.
return list(filtered_record)
def load_records(self, token=None, url=None, namespace_value=NAMESPACE_CPU):
namespaced_records = self.pbc_mock.get_namespaced_keyvalues({'namespace': namespace_value, 'key': KEY_PREFIX_POOL})
hosts = []
for ns_record in namespaced_records:
hosts.append(ns_record['value'])
return hosts
def save_records(self, token, url, hosts, namespace_value):
for host in hosts:
_key = '%s_%s' % (KEY_PREFIX_POOL, host['id'])
payload = {
'namespace': namespace_value,
'key': _key
}
if host.get('state') in [DD_STATE_SPAWNED, DD_STATE_ACTIVE, DD_STATE_INACTIVE]: # POST or PUT
payload['value'] = host
self.pbc_mock.create_or_modify_namespaced_keyvalue(namespace_value, _key, payload)
elif host.get('state') == DD_STATE_REMOVED:
self.pbc_mock.delete_namespaced_keyvalue(namespace_value, _key)
def load_driver_config(self, token=None, url=None, namespace_value=NAMESPACE_CPU):
namespaced_record = self.pbc_mock.get_namespaced_keyvalue(namespace_value, KEY_CONFIG)
driver_config = namespaced_record['value']
return driver_config
def get_docker_client(self, docker_url):
if docker_url not in self.dc_mocks.keys():
self.dc_mocks[docker_url] = DockerClientMock()
self.dc_mocks[docker_url].failure_mode = self.failure_mode
return self.dc_mocks[docker_url]
def get_openstack_service(self, config):
return self.oss_mock
def get_pb_client(self, token, base_url, ssl_verify):
return self.pbc_mock
def run_ansible_on_host(self, host, custom_logger, config, playbook_name):
if self.failure_mode:
raise RuntimeError
@staticmethod
def proxy_add_route(route_id, target_url, options):
pass
@staticmethod
def proxy_remove_route(route_id):
pass
def __repr__(self):
res = dict(
            json_data=self.json_data,
oss_mock='%s' % self.oss_mock
)
return json.dumps(res)
@staticmethod
def get_image_names():
return ['test/test1']
@staticmethod
def wait_for_port(ip_address, port, max_wait_secs=60):
pass
# noinspection PyProtectedMember
class DockerDriverTestCase(BaseTestCase):
def setUp(self):
# set up a constants to known values for tests
docker_driver.DD_HOST_LIFETIME = 900
@staticmethod
def create_docker_driver():
config = dict(
INSTANCE_DATA_DIR='/tmp',
M2M_CREDENTIAL_STORE='',
INTERNAL_API_BASE_URL='http://bogus/api/v1',
TEST_MODE=True,
PUBLIC_IPV4='10.0.0.1',
EXTERNAL_HTTPS_PORT=443,
)
dd = docker_driver.DockerDriver(logger, config)
dd._ap = DockerDriverAccessMock(config)
return dd
@mock_open_context
def test_spawn_one_host(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# check that a host gets created
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
hosts_data = ddam.load_records(namespace_value=dd_kind)
host = hosts_data[0]
self.assertEquals(host['state'], DD_STATE_SPAWNED)
self.assertEquals(host['spawn_ts'], cur_ts)
# check that the new host gets activated
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
hosts_data = ddam.load_records(namespace_value=dd_kind)
host = hosts_data[0]
self.assertEquals(host['state'], DD_STATE_ACTIVE)
# check that we don't scale up if there are no instances
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
@mock_open_context
def test_do_not_spawn_if_not_used(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
# fast forward time past lifetime, but when the host is not used the lifetime should not tick
cur_ts += 60 + docker_driver.DD_HOST_LIFETIME
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
@mock_open_context
def test_spawn_activate_remove(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
hosts_data = ddam.load_records(namespace_value=dd_kind)
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE})
# manipulate the host data a bit so that the host is marked as used
hosts_data[0]['lifetime_tick_ts'] = cur_ts
# fast forward time past host lifetime, should have one active and one spawned
cur_ts += 60 + docker_driver.DD_HOST_LIFETIME
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 2)
hosts_data = ddam.load_records(namespace_value=dd_kind)
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE, DD_STATE_SPAWNED})
# next tick: should have two active
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 2)
hosts_data = ddam.load_records(namespace_value=dd_kind)
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE, DD_STATE_ACTIVE})
# next tick: should have one inactive, one active
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 2)
hosts_data = ddam.load_records(namespace_value=dd_kind)
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_INACTIVE, DD_STATE_ACTIVE})
# last tick: should have one active
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
hosts_data = ddam.load_records(namespace_value=dd_kind)
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE})
@mock_open_context
def test_provision_deprovision(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
# spawn an instance and destroy it
ddam.pbc_mock.add_instance_data('1001')
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id='1001')
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()))
dd._do_provision(token='foo', instance_id='1001', cur_ts=cur_ts, selected_host=docker_hosts[0])
dd._do_deprovision(token='foo', instance_id='1001')
@mock_open_context
def test_double_deprovision(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
# spawn an instance and destroy it twice, should not blow up
ddam.pbc_mock.add_instance_data('1001')
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id='1001')
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()))
dd._do_provision(token='foo', instance_id='1001', cur_ts=cur_ts, selected_host=docker_hosts[0])
dd._do_deprovision(token='foo', instance_id='1001')
# because base driver is bypassed in tests, instance state has to be set manually
ddam.pbc_mock.do_instance_patch('1001', dict(state='deleted'))
dd._do_deprovision(token='foo', instance_id='1001')
@mock_open_context
def test_double_deprovision_404(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
# spawn an instance and destroy it twice, should not blow up
ddam.pbc_mock.add_instance_data('1001')
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id='1001')
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()))
dd._do_provision(token='foo', instance_id='1001', cur_ts=cur_ts, selected_host=docker_hosts[0])
dd._do_deprovision(token='foo', instance_id='1001')
dd._do_deprovision(token='foo', instance_id='1001')
@mock_open_context
def test_scale_up_to_the_limit(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
hosts_data = ddam.pbc_mock.get_namespaced_keyvalue(dd_kind, KEY_CONFIG)
num_slots = (hosts_data['value']['DD_HOST_FLAVOR_SLOTS_SMALL'] +
hosts_data['value']['DD_HOST_FLAVOR_SLOTS_LARGE'] * (hosts_data['value']['DD_MAX_HOSTS'] - 1)
)
# spawn instances up to the limit
for i in range(0, num_slots):
ddam.pbc_mock.add_instance_data('%d' % (1000 + i))
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id='%d' % (1000 + i))
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()), dd_kind)
dd._do_provision(token='foo', instance_id='%d' % (1000 + i), cur_ts=cur_ts, selected_host=docker_hosts[0])
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), hosts_data['value']['DD_MAX_HOSTS'])
try:
ddam.pbc_mock.add_instance_data('999')
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id='999')
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()), dd_kind)
dd._do_provision(token='foo', instance_id='999', cur_ts=cur_ts, selected_host=docker_hosts[0])
self.fail('pool should have been full')
except RuntimeWarning:
pass
@mock_open_context
def test_scale_down(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
hosts_data = ddam.pbc_mock.get_namespaced_keyvalue(dd_kind, KEY_CONFIG)
num_slots = (hosts_data['value']['DD_HOST_FLAVOR_SLOTS_SMALL'] +
hosts_data['value']['DD_HOST_FLAVOR_SLOTS_LARGE'] * (hosts_data['value']['DD_MAX_HOSTS'] - 1)
)
# spawn instances up to the limit
for i in range(0, num_slots):
ddam.pbc_mock.add_instance_data('%d' % (1000 + i))
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id='%d' % (1000 + i))
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()), dd_kind)
dd._do_provision(token='foo', instance_id='%d' % (1000 + i), cur_ts=cur_ts, selected_host=docker_hosts[0])
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), hosts_data['value']['DD_MAX_HOSTS'])
# remove instances
for i in range(0, num_slots):
dd._do_deprovision(token='foo', instance_id='%d' % (1000 + i))
# let logic scale down (3 ticks per host should be enough)
cur_ts += docker_driver.DD_HOST_LIFETIME
for i in range(0, hosts_data['value']['DD_MAX_HOSTS'] * 3):
dd._do_housekeep(token='foo', cur_ts=cur_ts)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
@mock_open_context
def test_shutdown_mode(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
hosts_data = ddam.pbc_mock.get_namespaced_keyvalue(dd_kind, KEY_CONFIG)
hosts_data['value']['DD_SHUTDOWN_MODE'] = True
# set shutdown mode and see that we have scaled down
payload = {
'namespace': dd_kind,
'key': KEY_CONFIG
}
payload['value'] = hosts_data['value']
ddam.pbc_mock.create_or_modify_namespaced_keyvalue(dd_kind, KEY_CONFIG, payload)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 0)
@mock_open_context
def test_inactive_host_with_instances(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
count = 999
for dd_kind in namespace_values:
count += 1
# add an instance
ddam.pbc_mock.add_instance_data(count)
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id=count)
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()), dd_kind)
dd._do_provision(token='foo', instance_id=count, cur_ts=cur_ts, selected_host=docker_hosts[0])
# change the state to inactive under the hood (this is possible due to a race
# between housekeep() and provision())
hosts_data = ddam.load_records(namespace_value=dd_kind)
hosts_data[0]['state'] = DD_STATE_INACTIVE
for i in range(5):
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
hosts_data = ddam.load_records(namespace_value=dd_kind)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 2)
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_INACTIVE, DD_STATE_ACTIVE})
# remove the instance and check that the host is removed also
dd._do_deprovision(token='foo', instance_id=(count))
dd._do_deprovision(token='foo', instance_id=(count - 1))
for i in range(5):
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
hosts_data = ddam.load_records(namespace_value=dd_kind)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE})
@mock_open_context
def test_prepare_failing(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
# mimic a failure to prepare it
ddam.failure_mode = True
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
hosts_data = ddam.load_records()
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_SPAWNED})
# recover
ddam.failure_mode = False
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
hosts_data = ddam.load_records()
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE})
@mock_open_context
def test_prepare_failing_max_retries(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
# mimic a failure to prepare it
ddam.failure_mode = True
for i in range(docker_driver.DD_MAX_HOST_ERRORS + 1):
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
hosts_data = ddam.load_records()
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_INACTIVE})
ddam.failure_mode = False
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
hosts_data = ddam.load_records() # Load the hosts_data each time to get the latest updates
self.assertEqual(len(hosts_data), 0)
for i in range(2):
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
hosts_data = ddam.load_records()
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE})
@mock_open_context
def test_docker_comm_probs(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
ddam.pbc_mock.add_instance_data('1000')
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id='1000')
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
# mimic a docker comm failure
ddam.failure_mode = True
try:
dd._do_provision(token='foo', instance_id='1000', cur_ts=cur_ts, selected_host=None)
self.fail('should have raised an error')
except Exception:
pass
ddam.failure_mode = False
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()))
dd._do_provision(token='foo', instance_id='1000', cur_ts=cur_ts, selected_host=docker_hosts[0])
ddam.failure_mode = True
ddam.failure_mode = True
try:
dd._do_deprovision(token='foo', instance_id='1000')
self.fail('should have raised an error')
except Exception:
pass
ddam.failure_mode = False
dd._do_deprovision(token='foo', instance_id='1000')
# spawn only one kind of pool_vm
@mock_open_context
def test_spawn_dd_kind(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
hosts_data = ddam.pbc_mock.get_namespaced_keyvalue(dd_kind, KEY_CONFIG)
if dd_kind == NAMESPACE_CPU:
hosts_data['value']['DD_SHUTDOWN_MODE'] = True
# set shutdown mode and see that we have scaled down
payload = {
'namespace': dd_kind,
'key': KEY_CONFIG
}
payload['value'] = hosts_data['value']
ddam.pbc_mock.create_or_modify_namespaced_keyvalue(dd_kind, KEY_CONFIG, payload)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, NAMESPACE_CPU)), 0)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, NAMESPACE_GPU)), 1)
# ---------------------------------------------------------------------------
# File: src/main/python/update-restheart/Restheart.py
# (repo: ICOS-Carbon-Portal/data)
# ---------------------------------------------------------------------------
import requests
class Restheart(object):
def __init__(self):
# self._baseUrl = 'http://127.0.0.1:8088/db/' # localhost
self._baseUrl = 'https://restheart.icos-cp.eu/db/' # production
        self._verify = 'restheart' in self._baseUrl
def get_records_to_update(self, op, pagesize, collection):
resp = None
try:
url = self.get_url(op, pagesize, collection)
            resp = requests.get(url, timeout=10, verify=self._verify)
if resp.status_code != 200:
print(resp.status_code, resp.reason, resp.json())
return resp.json()
        except Exception as err:
            print(resp, err)
def update_record(self, id, record, collection):
url = self._baseUrl + collection + '/' + id
headers = {"Content-Type": "application/json"}
resp = None
try:
            resp = requests.patch(url, headers=headers, json=record, timeout=5, verify=self._verify)
if resp.status_code != 200:
print(resp.status_code, resp.reason)
        except Exception as err:
            print(resp, err)
def get_url(self, op, pagesize, collection):
if op == 'geo':
if collection == 'portaluse':
return self._baseUrl + collection + '?filter={"city":{"$exists":0}}&np&pagesize=' + str(pagesize)
elif collection == 'dobjdls':
return self._baseUrl + collection + '?filter={"$and":[{"ip":{"$exists":1}},{"city":{"$exists":0}}]}&np&pagesize=' + str(pagesize)
else:
raise ValueError("Unknown collection: " + collection)
elif op == 'label':
if collection == 'portaluse':
return self._baseUrl + collection + '?np&pagesize=' + str(pagesize)
# return self._baseUrl + collection + '?filter={"_id":{"$oid":"5bb21519f17df4d065e9c53c"}}&np&pagesize=' + str(pagesize)
# return self._baseUrl + collection + '?filter={"filterChange":{"$exists":1}}&np&pagesize=' + str(pagesize)
# return self._baseUrl + collection + '?filter={"previewNetCDF":{"$exists":1}}&np&pagesize=' + str(pagesize)
# return self._baseUrl + collection + '?filter={"previewTimeserie":{"$exists":1}}&np&pagesize=' + str(pagesize)
# return self._baseUrl + collection + '?filter={"$and":[{"filterChange":{"$exists":0}},{"previewNetCDF":{"$exists":0}},{"previewTimeserie":{"$exists":0}}]}&np&pagesize=' + str(pagesize)
else:
raise ValueError("Unknown collection: " + collection)
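if __name__ == '__main__':
    # Minimal smoke test (sketch): fetch up to five download records that
    # still lack geolocation. This performs a real HTTP request against the
    # configured restheart instance, so it is for manual use only.
    rh = Restheart()
    print(rh.get_records_to_update('geo', 5, 'dobjdls'))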
# ---------------------------------------------------------------------------
# File: RPG/classes.py (repo: domecraft/Games)
# ---------------------------------------------------------------------------
class character:
def __init__(self, name, gender ,health, race, role, status, strength, defense, magic, bounty, income, reputation):
self.name = name
self.health = health
self.status = status
self.strength = strength
self.defense = defense
self.race = race
self.role = role
self.bounty = bounty
self.magic = magic
self.gender = gender
self.income = income
self.reputation = reputation
self.inventory = []
def modify_health(self, amount):
self.health += amount
def set_health(self, amount):
self.health = amount
def set_status(self, status):
self.status = status
def modify_str(self, amount):
self.strength += amount
def modify_def(self, amount):
self.defense += amount
def add_item(self, item):
self.inventory.append(item)
def remove_item(self, item):
if item in self.inventory:
self.inventory.remove(item)
else:
            print(item + " is not in your inventory!")
def set_race(self, race):
self.race = race
def modify_bounty(self, amount):
self.bounty += amount
def checkDead(self, health):
if self.health <= 0:
self.status = "dead"
return "dead"
else:
self.status = "alive"
return "alive"
def modify_income(self, amount):
self.income += amount
def modify_reputation(self, amount):
self.reputation += amount
#The following class is used for random npcs that I don't really develop in the storyline.
class basicCharacter:
def __init__(self, name, gender, income, status):
self.name = name
self.gender = gender
self.income = income
self.status = status
def set_status(self, status):
self.status = status
class store:
    def __init__(self, name="General Store", owner="Store Owner", alliance="Rebellion"):
self.name = name
self.store_owner = owner
self.alliance = alliance
self.stock = {
'longsword': {'cost': 10, 'speed': 3, 'strength': 7, 'defense': 2},
'shortsword': {'cost': 8, 'speed': 5, 'strength': 4, 'defense': 2},
'bronze_armor': {'cost': 10, 'speed': -2, 'strength': 1, 'defense': 6},
'silver_armor': {'cost': 20, 'speed': -5, 'strength': 2, 'defense': 12},
'platinum_armor': {'cost': 35, 'speed': -8, 'strength': 4, 'defense': 20}
}
class town:
def __init__(self, name, ruler, alliance, income, population):
self.name = name
self.ruler = ruler
self.alliance = alliance
self.income = income
self.population = population
def set_ruler(self, ruler):
self.ruler = ruler
def set_name(self, name):
self.name = name
def set_alliance(self, alliance):
self.alliance = alliance
def modify_income(self, amount):
self.income += amount
def modify_pop(self, population):
self.population += population
class bar:
def __init__(self, name, owner, income):
self.name = name
self.owner = owner
self.income = income
def set_owner(self, owner):
self.owner = owner
def modify_income(amount):
self.income += amount
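if __name__ == "__main__":
    # Small illustrative scene (not part of any game data): exercises the
    # classes above with made-up stats and names.
    hero = character("Aria", "female", 100, "elf", "ranger", "alive",
                     12, 8, 5, 0, 10, 3)
    hero.add_item("shortsword")
    hero.modify_health(-40)
    print(hero.name + " is " + hero.checkDead(hero.health))
    riverton = town("Riverton", "Mayor Bram", "Rebellion", 500, 120)
    riverton.modify_income(hero.income)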
# ---------------------------------------------------------------------------
# File: examples/python/create_sedml.py (repo: fbergmann/libSEDML)
# ---------------------------------------------------------------------------
#!/usr/bin/env python
##
## @file create_sedml.py
## @brief cerates a SED-ML document.
## @author Frank T. Bergmann
##
## <!--------------------------------------------------------------------------
## This file is part of libSEDML. Please visit http://sed-ml.org for more
## information about SEDML, and the latest version of libSEDML.
##
## Copyright (c) 2013, Frank T. Bergmann
## All rights reserved.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## 1. Redistributions of source code must retain the above copyright notice, this
## list of conditions and the following disclaimer.
## 2. Redistributions in binary form must reproduce the above copyright notice,
## this list of conditions and the following disclaimer in the documentation
## and/or other materials provided with the distribution.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
## ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
## WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
## DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
## ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
## (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
## LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
## ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
## SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
## ------------------------------------------------------------------------ -.
##
import sys
import os.path
import libsedml
def main (args):
"""Usage: create_sedml output-filename
"""
if (len(args) != 2):
print(main.__doc__)
sys.exit(1);
# create the document
doc = libsedml.SedDocument();
doc.setLevel(1);
doc.setVersion(1);
# create a first model referencing an sbml file
model = doc.createModel();
model.setId("model1");
model.setSource("file.xml");
model.setLanguage("urn:sedml:language:sbml");
# create a second model modifying a variable of that other sbml file
model = doc.createModel();
model.setId("model2");
model.setSource("model1");
model.setLanguage("urn:sedml:sbml");
# change a paramerter 'k' to 0.1
change = model.createChangeAttribute();
change.setTarget("/sbml:sbml/sbml:model/sbml:listOfParameters/sbml:parameter[@id='k']/@value");
change.setNewValue("0.1");
# remove species 's1'
remove = model.createRemoveXML();
remove.setTarget("/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S1']");
# now for something tricky we want to update the initialConcentration of 'S2' to be
# half what it was in the original model
compute = model.createComputeChange();
  compute.setTarget("/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S2']/@initialConcentration");
variable = compute.createVariable();
variable.setId("S2");
variable.setModelReference("model1");
variable.setTarget("/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S2']");
compute.setMath(libsedml.parseFormula("S2 / 2"));
# create simulation
tc = doc.createUniformTimeCourse();
tc.setId("sim1");
tc.setInitialTime(0.0);
tc.setOutputStartTime(0.0);
tc.setOutputEndTime(10.0);
tc.setNumberOfPoints(1000);
# need to set the correct KISAO Term
alg = tc.createAlgorithm();
alg.setKisaoID("KISAO:0000019");
# create a task that uses the simulation and the model above
task = doc.createTask();
task.setId("task1");
task.setModelReference("model1");
task.setSimulationReference("sim1");
# add a DataGenerator to hold the output for time
dg = doc.createDataGenerator();
dg.setId("time");
dg.setName("time");
var = dg.createVariable();
var.setId("v0");
var.setName("time");
var.setTaskReference("task1");
var.setSymbol("urn:sedml:symbol:time");
dg.setMath(libsedml.parseFormula("v0"));
# and one for S1
dg = doc.createDataGenerator();
dg.setId("S1");
dg.setName("S1");
var = dg.createVariable();
var.setId("v1");
var.setName("S1");
var.setTaskReference("task1");
var.setTarget("/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S1']");
dg.setMath(libsedml.parseFormula("v1"));
# add a report
report = doc.createReport();
report.setId("r1");
report.setName("report 1");
set = report.createDataSet();
set.setId("ds1");
set.setLabel("time");
set.setDataReference("time");
set = report.createDataSet();
set.setId("ds2");
set.setLabel("S1");
set.setDataReference("S1");
# add a 2d plot
plot = doc.createPlot2D();
plot.setId("p1");
plot.setName("S1 Timecourse");
curve = plot.createCurve();
curve.setId("c1");
curve.setName("S1");
curve.setLogX(False);
curve.setLogY(False);
curve.setXDataReference("time");
curve.setYDataReference("S1");
# add a 3D Plot
plot2 = doc.createPlot3D();
plot2.setId("p2");
plot2.setName("dunno");
surf = plot2.createSurface();
surf.setId("surf1");
surf.setName("S1");
surf.setLogX(False);
surf.setLogY(False);
surf.setLogZ(False);
surf.setXDataReference("time");
surf.setYDataReference("S1");
surf.setZDataReference("S1");
# write the document
libsedml.writeSedML(doc, args[1]);
if __name__ == '__main__':
main(sys.argv)
# ---------------------------------------------------------------------------
# File: yaff/pes/colvar.py (repo: molmod/yaff)
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# YAFF is yet another force-field code.
# Copyright (C) 2011 Toon Verstraelen <[email protected]>,
# Louis Vanduyfhuys <[email protected]>, Center for Molecular Modeling
# (CMM), Ghent University, Ghent, Belgium; all rights reserved unless otherwise
# stated.
#
# This file is part of YAFF.
#
# YAFF is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# YAFF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
'''Collective variables
This module implements the computation of collective variables and their
derivatives, typically used in advanced sampling methods such as umbrella
sampling or metadynamics. The ``CollectiveVariable`` class is the main item
in this module, which is normally used in conjunction with an instance of the
``Bias`` class. Note that many collective variables such as bond lengths,
bending angles, improper angles, ... are already implemented by the
:mod:`yaff.pes.iclist` module, so no separate implementation needs to be
provided here.
'''
from __future__ import division
import numpy as np
from yaff.log import log
from yaff.pes.dlist import DeltaList
from yaff.pes.iclist import InternalCoordinateList
from yaff.sampling.utils import cell_lower
__all__ = [
    'CollectiveVariable', 'CVVolume', 'CVCOMProjection', 'CVInternalCoordinate',
'CVLinCombIC',
]
class CollectiveVariable(object):
'''Base class for collective variables.'''
def __init__(self, name, system):
"""
**Arguments:**
name
A name for the collective variable.
system
The system for the collective variable.
"""
self.name = name
self.system = system
self.value = np.nan
self.gpos = np.zeros((system.natom, 3), float)
self.vtens = np.zeros((3, 3), float)
def get_conversion(self):
'''Auxiliary routine that allows base classes the specify the unit
conversion associated with the internal coordinate.
'''
raise NotImplementedError
def get_log(self):
'''Describe the internal coordinate in a format that is suitable for
screen logging.
'''
return '%s' % (self.__class__.__name__)
def compute(self, gpos=None, vtens=None):
"""Compute the collective variable and optionally some derivatives
The only variable inputs for the compute routine are the atomic
positions and the cell vectors.
**Optional arguments:**
gpos
The derivatives of the collective variable towards the Cartesian
coordinates of the atoms. ('g' stands for gradient and 'pos'
for positions.)
This must be a writeable numpy array with shape (N, 3) where N
is the number of atoms.
vtens
The force contribution to the pressure tensor. This is also
known as the virial tensor. It represents the derivative of the
energy towards uniform deformations, including changes in the
shape of the unit cell. (v stands for virial and 'tens' stands
for tensor.) This must be a writeable numpy array with shape (3,
3).
The collective variable value is returned. The optional arguments
are Fortran-style output arguments. When they are present, the
corresponding results are computed and **stored** to the current
contents of the array.
"""
#Subclasses implement their compute code here.
raise NotImplementedError
def get_last_computed_value(self):
"""Return the last value that was computed. It is not assured that this
value reflects the value for the current state of the system. This
is merely a convenience method to obtain the value without
performing an actual computation.
"""
return self.value
class CVInternalCoordinate(CollectiveVariable):
'''
An InternalCoordinate disguised as a CollectiveVariable so that it can
be used together with a BiasPotential.
This is less efficient than using the InternalCoordinate with a
ValenceTerm, so the latter is preferred if it is possible.
'''
def __init__(self, system, ic, comlist=None):
self.system = system
self.ic = ic
self.comlist = comlist
self.dlist = DeltaList(system if comlist is None else comlist)
self.iclist = InternalCoordinateList(self.dlist)
self.iclist.add_ic(ic)
def get_conversion(self):
return self.ic.get_conversion()
def compute(self, gpos=None, vtens=None):
if self.comlist is not None:
self.comlist.forward()
self.dlist.forward()
self.iclist.forward()
self.value = self.iclist.ictab[0]['value']
if gpos is not None: gpos[:] = 0.0
if vtens is not None: vtens[:] = 0.0
if not ((gpos is None) and (vtens is None)):
self.iclist.ictab[0]['grad'] = 1.0
self.iclist.back()
if self.comlist is None:
self.dlist.back(gpos, vtens)
else:
self.comlist.gpos[:] = 0.0
self.dlist.back(self.comlist.gpos, vtens)
self.comlist.back(gpos)
return self.value
class CVVolume(CollectiveVariable):
'''The volume of the simulation cell.'''
def __init__(self, system):
'''
**Arguments:**
system
An instance of the ``System`` class.
'''
if system.cell.nvec == 0:
raise TypeError('Can not compute volume of a non-periodic system.')
CollectiveVariable.__init__(self, 'CVVolume', system)
def get_conversion(self):
return np.power(log.length.conversion, self.system.cell.nvec)
def compute(self, gpos=None, vtens=None):
self.value = self.system.cell.volume
if gpos is not None:
# No dependence on atomic positions
gpos[:] = 0.0
if vtens is not None:
vtens[:] = np.identity(3)*self.value
return self.value
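# Usage sketch (illustrative): a collective variable evaluated on its own,
# outside a Bias potential. Building a real periodic System is application
# specific; only the call pattern matters here:
#
#     cv = CVVolume(system)
#     gpos = np.zeros((system.natom, 3))
#     vtens = np.zeros((3, 3))
#     volume = cv.compute(gpos=gpos, vtens=vtens)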
class CVCOMProjection(CollectiveVariable):
    '''Compute the vector connecting the centers of mass of two groups of
    atoms and return one of its projected components,
    cv = (r_{COM}^{B} - r_{COM}^{A})[index], where r_{COM}^{A} and r_{COM}^{B}
    are the centers of mass of groups A and B. The projection axis depends on
    ``index``:
* first component: projected onto ``a`` vector of cell
* second component: projected onto vector perpendicular to ``a``
and in the plane spanned by ``a`` and ``b``
* third component: projected onto vector perpendicular to ``a`` and
``b``
Note that periodic boundary conditions are NOT taken into account
* the centers of mass are computed using absolute positions; this is
most likely the desired behavior
* the center of mass difference can in principle be periodic, but
the periodicity is not the same as the periodicity of the system,
because of the projection on a selected vector
'''
def __init__(self, system, groups, index):
'''
**Arguments:**
system
An instance of the ``System`` class
groups
List of 2 arrays, each array containing atomic indexes
used to compute one of the centers of mass
index
Selected projection vector:
* if index==0, projection onto ``a`` vector of cell
* if index==1, projection onto vector perpendicular to ``a``
and in the plane spanned by ``a`` and ``b``
* if index==2, projection onto vector perpendicular to ``a``
and ``b``
'''
CollectiveVariable.__init__(self, 'CVCOMProjection', system)
self.index = index
# Safety checks
assert len(groups)==2, "Exactly 2 groups need to be defined"
assert system.cell.nvec==3, "Only 3D periodic systems are supported"
assert self.index in [0,1,2], "Index should be one of 0,1,2"
# Masses need to be defined in order to compute centers of mass
if self.system.masses is None:
self.system.set_standard_masses()
# Define weights w_i such that difference of centers of mass can be
# computed as sum_i w_i r_i
self.weights = np.zeros((system.natom))
self.weights[groups[0]] = -self.system.masses[groups[0]]/np.sum(self.system.masses[groups[0]])
self.weights[groups[1]] = self.system.masses[groups[1]]/np.sum(self.system.masses[groups[1]])
def get_conversion(self):
return log.length.conversion
def compute(self, gpos=None, vtens=None):
'''
Consider a rotation of the entire system such that the ``a`` vector
is aligned with the X-axis, the ``b`` vector is in the XY-plane, and
the ``c`` vector chosen such that a right-handed basis is formed.
The rotated cell is lower-diagonal in the Yaff notation.
In this rotated system, it is fairly simple to compute the required
projections and derivatives, because the projections are simply the
Cartesian components. Values obtained in the rotated system are then
transformed back to the original system.
'''
# Compute rotation that makes cell lower diagonal
_, R = cell_lower(self.system.cell.rvecs)
        # The vector connecting the two centers of mass (aka the collective
        # variable before projection), expressed in the original system
        cv_orig = np.sum(self.weights.reshape((-1,1))*self.system.pos, axis=0)
        # Transform to the rotated system, where the projections are simply
        # the Cartesian components
        cv = np.dot(R, cv_orig)
self.value = cv[self.index]
if gpos is not None:
gpos[:] = 0.0
gpos[:,self.index] = self.weights
# Forces (vector) need to be rotated back to original system
gpos[:] = np.einsum('ij,kj', gpos, R.T)
if vtens is not None:
vtens[:] = 0.0
vtens[self.index,self.index:] = cv[self.index:]
vtens[self.index:,self.index] = cv[self.index:]
# Virial (tensor) needs to be rotated back to original system
vtens[:] = np.dot(R.T,np.dot(vtens[:],R))
return self.value
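

# Illustrative sketch (added; the atom index arrays are placeholders): the
# component along the cell ``a`` axis of the center-of-mass separation
# between two groups of atoms.
def _example_com_projection(system):
    import numpy as np
    groups = [np.array([0, 1, 2]), np.array([3, 4, 5])]
    cv = CVCOMProjection(system, groups, index=0)
    return cv.compute()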
class CVLinCombIC(CollectiveVariable):
'''
A linear combination of InternalCoordinates:
cv = w0*ic0 + w1*ic1 + ...
'''
def __init__(self, system, ics, weights, comlist=None):
'''
**Arguments:**
system
An instance of the ``System`` class.
ics
A list of InternalCoordinate instances.
weights
A list defining the weight of each InternalCoordinate that is
used when computing the linear combination.
**Optional arguments:**
comlist
An instance COMList; if provided, this is used instead of the
normal DeltaList to compute the InternalCoordinates
'''
assert len(weights)==len(ics)
self.system = system
self.ics = ics
self.comlist = comlist
self.dlist = DeltaList(system if comlist is None else comlist)
self.iclist = InternalCoordinateList(self.dlist)
for ic in self.ics:
self.iclist.add_ic(ic)
self.weights = weights
def get_conversion(self):
# Units depend on the particular linear combination of internal
# coordinates
return 1.0
def compute(self, gpos=None, vtens=None):
if self.comlist is not None:
self.comlist.forward()
self.dlist.forward()
self.iclist.forward()
self.value = 0.0
for iic in range(len(self.ics)):
self.value += self.weights[iic]*self.iclist.ictab[iic]['value']
if gpos is not None: gpos[:] = 0.0
if vtens is not None: vtens[:] = 0.0
if not ((gpos is None) and (vtens is None)):
for iic in range(len(self.ics)):
# Derivative of the linear combination to this particular
# internal coordinate
self.iclist.ictab[iic]['grad'] = self.weights[iic]
self.iclist.back()
if self.comlist is None:
self.dlist.back(gpos, vtens)
else:
self.comlist.gpos[:] = 0.0
self.dlist.back(self.comlist.gpos, vtens)
self.comlist.back(gpos)
return self.value
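

# Illustrative sketch (added): an antisymmetric stretch coordinate built as
# the difference of two bond lengths, cv = r(0,1) - r(1,2). The import path
# for ``Bond`` is an assumption based on the rest of the package.
def _example_lin_comb_ic(system):
    from yaff.pes.iclist import Bond
    cv = CVLinCombIC(system, [Bond(0, 1), Bond(1, 2)], [1.0, -1.0])
    return cv.compute()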
| gpl-3.0 | -3,113,228,627,627,625,000 | 37.853372 | 102 | 0.611669 | false |
russorat/savage-leads | api/models/lead.py | 1 | 2649 | from elasticsearch import Elasticsearch,RequestsHttpConnection,NotFoundError
from flask import url_for
import config
import json
class Lead(object):
es = Elasticsearch(config.ES_HOSTS,connection_class=RequestsHttpConnection)
@staticmethod
def create_lead(lead_data):
try:
results = Lead.es.create(index='leads',
doc_type='leads',
body=lead_data
)
if results['created']:
return { 'status': 'success',
'message': '',
'created_id': results['_id'] }
else:
return { 'status': 'failure',
'message': 'failed to create new lead.',
'created_id': '' }
except Exception as e:
print e
return { 'status': 'failure',
'message': 'unknown error',
'created_id': '' }
@staticmethod
def delete_lead(lead_id):
try :
Lead.es.delete(index='leads',
doc_type='leads',
id=lead_id
)
return { 'status': 'success', 'message': '' }
except NotFoundError as e:
return { 'status': 'failure', 'message': 'id not found' }
except Exception as e:
print e
return { 'status': 'failure', 'message': 'unknown error' }
@staticmethod
def get_lead(lead_id):
try:
results = Lead.es.get(
index='leads',
doc_type='leads',
id='%s'%(lead_id),
ignore=404
)
            if results and results['found']:
return {'status':'success','message':'','results':[Lead.from_es_hit(results)]}
return {'status':'success','message':'','results':[]}
except NotFoundError as e:
return { 'status': 'failure', 'message': 'id not found', 'results': [] }
except Exception as e:
print e
return { 'status': 'failure', 'message': 'unknown exception', 'results': [] }
@staticmethod
    def get_leads(size, page, search):
        try:
            results = Lead.es.search(
                index='leads',
                doc_type='leads',
                size=size,
                from_=size * page,  # pagination offset; page assumed zero-indexed
q=search or "*",
sort='last_name:ASC,first_name:ASC'
)
retVal = []
if results and results['hits']['total'] > 0 :
for hit in results['hits']['hits']:
retVal.append(Lead.from_es_hit(hit))
return {'status':'success','message':'','results':retVal}
except Exception as e:
print e
return {'status':'failure','message':'unknown error','results':[]}
@staticmethod
def from_es_hit(hit):
lead = {}
lead['id'] = hit['_id']
for key,val in hit['_source'].items():
lead[key] = val
lead['uri'] = url_for('get_lead', lead_id=lead['id'], _external=True)
return lead
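

# Illustrative usage sketch (added; not part of the original module). The
# field names are hypothetical and depend on the mapping of the 'leads'
# index.
def _example_lead_roundtrip():
    created = Lead.create_lead({'first_name': 'Ada',
                                'last_name': 'Lovelace'})
    if created['status'] == 'success':
        found = Lead.get_lead(created['created_id'])
        page = Lead.get_leads(size=10, page=0, search='Lovelace')
        return found, page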
| apache-2.0 | -1,660,902,958,443,782,400 | 29.102273 | 86 | 0.559079 | false |
openstack/dragonflow | dragonflow/tests/unit/test_port_behind_port.py | 1 | 2387 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from testscenarios import load_tests_apply_scenarios as load_tests # noqa
from dragonflow import conf as cfg
from dragonflow.db.models import trunk as trunk_models
from dragonflow.tests.unit import test_mech_driver
class TestPortBehindPort(test_mech_driver.DFMechanismDriverTestCase):
scenarios = [
('ipvlan', {'segmentation_type': trunk_models.TYPE_IPVLAN}),
('macvlan', {'segmentation_type': trunk_models.TYPE_MACVLAN}),
]
def setUp(self):
cfg.CONF.set_override('auto_detect_port_behind_port',
True,
group='df')
super(TestPortBehindPort, self).setUp()
def test_detect_nested_port(self):
with self.network() as n,\
self.subnet(network=n) as s,\
self.port(subnet=s) as p1,\
self.port(subnet=s) as p2:
p1 = p1['port']
p2 = p2['port']
p2_ip = p2['fixed_ips'][0]['ip_address']
aap = {"ip_address": p2_ip}
if self.segmentation_type == trunk_models.TYPE_MACVLAN:
aap['mac_address'] = p2['mac_address']
data = {'port': {'allowed_address_pairs': [aap]}}
self.nb_api.create.reset_mock()
req = self.new_update_request(
'ports',
data, p1['id'])
req.get_response(self.api)
cps_id = trunk_models.get_child_port_segmentation_id(
p1['id'], p2['id'])
model = trunk_models.ChildPortSegmentation(
id=cps_id,
topic=p1['project_id'],
parent=p1['id'],
port=p2['id'],
segmentation_type=self.segmentation_type,
)
self.nb_api.create.assert_called_once_with(model)
| apache-2.0 | 8,434,192,210,710,150,000 | 40.155172 | 78 | 0.584416 | false |
jaantollander/CrowdDynamics | crowddynamics/core/tests/test_interactions_benchmark.py | 1 | 1239 | import numpy as np
import pytest
from crowddynamics.core.interactions import agent_agent_block_list
from crowddynamics.core.vector2D import unit_vector
from crowddynamics.simulation.agents import Agents, Circular, ThreeCircle, \
AgentGroup
def attributes():
orientation = np.random.uniform(-np.pi, np.pi)
return dict(body_type='adult',
orientation=orientation,
velocity=np.random.uniform(0.0, 1.3, 2),
angular_velocity=np.random.uniform(-1.0, 1.0),
target_direction=unit_vector(orientation),
target_orientation=orientation)
@pytest.mark.parametrize('size', (200, 500, 1000))
@pytest.mark.parametrize('agent_type', (Circular, ThreeCircle))
def test_agent_agent_block_list(benchmark, size, agent_type):
# Grow the area with size. Keeps agent density constant.
area_size = np.sqrt(2 * size)
agents = Agents(agent_type=agent_type)
group = AgentGroup(
agent_type=agent_type,
size=size,
attributes=attributes)
agents.add_non_overlapping_group(
group, position_gen=lambda: np.random.uniform(-area_size, area_size, 2))
benchmark(agent_agent_block_list, agents.array)
assert True
| gpl-3.0 | 4,276,068,190,012,276,700 | 36.545455 | 80 | 0.684423 | false |
myshkov/bnn-analysis | models/bbb_sampler.py | 1 | 4851 | """
This module implements Bayes By Backprop -based sampler for NNs.
http://jmlr.org/proceedings/papers/v37/blundell15.pdf
"""
import numpy as np
from keras.models import Sequential
from keras.layers.core import Activation
from keras import backend as K
from keras.engine.topology import Layer
from sampler import Sampler, SampleStats
class BBBSampler(Sampler):
"""
BBB sampler for NNs.
"""
def __init__(self, model=None, batch_size=None, n_epochs=None, **kwargs):
"""
Creates a new BBBSampler object.
"""
super().__init__(**kwargs)
self.sampler_type = 'BBB'
self.model = model
self.batch_size = batch_size if batch_size is not None else self.train_set_size
self.n_epochs = n_epochs
def __repr__(self):
s = super().__repr__()
return s
def _fit(self, n_epochs=None, verbose=0, **kwargs):
""" Fits the model before sampling. """
n_epochs = n_epochs if n_epochs is not None else self.n_epochs
self.model.fit(self.train_x, self.train_y, batch_size=self.batch_size, nb_epoch=n_epochs,
verbose=verbose)
def _sample_predictive(self, test_x=None, return_stats=False, **kwargs):
""" Draws a new sample from the model. """
sample = self.model.predict(test_x, batch_size=self.batch_size)
stats = None
if return_stats:
stats = SampleStats(time=self._running_time())
return [sample], [stats]
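
    # Illustrative usage sketch (added; kept as comments because the
    # handling of train_x/train_y by the Sampler base class is an
    # assumption here):
    #
    #     model = BBBSampler.model_from_description(
    #         layers=[(1,), (50,), (1,)], noise_std=0.1, weights_std=1.0,
    #         batch_size=100, train_size=1000)
    #     sampler = BBBSampler(model=model, batch_size=100, n_epochs=50,
    #                          train_x=x, train_y=y)
    #     sampler._fit()
    #     samples, stats = sampler._sample_predictive(test_x,
    #                                                 return_stats=True)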
@classmethod
def model_from_description(cls, layers, noise_std, weights_std, batch_size, train_size):
""" Creates a BBB model from the specified parameters. """
n_batches = int(train_size / batch_size)
step = .01
class BBBLayer(Layer):
def __init__(self, output_dim, **kwargs):
self.output_dim = output_dim
super().__init__(**kwargs)
def build(self, input_shape):
input_dim = input_shape[1]
shape = [input_dim, self.output_dim]
eps_std = step
# weights
self.eps_w = K.random_normal([input_shape[0]] + shape, std=eps_std)
self.mu_w = K.variable(np.random.normal(0., 10. * step, size=shape), name='mu_w')
self.rho_w = K.variable(np.random.normal(0., 10. * step, size=shape), name='rho_w')
self.W = self.mu_w + self.eps_w * K.log(1.0 + K.exp(self.rho_w))
self.eps_b = K.random_normal([self.output_dim], std=eps_std)
self.mu_b = K.variable(np.random.normal(0., 10. * step, size=[self.output_dim]), name='mu_b')
self.rho_b = K.variable(np.random.normal(0., 10. * step, size=[self.output_dim]), name='rho_b')
self.b = self.mu_b + self.eps_b * K.log(1.0 + K.exp(self.rho_b))
self.trainable_weights = [self.mu_w, self.rho_w, self.mu_b, self.rho_b]
def call(self, x, mask=None):
return K.squeeze(K.batch_dot(K.expand_dims(x, dim=1), self.W), axis=1) + self.b
def get_output_shape_for(self, input_shape):
return (input_shape[0], self.output_dim)
def log_gaussian(x, mean, std):
return -K.log(std) - (x - mean) ** 2 / (2. * std ** 2)
def sigma_from_rho(rho):
return K.log(1. + K.exp(rho)) / step
def variational_objective(model, noise_std, weights_std, batch_size, nb_batches):
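            # Note (added): this is the minibatch estimate of the negative
            # ELBO from Blundell et al. (2015). The KL term KL(q(w)||p(w))
            # is approximated with a single Monte Carlo sample of the
            # weights, log q(w) - log p(w), and spread evenly over the
            # nb_batches minibatches, while the likelihood term uses only
            # the current batch:
            #   loss = [(log q(w) - log p(w)) / nb_batches
            #           - log p(y|x, w)] / batch_size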
def loss(y, fx):
log_pw = K.variable(0.)
log_qw = K.variable(0.)
for layer in model.layers:
if type(layer) is BBBLayer:
log_pw += K.sum(log_gaussian(layer.W, 0., weights_std))
log_pw += K.sum(log_gaussian(layer.b, 0., weights_std))
log_qw += K.sum(log_gaussian(layer.W, layer.mu_w, sigma_from_rho(layer.rho_w)))
log_qw += K.sum(log_gaussian(layer.b, layer.mu_b, sigma_from_rho(layer.rho_b)))
log_likelihood = K.sum(log_gaussian(y, fx, noise_std))
return K.sum((log_qw - log_pw) / nb_batches - log_likelihood) / batch_size
return loss
model = Sequential()
in_shape = [batch_size, layers[0][0]]
# input
model.add(BBBLayer(layers[1][0], batch_input_shape=in_shape))
model.add(Activation('relu'))
# hidden layers
for l in range(2, len(layers) - 1):
            model.add(BBBLayer(layers[l][0]))
model.add(Activation('relu'))
# output layer
model.add(BBBLayer(1))
loss = variational_objective(model, noise_std, weights_std, batch_size, n_batches)
model.compile(loss=loss, optimizer='adam', metrics=['accuracy'])
return model
| mit | -8,254,993,371,662,850,000 | 35.201493 | 111 | 0.556999 | false |
felixbr/nosql-rest-preprocessor | nosql_rest_preprocessor/models.py | 1 | 5131 | from __future__ import absolute_import, unicode_literals, print_function, division
from nosql_rest_preprocessor import exceptions
from nosql_rest_preprocessor.utils import non_mutating
class BaseModel(object):
required_attributes = set()
optional_attributes = None
immutable_attributes = set()
private_attributes = set()
sub_models = {}
resolved_attributes = {}
@classmethod
def validate(cls, obj):
cls._check_required_attributes(obj)
cls._check_allowed_attributes(obj)
# recurse for sub models
for attr, sub_model in cls.sub_models.items():
if attr in obj.keys():
sub_model.validate(obj[attr])
return obj
@classmethod
@non_mutating
def prepare_response(cls, obj):
# remove non-public attrs
for attr in cls.private_attributes:
obj.pop(attr, None)
# recurse for sub models
for attr, sub_model in cls.sub_models.items():
if attr in obj.keys():
obj[attr] = sub_model.prepare_response(obj[attr])
return obj
@classmethod
def merge_updated(cls, db_obj, new_obj):
cls.validate(new_obj)
merged_obj = {}
# check if previously present immutable attributes should be deleted
for key in cls.immutable_attributes:
if key in db_obj and key not in new_obj:
raise exceptions.ChangingImmutableAttributeError()
# copy attributes into merged_obj
for key, value in new_obj.items():
cls._check_immutable_attrs_on_update(key, value, db_obj)
if key in cls.resolved_attributes and isinstance(value, dict): # ignore resolved attributes in update
merged_obj[key] = db_obj[key]
else:
merged_obj[key] = value
# recurse for sub models
for attr, sub_model in cls.sub_models.items():
merged_obj[attr] = sub_model.merge_updated(db_obj[attr], new_obj[attr])
return merged_obj
@classmethod
def _check_immutable_attrs_on_update(cls, key, value, db_obj):
# check if immutable attributes should be changed
if key in cls.immutable_attributes:
if db_obj[key] != value:
raise exceptions.ChangingImmutableAttributeError()
@classmethod
def _check_required_attributes(cls, obj):
for attr in cls.required_attributes:
if isinstance(attr, tuple):
set_wanted = set(attr[1])
set_contained = set(obj.keys())
if attr[0] == 'one_of':
if len(set_wanted & set_contained) < 1:
raise exceptions.ValidationError()
elif attr[0] == 'either_of':
if len(set_wanted & set_contained) != 1:
raise exceptions.ValidationError()
else:
raise exceptions.ConfigurationError()
else:
if attr not in obj.keys():
raise exceptions.ValidationError()
@classmethod
def _check_allowed_attributes(cls, obj):
if cls.optional_attributes is not None:
required = cls._required_attributes()
for attr in obj.keys():
if attr in required:
continue
allowed = False
for opt_attr in cls.optional_attributes:
if attr == opt_attr:
allowed = True
break
elif isinstance(opt_attr, tuple):
if opt_attr[0] == 'all_of':
if attr in opt_attr[1]: # if one of these is in obj.keys()...
if not set(opt_attr[1]).issubset(obj.keys()): # ...all of them have to be there
raise exceptions.ValidationError()
else:
allowed = True
break
elif opt_attr[0] == 'either_of':
if attr in opt_attr[1]: # if one of these is in obj.keys()...
if next((key for key in opt_attr[1] if key != attr and key in obj.keys()), None): # ...no other key may be present in obj.keys()
raise exceptions.ValidationError()
else:
allowed = True
break
else:
raise exceptions.ConfigurationError()
if not allowed: # if we haven't found attr anywhere in cls.optional_attributes
raise exceptions.ValidationError()
@classmethod
def _required_attributes(cls):
required = set()
for attr in cls.required_attributes:
if isinstance(attr, tuple):
required = required | set(attr[1])
else:
required.add(attr)
return required | mit | 7,771,158,724,342,860,000 | 33.213333 | 161 | 0.524069 | false |
juliakreger/bifrost | playbooks/library/os_ironic_node.py | 1 | 12262 | #!/usr/bin/python
# coding: utf-8 -*-
# (c) 2015, Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
DOCUMENTATION = '''
---
module: os_ironic_node
short_description: Activate/Deactivate Bare Metal Resources from OpenStack
extends_documentation_fragment: openstack
description:
- Deploy to nodes controlled by Ironic.
options:
state:
description:
- Indicates desired state of the resource
choices: ['present', 'absent']
default: present
deploy:
description:
- Indicates if the resource should be deployed. Allows for deployment
logic to be disengaged and control of the node power or maintenance
state to be changed.
choices: ['true', 'false']
default: true
uuid:
description:
- globally unique identifier (UUID) to be given to the resource.
required: false
default: None
ironic_url:
description:
- If noauth mode is utilized, this is required to be set to the
endpoint URL for the Ironic API. Use with "auth" and "auth_type"
settings set to None.
required: false
default: None
config_drive:
description:
- A configdrive file or HTTP(S) URL that will be passed along to the
node.
required: false
default: None
instance_info:
description:
- Definition of the instance information which is used to deploy
the node. This information is only required when an instance is
set to present.
image_source:
description:
- An HTTP(S) URL where the image can be retrieved from.
image_checksum:
description:
- The checksum of image_source.
image_disk_format:
description:
- The type of image that has been requested to be deployed.
power:
description:
- A setting to allow power state to be asserted allowing nodes
that are not yet deployed to be powered on, and nodes that
are deployed to be powered off.
choices: ['present', 'absent']
default: present
maintenance:
description:
- A setting to allow the direct control if a node is in
maintenance mode.
required: false
default: false
maintenance_reason:
description:
- A string expression regarding the reason a node is in a
maintenance mode.
required: false
default: None
requirements: ["shade"]
'''
EXAMPLES = '''
# Activate a node by booting an image with a configdrive attached
os_ironic_node:
cloud: "openstack"
uuid: "d44666e1-35b3-4f6b-acb0-88ab7052da69"
state: present
power: present
deploy: True
maintenance: False
config_drive: "http://192.168.1.1/host-configdrive.iso"
instance_info:
image_source: "http://192.168.1.1/deploy_image.img"
image_checksum: "356a6b55ecc511a20c33c946c4e678af"
image_disk_format: "qcow"
delegate_to: localhost
'''
def _choose_id_value(module):
if module.params['uuid']:
return module.params['uuid']
if module.params['name']:
return module.params['name']
return None
# TODO(TheJulia): Change this over to use the machine patch method
# in shade once it is available.
def _prepare_instance_info_patch(instance_info):
patch = []
patch.append({
'op': 'replace',
'path': '/instance_info',
'value': instance_info
})
return patch
def _is_true(value):
true_values = [True, 'yes', 'Yes', 'True', 'true', 'present', 'on']
if value in true_values:
return True
return False
def _is_false(value):
false_values = [False, None, 'no', 'No', 'False', 'false', 'absent', 'off']
if value in false_values:
return True
return False
def _check_set_maintenance(module, cloud, node):
if _is_true(module.params['maintenance']):
if _is_false(node['maintenance']):
cloud.set_machine_maintenance_state(
node['uuid'],
True,
reason=module.params['maintenance_reason'])
module.exit_json(changed=True, msg="Node has been set into "
"maintenance mode")
else:
# User has requested maintenance state, node is already in the
# desired state, checking to see if the reason has changed.
if (str(node['maintenance_reason']) not in
str(module.params['maintenance_reason'])):
cloud.set_machine_maintenance_state(
node['uuid'],
True,
reason=module.params['maintenance_reason'])
module.exit_json(changed=True, msg="Node maintenance reason "
"updated, cannot take any "
"additional action.")
elif _is_false(module.params['maintenance']):
if node['maintenance'] is True:
cloud.remove_machine_from_maintenance(node['uuid'])
return True
else:
module.fail_json(msg="maintenance parameter was set but a valid "
"the value was not recognized.")
return False
def _check_set_power_state(module, cloud, node):
if 'power on' in str(node['power_state']):
if _is_false(module.params['power']):
# User has requested the node be powered off.
cloud.set_machine_power_off(node['uuid'])
module.exit_json(changed=True, msg="Power requested off")
if 'power off' in str(node['power_state']):
if (_is_false(module.params['power']) and
_is_false(module.params['state'])):
return False
if (_is_false(module.params['power']) and
_is_false(module.params['state'])):
module.exit_json(
changed=False,
msg="Power for node is %s, node must be reactivated "
"OR set to state absent"
)
# In the event the power has been toggled on and
# deployment has been requested, we need to skip this
# step.
if (_is_true(module.params['power']) and
_is_false(module.params['deploy'])):
# Node is powered down when it is not awaiting to be provisioned
cloud.set_machine_power_on(node['uuid'])
return True
# Default False if no action has been taken.
return False
def main():
argument_spec = openstack_full_argument_spec(
uuid=dict(required=False),
name=dict(required=False),
instance_info=dict(type='dict', required=False),
config_drive=dict(required=False),
ironic_url=dict(required=False),
state=dict(required=False, default='present'),
maintenance=dict(required=False),
maintenance_reason=dict(required=False),
power=dict(required=False, default='present'),
deploy=dict(required=False, default=True),
)
module_kwargs = openstack_module_kwargs()
module = AnsibleModule(argument_spec, **module_kwargs)
if not HAS_SHADE:
module.fail_json(msg='shade is required for this module')
if (module.params['auth_type'] in [None, 'None'] and
module.params['ironic_url'] is None):
module.fail_json(msg="Authentication appears disabled, Please "
"define an ironic_url parameter")
if (module.params['ironic_url'] and
module.params['auth_type'] in [None, 'None']):
module.params['auth'] = dict(
endpoint=module.params['ironic_url']
)
node_id = _choose_id_value(module)
if not node_id:
module.fail_json(msg="A uuid or name value must be defined "
"to use this module.")
try:
cloud = shade.operator_cloud(**module.params)
node = cloud.get_machine(node_id)
if node is None:
module.fail_json(msg="node not found")
uuid = node['uuid']
instance_info = module.params['instance_info']
changed = False
# User has reqeusted desired state to be in maintenance state.
if module.params['state'] is 'maintenance':
module.params['maintenance'] = True
if node['provision_state'] in [
'cleaning',
'deleting',
'wait call-back']:
module.fail_json(msg="Node is in %s state, cannot act upon the "
"request as the node is in a transition "
"state" % node['provision_state'])
# TODO(TheJulia) This is in-development code, that requires
# code in the shade library that is still in development.
if _check_set_maintenance(module, cloud, node):
if node['provision_state'] in 'active':
module.exit_json(changed=True,
result="Maintenance state changed")
changed = True
node = cloud.get_machine(node_id)
if _check_set_power_state(module, cloud, node):
changed = True
node = cloud.get_machine(node_id)
if _is_true(module.params['state']):
if _is_false(module.params['deploy']):
module.exit_json(
changed=changed,
result="User request has explicitly disabled "
"deployment logic"
)
if 'active' in node['provision_state']:
module.exit_json(
changed=changed,
result="Node already in an active state."
)
if instance_info is None:
module.fail_json(
changed=changed,
msg="When setting an instance to present, "
"instance_info is a required variable.")
# TODO(TheJulia): Update instance info, however info is
# deployment specific. Perhaps consider adding rebuild
# support, although there is a known desire to remove
# rebuild support from Ironic at some point in the future.
patch = _prepare_instance_info_patch(instance_info)
cloud.set_node_instance_info(uuid, patch)
cloud.validate_node(uuid)
cloud.activate_node(uuid, module.params['config_drive'])
# TODO(TheJulia): Add more error checking and a wait option.
# We will need to loop, or just add the logic to shade,
# although this could be a very long running process as
# baremetal deployments are not a "quick" task.
module.exit_json(changed=changed, result="node activated")
elif _is_false(module.params['state']):
if node['provision_state'] not in "deleted":
cloud.purge_node_instance_info(uuid)
cloud.deactivate_node(uuid)
module.exit_json(changed=True, result="deleted")
else:
module.exit_json(changed=False, result="node not found")
else:
module.fail_json(msg="State must be present, absent, "
"maintenance, off")
except shade.OpenStackCloudException as e:
module.fail_json(msg=e.message)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
main()
| apache-2.0 | -5,918,725,627,129,297,000 | 35.933735 | 79 | 0.592399 | false |
BetterWorks/pysaml2 | src/saml2/client.py | 1 | 42487 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009-2011 Umeå University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains classes and functions that a SAML2.0 Service Provider (SP) may use
to conclude its tasks.
"""
import saml2
import time
import base64
import urllib
from urlparse import urlparse
try:
from urlparse import parse_qs
except ImportError:
# Compatibility with Python <= 2.5
from cgi import parse_qs
from saml2.time_util import instant, not_on_or_after
from saml2.s_utils import signature
from saml2.s_utils import sid
from saml2.s_utils import do_attributes
from saml2.s_utils import decode_base64_and_inflate
#from saml2.s_utils import deflate_and_base64_encode
from saml2 import samlp, saml, class_name
from saml2 import VERSION
from saml2.sigver import pre_signature_part
from saml2.sigver import security_context, signed_instance_factory
from saml2.soap import SOAPClient
from saml2.binding import send_using_soap, http_redirect_message
from saml2.binding import http_post_message
from saml2.population import Population
from saml2.virtual_org import VirtualOrg
from saml2.config import config_factory
#from saml2.response import authn_response
from saml2.response import response_factory
from saml2.response import LogoutResponse
from saml2.response import AuthnResponse
from saml2.response import attribute_response
from saml2 import BINDING_HTTP_REDIRECT
from saml2 import BINDING_SOAP
from saml2 import BINDING_HTTP_POST
from saml2 import BINDING_PAOS
SSO_BINDING = saml2.BINDING_HTTP_REDIRECT
FORM_SPEC = """<form method="post" action="%s">
<input type="hidden" name="SAMLRequest" value="%s" />
<input type="hidden" name="RelayState" value="%s" />
<input type="submit" value="Submit" />
</form>"""
LAX = False
IDPDISC_POLICY = "urn:oasis:names:tc:SAML:profiles:SSO:idp-discovery-protocol:single"
class IdpUnspecified(Exception):
pass
class VerifyError(Exception):
pass
class LogoutError(Exception):
pass
class Saml2Client(object):
""" The basic pySAML2 service provider class """
def __init__(self, config=None,
identity_cache=None, state_cache=None,
virtual_organization=None, config_file="", logger=None):
"""
:param config: A saml2.config.Config instance
:param identity_cache: Where the class should store identity information
:param state_cache: Where the class should keep state information
:param virtual_organization: Which if any virtual organization this
SP belongs to
"""
self.users = Population(identity_cache)
# for server state storage
if state_cache is None:
self.state = {} # in memory storage
else:
self.state = state_cache
if config:
self.config = config
elif config_file:
self.config = config_factory("sp", config_file)
else:
raise Exception("Missing configuration")
self.metadata = self.config.metadata
if logger is None:
self.logger = self.config.setup_logger()
else:
self.logger = logger
# we copy the config.debug variable in an internal
# field for convenience and because we may need to
# change it during the tests
self.debug = self.config.debug
self.sec = security_context(self.config, log=self.logger,
debug=self.debug)
if virtual_organization:
self.vorg = VirtualOrg(self, virtual_organization)
else:
self.vorg = None
if "allow_unsolicited" in self.config:
self.allow_unsolicited = self.config.allow_unsolicited
else:
self.allow_unsolicited = False
if getattr(self.config, 'authn_requests_signed', 'false') == 'true':
self.authn_requests_signed_default = True
else:
self.authn_requests_signed_default = False
if getattr(self.config, 'logout_requests_signed', 'false') == 'true':
self.logout_requests_signed_default = True
else:
self.logout_requests_signed_default = False
#
# Private methods
#
def _relay_state(self, session_id):
vals = [session_id, str(int(time.time()))]
if self.config.secret is None:
vals.append(signature("", vals))
else:
vals.append(signature(self.config.secret, vals))
return "|".join(vals)
def _issuer(self, entityid=None):
""" Return an Issuer instance """
if entityid:
if isinstance(entityid, saml.Issuer):
return entityid
else:
return saml.Issuer(text=entityid,
format=saml.NAMEID_FORMAT_ENTITY)
else:
return saml.Issuer(text=self.config.entityid,
format=saml.NAMEID_FORMAT_ENTITY)
def _sso_location(self, entityid=None, binding=BINDING_HTTP_REDIRECT):
if entityid:
# verify that it's in the metadata
try:
return self.config.single_sign_on_services(entityid, binding)[0]
except IndexError:
if self.logger:
self.logger.info("_sso_location: %s, %s" % (entityid,
binding))
                raise IdpUnspecified("No IdP to send to given the premises")
# get the idp location from the configuration alternative the
# metadata. If there is more than one IdP in the configuration
# raise exception
eids = self.config.idps()
if len(eids) > 1:
raise IdpUnspecified("Too many IdPs to choose from: %s" % eids)
try:
loc = self.config.single_sign_on_services(eids.keys()[0],
binding)[0]
return loc
except IndexError:
            raise IdpUnspecified("No IdP to send to given the premises")
def _my_name(self):
return self.config.name
#
# Public API
#
def service_url(self, binding=BINDING_HTTP_POST):
_res = self.config.endpoint("assertion_consumer_service", binding)
if _res:
return _res[0]
else:
return None
def response(self, post, outstanding, log=None, decode=True,
asynchop=True):
""" Deal with an AuthnResponse or LogoutResponse
:param post: The reply as a dictionary
:param outstanding: A dictionary with session IDs as keys and
the original web request from the user before redirection
as values.
        :param log: where logging should go.
:param decode: Whether the response is Base64 encoded or not
        :param asynchop: Whether the response was returned over an
            asynchronous connection. SOAP, for instance, is synchronous.
:return: An response.AuthnResponse or response.LogoutResponse instance
"""
# If the request contains a samlResponse, try to validate it
try:
saml_response = post['SAMLResponse']
except KeyError:
return None
try:
_ = self.config.entityid
except KeyError:
raise Exception("Missing entity_id specification")
if log is None:
log = self.logger
reply_addr = self.service_url()
resp = None
if saml_response:
try:
resp = response_factory(saml_response, self.config,
reply_addr, outstanding, log,
debug=self.debug, decode=decode,
asynchop=asynchop,
allow_unsolicited=self.allow_unsolicited)
except Exception, exc:
if log:
log.error("%s" % exc)
if isinstance(exc, RuntimeError):
raise
return None
if log:
log.debug(">> %s", resp)
resp = resp.verify()
if resp is None:
log.error("Response could not be verified")
return
if isinstance(resp, AuthnResponse):
self.users.add_information_about_person(resp.session_info())
if log:
log.info("--- ADDED person info ----")
elif isinstance(resp, LogoutResponse):
self.handle_logout_response(resp, log)
elif log:
log.error("Response type not supported: %s" % saml2.class_name(resp))
return resp
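
    # Illustrative sketch (added): typical use of ``response`` in a web
    # handler, where ``post`` is the parsed POST body and ``outstanding``
    # maps the ids of requests this SP has issued to the URL the user
    # originally asked for:
    #
    #     resp = client.response(post, outstanding)
    #     if resp is not None and isinstance(resp, AuthnResponse):
    #         session_info = resp.session_info()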
def authn_request(self, query_id, destination, service_url, spentityid,
my_name="", vorg="", scoping=None, log=None, sign=None,
binding=saml2.BINDING_HTTP_POST,
nameid_format=saml.NAMEID_FORMAT_TRANSIENT):
""" Creates an authentication request.
:param query_id: The identifier for this request
:param destination: Where the request should be sent.
:param service_url: Where the reply should be sent.
:param spentityid: The entity identifier for this service.
:param my_name: The name of this service.
        :param vorg: The virtual organization the service belongs to.
:param scoping: The scope of the request
:param log: A service to which logs should be written
:param sign: Whether the request should be signed or not.
:param binding: The protocol to use for the Response !!
:return: <samlp:AuthnRequest> instance
"""
request = samlp.AuthnRequest(
id= query_id,
version= VERSION,
issue_instant= instant(),
assertion_consumer_service_url= service_url,
protocol_binding= binding
)
if destination:
request.destination = destination
if my_name:
request.provider_name = my_name
if scoping:
request.scoping = scoping
# Profile stuff, should be configurable
if nameid_format == saml.NAMEID_FORMAT_TRANSIENT:
name_id_policy = samlp.NameIDPolicy(allow_create="true",
format=nameid_format)
else:
name_id_policy = samlp.NameIDPolicy(format=nameid_format)
if vorg:
try:
name_id_policy.sp_name_qualifier = vorg
name_id_policy.format = saml.NAMEID_FORMAT_PERSISTENT
except KeyError:
pass
if sign is None:
sign = self.authn_requests_signed_default
if sign:
request.signature = pre_signature_part(request.id,
self.sec.my_cert, 1)
to_sign = [(class_name(request), request.id)]
else:
to_sign = []
request.name_id_policy = name_id_policy
request.issuer = self._issuer(spentityid)
if log is None:
log = self.logger
if log:
log.info("REQUEST: %s" % request)
return signed_instance_factory(request, self.sec, to_sign)
def authn(self, location, session_id, vorg="", scoping=None, log=None,
sign=None, binding=saml2.BINDING_HTTP_POST,
service_url_binding=None):
"""
Construct a Authentication Request
:param location: The URL of the destination
:param session_id: The ID of the session
:param vorg: The virtual organization if any that is involved
        :param scoping: How the request should be scoped; default is no scoping
:param log: A log function to use for logging
:param sign: If the request should be signed
:param binding: The binding to use, default = HTTP POST
:return: An AuthnRequest instance
"""
spentityid = self.config.entityid
if service_url_binding is None:
service_url = self.service_url(binding)
else:
service_url = self.service_url(service_url_binding)
if binding == BINDING_PAOS:
my_name = None
location = None
else:
my_name = self._my_name()
if log is None:
log = self.logger
if log:
log.info("spentityid: %s" % spentityid)
log.info("service_url: %s" % service_url)
log.info("my_name: %s" % my_name)
return self.authn_request(session_id, location, service_url,
spentityid, my_name, vorg, scoping, log,
sign, binding=binding)
def authenticate(self, entityid=None, relay_state="",
binding=saml2.BINDING_HTTP_REDIRECT,
log=None, vorg="", scoping=None, sign=None):
""" Makes an authentication request.
:param entityid: The entity ID of the IdP to send the request to
:param relay_state: To where the user should be returned after
            successful log in.
:param binding: Which binding to use for sending the request
:param log: Where to write log messages
:param vorg: The entity_id of the virtual organization I'm a member of
        :param scoping: For which IdPs this query is aimed.
:param sign: Whether the request should be signed or not.
:return: AuthnRequest response
"""
location = self._sso_location(entityid)
session_id = sid()
_req_str = "%s" % self.authn(location, session_id, vorg, scoping, log,
sign)
if log:
log.info("AuthNReq: %s" % _req_str)
if binding == saml2.BINDING_HTTP_POST:
# No valid ticket; Send a form to the client
# THIS IS NOT TO BE USED RIGHT NOW
if log:
log.info("HTTP POST")
(head, response) = http_post_message(_req_str, location,
relay_state)
elif binding == saml2.BINDING_HTTP_REDIRECT:
if log:
log.info("HTTP REDIRECT")
(head, _body) = http_redirect_message(_req_str, location,
relay_state)
response = head[0]
else:
raise Exception("Unkown binding type: %s" % binding)
return session_id, response
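
    # Illustrative sketch (added): starting a login from a view. With the
    # HTTP-Redirect binding the second return value is the ``Location``
    # header tuple for the redirect to the IdP:
    #
    #     session_id, head = client.authenticate(
    #         entityid=idp_entityid, relay_state=came_from,
    #         binding=BINDING_HTTP_REDIRECT)
    #     outstanding[session_id] = came_from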
def create_attribute_query(self, session_id, subject_id, destination,
issuer_id=None, attribute=None, sp_name_qualifier=None,
name_qualifier=None, nameid_format=None, sign=False):
""" Constructs an AttributeQuery
:param session_id: The identifier of the session
:param subject_id: The identifier of the subject
:param destination: To whom the query should be sent
:param issuer_id: Identifier of the issuer
:param attribute: A dictionary of attributes and values that is
            asked for. The keys are one of 4 variants:
3-tuple of name_format,name and friendly_name,
2-tuple of name_format and name,
1-tuple with name or
just the name as a string.
:param sp_name_qualifier: The unique identifier of the
service provider or affiliation of providers for whom the
identifier was generated.
:param name_qualifier: The unique identifier of the identity
provider that generated the identifier.
:param nameid_format: The format of the name ID
:param sign: Whether the query should be signed or not.
:return: An AttributeQuery instance
"""
subject = saml.Subject(
name_id = saml.NameID(
text=subject_id,
format=nameid_format,
sp_name_qualifier=sp_name_qualifier,
name_qualifier=name_qualifier),
)
query = samlp.AttributeQuery(
id=session_id,
version=VERSION,
issue_instant=instant(),
destination=destination,
issuer=self._issuer(issuer_id),
subject=subject,
)
if sign:
query.signature = pre_signature_part(query.id, self.sec.my_cert, 1)
if attribute:
query.attribute = do_attributes(attribute)
if sign:
signed_query = self.sec.sign_attribute_query_using_xmlsec(
"%s" % query)
return samlp.attribute_query_from_string(signed_query)
else:
return query
def attribute_query(self, subject_id, destination, issuer_id=None,
attribute=None, sp_name_qualifier=None, name_qualifier=None,
nameid_format=None, log=None, real_id=None):
""" Does a attribute request to an attribute authority, this is
by default done over SOAP. Other bindings could be used but not
supported right now.
:param subject_id: The identifier of the subject
:param destination: To whom the query should be sent
:param issuer_id: Who is sending this query
:param attribute: A dictionary of attributes and values that is asked for
:param sp_name_qualifier: The unique identifier of the
service provider or affiliation of providers for whom the
identifier was generated.
:param name_qualifier: The unique identifier of the identity
provider that generated the identifier.
:param nameid_format: The format of the name ID
:param log: Function to use for logging
:param real_id: The identifier which is the key to this entity in the
identity database
:return: The attributes returned
"""
if log is None:
log = self.logger
session_id = sid()
issuer = self._issuer(issuer_id)
request = self.create_attribute_query(session_id, subject_id,
destination, issuer, attribute, sp_name_qualifier,
name_qualifier, nameid_format=nameid_format)
if log:
log.info("Request, created: %s" % request)
soapclient = SOAPClient(destination, self.config.key_file,
self.config.cert_file,
ca_certs=self.config.ca_certs)
if log:
log.info("SOAP client initiated")
try:
response = soapclient.send(request)
except Exception, exc:
if log:
log.info("SoapClient exception: %s" % (exc,))
return None
if log:
log.info("SOAP request sent and got response: %s" % response)
# fil = open("response.xml", "w")
# fil.write(response)
# fil.close()
if response:
if log:
log.info("Verifying response")
try:
# synchronous operation
aresp = attribute_response(self.config, issuer, log=log)
except Exception, exc:
if log:
log.error("%s", (exc,))
return None
_resp = aresp.loads(response, False, soapclient.response).verify()
if _resp is None:
if log:
log.error("Didn't like the response")
return None
session_info = _resp.session_info()
if session_info:
if real_id is not None:
session_info["name_id"] = real_id
self.users.add_information_about_person(session_info)
if log:
log.info("session: %s" % session_info)
return session_info
else:
if log:
log.info("No response")
return None
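
    # Illustrative sketch (added): querying an attribute authority over
    # SOAP. The attribute name is hypothetical; the keys of ``attribute``
    # follow the variants described in ``create_attribute_query``:
    #
    #     session_info = client.attribute_query(
    #         subject_id, aa_url,
    #         attribute={"mail": None},
    #         nameid_format=saml.NAMEID_FORMAT_PERSISTENT)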
def construct_logout_request(self, subject_id, destination,
issuer_entity_id, reason=None, expire=None):
""" Constructs a LogoutRequest
:param subject_id: The identifier of the subject
:param destination:
:param issuer_entity_id: The entity ID of the IdP the request is
target at.
:param reason: An indication of the reason for the logout, in the
form of a URI reference.
:param expire: The time at which the request expires,
after which the recipient may discard the message.
:return: A LogoutRequest instance
"""
session_id = sid()
# create NameID from subject_id
name_id = saml.NameID(
text = self.users.get_entityid(subject_id, issuer_entity_id,
False))
request = samlp.LogoutRequest(
id=session_id,
version=VERSION,
issue_instant=instant(),
destination=destination,
issuer=self._issuer(),
name_id = name_id
)
if reason:
request.reason = reason
if expire:
request.not_on_or_after = expire
return request
def global_logout(self, subject_id, reason="", expire=None,
sign=None, log=None, return_to="/"):
""" More or less a layer of indirection :-/
Bootstrapping the whole thing by finding all the IdPs that should
be notified.
:param subject_id: The identifier of the subject that wants to be
logged out.
:param reason: Why the subject wants to log out
:param expire: The latest the log out should happen.
:param sign: Whether the request should be signed or not.
This also depends on what binding is used.
:param log: A logging function
:param return_to: Where to send the user after she has been
logged out.
:return: Depends on which binding is used:
If the HTTP redirect binding then a HTTP redirect,
            if the SOAP binding has been used, then just the result of that
conversation.
"""
if log is None:
log = self.logger
if log:
log.info("logout request for: %s" % subject_id)
# find out which IdPs/AAs I should notify
entity_ids = self.users.issuers_of_info(subject_id)
return self._logout(subject_id, entity_ids, reason, expire,
sign, log, return_to)
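
    # Illustrative sketch (added): initiating single logout for a user.
    # Depending on the binding the IdP supports, the result is either a
    # completed SOAP exchange or the headers/body of an HTTP redirect or
    # POST that still has to be sent to the user agent:
    #
    #     session_id, code, head, body = client.global_logout(
    #         subject_id, reason="urn:oasis:names:tc:SAML:2.0:logout:user")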
def _logout(self, subject_id, entity_ids, reason, expire,
sign=None, log=None, return_to="/"):
# check time
if not not_on_or_after(expire): # I've run out of time
# Do the local logout anyway
self.local_logout(subject_id)
return 0, "504 Gateway Timeout", [], []
# for all where I can use the SOAP binding, do those first
not_done = entity_ids[:]
response = False
if log is None:
log = self.logger
for entity_id in entity_ids:
response = False
for binding in [BINDING_SOAP, BINDING_HTTP_POST,
BINDING_HTTP_REDIRECT]:
destinations = self.config.single_logout_services(entity_id,
binding)
if not destinations:
continue
destination = destinations[0]
if log:
log.info("destination to provider: %s" % destination)
request = self.construct_logout_request(subject_id, destination,
entity_id, reason, expire)
to_sign = []
#if sign and binding != BINDING_HTTP_REDIRECT:
if sign is None:
sign = self.logout_requests_signed_default
if sign:
request.signature = pre_signature_part(request.id,
self.sec.my_cert, 1)
to_sign = [(class_name(request), request.id)]
if log:
log.info("REQUEST: %s" % request)
request = signed_instance_factory(request, self.sec, to_sign)
if binding == BINDING_SOAP:
response = send_using_soap(request, destination,
self.config.key_file,
self.config.cert_file,
log=log,
ca_certs=self.config.ca_certs)
if response:
if log:
log.info("Verifying response")
response = self.logout_response(response, log)
if response:
not_done.remove(entity_id)
if log:
log.info("OK response from %s" % destination)
else:
if log:
log.info(
"NOT OK response from %s" % destination)
else:
session_id = request.id
rstate = self._relay_state(session_id)
self.state[session_id] = {"entity_id": entity_id,
"operation": "SLO",
"entity_ids": entity_ids,
"subject_id": subject_id,
"reason": reason,
"not_on_of_after": expire,
"sign": sign,
"return_to": return_to}
if binding == BINDING_HTTP_POST:
(head, body) = http_post_message(request,
destination,
rstate)
code = "200 OK"
else:
(head, body) = http_redirect_message(request,
destination,
rstate)
code = "302 Found"
return session_id, code, head, body
if not_done:
# upstream should try later
raise LogoutError("%s" % (entity_ids,))
return 0, "", [], response
def local_logout(self, subject_id):
""" Remove the user from the cache, equals local logout
:param subject_id: The identifier of the subject
"""
self.users.remove_person(subject_id)
return True
def handle_logout_response(self, response, log):
""" handles a Logout response
:param response: A response.Response instance
:param log: A logging function
:return: 4-tuple of (session_id of the last sent logout request,
response message, response headers and message)
"""
if log is None:
log = self.logger
if log:
log.info("state: %s" % (self.state,))
status = self.state[response.in_response_to]
if log:
log.info("status: %s" % (status,))
issuer = response.issuer()
if log:
log.info("issuer: %s" % issuer)
del self.state[response.in_response_to]
if status["entity_ids"] == [issuer]: # done
self.local_logout(status["subject_id"])
return 0, "200 Ok", [("Content-type","text/html")], []
else:
status["entity_ids"].remove(issuer)
return self._logout(status["subject_id"],
status["entity_ids"],
status["reason"],
status["not_on_or_after"],
status["sign"],
log, )
def logout_response(self, xmlstr, log=None, binding=BINDING_SOAP):
""" Deal with a LogoutResponse
:param xmlstr: The response as a xml string
:param log: logging function
:param binding: What type of binding this message came through.
:return: None if the reply doesn't contain a valid SAML LogoutResponse,
            otherwise the response if the logout was successful and None if it
was not.
"""
response = None
if log is None:
log = self.logger
if xmlstr:
try:
# expected return address
return_addr = self.config.endpoint("single_logout_service",
binding=binding)[0]
except Exception:
if log:
log.info("Not supposed to handle this!")
return None
try:
response = LogoutResponse(self.sec, return_addr, debug=self.debug,
log=log)
except Exception, exc:
if log:
log.info("%s" % exc)
return None
if binding == BINDING_HTTP_REDIRECT:
xmlstr = decode_base64_and_inflate(xmlstr)
elif binding == BINDING_HTTP_POST:
xmlstr = base64.b64decode(xmlstr)
if log:
log.debug("XMLSTR: %s" % xmlstr)
response = response.loads(xmlstr, False)
if response:
response = response.verify()
if not response:
return None
if log:
log.debug(response)
return self.handle_logout_response(response, log)
return response
def http_redirect_logout_request(self, get, subject_id, log=None):
""" Deal with a LogoutRequest received through HTTP redirect
:param get: The request as a dictionary
:param subject_id: the id of the current logged user
:return: a tuple with a list of header tuples (presently only location)
and a status which will be True in case of success or False
otherwise.
"""
headers = []
success = False
if log is None:
log = self.logger
try:
saml_request = get['SAMLRequest']
except KeyError:
return None
if saml_request:
xml = decode_base64_and_inflate(saml_request)
request = samlp.logout_request_from_string(xml)
if log:
log.debug(request)
if request.name_id.text == subject_id:
status = samlp.STATUS_SUCCESS
success = self.local_logout(subject_id)
else:
status = samlp.STATUS_REQUEST_DENIED
            response, destination = self.make_logout_response(
request.issuer.text,
request.id,
status)
if log:
log.info("RESPONSE: {0:>s}".format(response))
if 'RelayState' in get:
rstate = get['RelayState']
else:
rstate = ""
(headers, _body) = http_redirect_message(str(response),
destination,
rstate, 'SAMLResponse')
return headers, success
def logout_request(self, request, subject_id, log=None,
binding=BINDING_HTTP_REDIRECT):
""" Deal with a LogoutRequest
:param request: The request. The format depends on which binding is
used.
:param subject_id: the id of the current logged user
:return: What is returned also depends on which binding is used.
"""
if log is None:
log = self.logger
if binding == BINDING_HTTP_REDIRECT:
return self.http_redirect_logout_request(request, subject_id, log)
def make_logout_response(self, idp_entity_id, request_id,
status_code, binding=BINDING_HTTP_REDIRECT):
""" Constructs a LogoutResponse
:param idp_entity_id: The entityid of the IdP that want to do the
logout
:param request_id: The Id of the request we are replying to
:param status_code: The status code of the response
:param binding: The type of binding that will be used for the response
:return: A LogoutResponse instance
"""
destination = self.config.single_logout_services(idp_entity_id, binding)[0]
status = samlp.Status(
status_code=samlp.StatusCode(value=status_code))
response = samlp.LogoutResponse(
id=sid(),
version=VERSION,
issue_instant=instant(),
destination=destination,
issuer=self._issuer(),
in_response_to=request_id,
status=status,
)
return response, destination
def add_vo_information_about_user(self, subject_id):
""" Add information to the knowledge I have about the user. This is
for Virtual organizations.
:param subject_id: The subject identifier
:return: A possibly extended knowledge.
"""
ava = {}
try:
(ava, _) = self.users.get_identity(subject_id)
except KeyError:
pass
# is this a Virtual Organization situation
if self.vorg:
if self.vorg.do_aggregation(subject_id):
# Get the extended identity
ava = self.users.get_identity(subject_id)[0]
return ava
#noinspection PyUnusedLocal
def is_session_valid(self, _session_id):
""" Place holder. Supposed to check if the session is still valid.
"""
return True
def authz_decision_query_using_assertion(self, entityid, assertion,
action=None,
resource=None, subject=None,
binding=saml2.BINDING_HTTP_REDIRECT,
log=None, sign=False):
""" Makes an authz decision query.
:param entityid: The entity ID of the IdP to send the request to
:param assertion:
:param action:
:param resource:
:param subject:
:param binding: Which binding to use for sending the request
:param log: Where to write log messages
:param sign: Whether the request should be signed or not.
:return: AuthzDecisionQuery instance
"""
if action:
if isinstance(action, basestring):
_action = [saml.Action(text=action)]
else:
_action = [saml.Action(text=a) for a in action]
else:
_action = None
return self.authz_decision_query(entityid,
_action,
saml.Evidence(assertion=assertion),
resource, subject,
binding, log, sign)
#noinspection PyUnusedLocal
def authz_decision_query(self, entityid, action,
evidence=None, resource=None, subject=None,
binding=saml2.BINDING_HTTP_REDIRECT,
log=None, sign=None):
""" Creates an authz decision query.
:param entityid: The entity ID of the IdP to send the request to
:param action: The action you want to perform (has to be at least one)
:param evidence: Why you should be able to perform the action
:param resource: The resource you want to perform the action on
:param subject: Who wants to do the thing
:param binding: Which binding to use for sending the request
:param log: Where to write log messages
:param sign: Whether the request should be signed or not.
:return: AuthzDecisionQuery instance
"""
spentityid = self._issuer()
service_url = self.service_url()
my_name = self._my_name()
if log is None:
log = self.logger
if log:
log.info("spentityid: %s" % spentityid)
log.info("service_url: %s" % service_url)
log.info("my_name: %s" % my_name)
# authen_req = self.authn_request(session_id, location,
# service_url, spentityid, my_name, vorg,
# scoping, log, sign)
        request = samlp.AuthzDecisionQuery(action=action,
                                           evidence=evidence,
                                           resource=resource,
                                           subject=subject,
                                           issuer=spentityid,
                                           id=sid(),
                                           issue_instant=instant(),
                                           version=VERSION,
                                           destination=entityid)
return request
#noinspection PyUnusedLocal
def authz_decision_query_response(self, response, log=None):
""" Verify that the response is OK """
pass
#noinspection PyUnusedLocal
def do_authz_decision_query(self, entityid, assertion=None,
log=None, sign=False):
authz_decision_query = self.authz_decision_query(entityid, assertion)
for destination in self.config.authz_services(entityid):
to_sign = []
            if sign:
authz_decision_query.signature = pre_signature_part(
authz_decision_query.id,
self.sec.my_cert, 1)
to_sign.append((class_name(authz_decision_query),
authz_decision_query.id))
authz_decision_query = signed_instance_factory(authz_decision_query,
self.sec, to_sign)
response = send_using_soap(authz_decision_query, destination,
self.config.key_file,
self.config.cert_file,
log=log,
ca_certs=self.config.ca_certs)
if response:
if log:
log.info("Verifying response")
response = self.authz_decision_query_response(response, log)
if response:
#not_done.remove(entity_id)
if log:
log.info("OK response from %s" % destination)
return response
else:
if log:
log.info("NOT OK response from %s" % destination)
return None
def request_to_discovery_service(self, disc_url, return_url="",
policy="", returnIDParam="",
is_passive=False ):
"""
        Creates the HTTP redirect URL needed to send the user to the
discovery service.
:param disc_url: The URL of the discovery service
:param return_url: The discovery service MUST redirect the user agent
to this location in response to this request
:param policy: A parameter name used to indicate the desired behavior
controlling the processing of the discovery service
:param returnIDParam: A parameter name used to return the unique
identifier of the selected identity provider to the original
requester.
:param is_passive: A boolean value of "true" or "false" that controls
whether the discovery service is allowed to visibly interact with
the user agent.
:return: A URL
"""
pdir = {"entityID": self.config.entityid}
if return_url:
pdir["return"] = return_url
if policy and policy != IDPDISC_POLICY:
pdir["policy"] = policy
if returnIDParam:
pdir["returnIDParam"] = returnIDParam
        if is_passive:
            pdir["isPassive"] = "true"
params = urllib.urlencode(pdir)
return "%s?%s" % (disc_url, params)
def get_idp_from_discovery_service(self, query="", url="", returnIDParam=""):
"""
        Deal with the response url from a Discovery Service
:param url: the url the user was redirected back to
:param returnIDParam: This is where the identifier of the IdP is
            placed if it was specified in the query, otherwise in 'entityID'
:return: The IdP identifier or "" if none was given
"""
if url:
part = urlparse(url)
qsd = parse_qs(part[4])
elif query:
qsd = parse_qs(query)
else:
qsd = {}
if returnIDParam:
try:
return qsd[returnIDParam][0]
except KeyError:
return ""
else:
try:
return qsd["entityID"][0]
except KeyError:
return ""
| bsd-2-clause | 4,292,489,896,724,211,000 | 36.235758 | 85 | 0.535612 | false |
math-a3k/django-ai | tests/test_models/migrations/0011_add_is_inferred_and_minor_tweaks.py | 1 | 2196 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-12-20 15:34
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('test_models', '0010_myunsupervisedlearningtechnique'),
]
operations = [
migrations.AddField(
model_name='mystatisticalmodel',
name='is_inferred',
field=models.BooleanField(
default=False, verbose_name='Is Inferred?'),
),
migrations.AddField(
model_name='mysupervisedlearningtechnique',
name='is_inferred',
field=models.BooleanField(
default=False, verbose_name='Is Inferred?'),
),
migrations.AddField(
model_name='myunsupervisedlearningtechnique',
name='is_inferred',
field=models.BooleanField(
default=False, verbose_name='Is Inferred?'),
),
migrations.AlterField(
model_name='mystatisticalmodel',
name='sm_type',
field=models.SmallIntegerField(blank=True, choices=[
(0, 'General / System'),
(1, 'Classification'),
(2, 'Regression')],
default=0, null=True,
verbose_name='Statistical Technique Type'),
),
migrations.AlterField(
model_name='mysupervisedlearningtechnique',
name='sm_type',
field=models.SmallIntegerField(blank=True, choices=[
(0, 'General / System'),
(1, 'Classification'),
(2, 'Regression')],
default=0, null=True,
verbose_name='Statistical Technique Type'),
),
migrations.AlterField(
model_name='myunsupervisedlearningtechnique',
name='sm_type',
field=models.SmallIntegerField(blank=True, choices=[
(0, 'General / System'),
(1, 'Classification'),
(2, 'Regression')],
default=0, null=True,
verbose_name='Statistical Technique Type'),
),
]
| lgpl-3.0 | -7,274,611,177,119,370 | 33.857143 | 64 | 0.536885 | false |
Makeystreet/makeystreet | woot/apps/catalog/views/review.py | 1 | 5983 | from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import render
from django.utils import timezone
from woot.apps.catalog.forms import CreateProductReviewForm,\
CreateShopReviewForm, CreateSpaceReviewForm
from woot.apps.catalog.models.core import Product, Shop, Space, NewProduct
from woot.apps.catalog.models.review import ProductReview, ShopReview,\
SpaceReview
from .helper import get_user_details_json
static_blob = settings.STATIC_BLOB
def all_reviews(request):
product_reviews = ProductReview.objects.all()
shop_reviews = ShopReview.objects.all()
space_reviews = SpaceReview.objects.all()
context = {
'static_blob': static_blob,
'user_details': get_user_details_json(request),
'product_reviews': product_reviews,
'shop_reviews': shop_reviews,
'space_reviews': space_reviews,
}
return render(request, 'catalog/all_reviews.html', context)
def store_review(request, review_id):
try:
user_details = get_user_details_json(request)
review = ShopReview.objects.get(id=review_id)
review.upvotes = review.voteshopreview_set.filter(vote=True)
context = {
'static_blob': static_blob,
'user_details': user_details,
'review': review,
}
return render(request, 'catalog/store_review.html', context)
except ShopReview.DoesNotExist:
raise Http404
def product_review(request, review_id):
try:
user_details = get_user_details_json(request)
review = ProductReview.objects.get(id=review_id)
review.upvotes = review.voteproductreview_set.filter(vote=True)
context = {
'static_blob': static_blob,
'user_details': user_details,
'review': review,
}
return render(request, 'catalog/product_review.html', context)
except ProductReview.DoesNotExist:
raise Http404
def space_review(request, review_id):
try:
user_details = get_user_details_json(request)
review = SpaceReview.objects.get(id=review_id)
review.upvotes = review.votespacereview_set.filter(vote=True)
context = {
'static_blob': static_blob,
'user_details': user_details,
'review': review,
}
return render(request, 'catalog/space_review.html', context)
except SpaceReview.DoesNotExist:
raise Http404
def create_review(request):
if request.method == "POST":
if request.POST.get('val_type', '') == 'PART':
form = CreateProductReviewForm(request.POST)
if form.is_valid():
r = ProductReview()
r.title = form.cleaned_data['val_title']
r.review = form.cleaned_data['val_review']
r.user = request.user
r.rating = form.cleaned_data['val_rating']
r.added_time = timezone.now()
product_data_split = form.cleaned_data['val_part'].split('_')
product_type = product_data_split[0]
product_id = int(product_data_split[1])
if product_type == 'old':
product = Product.objects.get(id=product_id)
r.product = product
elif product_type == 'new':
product = NewProduct.objects.get(id=product_id)
r.product = product
r.save()
return HttpResponseRedirect(reverse('catalog:all_reviews'))
else:
print(form.errors)
elif request.POST.get('val_type', '') == 'SHOP':
form = CreateShopReviewForm(request.POST)
if form.is_valid():
r = ShopReview()
r.title = form.cleaned_data['val_title']
r.review = form.cleaned_data['val_review']
r.user = request.user
r.rating = form.cleaned_data['val_rating']
r.added_time = timezone.now()
shop_data_split = form.cleaned_data['val_shop'].split('_')
shop_type = shop_data_split[0]
shop_id = int(shop_data_split[1])
if shop_type == 'old':
shop = Shop.objects.get(id=shop_id)
r.shop = shop
elif shop_type == 'new':
shop = NewProduct.objects.get(id=shop_id)
r.shop = shop
r.save()
return HttpResponseRedirect(reverse('catalog:all_reviews'))
else:
print(form.errors)
elif request.POST.get('val_type', '') == 'SPACE':
form = CreateSpaceReviewForm(request.POST)
if form.is_valid():
r = SpaceReview()
r.title = form.cleaned_data['val_title']
r.review = form.cleaned_data['val_review']
r.user = request.user
r.rating = form.cleaned_data['val_rating']
r.added_time = timezone.now()
space_data_split = form.cleaned_data['val_space'].split('_')
space_type = space_data_split[0]
space_id = int(space_data_split[1])
if space_type == 'old':
space = Space.objects.get(id=space_id)
r.space = space
elif space_type == 'new':
space = NewProduct.objects.get(id=space_id)
r.space = space
r.save()
return HttpResponseRedirect(reverse('catalog:all_reviews'))
else:
print(form.errors)
else:
pass
context = {
'static_blob': static_blob,
'user_details': get_user_details_json(request),
}
return render(request, 'catalog/create_product_review.html', context)
| apache-2.0 | 661,690,713,215,756,200 | 33.188571 | 77 | 0.563095 | false |
semplea/characters-meta | python/alchemy/examples/alchemy_vision_v1.py | 1 | 1466 | import json
from os.path import join, dirname
from watson_developer_cloud import AlchemyVisionV1
alchemy_vision = AlchemyVisionV1(api_key='c851400276c1acbd020210847f8677e6d1577c26')
# Face recognition
with open(join(dirname(__file__), '../resources/face.jpg'), 'rb') as image_file:
print(json.dumps(alchemy_vision.recognize_faces(image_file, knowledge_graph=True), indent=2))
face_url = 'https://upload.wikimedia.org/wikipedia/commons/9/9d/Barack_Obama.jpg'
print(json.dumps(alchemy_vision.recognize_faces(image_url=face_url, knowledge_graph=True), indent=2))
# Image tagging
with open(join(dirname(__file__), '../resources/test.jpg'), 'rb') as image_file:
print(json.dumps(alchemy_vision.get_image_keywords(image_file, knowledge_graph=True,
force_show_all=True), indent=2))
# Text recognition
with open(join(dirname(__file__), '../resources/text.png'), 'rb') as image_file:
print(json.dumps(alchemy_vision.get_image_scene_text(image_file), indent=2))
print(json.dumps(alchemy_vision.get_image_keywords(
image_url='https://upload.wikimedia.org/wikipedia/commons/8/81/Morris-Chair-Ironwood.jpg'), indent=2))
# Image link extraction
print(json.dumps(alchemy_vision.get_image_links(url='http://www.zillow.com/'), indent=2))
with open(join(dirname(__file__), '../resources/example.html'), 'r') as webpage:
print(json.dumps(alchemy_vision.get_image_links(html=webpage.read()), indent=2))
| mit | 1,583,828,267,179,628,300 | 47.866667 | 106 | 0.71487 | false |
mmclenna/engine | sky/tools/create_ios_sdk.py | 1 | 1820 | #!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import subprocess
import shutil
import sys
import os
def main():
parser = argparse.ArgumentParser(description='Creates the Flutter iOS SDK')
parser.add_argument('--dst', type=str, required=True)
parser.add_argument('--device-out-dir', type=str, required=True)
parser.add_argument('--simulator-out-dir', type=str, required=True)
args = parser.parse_args()
device_sdk = os.path.join(args.device_out_dir, 'Flutter')
simulator_sdk = os.path.join(args.simulator_out_dir, 'Flutter')
flutter_framework_binary = 'Flutter.framework/Flutter'
device_dylib = os.path.join(args.device_out_dir,
flutter_framework_binary)
simulator_dylib = os.path.join(args.simulator_out_dir,
flutter_framework_binary)
if not os.path.isdir(device_sdk):
print 'Cannot find iOS device SDK at', device_sdk
return 1
if not os.path.isdir(simulator_sdk):
print 'Cannot find iOS simulator SDK at', simulator_sdk
return 1
if not os.path.isfile(device_dylib):
print 'Cannot find iOS device dylib at', device_dylib
return 1
if not os.path.isfile(simulator_dylib):
    print 'Cannot find iOS simulator dylib at', simulator_dylib
return 1
shutil.rmtree(args.dst, True)
shutil.copytree(device_sdk, args.dst)
sim_tools = 'Tools/iphonesimulator'
shutil.copytree(os.path.join(simulator_sdk, sim_tools),
os.path.join(args.dst, sim_tools))
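  # Merge the device (ARM) and simulator (x86) Flutter dylibs into a single
  # universal ("fat") binary with lipo.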
subprocess.call([
'lipo',
device_dylib,
simulator_dylib,
'-create',
'-output',
os.path.join(args.dst, 'Tools/common/Flutter.framework/Flutter')
])
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | 4,522,966,620,628,158,000 | 26.575758 | 77 | 0.697802 | false |
gpg/gpgme | lang/python/tests/final.py | 1 | 1048 | #!/usr/bin/env python
# Copyright (C) 2016 g10 Code GmbH
#
# This file is part of GPGME.
#
# GPGME is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# GPGME is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General
# Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, see <https://www.gnu.org/licenses/>.
from __future__ import absolute_import, print_function, unicode_literals
import os
import subprocess
import support
_ = support # to appease pyflakes.
del absolute_import, print_function, unicode_literals
subprocess.check_call([
os.path.join(os.getenv('top_srcdir'), "tests", "start-stop-agent"),
"--stop"
])
| lgpl-2.1 | 4,358,481,701,361,559,000 | 31.75 | 79 | 0.745229 | false |
calinerd/AWS | LAMBDA/Lambda_AutoUpdate_SecurityGroup_to_Allow_inbound_All_CloudFront_IPs_443.py | 1 | 6268 | '''
Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0/
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
'''
import boto3
import hashlib
import json
import urllib2
# Name of the service, as seen in the ip-groups.json file, to extract information for
SERVICE = "CLOUDFRONT"
# Ports your application uses that need inbound permissions from the service for
INGRESS_PORTS = [ 443 ]
# Tags which identify the security groups you want to update
SECURITY_GROUP_TAGS = { 'Name': 'SG_Allow_CF_IPs_443', 'AutoUpdate': 'true' }
def lambda_handler(event, context):
print("Received event: " + json.dumps(event, indent=2))
message = json.loads(event['Records'][0]['Sns']['Message'])
# Load the ip ranges from the url
ip_ranges = json.loads(get_ip_groups_json(message['url'], message['md5']))
# extract the service ranges
cf_ranges = get_ranges_for_service(ip_ranges, SERVICE)
# update the security groups
result = update_security_groups(cf_ranges)
return result
def get_ip_groups_json(url, expected_hash):
print("Updating from " + url)
response = urllib2.urlopen(url)
ip_json = response.read()
m = hashlib.md5()
m.update(ip_json)
hash = m.hexdigest()
if hash != expected_hash:
raise Exception('MD5 Mismatch: got ' + hash + ' expected ' + expected_hash)
return ip_json
def get_ranges_for_service(ranges, service):
service_ranges = list()
for prefix in ranges['prefixes']:
if prefix['service'] == service:
print('Found ' + service + ' range: ' + prefix['ip_prefix'])
service_ranges.append(prefix['ip_prefix'])
return service_ranges
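# For reference, each entry of the 'prefixes' list in the published
# ip-ranges.json file has this shape (illustrative prefix; the real file
# changes over time):
#
#   {"ip_prefix": "13.32.0.0/15", "region": "GLOBAL", "service": "CLOUDFRONT"}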
def update_security_groups(new_ranges):
client = boto3.client('ec2')
groups = get_security_groups_for_update(client)
print ('Found ' + str(len(groups)) + ' SecurityGroups to update')
result = list()
updated = 0
for group in groups:
if update_security_group(client, group, new_ranges):
updated += 1
result.append('Updated ' + group['GroupId'])
result.append('Updated ' + str(updated) + ' of ' + str(len(groups)) + ' SecurityGroups')
return result
def update_security_group(client, group, new_ranges):
added = 0
removed = 0
if len(group['IpPermissions']) > 0:
for permission in group['IpPermissions']:
if INGRESS_PORTS.count(permission['ToPort']) > 0:
old_prefixes = list()
to_revoke = list()
to_add = list()
for range in permission['IpRanges']:
cidr = range['CidrIp']
old_prefixes.append(cidr)
if new_ranges.count(cidr) == 0:
to_revoke.append(range)
print(group['GroupId'] + ": Revoking " + cidr + ":" + str(permission['ToPort']))
for range in new_ranges:
if old_prefixes.count(range) == 0:
to_add.append({ 'CidrIp': range })
print(group['GroupId'] + ": Adding " + range + ":" + str(permission['ToPort']))
removed += revoke_permissions(client, group, permission, to_revoke)
added += add_permissions(client, group, permission, to_add)
else:
for port in INGRESS_PORTS:
to_add = list()
for range in new_ranges:
to_add.append({ 'CidrIp': range })
print(group['GroupId'] + ": Adding " + range + ":" + str(port))
permission = { 'ToPort': port, 'FromPort': port, 'IpProtocol': 'tcp'}
added += add_permissions(client, group, permission, to_add)
print (group['GroupId'] + ": Added " + str(added) + ", Revoked " + str(removed))
return (added > 0 or removed > 0)
def revoke_permissions(client, group, permission, to_revoke):
if len(to_revoke) > 0:
revoke_params = {
'ToPort': permission['ToPort'],
'FromPort': permission['FromPort'],
'IpRanges': to_revoke,
'IpProtocol': permission['IpProtocol']
}
client.revoke_security_group_ingress(GroupId=group['GroupId'], IpPermissions=[revoke_params])
return len(to_revoke)
def add_permissions(client, group, permission, to_add):
if len(to_add) > 0:
add_params = {
'ToPort': permission['ToPort'],
'FromPort': permission['FromPort'],
'IpRanges': to_add,
'IpProtocol': permission['IpProtocol']
}
client.authorize_security_group_ingress(GroupId=group['GroupId'], IpPermissions=[add_params])
return len(to_add)
def get_security_groups_for_update(client):
filters = list();
for key, value in SECURITY_GROUP_TAGS.iteritems():
filters.extend(
[
{ 'Name': "tag-key", 'Values': [ key ] },
{ 'Name': "tag-value", 'Values': [ value ] }
]
)
response = client.describe_security_groups(Filters=filters)
return response['SecurityGroups']
'''
Sample Event From SNS:
{
"Records": [
{
"EventVersion": "1.0",
"EventSubscriptionArn": "arn:aws:sns:EXAMPLE",
"EventSource": "aws:sns",
"Sns": {
"SignatureVersion": "1",
"Timestamp": "1970-01-01T00:00:00.000Z",
"Signature": "EXAMPLE",
"SigningCertUrl": "EXAMPLE",
"MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e",
"Message": "{\"create-time\": \"yyyy-mm-ddThh:mm:ss+00:00\", \"synctoken\": \"0123456789\", \"md5\": \"03a8199d0c03ddfec0e542f8bf650ee7\", \"url\": \"https://ip-ranges.amazonaws.com/ip-ranges.json\"}",
"Type": "Notification",
"UnsubscribeUrl": "EXAMPLE",
"TopicArn": "arn:aws:sns:EXAMPLE",
"Subject": "TestInvoke"
}
}
]
}
''' | unlicense | 4,925,142,042,631,168,000 | 34.619318 | 266 | 0.596522 | false |
Xdynix/PixivPixie | bundle_cli.py | 1 | 2691 | import os
import subprocess
import sys
from pixiv_pixie.cli import main as cli_main, NAME
BINARY_PATH = 'lib'
DATA_PATH = 'data'
def is_packaged():
# Return true if executing from packaged file
return hasattr(sys, 'frozen')
def get_path(path, package_prefix=DATA_PATH):
if os.path.isabs(path) or not is_packaged():
return path
else:
return os.path.join(
sys.prefix,
os.path.join(package_prefix, path)
)
def build(
script, name=None, one_file=False, no_console=False, icon=None,
binary_path=BINARY_PATH, addition_binary=None,
data_path=DATA_PATH, addition_data=None,
hidden_import=None,
distpath=None, workpath=None, specpath=None,
addition_args=None,
):
args = []
if name is not None:
args.extend(('-n', name))
if one_file:
args.append('-F')
if no_console:
args.append('-w')
if icon is not None:
args.extend(('-i', icon))
if addition_args is None:
addition_args = []
def add_resource(add_type, path, resources):
for resource in resources:
args.append('--add-{}'.format(add_type))
if isinstance(resource, tuple) or isinstance(resource, list):
src = resource[0]
dest = resource[1]
args.append(src + os.path.pathsep + os.path.join(path, dest))
else:
args.append(
resource + os.path.pathsep + os.path.join(path, resource),
)
if addition_binary is not None:
add_resource(
add_type='binary',
path=binary_path,
resources=addition_binary,
)
if addition_data is not None:
add_resource(
add_type='data',
path=data_path,
resources=addition_data,
)
if hidden_import is not None:
for m in hidden_import:
args.extend(('--hidden-import', m))
if distpath is not None:
args.extend(('--distpath', distpath))
if workpath is not None:
args.extend(('--workpath', workpath))
if specpath is not None:
args.extend(('--specpath', specpath))
subprocess.call(['pyinstaller'] + args + addition_args + [script])
def main():
if not is_packaged():
build(
__file__,
name=NAME,
one_file=True,
addition_binary=[
('freeimage-3.15.1-win64.dll', '')
],
addition_args=[
'-y',
'--clean',
],
)
else:
cli_main()
if __name__ == '__main__':
main()
| apache-2.0 | -7,565,490,886,109,477,000 | 24.628571 | 78 | 0.531401 | false |
tortugueta/multilayers | examples/radcenter_distribution.py | 1 | 8087 | # -*- coding: utf-8 -*-
"""
Name : radcenter_distribution
Author : Joan Juvert <[email protected]>
Version : 1.0
Description : This script calculates the influence of the distribution of
: radiative centers in the active layer on the observed
: spectrum.
Copyright 2012 Joan Juvert
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import multilayers as ml
import numpy as np
import bphysics as bp
import scipy.integrate as integ
import argparse as ap
import sys
import pdb
# Argument parsing
parser = ap.ArgumentParser(
description = "This script calculates the effect of the " + \
"distribution of radiative centers in the active layer on " + \
"the modificator to the spectrum. The observation angle is " + \
"a fixed parameter. Optionally, the output can be plotted " + \
"and output to the standard output or to a file. The matrix " + \
"containing the values of F(z, lambda) can be saved to a file " + \
"and recovered in a following run of the program to avoid " + \
"recalculating it in case we want to calculate the effect of " + \
"different distributions on the same system.")
parser.add_argument(
"--graph",
help = "Plot the results",
action = "store_true")
parser.add_argument(
"-o",
"--output",
help = "Dump the results to a file")
parser.add_argument(
"-s",
"--savematrix",
help = "Save the matrix with the F(z, lambda) values to a file")
parser.add_argument(
"-l",
"--loadmatrix",
help = "Load the matrix with the F(z, lambda) values from a file")
args = parser.parse_args()
# Load the depth distribution of radiative centers. Note that the origin
# and units of z must be the same as in the multilayer.The distribution
# should be normalized to 1.
print("Loading the distribution...")
path = "/home/joan/Dropbox/CNM/projectes/simulations_report/figures/" + \
"rcdistributions/"
distribution = bp.rdfile(path + "gaussian_m25_s07.dat", usecols = [0, 1])[1]
print("Done")
print("Checking the distribution...")
integral = integ.simps(distribution[:, 1], distribution[:, 0], 0)
np.testing.assert_almost_equal(integral, 1, 2)
print("Done")
# If we load the values of F(z, lambda) calculated in a previous
# execution we do not need to build the multilayer and repeat the
# calculation of the F function. Notice that the values of z at which
# the new distribution is sampled should be the same as the previous
# one.
if args.loadmatrix:
print("Loading matrix...")
fmatrix = np.load(args.loadmatrix)
zlist = fmatrix['zlist']
np.testing.assert_array_equal(zlist, distribution[:, 0])
wlist = fmatrix['wlist']
angle = fmatrix['angle']
fte = fmatrix['fte']
ftm = fmatrix['ftm']
print("Done")
else:
# Create the materials
print("Loading materials... ")
silicon = ml.Medium("silicon.dat")
air = ml.Medium("air.dat")
sio2 = ml.Medium("sio2.dat")
poly = ml.Medium("polysilicon.dat")
print("Done")
# Set the fixed parameters.
angle = np.deg2rad(0)
# Create the multilayer
print("Building multilayer and allocating memory... ")
thicknesses = [300, 50]
multilayer = ml.Multilayer([
air,
[poly, thicknesses[0]],
[sio2, thicknesses[1]],
silicon])
# Define the wavelengths and z coordinates at which F will be calculated
# and allocate memory for the results. We will use a structured array to
# store the values of F(z, lambda).
wstep = 1
wmin = multilayer.getMinMaxWlength()[0]
wmax = multilayer.getMinMaxWlength()[1]
wlist = np.arange(wmin, wmax, wstep)
zlist = distribution[:, 0]
ftype = np.dtype([
('fx', np.complex128),
('fy', np.complex128),
('fz', np.complex128)])
resmatrix = np.empty((zlist.size, wlist.size), dtype = ftype)
print("Done")
# I(wavelength, theta) = s(wavelength) * F'(wavelength, theta), where
# F'(wav, theta) = integral[z](|F|^2 * rcdist(z)). Therefore, we
# calculate the new spectrum as a modification to the original spectrum.
# The modification factor F'(wav, theta) is an integral over z.
# First calculate |Fy|^2 for te and |Fx*cos^2 + Fz*sin^2|^2 for tm. We
# do fx and fz in one loop and fy in another independent loop to avoid
# recalculating the characteristic matrix at every iteration due to the
# change of polarization.
print("Calculating F...")
for (widx, wlength) in enumerate(wlist):
percent = (float(widx) / wlist.size) * 100
print("%.2f%%" % percent)
for (zidx, z) in enumerate(zlist):
resmatrix[zidx][widx]['fx'] = multilayer.calculateFx(z, wlength, angle)
resmatrix[zidx][widx]['fz'] = multilayer.calculateFz(z, wlength, angle)
for (zidx, z) in enumerate(zlist):
resmatrix[zidx][widx]['fy'] = multilayer.calculateFy(z, wlength, angle)
# We are probably more interesed on the effect of the multilayer on the
# energy rather than the electric field. What we want is |Fy(z)|^2 for
# TE waves and |Fx(z) cosA^2 + Fz(z) sinA^2|^2 for TM waves.
ftm = np.absolute(
resmatrix['fx'] * np.cos(angle) ** 2 + \
resmatrix['fz'] * np.sin(angle) ** 2) ** 2
fte = np.absolute(resmatrix['fy']) ** 2
print("Done")
# Notice that until now we have not used the distribution of the
# radiative centers, but the calculation of ftm and fte is costly.
# If requested, we can save fte and ftm to a file. In a following
# execution of the script, the matrix can be loaded from the file
# instead of recalculated.
if args.savematrix:
print("Saving matrix...")
np.savez(args.savematrix, fte = fte, ftm = ftm, zlist = zlist,
wlist = wlist, angle = angle)
print("Done")
# Build or load the original spectrum. It should be sampled at the same
# wavelengths defined in wlist. If we are interested only in the
# modificator to the spectrum, not in the modified spectrum, we can
# leave it at 1.
original_spec = 1
# Multiply each F(z, lambda) by the distribution.
print("Integrating...")
distval = distribution[:, 1].reshape(distribution[:, 1].size, 1)
fte_mplied = fte * distval
ftm_mplied = ftm * distval
fte_int = integ.simps(fte_mplied, zlist, axis = 0)
ftm_int = integ.simps(ftm_mplied, zlist, axis = 0)
spectrum_modte = original_spec * fte_int
spectrum_modtm = original_spec * ftm_int
print("Done")
# Dump data to file or stdout
comments = "# F_TE = |Fy^2|^2\n" + \
"# F_TM = |Fx * cosA^2 + Fz * sinA^2|^2\n" + \
"# Modified spectrum for TE and TM waves for a\n" + \
"# distributions of the radiative centers.\n" + \
"# wlength\tF_TE\tF_TM"
if args.output:
bp.wdfile(args.output, comments,
np.array([wlist, spectrum_modte, spectrum_modtm]).T, '%.6e')
else:
print(comments)
for i in xrange(wlist.size):
print("%.6e\t%.6e\t%.6e" % (wlist[i], spectrum_modte[i],
spectrum_modtm[i]))
# Plot data if requested
if args.graph:
import matplotlib.pyplot as plt
plt.plot(wlist, spectrum_modte, label='TE', color = 'r')
plt.plot(wlist, spectrum_modtm, label='TM', color = 'b')
plt.xlabel('Wavelength (nm)')
plt.ylabel('Energy ratio')
plt.grid()
plt.legend(loc=2)
plt.title('%.1f rad' % angle)
plt.show()
plt.close()
| gpl-3.0 | -2,529,649,230,264,011,300 | 36.967136 | 83 | 0.649808 | false |
linaro-technologies/jobserv | jobserv/storage/local_storage.py | 1 | 3989 | # Copyright (C) 2017 Linaro Limited
# Author: Andy Doan <[email protected]>
import hmac
import os
import mimetypes
import shutil
from flask import Blueprint, request, send_file, url_for
from jobserv.jsend import get_or_404
from jobserv.models import Build, Project, Run
from jobserv.settings import INTERNAL_API_KEY, LOCAL_ARTIFACTS_DIR
from jobserv.storage.base import BaseStorage
blueprint = Blueprint('local_storage', __name__, url_prefix='/local-storage')
class Storage(BaseStorage):
blueprint = blueprint
def __init__(self):
super().__init__()
self.artifacts = LOCAL_ARTIFACTS_DIR
def _get_local(self, storage_path):
assert storage_path[0] != '/'
path = os.path.join(self.artifacts, storage_path)
dirname = os.path.dirname(path)
if not os.path.exists(dirname):
os.makedirs(dirname)
return path
def _create_from_string(self, storage_path, contents):
path = self._get_local(storage_path)
with open(path, 'w') as f:
f.write(contents)
def _create_from_file(self, storage_path, filename, content_type):
path = self._get_local(storage_path)
with open(filename, 'rb') as fin, open(path, 'wb') as fout:
shutil.copyfileobj(fin, fout)
def _get_as_string(self, storage_path):
assert storage_path[0] != '/'
path = os.path.join(self.artifacts, storage_path)
with open(path, 'r') as f:
return f.read()
def list_artifacts(self, run):
path = '%s/%s/%s/' % (
run.build.project.name, run.build.build_id, run.name)
path = os.path.join(self.artifacts, path)
for base, _, names in os.walk(path):
for name in names:
if name != '.rundef.json':
yield os.path.join(base, name)[len(path):]
def get_download_response(self, request, run, path):
try:
p = os.path.join(self.artifacts, self._get_run_path(run), path)
mt = mimetypes.guess_type(p)[0]
return send_file(open(p, 'rb'), mimetype=mt)
except FileNotFoundError:
return 'File not found', 404
def _generate_put_url(self, run, path, expiration, content_type):
p = os.path.join(self.artifacts, self._get_run_path(run), path)
msg = '%s,%s,%s' % ('PUT', p, content_type)
sig = hmac.new(INTERNAL_API_KEY, msg.encode(), 'sha1').hexdigest()
return url_for(
'local_storage.run_upload_artifact', sig=sig,
proj=run.build.project.name, build_id=run.build.build_id,
run=run.name, path=path, _external=True)
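# Upload flow sketch (hypothetical names; not part of the original code):
#
#   url = Storage()._generate_put_url(run, "console.log", 3600, "text/plain")
#   # A client PUTs the file to 'url'; run_upload_artifact() below recomputes
#   # the HMAC over (method, path, content-type) and only accepts the upload
#   # when the signatures match.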
def _get_run(proj, build_id, run):
p = get_or_404(Project.query.filter_by(name=proj))
b = get_or_404(Build.query.filter_by(project=p, build_id=build_id))
return Run.query.filter_by(
name=run
).filter(
Run.build.has(Build.id == b.id)
).first_or_404()
@blueprint.route('/<sig>/<proj>/builds/<int:build_id>/runs/<run>/<path:path>',
methods=('PUT',))
def run_upload_artifact(sig, proj, build_id, run, path):
run = _get_run(proj, build_id, run)
# validate the signature
ls = Storage()
p = os.path.join(ls.artifacts, ls._get_run_path(run), path)
msg = '%s,%s,%s' % (request.method, p, request.headers.get('Content-Type'))
computed = hmac.new(INTERNAL_API_KEY, msg.encode(), 'sha1').hexdigest()
if not hmac.compare_digest(sig, computed):
return 'Invalid signature', 401
dirname = os.path.dirname(p)
try:
# we could have 2 uploads trying this, so just do it this way to avoid
# race conditions
os.makedirs(dirname)
except FileExistsError:
pass
# stream the contents to disk
with open(p, 'wb') as f:
chunk_size = 4096
while True:
chunk = request.stream.read(chunk_size)
if len(chunk) == 0:
break
f.write(chunk)
return 'ok'
| agpl-3.0 | -3,963,570,515,246,286,300 | 33.094017 | 79 | 0.603159 | false |
jadref/buffer_bci | python/echoClient/eventForwarder.py | 1 | 2911 | #!/usr/bin/env python3
bufferpath = "../../python/signalProc"
fieldtripPath="../../dataAcq/buffer/python"
import os, sys, random, math, time, socket, struct
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),bufferpath))
import bufhelp
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),fieldtripPath))
import FieldTrip
# Configuration of buffer
buffer1_hostname='localhost'
buffer1_port=1972
# Configuration of forwarding buffer
buffer2_hostname=None
buffer2_port=None
# holder for the buffer2 connection
ftc2=None
# flag to stop running when used from another function
running=True
def connectBuffers(buffer1_hostname,buffer1_port,buffer2_hostname,buffer2_port):
if buffer1_hostname==buffer2_hostname and buffer1_port==buffer2_port :
print("WARNING:: fowarding to the same port may result in infinite loops!!!!")
#Connect to Buffer2 -- do this first so the global state is for ftc1
print("Connecting to " + buffer2_hostname + ":" + str(buffer2_port))
(ftc2,hdr2) = bufhelp.connect(buffer2_hostname,buffer2_port)
print("Connected");
print(hdr2)
#Connect to Buffer1
print("Connecting to " + buffer1_hostname + ":" + str(buffer1_port))
(ftc1,hdr1) = bufhelp.connect(buffer1_hostname,buffer1_port)
print("Connected!");
print(hdr1)
return (ftc1,ftc2)
# Receive events from the buffer1 and send them to buffer2
def forwardBufferEvents(ftc1,ftc2):
global running
global ftc
ftc=ftc1
while ( running ):
events = bufhelp.buffer_newevents()
for evt in events:
print(str(evt.sample) + ": " + str(evt))
evt.sample=-1
ftc2.putEvents(evt)
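# Note: evt.sample is reset to -1 above, presumably so the receiving buffer
# assigns its own sample index -- the original index is only meaningful
# relative to buffer1's data stream.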
def guiGetBuffer2():
print("GUI info not supported yet!!")
    return
import tkinter as tk
master = tk.Tk()
tk.Label(master, text="HostName").grid(row=0)
tk.Label(master, text="Port").grid(row=1)
e1 = tk.Entry(master)
e2 = tk.Entry(master)
e1.grid(row=0, column=1)
e2.grid(row=1, column=1)
master.mainloop()
if __name__ == "__main__":
    if len(sys.argv)>1: # called with options, i.e. commandline
        buffer2_hostname = sys.argv[1]
        if len(sys.argv)>2:
            try:
                buffer2_port = int(sys.argv[2])
except:
print('Error: second argument (%s) must be a valid (=integer) port number'%sys.argv[2])
sys.exit(1)
if buffer2_hostname is None :
(buffer2_hostname,buffer2_port)=guiGetBuffer2()
(ftc1,ftc2)=connectBuffers(buffer1_hostname,buffer1_port,buffer2_hostname,buffer2_port)
forwardBufferEvents(ftc1,ftc2)
| gpl-3.0 | -8,554,221,728,104,355,000 | 34.938272 | 103 | 0.605634 | false |
MTG/essentia | test/src/unittests/tonal/test_tonicindianartmusic.py | 1 | 7198 | #!/usr/bin/env python
# Copyright (C) 2006-2021 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
from numpy import sin, float32, pi, arange, mean, log2, floor, ceil, math, concatenate
import numpy as np
class TestTonicIndianArtMusic(TestCase):
def testInvalidParam(self):
self.assertConfigureFails(TonicIndianArtMusic(), { 'binResolution': -1 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'frameSize': -1 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'binResolution': 0 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'frameSize': 0 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'harmonicWeight': -1 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'harmonicWeight': 0 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'harmonicWeight': 1 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'hopSize': -1 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'magnitudeCompression': -1 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'magnitudeCompression': 2 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'magnitudeThreshold': -1 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'maxTonicFrequency': -1 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'minTonicFrequency': -1 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'numberHarmonics': 0 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'numberHarmonics': -1 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'numberSaliencePeaks': 0})
self.assertConfigureFails(TonicIndianArtMusic(), { 'numberSaliencePeaks': 16})
self.assertConfigureFails(TonicIndianArtMusic(), { 'referenceFrequency': -1 })
self.assertConfigureFails(TonicIndianArtMusic(), { 'sampleRate': -1 })
def testEmpty(self):
self.assertRaises(RuntimeError, lambda: TonicIndianArtMusic()([]))
def testSilence(self):
# test 1 second of silence
silence = np.zeros(44100)
self.assertRaises(RuntimeError, lambda: TonicIndianArtMusic()(silence))
def testOnes(self):
# Not a realistic test but useful for sanity checks/ regression checks.
referenceTonic = 108.86
tonic = TonicIndianArtMusic()(ones(1024))
self.assertAlmostEqualFixedPrecision(tonic, referenceTonic, 2)
# Full reference set of values can be sourced from dataset
# Download https://compmusic.upf.edu/carnatic-varnam-dataset
# See file "tonics.yaml"
#
# vignesh: 138.59
# This tonic corresponds to the following mp3 file.
# "23582__gopalkoduri__carnatic-varnam-by-vignesh-in-abhogi-raaga.mp3'
#
# copy this file into essentia/test/audio/recorded.
def testRegressionVignesh(self):
audio = MonoLoader(filename = join(testdata.audio_dir, 'recorded/223582__gopalkoduri__carnatic-varnam-by-vignesh-in-abhogi-raaga.mp3'),
sampleRate = 44100)()
# Reference tonic from YAML file is 138.59. The measured is "138.8064422607422"
referenceTonic = 138.59
tonic = TonicIndianArtMusic()(audio)
self.assertAlmostEqualFixedPrecision(tonic, referenceTonic, 0)
def testRegression(self):
# Regression test using existing vignesh audio file in "essentia/test/audio/recorded"
audio = MonoLoader(filename = join(testdata.audio_dir, 'recorded/vignesh.wav'),
sampleRate = 44100)()
referenceTonic = 102.74
tonic = TonicIndianArtMusic()(audio)
self.assertAlmostEqualFixedPrecision( tonic, referenceTonic, 2)
start_zero = np.zeros(int(44100))
end_zero = np.zeros(int(44100))
# Check result is the same with appended silences of constant length
real_audio = np.hstack([start_zero, audio, end_zero])
tonic = TonicIndianArtMusic()(real_audio)
self.assertAlmostEqualFixedPrecision(tonic, referenceTonic, 2)
def testMinMaxMismatch(self):
self.assertRaises(RuntimeError, lambda: TonicIndianArtMusic(minTonicFrequency=100,maxTonicFrequency=11)(ones(4096)))
def testBelowMinimumTonic(self):
signalSize = 15 * 2048
# generate test signal 99 Hz, and put minTonicFreq as 100 Hz in the TonicIndianArtMusic
x = 0.5 * numpy.sin((array(range(signalSize))/44100.) * 99* 2*math.pi)
self.assertRaises(EssentiaException, lambda: TonicIndianArtMusic(minTonicFrequency=100,maxTonicFrequency=375)(x))
def testAboveMaxTonic(self):
signalSize = 15 * 2048
# generate test signal 101 Hz, and put maxTonicFreq as 100 Hz in the TonicIndianArtMusic
x = 0.5 * numpy.sin((array(range(signalSize))/44100.) * 101* 2*math.pi)
self.assertRaises(RuntimeError, lambda: TonicIndianArtMusic(minTonicFrequency=99,maxTonicFrequency=100)(x))
def testRegressionSyntheticSignal(self):
# generate a test signal concatenating different frequencies
signalSize = 15 * 2048
# Concat 3 sine waves together of different frequencies
x = 0.5 * numpy.sin((array(range(signalSize))/44100.) * 124 * 2*math.pi)
y = 0.5 * numpy.sin((array(range(signalSize))/44100.) * 100 * 2*math.pi)
z = 0.5 * numpy.sin((array(range(signalSize))/44100.) * 80 * 2*math.pi)
mix = concatenate([x, y, z])
# tiam = acronym for "Tonic Indian Art Music"
tiam = TonicIndianArtMusic(minTonicFrequency=50, maxTonicFrequency=111)
tonic = tiam(mix)
# Check that tonic is above minTonicFrequency
self.assertGreater(tonic, 50)
# Check that tonic is below highest frequency in signal
self.assertGreater(124, tonic)
### Make a (unharmonic) chord
x = 0.5 * numpy.sin((array(range(signalSize))/44100.) * 124 * 2*math.pi)
y = 0.5 * numpy.sin((array(range(signalSize))/44100.) * 100 * 2*math.pi)
z = 0.5 * numpy.sin((array(range(signalSize))/44100.) * 80 * 2*math.pi)
chord = x+y+z
tiam = TonicIndianArtMusic(minTonicFrequency=50, maxTonicFrequency=111)
tonic = tiam(chord)
# Check that tonic is above min frequency in signal
self.assertGreater(tonic, 80)
# Check that tonic is below highest frequency in signal
self.assertGreater(124, tonic)
suite = allTests(TestTonicIndianArtMusic)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
| agpl-3.0 | 7,944,952,061,579,169,000 | 48.30137 | 143 | 0.689497 | false |
shanot/imp | modules/saxs/test/test_surface.py | 1 | 3121 | from __future__ import print_function
import IMP
import IMP.test
import IMP.atom
import IMP.core
import IMP.saxs
import os
import time
class Tests(IMP.test.TestCase):
def test_surface_area(self):
"""Check protein surface computation"""
m = IMP.Model()
#! read PDB
mp = IMP.atom.read_pdb(self.get_input_file_name('6lyz.pdb'), m,
IMP.atom.NonWaterNonHydrogenPDBSelector())
IMP.atom.add_radii(mp)
#! select atom particles from the model
particles = IMP.atom.get_by_type(mp, IMP.atom.ATOM_TYPE)
        #! calculate surface accessibility
s = IMP.saxs.SolventAccessibleSurface()
surface_area = s.get_solvent_accessibility(IMP.core.XYZRs(particles))
#! sum up
total_area = 0.0
for area in surface_area:
total_area += area
# print 'Area = ' + str(total_area)
self.assertAlmostEqual(total_area, 37.728, delta=0.1)
def test_surface_area2(self):
"""Atom radii and probe radius parameters that work for SOAP"""
m = IMP.Model()
#! read PDB
mp = IMP.atom.read_pdb(self.get_input_file_name('6lyz.pdb'), m,
IMP.atom.NonWaterNonHydrogenPDBSelector())
IMP.atom.add_radii(mp)
#! select atom particles from the model
particles = IMP.atom.get_by_type(mp, IMP.atom.ATOM_TYPE)
for p in particles:
xyzrp = IMP.core.XYZR(p)
xyzrp.set_radius(0.7 * xyzrp.get_radius())
        #! calculate surface accessibility
s = IMP.saxs.SolventAccessibleSurface()
surface_area = s.get_solvent_accessibility(
IMP.core.XYZRs(particles), 1.4)
#! sum up
total_area = 0.0
for area in surface_area:
total_area += area
print('Area = ' + str(total_area))
self.assertAlmostEqual(total_area, 73.53, delta=0.1)
def test_corner_case(self):
"""Check the surface area handle points on boundary"""
# this test could be simplified probably, but it is fast enough
# if we move to non-grid based SA, it should go away
ensemble = ["./433.pdb", "./434.pdb"]
m = IMP.Model()
#! read PDBs
for struc in ensemble:
print(" ... Fitting structure %s" % struc)
mp = IMP.atom.read_pdb(self.get_input_file_name(struc), m,
IMP.atom.NonWaterNonHydrogenPDBSelector())
#! select particles from the model
particles = IMP.atom.get_by_type(mp, IMP.atom.ATOM_TYPE)
#! add radius for water layer computation
ft = IMP.saxs.get_default_form_factor_table()
for i in range(0, len(particles)):
radius = ft.get_radius(particles[i])
IMP.core.XYZR(particles[i]).set_radius(radius)
# compute surface accessibility
s = IMP.saxs.SolventAccessibleSurface()
surface_area = s.get_solvent_accessibility(
IMP.core.XYZRs(particles))
if __name__ == '__main__':
IMP.test.main()
| gpl-3.0 | -509,015,444,806,564,160 | 34.067416 | 77 | 0.582826 | false |
ozgurakgun/minion | mini-scripts/testallconstraints.py | 1 | 3983 | #!/usr/bin/python
# Generate two minion input files, run them then compare dumptree outputs to
# detect bugs in constraint propagators.
import sys, os, getopt
from constraint_test_common import *
from multiprocessing import Pool, Manager
import random
#from sendemail import *
import time
(optargs, other)=getopt.gnu_getopt(sys.argv, "", ["minion=", "numtests=", "email", "fullprop", "64bit", "procs=", "seed=", "conslist="])
if len(other)>1:
print("Usage: testallconstraints.py [--minion=<location of minion binary>] [--numtests=...] [--email] [--procs=...] [--seed=...] [--conslist=...]")
sys.exit(1)
# This one tests all the constraints in the following list.
conslist=[]
# equality constraints
conslist+=["diseq", "eq", "gaceq"]
# alldiffs
conslist+=["alldiff", "gacalldiff", "alldiffmatrix"]
# capacity constraints
conslist+=["gcc", "gccweak", "occurrence", "occurrenceleq", "occurrencegeq"]
#element constraints
conslist+=["element", "element_undefzero", "watchelement", "watchelement_undefzero"]
conslist+=["watchelement_one", "element_one"]
# arithmetic constraints
conslist+=["modulo", "modulo_undefzero", "pow", "minuseq", "product", "div", "div_undefzero", "abs"]
conslist+=["watchsumleq", "watchsumgeq", "watchvecneq", "hamming", "not-hamming"]
conslist+=["weightedsumleq", "weightedsumgeq"]
conslist+=["litsumgeq"]
# should test table to test reifytable? and reifyimplytable
conslist+=["sumgeq", "sumleq", "weightedsumleq", "weightedsumgeq"]
conslist+=["ineq"]
conslist+=["difference"]
conslist+=["negativetable", "lighttable"]
# symmetry-breaking constraints
conslist+=["lexleq", "lexless", "lexleq_rv", "lexleq_quick", "lexless_quick"]
conslist+=["max", "min"]
conslist+=["watchneq", "watchless"]
conslist+=["w-inset", "w-inintervalset", "w-notinset", "w-inrange", "w-notinrange", "w-literal", "w-notliteral"]
conslist+=["watchsumgeq", "litsumgeq", "watchneq", "watchless", "not-hamming"]
conslist+=["not-hamming"]
conslist+=["gacschema", "haggisgac", "haggisgac-stable", "str2plus", "shortstr2", "shortctuplestr2", "mddc"]
conslist+=["nvalueleq", "nvaluegeq"]
# add reifyimply and reify variants of all constraints
it=conslist[:]
for c in it:
conslist+=["reifyimply"+c]
conslist+=["reify"+c]
numtests=100
minionbin="bin/minion"
email=False
fullprop=False # compare the constraint against itself with fullprop. Needs DEBUG=1.
bit64=False
procs=1
seed=12345
for i in optargs:
(a1, a2)=i
if a1=="--minion":
minionbin=a2
elif a1=="--numtests":
numtests=int(a2)
elif a1=="--email":
email=True
elif a1=="--fullprop":
fullprop=True
elif a1=="--64bit":
bit64=True
elif a1=="--procs":
procs=int(a2)
elif a1=="--seed":
seed=int(a2)
elif a1=="--conslist":
conslist=a2.split(",")
def runtest(consname):
cachename = consname
starttime=time.time()
sys.stdout.flush()
random.seed(seed)
reify=False
reifyimply=False
if consname[0:10]=="reifyimply":
reifyimply=True
consname=consname[10:]
if consname[0:5]=="reify":
reify=True
consname=consname[5:]
consname=consname.replace("-", "__minus__")
testobj=eval("test"+consname+"()")
testobj.solver=minionbin
for testnum in range(numtests):
options = {'reify': reify, 'reifyimply': reifyimply, 'fullprop': fullprop, 'printcmd': False, 'fixlength':False, 'getsatisfyingassignment':True}
if not testobj.runtest(options):
print("Failed when testing %s"%cachename)
sys.stdout.flush()
return False
print("Completed testing %s, duration: %d"%(cachename, time.time()-starttime))
return True
if __name__ == '__main__':
p = Pool(procs)
retval = p.map(runtest, conslist)
if all(retval):
print("Success")
exit(0)
else:
print("Failure")
exit(1)
| gpl-2.0 | -5,649,680,209,550,508,000 | 27.654676 | 152 | 0.651017 | false |
rabramley/telomere | app/model/batch.py | 1 | 2972 | from app import db
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.sql import select, func
from app.model.outstandingError import OutstandingError
import numpy
import decimal
class Batch(db.Model):
id = db.Column(db.Integer, primary_key=True)
robot = db.Column(db.String(20))
temperature = db.Column(db.Numeric(precision=3, scale=1))
datetime = db.Column(db.DateTime())
userId = db.Column(db.Integer, db.ForeignKey('user.id'))
version_id = db.Column(db.Integer, nullable=False)
plateName = db.Column(db.String(50))
halfPlate = db.Column(db.String(1))
humidity = db.Column(db.Integer())
primerBatch = db.Column(db.Integer())
enzymeBatch = db.Column(db.Integer())
rotorGene = db.Column(db.Integer())
operatorUserId = db.Column(db.Integer, db.ForeignKey('user.id'))
batchFailureReason = db.Column(db.Integer())
processType = db.Column(db.String(20))
__mapper_args__ = {
"version_id_col": version_id
}
def __init__(self, *args, **kwargs):
self.id = kwargs.get('id')
self.robot = kwargs.get('robot')
self.temperature = kwargs.get('temperature')
self.datetime = kwargs.get('datetime')
self.userId = kwargs.get('userId')
self.plateName = kwargs.get('plateName')
self.halfPlate = kwargs.get('halfPlate')
self.humidity = kwargs.get('humidity')
self.primerBatch = kwargs.get('primerBatch')
self.enzymeBatch = kwargs.get('enzymeBatch')
self.rotorGene = kwargs.get('rotorGene')
self.operatorUserId = kwargs.get('operatorUserId')
self.batchFailureReason = kwargs.get('batchFailureReason')
self.processType = kwargs.get('processType')
@hybrid_property
def outstandingErrorCount(self):
return len(self.outstandingErrors)
@outstandingErrorCount.expression
def outstandingErrorCount(cls):
return (select([func.count(OutstandingError.id)]).
where(OutstandingError.batchId == cls.id).
label("outstandingErrorCount")
)
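    # Because of the expression form above, the count is also usable inside
    # queries, e.g. (illustrative):
    #
    #   Batch.query.filter(Batch.outstandingErrorCount > 0)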
def get_measurements_for_sample_code(self, sampleCode):
return [m for m in self.measurements if m.sample.sampleCode == sampleCode]
def has_no_pool_samples(self):
return not any(m.sample.is_pool_sample() for m in self.measurements)
def has_no_non_pool_samples(self):
return not any(not m.sample.is_pool_sample() for m in self.measurements)
def has_invalid_pool_ts_average(self):
poolTsValues = [ decimal.Decimal(m.ts) for m in self.measurements if m.ts is not None and m.sample.is_pool_sample()]
averagePoolTs = numpy.mean(poolTsValues)
return averagePoolTs < 0.99 or averagePoolTs > 1.01
def is_duplicate(self):
return self.processType == "Duplicate"
def is_replate(self):
return self.processType == "Re-Plate"
def is_initial(self):
return self.processType == "Initial"
| mit | 7,113,725,852,751,646,000 | 37.102564 | 124 | 0.664536 | false |
DarioGT/OMS-PluginXML | org.modelsphere.sms/lib/jython-2.2.1/Lib/uu.py | 1 | 6092 | #! /usr/bin/env python
# Copyright 1994 by Lance Ellinghouse
# Cathedral City, California Republic, United States of America.
# All Rights Reserved
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Lance Ellinghouse
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# LANCE ELLINGHOUSE DISCLAIMS ALL WARRANTIES WITH REGARD TO
# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS, IN NO EVENT SHALL LANCE ELLINGHOUSE CENTRUM BE LIABLE
# FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# Modified by Jack Jansen, CWI, July 1995:
# - Use binascii module to do the actual line-by-line conversion
# between ascii and binary. This results in a 1000-fold speedup. The C
# version is still 5 times faster, though.
# - Arguments more compliant with python standard
"""Implementation of the UUencode and UUdecode functions.
encode(in_file, out_file [,name, mode])
decode(in_file [, out_file, mode])
"""
import binascii
import os
import sys
from types import StringType
__all__ = ["Error", "encode", "decode"]
class Error(Exception):
pass
def encode(in_file, out_file, name=None, mode=None):
"""Uuencode file"""
#
# If in_file is a pathname open it and change defaults
#
if in_file == '-':
in_file = sys.stdin
elif isinstance(in_file, StringType):
if name is None:
name = os.path.basename(in_file)
if mode is None:
try:
mode = os.stat(in_file)[0]
except AttributeError:
pass
in_file = open(in_file, 'rb')
#
# Open out_file if it is a pathname
#
if out_file == '-':
out_file = sys.stdout
elif isinstance(out_file, StringType):
out_file = open(out_file, 'w')
#
# Set defaults for name and mode
#
if name is None:
name = '-'
if mode is None:
mode = 0666
#
# Write the data
#
out_file.write('begin %o %s\n' % ((mode&0777),name))
str = in_file.read(45)
while len(str) > 0:
out_file.write(binascii.b2a_uu(str))
str = in_file.read(45)
out_file.write(' \nend\n')
def decode(in_file, out_file=None, mode=None, quiet=0):
"""Decode uuencoded file"""
#
# Open the input file, if needed.
#
if in_file == '-':
in_file = sys.stdin
elif isinstance(in_file, StringType):
in_file = open(in_file)
#
# Read until a begin is encountered or we've exhausted the file
#
while 1:
hdr = in_file.readline()
if not hdr:
raise Error, 'No valid begin line found in input file'
if hdr[:5] != 'begin':
continue
hdrfields = hdr.split(" ", 2)
if len(hdrfields) == 3 and hdrfields[0] == 'begin':
try:
int(hdrfields[1], 8)
break
except ValueError:
pass
if out_file is None:
out_file = hdrfields[2].rstrip()
if os.path.exists(out_file):
raise Error, 'Cannot overwrite existing file: %s' % out_file
if mode is None:
mode = int(hdrfields[1], 8)
#
# Open the output file
#
opened = False
if out_file == '-':
out_file = sys.stdout
elif isinstance(out_file, StringType):
fp = open(out_file, 'wb')
try:
os.path.chmod(out_file, mode)
except AttributeError:
pass
out_file = fp
opened = True
#
# Main decoding loop
#
s = in_file.readline()
while s and s.strip() != 'end':
try:
data = binascii.a2b_uu(s)
except binascii.Error, v:
# Workaround for broken uuencoders by /Fredrik Lundh
nbytes = (((ord(s[0])-32) & 63) * 4 + 5) / 3
data = binascii.a2b_uu(s[:nbytes])
if not quiet:
sys.stderr.write("Warning: %s\n" % str(v))
out_file.write(data)
s = in_file.readline()
if not s:
raise Error, 'Truncated input file'
if opened:
out_file.close()
def test():
"""uuencode/uudecode main program"""
import getopt
dopt = 0
topt = 0
input = sys.stdin
output = sys.stdout
ok = 1
try:
optlist, args = getopt.getopt(sys.argv[1:], 'dt')
except getopt.error:
ok = 0
if not ok or len(args) > 2:
print 'Usage:', sys.argv[0], '[-d] [-t] [input [output]]'
print ' -d: Decode (in stead of encode)'
print ' -t: data is text, encoded format unix-compatible text'
sys.exit(1)
for o, a in optlist:
if o == '-d': dopt = 1
if o == '-t': topt = 1
if len(args) > 0:
input = args[0]
if len(args) > 1:
output = args[1]
if dopt:
if topt:
if isinstance(output, StringType):
output = open(output, 'w')
else:
print sys.argv[0], ': cannot do -t to stdout'
sys.exit(1)
decode(input, output)
else:
if topt:
if isinstance(input, StringType):
input = open(input, 'r')
else:
print sys.argv[0], ': cannot do -t from stdin'
sys.exit(1)
encode(input, output)
if __name__ == '__main__':
test()
| gpl-3.0 | 8,905,765,275,347,266,000 | 29.241026 | 72 | 0.559094 | false |
jhogg41/gm-o-matic | gom_server/gom_server/urls.py | 1 | 1187 | """gom_server URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
import core.router
import char_attr.router
router = routers.DefaultRouter()
core.router.addRoutes(router)
char_attr.router.addRoutes(router)
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest-framework')),
url(r'^', include(router.urls)),
url(r'^rest-auth/', include('rest_auth.urls')),
url(r'^rest-auth/registration', include('rest_auth.registration.urls')),
]
| bsd-2-clause | -3,051,245,291,478,614,500 | 36.09375 | 83 | 0.708509 | false |
kevin-coder/tensorflow-fork | tensorflow/python/keras/layers/normalization_test.py | 1 | 22900 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for normalization layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util as tf_test_util
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.layers import normalization
from tensorflow.python.keras.layers import normalization_v2
from tensorflow.python.keras.mixed_precision.experimental import policy
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
class BatchNormalizationTest(keras_parameterized.TestCase):
@keras_parameterized.run_all_keras_modes
def test_basic_batchnorm(self):
testing_utils.layer_test(
keras.layers.BatchNormalization,
kwargs={
'momentum': 0.9,
'epsilon': 0.1,
'gamma_regularizer': keras.regularizers.l2(0.01),
'beta_regularizer': keras.regularizers.l2(0.01)
},
input_shape=(3, 4, 2))
testing_utils.layer_test(
keras.layers.BatchNormalization,
kwargs={
'gamma_initializer': 'ones',
'beta_initializer': 'ones',
'moving_mean_initializer': 'zeros',
'moving_variance_initializer': 'ones'
},
input_shape=(3, 4, 2))
testing_utils.layer_test(
keras.layers.BatchNormalization,
kwargs={'scale': False,
'center': False},
input_shape=(3, 3))
@tf_test_util.run_in_graph_and_eager_modes
def test_batchnorm_weights(self):
layer = keras.layers.BatchNormalization(scale=False, center=False)
layer.build((None, 3, 4))
self.assertEqual(len(layer.trainable_weights), 0)
self.assertEqual(len(layer.weights), 2)
layer = keras.layers.BatchNormalization()
layer.build((None, 3, 4))
self.assertEqual(len(layer.trainable_weights), 2)
self.assertEqual(len(layer.weights), 4)
@tf_test_util.run_in_graph_and_eager_modes
def test_batchnorm_regularization(self):
layer = keras.layers.BatchNormalization(
gamma_regularizer='l1', beta_regularizer='l1')
layer.build((None, 3, 4))
self.assertEqual(len(layer.losses), 2)
max_norm = keras.constraints.max_norm
layer = keras.layers.BatchNormalization(
gamma_constraint=max_norm, beta_constraint=max_norm)
layer.build((None, 3, 4))
self.assertEqual(layer.gamma.constraint, max_norm)
self.assertEqual(layer.beta.constraint, max_norm)
@keras_parameterized.run_all_keras_modes
def test_batchnorm_convnet(self):
if test.is_gpu_available(cuda_only=True):
with self.session(use_gpu=True):
model = keras.models.Sequential()
norm = keras.layers.BatchNormalization(
axis=1, input_shape=(3, 4, 4), momentum=0.8)
model.add(norm)
model.compile(loss='mse',
optimizer=gradient_descent.GradientDescentOptimizer(0.01),
run_eagerly=testing_utils.should_run_eagerly())
# centered on 5.0, variance 10.0
x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
model.fit(x, x, epochs=4, verbose=0)
out = model.predict(x)
out -= np.reshape(keras.backend.eval(norm.beta), (1, 3, 1, 1))
out /= np.reshape(keras.backend.eval(norm.gamma), (1, 3, 1, 1))
np.testing.assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1)
np.testing.assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1)
@keras_parameterized.run_all_keras_modes
def test_batchnorm_convnet_channel_last(self):
model = keras.models.Sequential()
norm = keras.layers.BatchNormalization(
axis=-1, input_shape=(4, 4, 3), momentum=0.8)
model.add(norm)
model.compile(loss='mse',
optimizer=gradient_descent.GradientDescentOptimizer(0.01),
run_eagerly=testing_utils.should_run_eagerly())
# centered on 5.0, variance 10.0
x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3))
model.fit(x, x, epochs=4, verbose=0)
out = model.predict(x)
out -= np.reshape(keras.backend.eval(norm.beta), (1, 1, 1, 3))
out /= np.reshape(keras.backend.eval(norm.gamma), (1, 1, 1, 3))
np.testing.assert_allclose(np.mean(out, axis=(0, 1, 2)), 0.0, atol=1e-1)
np.testing.assert_allclose(np.std(out, axis=(0, 1, 2)), 1.0, atol=1e-1)
@keras_parameterized.run_all_keras_modes
def test_batchnorm_correctness(self):
_run_batchnorm_correctness_test(
normalization.BatchNormalization, dtype='float32')
_run_batchnorm_correctness_test(
normalization_v2.BatchNormalization, dtype='float32')
@keras_parameterized.run_all_keras_modes
def test_batchnorm_mixed_precision(self):
_run_batchnorm_correctness_test(
normalization.BatchNormalization, dtype='float16')
_run_batchnorm_correctness_test(
normalization_v2.BatchNormalization, dtype='float16')
@tf_test_util.run_in_graph_and_eager_modes
def test_batchnorm_policy(self):
norm = keras.layers.BatchNormalization(
axis=-1,
input_shape=(4, 4, 3),
momentum=0.8,
dtype=policy.Policy('infer_float32_vars'))
x = np.random.normal(size=(10, 4, 4, 3)).astype('float16')
y = norm(x)
self.assertEqual(y.dtype, 'float16')
self.assertEqual(norm.beta.dtype.base_dtype, 'float32')
self.assertEqual(norm.gamma.dtype.base_dtype, 'float32')
class BatchNormalizationV1Test(test.TestCase):
@tf_test_util.run_in_graph_and_eager_modes
def test_v1_fused_attribute(self):
norm = normalization.BatchNormalization()
inp = keras.layers.Input((4, 4, 4))
norm(inp)
self.assertEqual(norm.fused, True)
norm = normalization.BatchNormalization(fused=False)
self.assertEqual(norm.fused, False)
inp = keras.layers.Input(shape=(4, 4, 4))
norm(inp)
self.assertEqual(norm.fused, False)
norm = normalization.BatchNormalization(virtual_batch_size=2)
self.assertEqual(norm.fused, True)
inp = keras.layers.Input(shape=(2, 2, 2))
norm(inp)
self.assertEqual(norm.fused, False)
class BatchNormalizationV2Test(keras_parameterized.TestCase):
@keras_parameterized.run_all_keras_modes
def test_basic_batchnorm_v2(self):
testing_utils.layer_test(
normalization_v2.BatchNormalization,
kwargs={'fused': True},
input_shape=(3, 3, 3, 3))
testing_utils.layer_test(
normalization_v2.BatchNormalization,
kwargs={'fused': None},
input_shape=(3, 3, 3))
@tf_test_util.run_in_graph_and_eager_modes
def test_v2_fused_attribute(self):
norm = normalization_v2.BatchNormalization()
self.assertEqual(norm.fused, None)
inp = keras.layers.Input(shape=(4, 4, 4))
norm(inp)
self.assertEqual(norm.fused, True)
norm = normalization_v2.BatchNormalization()
self.assertEqual(norm.fused, None)
inp = keras.layers.Input(shape=(4, 4))
norm(inp)
self.assertEqual(norm.fused, False)
norm = normalization_v2.BatchNormalization(virtual_batch_size=2)
self.assertEqual(norm.fused, False)
inp = keras.layers.Input(shape=(4, 4, 4))
norm(inp)
self.assertEqual(norm.fused, False)
norm = normalization_v2.BatchNormalization(fused=False)
self.assertEqual(norm.fused, False)
inp = keras.layers.Input(shape=(4, 4, 4))
norm(inp)
self.assertEqual(norm.fused, False)
norm = normalization_v2.BatchNormalization(fused=True, axis=[3])
self.assertEqual(norm.fused, True)
inp = keras.layers.Input(shape=(4, 4, 4))
norm(inp)
self.assertEqual(norm.fused, True)
with self.assertRaisesRegexp(ValueError, 'fused.*renorm'):
normalization_v2.BatchNormalization(fused=True, renorm=True)
with self.assertRaisesRegexp(ValueError, 'fused.*when axis is 1 or 3'):
normalization_v2.BatchNormalization(fused=True, axis=2)
with self.assertRaisesRegexp(ValueError, 'fused.*when axis is 1 or 3'):
normalization_v2.BatchNormalization(fused=True, axis=[1, 3])
with self.assertRaisesRegexp(ValueError, 'fused.*virtual_batch_size'):
normalization_v2.BatchNormalization(fused=True, virtual_batch_size=2)
with self.assertRaisesRegexp(ValueError, 'fused.*adjustment'):
normalization_v2.BatchNormalization(fused=True,
adjustment=lambda _: (1, 0))
norm = normalization_v2.BatchNormalization(fused=True)
self.assertEqual(norm.fused, True)
inp = keras.layers.Input(shape=(4, 4))
with self.assertRaisesRegexp(ValueError, '4D input tensors'):
norm(inp)
def _run_batchnorm_correctness_test(layer, dtype='float32', fused=False):
model = keras.models.Sequential()
model.add(keras.Input(shape=(2, 2, 2), dtype=dtype))
norm = layer(momentum=0.8, fused=fused)
model.add(norm)
if dtype == 'float16':
# Keras models require float32 losses.
model.add(keras.layers.Lambda(lambda x: keras.backend.cast(x, 'float32')))
model.compile(loss='mse',
optimizer=gradient_descent.GradientDescentOptimizer(0.01),
run_eagerly=testing_utils.should_run_eagerly())
# centered on 5.0, variance 10.0
x = (np.random.normal(loc=5.0, scale=10.0, size=(1000, 2, 2, 2))
.astype(dtype))
model.fit(x, x, epochs=4, verbose=0)
out = model.predict(x)
out -= keras.backend.eval(norm.beta)
out /= keras.backend.eval(norm.gamma)
np.testing.assert_allclose(out.mean(), 0.0, atol=1e-1)
np.testing.assert_allclose(out.std(), 1.0, atol=1e-1)
@parameterized.parameters(
[normalization.BatchNormalization, normalization_v2.BatchNormalization])
class NormalizationLayersGraphModeOnlyTest(
test.TestCase, parameterized.TestCase):
def test_shared_batchnorm(self, layer):
"""Test that a BN layer can be shared across different data streams."""
with self.cached_session():
# Test single layer reuse
bn = layer()
x1 = keras.layers.Input(shape=(10,))
_ = bn(x1)
x2 = keras.layers.Input(shape=(10,))
y2 = bn(x2)
x = np.random.normal(loc=5.0, scale=10.0, size=(2, 10))
model = keras.models.Model(x2, y2)
model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
model.train_on_batch(x, x)
self.assertEqual(len(bn.updates), 4)
self.assertEqual(len(model.updates), 2)
self.assertEqual(len(model.get_updates_for(x2)), 2)
# Test model-level reuse
x3 = keras.layers.Input(shape=(10,))
y3 = model(x3)
new_model = keras.models.Model(x3, y3, name='new_model')
self.assertEqual(len(new_model.updates), 2)
self.assertEqual(len(model.updates), 4)
self.assertEqual(len(new_model.get_updates_for(x3)), 2)
new_model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
new_model.train_on_batch(x, x)
def test_that_trainable_disables_updates(self, layer):
with self.cached_session():
val_a = np.random.random((10, 4))
val_out = np.random.random((10, 4))
a = keras.layers.Input(shape=(4,))
layer = layer(input_shape=(4,))
b = layer(a)
model = keras.models.Model(a, b)
model.trainable = False
assert not model.updates
model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
assert not model.updates
x1 = model.predict(val_a)
model.train_on_batch(val_a, val_out)
x2 = model.predict(val_a)
self.assertAllClose(x1, x2, atol=1e-7)
model.trainable = True
model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
assert model.updates
model.train_on_batch(val_a, val_out)
x2 = model.predict(val_a)
assert np.abs(np.sum(x1 - x2)) > 1e-5
layer.trainable = False
model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
assert not model.updates
x1 = model.predict(val_a)
model.train_on_batch(val_a, val_out)
x2 = model.predict(val_a)
self.assertAllClose(x1, x2, atol=1e-7)
@tf_test_util.run_deprecated_v1
def test_batchnorm_trainable(self, layer):
"""Tests that batchnorm layer is trainable when learning phase is enabled.
Computes mean and std for current inputs then
applies batch normalization using them.
Args:
layer: Either V1 or V2 of BatchNormalization layer.
"""
# TODO(fchollet): enable in all execution modes when issue with
# learning phase setting is resolved.
with self.cached_session():
bn_mean = 0.5
bn_std = 10.
val_a = np.expand_dims(np.arange(10.), axis=1)
def get_model(bn_mean, bn_std):
inp = keras.layers.Input(shape=(1,))
x = layer()(inp)
model1 = keras.models.Model(inp, x)
model1.set_weights([
np.array([1.]),
np.array([0.]),
np.array([bn_mean]),
np.array([bn_std**2])
])
return model1
# Simulates training-mode with trainable layer.
# Should use mini-batch statistics.
with keras.backend.learning_phase_scope(1):
model = get_model(bn_mean, bn_std)
model.compile(loss='mse', optimizer='rmsprop')
out = model.predict(val_a)
self.assertAllClose(
(val_a - np.mean(val_a)) / np.std(val_a), out, atol=1e-3)
def _run_layernorm_correctness_test(layer, dtype='float32'):
model = keras.models.Sequential()
norm = layer(input_shape=(2, 2, 2))
model.add(norm)
model.compile(loss='mse',
optimizer=gradient_descent.GradientDescentOptimizer(0.01),
run_eagerly=testing_utils.should_run_eagerly())
# centered on 5.0, variance 10.0
x = (np.random.normal(loc=5.0, scale=10.0, size=(1000, 2, 2, 2))
.astype(dtype))
model.fit(x, x, epochs=4, verbose=0)
out = model.predict(x)
out -= keras.backend.eval(norm.beta)
out /= keras.backend.eval(norm.gamma)
np.testing.assert_allclose(out.mean(), 0.0, atol=1e-1)
np.testing.assert_allclose(out.std(), 1.0, atol=1e-1)
class LayerNormalizationTest(keras_parameterized.TestCase):
@keras_parameterized.run_all_keras_modes
def test_basic_layernorm(self):
testing_utils.layer_test(
keras.layers.LayerNormalization,
kwargs={
'gamma_regularizer': keras.regularizers.l2(0.01),
'beta_regularizer': keras.regularizers.l2(0.01)
},
input_shape=(3, 4, 2))
testing_utils.layer_test(
keras.layers.LayerNormalization,
kwargs={
'gamma_initializer': 'ones',
'beta_initializer': 'ones',
},
input_shape=(3, 4, 2))
testing_utils.layer_test(
keras.layers.LayerNormalization,
kwargs={'scale': False,
'center': False},
input_shape=(3, 3))
@tf_test_util.run_in_graph_and_eager_modes
def test_layernorm_weights(self):
layer = keras.layers.LayerNormalization(scale=False, center=False)
layer.build((None, 3, 4))
self.assertEqual(len(layer.trainable_weights), 0)
self.assertEqual(len(layer.weights), 0)
layer = keras.layers.LayerNormalization()
layer.build((None, 3, 4))
self.assertEqual(len(layer.trainable_weights), 2)
self.assertEqual(len(layer.weights), 2)
@tf_test_util.run_in_graph_and_eager_modes
def test_layernorm_regularization(self):
layer = keras.layers.LayerNormalization(
gamma_regularizer='l1', beta_regularizer='l1')
layer.build((None, 3, 4))
self.assertEqual(len(layer.losses), 2)
max_norm = keras.constraints.max_norm
layer = keras.layers.LayerNormalization(
gamma_constraint=max_norm, beta_constraint=max_norm)
layer.build((None, 3, 4))
self.assertEqual(layer.gamma.constraint, max_norm)
self.assertEqual(layer.beta.constraint, max_norm)
@keras_parameterized.run_all_keras_modes
def test_layernorm_convnet(self):
if test.is_gpu_available(cuda_only=True):
with self.session(use_gpu=True):
model = keras.models.Sequential()
norm = keras.layers.LayerNormalization(
input_shape=(3, 4, 4), params_axis=1)
model.add(norm)
model.compile(loss='mse',
optimizer=gradient_descent.GradientDescentOptimizer(0.01),
run_eagerly=testing_utils.should_run_eagerly())
# centered on 5.0, variance 10.0
x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
model.fit(x, x, epochs=4, verbose=0)
out = model.predict(x)
out -= np.reshape(keras.backend.eval(norm.beta), (1, 3, 1, 1))
out /= np.reshape(keras.backend.eval(norm.gamma), (1, 3, 1, 1))
np.testing.assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1)
np.testing.assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1)
@keras_parameterized.run_all_keras_modes
def test_layernorm_convnet_channel_last(self):
model = keras.models.Sequential()
norm = keras.layers.LayerNormalization(input_shape=(4, 4, 3))
model.add(norm)
model.compile(loss='mse',
optimizer=gradient_descent.GradientDescentOptimizer(0.01),
run_eagerly=testing_utils.should_run_eagerly())
# centered on 5.0, variance 10.0
x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3))
model.fit(x, x, epochs=4, verbose=0)
out = model.predict(x)
out -= np.reshape(keras.backend.eval(norm.beta), (1, 1, 1, 3))
out /= np.reshape(keras.backend.eval(norm.gamma), (1, 1, 1, 3))
np.testing.assert_allclose(np.mean(out, axis=(0, 1, 2)), 0.0, atol=1e-1)
np.testing.assert_allclose(np.std(out, axis=(0, 1, 2)), 1.0, atol=1e-1)
@keras_parameterized.run_all_keras_modes
def test_layernorm_correctness(self):
_run_layernorm_correctness_test(
normalization.LayerNormalization, dtype='float32')
@keras_parameterized.run_all_keras_modes
def test_layernorm_mixed_precision(self):
_run_layernorm_correctness_test(
normalization.LayerNormalization, dtype='float16')
def doOutputTest(self,
input_shape,
tol=1e-5,
norm_axis=None,
params_axis=-1,
dtype=None):
ndim = len(input_shape)
if norm_axis is None:
moments_axis = range(1, ndim)
elif isinstance(norm_axis, int):
if norm_axis < 0:
moments_axis = [norm_axis + ndim]
else:
moments_axis = [norm_axis]
else:
moments_axis = []
for dim in norm_axis:
if dim < 0:
dim = dim + ndim
moments_axis.append(dim)
moments_axis = tuple(moments_axis)
expected_shape = []
for i in range(ndim):
if i not in moments_axis:
expected_shape.append(input_shape[i])
expected_mean = np.zeros(expected_shape)
expected_var = np.ones(expected_shape)
for mu in [0.0, 1e2]:
for sigma in [1.0, 0.1]:
inputs = np.random.randn(*input_shape) * sigma + mu
inputs_t = constant_op.constant(inputs, shape=input_shape)
layer = normalization.LayerNormalization(
norm_axis=norm_axis, params_axis=params_axis, dtype=dtype)
outputs = layer(inputs_t)
beta = layer.beta
gamma = layer.gamma
for weight in layer.weights:
self.evaluate(weight.initializer)
outputs = self.evaluate(outputs)
beta = self.evaluate(beta)
gamma = self.evaluate(gamma)
# The mean and variance of the output should be close to 0 and 1
# respectively.
# Make sure that there are no NaNs
self.assertFalse(np.isnan(outputs).any())
mean = np.mean(outputs, axis=moments_axis)
var = np.var(outputs, axis=moments_axis)
# Layer-norm implemented in numpy
eps = 1e-12
expected_out = (
(gamma * (inputs - np.mean(
inputs, axis=moments_axis, keepdims=True)) /
np.sqrt(eps + np.var(
inputs, axis=moments_axis, keepdims=True))) + beta)
self.assertAllClose(expected_mean, mean, atol=tol, rtol=tol)
self.assertAllClose(expected_var, var, atol=tol)
# The full computation gets a bigger tolerance
self.assertAllClose(expected_out, outputs, atol=5 * tol)
@tf_test_util.run_in_graph_and_eager_modes
def testOutput2DInput(self):
self.doOutputTest((10, 300))
self.doOutputTest((10, 300), norm_axis=[0])
self.doOutputTest((10, 300), params_axis=[0, 1])
@tf_test_util.run_in_graph_and_eager_modes
def testOutput2DInputDegenerateNormAxis(self):
with self.assertRaisesRegexp(ValueError, r'Invalid axis: 2'):
self.doOutputTest((10, 300), norm_axis=2)
@tf_test_util.run_in_graph_and_eager_modes
def testOutput4DInput(self):
self.doOutputTest((100, 10, 10, 3))
@tf_test_util.run_in_graph_and_eager_modes
def testOutput4DInputNormOnInnermostAxis(self):
# Equivalent tests
shape = (100, 10, 10, 3)
self.doOutputTest(
shape, norm_axis=list(range(3, len(shape))), tol=1e-4, dtype='float64')
self.doOutputTest(shape, norm_axis=-1, tol=1e-4, dtype='float64')
@tf_test_util.run_in_graph_and_eager_modes
def testOutputSmallInput(self):
self.doOutputTest((10, 10, 10, 30))
@tf_test_util.run_in_graph_and_eager_modes
def testOutputSmallInputNormOnInnermostAxis(self):
self.doOutputTest((10, 10, 10, 30), norm_axis=3)
@tf_test_util.run_in_graph_and_eager_modes
def testOutputSmallInputNormOnMixedAxes(self):
self.doOutputTest((10, 10, 10, 30), norm_axis=[0, 3])
self.doOutputTest((10, 10, 10, 30), params_axis=[-2, -1])
self.doOutputTest((10, 10, 10, 30), norm_axis=[0, 3],
params_axis=[-3, -2, -1])
@tf_test_util.run_in_graph_and_eager_modes
def testOutputBigInput(self):
self.doOutputTest((1, 100, 100, 1))
self.doOutputTest((1, 100, 100, 1), norm_axis=[1, 2])
self.doOutputTest((1, 100, 100, 1), norm_axis=[1, 2],
params_axis=[-2, -1])
if __name__ == '__main__':
test.main()
| apache-2.0 | 2,995,893,033,428,413,000 | 36.115073 | 80 | 0.650524 | false |
meisamhe/GPLshared | Programming/MPI — AMath 483 583, Spring 2013 1.0 documentation_files/s2.py | 1 | 1744 | import time
import threading
# @include
class SpellCheckService:
w_last = closest_to_last_word = None
lock = threading.Lock()
@staticmethod
def service(req, resp):
w = req.extract_word_to_check_from_request()
result = None
with SpellCheckService.lock:
if w == SpellCheckService.w_last:
result = SpellCheckService.closest_to_last_word.copy()
if result is None:
result = closest_in_dictionary(w)
with SpellCheckService.lock:
SpellCheckService.w_last = w
SpellCheckService.closest_to_last_word = result
resp.encode_into_response(result)
# @exclude
class ServiceRequest:
def __init__(self, s):
self.request = s
def extract_word_to_check_from_request(self):
return self.request
class ServiceResponse:
response = None
def encode_into_response(self, s):
self.response = s
def closest_in_dictionary(w):
time.sleep(0.2)
return [w + '_result']
class ServiceThread(threading.Thread):
def __init__(self, data):
super().__init__()
self.data = data
def run(self):
start_time = time.time()
req = ServiceRequest(self.data)
resp = ServiceResponse()
SpellCheckService.service(req, resp)
print(self.data, '->', resp.response, '(%.3f sec)' %
(time.time() - start_time))
def main():
i = 0
while True:
ServiceThread('req:%d' % (i + 1)).start()
if i > 0:
# while req:i+1 is computed we could return req:i from the cache
ServiceThread('req:%d' % i).start()
time.sleep(0.5)
i += 1
if __name__ == '__main__':
main()
| gpl-3.0 | -6,428,526,257,127,913,000 | 22.567568 | 76 | 0.575115 | false |
gypsymauro/gestione-cantiere | build/lib.linux-x86_64-2.7/cantiere/admin.py | 1 | 1533 | from django.contrib import admin
# Register your models here.
from .models import Squadra
from .models import StatoSegnalazione
from .models import Segnalazione
from .models import StatoIntervento
from .models import Intervento
from .models import Risorsa
from .models import InterventoRisorsa
from .models import Costo
from .models import CentroCosto
from .models import Allegato
class InterventoRisorsaInline(admin.TabularInline):
model = InterventoRisorsa
exclude = ['created','created_by','modified','modified_by','deleted','note']
class RisorsaAdmin(admin.ModelAdmin):
inlines = (InterventoRisorsaInline,)
exclude = ['created','created_by','modified','modified_by','deleted']
class InterventoAdmin(admin.ModelAdmin):
inlines = (InterventoRisorsaInline,)
list_display = ['oggetto','data_inizio','stato','stampa_intervento']
list_editable = ['stato']
ordering = ['created']
exclude = ['created','created_by','modified','modified_by','deleted']
list_filter = ('stato','data_inizio','centro_costo','responsabile')
save_on_top = True
search_fields = ('oggetto','data_inizio')
admin.site.register(Squadra)
admin.site.register(StatoSegnalazione)
admin.site.register(Segnalazione)
admin.site.register(StatoIntervento)
admin.site.register(Intervento,InterventoAdmin)
admin.site.register(Risorsa,RisorsaAdmin)
admin.site.register(Costo)
admin.site.register(CentroCosto)
admin.site.register(Allegato)
#admin.site.register(InterventoMezzo)
#admin.site.register(InterventoPersona)
| gpl-2.0 | -2,517,305,136,308,949,000 | 32.326087 | 80 | 0.763862 | false |
SembeiNorimaki/Bioinformatics | EulerianCycle.py | 1 | 1903 | # Test passed :)
# TODO: split right here before the conditional.
import sys
def handle_input_output():
# handle input
graph = {}
while True:
try:
line = sys.stdin.readline().rstrip('\n')
left, right = line.split(' -> ')
if left in graph.keys():
graph[left].append(right)
else:
graph[left] = right.split(',')
except:
break # EOF
#print(graph)
# Execute main function
r = EulerianCycle(graph)
# handle output
print('->'.join(r))
def EulerianCycle(graph):
stack = []
location = None
circuit = []
# since it's an Eulerian Cycle we can start at any vertex
location = list(graph)[0]
# Repeat until the current vertex has no more out-going edges (neighbors)
# and the stack is empty.
while len(graph[location]) > 0 or len(stack) > 0:
if len(graph[location]) == 0: # If current vertex has no out-going edges
circuit.append(location) # add it to circuit
location = stack.pop() # remove the last vertex from the stack and set it as the current one
else: # otherwise
stack.append(location) # add the vertex to the stack
location = graph[location].pop() # take any of its neighbors
# remove the edge between that vertex and selected neighbor
# and set that neighbor as the current vertex
# Here we must append the first element at the end to close the cycle
# but since circuit is reversed, we append the last element at the beginning
circuit.insert(0, circuit[-1])
return circuit[::-1] # return the reversed circuit
if __name__ == '__main__':
handle_input_output() | mit | -4,300,464,103,968,979,000 | 33 | 115 | 0.553337 | false |
kittiu/account-payment | account_payment_return/models/payment_return.py | 1 | 15028 | # Copyright 2011-2012 7 i TRIA <http://www.7itria.cat>
# Copyright 2011-2012 Avanzosc <http://www.avanzosc.com>
# Copyright 2013 Pedro M. Baeza <[email protected]>
# Copyright 2014 Markus Schneider <[email protected]>
# Copyright 2016 Carlos Dauden <[email protected]>
# Copyright 2017 Luis M. Ontalba <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, fields, models
from odoo.exceptions import Warning as UserError
import odoo.addons.decimal_precision as dp
class PaymentReturn(models.Model):
_name = "payment.return"
_inherit = ['mail.thread']
_description = 'Payment return'
_order = 'date DESC, id DESC'
company_id = fields.Many2one(
'res.company', string='Company', required=True,
states={'done': [('readonly', True)],
'cancelled': [('readonly', True)]},
default=lambda self: self.env['res.company']._company_default_get(
'account'))
date = fields.Date(
string='Return date',
help="This date will be used as the account entry date.",
states={'done': [('readonly', True)],
'cancelled': [('readonly', True)]},
default=lambda x: fields.Date.today())
name = fields.Char(
string="Reference", required=True,
states={'done': [('readonly', True)],
'cancelled': [('readonly', True)]},
default=lambda self: self.env['ir.sequence'].next_by_code(
'payment.return'))
line_ids = fields.One2many(
comodel_name='payment.return.line', inverse_name='return_id',
states={'done': [('readonly', True)],
'cancelled': [('readonly', True)]})
journal_id = fields.Many2one(
comodel_name='account.journal', string='Bank journal', required=True,
states={'done': [('readonly', True)],
'cancelled': [('readonly', True)]})
move_id = fields.Many2one(
comodel_name='account.move',
string='Reference to the created journal entry',
states={'done': [('readonly', True)],
'cancelled': [('readonly', True)]})
state = fields.Selection(
selection=[('draft', 'Draft'),
('imported', 'Imported'),
('done', 'Done'),
('cancelled', 'Cancelled')],
string='State', readonly=True, default='draft',
track_visibility='onchange')
@api.multi
@api.constrains('line_ids')
def _check_duplicate_move_line(self):
def append_error(error_line):
error_list.append(
_("Payment Line: %s (%s) in Payment Return: %s") % (
', '.join(error_line.mapped('move_line_ids.name')),
error_line.partner_id.name,
error_line.return_id.name
)
)
error_list = []
all_move_lines = self.env['account.move.line']
for line in self.mapped('line_ids'):
for move_line in line.move_line_ids:
if move_line in all_move_lines:
append_error(line)
all_move_lines |= move_line
if (not error_list) and all_move_lines:
duplicate_lines = self.env['payment.return.line'].search([
('move_line_ids', 'in', all_move_lines.ids),
('return_id.state', '=', 'done'),
])
if duplicate_lines:
for line in duplicate_lines:
append_error(line)
if error_list:
raise UserError(
_("Payment reference must be unique"
"\n%s") % '\n'.join(error_list)
)
def _get_move_amount(self, return_line):
return return_line.amount
def _prepare_invoice_returned_vals(self):
return {'returned_payment': True}
@api.multi
def unlink(self):
if self.filtered(lambda x: x.state == 'done'):
raise UserError(_(
"You can not remove a payment return if state is 'Done'"))
return super(PaymentReturn, self).unlink()
@api.multi
def button_match(self):
self.mapped('line_ids').filtered(lambda x: (
(not x.move_line_ids) and x.reference))._find_match()
self._check_duplicate_move_line()
@api.multi
def _prepare_return_move_vals(self):
"""Prepare the values for the journal entry created from the return.
:return: Dictionary with the record values.
"""
self.ensure_one()
return {
'name': '/',
'ref': _('Return %s') % self.name,
'journal_id': self.journal_id.id,
'date': self.date,
'company_id': self.company_id.id,
}
@api.multi
def action_confirm(self):
self.ensure_one()
# Check for incomplete lines
if self.line_ids.filtered(lambda x: not x.move_line_ids):
raise UserError(
_("You must input all moves references in the payment "
"return."))
invoices = self.env['account.invoice']
move_line_obj = self.env['account.move.line']
move = self.env['account.move'].create(
self._prepare_return_move_vals()
)
total_amount = 0.0
for return_line in self.line_ids:
move_amount = self._get_move_amount(return_line)
move_line2 = self.env['account.move.line'].with_context(
check_move_validity=False).create({
'name': move.ref,
'debit': move_amount,
'credit': 0.0,
'account_id': return_line.move_line_ids[0].account_id.id,
'move_id': move.id,
'partner_id': return_line.partner_id.id,
'journal_id': move.journal_id.id,
})
total_amount += move_amount
for move_line in return_line.move_line_ids:
returned_moves = move_line.matched_debit_ids.mapped(
'debit_move_id')
invoices |= returned_moves.mapped('invoice_id')
move_line.remove_move_reconcile()
(move_line | move_line2).reconcile()
return_line.move_line_ids.mapped('matched_debit_ids').write(
{'origin_returned_move_ids': [(6, 0, returned_moves.ids)]})
if return_line.expense_amount:
expense_lines_vals = []
expense_lines_vals.append({
'name': move.ref,
'move_id': move.id,
'debit': 0.0,
'credit': return_line.expense_amount,
'partner_id': return_line.expense_partner_id.id,
'account_id': (return_line.return_id.journal_id.
default_credit_account_id.id),
})
expense_lines_vals.append({
'move_id': move.id,
'debit': return_line.expense_amount,
'name': move.ref,
'credit': 0.0,
'partner_id': return_line.expense_partner_id.id,
'account_id': return_line.expense_account.id,
})
for expense_line_vals in expense_lines_vals:
move_line_obj.with_context(
check_move_validity=False).create(expense_line_vals)
extra_lines_vals = return_line._prepare_extra_move_lines(move)
for extra_line_vals in extra_lines_vals:
move_line_obj.create(extra_line_vals)
move_line_obj.create({
'name': move.ref,
'debit': 0.0,
'credit': total_amount,
'account_id': self.journal_id.default_credit_account_id.id,
'move_id': move.id,
'journal_id': move.journal_id.id,
})
# Write directly because we returned payments just now
invoices.write(self._prepare_invoice_returned_vals())
move.post()
self.write({'state': 'done', 'move_id': move.id})
return True
@api.multi
def action_cancel(self):
invoices = self.env['account.invoice']
for move_line in self.mapped('move_id.line_ids').filtered(
lambda x: x.user_type_id.type == 'receivable'):
for partial_line in move_line.matched_credit_ids:
invoices |= partial_line.origin_returned_move_ids.mapped(
'invoice_id')
lines2reconcile = (partial_line.origin_returned_move_ids |
partial_line.credit_move_id)
partial_line.credit_move_id.remove_move_reconcile()
lines2reconcile.reconcile()
self.move_id.button_cancel()
self.move_id.unlink()
self.write({'state': 'cancelled', 'move_id': False})
invoices.check_payment_return()
return True
@api.multi
def action_draft(self):
self.write({'state': 'draft'})
return True
class PaymentReturnLine(models.Model):
_name = "payment.return.line"
_description = 'Payment return lines'
return_id = fields.Many2one(
comodel_name='payment.return', string='Payment return',
required=True, ondelete='cascade')
concept = fields.Char(
string='Concept',
help="Read from imported file. Only for reference.")
reason_id = fields.Many2one(
comodel_name='payment.return.reason',
oldname="reason",
string='Return reason',
)
reference = fields.Char(
string='Reference',
help="Reference to match moves from related documents")
move_line_ids = fields.Many2many(
comodel_name='account.move.line', string='Payment Reference')
date = fields.Date(
string='Return date', help="Only for reference",
)
partner_name = fields.Char(
string='Partner name', readonly=True,
help="Read from imported file. Only for reference.")
partner_id = fields.Many2one(
comodel_name='res.partner', string='Customer',
domain="[('customer', '=', True)]")
amount = fields.Float(
string='Amount',
help="Returned amount. Can be different from the move amount",
digits=dp.get_precision('Account'))
expense_account = fields.Many2one(
comodel_name='account.account', string='Charges Account')
expense_amount = fields.Float(string='Charges Amount')
expense_partner_id = fields.Many2one(
comodel_name="res.partner", string="Charges Partner",
domain=[('supplier', '=', True)],
)
@api.multi
def _compute_amount(self):
for line in self:
line.amount = sum(line.move_line_ids.mapped('credit'))
@api.multi
def _get_partner_from_move(self):
for line in self.filtered(lambda x: not x.partner_id):
partners = line.move_line_ids.mapped('partner_id')
if len(partners) > 1:
raise UserError(
_("All payments must be owned by the same partner"))
line.partner_id = partners[:1].id
line.partner_name = partners[:1].name
@api.onchange('move_line_ids')
def _onchange_move_line(self):
self._compute_amount()
@api.onchange('expense_amount')
def _onchange_expense_amount(self):
if self.expense_amount:
journal = self.return_id.journal_id
self.expense_account = journal.default_expense_account_id
self.expense_partner_id = journal.default_expense_partner_id
@api.multi
def match_invoice(self):
for line in self:
domain = line.partner_id and [
('partner_id', '=', line.partner_id.id)] or []
domain.append(('number', '=', line.reference))
invoice = self.env['account.invoice'].search(domain)
if invoice:
payments = invoice.payment_move_line_ids
if payments:
line.move_line_ids = payments[0].ids
if not line.concept:
line.concept = _('Invoice: %s') % invoice.number
@api.multi
def match_move_lines(self):
for line in self:
domain = line.partner_id and [
('partner_id', '=', line.partner_id.id)] or []
if line.return_id.journal_id:
domain.append(('journal_id', '=',
line.return_id.journal_id.id))
domain.extend([
('account_id.internal_type', '=', 'receivable'),
('reconciled', '=', True),
'|',
('name', '=', line.reference),
('ref', '=', line.reference),
])
move_lines = self.env['account.move.line'].search(domain)
if move_lines:
line.move_line_ids = move_lines.ids
if not line.concept:
line.concept = (_('Move lines: %s') %
', '.join(move_lines.mapped('name')))
@api.multi
def match_move(self):
for line in self:
domain = line.partner_id and [
('partner_id', '=', line.partner_id.id)] or []
domain.append(('name', '=', line.reference))
move = self.env['account.move'].search(domain)
if move:
if len(move) > 1:
raise UserError(
_("More than one matches to move reference: %s") %
self.reference)
line.move_line_ids = move.line_ids.filtered(lambda l: (
l.user_type_id.type == 'receivable' and l.reconciled
)).ids
if not line.concept:
line.concept = _('Move: %s') % move.ref
@api.multi
def _find_match(self):
# we filter again to remove all ready matched lines in inheritance
lines2match = self.filtered(lambda x: (
(not x.move_line_ids) and x.reference))
lines2match.match_invoice()
lines2match = lines2match.filtered(lambda x: (
(not x.move_line_ids) and x.reference))
lines2match.match_move_lines()
lines2match = lines2match.filtered(lambda x: (
(not x.move_line_ids) and x.reference))
lines2match.match_move()
self._get_partner_from_move()
self.filtered(lambda x: not x.amount)._compute_amount()
@api.multi
def _prepare_extra_move_lines(self, move):
"""Include possible extra lines in the return journal entry for other
return concepts.
:param self: Reference to the payment return line.
:param move: Reference to the journal entry created for the return.
:return: A list with dictionaries of the extra move lines to add
"""
self.ensure_one()
return []
| agpl-3.0 | 5,260,136,324,633,751,000 | 39.506739 | 79 | 0.544118 | false |
dpshelio/sunpy | examples/units_and_coordinates/planet_locations.py | 1 | 1252 | """
===================================
Getting the location of the planets
===================================
How to get the position of planetary bodies im the solar system using
`astropy's solar system ephemeris <http://docs.astropy.org/en/stable/coordinates/solarsystem.html#solar-system-ephemerides>`__ information and SunPy.
"""
import matplotlib.pyplot as plt
from astropy.time import Time
from sunpy.coordinates import get_body_heliographic_stonyhurst
##############################################################################
# Lets grab the positions of each of the planets in Heliographic Stonyhurst
# coordinates.
obstime = Time('2014-05-15T07:54:00.005')
planet_list = ['earth', 'venus', 'mars', 'mercury', 'jupiter', 'neptune', 'uranus', 'sun']
planet_coord = [get_body_heliographic_stonyhurst(this_planet, time=obstime) for this_planet in planet_list]
##############################################################################
# Let's plot the results. Remember the Sun is at the center of this coordinate
# system.
ax = plt.subplot(projection='polar')
for this_planet, this_coord in zip(planet_list, planet_coord):
plt.polar(this_coord.lon.to('rad'), this_coord.radius, 'o', label=this_planet)
plt.legend()
plt.show()
| bsd-2-clause | -3,296,977,724,421,778,000 | 42.172414 | 149 | 0.615815 | false |
kohout/djangocms-getaweb-topstory | djangocms_topstory/south_migrations/0008_auto__chg_field_topstoryitem_object_id.py | 1 | 5765 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'TopStoryItem.object_id'
db.alter_column(u'djangocms_topstory_topstoryitem', 'object_id', self.gf('gfklookupwidget.fields.GfkLookupField')(null=True))
def backwards(self, orm):
# Changing field 'TopStoryItem.object_id'
db.alter_column(u'djangocms_topstory_topstoryitem', 'object_id', self.gf('gfklookupwidget.fields.GfkLookupField')(default=''))
models = {
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'djangocms_topstory.topstory': {
'Meta': {'object_name': 'TopStory', '_ormbases': ['cms.CMSPlugin']},
u'cmsplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['cms.CMSPlugin']", 'unique': 'True', 'primary_key': 'True'}),
'height': ('django.db.models.fields.CharField', [], {'default': "'434px'", 'max_length': '10'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
'width': ('django.db.models.fields.CharField', [], {'default': "'100%'", 'max_length': '10'})
},
u'djangocms_topstory.topstoryitem': {
'Meta': {'ordering': "['ordering']", 'object_name': 'TopStoryItem'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'external_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'focal_point_x': ('django.db.models.fields.PositiveIntegerField', [], {'default': '50'}),
'focal_point_y': ('django.db.models.fields.PositiveIntegerField', [], {'default': '50'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'image_height': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True'}),
'image_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'null': 'True'}),
'object_id': ('gfklookupwidget.fields.GfkLookupField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'ordering': ('django.db.models.fields.PositiveIntegerField', [], {}),
'size': ('django.db.models.fields.CharField', [], {'default': "'fullscreen'", 'max_length': '50'}),
'teaser_layout': ('django.db.models.fields.CharField', [], {'default': "'white'", 'max_length': '100'}),
'teaser_position': ('django.db.models.fields.CharField', [], {'default': "'left'", 'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
'topstory': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'topstory_items'", 'to': u"orm['djangocms_topstory.TopStory']"})
}
}
complete_apps = ['djangocms_topstory'] | unlicense | 4,711,748,214,628,988,000 | 72.923077 | 161 | 0.572246 | false |
Khan/flask-wtf | tests/__init__.py | 1 | 9854 | from __future__ import with_statement
import re
from flask import Flask, Response, render_template, jsonify
from flaskext.uploads import UploadSet, IMAGES, TEXT, configure_uploads
from flaskext.testing import TestCase as _TestCase
from flaskext.wtf import Form, TextField, FileField, HiddenField, \
SubmitField, Required, FieldList, file_required, file_allowed, html5
class DummyField(object):
def __init__(self, data, name='f', label='', id='', type='TextField'):
self.data = data
self.name = name
self.label = label
self.id = id
self.type = type
_value = lambda x: x.data
__unicode__ = lambda x: x.data
__call__ = lambda x, **k: x.data
__iter__ = lambda x: iter(x.data)
iter_choices = lambda x: iter(x.data)
class TestCase(_TestCase):
def create_app(self):
class MyForm(Form):
name = TextField("Name", validators=[Required()])
submit = SubmitField("Submit")
class HiddenFieldsForm(Form):
name = HiddenField()
url = HiddenField()
method = HiddenField()
secret = HiddenField()
submit = SubmitField("Submit")
def __init__(self, *args, **kwargs):
super(HiddenFieldsForm, self).__init__(*args, **kwargs)
self.method.name = '_method'
class SimpleForm(Form):
pass
app = Flask(__name__)
app.secret_key = "secret"
@app.route("/", methods=("GET", "POST"))
def index():
form = MyForm()
if form.validate_on_submit():
name = form.name.data.upper()
else:
name = ''
return render_template("index.html",
form=form,
name=name)
@app.route("/simple/", methods=("POST",))
def simple():
form = SimpleForm()
form.validate()
assert form.csrf_enabled
assert not form.validate()
assert not form.validate()
return "OK"
@app.route("/hidden/")
def hidden():
form = HiddenFieldsForm()
return render_template("hidden.html", form=form)
@app.route("/ajax/", methods=("POST",))
def ajax_submit():
form = MyForm()
if form.validate_on_submit():
return jsonify(name=form.name.data,
success=True,
errors=None)
return jsonify(name=None,
errors=form.errors,
success=False)
return app
class HTML5Tests(TestCase):
field = DummyField("name", id="name", name="name")
def test_url_input(self):
assert html5.URLInput()(self.field) == \
'<input id="name" name="name" type="url" value="name" />'
def test_search_input(self):
assert html5.SearchInput()(self.field) == \
'<input id="name" name="name" type="search" value="name" />'
def test_date_input(self):
assert html5.DateInput()(self.field) == \
'<input id="name" name="name" type="date" value="name" />'
def test_email_input(self):
assert html5.EmailInput()(self.field) == \
'<input id="name" name="name" type="email" value="name" />'
def test_number_input(self):
assert html5.NumberInput()(self.field, min=0, max=10) == \
'<input id="name" max="10" min="0" name="name" type="number" value="name" />'
def test_range_input(self):
assert html5.RangeInput()(self.field, min=0, max=10) == \
'<input id="name" max="10" min="0" name="name" type="range" value="name" />'
# FILE UPLOAD TESTS #
images = UploadSet("images", IMAGES)
text = UploadSet("text", TEXT)
class FileUploadForm(Form):
upload = FileField("Upload file")
class MultipleFileUploadForm(Form):
uploads = FieldList(FileField("upload"), min_entries=3)
class ImageUploadForm(Form):
upload = FileField("Upload file",
validators=[file_required(),
file_allowed(images)])
class TextUploadForm(Form):
upload = FileField("Upload file",
validators=[file_required(),
file_allowed(text)])
class TestFileUpload(TestCase):
def create_app(self):
app = super(TestFileUpload, self).create_app()
app.config['CSRF_ENABLED'] = False
app.config['UPLOADED_FILES_DEST'] = 'uploads'
app.config['UPLOADS_DEFAULT_DEST'] = 'uploads'
configure_uploads(app, [images, text])
@app.route("/upload-image/", methods=("POST",))
def upload_image():
form = ImageUploadForm()
if form.validate_on_submit():
return "OK"
return "invalid"
@app.route("/upload-text/", methods=("POST",))
def upload_text():
form = TextUploadForm()
if form.validate_on_submit():
return "OK"
return "invalid"
@app.route("/upload-multiple/", methods=("POST",))
def upload_multiple():
form = MultipleFileUploadForm()
if form.validate_on_submit():
assert len(form.uploads.entries) == 3
for upload in form.uploads.entries:
assert upload.file is not None
return "OK"
@app.route("/upload-multiple-field/", methods=("POST",))
def upload_multiple_field():
form = MultipleFileFieldUploadForm()
if form.validate_on_submit():
assert len(form.uploads.files) == 3
for upload in form.uploads.files:
assert "flask.png" in upload.filename
return "OK"
@app.route("/upload/", methods=("POST",))
def upload():
form = FileUploadForm()
if form.validate_on_submit():
filedata = form.upload.file
else:
filedata = None
return render_template("upload.html",
filedata=filedata,
form=form)
return app
def test_multiple_files(self):
fps = [self.app.open_resource("flask.png") for i in xrange(3)]
data = [("uploads-%d" % i, fp) for i, fp in enumerate(fps)]
response = self.client.post("/upload-multiple/", data=dict(data))
assert response.status_code == 200
def test_valid_file(self):
with self.app.open_resource("flask.png") as fp:
response = self.client.post("/upload-image/",
data={'upload' : fp})
assert "OK" in response.data
def test_missing_file(self):
response = self.client.post("/upload-image/",
data={'upload' : "test"})
assert "invalid" in response.data
def test_invalid_file(self):
with self.app.open_resource("flask.png") as fp:
response = self.client.post("/upload-text/",
data={'upload' : fp})
assert "invalid" in response.data
def test_invalid_file(self):
response = self.client.post("/upload/",
data={'upload' : 'flask.png'})
assert "flask.png</h3>" not in response.data
class TestValidateOnSubmit(TestCase):
def test_not_submitted(self):
response = self.client.get("/")
assert 'DANNY' not in response.data
def test_submitted_not_valid(self):
self.app.config['CSRF_ENABLED'] = False
response = self.client.post("/", data={})
assert 'DANNY' not in response.data
def test_submitted_and_valid(self):
self.app.config['CSRF_ENABLED'] = False
response = self.client.post("/", data={"name" : "danny"})
print response.data
assert 'DANNY' in response.data
class TestHiddenTag(TestCase):
def test_hidden_tag(self):
response = self.client.get("/hidden/")
assert response.data.count('type="hidden"') == 5
assert 'name="_method"' in response.data
class TestCSRF(TestCase):
def test_csrf_token(self):
response = self.client.get("/")
assert '<div style="display:none;"><input id="csrf" name="csrf" type="hidden" value' in response.data
def test_invalid_csrf(self):
response = self.client.post("/", data={"name" : "danny"})
assert 'DANNY' not in response.data
assert "Missing or invalid CSRF token" in response.data
def test_csrf_disabled(self):
self.app.config['CSRF_ENABLED'] = False
response = self.client.post("/", data={"name" : "danny"})
assert 'DANNY' in response.data
def test_validate_twice(self):
response = self.client.post("/simple/", data={})
self.assert_200(response)
def test_ajax(self):
response = self.client.post("/ajax/",
data={"name" : "danny"},
headers={'X-Requested-With' : 'XMLHttpRequest'})
assert response.status_code == 200
def test_valid_csrf(self):
response = self.client.get("/")
pattern = re.compile(r'name="csrf" type="hidden" value="([0-9a-zA-Z-]*)"')
match = pattern.search(response.data)
assert match
csrf_token = match.groups()[0]
response = self.client.post("/", data={"name" : "danny",
"csrf" : csrf_token})
assert "DANNY" in response.data
| bsd-3-clause | -3,217,176,349,144,430,600 | 28.240356 | 109 | 0.532576 | false |
Djimmer/obts | Fuzzer/function_scanner.py | 1 | 6412 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import socket
import time
import binascii
import os
import sys
from libmich.formats import *
import gsm_um
import smarter_fuzzer_function_def as fuzzer
import itertools
from random import randint
from math import factorial
import logging
from pythonjsonlogger import jsonlogger
# Fill in current mobile device
if len(sys.argv) > 2:
device = sys.argv[1];
imsi = sys.argv[2];
else:
print("ERROR: Device name not found.")
print("Call the script with: ./smarter_fuzzer #DEVICE #IMSI");
print("Where #DEVICE is the name and #IMSI is the IMSI of the mobile device.");
sys.exit(0);
############################################### SETTINGS #############################################
# Default OpenBTS port
TESTCALL_PORT = 28670;
# Log file location
date = str(time.strftime("%Y%m%d-%H%M%S"));
log_all_functions_JSON = "logs/functions/" + device + "_log_" + date + ".json";
# Creat socket
tcsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
tcsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
tcsock.settimeout(2)
ocsock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
ocsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
HOST = 'localhost' # Symbolic name meaning all available interfaces
PORT = 21337 # Arbitrary non-privileged port
ocsock.bind((HOST, PORT))
ocsock.settimeout(20)
# Initialize JSON logger
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# create a file handler
handler = logging.FileHandler(log_all_functions_JSON)
handler.setLevel(logging.INFO)
# create a logging format
formatter = jsonlogger.JsonFormatter()
handler.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(handler)
logger.info({
"message": "Function Scanner; Device and SIM information",
"device": device,
"imsi" : imsi});
################################################# LOG ################################################
def log_packets(run, maxRun, packet, parsed_packet, reply, parsed_reply):
if "ERROR" in parsed_reply:
parsed_reply = "libmich ERROR";
logger.info({
"message": run,
"maxRun" : maxRun,
"packet": str(packet).encode("hex"),
"parsed_packet": parsed_packet,
"reply": str(reply).encode("hex"),
"parsed_reply": parsed_reply
})
############################################## CHANNEL ###############################################
# Send a restart to OpenBTS to establish a new channel
def establishNewChannel():
restart = "RESTART";
print("Channel restart: Establishing a new channel, this may take a second.");
tcsock.sendto(restart, ('127.0.0.1', TESTCALL_PORT));
# Wait for OpenBTS to confirm new channel.
try:
reply = ocsock.recv(20000)
except:
print "Could not establish a new channel.";
return False;
print "New channel established, fuzzing will continue.";
time.sleep(1);
return True;
def send(tcsock, packet):
try:
tcsock.sendto(packet, ('127.0.0.1', TESTCALL_PORT))
reply = tcsock.recv(1024)
except socket.timeout:
print "socket.timeout: Mobile device is not responding";
return False
return packetImplemented(reply)
def packetImplemented(reply):
parsed_reply = repr(L3Mobile.parse_L3(reply));
print "Received packet: ", str(reply).encode("hex") + "\n";
print "GSM_UM interpetation: " + '\n' + parsed_reply + "\n\n";
if "RELEASE_COMPLETE" in parsed_reply:
return "Restart";
elif((str(reply).encode("hex") == "786e430200")): #MDL_ERROR_INDICATION
return "Restart";
elif((str(reply).encode("hex") == "789ea400")): #MDL_ERROR_INDICATION
return "Restart";
elif((str(reply).encode("hex") == "06126100")):
return "Skip";
elif "Message type non-existent or not implemented" in parsed_reply:
return "Skip";
else:
return reply;
############################################### UTILS ################################################
def printPacket(packet, currentRun, total_runs):
print('------------------------------- INPUT -------------------------------' + '\n');
print('Run ' + str(currentRun) + "/" + str(total_runs) + '\n');
# Make the packet readable
if(len(packet) % 2 == 0):
printable = str(packet).encode("hex");
print "Current complete packet: " + printable + '\n';
# Decode printable hex to make it usable for L3Mobile.
# Adding the \x for the bytes.
l3msg_input = repr(L3Mobile.parse_L3(str(packet)));
print "GSM_UM interpetation: \n " + l3msg_input + '\n\n';
print "------------------------------- OUTPUT -------------------------------" + '\n';
############################################ SMART FUZZER ############################################
# This fuzzer targets fields with variable length
# Tries all different bytes for length byte
# Tries random bytes for a range of lengths
######################################################################################################
# Fuzzer specific settings
maxPacketAttempt = 5;
currentPacketAttempt = 1;
protocols = [3];
currentRun = 1;
total_runs = len(protocols) * 256;
print "Total amount of runs: " + str(total_runs);
time.sleep(1);
for i in protocols:
firstByte = "{0:0{1}x}".format(i,2);
n = 1;
while n < 256:
secondByte = "{0:0{1}x}".format(n,2);
if(i == 5 and n == 17):
# Skip because the packet 0511 is a Authentication Reject
# and disconnects the mobile device
secondByte = "{0:0{1}x}".format(n+1,2);
packet = "\\x" + str(firstByte) + "\\x" + str(secondByte);
packet = packet.replace('\\x', '').decode('hex');
print "Packet: " + str(packet).encode("hex");
printPacket(packet, currentRun, total_runs);
# Send packet to the mobile device.
result = send(tcsock, packet);
if(result == "Restart" or result == False):
currentPacketAttempt = currentPacketAttempt + 1;
establishNewChannel();
if(currentPacketAttempt >= maxPacketAttempt):
parsed_packet = repr(L3Mobile.parse_L3(packet));
log_packets(currentRun, total_runs, packet, parsed_packet, "None", "None");
currentRun = currentRun + 1;
n = n + 1;
elif(result =="Skip"):
currentRun = currentRun + 1;
currentPacketAttempt = 0;
n = n + 1;
else:
parsed_result = repr(L3Mobile.parse_L3(result));
parsed_packet = repr(L3Mobile.parse_L3(packet));
log_packets(currentRun, total_runs, packet, parsed_packet, result, parsed_result);
currentRun = currentRun + 1;
currentPacketAttempt = 0;
n = n + 1;
| agpl-3.0 | 7,928,618,940,592,154,000 | 29.980676 | 102 | 0.611822 | false |
jordiclariana/ansible | lib/ansible/modules/cloud/vmware/vmware_guest.py | 1 | 70396 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This module is also sponsored by E.T.A.I. (www.etai.fr)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: vmware_guest
short_description: Manages virtual machines in vCenter
description:
- Uses pyvmomi to ...
    - copy a template to a new virtual machine
    - power on/power off/restart a virtual machine
    - remove a virtual machine
version_added: 2.2
author:
- James Tanner (@jctanner) <[email protected]>
- Loic Blot (@nerzhul) <[email protected]>
notes:
- Tested on vSphere 6.0
requirements:
- "python >= 2.6"
- PyVmomi
options:
state:
description:
            - What state should the virtual machine be in?
            - If state is set to present and the VM exists, ensure the VM configuration conforms to the task arguments
required: True
choices: ['present', 'absent', 'poweredon', 'poweredoff', 'restarted', 'suspended']
name:
description:
- Name of the newly deployed guest
required: True
name_match:
description:
            - If multiple VMs match the name, use the first or last one found
required: False
default: 'first'
choices: ['first', 'last']
uuid:
description:
            - UUID of the instance to manage if known; this is VMware's unique identifier.
- This is required if name is not supplied.
required: False
template:
description:
- Template used to create guest.
- If this value is not set, VM is created without using a template.
- If the guest exists already this setting will be ignored.
required: False
is_template:
description:
- Flag the instance as a template
required: False
default: False
version_added: "2.3"
folder:
description:
- Destination folder path for the new guest
required: False
hardware:
description:
- "Manage some VM hardware attributes."
- "Valid attributes are: memory_mb, num_cpus and scsi"
- "scsi: Valid values are buslogic, lsilogic, lsilogicsas and paravirtual (default)"
required: False
guest_id:
description:
- "Set the guest ID (Debian, RHEL, Windows...)"
- "This field is required when creating a VM"
- "Valid values are referenced here: https://www.vmware.com/support/developer/converter-sdk/conv55_apireference/vim.vm.GuestOsDescriptor.GuestOsIdentifier.html"
required: False
version_added: "2.3"
disk:
description:
- "A list of disks to add"
- "Valid attributes are: size_[tb,gb,mb,kb], type, datastore and autoselect_datastore"
- "type: Valid value is thin (default: None)"
- "datastore: Datastore to use for the disk. If autoselect_datastore is True, filter datastore selection."
- "autoselect_datastore (bool): select the less used datastore."
required: False
resource_pool:
description:
        - Assign the virtual machine to the given resource pool.
- Resource pool should be child of the selected host parent
required: False
default: None
version_added: "2.3"
wait_for_ip_address:
description:
        - Wait until vCenter detects an IP address for the guest.
required: False
force:
description:
- Ignore warnings and complete the actions
required: False
datacenter:
description:
- Destination datacenter for the deploy operation
required: True
cluster:
description:
- The cluster name where the VM will run.
required: False
version_added: "2.3"
esxi_hostname:
description:
- The esxi hostname where the VM will run.
required: False
annotation:
description:
- A note or annotation to include in the VM
required: False
version_added: "2.3"
customize:
description:
- Should customization spec be applied. This is only used when deploying a template.
required: False
version_added: "2.3"
networks:
description:
        - Networks to use; each entry should include the VM network name or VLAN, plus ip and gateway.
        - You can add an optional 'mac' field to set the MAC address.
required: False
version_added: "2.3"
dns_servers:
description:
- DNS servers to use
required: False
version_added: "2.3"
domain:
description:
- Domain to use while customizing
required: False
version_added: "2.3"
snapshot_op:
description:
        - A key, value pair of snapshot operation types and their additional required parameters.
        - Valid op_type values are create, remove, revert, list_all, list_current and remove_all.
required: False
version_added: "2.3"
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
# Example from Ansible playbook
#
# Create a VM from a template
#
- name: create the VM
vmware_guest:
validate_certs: False
hostname: 192.0.2.44
username: [email protected]
password: vmware
name: testvm_2
state: poweredon
folder: testvms
guest_id: centos64guest
disk:
- size_gb: 10
type: thin
datastore: g73_datastore
hardware:
memory_mb: 512
num_cpus: 1
scsi: paravirtual
datacenter: datacenter1
esxi_hostname: 192.0.2.117
template: template_el7
wait_for_ip_address: yes
register: deploy
#
# Create a VM and flag it as a template
#
- name: create VM template
vmware_guest:
validate_certs: False
hostname: 192.0.2.88
username: [email protected]
password: vmware
name: testvm_6
folder: testvms
is_template: yes
guest_id: debian6_64Guest
resource_pool: highperformance_pool
disk:
- size_gb: 10
type: thin
datastore: g73_datastore
hardware:
memory_mb: 512
num_cpus: 1
scsi: lsilogic
datacenter: datacenter1
cluster: vmware_cluster_esx
wait_for_ip_address: yes
register: deploy
#
# Clone Template and customize
#
- name: Clone template and customize
vmware_guest:
hostname: "192.168.1.209"
username: "[email protected]"
password: "vmware"
validate_certs: False
name: testvm-2
datacenter: datacenter1
cluster: cluster
template: template_el7
customize: True
domain: "example.com"
dns_servers: ['192.168.1.1','192.168.1.2']
networks:
'192.168.1.0/24':
network: 'VM Network'
gateway: '192.168.1.1'
ip: "192.168.1.100"
mac: "aa:bb:dd:aa:00:14"
#
# Gather facts only
#
- name: gather the VM facts
vmware_guest:
validate_certs: False
hostname: 192.168.1.209
username: [email protected]
password: vmware
name: testvm_2
esxi_hostname: 192.168.1.117
state: gatherfacts
register: facts
### Snapshot Operations
# Create snapshot
- vmware_guest:
hostname: 192.168.1.209
username: [email protected]
password: vmware
validate_certs: False
name: dummy_vm
snapshot_op:
op_type: create
name: snap1
description: snap1_description
# Remove a snapshot
- vmware_guest:
hostname: 192.168.1.209
username: [email protected]
password: vmware
validate_certs: False
name: dummy_vm
snapshot_op:
op_type: remove
name: snap1
# Revert to a snapshot
- vmware_guest:
hostname: 192.168.1.209
username: [email protected]
password: vmware
validate_certs: False
name: dummy_vm
snapshot_op:
op_type: revert
name: snap1
# List all snapshots of a VM
- vmware_guest:
hostname: 192.168.1.209
username: [email protected]
password: vmware
validate_certs: False
name: dummy_vm
snapshot_op:
op_type: list_all
# List current snapshot of a VM
- vmware_guest:
hostname: 192.168.1.209
username: [email protected]
password: vmware
validate_certs: False
name: dummy_vm
snapshot_op:
op_type: list_current
# Remove all snapshots of a VM
- vmware_guest:
hostname: 192.168.1.209
username: [email protected]
password: vmware
validate_certs: False
name: dummy_vm
snapshot_op:
op_type: remove_all
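#
# Remove a VM by UUID (illustrative values; the UUID below is hypothetical
# and would normally come from a previous run's gathered facts)
#
- vmware_guest:
    hostname: 192.168.1.209
    username: [email protected]
    password: vmware
    validate_certs: False
    name: dummy_vm
    uuid: 421e4592-c069-924d-ce20-7e7533fab926
    state: absent
    force: True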
'''
RETURN = """
instance:
    description: metadata about the new virtual machine
returned: always
type: dict
sample: None
"""
import os
import time
from netaddr import IPNetwork, IPAddress
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.urls import fetch_url
from ansible.module_utils.vmware import get_all_objs, connect_to_api
from ansible.module_utils.pycompat24 import get_exception
try:
import json
except ImportError:
import simplejson as json
HAS_PYVMOMI = False
try:
import pyVmomi
from pyVmomi import vim
HAS_PYVMOMI = True
except ImportError:
pass
class PyVmomiDeviceHelper(object):
""" This class is a helper to create easily VMWare Objects for PyVmomiHelper """
def __init__(self, module):
self.module = module
self.next_disk_unit_number = 0
@staticmethod
def create_scsi_controller(scsi_type):
scsi_ctl = vim.vm.device.VirtualDeviceSpec()
scsi_ctl.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
if scsi_type == 'lsilogic':
scsi_ctl.device = vim.vm.device.VirtualLsiLogicController()
elif scsi_type == 'paravirtual':
scsi_ctl.device = vim.vm.device.ParaVirtualSCSIController()
elif scsi_type == 'buslogic':
scsi_ctl.device = vim.vm.device.VirtualBusLogicController()
elif scsi_type == 'lsilogicsas':
scsi_ctl.device = vim.vm.device.VirtualLsiLogicSASController()
scsi_ctl.device.deviceInfo = vim.Description()
scsi_ctl.device.slotInfo = vim.vm.device.VirtualDevice.PciBusSlotInfo()
scsi_ctl.device.slotInfo.pciSlotNumber = 16
scsi_ctl.device.controllerKey = 100
scsi_ctl.device.unitNumber = 3
scsi_ctl.device.busNumber = 0
scsi_ctl.device.hotAddRemove = True
scsi_ctl.device.sharedBus = 'noSharing'
scsi_ctl.device.scsiCtlrUnitNumber = 7
return scsi_ctl
@staticmethod
def is_scsi_controller(device):
return isinstance(device, vim.vm.device.VirtualLsiLogicController) or \
isinstance(device, vim.vm.device.ParaVirtualSCSIController) or \
isinstance(device, vim.vm.device.VirtualBusLogicController) or \
isinstance(device, vim.vm.device.VirtualLsiLogicSASController)
def create_scsi_disk(self, scsi_ctl, disk_index=None):
diskspec = vim.vm.device.VirtualDeviceSpec()
diskspec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
diskspec.fileOperation = vim.vm.device.VirtualDeviceSpec.FileOperation.create
diskspec.device = vim.vm.device.VirtualDisk()
diskspec.device.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
diskspec.device.backing.diskMode = 'persistent'
diskspec.device.controllerKey = scsi_ctl.device.key
assert self.next_disk_unit_number != 7
assert disk_index != 7
"""
Configure disk unit number.
"""
if disk_index is not None:
diskspec.device.unitNumber = disk_index
self.next_disk_unit_number = disk_index + 1
else:
diskspec.device.unitNumber = self.next_disk_unit_number
self.next_disk_unit_number += 1
# unit number 7 is reserved to SCSI controller, increase next index
if self.next_disk_unit_number == 7:
self.next_disk_unit_number += 1
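            # e.g. nine disks added in sequence receive unit numbers
            # 0-6, 8 and 9; unit 7 stays reserved for the SCSI controller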
return diskspec
def create_nic(self, device_type, device_label, device_infos):
nic = vim.vm.device.VirtualDeviceSpec()
if device_type == 'pcnet32':
nic.device = vim.vm.device.VirtualPCNet32()
        elif device_type == 'vmxnet2':
nic.device = vim.vm.device.VirtualVmxnet2()
elif device_type == 'vmxnet3':
nic.device = vim.vm.device.VirtualVmxnet3()
elif device_type == 'e1000':
nic.device = vim.vm.device.VirtualE1000()
elif device_type == 'e1000e':
nic.device = vim.vm.device.VirtualE1000e()
elif device_type == 'sriov':
nic.device = vim.vm.device.VirtualSriovEthernetCard()
else:
self.module.fail_json(msg="Invalid device_type '%s' for network %s" %
(device_type, device_infos['network']))
nic.device.wakeOnLanEnabled = True
nic.device.addressType = 'assigned'
nic.device.deviceInfo = vim.Description()
nic.device.deviceInfo.label = device_label
nic.device.deviceInfo.summary = device_infos['network']
nic.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
nic.device.connectable.startConnected = True
nic.device.connectable.allowGuestControl = True
nic.device.connectable.connected = True
if 'mac' in device_infos:
nic.device.macAddress = device_infos['mac']
return nic
class PyVmomiCache(object):
""" This class caches references to objects which are requested multiples times but not modified """
def __init__(self, content):
self.content = content
self.networks = {}
self.clusters = {}
self.esx_hosts = {}
def get_network(self, network):
if network not in self.networks:
self.networks[network] = get_obj(self.content, [vim.Network], network)
return self.networks[network]
def get_cluster(self, cluster):
if cluster not in self.clusters:
self.clusters[cluster] = get_obj(self.content, [vim.ClusterComputeResource], cluster)
return self.clusters[cluster]
def get_esx_host(self, host):
if host not in self.esx_hosts:
self.esx_hosts[host] = get_obj(self.content, [vim.HostSystem], host)
return self.esx_hosts[host]
class PyVmomiHelper(object):
def __init__(self, module):
if not HAS_PYVMOMI:
module.fail_json(msg='pyvmomi module required')
self.module = module
self.device_helper = PyVmomiDeviceHelper(self.module)
self.params = module.params
self.si = None
self.content = connect_to_api(self.module)
self.datacenter = None
self.folders = None
self.foldermap = {'fvim_by_path': {}, 'path_by_fvim': {}, 'path_by_vvim': {}, 'paths': {},
'uuids': {}}
self.configspec = None
self.change_detected = False
self.customspec = None
self.current_vm_obj = None
self.cache = PyVmomiCache(self.content)
def should_deploy_from_template(self):
return self.params.get('template') is not None
def _build_folder_tree(self, folder):
tree = {'virtualmachines': [],
'subfolders': {},
'vimobj': folder,
'name': folder.name}
children = None
if hasattr(folder, 'childEntity'):
children = folder.childEntity
if children:
for child in children:
if child == folder or child in tree:
continue
if isinstance(child, vim.Folder):
ctree = self._build_folder_tree(child)
tree['subfolders'][child] = dict.copy(ctree)
elif isinstance(child, vim.VirtualMachine):
tree['virtualmachines'].append(child)
else:
if isinstance(folder, vim.VirtualMachine):
return folder
return tree
def _build_folder_map(self, folder, inpath='/'):
""" Build a searchable index for vms+uuids+folders """
if isinstance(folder, tuple):
folder = folder[1]
thispath = os.path.join(inpath, folder['name'])
if thispath not in self.foldermap['paths']:
self.foldermap['paths'][thispath] = []
# store object by path and store path by object
self.foldermap['fvim_by_path'][thispath] = folder['vimobj']
self.foldermap['path_by_fvim'][folder['vimobj']] = thispath
for item in folder.items():
k = item[0]
v = item[1]
if k == 'name':
pass
elif k == 'subfolders':
for x in v.items():
self._build_folder_map(x, inpath=thispath)
elif k == 'virtualmachines':
for x in v:
# Apparently x.config can be None on corrupted VMs
                    if x.config is None:
                        continue
self.foldermap['uuids'][x.config.uuid] = x.config.name
self.foldermap['paths'][thispath].append(x.config.uuid)
if x not in self.foldermap['path_by_vvim']:
self.foldermap['path_by_vvim'][x] = thispath
def getfolders(self):
if not self.datacenter:
self.get_datacenter()
self.folders = self._build_folder_tree(self.datacenter.vmFolder)
self._build_folder_map(self.folders)
@staticmethod
def compile_folder_path_for_object(vobj):
""" make a /vm/foo/bar/baz like folder path for an object """
paths = []
if isinstance(vobj, vim.Folder):
paths.append(vobj.name)
thisobj = vobj
while hasattr(thisobj, 'parent'):
thisobj = thisobj.parent
if isinstance(thisobj, vim.Folder):
paths.append(thisobj.name)
paths.reverse()
if paths[0] == 'Datacenters':
paths.remove('Datacenters')
return '/' + '/'.join(paths)
def get_datacenter(self):
self.datacenter = get_obj(self.content, [vim.Datacenter],
self.params['datacenter'])
def getvm(self, name=None, uuid=None, folder=None, name_match=None, cache=False):
# https://www.vmware.com/support/developer/vc-sdk/visdk2xpubs/ReferenceGuide/vim.SearchIndex.html
# self.si.content.searchIndex.FindByInventoryPath('DC1/vm/test_folder')
vm = None
searchpath = None
if uuid:
vm = self.content.searchIndex.FindByUuid(uuid=uuid, vmSearch=True)
elif folder:
# Build the absolute folder path to pass into the search method
if self.params['folder'].startswith('/'):
searchpath = '%(datacenter)s%(folder)s' % self.params
else:
# need to look for matching absolute path
if not self.folders:
self.getfolders()
paths = self.foldermap['paths'].keys()
paths = [x for x in paths if x.endswith(self.params['folder'])]
if len(paths) > 1:
self.module.fail_json(
msg='%(folder)s matches more than one folder. Please use the absolute path starting with /vm/' % self.params)
elif paths:
searchpath = paths[0]
if searchpath:
# get all objects for this path ...
fObj = self.content.searchIndex.FindByInventoryPath(searchpath)
if fObj:
if isinstance(fObj, vim.Datacenter):
fObj = fObj.vmFolder
for cObj in fObj.childEntity:
if not isinstance(cObj, vim.VirtualMachine):
continue
if cObj.name == name:
vm = cObj
break
if not vm:
# FIXME - this is unused if folder has a default value
# narrow down by folder
if folder:
if not self.folders:
self.getfolders()
# compare the folder path of each VM against the search path
vmList = get_all_objs(self.content, [vim.VirtualMachine])
for item in vmList.items():
vobj = item[0]
if not isinstance(vobj.parent, vim.Folder):
continue
if self.compile_folder_path_for_object(vobj) == searchpath:
# Match by name
if vobj.config.name == name:
self.current_vm_obj = vobj
return vobj
if name_match:
if name_match == 'first':
vm = get_obj(self.content, [vim.VirtualMachine], name)
elif name_match == 'last':
matches = []
for thisvm in get_all_objs(self.content, [vim.VirtualMachine]):
if thisvm.config.name == name:
matches.append(thisvm)
if matches:
vm = matches[-1]
else:
matches = []
for thisvm in get_all_objs(self.content, [vim.VirtualMachine]):
if thisvm.config.name == name:
matches.append(thisvm)
if len(matches) > 1:
self.module.fail_json(
msg='more than 1 vm exists by the name %s. Please specify a uuid, or a folder, '
'or a datacenter or name_match' % name)
if matches:
vm = matches[0]
if cache and vm:
self.current_vm_obj = vm
return vm
def set_powerstate(self, vm, state, force):
"""
Set the power status for a VM determined by the current and
requested states. force is forceful
"""
facts = self.gather_facts(vm)
expected_state = state.replace('_', '').lower()
current_state = facts['hw_power_status'].lower()
result = {}
# Need Force
if not force and current_state not in ['poweredon', 'poweredoff']:
return "VM is in %s power state. Force is required!" % current_state
# State is already true
if current_state == expected_state:
result['changed'] = False
result['failed'] = False
else:
task = None
try:
if expected_state == 'poweredoff':
task = vm.PowerOff()
elif expected_state == 'poweredon':
task = vm.PowerOn()
elif expected_state == 'restarted':
if current_state in ('poweredon', 'poweringon', 'resetting', 'poweredoff'):
task = vm.Reset()
else:
result = {'changed': False, 'failed': True,
'msg': "Cannot restart VM in the current state %s" % current_state}
elif expected_state == 'suspended':
if current_state in ('poweredon', 'poweringon'):
task = vm.Suspend()
else:
result = {'changed': False, 'failed': True,
'msg': 'Cannot suspend VM in the current state %s' % current_state}
except Exception:
result = {'changed': False, 'failed': True,
'msg': get_exception()}
if task:
self.wait_for_task(task)
if task.info.state == 'error':
result = {'changed': False, 'failed': True, 'msg': task.info.error.msg}
else:
result = {'changed': True, 'failed': False}
# need to get new metadata if changed
if result['changed']:
newvm = self.getvm(uuid=vm.config.uuid)
facts = self.gather_facts(newvm)
result['instance'] = facts
return result
@staticmethod
def gather_facts(vm):
""" Gather facts from vim.VirtualMachine object. """
facts = {
'module_hw': True,
'hw_name': vm.config.name,
'hw_power_status': vm.summary.runtime.powerState,
'hw_guest_full_name': vm.summary.guest.guestFullName,
'hw_guest_id': vm.summary.guest.guestId,
'hw_product_uuid': vm.config.uuid,
'hw_processor_count': vm.config.hardware.numCPU,
'hw_memtotal_mb': vm.config.hardware.memoryMB,
'hw_interfaces': [],
'ipv4': None,
'ipv6': None,
}
netDict = {}
for device in vm.guest.net:
netDict[device.macAddress] = list(device.ipAddress)
for k, v in iteritems(netDict):
for ipaddress in v:
if ipaddress:
if '::' in ipaddress:
facts['ipv6'] = ipaddress
else:
facts['ipv4'] = ipaddress
ethernet_idx = 0
for idx, entry in enumerate(vm.config.hardware.device):
if not hasattr(entry, 'macAddress'):
continue
factname = 'hw_eth' + str(ethernet_idx)
facts[factname] = {
'addresstype': entry.addressType,
'label': entry.deviceInfo.label,
'macaddress': entry.macAddress,
'ipaddresses': netDict.get(entry.macAddress, None),
'macaddress_dash': entry.macAddress.replace(':', '-'),
'summary': entry.deviceInfo.summary,
}
facts['hw_interfaces'].append('eth' + str(ethernet_idx))
ethernet_idx += 1
return facts
def remove_vm(self, vm):
# https://www.vmware.com/support/developer/converter-sdk/conv60_apireference/vim.ManagedEntity.html#destroy
task = vm.Destroy()
self.wait_for_task(task)
if task.info.state == 'error':
return {'changed': False, 'failed': True, 'msg': task.info.error.msg}
else:
return {'changed': True, 'failed': False}
def configure_guestid(self, vm_obj, vm_creation=False):
# guest_id is not required when using templates
if self.should_deploy_from_template() and self.params.get('guest_id') is None:
return
# guest_id is only mandatory on VM creation
if vm_creation and self.params['guest_id'] is None:
self.module.fail_json(msg="guest_id attribute is mandatory for VM creation")
if vm_obj is None or self.configspec.guestId != vm_obj.summary.guest.guestId:
self.change_detected = True
self.configspec.guestId = self.params['guest_id']
def configure_cpu_and_memory(self, vm_obj, vm_creation=False):
# set cpu/memory/etc
if 'hardware' in self.params:
if 'num_cpus' in self.params['hardware']:
self.configspec.numCPUs = int(self.params['hardware']['num_cpus'])
if vm_obj is None or self.configspec.numCPUs != vm_obj.config.hardware.numCPU:
self.change_detected = True
# num_cpu is mandatory for VM creation
elif vm_creation and not self.should_deploy_from_template():
self.module.fail_json(msg="hardware.num_cpus attribute is mandatory for VM creation")
if 'memory_mb' in self.params['hardware']:
self.configspec.memoryMB = int(self.params['hardware']['memory_mb'])
if vm_obj is None or self.configspec.memoryMB != vm_obj.config.hardware.memoryMB:
self.change_detected = True
# memory_mb is mandatory for VM creation
elif vm_creation and not self.should_deploy_from_template():
self.module.fail_json(msg="hardware.memory_mb attribute is mandatory for VM creation")
def get_vm_network_interfaces(self, vm=None):
if vm is None:
return []
device_list = []
for device in vm.config.hardware.device:
if isinstance(device, vim.vm.device.VirtualPCNet32) or \
isinstance(device, vim.vm.device.VirtualVmxnet2) or \
isinstance(device, vim.vm.device.VirtualVmxnet3) or \
isinstance(device, vim.vm.device.VirtualE1000) or \
isinstance(device, vim.vm.device.VirtualE1000e) or \
isinstance(device, vim.vm.device.VirtualSriovEthernetCard):
device_list.append(device)
return device_list
def configure_network(self, vm_obj):
        # Ignore empty networks; this keeps existing networks when deploying a template/cloning a VM
if len(self.params['networks']) == 0:
return
network_devices = list()
for network in self.params['networks']:
if network:
if 'ip' in self.params['networks'][network]:
ip = self.params['networks'][network]['ip']
if ip not in IPNetwork(network):
self.module.fail_json(msg="ip '%s' not in network %s" % (ip, network))
ipnet = IPNetwork(network)
self.params['networks'][network]['subnet_mask'] = str(ipnet.netmask)
if 'network' in self.params['networks'][network]:
if get_obj(self.content, [vim.Network], self.params['networks'][network]['network']) is None:
self.module.fail_json(msg="Network %s doesn't exists" % network)
elif 'vlan' in self.params['networks'][network]:
network_name = None
dvps = get_all_objs(self.content, [vim.dvs.DistributedVirtualPortgroup])
for dvp in dvps:
if dvp.config.defaultPortConfig.vlan.vlanId == self.params['networks'][network]['vlan']:
network_name = dvp.config.name
break
if network_name:
self.params['networks'][network]['network'] = network_name
else:
self.module.fail_json(msg="VLAN %(vlan)s doesn't exists" % self.params['networks'][network])
else:
self.module.fail_json(msg="You need to define a network or a vlan")
network_devices.append(self.params['networks'][network])
adaptermaps = []
# List current device for Clone or Idempotency
current_net_devices = self.get_vm_network_interfaces(vm=vm_obj)
if len(network_devices) < len(current_net_devices):
self.module.fail_json(msg="given network device list is lesser than current VM device list (%d < %d). "
"Removing interfaces is not allowed"
% (len(network_devices), len(current_net_devices)))
for key in range(0, len(network_devices)):
# Default device type is vmxnet3, VMWare best practice
device_type = network_devices[key].get('device_type', 'vmxnet3')
nic = self.device_helper.create_nic(device_type,
'Network Adapter %s' % (key + 1),
network_devices[key])
nic_change_detected = False
if key < len(current_net_devices) and (vm_obj or self.should_deploy_from_template()):
nic.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
# Changing mac address has no effect when editing interface
if 'mac' in network_devices[key] and nic.device.macAddress != current_net_devices[key].macAddress:
self.module.fail_json(msg="Changing MAC address has not effect when interface is already present. "
"The failing new MAC address is %s" % nic.device.macAddress)
nic.device = current_net_devices[key]
nic.device.deviceInfo = vim.Description()
else:
nic.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
nic_change_detected = True
if hasattr(self.cache.get_network(network_devices[key]['network']), 'portKeys'):
# VDS switch
pg_obj = get_obj(self.content, [vim.dvs.DistributedVirtualPortgroup], network_devices[key]['network'])
dvs_port_connection = vim.dvs.PortConnection()
dvs_port_connection.portgroupKey = pg_obj.key
dvs_port_connection.switchUuid = pg_obj.config.distributedVirtualSwitch.uuid
nic.device.backing = vim.vm.device.VirtualEthernetCard.DistributedVirtualPortBackingInfo()
nic.device.backing.port = dvs_port_connection
else:
# vSwitch
if not isinstance(nic.device.backing, vim.vm.device.VirtualEthernetCard.NetworkBackingInfo):
nic.device.backing = vim.vm.device.VirtualEthernetCard.NetworkBackingInfo()
nic_change_detected = True
net_obj = self.cache.get_network(network_devices[key]['network'])
if nic.device.backing.network != net_obj:
nic.device.backing.network = net_obj
nic_change_detected = True
if nic.device.backing.deviceName != network_devices[key]['network']:
nic.device.backing.deviceName = network_devices[key]['network']
nic_change_detected = True
if nic_change_detected:
self.configspec.deviceChange.append(nic)
self.change_detected = True
if vm_obj is None or self.should_deploy_from_template():
if 'ip' in network_devices[key]:
guest_map = vim.vm.customization.AdapterMapping()
guest_map.adapter = vim.vm.customization.IPSettings()
guest_map.adapter.ip = vim.vm.customization.FixedIp()
guest_map.adapter.ip.ipAddress = str(network_devices[key]['ip'])
guest_map.adapter.subnetMask = str(network_devices[key]['subnet_mask'])
if 'gateway' in network_devices[key]:
guest_map.adapter.gateway = network_devices[key]['gateway']
if self.params.get('domain'):
guest_map.adapter.dnsDomain = self.params['domain']
adaptermaps.append(guest_map)
if vm_obj is None or self.should_deploy_from_template():
# DNS settings
globalip = vim.vm.customization.GlobalIPSettings()
globalip.dnsServerList = self.params['dns_servers']
globalip.dnsSuffixList = str(self.params['domain'])
# Hostname settings
ident = vim.vm.customization.LinuxPrep()
ident.domain = str(self.params['domain'])
ident.hostName = vim.vm.customization.FixedName()
ident.hostName.name = self.params['name']
self.customspec = vim.vm.customization.Specification()
self.customspec.nicSettingMap = adaptermaps
self.customspec.globalIPSettings = globalip
self.customspec.identity = ident
def get_vm_scsi_controller(self, vm_obj):
# If vm_obj doesn't exists no SCSI controller to find
if vm_obj is None:
return None
for device in vm_obj.config.hardware.device:
if self.device_helper.is_scsi_controller(device):
scsi_ctl = vim.vm.device.VirtualDeviceSpec()
scsi_ctl.device = device
return scsi_ctl
return None
def get_configured_disk_size(self, expected_disk_spec):
# what size is it?
if [x for x in expected_disk_spec.keys() if x.startswith('size_') or x == 'size']:
# size_tb, size_gb, size_mb, size_kb, size_b ...?
if 'size' in expected_disk_spec:
expected = ''.join(c for c in expected_disk_spec['size'] if c.isdigit())
unit = expected_disk_spec['size'].replace(expected, '').lower()
expected = int(expected)
else:
param = [x for x in expected_disk_spec.keys() if x.startswith('size_')][0]
unit = param.split('_')[-1].lower()
expected = [x[1] for x in expected_disk_spec.items() if x[0].startswith('size_')][0]
expected = int(expected)
if unit == 'tb':
return expected * 1024 * 1024 * 1024
elif unit == 'gb':
return expected * 1024 * 1024
            elif unit == 'mb':
return expected * 1024
elif unit == 'kb':
return expected
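            # worked example: "size_gb: 10" -> unit 'gb' -> 10 * 1024 * 1024 = 10485760 KB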
self.module.fail_json(
msg='%s is not a supported unit for disk size. Supported units are kb, mb, gb or tb' % unit)
# No size found but disk, fail
self.module.fail_json(
msg="No size, size_kb, size_mb, size_gb or size_tb attribute found into disk configuration")
def configure_disks(self, vm_obj):
        # Ignore empty disk list; this keeps existing disks when deploying a template/cloning a VM
if len(self.params['disk']) == 0:
return
scsi_ctl = self.get_vm_scsi_controller(vm_obj)
# Create scsi controller only if we are deploying a new VM, not a template or reconfiguring
if vm_obj is None or scsi_ctl is None:
scsi_ctl = self.device_helper.create_scsi_controller(self.get_scsi_type())
self.change_detected = True
self.configspec.deviceChange.append(scsi_ctl)
disks = [x for x in vm_obj.config.hardware.device if isinstance(x, vim.vm.device.VirtualDisk)] \
if vm_obj is not None else None
if disks is not None and self.params.get('disk') and len(self.params.get('disk')) < len(disks):
self.module.fail_json(msg="Provided disks configuration has less disks than "
"the target object (%d vs %d)" % (len(self.params.get('disk')), len(disks)))
disk_index = 0
for expected_disk_spec in self.params.get('disk'):
disk_modified = False
            # If we are manipulating an existing object which has disks and disk_index is in disks
if vm_obj is not None and disks is not None and disk_index < len(disks):
diskspec = vim.vm.device.VirtualDeviceSpec()
# set the operation to edit so that it knows to keep other settings
diskspec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
diskspec.device = disks[disk_index]
else:
diskspec = self.device_helper.create_scsi_disk(scsi_ctl, disk_index)
disk_modified = True
# is it thin?
if 'type' in expected_disk_spec:
if expected_disk_spec.get('type', '').lower() == 'thin':
diskspec.device.backing.thinProvisioned = True
# which datastore?
if expected_disk_spec.get('datastore'):
# TODO: This is already handled by the relocation spec,
# but it needs to eventually be handled for all the
# other disks defined
pass
# increment index for next disk search
disk_index += 1
# index 7 is reserved to SCSI controller
if disk_index == 7:
disk_index += 1
kb = self.get_configured_disk_size(expected_disk_spec)
            # VMware doesn't allow reducing disk sizes
if kb < diskspec.device.capacityInKB:
self.module.fail_json(
msg="given disk size is lesser than found (%d < %d). Reducing disks is not allowed." %
(kb, diskspec.device.capacityInKB))
if kb != diskspec.device.capacityInKB or disk_modified:
diskspec.device.capacityInKB = kb
self.configspec.deviceChange.append(diskspec)
self.change_detected = True
def select_host(self):
# if the user wants a cluster, get the list of hosts for the cluster and use the first one
if self.params['cluster']:
cluster = self.cache.get_cluster(self.params['cluster'])
if not cluster:
self.module.fail_json(msg="Failed to find a cluster named %(cluster)s" % self.params)
hostsystems = [x for x in cluster.host]
# TODO: add a policy to select host
hostsystem = hostsystems[0]
else:
hostsystem = self.cache.get_esx_host(self.params['esxi_hostname'])
if not hostsystem:
self.module.fail_json(msg="Failed to find a host named %(esxi_hostname)s" % self.params)
return hostsystem
def select_datastore(self, vm_obj=None):
datastore = None
datastore_name = None
if len(self.params['disk']) != 0:
# TODO: really use the datastore for newly created disks
if 'autoselect_datastore' in self.params['disk'][0] and self.params['disk'][0]['autoselect_datastore']:
datastores = get_all_objs(self.content, [vim.Datastore])
if datastores is None or len(datastores) == 0:
self.module.fail_json(msg="Unable to find a datastore list when autoselecting")
datastore_freespace = 0
for ds in datastores:
if ds.summary.freeSpace > datastore_freespace:
# If datastore field is provided, filter destination datastores
if 'datastore' in self.params['disk'][0] and \
isinstance(self.params['disk'][0]['datastore'], str) and \
ds.name.find(self.params['disk'][0]['datastore']) < 0:
continue
datastore = ds
datastore_name = datastore.name
datastore_freespace = ds.summary.freeSpace
elif 'datastore' in self.params['disk'][0]:
datastore_name = self.params['disk'][0]['datastore']
datastore = get_obj(self.content, [vim.Datastore], datastore_name)
else:
self.module.fail_json(msg="Either datastore or autoselect_datastore "
"should be provided to select datastore")
if not datastore and self.should_deploy_from_template():
# use the template's existing DS
disks = [x for x in vm_obj.config.hardware.device if isinstance(x, vim.vm.device.VirtualDisk)]
datastore = disks[0].backing.datastore
datastore_name = datastore.name
if not datastore:
self.module.fail_json(msg="Failed to find a matching datastore")
return datastore, datastore_name
def obj_has_parent(self, obj, parent):
assert obj is not None and parent is not None
current_parent = obj
while True:
if current_parent.name == parent.name:
return True
current_parent = current_parent.parent
if current_parent is None:
return False
def select_resource_pool(self, host):
resource_pools = get_all_objs(self.content, [vim.ResourcePool])
for rp in resource_pools.items():
if not rp[0]:
continue
if not hasattr(rp[0], 'parent'):
continue
# Find resource pool on host
if self.obj_has_parent(rp[0].parent, host.parent):
# If no resource_pool selected or it's the selected pool, return it
if self.module.params['resource_pool'] is None or rp[0].name == self.module.params['resource_pool']:
return rp[0]
if self.module.params['resource_pool'] is not None:
self.module.fail_json(msg="Could not find resource_pool %s for selected host %s"
% (self.module.params['resource_pool'], host.name))
else:
self.module.fail_json(msg="Failed to find a resource group for %s" % host.name)
def get_scsi_type(self):
disk_controller_type = "paravirtual"
# set cpu/memory/etc
if 'hardware' in self.params:
if 'scsi' in self.params['hardware']:
if self.params['hardware']['scsi'] in ['buslogic', 'paravirtual', 'lsilogic', 'lsilogicsas']:
disk_controller_type = self.params['hardware']['scsi']
else:
self.module.fail_json(msg="hardware.scsi attribute should be 'paravirtual' or 'lsilogic'")
return disk_controller_type
def deploy_vm(self):
# https://github.com/vmware/pyvmomi-community-samples/blob/master/samples/clone_vm.py
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.vm.CloneSpec.html
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.vm.ConfigSpec.html
# https://www.vmware.com/support/developer/vc-sdk/visdk41pubs/ApiReference/vim.vm.RelocateSpec.html
# FIXME:
# - multiple datacenters
# - multiple templates by the same name
# - static IPs
datacenters = get_all_objs(self.content, [vim.Datacenter])
datacenter = get_obj(self.content, [vim.Datacenter], self.params['datacenter'])
if not datacenter:
self.module.fail_json(msg='No datacenter named %(datacenter)s was found' % self.params)
# find matching folders
if self.params['folder'].startswith('/'):
folders = [x for x in self.foldermap['fvim_by_path'].items() if x[0] == self.params['folder']]
else:
folders = [x for x in self.foldermap['fvim_by_path'].items() if x[0].endswith(self.params['folder'])]
# throw error if more than one match or no matches
if len(folders) == 0:
self.module.fail_json(msg='No folder matched the path: %(folder)s' % self.params)
elif len(folders) > 1:
self.module.fail_json(
msg='Too many folders matched "%s", please give the full path starting with /vm/' % self.params[
'folder'])
# grab the folder vim object
destfolder = folders[0][1]
hostsystem = self.select_host()
if self.should_deploy_from_template():
# FIXME: need to search for this in the same way as guests to ensure accuracy
vm_obj = get_obj(self.content, [vim.VirtualMachine], self.params['template'])
if not vm_obj:
self.module.fail_json(msg="Could not find a template named %(template)s" % self.params)
else:
vm_obj = None
# set the destination datastore for VM & disks
(datastore, datastore_name) = self.select_datastore(vm_obj)
resource_pool = self.select_resource_pool(hostsystem)
self.configspec = vim.vm.ConfigSpec(cpuHotAddEnabled=True, memoryHotAddEnabled=True)
self.configspec.deviceChange = []
self.configure_guestid(vm_obj=vm_obj, vm_creation=True)
self.configure_cpu_and_memory(vm_obj=vm_obj, vm_creation=True)
self.configure_disks(vm_obj=vm_obj)
self.configure_network(vm_obj=vm_obj)
try:
if self.should_deploy_from_template():
# create the relocation spec
relospec = vim.vm.RelocateSpec()
relospec.host = hostsystem
relospec.datastore = datastore
relospec.pool = resource_pool
clonespec = vim.vm.CloneSpec(template=self.params['is_template'],
location=relospec)
if self.params['customize'] is True:
clonespec.customization = self.customspec
clonespec.config = self.configspec
task = vm_obj.Clone(folder=destfolder, name=self.params['name'], spec=clonespec)
else:
                # ConfigSpec requires a name for VM creation
self.configspec.name = self.params['name']
self.configspec.files = vim.vm.FileInfo(logDirectory=None,
snapshotDirectory=None,
suspendDirectory=None,
vmPathName="[" + datastore_name + "] " + self.params["name"])
task = destfolder.CreateVM_Task(config=self.configspec, pool=resource_pool)
self.wait_for_task(task)
except TypeError:
self.module.fail_json(msg="TypeError was returned, please ensure to give correct inputs.")
if task.info.state == 'error':
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2021361
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2173
return {'changed': False, 'failed': True, 'msg': task.info.error.msg}
else:
# set annotation
vm = task.info.result
if self.params['annotation']:
annotation_spec = vim.vm.ConfigSpec()
annotation_spec.annotation = str(self.params['annotation'])
task = vm.ReconfigVM_Task(annotation_spec)
self.wait_for_task(task)
if self.params['wait_for_ip_address'] or self.params['state'] in ['poweredon', 'restarted']:
self.set_powerstate(vm, 'poweredon', force=False)
if self.params['wait_for_ip_address']:
self.wait_for_vm_ip(vm)
vm_facts = self.gather_facts(vm)
return {'changed': self.change_detected, 'failed': False, 'instance': vm_facts}
def reconfigure_vm(self):
self.configspec = vim.vm.ConfigSpec()
self.configspec.deviceChange = []
self.configure_guestid(vm_obj=self.current_vm_obj)
self.configure_cpu_and_memory(vm_obj=self.current_vm_obj)
self.configure_disks(vm_obj=self.current_vm_obj)
self.configure_network(vm_obj=self.current_vm_obj)
relospec = vim.vm.RelocateSpec()
hostsystem = self.select_host()
relospec.pool = self.select_resource_pool(hostsystem)
change_applied = False
if relospec.pool != self.current_vm_obj.resourcePool:
task = self.current_vm_obj.RelocateVM_Task(spec=relospec)
self.wait_for_task(task)
change_applied = True
# Only send VMWare task if we see a modification
if self.change_detected:
task = self.current_vm_obj.ReconfigVM_Task(spec=self.configspec)
self.wait_for_task(task)
if task.info.state == 'error':
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2021361
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2173
return {'changed': False, 'failed': True, 'msg': task.info.error.msg}
change_applied = True
vm_facts = self.gather_facts(self.current_vm_obj)
return {'changed': change_applied, 'failed': False, 'instance': vm_facts}
@staticmethod
def wait_for_task(task):
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.Task.html
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.TaskInfo.html
# https://github.com/virtdevninja/pyvmomi-community-samples/blob/master/samples/tools/tasks.py
while task.info.state not in ['success', 'error']:
time.sleep(1)
def wait_for_vm_ip(self, vm, poll=100, sleep=5):
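        # polls up to poll * sleep seconds (about 500s with the defaults)
        # waiting for vCenter to report an IPv4 or IPv6 address for the guest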
ips = None
facts = {}
thispoll = 0
while not ips and thispoll <= poll:
newvm = self.getvm(uuid=vm.config.uuid)
facts = self.gather_facts(newvm)
if facts['ipv4'] or facts['ipv6']:
ips = True
else:
time.sleep(sleep)
thispoll += 1
return facts
def fetch_file_from_guest(self, vm, username, password, src, dest):
""" Use VMWare's filemanager api to fetch a file over http """
result = {'failed': False}
tools_status = vm.guest.toolsStatus
if tools_status == 'toolsNotInstalled' or tools_status == 'toolsNotRunning':
result['failed'] = True
result['msg'] = "VMwareTools is not installed or is not running in the guest"
return result
# https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/NamePasswordAuthentication.rst
creds = vim.vm.guest.NamePasswordAuthentication(
username=username, password=password
)
# https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/FileManager/FileTransferInformation.rst
fti = self.content.guestOperationsManager.fileManager. \
InitiateFileTransferFromGuest(vm, creds, src)
result['size'] = fti.size
result['url'] = fti.url
# Use module_utils to fetch the remote url returned from the api
rsp, info = fetch_url(self.module, fti.url, use_proxy=False,
force=True, last_mod_time=None,
timeout=10, headers=None)
# save all of the transfer data
for k, v in iteritems(info):
result[k] = v
# exit early if xfer failed
if info['status'] != 200:
result['failed'] = True
return result
# attempt to read the content and write it
try:
with open(dest, 'wb') as f:
f.write(rsp.read())
except Exception as e:
result['failed'] = True
result['msg'] = str(e)
return result
def push_file_to_guest(self, vm, username, password, src, dest, overwrite=True):
""" Use VMWare's filemanager api to fetch a file over http """
result = {'failed': False}
tools_status = vm.guest.toolsStatus
if tools_status == 'toolsNotInstalled' or tools_status == 'toolsNotRunning':
result['failed'] = True
result['msg'] = "VMwareTools is not installed or is not running in the guest"
return result
# https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/NamePasswordAuthentication.rst
creds = vim.vm.guest.NamePasswordAuthentication(
username=username, password=password
)
# the api requires a filesize in bytes
fdata = None
try:
# filesize = os.path.getsize(src)
filesize = os.stat(src).st_size
with open(src, 'rb') as f:
fdata = f.read()
result['local_filesize'] = filesize
except Exception as e:
result['failed'] = True
result['msg'] = "Unable to read src file: %s" % str(e)
return result
# https://www.vmware.com/support/developer/converter-sdk/conv60_apireference/vim.vm.guest.FileManager.html#initiateFileTransferToGuest
file_attribute = vim.vm.guest.FileManager.FileAttributes()
url = self.content.guestOperationsManager.fileManager. \
InitiateFileTransferToGuest(vm, creds, dest, file_attribute,
filesize, overwrite)
# PUT the filedata to the url ...
rsp, info = fetch_url(self.module, url, method="put", data=fdata,
use_proxy=False, force=True, last_mod_time=None,
timeout=10, headers=None)
result['msg'] = str(rsp.read())
# save all of the transfer data
for k, v in iteritems(info):
result[k] = v
return result
def run_command_in_guest(self, vm, username, password, program_path, program_args, program_cwd, program_env):
result = {'failed': False}
tools_status = vm.guest.toolsStatus
if (tools_status == 'toolsNotInstalled' or
tools_status == 'toolsNotRunning'):
result['failed'] = True
result['msg'] = "VMwareTools is not installed or is not running in the guest"
return result
# https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/NamePasswordAuthentication.rst
creds = vim.vm.guest.NamePasswordAuthentication(
username=username, password=password
)
try:
# https://github.com/vmware/pyvmomi/blob/master/docs/vim/vm/guest/ProcessManager.rst
pm = self.content.guestOperationsManager.processManager
# https://www.vmware.com/support/developer/converter-sdk/conv51_apireference/vim.vm.guest.ProcessManager.ProgramSpec.html
ps = vim.vm.guest.ProcessManager.ProgramSpec(
# programPath=program,
# arguments=args
programPath=program_path,
arguments=program_args,
workingDirectory=program_cwd,
)
res = pm.StartProgramInGuest(vm, creds, ps)
result['pid'] = res
pdata = pm.ListProcessesInGuest(vm, creds, [res])
# wait for pid to finish
while not pdata[0].endTime:
time.sleep(1)
pdata = pm.ListProcessesInGuest(vm, creds, [res])
result['owner'] = pdata[0].owner
result['startTime'] = pdata[0].startTime.isoformat()
result['endTime'] = pdata[0].endTime.isoformat()
result['exitCode'] = pdata[0].exitCode
if result['exitCode'] != 0:
result['failed'] = True
result['msg'] = "program exited non-zero"
else:
result['msg'] = "program completed successfully"
except Exception as e:
result['msg'] = str(e)
result['failed'] = True
return result
def list_snapshots_recursively(self, snapshots):
snapshot_data = []
for snapshot in snapshots:
snap_text = 'Id: %s; Name: %s; Description: %s; CreateTime: %s; State: %s' % (snapshot.id, snapshot.name,
snapshot.description,
snapshot.createTime,
snapshot.state)
snapshot_data.append(snap_text)
snapshot_data = snapshot_data + self.list_snapshots_recursively(snapshot.childSnapshotList)
return snapshot_data
def get_snapshots_by_name_recursively(self, snapshots, snapname):
snap_obj = []
for snapshot in snapshots:
if snapshot.name == snapname:
snap_obj.append(snapshot)
else:
snap_obj = snap_obj + self.get_snapshots_by_name_recursively(snapshot.childSnapshotList, snapname)
return snap_obj
def get_current_snap_obj(self, snapshots, snapob):
snap_obj = []
for snapshot in snapshots:
if snapshot.snapshot == snapob:
snap_obj.append(snapshot)
snap_obj = snap_obj + self.get_current_snap_obj(snapshot.childSnapshotList, snapob)
return snap_obj
def snapshot_vm(self, vm, guest, snapshot_op):
""" To perform snapshot operations create/remove/revert/list_all/list_current/remove_all """
snapshot_op_name = None
try:
snapshot_op_name = snapshot_op['op_type']
except KeyError:
self.module.fail_json(msg="Specify op_type - create/remove/revert/list_all/list_current/remove_all")
task = None
result = {}
if snapshot_op_name not in ['create', 'remove', 'revert', 'list_all', 'list_current', 'remove_all']:
self.module.fail_json(msg="Specify op_type - create/remove/revert/list_all/list_current/remove_all")
if snapshot_op_name != 'create' and vm.snapshot is None:
self.module.exit_json(msg="VM - %s doesn't have any snapshots" % guest)
if snapshot_op_name == 'create':
try:
snapname = snapshot_op['name']
except KeyError:
self.module.fail_json(msg="specify name & description(optional) to create a snapshot")
if 'description' in snapshot_op:
snapdesc = snapshot_op['description']
else:
snapdesc = ''
dumpMemory = False
quiesce = False
task = vm.CreateSnapshot(snapname, snapdesc, dumpMemory, quiesce)
elif snapshot_op_name in ['remove', 'revert']:
try:
snapname = snapshot_op['name']
except KeyError:
self.module.fail_json(msg="specify snapshot name")
snap_obj = self.get_snapshots_by_name_recursively(vm.snapshot.rootSnapshotList, snapname)
# if len(snap_obj) is 0; then no snapshots with specified name
if len(snap_obj) == 1:
snap_obj = snap_obj[0].snapshot
if snapshot_op_name == 'remove':
task = snap_obj.RemoveSnapshot_Task(True)
else:
task = snap_obj.RevertToSnapshot_Task()
else:
self.module.exit_json(
msg="Couldn't find any snapshots with specified name: %s on VM: %s" % (snapname, guest))
elif snapshot_op_name == 'list_all':
snapshot_data = self.list_snapshots_recursively(vm.snapshot.rootSnapshotList)
result['snapshot_data'] = snapshot_data
elif snapshot_op_name == 'list_current':
current_snapref = vm.snapshot.currentSnapshot
current_snap_obj = self.get_current_snap_obj(vm.snapshot.rootSnapshotList, current_snapref)
result['current_snapshot'] = 'Id: %s; Name: %s; Description: %s; CreateTime: %s; State: %s' % (
current_snap_obj[0].id,
current_snap_obj[0].name, current_snap_obj[0].description, current_snap_obj[0].createTime,
current_snap_obj[0].state)
elif snapshot_op_name == 'remove_all':
task = vm.RemoveAllSnapshots()
if task:
self.wait_for_task(task)
if task.info.state == 'error':
result = {'changed': False, 'failed': True, 'msg': task.info.error.msg}
else:
result = {'changed': True, 'failed': False}
return result
def get_obj(content, vimtype, name):
"""
Return an object by name, if name is None the
first found object is returned
"""
obj = None
container = content.viewManager.CreateContainerView(
content.rootFolder, vimtype, True)
for c in container.view:
if name:
if c.name == name:
obj = c
break
else:
obj = c
break
container.Destroy()
return obj
def main():
module = AnsibleModule(
argument_spec=dict(
hostname=dict(
type='str',
default=os.environ.get('VMWARE_HOST')
),
username=dict(
type='str',
default=os.environ.get('VMWARE_USER')
),
password=dict(
type='str', no_log=True,
default=os.environ.get('VMWARE_PASSWORD')
),
state=dict(
required=False,
choices=[
'poweredon',
'poweredoff',
'present',
'absent',
'restarted',
'suspended',
'gatherfacts',
],
default='present'),
validate_certs=dict(required=False, type='bool', default=True),
template_src=dict(required=False, type='str', aliases=['template'], default=None),
is_template=dict(required=False, type='bool', default=False),
annotation=dict(required=False, type='str', aliases=['notes']),
name=dict(required=True, type='str'),
name_match=dict(required=False, type='str', default='first'),
snapshot_op=dict(required=False, type='dict', default={}),
uuid=dict(required=False, type='str'),
folder=dict(required=False, type='str', default='/vm'),
guest_id=dict(required=False, type='str', default=None),
disk=dict(required=False, type='list', default=[]),
hardware=dict(required=False, type='dict', default={}),
force=dict(required=False, type='bool', default=False),
datacenter=dict(required=False, type='str', default=None),
esxi_hostname=dict(required=False, type='str', default=None),
cluster=dict(required=False, type='str', default=None),
wait_for_ip_address=dict(required=False, type='bool', default=True),
customize=dict(required=False, type='bool', default=False),
dns_servers=dict(required=False, type='list', default=None),
domain=dict(required=False, type='str', default=None),
networks=dict(required=False, type='dict', default={}),
resource_pool=dict(required=False, type='str', default=None)
),
supports_check_mode=True,
mutually_exclusive=[
['esxi_hostname', 'cluster'],
],
required_together=[
['state', 'force'],
['template'],
],
)
result = {'failed': False, 'changed': False}
# Prepend /vm if it was missing from the folder path, also strip trailing slashes
if not module.params['folder'].startswith('/vm') and module.params['folder'].startswith('/'):
module.params['folder'] = '/vm%(folder)s' % module.params
module.params['folder'] = module.params['folder'].rstrip('/')
    # Fail check: customize requires template to be defined
if module.params["customize"] and not module.params['template']:
module.fail_json(msg="customize option is only valid when template option is defined")
pyv = PyVmomiHelper(module)
# Check if the VM exists before continuing
vm = pyv.getvm(name=module.params['name'],
folder=module.params['folder'],
uuid=module.params['uuid'],
name_match=module.params['name_match'],
cache=True)
# VM already exists
if vm:
if module.params['state'] == 'absent':
# destroy it
if module.params['force']:
# has to be poweredoff first
pyv.set_powerstate(vm, 'poweredoff', module.params['force'])
result = pyv.remove_vm(vm)
elif module.params['state'] == 'present':
result = pyv.reconfigure_vm()
elif module.params['state'] in ['poweredon', 'poweredoff', 'restarted', 'suspended']:
# set powerstate
tmp_result = pyv.set_powerstate(vm, module.params['state'], module.params['force'])
if tmp_result['changed']:
result["changed"] = True
if not tmp_result["failed"]:
result["failed"] = False
elif module.params['state'] == 'gatherfacts':
# Run for facts only
try:
module.exit_json(instance=pyv.gather_facts(vm))
except Exception:
e = get_exception()
module.fail_json(msg="Fact gather failed with exception %s" % e)
elif module.params['snapshot_op']:
result = pyv.snapshot_vm(vm, module.params['name'], module.params['snapshot_op'])
else:
# This should not happen
assert False
# VM doesn't exist
else:
if module.params['state'] in ['poweredon', 'poweredoff', 'present', 'restarted', 'suspended']:
# Create it ...
result = pyv.deploy_vm()
elif module.params['state'] == 'gatherfacts':
module.fail_json(msg="Unable to gather facts for inexistant VM %s" % module.params['name'])
if 'failed' not in result:
result['failed'] = False
if result['failed']:
module.fail_json(**result)
else:
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -5,752,638,804,125,786,000 | 39.620889 | 172 | 0.571055 | false |
gyurisc/stackjobs | clean_data.py | 1 | 1758 | # Ad-hoc fixing of mongo database
from datetime import datetime
import pymongo
client = pymongo.MongoClient('localhost', 27017)
db = client['stackoverflow']
jobs = db['jobs']
# total jobs
total_jobs = jobs.count()
print "Total jobs: %s" % total_jobs
print "=== Fixing Date Stamp ==="
date_stamp = datetime(2016, 6, 1, 7, 01, 01)
jobs.update_many({ "date" : { "$exists" : False}}, {"$set" : {"date" : date_stamp}})
count = 0
for job in jobs.find( { "date" : { "$exists" : False}}):
count = count + 1
# print(job)
print "=== Fixing Date Stamp ==="
print "Number of jobs with no date is %s." % count
count = 0
for job in jobs.find( { "date" : date_stamp}):
count = count + 1
# print(job)
print "Number of jobs with default date is %s." % count
# Week number
print "=== Fixing Week Number ==="
wkcount = jobs.find( {"weeknum" : {"$exists" : True}}).count()
print "Week number exists with %s and missing for %s jobs." % (wkcount, total_jobs - wkcount)
for job in jobs.find({"weeknum" : {"$exists": False}}):
    # 'date' may be a datetime (set above for missing dates) or a 'YYYY-MM-DD' string
    d = job["date"]
    if not isinstance(d, datetime):
        d = datetime.strptime(d, '%Y-%m-%d')
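    # isocalendar() returns (ISO year, ISO week number, ISO weekday);
    # e.g. datetime(2016, 6, 1).isocalendar()[1] == 22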
wk = d.isocalendar()[1]
jobs.update({"_id" : job["_id"]}, {"$set" : {"weeknum" : wk}})
# Employee and Location Whitespace
print "=== Fixing Employee & Location ==="
print "Striping strings from white space in employer and location strings"
for job in jobs.find():
_emp = job["employer"].strip()
_loc = job["location"].strip()
jobs.update({"_id" : job["_id"]}, {"$set" : {"employer" : _emp, "location" : _loc}})
print "Stripping strings from whitespace where salary exists"
for job in jobs.find({ "salary" : { "$exists" : True }}):
_salary = job["salary"].strip()
jobs.update({"_id" : job["_id"]}, {"$set" : {"salary" : _salary}})
| mit | 6,723,297,113,947,829,000 | 31.555556 | 93 | 0.610353 | false |
nginx/unit | test/test_respawn.py | 1 | 3110 | import re
import subprocess
import time
from unit.applications.lang.python import TestApplicationPython
from unit.option import option
class TestRespawn(TestApplicationPython):
prerequisites = {'modules': {'python': 'any'}}
PATTERN_ROUTER = 'unit: router'
PATTERN_CONTROLLER = 'unit: controller'
def setup_method(self):
self.app_name = "app-" + option.temp_dir.split('/')[-1]
self.load('empty', self.app_name)
assert 'success' in self.conf(
'1', 'applications/' + self.app_name + '/processes'
)
def pid_by_name(self, name, ppid):
output = subprocess.check_output(['ps', 'ax', '-O', 'ppid']).decode()
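        # a matching 'ps ax -O ppid' line looks roughly like
        # "  1234  1230 S ?    0:00 unit: router" (pid, ppid, then the
        # command); capture the pid when ppid and process name both match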
m = re.search(r'\s*(\d+)\s*' + str(ppid) + r'.*' + name, output)
return None if m is None else m.group(1)
def kill_pids(self, *pids):
subprocess.call(['kill', '-9'] + list(pids))
def wait_for_process(self, process, unit_pid):
for i in range(50):
found = self.pid_by_name(process, unit_pid)
if found is not None:
break
time.sleep(0.1)
return found
def find_proc(self, name, ppid, ps_output):
return re.findall(str(ppid) + r'.*' + name, ps_output)
def smoke_test(self, unit_pid):
for _ in range(10):
r = self.conf('1', 'applications/' + self.app_name + '/processes')
if 'success' in r:
break
time.sleep(0.1)
assert 'success' in r
assert self.get()['status'] == 200
        # Check that only one router, one controller,
        # and one application process are running.
out = subprocess.check_output(['ps', 'ax', '-O', 'ppid']).decode()
assert len(self.find_proc(self.PATTERN_ROUTER, unit_pid, out)) == 1
assert len(self.find_proc(self.PATTERN_CONTROLLER, unit_pid, out)) == 1
assert len(self.find_proc(self.app_name, unit_pid, out)) == 1
def test_respawn_router(self, skip_alert, unit_pid, skip_fds_check):
skip_fds_check(router=True)
pid = self.pid_by_name(self.PATTERN_ROUTER, unit_pid)
self.kill_pids(pid)
skip_alert(r'process %s exited on signal 9' % pid)
assert self.wait_for_process(self.PATTERN_ROUTER, unit_pid) is not None
self.smoke_test(unit_pid)
def test_respawn_controller(self, skip_alert, unit_pid, skip_fds_check):
skip_fds_check(controller=True)
pid = self.pid_by_name(self.PATTERN_CONTROLLER, unit_pid)
self.kill_pids(pid)
skip_alert(r'process %s exited on signal 9' % pid)
assert (
self.wait_for_process(self.PATTERN_CONTROLLER, unit_pid)
is not None
)
assert self.get()['status'] == 200
self.smoke_test(unit_pid)
def test_respawn_application(self, skip_alert, unit_pid):
pid = self.pid_by_name(self.app_name, unit_pid)
self.kill_pids(pid)
skip_alert(r'process %s exited on signal 9' % pid)
assert self.wait_for_process(self.app_name, unit_pid) is not None
self.smoke_test(unit_pid)
| apache-2.0 | 8,054,660,929,248,665,000 | 29.792079 | 79 | 0.592605 | false |
rodo/ansible-tsung | ec2tool.py | 1 | 5117 | #!/usr/bin/env python
import boto.ec2
import jinja2
import sys
import json
import yaml
class Tsing(boto.ec2.instance.Instance):
def shortname(self):
return self.private_dns_name.split('.')[0]
@property
def private_short_name(self):
return self.private_dns_name.split('.')[0]
def get_specs(instance, region, data):
"""
    instance (string) : the instance type
    region (string) : the region name
    data (dict) : EC2 instance spec data
"""
datas = get_data_region(region, data)
instance_spec = get_instance(instance, datas)
return instance_spec
def get_instance(instance, data):
"""
instance (string)
data (dict)
"""
result = None
for inst in data['instanceTypes']:
for size in inst['sizes']:
if instance == size['size']:
result = size
break
return result
def get_data_region(region, data):
"""
region (string) : the region name
data (dict)
"""
config = data['config']
ec2_regions = {"us-east-1": "us-east",
"us-west-1": "us-west",
"us-west-2": "us-west-2",
"eu-west-1": "eu-ireland",
"ap-southeast-1": "apac-sin",
"ap-southeast-2": "apac-syd",
"ap-northeast-1": "apac-tokyo",
"sa-east-1": "sa-east-1"
}
for reg in config['regions']:
if reg['region'] == ec2_regions[region]:
return reg
def write_nodes(controller, injectors, data, region):
    """
    Write playbooks/roles/tsung/vars/nodes.yml

    controller (Tsing instance)
    injectors (list of Tsing instances)
    data (dict) : EC2 instance spec data
    region (string) : the region name
    """
hosts = open("playbooks/roles/tsung/vars/nodes.yml", 'w')
hosts.write("---\n")
contr_str = "controller: { private_dns_name: '%s', private_ip_address: '%s', private_short_name: '%s' }\n\n"
hosts.write(contr_str % (controller.private_dns_name,
controller.private_ip_address,
controller.private_short_name))
hosts.write("injectors:\n")
for injec in injectors:
print injec.__dict__
specs = get_specs(injec.instance_type, region, data)
injector = {"private_dns_name": str(injec.private_dns_name),
"private_ip_address": str(injec.private_ip_address),
"private_short_name": str(injec.private_short_name),
"instance_type": str(injec.instance_type),
"cpu": int(specs['vCPU'])}
hosts.write(" - {}".format(yaml.dump(injector, encoding='utf-8')))
hosts.close()
def instance_weights(injectors, region, data):
"""
    Weight each injector proportionally to its memory (GiB),
    normalized so the smallest instance gets weight 1
"""
assw = {}
weights = []
for injec in injectors:
specs = get_specs(injec['instance_type'], region, data)
weights.append(float(specs['memoryGiB']))
minweight = min(weights)
for injec in injectors:
specs = get_specs(injec['instance_type'], region, data)
iid = injec['id']
assw[iid] = int(round(float(specs['memoryGiB']) / minweight))
return assw
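# Illustrative example (assumed instance specs): two injectors with
# 3.75 GiB and 7.5 GiB of memory get weights {<id1>: 1, <id2>: 2}.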
def parse_instances(instances):
"""
    Split the running instances into the controller and the injectors
"""
controller = None
injectors = []
for instance in instances:
inst = instance.instances[0]
inst.__class__ = Tsing
if inst.state == 'running':
tags = inst.tags
if 'tsung_role' in tags:
if tags['tsung_role'] == 'controller':
controller = inst
else:
injectors.append(inst)
else:
injectors.append(inst)
return controller, injectors
def cloud_connect(region):
"""
Connect on cloud
"""
print "connect on {}...".format(region)
conn = boto.ec2.connect_to_region(region)
return conn
def write_ini(injectors, controller):
"""
Write ansible .ini file
"""
templateLoader = jinja2.FileSystemLoader(searchpath=".")
templateEnv = jinja2.Environment(loader=templateLoader)
templateVars = {"injectors": injectors,
"controller": controller}
#
# Configure the cluster
#
template = templateEnv.get_template("cluster.j2")
clients = open("cluster.ini", 'w')
clients.write(template.render(templateVars))
clients.close()
if __name__ == "__main__":
try:
region = sys.argv[1]
    except IndexError:
        print "usage : ec2tool.py REGION"
sys.exit(1)
conn = cloud_connect(region)
print "connected"
instances = conn.get_all_instances()
controller, injectors = parse_instances(instances)
print "found\n {} injectors".format(len(injectors))
if controller is None:
print "ERROR didn't found any controller"
sys.exit(1)
else:
print " controller : tsung@{} ".format(controller.ip_address)
#
#
with open("linux-od.json") as data_file:
data = json.load(data_file)
#
#
write_nodes(controller, injectors, data)
write_ini(injectors, controller)
#
print 'ansible-playbook -i cluster.ini -u ubuntu playbooks/tsung.yml'
| gpl-3.0 | 6,011,133,720,753,712,000 | 24.713568 | 112 | 0.560876 | false |
ktbyers/netmiko | netmiko/cisco/cisco_viptela.py | 1 | 3093 | """Subclass specific to Cisco Viptela."""
from typing import Union, Sequence, TextIO, Any
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class CiscoViptelaSSH(CiscoSSHConnection):
"""Subclass specific to Cisco Viptela."""
def session_preparation(self) -> None:
"""Prepare the session after the connection has been established."""
self._test_channel_read(pattern=r"[>#]")
self.set_base_prompt()
self.disable_paging(command="paginate false")
def check_config_mode(self, check_string: str = ")#", pattern: str = "#") -> bool:
"""Checks if the device is in configuration mode or not."""
return super().check_config_mode(check_string=check_string, pattern=pattern)
def commit(self, confirm: bool = False, confirm_response: str = "") -> str:
cmd = "commit"
return super().save_config(
cmd=cmd, confirm=confirm, confirm_response=confirm_response
)
def config_mode(
self,
config_command: str = "conf terminal",
pattern: str = "",
re_flags: int = 0,
) -> str:
return super().config_mode(
config_command=config_command, pattern=pattern, re_flags=re_flags
)
def send_config_set( # type: ignore
self,
config_commands: Union[str, Sequence[str], TextIO, None] = None,
exit_config_mode: bool = False,
**kwargs: Any,
) -> str:
return super().send_config_set(
config_commands=config_commands, exit_config_mode=exit_config_mode, **kwargs
)
def exit_config_mode(self, exit_config: str = "end", pattern: str = r"#") -> str:
"""
Exit from configuration mode.
Viptela might have the following in the output (if no 'commit()' occurred.
Uncommitted changes found, commit them? [yes/no/CANCEL]
"""
output = ""
if self.check_config_mode():
self.write_channel(self.normalize_cmd(exit_config))
# Make sure you read until you detect the command echo (avoid getting out of sync)
if self.global_cmd_verify is not False:
output += self.read_until_pattern(
pattern=re.escape(exit_config.strip())
)
if not re.search(pattern, output, flags=re.M):
uncommit_pattern = r"Uncommitted changes found"
new_pattern = f"({pattern}|{uncommit_pattern})"
output += self.read_until_pattern(pattern=new_pattern)
                # Do not save 'uncommitted changes'
if uncommit_pattern in output:
self.write_channel(self.normalize_cmd("no"))
output += self.read_until_pattern(pattern=pattern)
if self.check_config_mode():
raise ValueError("Failed to exit configuration mode")
return output
def save_config(
self, cmd: str = "commit", confirm: bool = False, confirm_response: str = ""
) -> str:
"""Saves Config"""
raise NotImplementedError
| mit | -8,207,812,357,518,117,000 | 37.6625 | 94 | 0.593922 | false |
nemesisdesign/openwisp2 | openwisp_controller/config/controller/views.py | 1 | 14788 | import json
from ipaddress import ip_address
from django.core.exceptions import FieldDoesNotExist, ValidationError
from django.db import transaction
from django.db.models import Q
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.views.generic.base import View
from django.views.generic.detail import SingleObjectMixin
from swapper import load_model
from .. import settings as app_settings
from ..signals import checksum_requested, config_download_requested, device_registered
from ..utils import (
ControllerResponse,
forbid_unallowed,
get_object_or_404,
invalid_response,
send_device_config,
send_vpn_config,
update_last_ip,
)
Device = load_model('config', 'Device')
OrganizationConfigSettings = load_model('config', 'OrganizationConfigSettings')
Vpn = load_model('config', 'Vpn')
class BaseConfigView(SingleObjectMixin, View):
"""
Base view that implements a ``get_object`` method
Subclassed by all views dealing with existing objects
"""
def get_object(self, *args, **kwargs):
kwargs['config__isnull'] = False
return get_object_or_404(self.model, *args, **kwargs)
class CsrfExtemptMixin(object):
"""
    Mixin that makes the view exempt from CSRF protection
"""
@method_decorator(csrf_exempt)
def dispatch(self, request, *args, **kwargs):
return super().dispatch(request, *args, **kwargs)
class UpdateLastIpMixin(object):
def update_last_ip(self, device, request):
result = update_last_ip(device, request)
if result:
# avoid that any other device in the
# same org stays with the same management_ip
# This can happen when management interfaces are using DHCP
# and they get a new address which was previously used by another
            # device that may now be offline. Without this fix we would end
            # up with two devices having the same management_ip, which would
            # confuse OpenWISP
self.model.objects.filter(
organization=device.organization, management_ip=device.management_ip
).exclude(pk=device.pk).update(management_ip='')
# in the case of last_ip, we take a different approach,
# because it may be a public IP. If it's a public IP we will
# allow it to be duplicated
if ip_address(device.last_ip).is_private:
Device.objects.filter(
organization=device.organization, last_ip=device.last_ip
).exclude(pk=device.pk).update(last_ip='')
return result
class ActiveOrgMixin(object):
"""
adds check to organization.is_active to ``get_object`` method
"""
def get_object(self, *args, **kwargs):
kwargs['organization__is_active'] = True
return super().get_object(*args, **kwargs)
class DeviceChecksumView(ActiveOrgMixin, UpdateLastIpMixin, BaseConfigView):
"""
returns device's configuration checksum
"""
model = Device
def get(self, request, *args, **kwargs):
device = self.get_object(*args, **kwargs)
bad_request = forbid_unallowed(request, 'GET', 'key', device.key)
if bad_request:
return bad_request
self.update_last_ip(device, request)
checksum_requested.send(
sender=device.__class__, instance=device, request=request
)
return ControllerResponse(device.config.checksum, content_type='text/plain')
class DeviceDownloadConfigView(ActiveOrgMixin, BaseConfigView):
"""
returns configuration archive as attachment
"""
model = Device
def get(self, request, *args, **kwargs):
device = self.get_object(*args, **kwargs)
bad_request = forbid_unallowed(request, 'GET', 'key', device.key)
if bad_request:
return bad_request
config_download_requested.send(
sender=device.__class__, instance=device, request=request
)
return send_device_config(device.config, request)
class DeviceUpdateInfoView(ActiveOrgMixin, CsrfExtemptMixin, BaseConfigView):
"""
updates general information about the device
"""
model = Device
UPDATABLE_FIELDS = ['os', 'model', 'system']
def post(self, request, *args, **kwargs):
device = self.get_object(*args, **kwargs)
bad_request = forbid_unallowed(request, 'POST', 'key', device.key)
if bad_request:
return bad_request
# update device information
for attr in self.UPDATABLE_FIELDS:
if attr in request.POST:
setattr(device, attr, request.POST.get(attr))
# validate and save everything or fail otherwise
try:
with transaction.atomic():
device.full_clean()
device.save()
except ValidationError as e:
# dump message_dict as JSON,
# this should make it easy to debug
return ControllerResponse(
json.dumps(e.message_dict, indent=4, sort_keys=True),
content_type='text/plain',
status=400,
)
return ControllerResponse('update-info: success', content_type='text/plain')
class DeviceReportStatusView(ActiveOrgMixin, CsrfExtemptMixin, BaseConfigView):
"""
updates status of config objects
"""
model = Device
def post(self, request, *args, **kwargs):
device = self.get_object(*args, **kwargs)
config = device.config
# ensure request is well formed and authorized
allowed_status = [choices[0] for choices in config.STATUS]
allowed_status.append('running') # backward compatibility
required_params = [('key', device.key), ('status', allowed_status)]
for key, value in required_params:
bad_response = forbid_unallowed(request, 'POST', key, value)
if bad_response:
return bad_response
status = request.POST.get('status')
        # maintain backward compatibility with old agents
# ("running" was changed to "applied")
status = status if status != 'running' else 'applied'
# call set_status_{status} method on Config model
method_name = f'set_status_{status}'
getattr(config, method_name)()
return ControllerResponse(
f'report-result: success\ncurrent-status: {config.status}\n',
content_type='text/plain',
)
class DeviceRegisterView(UpdateLastIpMixin, CsrfExtemptMixin, View):
"""
registers new Config objects
"""
model = Device
org_config_settings_model = OrganizationConfigSettings
UPDATABLE_FIELDS = ['os', 'model', 'system']
def init_object(self, **kwargs):
"""
initializes Config object with incoming POST data
"""
device_model = self.model
config_model = device_model.get_config_model()
options = {}
for attr in kwargs.keys():
# skip attributes that are not model fields
try:
device_model._meta.get_field(attr)
except FieldDoesNotExist:
continue
options[attr] = kwargs.get(attr)
        # do not specify the key if:
        # - app_settings.CONSISTENT_REGISTRATION is False
        # - key is ``None`` (it would cause an exception)
if 'key' in options and (
app_settings.CONSISTENT_REGISTRATION is False or options['key'] is None
):
del options['key']
if 'hardware_id' in options and options['hardware_id'] == "":
options['hardware_id'] = None
config = config_model(device=device_model(**options), backend=kwargs['backend'])
config.organization = self.organization
config.device.organization = self.organization
return config
def get_template_queryset(self, config):
"""
returns Template model queryset
"""
queryset = config.get_template_model().objects.all()
# filter templates of the same organization or shared templates
return queryset.filter(Q(organization=self.organization) | Q(organization=None))
def add_tagged_templates(self, config, request):
"""
adds templates specified in incoming POST tag setting
"""
tags = request.POST.get('tags')
if not tags:
return
# retrieve tags and add them to current config
tags = tags.split()
queryset = self.get_template_queryset(config)
templates = queryset.filter(tags__name__in=tags).only('id').distinct()
for template in templates:
config.templates.add(template)
def invalid(self, request):
"""
ensures request is well formed
"""
allowed_backends = [path for path, name in app_settings.BACKENDS]
required_params = [
('secret', None),
('name', None),
('mac_address', None),
('backend', allowed_backends),
]
        # validate required params or forbid the request
for key, value in required_params:
invalid_response = forbid_unallowed(request, 'POST', key, value)
if invalid_response:
return invalid_response
def forbidden(self, request):
"""
ensures request is authorized:
- secret matches an organization's shared_secret
- the organization has registration_enabled set to True
"""
try:
secret = request.POST.get('secret')
org_settings = self.org_config_settings_model.objects.select_related(
'organization'
).get(shared_secret=secret, organization__is_active=True)
except self.org_config_settings_model.DoesNotExist:
return invalid_response(request, 'error: unrecognized secret', status=403)
if not org_settings.registration_enabled:
return invalid_response(request, 'error: registration disabled', status=403)
# set an organization attribute as a side effect
# this attribute will be used in ``init_object``
self.organization = org_settings.organization
def post(self, request, *args, **kwargs):
"""
POST logic
"""
if not app_settings.REGISTRATION_ENABLED:
return ControllerResponse('error: registration disabled', status=403)
# ensure request is valid
bad_response = self.invalid(request)
if bad_response:
return bad_response
# ensure request is allowed
forbidden = self.forbidden(request)
if forbidden:
return forbidden
# prepare model attributes
key = None
if app_settings.CONSISTENT_REGISTRATION:
key = request.POST.get('key')
# try retrieving existing Device first
# (key is not None only if CONSISTENT_REGISTRATION is enabled)
new = False
try:
device = self.model.objects.get(key=key)
# update hw info
for attr in self.UPDATABLE_FIELDS:
if attr in request.POST:
setattr(device, attr, request.POST.get(attr))
config = device.config
        # if the queryset lookup fails, instantiate a new Device and Config
except self.model.DoesNotExist:
if not app_settings.REGISTRATION_SELF_CREATION:
return ControllerResponse(
'Device not found in the system, please create it first.',
status=404,
)
new = True
config = self.init_object(**request.POST.dict())
device = config.device
        # if the queryset lookup succeeds but the device has no related config
# instantiate new Config but reuse existing device
except self.model.config.RelatedObjectDoesNotExist:
config = self.init_object(**request.POST.dict())
config.device = device
# update last_ip field of device
device.last_ip = request.META.get('REMOTE_ADDR')
# validate and save everything or fail otherwise
try:
with transaction.atomic():
device.full_clean()
device.save()
config.full_clean()
config.save()
except ValidationError as e:
# dump message_dict as JSON,
# this should make it easy to debug
return ControllerResponse(
json.dumps(e.message_dict, indent=4, sort_keys=True),
content_type='text/plain',
status=400,
)
# add templates specified in tags
self.add_tagged_templates(config, request)
# emit device registered signal
device_registered.send(sender=device.__class__, instance=device, is_new=new)
# prepare response
s = (
'registration-result: success\n'
'uuid: {id}\n'
'key: {key}\n'
'hostname: {name}\n'
'is-new: {is_new}\n'
)
attributes = device.__dict__.copy()
attributes.update({'id': device.pk.hex, 'key': device.key, 'is_new': int(new)})
return ControllerResponse(
s.format(**attributes), content_type='text/plain', status=201
)
class VpnChecksumView(BaseConfigView):
"""
returns vpn's configuration checksum
"""
model = Vpn
def get(self, request, *args, **kwargs):
vpn = self.get_object(*args, **kwargs)
bad_request = forbid_unallowed(request, 'GET', 'key', vpn.key)
if bad_request:
return bad_request
checksum_requested.send(sender=vpn.__class__, instance=vpn, request=request)
return ControllerResponse(vpn.checksum, content_type='text/plain')
class VpnDownloadConfigView(BaseConfigView):
"""
returns configuration archive as attachment
"""
model = Vpn
def get(self, request, *args, **kwargs):
vpn = self.get_object(*args, **kwargs)
bad_request = forbid_unallowed(request, 'GET', 'key', vpn.key)
if bad_request:
return bad_request
config_download_requested.send(
sender=vpn.__class__, instance=vpn, request=request
)
return send_vpn_config(vpn, request)
device_checksum = DeviceChecksumView.as_view()
device_download_config = DeviceDownloadConfigView.as_view()
device_update_info = DeviceUpdateInfoView.as_view()
device_report_status = DeviceReportStatusView.as_view()
device_register = DeviceRegisterView.as_view()
vpn_checksum = VpnChecksumView.as_view()
vpn_download_config = VpnDownloadConfigView.as_view()
| gpl-3.0 | 6,931,061,912,724,383,000 | 35.78607 | 88 | 0.61719 | false |
pinax/pinax-blog | pinax/blog/admin.py | 1 | 3056 | from functools import partial as curry
from django.contrib import admin
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from pinax.images.admin import ImageInline
from pinax.images.models import ImageSet
from .conf import settings
from .forms import AdminPostForm
from .models import Blog, Post, ReviewComment, Section
class PostImageSet(ImageSet):
class Meta:
proxy = True
class ReviewInline(admin.TabularInline):
model = ReviewComment
def make_published(modeladmin, request, queryset):
queryset = queryset.exclude(state=Post.STATE_CHOICES[-1][0], published__isnull=False)
queryset.update(state=Post.STATE_CHOICES[-1][0])
queryset.filter(published__isnull=True).update(published=timezone.now())
make_published.short_description = _("Publish selected posts")
class PostAdmin(admin.ModelAdmin):
list_display = ["title", "state", "section", "published", "show_secret_share_url"]
list_filter = ["section", "state"]
form = AdminPostForm
actions = [make_published]
fields = [
"section",
"title",
"slug",
"author",
"markup",
"teaser",
"content",
"description",
"sharable_url",
"state",
"published",
"image_set" # maybe this https://github.com/anziem/django_reverse_admin
]
readonly_fields = ["sharable_url"]
prepopulated_fields = {"slug": ("title",)}
inlines = [
ReviewInline,
]
def show_secret_share_url(self, obj):
return '<a href="{}">{}</a>'.format(obj.sharable_url, obj.sharable_url)
show_secret_share_url.short_description = _("Share this url")
show_secret_share_url.allow_tags = True
def formfield_for_dbfield(self, db_field, **kwargs):
request = kwargs.get("request")
if db_field.name == "author":
ff = super().formfield_for_dbfield(db_field, **kwargs)
ff.initial = request.user.id
return ff
return super().formfield_for_dbfield(db_field, **kwargs)
def get_form(self, request, obj=None, **kwargs):
kwargs.update({
"formfield_callback": curry(self.formfield_for_dbfield, request=request),
})
return super().get_form(request, obj, **kwargs)
def save_form(self, request, form, change):
# this is done for explicitness that we want form.save to commit
# form.save doesn't take a commit kwarg for this reason
return form.save(Blog.objects.first() if not settings.PINAX_BLOG_SCOPING_MODEL else None)
if settings.PINAX_BLOG_SCOPING_MODEL:
PostAdmin.fields.insert(0, "blog")
PostAdmin.list_filter.append("blog__scoper")
class SectionAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("name",)}
admin.site.register(Post, PostAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(
PostImageSet,
list_display=["blog_post", "primary_image", "created_by", "created_at"],
raw_id_fields=["created_by"],
inlines=[ImageInline],
)
| mit | 7,357,990,425,241,163,000 | 29.56 | 97 | 0.659359 | false |
roberthodgen/thought-jot | src/utilities.py | 1 | 2732 | """
The MIT License (MIT)
Copyright (c) 2015 Robert Hodgen
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from ndb_users import users
import re
import google.net.proto.ProtocolBuffer
from google.appengine.ext import ndb
from google.appengine.api import mail
def permalinkify(string):
""" Return a clean URL-friendly version of `string`. """
    clean = string.lower().strip()  # lowercase, stripped of whitespace
    clean = re.sub(r'\s+', '-', clean)  # replace runs of whitespace with dashes "-"
    clean = re.sub(r'[^a-z0-9-]', '', clean)  # strip non-alphanumeric (dashes kept)
return clean
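# Illustrative example: permalinkify("Hello, World!") returns "hello-world".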
def key_for_urlsafe_id(key_id):
""" Try returning an NDB Key for `key_id`. None otherwise. """
    try:
        return ndb.Key(urlsafe=key_id)
    except google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError:
        # a malformed urlsafe id raises a decode error; treat it as None
        return None
def send_project_contributor_email(email_address, user, project):
""" Send `email` an email notifying them they've been added as a contributor
on `project`. """
sender_email_address = users._email_sender()
subject = ''.join([project.name, ' invite'])
with open('resource/email/project_contributor.txt', 'r') as f:
body_text = f.read()
body_text = body_text.format(login='http://thought-jot.appspot.com/login',
from_email=user.email, to_email=email_address,
project_name=project.name)
mail.send_mail(sender_email_address, email_address, subject, body_text)
def str_to_bool(string, allow_none=False):
""" Return a Boolean value for `string`. """
if allow_none and string is None:
return None
if string == 'True' or string == 'true':
return True
else:
return False
| mit | 7,328,222,909,209,888,000 | 34.025641 | 80 | 0.712299 | false |
epuzanov/ZenPacks.community.CIMMon | ZenPacks/community/CIMMon/modeler/plugins/community/cim/SNIANetworkPortMap.py | 1 | 3274 | ################################################################################
#
# This program is part of the CIMMon Zenpack for Zenoss.
# Copyright (C) 2012 Egor Puzanov.
#
# This program can be used under the GNU General Public License version 2
# You can find full information here: http://www.zenoss.com/oss
#
################################################################################
__doc__="""SNIANetworkPortMap
SNIANetworkPortMap maps SNIA_NetworkPort class to CIM_NetworkPort class.
$Id: SNIANetworkPortMap.py,v 1.0 2012/01/23 23:50:55 egor Exp $"""
__version__ = '$Revision: 1.0 $'[11:-2]
from ZenPacks.community.CIMMon.modeler.plugins.community.cim.CIMNetworkPortMap \
import CIMNetworkPortMap
class SNIANetworkPortMap(CIMNetworkPortMap):
"""Map SNIA_NetworkPort CIM class to CIM_NetworkPort class"""
def queries(self, device):
connectionString = getattr(device, 'zCIMConnectionString', '')
if not connectionString:
return {}
cs = self.prepareCS(device, connectionString)
return {
"CIM_NetworkPort":
(
"SELECT * FROM CIM_NetworkPort",
None,
cs,
{
"setPath":"__PATH",
"description":"Description",
"mtu":"ActiveMaximumTransmissionUnit",
"interfaceName":"ElementName",
"adminStatus":"EnabledDefault",
"operStatus":"EnabledState",
"type":"LinkTechnology",
"macaddress":"PermanentAddress",
"speed":"Speed",
"_sysname":"SystemName",
}
),
"CIM_IPProtocolEndpoint":
(
"SELECT * FROM CIM_IPProtocolEndpoint",
None,
cs,
{
"_path":"__PATH",
"_ipAddress":"Address",
"_ipSubnet":"SubnetMask",
}
),
"CIM_PortImplementsEndpoint":
(
"SELECT Antecedent,Dependent FROM CIM_PortImplementsEndpoint",
None,
cs,
{
"ant":"Antecedent", # LogicalPort
"dep":"Dependent", # ProtocolEndpoint
}
),
"CIM_SystemComponent":
(
"SELECT GroupComponent,PartComponent FROM CIM_SystemComponent",
None,
cs,
{
"gc":"GroupComponent", # System
"pc":"PartComponent", # SystemComponent
},
),
"CIM_ElementStatisticalData":
(
"SELECT ManagedElement,Stats FROM CIM_ElementStatisticalData",
None,
cs,
{
"me":"ManagedElement",
"stats":"Stats",
},
),
}
| gpl-2.0 | -6,646,513,954,352,335,000 | 35.377778 | 83 | 0.419976 | false |
quokkaproject/quokka-classes | pipelines.py | 1 | 2318 | # coding: utf-8
from flask import request
from quokka.modules.cart.pipelines.base import CartPipeline
from quokka.utils import get_current_user
from .models import CourseSubscription, Subscriber
class SetSubscriber(CartPipeline):
def process(self):
name = request.form.get("name")
email = request.form.get("email")
area_code = request.form.get("area_code")
phone = request.form.get("phone")
document = request.form.get("document")
address = request.form.get("address")
confirm = request.form.get("classes_setsubscriber_confirm")
if not confirm:
return self.render('classes/setsubscriber.html', cart=self.cart)
formdata = dict(name=name, email=email, area_code=area_code,
phone=phone, document=document, address=address)
subscriptions = CourseSubscription.objects.filter(
cart=self.cart
)
user = get_current_user()
for subscription in subscriptions:
subscription.subscriber = self.get_subscriber(user, **formdata)
subscription.save()
self.cart.sender_data = {
"name": name or user.name,
"email": email or user.email,
"area_code": area_code,
"phone": phone.replace('-', '').replace('(', '').replace(')', ''),
}
self.cart.addlog("SetSubscriber Pipeline: defined sender data")
return self.go()
def get_subscriber(self, user, **kwargs):
if not user:
return None
try:
sub = Subscriber.objects.get(user=user)
sub.name = kwargs.get('name')
sub.email = kwargs.get('email')
sub.document = kwargs.get('document')
sub.address = kwargs.get('address')
sub.phone = u"%(area_code)s%(phone)s" % kwargs
sub.save()
return sub
except:
self.cart.addlog("Creating a new subscriber", save=False)
return Subscriber.objects.create(
name=kwargs.get('name'),
email=kwargs.get('email'),
user=user,
document=kwargs.get('document'),
address=kwargs.get('address'),
phone=u"%(area_code)s%(phone)s" % kwargs
)
| mit | 7,357,983,847,842,146,000 | 33.088235 | 78 | 0.572045 | false |
walshjon/openmc | openmc/region.py | 1 | 18303 | from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from collections.abc import Iterable, MutableSequence
from copy import deepcopy
import numpy as np
from openmc.checkvalue import check_type
class Region(metaclass=ABCMeta):
"""Region of space that can be assigned to a cell.
Region is an abstract base class that is inherited by
:class:`openmc.Halfspace`, :class:`openmc.Intersection`,
:class:`openmc.Union`, and :class:`openmc.Complement`. Each of those
respective classes are typically not instantiated directly but rather are
created through operators of the Surface and Region classes.
"""
def __and__(self, other):
return Intersection((self, other))
def __or__(self, other):
return Union((self, other))
def __invert__(self):
return Complement(self)
@abstractmethod
def __contains__(self, point):
pass
@abstractmethod
def __str__(self):
pass
def __eq__(self, other):
if not isinstance(other, type(self)):
return False
else:
return str(self) == str(other)
def __ne__(self, other):
return not self == other
def get_surfaces(self, surfaces=None):
"""
Recursively find all the surfaces referenced by a region and return them
Parameters
----------
surfaces: collections.OrderedDict, optional
Dictionary mapping surface IDs to :class:`openmc.Surface` instances
Returns
-------
surfaces: collections.OrderedDict
Dictionary mapping surface IDs to :class:`openmc.Surface` instances
"""
if surfaces is None:
surfaces = OrderedDict()
for region in self:
surfaces = region.get_surfaces(surfaces)
return surfaces
@staticmethod
def from_expression(expression, surfaces):
"""Generate a region given an infix expression.
Parameters
----------
expression : str
Boolean expression relating surface half-spaces. The possible
operators are union '|', intersection ' ', and complement '~'. For
example, '(1 -2) | 3 ~(4 -5)'.
surfaces : dict
            Dictionary whose keys are surface IDs that appear in the Boolean
expression and whose values are Surface objects.
"""
# Strip leading and trailing whitespace
expression = expression.strip()
# Convert the string expression into a list of tokens, i.e., operators
# and surface half-spaces, representing the expression in infix
# notation.
i = 0
i_start = -1
tokens = []
while i < len(expression):
if expression[i] in '()|~ ':
                # If a special character appears immediately after a
                # non-operator, create a token with the appropriate half-space
if i_start >= 0:
j = int(expression[i_start:i])
if j < 0:
tokens.append(-surfaces[abs(j)])
else:
tokens.append(+surfaces[abs(j)])
if expression[i] in '()|~':
# For everything other than intersection, add the operator
# to the list of tokens
tokens.append(expression[i])
else:
# Find next non-space character
while expression[i+1] == ' ':
i += 1
# If previous token is a halfspace or right parenthesis and next token
                    # is not a left parenthesis or union operator, that implies that the
# whitespace is to be interpreted as an intersection operator
if (i_start >= 0 or tokens[-1] == ')') and \
expression[i+1] not in ')|':
tokens.append(' ')
i_start = -1
else:
# Check for invalid characters
if expression[i] not in '-+0123456789':
raise SyntaxError("Invalid character '{}' in expression"
.format(expression[i]))
# If we haven't yet reached the start of a word, start one
if i_start < 0:
i_start = i
i += 1
# If we've reached the end and we're still in a word, create a
# half-space token and add it to the list
if i_start >= 0:
j = int(expression[i_start:])
if j < 0:
tokens.append(-surfaces[abs(j)])
else:
tokens.append(+surfaces[abs(j)])
# The functions below are used to apply an operator to operands on the
# output queue during the shunting yard algorithm.
def can_be_combined(region):
return isinstance(region, Complement) or hasattr(region, 'surface')
def apply_operator(output, operator):
r2 = output.pop()
if operator == ' ':
r1 = output.pop()
if isinstance(r1, Intersection):
r1 &= r2
output.append(r1)
elif isinstance(r2, Intersection) and can_be_combined(r1):
r2.insert(0, r1)
output.append(r2)
else:
output.append(r1 & r2)
elif operator == '|':
r1 = output.pop()
if isinstance(r1, Union):
r1 |= r2
output.append(r1)
elif isinstance(r2, Union) and can_be_combined(r1):
r2.insert(0, r1)
output.append(r2)
else:
output.append(r1 | r2)
elif operator == '~':
output.append(~r2)
# The following is an implementation of the shunting yard algorithm to
# generate an abstract syntax tree for the region expression.
output = []
stack = []
precedence = {'|': 1, ' ': 2, '~': 3}
associativity = {'|': 'left', ' ': 'left', '~': 'right'}
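        # Binding strength: '~' > ' ' (intersection) > '|' (union), so e.g.
        # '1 -2 | 3' parses as ((+1 & -2) | +3).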
for token in tokens:
if token in (' ', '|', '~'):
# Normal operators
while stack:
op = stack[-1]
if (op not in ('(', ')') and
((associativity[token] == 'right' and
precedence[token] < precedence[op]) or
(associativity[token] == 'left' and
precedence[token] <= precedence[op]))):
apply_operator(output, stack.pop())
else:
break
stack.append(token)
elif token == '(':
# Left parentheses
stack.append(token)
elif token == ')':
# Right parentheses
                while stack and stack[-1] != '(':
                    apply_operator(output, stack.pop())
                if not stack:
                    raise SyntaxError('Mismatched parentheses in '
                                      'region specification.')
                stack.pop()
else:
# Surface halfspaces
output.append(token)
while stack:
if stack[-1] in '()':
raise SyntaxError('Mismatched parentheses in region '
'specification.')
apply_operator(output, stack.pop())
# Since we are generating an abstract syntax tree rather than a reverse
# Polish notation expression, the output queue should have a single item
# at the end
return output[0]
@abstractmethod
def clone(self, memo=None):
"""Create a copy of this region - each of the surfaces in the
region's nodes will be cloned and will have new unique IDs.
Parameters
----------
memo : dict or None
A nested dictionary of previously cloned objects. This parameter
is used internally and should not be specified by the user.
Returns
-------
clone : openmc.Region
The clone of this region
Raises
------
NotImplementedError
This method is not implemented for the abstract region class.
"""
raise NotImplementedError('The clone method is not implemented for '
'the abstract region class.')
class Intersection(Region, MutableSequence):
r"""Intersection of two or more regions.
Instances of Intersection are generally created via the & operator applied
to two instances of :class:`openmc.Region`. This is illustrated in the
following example:
>>> equator = openmc.ZPlane(z0=0.0)
>>> earth = openmc.Sphere(R=637.1e6)
>>> northern_hemisphere = -earth & +equator
>>> southern_hemisphere = -earth & -equator
>>> type(northern_hemisphere)
<class 'openmc.region.Intersection'>
Instances of this class behave like a mutable sequence, e.g., they can be
indexed and have an append() method.
Parameters
----------
nodes : iterable of openmc.Region
Regions to take the intersection of
Attributes
----------
bounding_box : tuple of numpy.array
Lower-left and upper-right coordinates of an axis-aligned bounding box
"""
def __init__(self, nodes):
self._nodes = list(nodes)
def __and__(self, other):
new = Intersection(self)
new &= other
return new
def __iand__(self, other):
if isinstance(other, Intersection):
self.extend(other)
else:
self.append(other)
return self
# Implement mutable sequence protocol by delegating to list
def __getitem__(self, key):
return self._nodes[key]
def __setitem__(self, key, value):
self._nodes[key] = value
def __delitem__(self, key):
del self._nodes[key]
def __len__(self):
return len(self._nodes)
def insert(self, index, value):
self._nodes.insert(index, value)
def __contains__(self, point):
"""Check whether a point is contained in the region.
Parameters
----------
point : 3-tuple of float
Cartesian coordinates, :math:`(x',y',z')`, of the point
Returns
-------
bool
Whether the point is in the region
"""
return all(point in n for n in self)
def __str__(self):
return '(' + ' '.join(map(str, self)) + ')'
@property
def bounding_box(self):
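        # The intersection's box is the overlap of the nodes' boxes:
        # elementwise max of the lower-left corners and min of the
        # upper-right corners.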
lower_left = np.array([-np.inf, -np.inf, -np.inf])
upper_right = np.array([np.inf, np.inf, np.inf])
for n in self:
lower_left_n, upper_right_n = n.bounding_box
lower_left[:] = np.maximum(lower_left, lower_left_n)
upper_right[:] = np.minimum(upper_right, upper_right_n)
return lower_left, upper_right
def clone(self, memo=None):
"""Create a copy of this region - each of the surfaces in the
intersection's nodes will be cloned and will have new unique IDs.
Parameters
----------
memo : dict or None
A nested dictionary of previously cloned objects. This parameter
is used internally and should not be specified by the user.
Returns
-------
clone : openmc.Intersection
The clone of this intersection
"""
if memo is None:
memo = {}
clone = deepcopy(self)
clone[:] = [n.clone(memo) for n in self]
return clone
class Union(Region, MutableSequence):
r"""Union of two or more regions.
Instances of Union are generally created via the | operator applied to two
instances of :class:`openmc.Region`. This is illustrated in the following
example:
>>> s1 = openmc.ZPlane(z0=0.0)
>>> s2 = openmc.Sphere(R=637.1e6)
>>> type(-s2 | +s1)
<class 'openmc.region.Union'>
Instances of this class behave like a mutable sequence, e.g., they can be
indexed and have an append() method.
Parameters
----------
nodes : iterable of openmc.Region
Regions to take the union of
Attributes
----------
bounding_box : 2-tuple of numpy.array
Lower-left and upper-right coordinates of an axis-aligned bounding box
"""
def __init__(self, nodes):
self._nodes = list(nodes)
def __or__(self, other):
new = Union(self)
new |= other
return new
def __ior__(self, other):
if isinstance(other, Union):
self.extend(other)
else:
self.append(other)
return self
# Implement mutable sequence protocol by delegating to list
def __getitem__(self, key):
return self._nodes[key]
def __setitem__(self, key, value):
self._nodes[key] = value
def __delitem__(self, key):
del self._nodes[key]
def __len__(self):
return len(self._nodes)
def insert(self, index, value):
self._nodes.insert(index, value)
def __contains__(self, point):
"""Check whether a point is contained in the region.
Parameters
----------
point : 3-tuple of float
Cartesian coordinates, :math:`(x',y',z')`, of the point
Returns
-------
bool
Whether the point is in the region
"""
return any(point in n for n in self)
def __str__(self):
return '(' + ' | '.join(map(str, self)) + ')'
@property
def bounding_box(self):
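        # The union's box must enclose every node's box: elementwise min of
        # the lower-left corners and max of the upper-right corners.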
lower_left = np.array([np.inf, np.inf, np.inf])
upper_right = np.array([-np.inf, -np.inf, -np.inf])
for n in self:
lower_left_n, upper_right_n = n.bounding_box
lower_left[:] = np.minimum(lower_left, lower_left_n)
upper_right[:] = np.maximum(upper_right, upper_right_n)
return lower_left, upper_right
def clone(self, memo=None):
"""Create a copy of this region - each of the surfaces in the
union's nodes will be cloned and will have new unique IDs.
Parameters
----------
memo : dict or None
A nested dictionary of previously cloned objects. This parameter
is used internally and should not be specified by the user.
Returns
-------
clone : openmc.Union
The clone of this union
"""
if memo is None:
memo = {}
clone = deepcopy(self)
clone[:] = [n.clone(memo) for n in self]
return clone
class Complement(Region):
"""Complement of a region.
The Complement of an existing :class:`openmc.Region` can be created by using
the ~ operator as the following example demonstrates:
>>> xl = openmc.XPlane(x0=-10.0)
>>> xr = openmc.XPlane(x0=10.0)
>>> yl = openmc.YPlane(y0=-10.0)
>>> yr = openmc.YPlane(y0=10.0)
>>> inside_box = +xl & -xr & +yl & -yl
>>> outside_box = ~inside_box
>>> type(outside_box)
<class 'openmc.region.Complement'>
Parameters
----------
node : openmc.Region
Region to take the complement of
Attributes
----------
node : openmc.Region
Regions to take the complement of
bounding_box : tuple of numpy.array
Lower-left and upper-right coordinates of an axis-aligned bounding box
"""
def __init__(self, node):
self.node = node
def __contains__(self, point):
"""Check whether a point is contained in the region.
Parameters
----------
point : 3-tuple of float
Cartesian coordinates, :math:`(x',y',z')`, of the point
Returns
-------
bool
Whether the point is in the region
"""
return point not in self.node
def __str__(self):
return '~' + str(self.node)
@property
def node(self):
return self._node
@node.setter
def node(self, node):
check_type('node', node, Region)
self._node = node
@property
def bounding_box(self):
# Use De Morgan's laws to distribute the complement operator so that it
# only applies to surface half-spaces, thus allowing us to calculate the
# bounding box in the usual recursive manner.
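        # De Morgan: ~(A | B) == ~A & ~B, and ~(A & B) == ~A | ~B.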
if isinstance(self.node, Union):
temp_region = Intersection(~n for n in self.node)
elif isinstance(self.node, Intersection):
temp_region = Union(~n for n in self.node)
elif isinstance(self.node, Complement):
temp_region = self.node.node
else:
temp_region = ~self.node
return temp_region.bounding_box
def get_surfaces(self, surfaces=None):
"""
Recursively find and return all the surfaces referenced by the node
Parameters
----------
surfaces: collections.OrderedDict, optional
Dictionary mapping surface IDs to :class:`openmc.Surface` instances
Returns
-------
surfaces: collections.OrderedDict
Dictionary mapping surface IDs to :class:`openmc.Surface` instances
"""
if surfaces is None:
surfaces = OrderedDict()
for region in self.node:
surfaces = region.get_surfaces(surfaces)
return surfaces
def clone(self, memo=None):
"""Create a copy of this region - each of the surfaces in the
complement's node will be cloned and will have new unique IDs.
Parameters
----------
memo : dict or None
A nested dictionary of previously cloned objects. This parameter
is used internally and should not be specified by the user.
Returns
-------
clone : openmc.Complement
The clone of this complement
"""
if memo is None:
memo = {}
clone = deepcopy(self)
clone.node = self.node.clone(memo)
return clone
| mit | -4,142,554,628,031,096,300 | 30.233788 | 90 | 0.543845 | false |
squilter/ardupilot | Tools/autotest/arduplane.py | 1 | 85180 | #!/usr/bin/env python
# Fly ArduPlane in SITL
from __future__ import print_function
import math
import os
import time
from pymavlink import quaternion
from pymavlink import mavutil
from common import AutoTest
from common import AutoTestTimeoutException
from common import NotAchievedException
from common import PreconditionFailedException
import operator
# get location of scripts
testdir = os.path.dirname(os.path.realpath(__file__))
SITL_START_LOCATION = mavutil.location(-35.362938, 149.165085, 585, 354)
WIND = "0,180,0.2" # speed,direction,variance
class AutoTestPlane(AutoTest):
@staticmethod
def get_not_armable_mode_list():
return []
@staticmethod
def get_not_disarmed_settable_modes_list():
return ["FOLLOW"]
@staticmethod
def get_no_position_not_settable_modes_list():
return []
@staticmethod
def get_position_armable_modes_list():
return ["GUIDED", "AUTO"]
@staticmethod
def get_normal_armable_modes_list():
return ["MANUAL", "STABILIZE", "ACRO"]
def log_name(self):
return "ArduPlane"
def test_filepath(self):
return os.path.realpath(__file__)
def sitl_start_location(self):
return SITL_START_LOCATION
def defaults_filepath(self):
return os.path.join(testdir, 'default_params/plane-jsbsim.parm')
def set_current_test_name(self, name):
self.current_test_name_directory = "ArduPlane_Tests/" + name + "/"
def default_frame(self):
return "plane-elevrev"
def apply_defaultfile_parameters(self):
# plane passes in a defaults_filepath in place of applying
# parameters afterwards.
pass
def is_plane(self):
return True
def get_stick_arming_channel(self):
return int(self.get_parameter("RCMAP_YAW"))
def get_disarm_delay(self):
return int(self.get_parameter("LAND_DISARMDELAY"))
def set_autodisarm_delay(self, delay):
self.set_parameter("LAND_DISARMDELAY", delay)
def takeoff(self, alt=150, alt_max=None, relative=True):
"""Takeoff to altitude."""
if alt_max is None:
alt_max = alt + 30
self.change_mode("FBWA")
self.wait_ready_to_arm()
self.arm_vehicle()
# some rudder to counteract the prop torque
self.set_rc(4, 1700)
# some up elevator to keep the tail down
self.set_rc(2, 1200)
# get it moving a bit first
self.set_rc(3, 1300)
self.wait_groundspeed(6, 100)
# a bit faster again, straighten rudder
self.set_rc(3, 1600)
self.set_rc(4, 1500)
self.wait_groundspeed(12, 100)
# hit the gas harder now, and give it some more elevator
self.set_rc(2, 1100)
self.set_rc(3, 2000)
# gain a bit of altitude
self.wait_altitude(alt, alt_max, timeout=30, relative=relative)
# level off
self.set_rc(2, 1500)
self.progress("TAKEOFF COMPLETE")
def fly_left_circuit(self):
"""Fly a left circuit, 200m on a side."""
self.mavproxy.send('switch 4\n')
self.wait_mode('FBWA')
self.set_rc(3, 2000)
self.wait_level_flight()
self.progress("Flying left circuit")
# do 4 turns
for i in range(0, 4):
# hard left
self.progress("Starting turn %u" % i)
self.set_rc(1, 1000)
self.wait_heading(270 - (90*i), accuracy=10)
self.set_rc(1, 1500)
self.progress("Starting leg %u" % i)
self.wait_distance(100, accuracy=20)
self.progress("Circuit complete")
def fly_RTL(self):
"""Fly to home."""
self.progress("Flying home in RTL")
self.mavproxy.send('switch 2\n')
self.wait_mode('RTL')
self.wait_location(self.homeloc,
accuracy=120,
target_altitude=self.homeloc.alt+100,
height_accuracy=20,
timeout=180)
self.progress("RTL Complete")
def fly_LOITER(self, num_circles=4):
"""Loiter where we are."""
self.progress("Testing LOITER for %u turns" % num_circles)
self.mavproxy.send('loiter\n')
self.wait_mode('LOITER')
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
initial_alt = m.alt
self.progress("Initial altitude %u\n" % initial_alt)
while num_circles > 0:
self.wait_heading(0, accuracy=10, timeout=60)
self.wait_heading(180, accuracy=10, timeout=60)
num_circles -= 1
self.progress("Loiter %u circles left" % num_circles)
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
final_alt = m.alt
self.progress("Final altitude %u initial %u\n" %
(final_alt, initial_alt))
self.mavproxy.send('mode FBWA\n')
self.wait_mode('FBWA')
if abs(final_alt - initial_alt) > 20:
raise NotAchievedException("Failed to maintain altitude")
self.progress("Completed Loiter OK")
def fly_CIRCLE(self, num_circles=1):
"""Circle where we are."""
self.progress("Testing CIRCLE for %u turns" % num_circles)
self.mavproxy.send('mode CIRCLE\n')
self.wait_mode('CIRCLE')
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
initial_alt = m.alt
self.progress("Initial altitude %u\n" % initial_alt)
while num_circles > 0:
self.wait_heading(0, accuracy=10, timeout=60)
self.wait_heading(180, accuracy=10, timeout=60)
num_circles -= 1
self.progress("CIRCLE %u circles left" % num_circles)
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
final_alt = m.alt
self.progress("Final altitude %u initial %u\n" %
(final_alt, initial_alt))
self.mavproxy.send('mode FBWA\n')
self.wait_mode('FBWA')
if abs(final_alt - initial_alt) > 20:
raise NotAchievedException("Failed to maintain altitude")
self.progress("Completed CIRCLE OK")
def wait_level_flight(self, accuracy=5, timeout=30):
"""Wait for level flight."""
tstart = self.get_sim_time()
self.progress("Waiting for level flight")
self.set_rc(1, 1500)
self.set_rc(2, 1500)
self.set_rc(4, 1500)
while self.get_sim_time_cached() < tstart + timeout:
m = self.mav.recv_match(type='ATTITUDE', blocking=True)
roll = math.degrees(m.roll)
pitch = math.degrees(m.pitch)
self.progress("Roll=%.1f Pitch=%.1f" % (roll, pitch))
if math.fabs(roll) <= accuracy and math.fabs(pitch) <= accuracy:
self.progress("Attained level flight")
return
raise NotAchievedException("Failed to attain level flight")
def change_altitude(self, altitude, accuracy=30):
"""Get to a given altitude."""
self.mavproxy.send('mode FBWA\n')
self.wait_mode('FBWA')
alt_error = self.mav.messages['VFR_HUD'].alt - altitude
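        # On this airframe RC channel 2 high pitches the nose down (2000
        # descends) and low pitches it up (1000 climbs); see takeoff() above.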
if alt_error > 0:
self.set_rc(2, 2000)
else:
self.set_rc(2, 1000)
self.wait_altitude(altitude-accuracy/2, altitude+accuracy/2)
self.set_rc(2, 1500)
self.progress("Reached target altitude at %u" %
self.mav.messages['VFR_HUD'].alt)
return self.wait_level_flight()
def axial_left_roll(self, count=1):
"""Fly a left axial roll."""
# full throttle!
self.set_rc(3, 2000)
self.change_altitude(self.homeloc.alt+300)
# fly the roll in manual
self.mavproxy.send('switch 6\n')
self.wait_mode('MANUAL')
while count > 0:
self.progress("Starting roll")
self.set_rc(1, 1000)
try:
self.wait_roll(-150, accuracy=90)
self.wait_roll(150, accuracy=90)
self.wait_roll(0, accuracy=90)
except Exception as e:
self.set_rc(1, 1500)
raise e
count -= 1
# back to FBWA
self.set_rc(1, 1500)
self.mavproxy.send('switch 4\n')
self.wait_mode('FBWA')
self.set_rc(3, 1700)
return self.wait_level_flight()
def inside_loop(self, count=1):
"""Fly a inside loop."""
# full throttle!
self.set_rc(3, 2000)
self.change_altitude(self.homeloc.alt+300)
# fly the loop in manual
self.mavproxy.send('switch 6\n')
self.wait_mode('MANUAL')
while count > 0:
self.progress("Starting loop")
self.set_rc(2, 1000)
self.wait_pitch(-60, accuracy=20)
self.wait_pitch(0, accuracy=20)
count -= 1
# back to FBWA
self.set_rc(2, 1500)
self.mavproxy.send('switch 4\n')
self.wait_mode('FBWA')
self.set_rc(3, 1700)
return self.wait_level_flight()
def set_attitude_target(self, tolerance=10):
"""Test setting of attitude target in guided mode."""
self.change_mode("GUIDED")
# self.set_parameter("STALL_PREVENTION", 0)
state_roll_over = "roll-over"
state_stabilize_roll = "stabilize-roll"
state_hold = "hold"
state_roll_back = "roll-back"
state_done = "done"
tstart = self.get_sim_time()
try:
state = state_roll_over
while state != state_done:
m = self.mav.recv_match(type='ATTITUDE',
blocking=True,
timeout=0.1)
now = self.get_sim_time_cached()
if now - tstart > 20:
raise AutoTestTimeoutException("Manuevers not completed")
if m is None:
continue
r = math.degrees(m.roll)
if state == state_roll_over:
target_roll_degrees = 60
if abs(r - target_roll_degrees) < tolerance:
state = state_stabilize_roll
stabilize_start = now
elif state == state_stabilize_roll:
# just give it a little time to sort it self out
if now - stabilize_start > 2:
state = state_hold
hold_start = now
elif state == state_hold:
target_roll_degrees = 60
if now - hold_start > tolerance:
state = state_roll_back
if abs(r - target_roll_degrees) > tolerance:
raise NotAchievedException("Failed to hold attitude")
elif state == state_roll_back:
target_roll_degrees = 0
if abs(r - target_roll_degrees) < tolerance:
state = state_done
else:
raise ValueError("Unknown state %s" % str(state))
m_nav = self.mav.messages['NAV_CONTROLLER_OUTPUT']
self.progress("%s Roll: %f desired=%f set=%f" %
(state, r, m_nav.nav_roll, target_roll_degrees))
time_boot_millis = 0 # FIXME
target_system = 1 # FIXME
target_component = 1 # FIXME
type_mask = 0b10000001 ^ 0xFF # FIXME
                # attitude quaternion built from Euler [roll, pitch, yaw] in radians:
q = quaternion.Quaternion([math.radians(target_roll_degrees),
0,
0])
roll_rate_radians = 0.5
pitch_rate_radians = 0
yaw_rate_radians = 0
thrust = 1.0
self.mav.mav.set_attitude_target_send(time_boot_millis,
target_system,
target_component,
type_mask,
q,
roll_rate_radians,
pitch_rate_radians,
yaw_rate_radians,
thrust)
except Exception as e:
self.mavproxy.send('mode FBWA\n')
self.wait_mode('FBWA')
self.set_rc(3, 1700)
raise e
# back to FBWA
self.mavproxy.send('mode FBWA\n')
self.wait_mode('FBWA')
self.set_rc(3, 1700)
self.wait_level_flight()
def test_stabilize(self, count=1):
"""Fly stabilize mode."""
# full throttle!
self.set_rc(3, 2000)
self.set_rc(2, 1300)
self.change_altitude(self.homeloc.alt+300)
self.set_rc(2, 1500)
self.mavproxy.send("mode STABILIZE\n")
self.wait_mode('STABILIZE')
while count > 0:
self.progress("Starting roll")
self.set_rc(1, 2000)
self.wait_roll(-150, accuracy=90)
self.wait_roll(150, accuracy=90)
self.wait_roll(0, accuracy=90)
count -= 1
self.set_rc(1, 1500)
self.wait_roll(0, accuracy=5)
# back to FBWA
self.mavproxy.send('mode FBWA\n')
self.wait_mode('FBWA')
self.set_rc(3, 1700)
return self.wait_level_flight()
def test_acro(self, count=1):
"""Fly ACRO mode."""
# full throttle!
self.set_rc(3, 2000)
self.set_rc(2, 1300)
self.change_altitude(self.homeloc.alt+300)
self.set_rc(2, 1500)
self.mavproxy.send("mode ACRO\n")
self.wait_mode('ACRO')
while count > 0:
self.progress("Starting roll")
self.set_rc(1, 1000)
self.wait_roll(-150, accuracy=90)
self.wait_roll(150, accuracy=90)
self.wait_roll(0, accuracy=90)
count -= 1
self.set_rc(1, 1500)
# back to FBWA
self.mavproxy.send('mode FBWA\n')
self.wait_mode('FBWA')
self.wait_level_flight()
self.mavproxy.send("mode ACRO\n")
self.wait_mode('ACRO')
count = 2
while count > 0:
self.progress("Starting loop")
self.set_rc(2, 1000)
self.wait_pitch(-60, accuracy=20)
self.wait_pitch(0, accuracy=20)
count -= 1
self.set_rc(2, 1500)
# back to FBWA
self.mavproxy.send('mode FBWA\n')
self.wait_mode('FBWA')
self.set_rc(3, 1700)
return self.wait_level_flight()
def test_FBWB(self, mode='FBWB'):
"""Fly FBWB or CRUISE mode."""
self.mavproxy.send("mode %s\n" % mode)
self.wait_mode(mode)
self.set_rc(3, 1700)
self.set_rc(2, 1500)
# lock in the altitude by asking for an altitude change then releasing
self.set_rc(2, 1000)
self.wait_distance(50, accuracy=20)
self.set_rc(2, 1500)
self.wait_distance(50, accuracy=20)
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
initial_alt = m.alt
self.progress("Initial altitude %u\n" % initial_alt)
self.progress("Flying right circuit")
# do 4 turns
for i in range(0, 4):
# hard left
self.progress("Starting turn %u" % i)
self.set_rc(1, 1800)
try:
self.wait_heading(0 + (90*i), accuracy=20, timeout=60)
except Exception as e:
self.set_rc(1, 1500)
raise e
self.set_rc(1, 1500)
self.progress("Starting leg %u" % i)
self.wait_distance(100, accuracy=20)
self.progress("Circuit complete")
self.progress("Flying rudder left circuit")
# do 4 turns
for i in range(0, 4):
# hard left
self.progress("Starting turn %u" % i)
self.set_rc(4, 1900)
try:
self.wait_heading(360 - (90*i), accuracy=20, timeout=60)
except Exception as e:
self.set_rc(4, 1500)
raise e
self.set_rc(4, 1500)
self.progress("Starting leg %u" % i)
self.wait_distance(100, accuracy=20)
self.progress("Circuit complete")
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
final_alt = m.alt
self.progress("Final altitude %u initial %u\n" %
(final_alt, initial_alt))
# back to FBWA
self.mavproxy.send('mode FBWA\n')
self.wait_mode('FBWA')
if abs(final_alt - initial_alt) > 20:
raise NotAchievedException("Failed to maintain altitude")
return self.wait_level_flight()
def fly_mission(self, filename, mission_timeout=60.0):
"""Fly a mission from a file."""
self.progress("Flying mission %s" % filename)
self.load_mission(filename)
self.mavproxy.send('switch 1\n') # auto mode
self.wait_mode('AUTO')
self.wait_waypoint(1, 7, max_dist=60)
self.wait_groundspeed(0, 0.5, timeout=mission_timeout)
self.mavproxy.expect("Auto disarmed")
self.progress("Mission OK")
def fly_do_reposition(self):
self.progress("Takeoff")
self.takeoff(alt=50)
self.set_rc(3, 1500)
self.progress("Entering guided and flying somewhere constant")
self.change_mode("GUIDED")
loc = self.mav.location()
self.location_offset_ne(loc, 500, 500)
new_alt = 100
self.run_cmd_int(
mavutil.mavlink.MAV_CMD_DO_REPOSITION,
0,
0,
0,
0,
int(loc.lat*1e7),
int(loc.lng*1e7),
new_alt, # alt
frame=mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT,
)
self.wait_altitude(new_alt-10, new_alt, timeout=30, relative=True)
self.fly_home_land_and_disarm()
def fly_deepstall(self):
# self.fly_deepstall_absolute()
self.fly_deepstall_relative()
def fly_deepstall_absolute(self):
self.start_subtest("DeepStall Relative Absolute")
self.set_parameter("LAND_TYPE", 1)
deepstall_elevator_pwm = 1661
self.set_parameter("LAND_DS_ELEV_PWM", deepstall_elevator_pwm)
self.load_mission("plane-deepstall-mission.txt")
self.change_mode("AUTO")
self.wait_ready_to_arm()
self.arm_vehicle()
self.progress("Waiting for deepstall messages")
self.wait_text("Deepstall: Entry: ", timeout=240)
# assume elevator is on channel 2:
self.wait_servo_channel_value(2, deepstall_elevator_pwm)
self.disarm_wait(timeout=120)
self.progress("Flying home")
self.takeoff(10)
self.set_parameter("LAND_TYPE", 0)
self.fly_home_land_and_disarm()
def fly_deepstall_relative(self):
self.start_subtest("DeepStall Relative")
self.set_parameter("LAND_TYPE", 1)
deepstall_elevator_pwm = 1661
self.set_parameter("LAND_DS_ELEV_PWM", deepstall_elevator_pwm)
self.load_mission("plane-deepstall-relative-mission.txt")
self.change_mode("AUTO")
self.wait_ready_to_arm()
self.arm_vehicle()
self.progress("Waiting for deepstall messages")
self.wait_text("Deepstall: Entry: ", timeout=240)
# assume elevator is on channel 2:
self.wait_servo_channel_value(2, deepstall_elevator_pwm)
self.disarm_wait(timeout=120)
self.progress("Flying home")
self.takeoff(100)
self.set_parameter("LAND_TYPE", 0)
self.fly_home_land_and_disarm(timeout=240)
def fly_do_change_speed(self):
# the following lines ensure we revert these parameter values
# - DO_CHANGE_AIRSPEED is a permanent vehicle change!
self.set_parameter("TRIM_ARSPD_CM", self.get_parameter("TRIM_ARSPD_CM"))
self.set_parameter("MIN_GNDSPD_CM", self.get_parameter("MIN_GNDSPD_CM"))
self.progress("Takeoff")
self.takeoff(alt=100)
self.set_rc(3, 1500)
# ensure we know what the airspeed is:
self.progress("Entering guided and flying somewhere constant")
self.change_mode("GUIDED")
self.run_cmd_int(
mavutil.mavlink.MAV_CMD_DO_REPOSITION,
0,
0,
0,
0,
12345, # lat*1e7
12345, # lon*1e7
100 # alt
)
self.delay_sim_time(10)
self.progress("Ensuring initial speed is known and relatively constant")
        initial_speed = 21.5
timeout = 10
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > timeout:
break
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
self.progress("GroundSpeed: %f want=%f" %
(m.groundspeed, initial_speed))
if abs(initial_speed - m.groundspeed) > 1:
raise NotAchievedException("Initial speed not as expected (want=%f got=%f" % (initial_speed, m.groundspeed))
self.progress("Setting groundspeed")
new_target_groundspeed = initial_speed + 5
self.run_cmd(
mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED,
1, # groundspeed
new_target_groundspeed,
-1, # throttle / no change
0, # absolute values
0,
0,
0)
self.wait_groundspeed(new_target_groundspeed-0.5, new_target_groundspeed+0.5, timeout=40)
self.progress("Adding some wind, ensuring groundspeed holds")
self.set_parameter("SIM_WIND_SPD", 5)
self.delay_sim_time(5)
self.wait_groundspeed(new_target_groundspeed-0.5, new_target_groundspeed+0.5, timeout=40)
self.set_parameter("SIM_WIND_SPD", 0)
self.progress("Setting airspeed")
new_target_airspeed = initial_speed + 5
self.run_cmd(
mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED,
0, # airspeed
new_target_airspeed,
-1, # throttle / no change
0, # absolute values
0,
0,
0)
self.wait_groundspeed(new_target_airspeed-0.5, new_target_airspeed+0.5)
self.progress("Adding some wind, hoping groundspeed increases/decreases")
self.set_parameter("SIM_WIND_SPD", 5)
self.set_parameter("SIM_WIND_DIR", 270)
self.delay_sim_time(5)
timeout = 10
tstart = self.get_sim_time()
while True:
if self.get_sim_time_cached() - tstart > timeout:
raise NotAchievedException("Did not achieve groundspeed delta")
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
delta = abs(m.airspeed - m.groundspeed)
want_delta = 4
self.progress("groundspeed and airspeed should be different (have=%f want=%f)" % (delta, want_delta))
if delta > want_delta:
break
self.fly_home_land_and_disarm()
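    def _example_do_change_speed(self, speed_type, speed):
        # Editor's hedged sketch (not in the upstream suite): DO_CHANGE_SPEED
        # packs its arguments as param1 = speed type (0 airspeed,
        # 1 groundspeed), param2 = target speed in m/s, param3 = throttle
        # (-1 means no change), param4 = 0 for absolute / 1 for relative;
        # the remaining params are unused.
        self.run_cmd(
            mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED,
            speed_type,
            speed,
            -1, # throttle / no change
            0, # absolute values
            0,
            0,
            0)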
def fly_home_land_and_disarm(self, timeout=120):
filename = "flaps.txt"
self.progress("Using %s to fly home" % filename)
num_wp = self.load_mission(filename)
self.change_mode("AUTO")
self.mavproxy.send('wp set 7\n')
self.drain_mav()
# TODO: reflect on file to find this magic waypoint number?
# self.wait_waypoint(7, num_wp-1, timeout=500) # we tend to miss the final waypoint by a fair bit, and this is probably too noisy anyway?
self.wait_disarmed(timeout=timeout)
def fly_flaps(self):
"""Test flaps functionality."""
filename = "flaps.txt"
self.context_push()
ex = None
try:
flaps_ch = 5
servo_ch = 5
self.set_parameter("SERVO%u_FUNCTION" % servo_ch, 3) # flapsauto
self.set_parameter("RC%u_OPTION" % flaps_ch, 208) # Flaps RCx_OPTION
self.set_parameter("LAND_FLAP_PERCNT", 50)
self.set_parameter("LOG_DISARMED", 1)
flaps_ch_min = 1000
flaps_ch_trim = 1500
flaps_ch_max = 2000
self.set_parameter("RC%u_MIN" % flaps_ch, flaps_ch_min)
self.set_parameter("RC%u_MAX" % flaps_ch, flaps_ch_max)
self.set_parameter("RC%u_TRIM" % flaps_ch, flaps_ch_trim)
servo_ch_min = 1200
servo_ch_trim = 1300
servo_ch_max = 1800
self.set_parameter("SERVO%u_MIN" % servo_ch, servo_ch_min)
self.set_parameter("SERVO%u_MAX" % servo_ch, servo_ch_max)
self.set_parameter("SERVO%u_TRIM" % servo_ch, servo_ch_trim)
self.progress("check flaps are not deployed")
self.set_rc(flaps_ch, flaps_ch_min)
self.wait_servo_channel_value(servo_ch, servo_ch_min)
self.progress("deploy the flaps")
self.set_rc(flaps_ch, flaps_ch_max)
tstart = self.get_sim_time()
self.wait_servo_channel_value(servo_ch, servo_ch_max)
tstop = self.get_sim_time_cached()
delta_time = tstop - tstart
delta_time_min = 0.5
delta_time_max = 1.5
if delta_time < delta_time_min or delta_time > delta_time_max:
raise NotAchievedException((
"Flaps Slew not working (%f seconds)" % (delta_time,)))
self.progress("undeploy flaps")
self.set_rc(flaps_ch, flaps_ch_min)
self.wait_servo_channel_value(servo_ch, servo_ch_min)
self.progress("Flying mission %s" % filename)
self.load_mission(filename)
self.mavproxy.send('wp set 1\n')
self.mavproxy.send('switch 1\n') # auto mode
self.wait_mode('AUTO')
self.wait_ready_to_arm()
self.arm_vehicle()
last_mission_current_msg = 0
last_seq = None
while self.armed():
m = self.mav.recv_match(type='MISSION_CURRENT', blocking=True)
time_delta = (self.get_sim_time_cached() -
last_mission_current_msg)
if (time_delta > 1 or m.seq != last_seq):
dist = None
x = self.mav.messages.get("NAV_CONTROLLER_OUTPUT", None)
if x is not None:
dist = x.wp_dist
self.progress("MISSION_CURRENT.seq=%u (dist=%s)" %
(m.seq, str(dist)))
last_mission_current_msg = self.get_sim_time_cached()
last_seq = m.seq
# flaps should undeploy at the end
self.wait_servo_channel_value(servo_ch, servo_ch_min, timeout=30)
# do a short flight in FBWA, watching for flaps
# self.mavproxy.send('switch 4\n')
# self.wait_mode('FBWA')
# self.delay_sim_time(10)
# self.mavproxy.send('switch 6\n')
# self.wait_mode('MANUAL')
# self.delay_sim_time(10)
self.progress("Flaps OK")
except Exception as e:
ex = e
self.context_pop()
if ex:
if self.armed():
self.disarm_vehicle()
raise ex
def test_rc_relay(self):
'''test toggling channel 12 toggles relay'''
self.set_parameter("RC12_OPTION", 28) # Relay On/Off
self.set_rc(12, 1000)
self.reboot_sitl() # needed for RC12_OPTION to take effect
off = self.get_parameter("SIM_PIN_MASK")
        if off:
            raise PreconditionFailedException("SIM_PIN_MASK is not clear")
# allow time for the RC library to register initial value:
self.delay_sim_time(1)
self.set_rc(12, 2000)
self.wait_heartbeat()
self.wait_heartbeat()
on = self.get_parameter("SIM_PIN_MASK")
if not on:
raise NotAchievedException("SIM_PIN_MASK doesn't reflect ON")
self.set_rc(12, 1000)
self.wait_heartbeat()
self.wait_heartbeat()
off = self.get_parameter("SIM_PIN_MASK")
if off:
raise NotAchievedException("SIM_PIN_MASK doesn't reflect OFF")
def test_rc_option_camera_trigger(self):
'''test toggling channel 12 takes picture'''
self.set_parameter("RC12_OPTION", 9) # CameraTrigger
self.reboot_sitl() # needed for RC12_OPTION to take effect
x = self.mav.messages.get("CAMERA_FEEDBACK", None)
if x is not None:
raise PreconditionFailedException("Receiving CAMERA_FEEDBACK?!")
self.set_rc(12, 2000)
tstart = self.get_sim_time()
while self.get_sim_time_cached() - tstart < 10:
x = self.mav.messages.get("CAMERA_FEEDBACK", None)
if x is not None:
break
self.wait_heartbeat()
self.set_rc(12, 1000)
if x is None:
raise NotAchievedException("No CAMERA_FEEDBACK message received")
def test_throttle_failsafe(self):
self.change_mode('MANUAL')
m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
receiver_bit = mavutil.mavlink.MAV_SYS_STATUS_SENSOR_RC_RECEIVER
self.progress("Testing receiver enabled")
if (not (m.onboard_control_sensors_enabled & receiver_bit)):
raise PreconditionFailedException()
self.progress("Testing receiver present")
if (not (m.onboard_control_sensors_present & receiver_bit)):
raise PreconditionFailedException()
self.progress("Testing receiver health")
if (not (m.onboard_control_sensors_health & receiver_bit)):
raise PreconditionFailedException()
self.progress("Ensure we know original throttle value")
self.wait_rc_channel_value(3, 1000)
self.set_parameter("THR_FS_VALUE", 960)
self.progress("Failing receiver (throttle-to-950)")
self.context_collect("HEARTBEAT")
self.set_parameter("SIM_RC_FAIL", 2) # throttle-to-950
self.wait_mode('RTL') # long failsafe
if (not self.get_mode_from_mode_mapping("CIRCLE") in [x.custom_mode for x in self.context_stop_collecting("HEARTBEAT")]):
raise NotAchievedException("Did not go via circle mode")
self.progress("Ensure we've had our throttle squashed to 950")
self.wait_rc_channel_value(3, 950)
self.drain_mav_unparsed()
m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
print("%s" % str(m))
self.progress("Testing receiver enabled")
if (not (m.onboard_control_sensors_enabled & receiver_bit)):
raise NotAchievedException("Receiver not enabled")
self.progress("Testing receiver present")
if (not (m.onboard_control_sensors_present & receiver_bit)):
raise NotAchievedException("Receiver not present")
# skip this until RC is fixed
# self.progress("Testing receiver health")
# if (m.onboard_control_sensors_health & receiver_bit):
# raise NotAchievedException("Sensor healthy when it shouldn't be")
self.set_parameter("SIM_RC_FAIL", 0)
self.drain_mav_unparsed()
# have to allow time for RC to be fetched from SITL
self.delay_sim_time(0.5)
m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
self.progress("Testing receiver enabled")
if (not (m.onboard_control_sensors_enabled & receiver_bit)):
raise NotAchievedException("Receiver not enabled")
self.progress("Testing receiver present")
if (not (m.onboard_control_sensors_present & receiver_bit)):
raise NotAchievedException("Receiver not present")
self.progress("Testing receiver health")
if (not (m.onboard_control_sensors_health & receiver_bit)):
raise NotAchievedException("Receiver not healthy2")
self.change_mode('MANUAL')
self.progress("Failing receiver (no-pulses)")
self.context_collect("HEARTBEAT")
self.set_parameter("SIM_RC_FAIL", 1) # no-pulses
self.wait_mode('RTL') # long failsafe
if (not self.get_mode_from_mode_mapping("CIRCLE") in [x.custom_mode for x in self.context_stop_collecting("HEARTBEAT")]):
raise NotAchievedException("Did not go via circle mode")
self.drain_mav_unparsed()
m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
print("%s" % str(m))
self.progress("Testing receiver enabled")
if (not (m.onboard_control_sensors_enabled & receiver_bit)):
raise NotAchievedException("Receiver not enabled")
self.progress("Testing receiver present")
if (not (m.onboard_control_sensors_present & receiver_bit)):
raise NotAchievedException("Receiver not present")
self.progress("Testing receiver health")
if (m.onboard_control_sensors_health & receiver_bit):
raise NotAchievedException("Sensor healthy when it shouldn't be")
self.progress("Making RC work again")
self.set_parameter("SIM_RC_FAIL", 0)
# have to allow time for RC to be fetched from SITL
self.progress("Giving receiver time to recover")
self.delay_sim_time(0.5)
self.drain_mav_unparsed()
m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
self.progress("Testing receiver enabled")
if (not (m.onboard_control_sensors_enabled & receiver_bit)):
raise NotAchievedException("Receiver not enabled")
self.progress("Testing receiver present")
if (not (m.onboard_control_sensors_present & receiver_bit)):
raise NotAchievedException("Receiver not present")
self.progress("Testing receiver health")
if (not (m.onboard_control_sensors_health & receiver_bit)):
raise NotAchievedException("Receiver not healthy")
self.change_mode('MANUAL')
self.progress("Ensure long failsafe can trigger when short failsafe disabled")
self.context_push()
self.context_collect("STATUSTEXT")
ex = None
try:
self.set_parameter("FS_SHORT_ACTN", 3) # 3 means disabled
self.set_parameter("SIM_RC_FAIL", 1)
self.wait_statustext("Long event on", check_context=True)
self.wait_mode("RTL")
# self.context_clear_collection("STATUSTEXT")
self.set_parameter("SIM_RC_FAIL", 0)
self.wait_text("Long event off", check_context=True)
self.change_mode("MANUAL")
self.progress("Trying again with THR_FS_VALUE")
self.set_parameter("THR_FS_VALUE", 960)
self.set_parameter("SIM_RC_FAIL", 2)
self.wait_statustext("Long event on", check_context=True)
self.wait_mode("RTL")
except Exception as e:
self.progress("Exception caught:")
self.progress(self.get_exception_stacktrace(e))
ex = e
self.context_pop()
if ex is not None:
raise ex
def test_throttle_failsafe_fence(self):
fence_bit = mavutil.mavlink.MAV_SYS_STATUS_GEOFENCE
self.progress("Checking fence is not present before being configured")
m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
print("%s" % str(m))
if (m.onboard_control_sensors_enabled & fence_bit):
raise NotAchievedException("Fence enabled before being configured")
self.change_mode('MANUAL')
self.wait_ready_to_arm()
self.load_fence("CMAC-fence.txt")
self.set_parameter("FENCE_CHANNEL", 7)
self.set_parameter("FENCE_ACTION", 4)
self.set_rc(3, 1000)
self.set_rc(7, 2000)
self.progress("Checking fence is initially OK")
m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
print("%s" % str(m))
if (not (m.onboard_control_sensors_enabled & fence_bit)):
raise NotAchievedException("Fence not initially enabled")
self.set_parameter("THR_FS_VALUE", 960)
self.progress("Failing receiver (throttle-to-950)")
self.set_parameter("SIM_RC_FAIL", 2) # throttle-to-950
self.wait_mode("CIRCLE")
        self.delay_sim_time(1) # give the fence status time to update
self.drain_mav_unparsed()
self.progress("Checking fence is OK after receiver failure (bind-values)")
fence_bit = mavutil.mavlink.MAV_SYS_STATUS_GEOFENCE
m = self.mav.recv_match(type='SYS_STATUS', blocking=True)
print("%s" % str(m))
if (not (m.onboard_control_sensors_enabled & fence_bit)):
raise NotAchievedException("Fence not enabled after RC fail")
def test_gripper_mission(self):
self.context_push()
ex = None
try:
self.load_mission("plane-gripper-mission.txt")
self.mavproxy.send("wp set 1\n")
self.change_mode('AUTO')
self.wait_ready_to_arm()
self.arm_vehicle()
self.mavproxy.expect("Gripper Grabbed")
self.mavproxy.expect("Gripper Released")
self.mavproxy.expect("Auto disarmed")
except Exception as e:
self.progress("Exception caught:")
self.progress(self.get_exception_stacktrace(e))
ex = e
self.context_pop()
if ex is not None:
raise ex
def assert_fence_sys_status(self, present, enabled, health):
self.delay_sim_time(1)
self.drain_mav_unparsed()
m = self.mav.recv_match(type='SYS_STATUS', blocking=True, timeout=1)
if m is None:
raise NotAchievedException("Did not receive SYS_STATUS")
tests = [ ( "present", present, m.onboard_control_sensors_present ),
( "enabled", enabled, m.onboard_control_sensors_enabled ),
( "health", health, m.onboard_control_sensors_health ),
]
bit = mavutil.mavlink.MAV_SYS_STATUS_GEOFENCE
for test in tests:
(name, want, field) = test
got = (field & bit) != 0
if want != got:
raise NotAchievedException("fence status incorrect; %s want=%u got=%u" %
(name, want, got))
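    def _example_sensor_bit_set(self, field, bit):
        # Editor's hedged sketch (not in the upstream suite): the
        # SYS_STATUS assertions above all reduce to this bit test, applied
        # in turn to the present, enabled and health bitmask fields.
        return (field & bit) != 0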
def do_fence_en_or_dis_able(self, value, want_result=mavutil.mavlink.MAV_RESULT_ACCEPTED):
if value:
p1 = 1
else:
p1 = 0
self.run_cmd(mavutil.mavlink.MAV_CMD_DO_FENCE_ENABLE,
p1, # param1
0, # param2
0, # param3
0, # param4
0, # param5
0, # param6
0, # param7
want_result=want_result)
def do_fence_enable(self, want_result=mavutil.mavlink.MAV_RESULT_ACCEPTED):
self.do_fence_en_or_dis_able(True, want_result=want_result)
def do_fence_disable(self, want_result=mavutil.mavlink.MAV_RESULT_ACCEPTED):
self.do_fence_en_or_dis_able(False, want_result=want_result)
def wait_circling_point_with_radius(self, loc, want_radius, epsilon=5.0, min_circle_time=5, timeout=120):
on_radius_start_heading = None
average_radius = 0.0
circle_time_start = 0
done_time = False
done_angle = False
tstart = self.get_sim_time()
while True:
if self.get_sim_time() - tstart > timeout:
raise AutoTestTimeoutException("Did not get onto circle")
here = self.mav.location()
got_radius = self.get_distance(loc, here)
average_radius = 0.95*average_radius + 0.05*got_radius
on_radius = abs(got_radius - want_radius) < epsilon
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
heading = m.heading
on_string = "off"
got_angle = ""
if on_radius_start_heading is not None:
got_angle = "%0.2f" % abs(on_radius_start_heading - heading) # FIXME
on_string = "on"
            want_angle = 180 # we don't actually get this (angle-subtraction issue), but we get enough...
self.progress("wait-circling: got-r=%0.2f want-r=%f avg-r=%f %s want-a=%0.1f got-a=%s" %
(got_radius, want_radius, average_radius, on_string, want_angle, got_angle))
if on_radius:
if on_radius_start_heading is None:
on_radius_start_heading = heading
average_radius = got_radius
circle_time_start = self.get_sim_time()
continue
if abs(on_radius_start_heading - heading) > want_angle: # FIXME
done_angle = True
if self.get_sim_time() - circle_time_start > min_circle_time:
done_time = True
if done_time and done_angle:
return
continue
if on_radius_start_heading is not None:
average_radius = 0.0
on_radius_start_heading = None
circle_time_start = 0
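    def _example_ema(self, average, sample, alpha=0.05):
        # Editor's hedged sketch (not in the upstream suite): the running
        # radius above is an exponential moving average; each new sample is
        # blended in with weight alpha, so older samples decay geometrically.
        return (1.0 - alpha) * average + alpha * sample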
def test_fence_static(self):
ex = None
try:
self.progress("Checking for bizarre healthy-when-not-present-or-enabled")
self.assert_fence_sys_status(False, False, True)
self.load_fence("CMAC-fence.txt")
m = self.mav.recv_match(type='FENCE_STATUS', blocking=True, timeout=2)
if m is not None:
raise NotAchievedException("Got FENCE_STATUS unexpectedly");
self.drain_mav_unparsed()
self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_NONE) # report only
self.assert_fence_sys_status(False, False, True)
self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_RTL) # report only
self.assert_fence_sys_status(True, False, True)
self.mavproxy.send('fence enable\n')
self.mavproxy.expect("fence enabled")
self.assert_fence_sys_status(True, True, True)
m = self.mav.recv_match(type='FENCE_STATUS', blocking=True, timeout=2)
if m is None:
raise NotAchievedException("Did not get FENCE_STATUS");
if m.breach_status:
raise NotAchievedException("Breached fence unexpectedly (%u)" %
(m.breach_status))
self.mavproxy.send('fence disable\n')
self.mavproxy.expect("fence disabled")
self.assert_fence_sys_status(True, False, True)
self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_NONE)
self.assert_fence_sys_status(False, False, True)
self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_RTL)
self.assert_fence_sys_status(True, False, True)
self.mavproxy.send("fence clear\n")
self.mavproxy.expect("fence removed")
if self.get_parameter("FENCE_TOTAL") != 0:
raise NotAchievedException("Expected zero points remaining")
self.assert_fence_sys_status(False, False, True)
self.progress("Trying to enable fence with no points")
self.do_fence_enable(want_result=mavutil.mavlink.MAV_RESULT_FAILED)
# test a rather unfortunate behaviour:
self.progress("Killing a live fence with fence-clear")
self.load_fence("CMAC-fence.txt")
self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_RTL)
self.do_fence_enable()
self.assert_fence_sys_status(True, True, True)
self.mavproxy.send("fence clear\n")
self.mavproxy.expect("fence removed")
if self.get_parameter("FENCE_TOTAL") != 0:
raise NotAchievedException("Expected zero points remaining")
self.assert_fence_sys_status(False, False, True)
except Exception as e:
self.progress("Exception caught:")
self.progress(self.get_exception_stacktrace(e))
ex = e
self.mavproxy.send('fence clear\n')
if ex is not None:
raise ex
def test_fence_breach_circle_at(self, loc, disable_on_breach=False):
ex = None
try:
self.load_fence("CMAC-fence.txt")
want_radius = 100
# when ArduPlane is fixed, remove this fudge factor
REALLY_BAD_FUDGE_FACTOR = 1.16
expected_radius = REALLY_BAD_FUDGE_FACTOR * want_radius
self.set_parameter("RTL_RADIUS", want_radius)
self.set_parameter("NAVL1_LIM_BANK", 60)
self.set_parameter("FENCE_ACTION", mavutil.mavlink.FENCE_ACTION_RTL)
self.do_fence_enable()
self.assert_fence_sys_status(True, True, True)
self.takeoff(alt=45, alt_max=300)
tstart = self.get_sim_time()
while True:
if self.get_sim_time() - tstart > 30:
raise NotAchievedException("Did not breach fence")
m = self.mav.recv_match(type='FENCE_STATUS', blocking=True, timeout=2)
if m is None:
raise NotAchievedException("Did not get FENCE_STATUS");
if m.breach_status == 0:
continue
# we've breached; check our state;
if m.breach_type != mavutil.mavlink.FENCE_BREACH_BOUNDARY:
raise NotAchievedException("Unexpected breach type %u" %
(m.breach_type,))
if m.breach_count == 0:
raise NotAchievedException("Unexpected breach count %u" %
(m.breach_count,))
self.assert_fence_sys_status(True, True, False)
break
if disable_on_breach:
self.do_fence_disable()
self.wait_circling_point_with_radius(loc, expected_radius)
self.disarm_vehicle(force=True)
self.reboot_sitl()
except Exception as e:
self.progress("Exception caught:")
self.progress(self.get_exception_stacktrace(e))
ex = e
self.mavproxy.send('fence clear\n')
if ex is not None:
raise ex
def test_fence_rtl(self):
self.progress("Testing FENCE_ACTION_RTL no rally point")
# have to disable the fence once we've breached or we breach
# it as part of the loiter-at-home!
self.test_fence_breach_circle_at(self.home_position_as_mav_location(),
disable_on_breach=True)
def test_fence_rtl_rally(self):
ex = None
target_system = 1
target_component = 1
try:
self.progress("Testing FENCE_ACTION_RTL with rally point")
self.wait_ready_to_arm()
loc = self.home_position_as_mav_location()
self.location_offset_ne(loc, 50, -50)
self.set_parameter("RALLY_TOTAL", 1)
self.mav.mav.rally_point_send(target_system,
target_component,
0, # sequence number
1, # total count
int(loc.lat * 1e7),
int(loc.lng * 1e7),
15,
0, # "break" alt?!
0, # "land dir"
0) # flags
self.delay_sim_time(1)
self.mavproxy.send("rally list\n")
self.test_fence_breach_circle_at(loc)
except Exception as e:
self.progress("Exception caught:")
self.progress(self.get_exception_stacktrace(e))
ex = e
self.mavproxy.send('rally clear\n')
if ex is not None:
raise ex
def test_parachute(self):
self.set_rc(9, 1000)
self.set_parameter("CHUTE_ENABLED", 1)
self.set_parameter("CHUTE_TYPE", 10)
self.set_parameter("SERVO9_FUNCTION", 27)
self.set_parameter("SIM_PARA_ENABLE", 1)
self.set_parameter("SIM_PARA_PIN", 9)
self.load_mission("plane-parachute-mission.txt")
self.mavproxy.send("wp set 1\n")
self.change_mode('AUTO')
self.wait_ready_to_arm()
self.arm_vehicle()
self.mavproxy.expect("BANG")
self.disarm_vehicle(force=True)
self.reboot_sitl()
def test_parachute_sinkrate(self):
self.set_rc(9, 1000)
self.set_parameter("CHUTE_ENABLED", 1)
self.set_parameter("CHUTE_TYPE", 10)
self.set_parameter("SERVO9_FUNCTION", 27)
self.set_parameter("SIM_PARA_ENABLE", 1)
self.set_parameter("SIM_PARA_PIN", 9)
self.set_parameter("CHUTE_CRT_SINK", 9)
self.progress("Takeoff")
self.takeoff(alt=300)
self.progress("Diving")
self.set_rc(2, 2000)
self.mavproxy.expect("BANG")
self.disarm_vehicle(force=True)
self.reboot_sitl()
def run_subtest(self, desc, func):
self.start_subtest(desc)
func()
def test_main_flight(self):
self.change_mode('MANUAL')
self.progress("Asserting we don't support transfer of fence via mission item protocol")
self.assert_no_capability(mavutil.mavlink.MAV_PROTOCOL_CAPABILITY_MISSION_FENCE)
# grab home position:
self.mav.recv_match(type='HOME_POSITION', blocking=True)
self.homeloc = self.mav.location()
self.run_subtest("Takeoff", self.takeoff)
self.run_subtest("Set Attitude Target", self.set_attitude_target)
self.run_subtest("Fly left circuit", self.fly_left_circuit)
self.run_subtest("Left roll", lambda: self.axial_left_roll(1))
self.run_subtest("Inside loop", self.inside_loop)
self.run_subtest("Stablize test", self.test_stabilize)
self.run_subtest("ACRO test", self.test_acro)
self.run_subtest("FBWB test", self.test_FBWB)
self.run_subtest("CRUISE test", lambda: self.test_FBWB(mode='CRUISE'))
self.run_subtest("RTL test", self.fly_RTL)
self.run_subtest("LOITER test", self.fly_LOITER)
self.run_subtest("CIRCLE test", self.fly_CIRCLE)
self.run_subtest("Mission test",
lambda: self.fly_mission("ap1.txt"))
def airspeed_autocal(self):
self.progress("Ensure no AIRSPEED_AUTOCAL on ground")
self.set_parameter("ARSPD_AUTOCAL", 1)
m = self.mav.recv_match(type='AIRSPEED_AUTOCAL',
blocking=True,
timeout=5)
if m is not None:
raise NotAchievedException("Got autocal on ground")
mission_filepath = "flaps.txt"
num_wp = self.load_mission(mission_filepath)
self.wait_ready_to_arm()
self.arm_vehicle()
self.change_mode("AUTO")
self.progress("Ensure AIRSPEED_AUTOCAL in air")
m = self.mav.recv_match(type='AIRSPEED_AUTOCAL',
blocking=True,
timeout=5)
self.wait_waypoint(7, num_wp-1, timeout=500)
self.wait_disarmed(timeout=120)
def deadreckoning_main(self, disable_airspeed_sensor=False):
self.gpi = None
self.simstate = None
self.last_print = 0
self.max_divergence = 0
def validate_global_position_int_against_simstate(mav, m):
if m.get_type() == 'GLOBAL_POSITION_INT':
self.gpi = m
elif m.get_type() == 'SIMSTATE':
self.simstate = m
if self.gpi is None:
return
if self.simstate is None:
return
divergence = self.get_distance_int(self.gpi, self.simstate)
max_allowed_divergence = 200
if time.time() - self.last_print > 1:
self.progress("position-estimate-divergence=%fm" % (divergence,))
self.last_print = time.time()
if divergence > max_allowed_divergence:
raise NotAchievedException("global-position-int diverged from simstate by >%fm" % (max_allowed_divergence,))
if divergence > self.max_divergence:
self.max_divergence = divergence
self.install_message_hook(validate_global_position_int_against_simstate)
try:
# wind is from the West:
self.set_parameter("SIM_WIND_DIR", 270)
# light winds:
self.set_parameter("SIM_WIND_SPD", 10)
if disable_airspeed_sensor:
self.set_parameter("ARSPD_USE", 0)
self.takeoff(50)
loc = self.mav.location()
loc.lat = -35.35690712
loc.lng = 149.17083386
self.run_cmd_int(
mavutil.mavlink.MAV_CMD_DO_REPOSITION,
0,
mavutil.mavlink.MAV_DO_REPOSITION_FLAGS_CHANGE_MODE,
0,
0,
int(loc.lat*1e7),
int(loc.lng*1e7),
100, # alt
frame=mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT,
)
self.wait_location(loc, accuracy=100)
self.progress("Stewing")
self.delay_sim_time(20)
self.set_parameter("SIM_GPS_DISABLE", 1)
self.progress("Roasting")
self.delay_sim_time(20)
self.change_mode("RTL")
self.wait_distance_to_home(100, 200, timeout=200)
self.set_parameter("SIM_GPS_DISABLE", 0)
self.delay_sim_time(10)
self.set_rc(3, 1000)
self.fly_home_land_and_disarm()
self.progress("max-divergence: %fm" % (self.max_divergence,))
finally:
self.remove_message_hook(validate_global_position_int_against_simstate)
def deadreckoning(self):
self.deadreckoning_main()
self.deadreckoning_main(disable_airspeed_sensor=True)
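    def _example_message_hook(self):
        # Editor's hedged sketch (not in the upstream suite):
        # install_message_hook() registers a callable invoked for every
        # decoded MAVLink message, which is how deadreckoning_main()
        # compares GLOBAL_POSITION_INT against SIMSTATE above.
        def hook(mav, msg):
            if msg.get_type() == 'HEARTBEAT':
                self.progress("saw a heartbeat")
        self.install_message_hook(hook)
        try:
            self.delay_sim_time(5)
        finally:
            self.remove_message_hook(hook)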
def sample_enable_parameter(self):
return "Q_ENABLE"
def test_rangefinder(self):
ex = None
self.context_push()
self.progress("Making sure we don't ordinarily get RANGEFINDER")
m = None
try:
m = self.mav.recv_match(type='RANGEFINDER',
blocking=True,
timeout=5)
except Exception as e:
self.progress("Caught exception: %s" %
self.get_exception_stacktrace(e))
if m is not None:
raise NotAchievedException("Received unexpected RANGEFINDER msg")
try:
self.set_analog_rangefinder_parameters()
self.reboot_sitl()
'''ensure rangefinder gives height-above-ground'''
self.load_mission("plane-gripper-mission.txt") # borrow this
self.mavproxy.send("wp set 1\n")
self.change_mode('AUTO')
self.wait_ready_to_arm()
self.arm_vehicle()
self.wait_waypoint(5, 5, max_dist=100)
rf = self.mav.recv_match(type="RANGEFINDER", timeout=1, blocking=True)
if rf is None:
raise NotAchievedException("Did not receive rangefinder message")
gpi = self.mav.recv_match(type='GLOBAL_POSITION_INT', blocking=True, timeout=1)
if gpi is None:
raise NotAchievedException("Did not receive GLOBAL_POSITION_INT message")
if abs(rf.distance - gpi.relative_alt/1000.0) > 3:
raise NotAchievedException("rangefinder alt (%s) disagrees with global-position-int.relative_alt (%s)" % (rf.distance, gpi.relative_alt/1000.0))
self.mavproxy.expect("Auto disarmed")
self.progress("Ensure RFND messages in log")
if not self.current_onboard_log_contains_message("RFND"):
raise NotAchievedException("No RFND messages in log")
except Exception as e:
self.progress("Exception caught:")
self.progress(self.get_exception_stacktrace(e))
ex = e
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
def rc_defaults(self):
ret = super(AutoTestPlane, self).rc_defaults()
ret[3] = 1000
ret[8] = 1800
return ret
def default_mode(self):
return "MANUAL"
def test_pid_tuning(self):
self.change_mode("FBWA") # we don't update PIDs in MANUAL
super(AutoTestPlane, self).test_pid_tuning()
def test_setting_modes_via_auxswitches(self):
self.set_parameter("FLTMODE5", 1)
self.mavproxy.send('switch 1\n') # random mode
self.wait_heartbeat()
self.change_mode('MANUAL')
self.mavproxy.send('switch 5\n') # acro mode
self.wait_mode("CIRCLE")
self.set_rc(9, 1000)
self.set_rc(10, 1000)
self.set_parameter("RC9_OPTION", 4) # RTL
self.set_parameter("RC10_OPTION", 55) # guided
self.set_rc(9, 1900)
self.wait_mode("RTL")
self.set_rc(10, 1900)
self.wait_mode("GUIDED")
self.progress("resetting both switches - should go back to CIRCLE")
self.set_rc(9, 1000)
self.set_rc(10, 1000)
self.wait_mode("CIRCLE")
self.set_rc(9, 1900)
self.wait_mode("RTL")
self.set_rc(10, 1900)
self.wait_mode("GUIDED")
self.progress("Resetting switch should repoll mode switch")
self.set_rc(10, 1000) # this re-polls the mode switch
self.wait_mode("CIRCLE")
self.set_rc(9, 1000)
def wait_for_collision_threat_to_clear(self):
        '''wait to get a "clear" collision message, then slurp remaining
        messages'''
last_collision = self.get_sim_time()
while True:
now = self.get_sim_time()
if now - last_collision > 5:
return
self.progress("Waiting for collision message")
m = self.mav.recv_match(type='COLLISION', blocking=True, timeout=1)
self.progress("Got (%s)" % str(m))
if m is None:
continue
last_collision = now
def test_adsb_send_threatening_adsb_message(self, here):
self.progress("Sending ABSD_VEHICLE message")
self.mav.mav.adsb_vehicle_send(37, # ICAO address
int(here.lat * 1e7),
int(here.lng * 1e7),
mavutil.mavlink.ADSB_ALTITUDE_TYPE_PRESSURE_QNH,
int(here.alt*1000 + 10000), # 10m up
0, # heading in cdeg
0, # horizontal velocity cm/s
0, # vertical velocity cm/s
"bob".encode("ascii"), # callsign
mavutil.mavlink.ADSB_EMITTER_TYPE_LIGHT,
1, # time since last communication
65535, # flags
17 # squawk
)
def test_adsb(self):
self.context_push()
ex = None
try:
# message ADSB_VEHICLE 37 -353632614 1491652305 0 584070 0 0 0 "bob" 3 1 255 17
self.set_parameter("RC12_OPTION", 38) # avoid-adsb
self.set_rc(12, 2000)
self.set_parameter("ADSB_ENABLE", 1)
self.set_parameter("AVD_ENABLE", 1)
self.set_parameter("AVD_F_ACTION", mavutil.mavlink.MAV_COLLISION_ACTION_RTL)
self.reboot_sitl()
self.wait_ready_to_arm()
here = self.mav.location()
self.change_mode("FBWA")
self.delay_sim_time(2) # TODO: work out why this is required...
self.test_adsb_send_threatening_adsb_message(here)
self.progress("Waiting for collision message")
m = self.mav.recv_match(type='COLLISION', blocking=True, timeout=4)
if m is None:
raise NotAchievedException("Did not get collision message")
if m.threat_level != 2:
raise NotAchievedException("Expected some threat at least")
if m.action != mavutil.mavlink.MAV_COLLISION_ACTION_RTL:
raise NotAchievedException("Incorrect action; want=%u got=%u" %
(mavutil.mavlink.MAV_COLLISION_ACTION_RTL, m.action))
self.wait_mode("RTL")
self.progress("Sending far-away ABSD_VEHICLE message")
self.mav.mav.adsb_vehicle_send(37, # ICAO address
int(here.lat+1 * 1e7),
int(here.lng * 1e7),
mavutil.mavlink.ADSB_ALTITUDE_TYPE_PRESSURE_QNH,
int(here.alt*1000 + 10000), # 10m up
0, # heading in cdeg
0, # horizontal velocity cm/s
0, # vertical velocity cm/s
"bob".encode("ascii"), # callsign
mavutil.mavlink.ADSB_EMITTER_TYPE_LIGHT,
1, # time since last communication
65535, # flags
17 # squawk
)
self.wait_for_collision_threat_to_clear()
self.change_mode("FBWA")
self.progress("Disabling ADSB-avoidance with RC channel")
self.set_rc(12, 1000)
self.delay_sim_time(1) # let the switch get polled
self.test_adsb_send_threatening_adsb_message(here)
m = self.mav.recv_match(type='COLLISION', blocking=True, timeout=4)
print("Got (%s)" % str(m))
if m is not None:
raise NotAchievedException("Got collision message when I shouldn't have")
except Exception as e:
ex = e
self.context_pop()
self.reboot_sitl()
if ex is not None:
raise ex
def fly_do_guided_request(self, target_system=1, target_component=1):
self.progress("Takeoff")
self.takeoff(alt=50)
self.set_rc(3, 1500)
self.start_subtest("Ensure command bounced outside guided mode")
desired_relative_alt = 33
loc = self.mav.location()
self.location_offset_ne(loc, 300, 300)
loc.alt += desired_relative_alt
self.mav.mav.mission_item_int_send(
target_system,
target_component,
0, # seq
mavutil.mavlink.MAV_FRAME_GLOBAL,
mavutil.mavlink.MAV_CMD_NAV_WAYPOINT,
2, # current - guided-mode request
0, # autocontinue
0, # p1
0, # p2
0, # p3
0, # p4
            int(loc.lat * 1e7), # latitude
            int(loc.lng * 1e7), # longitude
loc.alt, # altitude
mavutil.mavlink.MAV_MISSION_TYPE_MISSION)
m = self.mav.recv_match(type='MISSION_ACK', blocking=True, timeout=5)
if m is None:
raise NotAchievedException("Did not get MISSION_ACK")
if m.type != mavutil.mavlink.MAV_MISSION_ERROR:
raise NotAchievedException("Did not get appropriate error")
self.start_subtest("Enter guided and flying somewhere constant")
self.change_mode("GUIDED")
self.mav.mav.mission_item_int_send(
target_system,
target_component,
0, # seq
mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT,
mavutil.mavlink.MAV_CMD_NAV_WAYPOINT,
2, # current - guided-mode request
0, # autocontinue
0, # p1
0, # p2
0, # p3
0, # p4
            int(loc.lat * 1e7), # latitude
            int(loc.lng * 1e7), # longitude
desired_relative_alt, # altitude
mavutil.mavlink.MAV_MISSION_TYPE_MISSION)
m = self.mav.recv_match(type='MISSION_ACK', blocking=True, timeout=5)
if m is None:
raise NotAchievedException("Did not get MISSION_ACK")
if m.type != mavutil.mavlink.MAV_MISSION_ACCEPTED:
raise NotAchievedException("Did not get accepted response")
self.wait_location(loc, accuracy=100) # based on loiter radius
self.delay_sim_time(20)
self.wait_altitude(altitude_min=desired_relative_alt-3,
altitude_max=desired_relative_alt+3,
relative=True)
self.fly_home_land_and_disarm()
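    def _example_guided_goto(self, lat, lng, alt_relative):
        # Editor's hedged sketch (not in the upstream suite): a
        # MISSION_ITEM_INT with current=2 is the MAVLink convention for a
        # "guided mode goto" rather than a mission upload, which is what
        # fly_do_guided_request() exercises above.
        self.mav.mav.mission_item_int_send(
            1, 1, 0,
            mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT,
            mavutil.mavlink.MAV_CMD_NAV_WAYPOINT,
            2, # current=2: guided-mode request
            0, 0, 0, 0, 0,
            int(lat * 1e7),
            int(lng * 1e7),
            alt_relative,
            mavutil.mavlink.MAV_MISSION_TYPE_MISSION)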
def LOITER(self):
self.takeoff(alt=200)
self.set_rc(3, 1500)
self.change_mode("LOITER")
self.progress("Doing a bit of loitering to start with")
tstart = self.get_sim_time()
while True:
now = self.get_sim_time_cached()
if now - tstart > 60:
break
m = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=5)
if m is None:
raise NotAchievedException("Did not get VFR_HUD")
new_throttle = m.throttle
alt = m.alt
m = self.mav.recv_match(type='ATTITUDE', blocking=True, timeout=5)
if m is None:
raise NotAchievedException("Did not get ATTITUDE")
pitch = math.degrees(m.pitch)
self.progress("Pitch:%f throttle:%u alt:%f" % (pitch, new_throttle, alt))
m = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=5)
if m is None:
raise NotAchievedException("Did not get VFR_HUD")
initial_throttle = m.throttle
initial_alt = m.alt
self.progress("Initial throttle: %u" % initial_throttle)
# pitch down, ensure throttle decreases:
rc2_max = self.get_parameter("RC2_MAX")
self.set_rc(2, rc2_max)
tstart = self.get_sim_time()
while True:
now = self.get_sim_time_cached()
'''stick-mixing is pushing the aircraft down. It doesn't want to go
down (the target loiter altitude hasn't changed), so it
tries to add energy by increasing the throttle.
'''
if now - tstart > 60:
raise NotAchievedException("Did not see increase in throttle")
m = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=5)
if m is None:
raise NotAchievedException("Did not get VFR_HUD")
new_throttle = m.throttle
alt = m.alt
m = self.mav.recv_match(type='ATTITUDE', blocking=True, timeout=5)
if m is None:
raise NotAchievedException("Did not get ATTITUDE")
pitch = math.degrees(m.pitch)
self.progress("Pitch:%f throttle:%u alt:%f" % (pitch, new_throttle, alt))
if new_throttle - initial_throttle > 20:
self.progress("Throttle delta achieved")
break
self.progress("Centering elevator and ensuring we get back to loiter altitude")
self.set_rc(2, 1500)
self.wait_altitude(initial_alt-1, initial_alt+1)
self.fly_home_land_and_disarm()
def CPUFailsafe(self):
'''In lockup Plane should copy RC inputs to RC outputs'''
self.plane_CPUFailsafe()
def test_large_missions(self):
self.load_mission("Kingaroy-vlarge.txt")
self.load_mission("Kingaroy-vlarge2.txt")
def fly_soaring(self):
model="plane-soaring"
self.customise_SITL_commandline([],
model=model,
defaults_filepath=self.model_defaults_filepath("ArduPlane",model),
wipe=True)
self.load_mission('CMAC-soar.txt')
self.mavproxy.send("wp set 1\n")
self.change_mode('AUTO')
self.wait_ready_to_arm()
self.arm_vehicle()
# Enable thermalling RC
rc_chan = 0
        for i in range(8):
            rcx_option = self.get_parameter('RC{0}_OPTION'.format(i+1))
            if rcx_option == 88:
                rc_chan = i+1
                break
        if rc_chan == 0:
            raise NotAchievedException("Did not find soaring enable channel option.")
self.send_set_rc(rc_chan, 1900)
# Use trim airspeed.
self.send_set_rc(3, 1500)
# Wait to detect thermal
self.progress("Waiting for thermal")
self.wait_mode('THERMAL',timeout=600)
# Wait to climb to SOAR_ALT_MAX
self.progress("Waiting for climb to max altitude")
alt_max = self.get_parameter('SOAR_ALT_MAX')
self.wait_altitude(alt_max-10, alt_max, timeout=600, relative=True)
# Wait for AUTO
self.progress("Waiting for AUTO mode")
self.wait_mode('AUTO')
# Disable thermals
self.set_parameter("SIM_THML_SCENARI", 0)
# Wait to descend to SOAR_ALT_MIN
self.progress("Waiting for glide to min altitude")
alt_min = self.get_parameter('SOAR_ALT_MIN')
self.wait_altitude(alt_min-10, alt_min, timeout=600, relative=True)
self.progress("Waiting for throttle up")
self.wait_servo_channel_value(3, 1200, timeout=2, comparator=operator.gt)
self.progress("Waiting for climb to cutoff altitude")
alt_ctf = self.get_parameter('SOAR_ALT_CUTOFF')
self.wait_altitude(alt_ctf-10, alt_ctf, timeout=600, relative=True)
# Allow time to suppress throttle and start descent.
self.delay_sim_time(20)
# Now set FBWB mode
self.change_mode('FBWB')
self.delay_sim_time(5)
# Now disable soaring (should hold altitude)
self.set_parameter("SOAR_ENABLE", 0)
self.delay_sim_time(10)
#And reenable. This should force throttle-down
self.set_parameter("SOAR_ENABLE", 1)
self.delay_sim_time(10)
# Now wait for descent and check throttle up
self.wait_altitude(alt_min-10, alt_min, timeout=600, relative=True)
self.progress("Waiting for climb")
self.wait_altitude(alt_ctf-10, alt_ctf, timeout=600, relative=True)
# Back to auto
self.change_mode('AUTO')
# Reenable thermals
self.set_parameter("SIM_THML_SCENARI", 1)
# Disable soaring using RC channel.
self.send_set_rc(rc_chan, 1100)
# Wait to get back to waypoint before thermal.
self.progress("Waiting to get back to position")
self.wait_current_waypoint(3,timeout=1200)
        # Enable soaring with mode changes suppressed
self.send_set_rc(rc_chan, 1500)
# Make sure this causes throttle down.
self.wait_servo_channel_value(3, 1200, timeout=2, comparator=operator.lt)
self.progress("Waiting for next WP with no thermalling")
self.wait_waypoint(4,4,timeout=1200,max_dist=120)
# Disarm
self.disarm_vehicle()
self.progress("Mission OK")
def fly_terrain_mission(self):
self.customise_SITL_commandline([], wipe=True)
self.mavproxy.send("wp set 1\n")
self.wait_ready_to_arm()
self.arm_vehicle()
self.fly_mission("ap-terrain.txt", mission_timeout=600)
def ekf_lane_switch(self):
self.context_push()
ex = None
        # new lane switch available only with EK3
self.set_parameter("EK3_ENABLE", 1)
self.set_parameter("EK2_ENABLE", 0)
self.set_parameter("AHRS_EKF_TYPE", 3)
self.set_parameter("EK3_AFFINITY", 15) # enable affinity for all sensors
self.set_parameter("EK3_IMU_MASK", 3) # use only 2 IMUs
self.set_parameter("GPS_TYPE2", 1)
self.set_parameter("SIM_GPS2_DISABLE", 0)
self.set_parameter("SIM_BARO2_DISABL", 0)
self.set_parameter("SIM_BARO_COUNT", 2)
self.set_parameter("ARSPD2_TYPE", 2)
self.set_parameter("ARSPD2_USE", 1)
self.set_parameter("ARSPD2_PIN", 2)
# some parameters need reboot to take effect
self.reboot_sitl()
self.lane_switches = []
# add an EKF lane switch hook
def statustext_hook(mav, message):
if message.get_type() != 'STATUSTEXT':
return
# example msg: EKF3 lane switch 1
if not message.text.startswith("EKF3 lane switch "):
return
newlane = int(message.text[-1])
self.lane_switches.append(newlane)
self.install_message_hook(statustext_hook)
# get flying
self.takeoff(alt=50)
self.change_mode('CIRCLE')
try:
#####################################################################################################################################################
self.progress("Checking EKF3 Lane Switching trigger from all sensors")
#####################################################################################################################################################
self.start_subtest("ACCELEROMETER: Change z-axis offset")
# create an accelerometer error by changing the Z-axis offset
self.context_collect("STATUSTEXT")
old_parameter = self.get_parameter("INS_ACCOFFS_Z")
            # the_function must be a callable; wrap the trigger in a lambda
            # so it runs while we wait rather than before the wait starts
            self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=lambda: self.set_parameter("INS_ACCOFFS_Z", old_parameter + 5), check_context=True)
            if self.lane_switches != [1]:
                raise NotAchievedException("Expected lane switch 1, got %s" % str(self.lane_switches))
# Cleanup
self.set_parameter("INS_ACCOFFS_Z", old_parameter)
self.context_clear_collection("STATUSTEXT")
self.wait_heading(0, accuracy=10, timeout=60)
self.wait_heading(180, accuracy=10, timeout=60)
#####################################################################################################################################################
self.start_subtest("BAROMETER: Freeze to last measured value")
self.context_collect("STATUSTEXT")
# create a barometer error by inhibiting any pressure change while changing altitude
old_parameter = self.get_parameter("SIM_BARO2_FREEZE")
self.set_parameter("SIM_BARO2_FREEZE", 1)
self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=lambda: self.set_rc(2, 2000), check_context=True)
            if self.lane_switches != [1, 0]:
                raise NotAchievedException("Expected lane switch 0, got %s" % str(self.lane_switches))
# Cleanup
self.set_rc(2, 1500)
self.set_parameter("SIM_BARO2_FREEZE", old_parameter)
self.context_clear_collection("STATUSTEXT")
self.wait_heading(0, accuracy=10, timeout=60)
self.wait_heading(180, accuracy=10, timeout=60)
#####################################################################################################################################################
self.start_subtest("GPS: Apply GPS Velocity Error in NED")
self.context_push()
self.context_collect("STATUSTEXT")
# create a GPS velocity error by adding a random 2m/s noise on each axis
def sim_gps_verr():
self.set_parameter("SIM_GPS_VERR_X", self.get_parameter("SIM_GPS_VERR_X") + 2)
self.set_parameter("SIM_GPS_VERR_Y", self.get_parameter("SIM_GPS_VERR_Y") + 2)
self.set_parameter("SIM_GPS_VERR_Z", self.get_parameter("SIM_GPS_VERR_Z") + 2)
            self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=sim_gps_verr, check_context=True)
            if self.lane_switches != [1, 0, 1]:
                raise NotAchievedException("Expected lane switch 1, got %s" % str(self.lane_switches))
# Cleanup
self.context_pop()
self.context_clear_collection("STATUSTEXT")
self.wait_heading(0, accuracy=10, timeout=60)
self.wait_heading(180, accuracy=10, timeout=60)
#####################################################################################################################################################
self.start_subtest("MAGNETOMETER: Change X-Axis Offset")
self.context_collect("STATUSTEXT")
# create a magnetometer error by changing the X-axis offset
old_parameter = self.get_parameter("SIM_MAG2_OFS_X")
            self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=lambda: self.set_parameter("SIM_MAG2_OFS_X", old_parameter + 150), check_context=True)
            if self.lane_switches != [1, 0, 1, 0]:
                raise NotAchievedException("Expected lane switch 0, got %s" % str(self.lane_switches))
# Cleanup
self.set_parameter("SIM_MAG2_OFS_X", old_parameter)
self.context_clear_collection("STATUSTEXT")
self.wait_heading(0, accuracy=10, timeout=60)
self.wait_heading(180, accuracy=10, timeout=60)
#####################################################################################################################################################
self.start_subtest("AIRSPEED: Fail to constant value")
self.context_push()
self.context_collect("STATUSTEXT")
# create an airspeed sensor error by freezing to the current airspeed then changing the groundspeed
old_parameter = self.get_parameter("SIM_ARSPD_FAIL")
m = self.mav.recv_match(type='VFR_HUD', blocking=True)
self.set_parameter("SIM_ARSPD_FAIL", m.airspeed)
def change_speed():
self.change_mode("GUIDED")
self.run_cmd_int(
mavutil.mavlink.MAV_CMD_DO_REPOSITION,
0,
0,
0,
0,
12345, # lat*1e7
12345, # lon*1e7
50 # alt
)
self.delay_sim_time(5)
new_target_groundspeed = m.groundspeed + 5
self.run_cmd(
mavutil.mavlink.MAV_CMD_DO_CHANGE_SPEED,
1, # groundspeed
new_target_groundspeed,
-1, # throttle / no change
0, # absolute values
0,
0,
0
)
            self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=change_speed, check_context=True)
            if self.lane_switches != [1, 0, 1, 0, 1]:
                raise NotAchievedException("Expected lane switch 1, got %s" % str(self.lane_switches))
# Cleanup
self.change_mode('CIRCLE')
self.context_pop()
self.context_clear_collection("STATUSTEXT")
self.wait_heading(0, accuracy=10, timeout=60)
self.wait_heading(180, accuracy=10, timeout=60)
#####################################################################################################################################################
self.progress("GYROSCOPE: Change Y-Axis Offset")
self.context_collect("STATUSTEXT")
# create a gyroscope error by changing the Y-axis offset
old_parameter = self.get_parameter("INS_GYR2OFFS_Y")
            self.wait_statustext(text="EKF3 lane switch", timeout=30, the_function=lambda: self.set_parameter("INS_GYR2OFFS_Y", old_parameter + 1), check_context=True)
            if self.lane_switches != [1, 0, 1, 0, 1, 0]:
                raise NotAchievedException("Expected lane switch 0, got %s" % str(self.lane_switches))
# Cleanup
self.set_parameter("INS_GYR2OFFS_Y", old_parameter)
self.context_clear_collection("STATUSTEXT")
#####################################################################################################################################################
self.disarm_vehicle()
except Exception as e:
self.progress("Caught exception: %s" % self.get_exception_stacktrace(e))
ex = e
self.remove_message_hook(statustext_hook)
self.context_pop()
if ex is not None:
raise ex
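    def _example_trigger_and_wait(self):
        # Editor's hedged sketch (not in the upstream suite):
        # wait_statustext()'s the_function argument must be a *callable*
        # run while waiting; wrapping the trigger in a lambda (as done
        # above) defers it, whereas calling it inline would execute it
        # before the wait even starts.
        self.wait_statustext(
            text="EKF3 lane switch",
            timeout=30,
            the_function=lambda: self.set_parameter("SIM_BARO2_FREEZE", 1),
            check_context=True)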
def tests(self):
'''return list of all tests'''
ret = super(AutoTestPlane, self).tests()
ret.extend([
("AuxModeSwitch",
"Set modes via auxswitches",
self.test_setting_modes_via_auxswitches),
("TestRCCamera",
"Test RC Option - Camera Trigger",
self.test_rc_option_camera_trigger),
("TestRCRelay", "Test Relay RC Channel Option", self.test_rc_relay),
("ThrottleFailsafe",
"Fly throttle failsafe",
self.test_throttle_failsafe),
("ThrottleFailsafeFence",
"Fly fence survives throttle failsafe",
self.test_throttle_failsafe_fence),
("TestFlaps", "Flaps", self.fly_flaps),
("DO_CHANGE_SPEED", "Test mavlink DO_CHANGE_SPEED command", self.fly_do_change_speed),
("DO_REPOSITION",
"Test mavlink DO_REPOSITION command",
self.fly_do_reposition),
("GuidedRequest",
"Test handling of MISSION_ITEM in guided mode",
self.fly_do_guided_request),
("MainFlight",
"Lots of things in one flight",
self.test_main_flight),
("TestGripperMission",
"Test Gripper mission items",
self.test_gripper_mission),
("Parachute", "Test Parachute", self.test_parachute),
("ParachuteSinkRate", "Test Parachute (SinkRate triggering)", self.test_parachute_sinkrate),
("AIRSPEED_AUTOCAL", "Test AIRSPEED_AUTOCAL", self.airspeed_autocal),
("RangeFinder",
"Test RangeFinder Basic Functionality",
self.test_rangefinder),
("FenceStatic",
"Test Basic Fence Functionality",
self.test_fence_static),
("FenceRTL",
"Test Fence RTL",
self.test_fence_rtl),
("FenceRTLRally",
"Test Fence RTL Rally",
self.test_fence_rtl_rally),
("ADSB",
"Test ADSB",
self.test_adsb),
("Button",
"Test Buttons",
self.test_button),
("FRSkySPort",
"Test FrSky SPort mode",
self.test_frsky_sport),
("FRSkyPassThrough",
"Test FrSky PassThrough serial output",
self.test_frsky_passthrough),
("FRSkyD",
"Test FrSkyD serial output",
self.test_frsky_d),
("LTM",
"Test LTM serial output",
self.test_ltm),
("AdvancedFailsafe",
"Test Advanced Failsafe",
self.test_advanced_failsafe),
("LOITER",
"Test Loiter mode",
self.LOITER),
("DeepStall",
"Test DeepStall Landing",
self.fly_deepstall),
("LargeMissions",
"Test Manipulation of Large missions",
self.test_large_missions),
("Soaring",
"Test Soaring feature",
self.fly_soaring),
("Terrain",
"Test terrain following in mission",
self.fly_terrain_mission),
("Deadreckoning",
"Test deadreckoning support",
self.deadreckoning),
("EKFlaneswitch",
"Test EKF3 Affinity and Lane Switching",
self.ekf_lane_switch),
("LogUpload",
"Log upload",
self.log_upload),
])
return ret
def disabled_tests(self):
return {
"Button": "See https://github.com/ArduPilot/ardupilot/issues/15259",
}
| gpl-3.0 | -238,449,084,716,790,980 | 38.840973 | 161 | 0.547441 | false |
akinaru/ffmpeg-image-sequencer | ffmpeg-appender-test.py | 1 | 3157 | #!/usr/bin/python
#####################################################################################
#####################################################################################
#
# title : ffmpeg-appender-test.py
# authors : Bertrand Martel
# copyrights : Copyright (c) 2015 Bertrand Martel
# license : The MIT License (MIT)
# date : 16/08/2015
# description : create the video if it does not exist and append a series of images taken from the web
# usage : python ffmpeg-appender-test.py
#
#####################################################################################
#####################################################################################
import sys, getopt, os, subprocess
def main(argv):
output_file_name = "video_space"
temporary_file_name = "temp_space"
temporary_file_name_video = "temp_video"
picture_array = [ "https://upload.wikimedia.org/wikipedia/commons/4/4e/Anttlers101.jpg", \
"https://upload.wikimedia.org/wikipedia/commons/3/3b/NASA-SpiralGalaxyM101-20140505.jpg", \
"https://upload.wikimedia.org/wikipedia/commons/b/b0/Supernova_in_M101_2011-08-25.jpg", \
"http://1.1.1.5/bmi/images.nationalgeographic.com/wpf/media-live/photos/000/061/cache/earth-full-view_6125_990x742.jpg" ]
this_dir = os.path.dirname(os.path.abspath(__file__))
os.chdir(this_dir)
output_file_path = ''.join([this_dir , "/",output_file_name,".avi"])
temporary_file_path_avi = ''.join([this_dir,"/",temporary_file_name,".avi"])
temporary_file_name_jpg = ''.join([this_dir,"/",temporary_file_name,".jpg"])
temporary_file_name_video = ''.join([this_dir,"/",temporary_file_name_video,".avi"])
#remove files
try:
os.remove(output_file_path)
except OSError:
pass
try:
os.remove(temporary_file_path_avi)
except OSError:
pass
try:
os.remove(temporary_file_name_jpg)
except OSError:
pass
try:
os.remove(temporary_file_name_video)
except OSError:
pass
for picture in picture_array:
subprocess.call(["wget", picture, "-O", temporary_file_name_jpg])
subprocess.call(["ffmpeg -nostdin -v verbose -f image2 -pattern_type sequence -start_number 0 -r 1 -i " + temporary_file_name_jpg + " -s 1920x1080 " + temporary_file_path_avi],shell=True)
try:
os.remove(temporary_file_name_jpg)
except OSError:
pass
if os.path.exists(output_file_path):
# concat this video and former video
            subprocess.call(['cd ' + this_dir + ' && ffmpeg -nostdin -v verbose -i "concat:' + output_file_name + '.avi|' + temporary_file_name + '.avi" -c copy ' + temporary_file_name_video], shell=True)
try:
os.remove(temporary_file_path_avi)
except OSError:
pass
try:
os.remove(output_file_path)
except OSError:
pass
os.rename(temporary_file_name_video, output_file_path)
else:
os.rename(temporary_file_path_avi, output_file_path)
if __name__ == "__main__":
main(sys.argv[1:])
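# Editor's note (hedged, not part of the original script): the "concat:"
# protocol used above performs raw stream-level concatenation and is only
# reliable for a few container formats; with a recent ffmpeg the concat
# demuxer is the more robust route, e.g.
#   ffmpeg -f concat -safe 0 -i list.txt -c copy out.avi
# where list.txt holds one "file '<path>'" line per clip.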
__author__ = "Bertrand Martel"
__copyright__ = "Copyright 2015, Bertrand Martel"
__credits__ = ["Bertrand Martel"]
__license__ = "MIT"
__version__ = "1.0.0"
__maintainer__ = "Bertrand Martel"
__email__ = "[email protected]"
__status__ = "POC" | mit | -8,222,219,400,538,782,000 | 30.58 | 193 | 0.602471 | false |
csangani/ReproducingSprout | extract_traces.py | 1 | 1317 | ## Create a network trace from the saturator output
import glob
import os
import sys
INPUT_PATH = 'raw_traces'
OUTPUT_PATH = 'cleaned_traces'
def extract_trace(filePath, targetFilePath):
with open(filePath) as f:
with open(targetFilePath, 'w+') as wf:
firstLine = True
for line in f:
value = long(line.lstrip('recv_time=').rstrip(',\n'))
if firstLine:
base = value
firstLine = False
value = (value - base) / 1000000
wf.write('%s\n' % value)
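# Editor's hedged sketch (not in the original script): the expected input is
# one "recv_time=<nanoseconds>," line per packet; the conversion above
# rebases to the first timestamp and scales to milliseconds, equivalent to:
def _example_convert(lines):
    base = None
    out = []
    for line in lines:
        value = long(line.lstrip('recv_time=').rstrip(',\n'))
        if base is None:
            base = value
        out.append((value - base) / 1000000)
    return out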
if __name__ == '__main__':
if len(sys.argv) >= 2:
source = sys.argv[1]
else:
source = INPUT_PATH
if len(sys.argv) >= 3:
destination = sys.argv[2]
else:
destination = OUTPUT_PATH
if not os.path.exists(destination):
os.makedirs(destination)
networks = glob.glob('%s/*' % source)
for network in networks:
if not os.path.exists(network.replace(source, destination)):
os.makedirs(network.replace(source, destination))
files = glob.glob('%s/*.rx' % network)
for file in files:
extract_trace(file, file.replace(source, destination).replace('.rx', '.pps'))
| mit | -2,875,549,895,375,812,000 | 27.630435 | 89 | 0.535308 | false |
meshulam/sly | deps/shapely/geos.py | 1 | 25191 | """
Proxies for the libgeos_c shared lib, GEOS-specific exceptions, and utilities
"""
import os
import re
import sys
import atexit
import logging
import threading
from ctypes import CDLL, cdll, pointer, c_void_p, c_size_t, c_char_p, string_at
from ctypes.util import find_library
from . import ftools
from .ctypes_declarations import prototype, EXCEPTION_HANDLER_FUNCTYPE
# Add message handler to this module's logger
LOG = logging.getLogger(__name__)
if 'all' in sys.warnoptions:
# show GEOS messages in console with: python -W all
logging.basicConfig()
else:
# no handler messages shown
class NullHandler(logging.Handler):
def emit(self, record):
pass
LOG.addHandler(NullHandler())
# Find and load the GEOS and C libraries
# If this ever gets any longer, we'll break it into separate modules
def load_dll(libname, fallbacks=None):
lib = find_library(libname)
if lib is not None:
try:
return CDLL(lib)
except OSError:
pass
if fallbacks is not None:
for name in fallbacks:
try:
return CDLL(name)
except OSError:
# move on to the next fallback
pass
# No shared library was loaded. Raise OSError.
raise OSError(
"Could not find library %s or load any of its variants %s" % (
libname, fallbacks or []))
if sys.platform.startswith('linux'):
_lgeos = load_dll('geos_c', fallbacks=['libgeos_c.so.1', 'libgeos_c.so'])
free = load_dll('c').free
free.argtypes = [c_void_p]
free.restype = None
elif sys.platform == 'darwin':
if hasattr(sys, 'frozen'):
# .app file from py2app
alt_paths = [os.path.join(os.environ['RESOURCEPATH'],
'..', 'Frameworks', 'libgeos_c.dylib')]
else:
alt_paths = [
# The Framework build from Kyng Chaos:
"/Library/Frameworks/GEOS.framework/Versions/Current/GEOS",
# macports
'/opt/local/lib/libgeos_c.dylib',
]
_lgeos = load_dll('geos_c', fallbacks=alt_paths)
free = load_dll('c').free
free.argtypes = [c_void_p]
free.restype = None
elif sys.platform == 'win32':
try:
egg_dlls = os.path.abspath(os.path.join(os.path.dirname(__file__),
"DLLs"))
wininst_dlls = os.path.abspath(os.__file__ + "../../../DLLs")
original_path = os.environ['PATH']
os.environ['PATH'] = "%s;%s;%s" % \
(egg_dlls, wininst_dlls, original_path)
_lgeos = CDLL("geos.dll")
except (ImportError, WindowsError, OSError):
raise
def free(m):
try:
cdll.msvcrt.free(m)
except WindowsError:
# XXX: See http://trac.gispython.org/projects/PCL/ticket/149
pass
elif sys.platform == 'sunos5':
_lgeos = load_dll('geos_c', fallbacks=['libgeos_c.so.1', 'libgeos_c.so'])
free = CDLL('libc.so.1').free
free.argtypes = [c_void_p]
free.restype = None
else: # other *nix systems
_lgeos = load_dll('geos_c', fallbacks=['libgeos_c.so.1', 'libgeos_c.so'])
free = load_dll('c', fallbacks=['libc.so.6']).free
free.argtypes = [c_void_p]
free.restype = None
def _geos_version():
# extern const char GEOS_DLL *GEOSversion();
GEOSversion = _lgeos.GEOSversion
GEOSversion.restype = c_char_p
GEOSversion.argtypes = []
#define GEOS_CAPI_VERSION "@VERSION@-CAPI-@CAPI_VERSION@"
geos_version_string = GEOSversion()
if sys.version_info[0] >= 3:
geos_version_string = geos_version_string.decode('ascii')
res = re.findall(r'(\d+)\.(\d+)\.(\d+)', geos_version_string)
assert len(res) == 2, res
geos_version = tuple(int(x) for x in res[0])
capi_version = tuple(int(x) for x in res[1])
return geos_version_string, geos_version, capi_version
geos_version_string, geos_version, geos_capi_version = _geos_version()
# If we have the new interface, then record a baseline so that we know what
# additional functions are declared in ctypes_declarations.
if geos_version >= (3, 1, 0):
start_set = set(_lgeos.__dict__)
# Apply prototypes for the libgeos_c functions
prototype(_lgeos, geos_version)
# If we have the new interface, automatically detect all function
# declarations, and declare their re-entrant counterpart.
if geos_version >= (3, 1, 0):
end_set = set(_lgeos.__dict__)
new_func_names = end_set - start_set
for func_name in new_func_names:
new_func_name = "%s_r" % func_name
if hasattr(_lgeos, new_func_name):
new_func = getattr(_lgeos, new_func_name)
old_func = getattr(_lgeos, func_name)
new_func.restype = old_func.restype
if old_func.argtypes is None:
# Handle functions that didn't take an argument before,
# finishGEOS.
new_func.argtypes = [c_void_p]
else:
new_func.argtypes = [c_void_p] + old_func.argtypes
if old_func.errcheck is not None:
new_func.errcheck = old_func.errcheck
# Handle special case.
_lgeos.initGEOS_r.restype = c_void_p
_lgeos.initGEOS_r.argtypes = \
[EXCEPTION_HANDLER_FUNCTYPE, EXCEPTION_HANDLER_FUNCTYPE]
_lgeos.finishGEOS_r.argtypes = [c_void_p]
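def _example_reentrant_call(handle, geom):
    # Editor's hedged sketch (not part of shapely): after the loop above,
    # every plain function such as GEOSGetNumGeometries(g) has gained a
    # re-entrant twin taking the GEOS context handle from initGEOS_r as
    # its first argument.
    return _lgeos.GEOSGetNumGeometries_r(handle, geom)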
# Exceptions
class ReadingError(Exception):
pass
class DimensionError(Exception):
pass
class TopologicalError(Exception):
pass
class PredicateError(Exception):
pass
def error_handler(fmt, *args):
if sys.version_info[0] >= 3:
fmt = fmt.decode('ascii')
args = [arg.decode('ascii') for arg in args]
LOG.error(fmt, *args)
def notice_handler(fmt, args):
if sys.version_info[0] >= 3:
fmt = fmt.decode('ascii')
args = args.decode('ascii')
LOG.warning(fmt, args)
error_h = EXCEPTION_HANDLER_FUNCTYPE(error_handler)
notice_h = EXCEPTION_HANDLER_FUNCTYPE(notice_handler)
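# The wrapped callbacks above are deliberately kept as module-level
# references: ctypes callback objects must outlive every GEOS call that may
# invoke them, otherwise GEOS would call into freed memory.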
class WKTReader(object):
_lgeos = None
_reader = None
def __init__(self, lgeos):
"""Create WKT Reader"""
self._lgeos = lgeos
self._reader = self._lgeos.GEOSWKTReader_create()
def __del__(self):
"""Destroy WKT Reader"""
if self._lgeos is not None:
self._lgeos.GEOSWKTReader_destroy(self._reader)
self._reader = None
self._lgeos = None
def read(self, text):
"""Returns geometry from WKT"""
if sys.version_info[0] >= 3:
text = text.encode('ascii')
geom = self._lgeos.GEOSWKTReader_read(self._reader, c_char_p(text))
if not geom:
raise ReadingError("Could not create geometry because of errors "
"while reading input.")
# avoid circular import dependency
from shapely.geometry.base import geom_factory
return geom_factory(geom)
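# Minimal usage sketch (illustrative, not executed here; assumes the
# module-level ``lgeos`` proxy created near the bottom of this file):
#
#   reader = WKTReader(lgeos)
#   geom = reader.read('POINT (1 2)')   # -> shapely geometry object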
class WKTWriter(object):
_lgeos = None
_writer = None
# Establish default output settings
defaults = {}
if geos_version >= (3, 3, 0):
defaults['trim'] = True
defaults['output_dimension'] = 3
    # GEOS-side defaults, cached here because these settings have no "get"
    # accessor in the C API
_trim = False
_rounding_precision = -1
_old_3d = False
@property
def trim(self):
"""Trimming of unnecessary decimals (default: True)"""
return getattr(self, '_trim')
@trim.setter
def trim(self, value):
self._trim = bool(value)
self._lgeos.GEOSWKTWriter_setTrim(self._writer, self._trim)
@property
def rounding_precision(self):
"""Rounding precision when writing the WKT.
A precision of -1 (default) disables it."""
return getattr(self, '_rounding_precision')
@rounding_precision.setter
def rounding_precision(self, value):
self._rounding_precision = int(value)
self._lgeos.GEOSWKTWriter_setRoundingPrecision(
self._writer, self._rounding_precision)
@property
def output_dimension(self):
"""Output dimension, either 2 or 3 (default)"""
return self._lgeos.GEOSWKTWriter_getOutputDimension(
self._writer)
@output_dimension.setter
def output_dimension(self, value):
self._lgeos.GEOSWKTWriter_setOutputDimension(
self._writer, int(value))
@property
def old_3d(self):
"""Show older style for 3D WKT, without 'Z' (default: False)"""
return getattr(self, '_old_3d')
@old_3d.setter
def old_3d(self, value):
self._old_3d = bool(value)
self._lgeos.GEOSWKTWriter_setOld3D(self._writer, self._old_3d)
def __init__(self, lgeos, **settings):
"""Create WKT Writer
Note: writer defaults are set differently for GEOS 3.3.0 and up.
For example, with 'POINT Z (1 2 3)':
newer: POINT Z (1 2 3)
older: POINT (1.0000000000000000 2.0000000000000000)
The older formatting can be achieved for GEOS 3.3.0 and up by setting
the properties:
trim = False
output_dimension = 2
"""
self._lgeos = lgeos
self._writer = self._lgeos.GEOSWKTWriter_create()
applied_settings = self.defaults.copy()
applied_settings.update(settings)
for name in applied_settings:
setattr(self, name, applied_settings[name])
def __setattr__(self, name, value):
"""Limit setting attributes"""
if hasattr(self, name):
object.__setattr__(self, name, value)
else:
raise AttributeError('%r object has no attribute %r' %
(self.__class__.__name__, name))
def __del__(self):
"""Destroy WKT Writer"""
if self._lgeos is not None:
self._lgeos.GEOSWKTWriter_destroy(self._writer)
self._writer = None
self._lgeos = None
def write(self, geom):
"""Returns WKT string for geometry"""
if geom is None or geom._geom is None:
raise ValueError("Null geometry supports no operations")
result = self._lgeos.GEOSWKTWriter_write(self._writer, geom._geom)
text = string_at(result)
lgeos.GEOSFree(result)
if sys.version_info[0] >= 3:
return text.decode('ascii')
else:
return text
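# Minimal usage sketch (illustrative, not executed here):
#
#   writer = WKTWriter(lgeos, rounding_precision=2)
#   text = writer.write(geom)           # e.g. 'POINT (1 2)'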
class WKBReader(object):
_lgeos = None
_reader = None
def __init__(self, lgeos):
"""Create WKB Reader"""
self._lgeos = lgeos
self._reader = self._lgeos.GEOSWKBReader_create()
def __del__(self):
"""Destroy WKB Reader"""
if self._lgeos is not None:
self._lgeos.GEOSWKBReader_destroy(self._reader)
self._reader = None
self._lgeos = None
def read(self, data):
"""Returns geometry from WKB"""
geom = self._lgeos.GEOSWKBReader_read(
self._reader, c_char_p(data), c_size_t(len(data)))
if not geom:
raise ReadingError("Could not create geometry because of errors "
"while reading input.")
# avoid circular import dependency
from shapely import geometry
return geometry.base.geom_factory(geom)
def read_hex(self, data):
"""Returns geometry from WKB hex"""
if sys.version_info[0] >= 3:
data = data.encode('ascii')
geom = self._lgeos.GEOSWKBReader_readHEX(
self._reader, c_char_p(data), c_size_t(len(data)))
if not geom:
raise ReadingError("Could not create geometry because of errors "
"while reading input.")
# avoid circular import dependency
from shapely import geometry
return geometry.base.geom_factory(geom)
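# Minimal usage sketch (illustrative, not executed here; the hex below is
# little-endian WKB for POINT (1 2)):
#
#   reader = WKBReader(lgeos)
#   geom = reader.read_hex('0101000000000000000000F03F0000000000000040')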
class WKBWriter(object):
_lgeos = None
_writer = None
# EndianType enum in ByteOrderValues.h
_ENDIAN_BIG = 0
_ENDIAN_LITTLE = 1
# Establish default output setting
defaults = {'output_dimension': 3}
@property
def output_dimension(self):
"""Output dimension, either 2 or 3 (default)"""
return self._lgeos.GEOSWKBWriter_getOutputDimension(self._writer)
@output_dimension.setter
def output_dimension(self, value):
self._lgeos.GEOSWKBWriter_setOutputDimension(
self._writer, int(value))
@property
def big_endian(self):
"""Byte order is big endian, True (default) or False"""
return (self._lgeos.GEOSWKBWriter_getByteOrder(self._writer) ==
self._ENDIAN_BIG)
@big_endian.setter
def big_endian(self, value):
self._lgeos.GEOSWKBWriter_setByteOrder(
self._writer, self._ENDIAN_BIG if value else self._ENDIAN_LITTLE)
@property
def include_srid(self):
"""Include SRID, True or False (default)"""
return bool(self._lgeos.GEOSWKBWriter_getIncludeSRID(self._writer))
@include_srid.setter
def include_srid(self, value):
self._lgeos.GEOSWKBWriter_setIncludeSRID(self._writer, bool(value))
def __init__(self, lgeos, **settings):
"""Create WKB Writer"""
self._lgeos = lgeos
self._writer = self._lgeos.GEOSWKBWriter_create()
applied_settings = self.defaults.copy()
applied_settings.update(settings)
for name in applied_settings:
setattr(self, name, applied_settings[name])
def __setattr__(self, name, value):
"""Limit setting attributes"""
if hasattr(self, name):
object.__setattr__(self, name, value)
else:
raise AttributeError('%r object has no attribute %r' %
(self.__class__.__name__, name))
def __del__(self):
"""Destroy WKB Writer"""
if self._lgeos is not None:
self._lgeos.GEOSWKBWriter_destroy(self._writer)
self._writer = None
self._lgeos = None
def write(self, geom):
"""Returns WKB byte string for geometry"""
if geom is None or geom._geom is None:
raise ValueError("Null geometry supports no operations")
size = c_size_t()
result = self._lgeos.GEOSWKBWriter_write(
self._writer, geom._geom, pointer(size))
data = string_at(result, size.value)
lgeos.GEOSFree(result)
return data
def write_hex(self, geom):
"""Returns WKB hex string for geometry"""
if geom is None or geom._geom is None:
raise ValueError("Null geometry supports no operations")
size = c_size_t()
result = self._lgeos.GEOSWKBWriter_writeHEX(
self._writer, geom._geom, pointer(size))
data = string_at(result, size.value)
lgeos.GEOSFree(result)
if sys.version_info[0] >= 3:
return data.decode('ascii')
else:
return data
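# Minimal usage sketch (illustrative, not executed here):
#
#   writer = WKBWriter(lgeos, big_endian=False)
#   data = writer.write(geom)           # WKB bytes
#   hex_str = writer.write_hex(geom)    # same geometry as a hex string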
# Errcheck functions for ctypes
def errcheck_wkb(result, func, argtuple):
    '''Returns bytes copied from a C pointer, then frees the pointer'''
if not result:
return None
size_ref = argtuple[-1]
size = size_ref.contents
retval = string_at(result, size.value)[:]
lgeos.GEOSFree(result)
return retval
def errcheck_just_free(result, func, argtuple):
    '''Returns a string copied from a C pointer, then frees the pointer'''
retval = string_at(result)
lgeos.GEOSFree(result)
if sys.version_info[0] >= 3:
return retval.decode('ascii')
else:
return retval
def errcheck_predicate(result, func, argtuple):
'''Result is 2 on exception, 1 on True, 0 on False'''
if result == 2:
raise PredicateError("Failed to evaluate %s" % repr(func))
return result
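# ctypes invokes an attached errcheck as ``errcheck(result, func, args)``
# after every call to the foreign function; the LGEOS* classes below attach
# these handlers to the relevant GEOS entry points.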
class LGEOSBase(threading.local):
"""Proxy for GEOS C API
This is a base class. Do not instantiate.
"""
methods = {}
def __init__(self, dll):
self._lgeos = dll
self.geos_handle = None
def __del__(self):
"""Cleanup GEOS related processes"""
if self._lgeos is not None:
self._lgeos.finishGEOS()
self._lgeos = None
self.geos_handle = None
class LGEOS300(LGEOSBase):
"""Proxy for GEOS 3.0.0-CAPI-1.4.1
"""
geos_version = (3, 0, 0)
geos_capi_version = (1, 4, 0)
def __init__(self, dll):
super(LGEOS300, self).__init__(dll)
self.geos_handle = self._lgeos.initGEOS(notice_h, error_h)
keys = list(self._lgeos.__dict__.keys())
for key in keys:
setattr(self, key, getattr(self._lgeos, key))
self.GEOSFree = self._lgeos.free
        # Deprecated GEOS functions; errchecks still attached for compatibility
self.GEOSGeomToWKB_buf.errcheck = errcheck_wkb
self.GEOSGeomToWKT.errcheck = errcheck_just_free
self.GEOSRelate.errcheck = errcheck_just_free
for pred in (
self.GEOSDisjoint,
self.GEOSTouches,
self.GEOSIntersects,
self.GEOSCrosses,
self.GEOSWithin,
self.GEOSContains,
self.GEOSOverlaps,
self.GEOSEquals,
self.GEOSEqualsExact,
self.GEOSisEmpty,
self.GEOSisValid,
self.GEOSisSimple,
self.GEOSisRing,
self.GEOSHasZ):
pred.errcheck = errcheck_predicate
self.methods['area'] = self.GEOSArea
self.methods['boundary'] = self.GEOSBoundary
self.methods['buffer'] = self.GEOSBuffer
self.methods['centroid'] = self.GEOSGetCentroid
self.methods['representative_point'] = self.GEOSPointOnSurface
self.methods['convex_hull'] = self.GEOSConvexHull
self.methods['distance'] = self.GEOSDistance
self.methods['envelope'] = self.GEOSEnvelope
self.methods['length'] = self.GEOSLength
self.methods['has_z'] = self.GEOSHasZ
self.methods['is_empty'] = self.GEOSisEmpty
self.methods['is_ring'] = self.GEOSisRing
self.methods['is_simple'] = self.GEOSisSimple
self.methods['is_valid'] = self.GEOSisValid
self.methods['disjoint'] = self.GEOSDisjoint
self.methods['touches'] = self.GEOSTouches
self.methods['intersects'] = self.GEOSIntersects
self.methods['crosses'] = self.GEOSCrosses
self.methods['within'] = self.GEOSWithin
self.methods['contains'] = self.GEOSContains
self.methods['overlaps'] = self.GEOSOverlaps
self.methods['equals'] = self.GEOSEquals
self.methods['equals_exact'] = self.GEOSEqualsExact
self.methods['relate'] = self.GEOSRelate
self.methods['difference'] = self.GEOSDifference
self.methods['symmetric_difference'] = self.GEOSSymDifference
self.methods['union'] = self.GEOSUnion
self.methods['intersection'] = self.GEOSIntersection
self.methods['simplify'] = self.GEOSSimplify
self.methods['topology_preserve_simplify'] = \
self.GEOSTopologyPreserveSimplify
class LGEOS310(LGEOSBase):
"""Proxy for GEOS 3.1.0-CAPI-1.5.0
"""
geos_version = (3, 1, 0)
geos_capi_version = (1, 5, 0)
def __init__(self, dll):
super(LGEOS310, self).__init__(dll)
self.geos_handle = self._lgeos.initGEOS_r(notice_h, error_h)
keys = list(self._lgeos.__dict__.keys())
for key in [x for x in keys if not x.endswith('_r')]:
if key + '_r' in keys:
reentr_func = getattr(self._lgeos, key + '_r')
attr = ftools.partial(reentr_func, self.geos_handle)
attr.__name__ = reentr_func.__name__
setattr(self, key, attr)
else:
setattr(self, key, getattr(self._lgeos, key))
if not hasattr(self, 'GEOSFree'):
# GEOS < 3.1.1
self.GEOSFree = self._lgeos.free
        # Deprecated GEOS functions; errchecks still attached for compatibility
self.GEOSGeomToWKB_buf.func.errcheck = errcheck_wkb
self.GEOSGeomToWKT.func.errcheck = errcheck_just_free
self.GEOSRelate.func.errcheck = errcheck_just_free
for pred in (
self.GEOSDisjoint,
self.GEOSTouches,
self.GEOSIntersects,
self.GEOSCrosses,
self.GEOSWithin,
self.GEOSContains,
self.GEOSOverlaps,
self.GEOSEquals,
self.GEOSEqualsExact,
self.GEOSisEmpty,
self.GEOSisValid,
self.GEOSisSimple,
self.GEOSisRing,
self.GEOSHasZ):
pred.func.errcheck = errcheck_predicate
self.GEOSisValidReason.func.errcheck = errcheck_just_free
self.methods['area'] = self.GEOSArea
self.methods['boundary'] = self.GEOSBoundary
self.methods['buffer'] = self.GEOSBuffer
self.methods['centroid'] = self.GEOSGetCentroid
self.methods['representative_point'] = self.GEOSPointOnSurface
self.methods['convex_hull'] = self.GEOSConvexHull
self.methods['distance'] = self.GEOSDistance
self.methods['envelope'] = self.GEOSEnvelope
self.methods['length'] = self.GEOSLength
self.methods['has_z'] = self.GEOSHasZ
self.methods['is_empty'] = self.GEOSisEmpty
self.methods['is_ring'] = self.GEOSisRing
self.methods['is_simple'] = self.GEOSisSimple
self.methods['is_valid'] = self.GEOSisValid
self.methods['disjoint'] = self.GEOSDisjoint
self.methods['touches'] = self.GEOSTouches
self.methods['intersects'] = self.GEOSIntersects
self.methods['crosses'] = self.GEOSCrosses
self.methods['within'] = self.GEOSWithin
self.methods['contains'] = self.GEOSContains
self.methods['overlaps'] = self.GEOSOverlaps
self.methods['equals'] = self.GEOSEquals
self.methods['equals_exact'] = self.GEOSEqualsExact
self.methods['relate'] = self.GEOSRelate
self.methods['difference'] = self.GEOSDifference
self.methods['symmetric_difference'] = self.GEOSSymDifference
self.methods['union'] = self.GEOSUnion
self.methods['intersection'] = self.GEOSIntersection
self.methods['prepared_intersects'] = self.GEOSPreparedIntersects
self.methods['prepared_contains'] = self.GEOSPreparedContains
self.methods['prepared_contains_properly'] = \
self.GEOSPreparedContainsProperly
self.methods['prepared_covers'] = self.GEOSPreparedCovers
self.methods['simplify'] = self.GEOSSimplify
self.methods['topology_preserve_simplify'] = \
self.GEOSTopologyPreserveSimplify
self.methods['cascaded_union'] = self.GEOSUnionCascaded
class LGEOS311(LGEOS310):
"""Proxy for GEOS 3.1.1-CAPI-1.6.0
"""
geos_version = (3, 1, 1)
geos_capi_version = (1, 6, 0)
def __init__(self, dll):
super(LGEOS311, self).__init__(dll)
class LGEOS320(LGEOS311):
"""Proxy for GEOS 3.2.0-CAPI-1.6.0
"""
geos_version = (3, 2, 0)
geos_capi_version = (1, 6, 0)
def __init__(self, dll):
super(LGEOS320, self).__init__(dll)
self.methods['parallel_offset'] = self.GEOSSingleSidedBuffer
self.methods['project'] = self.GEOSProject
self.methods['project_normalized'] = self.GEOSProjectNormalized
self.methods['interpolate'] = self.GEOSInterpolate
self.methods['interpolate_normalized'] = \
self.GEOSInterpolateNormalized
self.methods['buffer_with_style'] = self.GEOSBufferWithStyle
class LGEOS330(LGEOS320):
"""Proxy for GEOS 3.3.0-CAPI-1.7.0
"""
geos_version = (3, 3, 0)
geos_capi_version = (1, 7, 0)
def __init__(self, dll):
super(LGEOS330, self).__init__(dll)
        # GEOS 3.3.8 from Homebrew ships GEOSPolygonize_full but does not
        # advertise it, so patch it in explicitly here.
key = 'GEOSPolygonize_full'
func = getattr(self._lgeos, key + '_r')
attr = ftools.partial(func, self.geos_handle)
attr.__name__ = func.__name__
setattr(self, key, attr)
for pred in (self.GEOSisClosed,):
pred.func.errcheck = errcheck_predicate
self.methods['unary_union'] = self.GEOSUnaryUnion
self.methods['is_closed'] = self.GEOSisClosed
self.methods['cascaded_union'] = self.methods['unary_union']
self.methods['snap'] = self.GEOSSnap
class LGEOS340(LGEOS330):
"""Proxy for GEOS 3.4.0-CAPI-1.8.0
"""
geos_version = (3, 4, 0)
geos_capi_version = (1, 8, 0)
def __init__(self, dll):
super(LGEOS340, self).__init__(dll)
self.methods['delaunay_triangulation'] = self.GEOSDelaunayTriangulation
self.methods['nearest_points'] = self.GEOSNearestPoints
if geos_version >= (3, 4, 0):
L = LGEOS340
elif geos_version >= (3, 3, 0):
L = LGEOS330
elif geos_version >= (3, 2, 0):
L = LGEOS320
elif geos_version >= (3, 1, 1):
L = LGEOS311
elif geos_version >= (3, 1, 0):
L = LGEOS310
else:
L = LGEOS300
lgeos = L(_lgeos)
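# Illustrative sketch of how the proxy is consumed (not executed here; the
# ``methods`` mapping entries are set up in the classes above):
#
#   contains = lgeos.methods['contains']
#   contains(a._geom, b._geom)   # 1 or 0; raises PredicateError on GEOS error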
def cleanup(proxy):
del proxy
atexit.register(cleanup, lgeos)
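# The atexit registration above keeps a reference to ``lgeos`` alive until
# interpreter shutdown; releasing it there allows LGEOSBase.__del__ to call
# finishGEOS and tear down the GEOS handle.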