repo_name (string, 6-100 chars) | path (string, 4-294 chars) | copies (string, 1-5 chars) | size (string, 4-6 chars) | content (string, 606-896k chars) | license (15 classes)
---|---|---|---|---|---
lostemp/lsk-3.4-android-12.09 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py | 12980 | 5411 |
# SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <[email protected]>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
try:
import wx
except ImportError:
raise ImportError("You need to install the wxPython lib for this script")
class RootFrame(wx.Frame):
Y_OFFSET = 100
RECT_HEIGHT = 100
RECT_SPACE = 50
EVENT_MARKING_WIDTH = 5
def __init__(self, sched_tracer, title, parent = None, id = -1):
wx.Frame.__init__(self, parent, id, title)
(self.screen_width, self.screen_height) = wx.GetDisplaySize()
self.screen_width -= 10
self.screen_height -= 10
self.zoom = 0.5
self.scroll_scale = 20
self.sched_tracer = sched_tracer
self.sched_tracer.set_root_win(self)
(self.ts_start, self.ts_end) = sched_tracer.interval()
self.update_width_virtual()
self.nr_rects = sched_tracer.nr_rectangles() + 1
self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
# whole window panel
self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))
# scrollable container
self.scroll = wx.ScrolledWindow(self.panel)
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
self.scroll.EnableScrolling(True, True)
self.scroll.SetFocus()
# scrollable drawing area
self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
self.scroll.Fit()
self.Fit()
self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)
self.txt = None
self.Show(True)
def us_to_px(self, val):
return val / (10 ** 3) * self.zoom
def px_to_us(self, val):
return (val / self.zoom) * (10 ** 3)
def scroll_start(self):
(x, y) = self.scroll.GetViewStart()
return (x * self.scroll_scale, y * self.scroll_scale)
def scroll_start_us(self):
(x, y) = self.scroll_start()
return self.px_to_us(x)
def paint_rectangle_zone(self, nr, color, top_color, start, end):
offset_px = self.us_to_px(start - self.ts_start)
width_px = self.us_to_px(end - self.ts_start)
offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
width_py = RootFrame.RECT_HEIGHT
dc = self.dc
if top_color is not None:
(r, g, b) = top_color
top_color = wx.Colour(r, g, b)
brush = wx.Brush(top_color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
width_py -= RootFrame.EVENT_MARKING_WIDTH
offset_py += RootFrame.EVENT_MARKING_WIDTH
(r, g, b) = color
color = wx.Colour(r, g, b)
brush = wx.Brush(color, wx.SOLID)
dc.SetBrush(brush)
dc.DrawRectangle(offset_px, offset_py, width_px, width_py)
def update_rectangles(self, dc, start, end):
start += self.ts_start
end += self.ts_start
self.sched_tracer.fill_zone(start, end)
def on_paint(self, event):
dc = wx.PaintDC(self.scroll_panel)
self.dc = dc
width = min(self.width_virtual, self.screen_width)
(x, y) = self.scroll_start()
start = self.px_to_us(x)
end = self.px_to_us(x + width)
self.update_rectangles(dc, start, end)
def rect_from_ypixel(self, y):
y -= RootFrame.Y_OFFSET
rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
return -1
return rect
def update_summary(self, txt):
if self.txt:
self.txt.Destroy()
self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))
def on_mouse_down(self, event):
(x, y) = event.GetPositionTuple()
rect = self.rect_from_ypixel(y)
if rect == -1:
return
t = self.px_to_us(x) + self.ts_start
self.sched_tracer.mouse_down(rect, t)
def update_width_virtual(self):
self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)
def __zoom(self, x):
self.update_width_virtual()
(xpos, ypos) = self.scroll.GetViewStart()
xpos = self.us_to_px(x) / self.scroll_scale
self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
self.Refresh()
def zoom_in(self):
x = self.scroll_start_us()
self.zoom *= 2
self.__zoom(x)
def zoom_out(self):
x = self.scroll_start_us()
self.zoom /= 2
self.__zoom(x)
def on_key_press(self, event):
key = event.GetRawKeyCode()
if key == ord("+"):
self.zoom_in()
return
if key == ord("-"):
self.zoom_out()
return
key = event.GetKeyCode()
(x, y) = self.scroll.GetViewStart()
if key == wx.WXK_RIGHT:
self.scroll.Scroll(x + 1, y)
elif key == wx.WXK_LEFT:
self.scroll.Scroll(x - 1, y)
elif key == wx.WXK_DOWN:
self.scroll.Scroll(x, y + 1)
elif key == wx.WXK_UP:
self.scroll.Scroll(x, y - 1)
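# A quick sanity check of the zoom math above (hypothetical values): with the
# default zoom of 0.5, us_to_px() and px_to_us() are inverses of each other:
#   us_to_px(2000)  ->  2000 / 10**3 * 0.5   == 1.0    (2 ms maps to 1 px)
#   px_to_us(1.0)   ->  (1.0 / 0.5) * 10**3  == 2000.0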
| gpl-2.0 |

LaRiffle/axa_challenge | fonction_py/train.py | 1 | 12400 |
from fonction_py.tools import *
from fonction_py.preprocess import *
from sklearn import linear_model
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn import cross_validation
from sklearn.linear_model import LogisticRegression
from sklearn import tree
from sklearn import svm
from sklearn import decomposition
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import RandomForestRegressor
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.grid_search import GridSearchCV
from sklearn.grid_search import RandomizedSearchCV
from scipy.stats import uniform as sp_randint
from sklearn import datasets
from sklearn.linear_model import Ridge
from fonction_py.tim import *
import time
def faireTout():
fields = ['DATE', 'DAY_OFF', 'WEEK_END', 'DAY_WE_DS', 'ASS_ASSIGNMENT', 'CSPL_RECEIVED_CALLS' ] # select the columns to read
c = pd.DataFrame()
listmodel = faireListModel()
#'Evenements', 'Gestion Amex'
#setFields = set(pd.read_csv("data/fields.txt", sep=";")['0'].values)
# resultat = pd.read_csv("data/submission.txt", sep="\t")
i=0
# res = []
start_time = time.time()
model = listmodel[24]
data=pd.read_csv("data/trainPure.csv", sep=";", usecols=fields) # read the training file
resultat = pd.read_csv("data/submission.txt", sep="\t") # read the test file
res=[]
for model in listmodel:
i = i+1
print(model[0])
x,y = preprocess(data.copy(), model[0]) # add the features
model[1].fit(x, y)
#model.score(xTrain, yTrain)
(xTest, souvenir)=preprocessFINAL(x,model[0])
pred = model[1].predict(xTest)
pred[pred>max(y)*1.05]=max(y)*1.05
pred[pred<0]=0
pred = np.round(pred).astype(int) # cast to int
souvenir['prediction'] = pred
resultat=pd.merge(resultat, souvenir, how='left',on=['DATE', 'ASS_ASSIGNMENT'])
resultat=resultat.fillna(0)
resultat['prediction'] = resultat['prediction_x']+resultat['prediction_y']
del resultat['prediction_x']
del resultat['prediction_y']
x,y = preprocess(data.copy(), 'Téléphonie') # add the features
#model.score(xTrain, yTrain)
(xTest, souvenir)=preprocessFINAL(x,'Téléphonie')
pred=telephoniePred(x,y,xTest)
pred[pred>max(y)*1.05]=max(y)*1.05
pred[pred<0]=0
pred = np.round(pred).astype(int) # cast to int
souvenir['prediction'] = pred
resultat=pd.merge(resultat, souvenir, how='left',on=['DATE', 'ASS_ASSIGNMENT'])
resultat=resultat.fillna(0)
resultat['prediction'] = resultat['prediction_x']+resultat['prediction_y']
del resultat['prediction_x']
del resultat['prediction_y']
resultat['prediction']=resultat['prediction'].astype(int)
resultat.to_csv("pouranalyse.txt", sep="\t", index =False, encoding='utf-8')
return resultat
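# Minimal usage sketch (assumes the data/trainPure.csv and data/submission.txt
# files referenced above are present alongside the fonction_py package):
#   resultat = faireTout()
#   print(resultat.head())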
def faireListModel():
return [('CAT', linear_model.LinearRegression()),
('CMS', RandomForestRegressor(bootstrap=False, criterion='mse', max_depth=5,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=10, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Crises',linear_model.LinearRegression()),
('Domicile', RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=30,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=90, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=30,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion - Accueil Telephonique',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=20,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=70, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion Assurances',RandomForestRegressor(bootstrap=False, criterion='mse', max_depth=20,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=20, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion Clients', RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=10,
max_features=90, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=50, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion DZ', RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=5,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion Relation Clienteles',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=10,
max_features=90, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=110, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Gestion Renault', RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=30,
max_features=50, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Japon',RandomForestRegressor(bootstrap=False, criterion='mse', max_depth=10,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Manager',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=10,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Mécanicien',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=20,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Médical',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=30,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Nuit', RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=20,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Prestataires',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=20,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('RENAULT',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=80,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('RTC',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=20,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Regulation Medicale',linear_model.LinearRegression()),
('SAP',RandomForestRegressor(bootstrap=False, criterion='mse', max_depth=20,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Services',RandomForestRegressor(bootstrap=False, criterion='mse', max_depth=30,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Tech. Axa',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=20,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Tech. Inter',RandomForestRegressor(bootstrap=False, criterion='mse', max_depth=30,
max_features=30, max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=30, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Tech. Total',RandomForestRegressor(bootstrap=True, criterion='mse', max_depth=70,
max_features='auto', max_leaf_nodes=None, min_samples_leaf=1,
min_samples_split=2, min_weight_fraction_leaf=0.0,
n_estimators=100, n_jobs=1, oob_score=False, random_state=None,
verbose=0, warm_start=False)),
('Téléphonie',GradientBoostingRegressor(loss='huber', alpha=0.9,n_estimators=100, max_depth=3,learning_rate=.1, min_samples_leaf=9,min_samples_split=9) )]
| mit |

pkats15/hdt_analyzer | django_test/django_venv/Lib/site-packages/setuptools/depends.py | 462 | 6370 |
import sys
import imp
import marshal
from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
from distutils.version import StrictVersion
from setuptools import compat
__all__ = [
'Require', 'find_module', 'get_module_constant', 'extract_constant'
]
class Require:
"""A prerequisite to building or installing a distribution"""
def __init__(self, name, requested_version, module, homepage='',
attribute=None, format=None):
if format is None and requested_version is not None:
format = StrictVersion
if format is not None:
requested_version = format(requested_version)
if attribute is None:
attribute = '__version__'
self.__dict__.update(locals())
del self.self
def full_name(self):
"""Return full package/distribution name, w/version"""
if self.requested_version is not None:
return '%s-%s' % (self.name,self.requested_version)
return self.name
def version_ok(self, version):
"""Is 'version' sufficiently up-to-date?"""
return self.attribute is None or self.format is None or \
str(version) != "unknown" and version >= self.requested_version
def get_version(self, paths=None, default="unknown"):
"""Get version number of installed module, 'None', or 'default'
Search 'paths' for module. If not found, return 'None'. If found,
return the extracted version attribute, or 'default' if no version
attribute was specified, or the value cannot be determined without
importing the module. The version is formatted according to the
requirement's version format (if any), unless it is 'None' or the
supplied 'default'.
"""
if self.attribute is None:
try:
f,p,i = find_module(self.module,paths)
if f: f.close()
return default
except ImportError:
return None
v = get_module_constant(self.module, self.attribute, default, paths)
if v is not None and v is not default and self.format is not None:
return self.format(v)
return v
def is_present(self, paths=None):
"""Return true if dependency is present on 'paths'"""
return self.get_version(paths) is not None
def is_current(self, paths=None):
"""Return true if dependency is present and up-to-date on 'paths'"""
version = self.get_version(paths)
if version is None:
return False
return self.version_ok(version)
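# Usage sketch, following the classic setuptools example (Tkinter publishes
# its version through the 'TkVersion' attribute):
#   req = Require('Tkinter', '1.0', 'Tkinter', attribute='TkVersion')
#   req.is_present() and req.is_current()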
def _iter_code(code):
"""Yield '(op,arg)' pair for each operation in code object 'code'"""
from array import array
from dis import HAVE_ARGUMENT, EXTENDED_ARG
bytes = array('b',code.co_code)
eof = len(code.co_code)
ptr = 0
extended_arg = 0
while ptr<eof:
op = bytes[ptr]
if op>=HAVE_ARGUMENT:
arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
ptr += 3
if op==EXTENDED_ARG:
extended_arg = arg * compat.long_type(65536)
continue
else:
arg = None
ptr += 1
yield op,arg
def find_module(module, paths=None):
"""Just like 'imp.find_module()', but with package support"""
parts = module.split('.')
while parts:
part = parts.pop(0)
f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)
if kind==PKG_DIRECTORY:
parts = parts or ['__init__']
paths = [path]
elif parts:
raise ImportError("Can't find %r in %s" % (parts,module))
return info
def get_module_constant(module, symbol, default=-1, paths=None):
"""Find 'module' by searching 'paths', and extract 'symbol'
Return 'None' if 'module' does not exist on 'paths', or it does not define
'symbol'. If the module defines 'symbol' as a constant, return the
constant. Otherwise, return 'default'."""
try:
f, path, (suffix, mode, kind) = find_module(module, paths)
except ImportError:
# Module doesn't exist
return None
try:
if kind==PY_COMPILED:
f.read(8) # skip magic & date
code = marshal.load(f)
elif kind==PY_FROZEN:
code = imp.get_frozen_object(module)
elif kind==PY_SOURCE:
code = compile(f.read(), path, 'exec')
else:
# Not something we can parse; we'll have to import it. :(
if module not in sys.modules:
imp.load_module(module, f, path, (suffix, mode, kind))
return getattr(sys.modules[module], symbol, None)
finally:
if f:
f.close()
return extract_constant(code, symbol, default)
def extract_constant(code, symbol, default=-1):
"""Extract the constant value of 'symbol' from 'code'
If the name 'symbol' is bound to a constant value by the Python code
object 'code', return that value. If 'symbol' is bound to an expression,
return 'default'. Otherwise, return 'None'.
Return value is based on the first assignment to 'symbol'. 'symbol' must
be a global, or at least a non-"fast" local in the code block. That is,
only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
must be present in 'code.co_names'.
"""
if symbol not in code.co_names:
# name's not there, can't possibly be an assignment
return None
name_idx = list(code.co_names).index(symbol)
STORE_NAME = 90
STORE_GLOBAL = 97
LOAD_CONST = 100
const = default
for op, arg in _iter_code(code):
if op==LOAD_CONST:
const = code.co_consts[arg]
elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
return const
else:
const = default
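# Example sketch (hypothetical module text; relies on the bytecode layout this
# module targets, where a STORE_NAME follows the LOAD_CONST of the value):
#   code = compile("__version__ = '1.2'", '<sketch>', 'exec')
#   extract_constant(code, '__version__')  # -> '1.2'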
def _update_globals():
"""
Patch the globals to remove the objects not available on some platforms.
XXX it'd be better to test assertions about bytecode instead.
"""
if not sys.platform.startswith('java') and sys.platform != 'cli':
return
incompatible = 'extract_constant', 'get_module_constant'
for name in incompatible:
del globals()[name]
__all__.remove(name)
_update_globals()
| mit |

gautam1858/tensorflow | tensorflow/python/keras/layers/pooling_test.py | 10 | 7660 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for pooling layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python import keras
from tensorflow.python.eager import context
from tensorflow.python.framework import test_util as tf_test_util
from tensorflow.python.keras import testing_utils
from tensorflow.python.platform import test
class GlobalPoolingTest(test.TestCase):
@tf_test_util.run_in_graph_and_eager_modes
def test_globalpooling_1d(self):
testing_utils.layer_test(keras.layers.pooling.GlobalMaxPooling1D,
input_shape=(3, 4, 5))
testing_utils.layer_test(keras.layers.pooling.GlobalMaxPooling1D,
kwargs={'data_format': 'channels_first'},
input_shape=(3, 4, 5))
testing_utils.layer_test(
keras.layers.pooling.GlobalAveragePooling1D, input_shape=(3, 4, 5))
testing_utils.layer_test(keras.layers.pooling.GlobalAveragePooling1D,
kwargs={'data_format': 'channels_first'},
input_shape=(3, 4, 5))
@tf_test_util.run_in_graph_and_eager_modes
def test_globalpooling_1d_masking_support(self):
model = keras.Sequential()
model.add(keras.layers.Masking(mask_value=0., input_shape=(3, 4)))
model.add(keras.layers.GlobalAveragePooling1D())
model.compile(loss='mae', optimizer='rmsprop')
model_input = np.random.random((2, 3, 4))
model_input[0, 1:, :] = 0
output = model.predict(model_input)
self.assertAllClose(output[0], model_input[0, 0, :])
@tf_test_util.run_in_graph_and_eager_modes
def test_globalpooling_2d(self):
testing_utils.layer_test(
keras.layers.pooling.GlobalMaxPooling2D,
kwargs={'data_format': 'channels_first'},
input_shape=(3, 4, 5, 6))
testing_utils.layer_test(
keras.layers.pooling.GlobalMaxPooling2D,
kwargs={'data_format': 'channels_last'},
input_shape=(3, 5, 6, 4))
testing_utils.layer_test(
keras.layers.pooling.GlobalAveragePooling2D,
kwargs={'data_format': 'channels_first'},
input_shape=(3, 4, 5, 6))
testing_utils.layer_test(
keras.layers.pooling.GlobalAveragePooling2D,
kwargs={'data_format': 'channels_last'},
input_shape=(3, 5, 6, 4))
@tf_test_util.run_in_graph_and_eager_modes
def test_globalpooling_3d(self):
testing_utils.layer_test(
keras.layers.pooling.GlobalMaxPooling3D,
kwargs={'data_format': 'channels_first'},
input_shape=(3, 4, 3, 4, 3))
testing_utils.layer_test(
keras.layers.pooling.GlobalMaxPooling3D,
kwargs={'data_format': 'channels_last'},
input_shape=(3, 4, 3, 4, 3))
testing_utils.layer_test(
keras.layers.pooling.GlobalAveragePooling3D,
kwargs={'data_format': 'channels_first'},
input_shape=(3, 4, 3, 4, 3))
testing_utils.layer_test(
keras.layers.pooling.GlobalAveragePooling3D,
kwargs={'data_format': 'channels_last'},
input_shape=(3, 4, 3, 4, 3))
class Pooling2DTest(test.TestCase):
@tf_test_util.run_in_graph_and_eager_modes
def test_maxpooling_2d(self):
pool_size = (3, 3)
for strides in [(1, 1), (2, 2)]:
testing_utils.layer_test(
keras.layers.MaxPooling2D,
kwargs={
'strides': strides,
'padding': 'valid',
'pool_size': pool_size
},
input_shape=(3, 5, 6, 4))
@tf_test_util.run_in_graph_and_eager_modes
def test_averagepooling_2d(self):
testing_utils.layer_test(
keras.layers.AveragePooling2D,
kwargs={'strides': (2, 2),
'padding': 'same',
'pool_size': (2, 2)},
input_shape=(3, 5, 6, 4))
testing_utils.layer_test(
keras.layers.AveragePooling2D,
kwargs={'strides': (2, 2),
'padding': 'valid',
'pool_size': (3, 3)},
input_shape=(3, 5, 6, 4))
# This part of the test can only run on GPU but doesn't appear
# to be properly assigned to a GPU when running in eager mode.
if not context.executing_eagerly():
# Only runs on GPU with CUDA, channels_first is not supported on CPU.
# TODO(b/62340061): Support channels_first on CPU.
if test.is_gpu_available(cuda_only=True):
testing_utils.layer_test(
keras.layers.AveragePooling2D,
kwargs={
'strides': (1, 1),
'padding': 'valid',
'pool_size': (2, 2),
'data_format': 'channels_first'
},
input_shape=(3, 4, 5, 6))
class Pooling3DTest(test.TestCase):
@tf_test_util.run_in_graph_and_eager_modes
def test_maxpooling_3d(self):
pool_size = (3, 3, 3)
testing_utils.layer_test(
keras.layers.MaxPooling3D,
kwargs={'strides': 2,
'padding': 'valid',
'pool_size': pool_size},
input_shape=(3, 11, 12, 10, 4))
testing_utils.layer_test(
keras.layers.MaxPooling3D,
kwargs={
'strides': 3,
'padding': 'valid',
'data_format': 'channels_first',
'pool_size': pool_size
},
input_shape=(3, 4, 11, 12, 10))
@tf_test_util.run_in_graph_and_eager_modes
def test_averagepooling_3d(self):
pool_size = (3, 3, 3)
testing_utils.layer_test(
keras.layers.AveragePooling3D,
kwargs={'strides': 2,
'padding': 'valid',
'pool_size': pool_size},
input_shape=(3, 11, 12, 10, 4))
testing_utils.layer_test(
keras.layers.AveragePooling3D,
kwargs={
'strides': 3,
'padding': 'valid',
'data_format': 'channels_first',
'pool_size': pool_size
},
input_shape=(3, 4, 11, 12, 10))
class Pooling1DTest(test.TestCase):
@tf_test_util.run_in_graph_and_eager_modes
def test_maxpooling_1d(self):
for padding in ['valid', 'same']:
for stride in [1, 2]:
testing_utils.layer_test(
keras.layers.MaxPooling1D,
kwargs={'strides': stride,
'padding': padding},
input_shape=(3, 5, 4))
testing_utils.layer_test(
keras.layers.MaxPooling1D,
kwargs={'data_format': 'channels_first'},
input_shape=(3, 2, 6))
@tf_test_util.run_in_graph_and_eager_modes
def test_averagepooling_1d(self):
for padding in ['valid', 'same']:
for stride in [1, 2]:
testing_utils.layer_test(
keras.layers.AveragePooling1D,
kwargs={'strides': stride,
'padding': padding},
input_shape=(3, 5, 4))
testing_utils.layer_test(
keras.layers.AveragePooling1D,
kwargs={'data_format': 'channels_first'},
input_shape=(3, 2, 6))
if __name__ == '__main__':
test.main()
| apache-2.0 |

superstack/nova | nova/tests/__init__.py | 2 | 2501 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`nova.tests` -- Nova Unittests
=====================================================
.. automodule:: nova.tests
:platform: Unix
.. moduleauthor:: Jesse Andrews <[email protected]>
.. moduleauthor:: Devin Carlen <[email protected]>
.. moduleauthor:: Vishvananda Ishaya <[email protected]>
.. moduleauthor:: Joshua McKenty <[email protected]>
.. moduleauthor:: Manish Singh <[email protected]>
.. moduleauthor:: Andy Smith <[email protected]>
"""
# See http://code.google.com/p/python-nose/issues/detail?id=373
# The code below enables nosetests to work with i18n _() blocks
import __builtin__
setattr(__builtin__, '_', lambda x: x)
def setup():
import os
import shutil
from nova import context
from nova import flags
from nova.db import migration
from nova.network import manager as network_manager
from nova.tests import fake_flags
FLAGS = flags.FLAGS
testdb = os.path.join(FLAGS.state_path, FLAGS.sqlite_db)
if os.path.exists(testdb):
os.unlink(testdb)
migration.db_sync()
ctxt = context.get_admin_context()
network_manager.VlanManager().create_networks(ctxt,
FLAGS.fixed_range,
FLAGS.num_networks,
FLAGS.network_size,
FLAGS.fixed_range_v6,
FLAGS.vlan_start,
FLAGS.vpn_start,
)
cleandb = os.path.join(FLAGS.state_path, FLAGS.sqlite_clean_db)
shutil.copyfile(testdb, cleandb)
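# Note: nose picks up this module-level setup() as a package fixture, so the
# clean test database copied above is rebuilt once before the suite runs.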
| apache-2.0 |

adrienbrault/home-assistant | homeassistant/components/zwave_js/config_flow.py | 3 | 15849 |
"""Config flow for Z-Wave JS integration."""
from __future__ import annotations
import asyncio
import logging
from typing import Any, cast
import aiohttp
from async_timeout import timeout
import voluptuous as vol
from zwave_js_server.version import VersionInfo, get_server_version
from homeassistant import config_entries, exceptions
from homeassistant.components.hassio import is_hassio
from homeassistant.const import CONF_URL
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .addon import AddonError, AddonManager, get_addon_manager
from .const import (
CONF_ADDON_DEVICE,
CONF_ADDON_NETWORK_KEY,
CONF_INTEGRATION_CREATED_ADDON,
CONF_NETWORK_KEY,
CONF_USB_PATH,
CONF_USE_ADDON,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_URL = "ws://localhost:3000"
TITLE = "Z-Wave JS"
ADDON_SETUP_TIMEOUT = 5
ADDON_SETUP_TIMEOUT_ROUNDS = 4
SERVER_VERSION_TIMEOUT = 10
ON_SUPERVISOR_SCHEMA = vol.Schema({vol.Optional(CONF_USE_ADDON, default=True): bool})
STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_URL, default=DEFAULT_URL): str})
async def validate_input(hass: HomeAssistant, user_input: dict) -> VersionInfo:
"""Validate if the user input allows us to connect."""
ws_address = user_input[CONF_URL]
if not ws_address.startswith(("ws://", "wss://")):
raise InvalidInput("invalid_ws_url")
try:
return await async_get_version_info(hass, ws_address)
except CannotConnect as err:
raise InvalidInput("cannot_connect") from err
async def async_get_version_info(hass: HomeAssistant, ws_address: str) -> VersionInfo:
"""Return Z-Wave JS version info."""
try:
async with timeout(SERVER_VERSION_TIMEOUT):
version_info: VersionInfo = await get_server_version(
ws_address, async_get_clientsession(hass)
)
except (asyncio.TimeoutError, aiohttp.ClientError) as err:
# We don't want to spam the log if the add-on isn't started
# or takes a long time to start.
_LOGGER.debug("Failed to connect to Z-Wave JS server: %s", err)
raise CannotConnect from err
return version_info
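# Usage sketch (hypothetical address; in practice these helpers are called
# from the flow steps below):
#   version_info = await validate_input(hass, {CONF_URL: "ws://localhost:3000"})
#   _LOGGER.debug("Found Z-Wave JS server with home ID %s", version_info.home_id)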
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Z-Wave JS."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
def __init__(self) -> None:
"""Set up flow instance."""
self.network_key: str | None = None
self.usb_path: str | None = None
self.use_addon = False
self.ws_address: str | None = None
# If we install the add-on we should uninstall it on entry remove.
self.integration_created_addon = False
self.install_task: asyncio.Task | None = None
self.start_task: asyncio.Task | None = None
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> dict[str, Any]:
"""Handle the initial step."""
if is_hassio(self.hass): # type: ignore # no-untyped-call
return await self.async_step_on_supervisor()
return await self.async_step_manual()
async def async_step_manual(
self, user_input: dict[str, Any] | None = None
) -> dict[str, Any]:
"""Handle a manual configuration."""
if user_input is None:
return self.async_show_form(
step_id="manual", data_schema=STEP_USER_DATA_SCHEMA
)
errors = {}
try:
version_info = await validate_input(self.hass, user_input)
except InvalidInput as err:
errors["base"] = err.error
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(
version_info.home_id, raise_on_progress=False
)
# Make sure we disable any add-on handling
# if the controller is reconfigured in a manual step.
self._abort_if_unique_id_configured(
updates={
**user_input,
CONF_USE_ADDON: False,
CONF_INTEGRATION_CREATED_ADDON: False,
}
)
self.ws_address = user_input[CONF_URL]
return self._async_create_entry_from_vars()
return self.async_show_form(
step_id="manual", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_hassio( # type: ignore # override
self, discovery_info: dict[str, Any]
) -> dict[str, Any]:
"""Receive configuration from add-on discovery info.
This flow is triggered by the Z-Wave JS add-on.
"""
self.ws_address = f"ws://{discovery_info['host']}:{discovery_info['port']}"
try:
version_info = await async_get_version_info(self.hass, self.ws_address)
except CannotConnect:
return self.async_abort(reason="cannot_connect")
await self.async_set_unique_id(version_info.home_id)
self._abort_if_unique_id_configured(updates={CONF_URL: self.ws_address})
return await self.async_step_hassio_confirm()
async def async_step_hassio_confirm(
self, user_input: dict[str, Any] | None = None
) -> dict[str, Any]:
"""Confirm the add-on discovery."""
if user_input is not None:
return await self.async_step_on_supervisor(
user_input={CONF_USE_ADDON: True}
)
return self.async_show_form(step_id="hassio_confirm")
@callback
def _async_create_entry_from_vars(self) -> dict[str, Any]:
"""Return a config entry for the flow."""
return self.async_create_entry(
title=TITLE,
data={
CONF_URL: self.ws_address,
CONF_USB_PATH: self.usb_path,
CONF_NETWORK_KEY: self.network_key,
CONF_USE_ADDON: self.use_addon,
CONF_INTEGRATION_CREATED_ADDON: self.integration_created_addon,
},
)
async def async_step_on_supervisor(
self, user_input: dict[str, Any] | None = None
) -> dict[str, Any]:
"""Handle logic when on Supervisor host."""
if user_input is None:
return self.async_show_form(
step_id="on_supervisor", data_schema=ON_SUPERVISOR_SCHEMA
)
if not user_input[CONF_USE_ADDON]:
return await self.async_step_manual()
self.use_addon = True
if await self._async_is_addon_running():
addon_config = await self._async_get_addon_config()
self.usb_path = addon_config[CONF_ADDON_DEVICE]
self.network_key = addon_config.get(CONF_ADDON_NETWORK_KEY, "")
return await self.async_step_finish_addon_setup()
if await self._async_is_addon_installed():
return await self.async_step_configure_addon()
return await self.async_step_install_addon()
async def async_step_install_addon(
self, user_input: dict[str, Any] | None = None
) -> dict[str, Any]:
"""Install Z-Wave JS add-on."""
if not self.install_task:
self.install_task = self.hass.async_create_task(self._async_install_addon())
return self.async_show_progress(
step_id="install_addon", progress_action="install_addon"
)
try:
await self.install_task
except AddonError as err:
_LOGGER.error("Failed to install Z-Wave JS add-on: %s", err)
return self.async_show_progress_done(next_step_id="install_failed")
self.integration_created_addon = True
return self.async_show_progress_done(next_step_id="configure_addon")
async def async_step_install_failed(
self, user_input: dict[str, Any] | None = None
) -> dict[str, Any]:
"""Add-on installation failed."""
return self.async_abort(reason="addon_install_failed")
async def async_step_configure_addon(
self, user_input: dict[str, Any] | None = None
) -> dict[str, Any]:
"""Ask for config for Z-Wave JS add-on."""
addon_config = await self._async_get_addon_config()
errors: dict[str, str] = {}
if user_input is not None:
self.network_key = user_input[CONF_NETWORK_KEY]
self.usb_path = user_input[CONF_USB_PATH]
new_addon_config = {
CONF_ADDON_DEVICE: self.usb_path,
CONF_ADDON_NETWORK_KEY: self.network_key,
}
if new_addon_config != addon_config:
await self._async_set_addon_config(new_addon_config)
return await self.async_step_start_addon()
usb_path = addon_config.get(CONF_ADDON_DEVICE, self.usb_path or "")
network_key = addon_config.get(CONF_ADDON_NETWORK_KEY, self.network_key or "")
data_schema = vol.Schema(
{
vol.Required(CONF_USB_PATH, default=usb_path): str,
vol.Optional(CONF_NETWORK_KEY, default=network_key): str,
}
)
return self.async_show_form(
step_id="configure_addon", data_schema=data_schema, errors=errors
)
async def async_step_start_addon(
self, user_input: dict[str, Any] | None = None
) -> dict[str, Any]:
"""Start Z-Wave JS add-on."""
if not self.start_task:
self.start_task = self.hass.async_create_task(self._async_start_addon())
return self.async_show_progress(
step_id="start_addon", progress_action="start_addon"
)
try:
await self.start_task
except (CannotConnect, AddonError) as err:
_LOGGER.error("Failed to start Z-Wave JS add-on: %s", err)
return self.async_show_progress_done(next_step_id="start_failed")
return self.async_show_progress_done(next_step_id="finish_addon_setup")
async def async_step_start_failed(
self, user_input: dict[str, Any] | None = None
) -> dict[str, Any]:
"""Add-on start failed."""
return self.async_abort(reason="addon_start_failed")
async def _async_start_addon(self) -> None:
"""Start the Z-Wave JS add-on."""
addon_manager: AddonManager = get_addon_manager(self.hass)
try:
await addon_manager.async_schedule_start_addon()
# Sleep some seconds to let the add-on start properly before connecting.
for _ in range(ADDON_SETUP_TIMEOUT_ROUNDS):
await asyncio.sleep(ADDON_SETUP_TIMEOUT)
try:
if not self.ws_address:
discovery_info = await self._async_get_addon_discovery_info()
self.ws_address = (
f"ws://{discovery_info['host']}:{discovery_info['port']}"
)
await async_get_version_info(self.hass, self.ws_address)
except (AbortFlow, CannotConnect) as err:
_LOGGER.debug(
"Add-on not ready yet, waiting %s seconds: %s",
ADDON_SETUP_TIMEOUT,
err,
)
else:
break
else:
raise CannotConnect("Failed to start add-on: timeout")
finally:
# Continue the flow after show progress when the task is done.
self.hass.async_create_task(
self.hass.config_entries.flow.async_configure(flow_id=self.flow_id)
)
async def async_step_finish_addon_setup(
self, user_input: dict[str, Any] | None = None
) -> dict[str, Any]:
"""Prepare info needed to complete the config entry.
Get add-on discovery info and server version info.
Set unique id and abort if already configured.
"""
if not self.ws_address:
discovery_info = await self._async_get_addon_discovery_info()
self.ws_address = f"ws://{discovery_info['host']}:{discovery_info['port']}"
if not self.unique_id:
try:
version_info = await async_get_version_info(self.hass, self.ws_address)
except CannotConnect as err:
raise AbortFlow("cannot_connect") from err
await self.async_set_unique_id(
version_info.home_id, raise_on_progress=False
)
self._abort_if_unique_id_configured(
updates={
CONF_URL: self.ws_address,
CONF_USB_PATH: self.usb_path,
CONF_NETWORK_KEY: self.network_key,
}
)
return self._async_create_entry_from_vars()
async def _async_get_addon_info(self) -> dict:
"""Return and cache Z-Wave JS add-on info."""
addon_manager: AddonManager = get_addon_manager(self.hass)
try:
addon_info: dict = await addon_manager.async_get_addon_info()
except AddonError as err:
_LOGGER.error("Failed to get Z-Wave JS add-on info: %s", err)
raise AbortFlow("addon_info_failed") from err
return addon_info
async def _async_is_addon_running(self) -> bool:
"""Return True if Z-Wave JS add-on is running."""
addon_info = await self._async_get_addon_info()
return bool(addon_info["state"] == "started")
async def _async_is_addon_installed(self) -> bool:
"""Return True if Z-Wave JS add-on is installed."""
addon_info = await self._async_get_addon_info()
return addon_info["version"] is not None
async def _async_get_addon_config(self) -> dict:
"""Get Z-Wave JS add-on config."""
addon_info = await self._async_get_addon_info()
return cast(dict, addon_info["options"])
async def _async_set_addon_config(self, config: dict) -> None:
"""Set Z-Wave JS add-on config."""
options = {"options": config}
addon_manager: AddonManager = get_addon_manager(self.hass)
try:
await addon_manager.async_set_addon_options(options)
except AddonError as err:
_LOGGER.error("Failed to set Z-Wave JS add-on config: %s", err)
raise AbortFlow("addon_set_config_failed") from err
async def _async_install_addon(self) -> None:
"""Install the Z-Wave JS add-on."""
addon_manager: AddonManager = get_addon_manager(self.hass)
try:
await addon_manager.async_schedule_install_addon()
finally:
# Continue the flow after show progress when the task is done.
self.hass.async_create_task(
self.hass.config_entries.flow.async_configure(flow_id=self.flow_id)
)
async def _async_get_addon_discovery_info(self) -> dict:
"""Return add-on discovery info."""
addon_manager: AddonManager = get_addon_manager(self.hass)
try:
discovery_info_config = await addon_manager.async_get_addon_discovery_info()
except AddonError as err:
_LOGGER.error("Failed to get Z-Wave JS add-on discovery info: %s", err)
raise AbortFlow("addon_get_discovery_info_failed") from err
return discovery_info_config
class CannotConnect(exceptions.HomeAssistantError):
"""Indicate connection error."""
class InvalidInput(exceptions.HomeAssistantError):
"""Error to indicate input data is invalid."""
def __init__(self, error: str) -> None:
"""Initialize error."""
super().__init__()
self.error = error
| mit |

vnc-biz/pyzimbra | pyzimbra/soap_auth.py | 2 | 6473 |
# -*- coding: utf-8 -*-
"""
################################################################################
# Copyright (c) 2010, Ilgar Mashayev
#
# E-mail: [email protected]
# Website: http://github.com/ilgarm/pyzimbra
################################################################################
# This file is part of pyzimbra.
#
# Pyzimbra is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyzimbra is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyzimbra. If not, see <http://www.gnu.org/licenses/>.
################################################################################
Soap related methods and classes.
@author: ilgar
"""
from pyzimbra import zconstant, sconstant, util
from pyzimbra.auth import AuthException, AuthToken, Authenticator
from pyzimbra.soap import SoapException
from time import time
import SOAPpy
import hashlib
import hmac
import logging
class SoapAuthenticator(Authenticator):
"""
Soap authenticator.
"""
# --------------------------------------------------------------- properties
# -------------------------------------------------------------------- bound
def __init__(self):
Authenticator.__init__(self)
self.log = logging.getLogger(__name__)
# ------------------------------------------------------------------ unbound
def authenticate_admin(self, transport, account_name, password):
"""
Authenticates administrator using username and password.
"""
Authenticator.authenticate_admin(self, transport, account_name, password)
auth_token = AuthToken()
auth_token.account_name = account_name
params = {sconstant.E_NAME: account_name,
sconstant.E_PASSWORD: password}
self.log.debug('Authenticating admin %s' % account_name)
try:
res = transport.invoke(zconstant.NS_ZIMBRA_ADMIN_URL,
sconstant.AuthRequest,
params,
auth_token)
except SoapException as exc:
raise AuthException(unicode(exc), exc)
auth_token.token = res.authToken
auth_token.session_id = res.sessionId
self.log.info('Authenticated admin %s, session id %s'
% (account_name, auth_token.session_id))
return auth_token
def authenticate(self, transport, account_name, password=None):
"""
Authenticates account using soap method.
"""
Authenticator.authenticate(self, transport, account_name, password)
if password == None:
return self.pre_auth(transport, account_name)
else:
return self.auth(transport, account_name, password)
def auth(self, transport, account_name, password):
"""
Authenticates using username and password.
"""
auth_token = AuthToken()
auth_token.account_name = account_name
attrs = {sconstant.A_BY: sconstant.V_NAME}
account = SOAPpy.Types.stringType(data=account_name, attrs=attrs)
params = {sconstant.E_ACCOUNT: account,
sconstant.E_PASSWORD: password}
self.log.debug('Authenticating account %s' % account_name)
try:
res = transport.invoke(zconstant.NS_ZIMBRA_ACC_URL,
sconstant.AuthRequest,
params,
auth_token)
except SoapException as exc:
raise AuthException(unicode(exc), exc)
if type(res) is tuple:
auth_token.token = res[0].authToken
else:
auth_token.token = res.authToken
if hasattr(res, 'sessionId'):
auth_token.session_id = res.sessionId
self.log.info('Authenticated account %s, session id %s'
% (account_name, auth_token.session_id))
return auth_token
def pre_auth(self, transport, account_name):
"""
Authenticates using username and domain key.
"""
auth_token = AuthToken()
auth_token.account_name = account_name
domain = util.get_domain(account_name)
if domain == None:
raise AuthException('Invalid auth token account')
if domain in self.domains:
domain_key = self.domains[domain]
else:
domain_key = None
if domain_key == None:
raise AuthException('Invalid domain key for domain %s' % domain)
self.log.debug('Initialized domain key for account %s'
% account_name)
expires = 0
timestamp = int(time() * 1000)
pak = hmac.new(domain_key, '%s|%s|%s|%s' %
(account_name, sconstant.E_NAME, expires, timestamp),
hashlib.sha1).hexdigest()
attrs = {sconstant.A_BY: sconstant.V_NAME}
account = SOAPpy.Types.stringType(data=account_name, attrs=attrs)
attrs = {sconstant.A_TIMESTAMP: timestamp, sconstant.A_EXPIRES: expires}
preauth = SOAPpy.Types.stringType(data=pak,
name=sconstant.E_PREAUTH,
attrs=attrs)
params = {sconstant.E_ACCOUNT: account,
sconstant.E_PREAUTH: preauth}
self.log.debug('Authenticating account %s using domain key'
% account_name)
try:
res = transport.invoke(zconstant.NS_ZIMBRA_ACC_URL,
sconstant.AuthRequest,
params,
auth_token)
except SoapException as exc:
raise AuthException(unicode(exc), exc)
auth_token.token = res.authToken
auth_token.session_id = res.sessionId
self.log.info('Authenticated account %s, session id %s'
% (account_name, auth_token.session_id))
return auth_token
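# Preauth digest sketch, mirroring the hmac.new() call above (hypothetical
# domain key and timestamp; assumes sconstant.E_NAME is the literal 'name',
# so the digest covers 'account|by|expires|timestamp'):
#   hmac.new('9f1a28...hypothetical-domain-key...',
#            'john.doe@domain.com|name|0|1135280708088',
#            hashlib.sha1).hexdigest()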
| lgpl-3.0 |

nonamenix/yandex-vesna-generator | yandex_vesna_generator/vesna.py | 1 | 2537 |
# -*- coding: utf-8 -*-
from lxml import etree
from slugify import slugify
class Entry(object):
def __init__(self, title="", paragraphs=[], themes=[], **kwargs):
self.title = title
self.paragraphs = paragraphs
self.themes = themes
self.header_wrapper = kwargs.get("header_wrapper", "h2")
self.paragraph_wrapper = kwargs.get("paragraph_wrapper", "p")
self.slug = slugify(title, to_lower=True)
self.description = self.paragraphs[0][0:kwargs.get("description_length", 220)]
def render_html(self):
html = self.header
html += self.body
return html
@property
def header(self):
return "<%(wrapper)s>%(title)s</%(wrapper)s> \n" % {
'title': self.title,
'wrapper': self.header_wrapper
}
@property
def body(self):
return "".join(["<%(wrapper)s>%(text)s</$(wrapper)s> \n" % {
"text": p,
"wrapper": self.paragraph_wrapper
} for p in self.paragraphs])
def __repr__(self):
return '<Entry theme="%s" id="%s">' % (", ".join(self.themes), hex(id(self)))
def __getitem__(self, field):
return self.__dict__[field]
class VesnaGenerator(object):
""" Class for generate crazy text on your site """
# Themes
AVAILABLE_THEMES = [
'astronomy', 'geology', 'gyroscope', 'literature', 'marketing', 'mathematics', 'music', 'polit',
'agrobiologia', 'law', 'psychology', 'geography', 'physics', 'philosophy', 'chemistry']
def __init__(self, themes=[], entry_options={}):
self.themes = [theme for theme in themes if theme in self.AVAILABLE_THEMES] or self.AVAILABLE_THEMES
self.entry_options = entry_options
# Generate yandex vesna url
self.base_url = "http://referats.yandex.ru/referats/"
self.url = self.base_url + "?t=" + "+".join(self.themes)
self.entries = []
def generate_entry(self):
self.parser = etree.HTMLParser(recover=True)
self.doc = etree.parse(self.url, self.parser)
title = self.doc.xpath('/html/body/div[2]/div[1]/div[1]/div/div[2]/div[1]/strong')[0].text
title = title.encode('utf-8').replace('Тема: «', '').replace('»', '').decode('utf-8')
paragraphs = self.doc.xpath('/html/body/div[2]/div[1]/div[1]/div/div[2]/div[1]/p')
return Entry(
title=title,
paragraphs=[p.text for p in paragraphs],
themes=self.themes,
**self.entry_options
)
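# Usage sketch (performs a live request to referats.yandex.ru):
#   generator = VesnaGenerator(themes=['astronomy', 'music'])
#   entry = generator.generate_entry()
#   print(entry.render_html())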
| apache-2.0 |

raphael0202/spaCy | spacy/language.py | 2 | 15085 |
# coding: utf8
from __future__ import absolute_import, unicode_literals
from contextlib import contextmanager
import shutil
from .tokenizer import Tokenizer
from .vocab import Vocab
from .tagger import Tagger
from .matcher import Matcher
from .lemmatizer import Lemmatizer
from .train import Trainer
from .syntax.parser import get_templates
from .syntax.nonproj import PseudoProjectivity
from .pipeline import DependencyParser, EntityRecognizer
from .syntax.arc_eager import ArcEager
from .syntax.ner import BiluoPushDown
from .compat import json_dumps
from .attrs import IS_STOP
from . import attrs
from . import orth
from . import util
from . import language_data
class BaseDefaults(object):
@classmethod
def create_lemmatizer(cls, nlp=None):
return Lemmatizer(cls.lemma_index, cls.lemma_exc, cls.lemma_rules)
@classmethod
def create_vocab(cls, nlp=None):
lemmatizer = cls.create_lemmatizer(nlp)
if nlp is None or nlp.path is None:
lex_attr_getters = dict(cls.lex_attr_getters)
# This is very messy, but it's the minimal working fix to Issue #639.
# This defaults stuff needs to be refactored (again)
lex_attr_getters[IS_STOP] = lambda string: string.lower() in cls.stop_words
vocab = Vocab(lex_attr_getters=lex_attr_getters, tag_map=cls.tag_map,
lemmatizer=lemmatizer)
else:
vocab = Vocab.load(nlp.path, lex_attr_getters=cls.lex_attr_getters,
tag_map=cls.tag_map, lemmatizer=lemmatizer)
for tag_str, exc in cls.morph_rules.items():
for orth_str, attrs in exc.items():
vocab.morphology.add_special_case(tag_str, orth_str, attrs)
return vocab
@classmethod
def add_vectors(cls, nlp=None):
if nlp is None or nlp.path is None:
return False
else:
vec_path = nlp.path / 'vocab' / 'vec.bin'
if vec_path.exists():
return lambda vocab: vocab.load_vectors_from_bin_loc(vec_path)
@classmethod
def create_tokenizer(cls, nlp=None):
rules = cls.tokenizer_exceptions
token_match = cls.token_match
if cls.prefixes:
prefix_search = util.compile_prefix_regex(cls.prefixes).search
else:
prefix_search = None
if cls.suffixes:
suffix_search = util.compile_suffix_regex(cls.suffixes).search
else:
suffix_search = None
if cls.infixes:
infix_finditer = util.compile_infix_regex(cls.infixes).finditer
else:
infix_finditer = None
vocab = nlp.vocab if nlp is not None else cls.create_vocab(nlp)
return Tokenizer(vocab, rules=rules,
prefix_search=prefix_search, suffix_search=suffix_search,
infix_finditer=infix_finditer, token_match=token_match)
@classmethod
def create_tagger(cls, nlp=None):
if nlp is None:
return Tagger(cls.create_vocab(), features=cls.tagger_features)
elif nlp.path is False:
return Tagger(nlp.vocab, features=cls.tagger_features)
elif nlp.path is None or not (nlp.path / 'pos').exists():
return None
else:
return Tagger.load(nlp.path / 'pos', nlp.vocab)
@classmethod
def create_parser(cls, nlp=None, **cfg):
if nlp is None:
return DependencyParser(cls.create_vocab(), features=cls.parser_features,
**cfg)
elif nlp.path is False:
return DependencyParser(nlp.vocab, features=cls.parser_features, **cfg)
elif nlp.path is None or not (nlp.path / 'deps').exists():
return None
else:
return DependencyParser.load(nlp.path / 'deps', nlp.vocab, **cfg)
@classmethod
def create_entity(cls, nlp=None, **cfg):
if nlp is None:
return EntityRecognizer(cls.create_vocab(), features=cls.entity_features, **cfg)
elif nlp.path is False:
return EntityRecognizer(nlp.vocab, features=cls.entity_features, **cfg)
elif nlp.path is None or not (nlp.path / 'ner').exists():
return None
else:
return EntityRecognizer.load(nlp.path / 'ner', nlp.vocab, **cfg)
@classmethod
def create_matcher(cls, nlp=None):
if nlp is None:
return Matcher(cls.create_vocab())
elif nlp.path is False:
return Matcher(nlp.vocab)
elif nlp.path is None or not (nlp.path / 'vocab').exists():
return None
else:
return Matcher.load(nlp.path / 'vocab', nlp.vocab)
@classmethod
def create_pipeline(self, nlp=None):
pipeline = []
if nlp is None:
return []
if nlp.tagger:
pipeline.append(nlp.tagger)
if nlp.parser:
pipeline.append(nlp.parser)
pipeline.append(PseudoProjectivity.deprojectivize)
if nlp.entity:
pipeline.append(nlp.entity)
return pipeline
token_match = language_data.TOKEN_MATCH
prefixes = tuple(language_data.TOKENIZER_PREFIXES)
suffixes = tuple(language_data.TOKENIZER_SUFFIXES)
infixes = tuple(language_data.TOKENIZER_INFIXES)
tag_map = dict(language_data.TAG_MAP)
tokenizer_exceptions = {}
parser_features = get_templates('parser')
entity_features = get_templates('ner')
tagger_features = Tagger.feature_templates # TODO -- fix this
stop_words = set()
lemma_rules = {}
lemma_exc = {}
lemma_index = {}
morph_rules = {}
lex_attr_getters = {
attrs.LOWER: lambda string: string.lower(),
attrs.NORM: lambda string: string,
attrs.SHAPE: orth.word_shape,
attrs.PREFIX: lambda string: string[0],
attrs.SUFFIX: lambda string: string[-3:],
attrs.CLUSTER: lambda string: 0,
attrs.IS_ALPHA: orth.is_alpha,
attrs.IS_ASCII: orth.is_ascii,
attrs.IS_DIGIT: lambda string: string.isdigit(),
attrs.IS_LOWER: orth.is_lower,
attrs.IS_PUNCT: orth.is_punct,
attrs.IS_SPACE: lambda string: string.isspace(),
attrs.IS_TITLE: orth.is_title,
attrs.IS_UPPER: orth.is_upper,
attrs.IS_BRACKET: orth.is_bracket,
attrs.IS_QUOTE: orth.is_quote,
attrs.IS_LEFT_PUNCT: orth.is_left_punct,
attrs.IS_RIGHT_PUNCT: orth.is_right_punct,
attrs.LIKE_URL: orth.like_url,
attrs.LIKE_NUM: orth.like_number,
attrs.LIKE_EMAIL: orth.like_email,
attrs.IS_STOP: lambda string: False,
attrs.IS_OOV: lambda string: True
}
class Language(object):
"""
A text-processing pipeline. Usually you'll load this once per process, and
pass the instance around your program.
"""
Defaults = BaseDefaults
lang = None
@classmethod
def setup_directory(cls, path, **configs):
"""
Initialise a model directory.
"""
for name, config in configs.items():
directory = path / name
if directory.exists():
shutil.rmtree(str(directory))
directory.mkdir()
with (directory / 'config.json').open('w') as file_:
data = json_dumps(config)
file_.write(data)
if not (path / 'vocab').exists():
(path / 'vocab').mkdir()
@classmethod
@contextmanager
def train(cls, path, gold_tuples, **configs):
parser_cfg = configs.get('deps', {})
if parser_cfg.get('pseudoprojective'):
# preprocess training data here before ArcEager.get_labels() is called
gold_tuples = PseudoProjectivity.preprocess_training_data(gold_tuples)
for subdir in ('deps', 'ner', 'pos'):
if subdir not in configs:
configs[subdir] = {}
if parser_cfg:
configs['deps']['actions'] = ArcEager.get_actions(gold_parses=gold_tuples)
if 'ner' in configs:
configs['ner']['actions'] = BiluoPushDown.get_actions(gold_parses=gold_tuples)
cls.setup_directory(path, **configs)
self = cls(
path=path,
vocab=False,
tokenizer=False,
tagger=False,
parser=False,
entity=False,
matcher=False,
serializer=False,
vectors=False,
pipeline=False)
self.vocab = self.Defaults.create_vocab(self)
self.tokenizer = self.Defaults.create_tokenizer(self)
self.tagger = self.Defaults.create_tagger(self)
self.parser = self.Defaults.create_parser(self)
self.entity = self.Defaults.create_entity(self)
self.pipeline = self.Defaults.create_pipeline(self)
yield Trainer(self, gold_tuples)
self.end_training()
self.save_to_directory(path)
def __init__(self, **overrides):
"""
Create or load the pipeline.
Arguments:
**overrides: Keyword arguments indicating which defaults to override.
Returns:
Language: The newly constructed object.
"""
if 'data_dir' in overrides and 'path' not in overrides:
raise ValueError("The argument 'data_dir' has been renamed to 'path'")
path = util.ensure_path(overrides.get('path', True))
if path is True:
path = util.get_data_path() / self.lang
if not path.exists() and 'path' not in overrides:
path = None
self.meta = overrides.get('meta', {})
self.path = path
self.vocab = self.Defaults.create_vocab(self) \
if 'vocab' not in overrides \
else overrides['vocab']
add_vectors = self.Defaults.add_vectors(self) \
if 'add_vectors' not in overrides \
else overrides['add_vectors']
if self.vocab and add_vectors:
add_vectors(self.vocab)
self.tokenizer = self.Defaults.create_tokenizer(self) \
if 'tokenizer' not in overrides \
else overrides['tokenizer']
self.tagger = self.Defaults.create_tagger(self) \
if 'tagger' not in overrides \
else overrides['tagger']
self.parser = self.Defaults.create_parser(self) \
if 'parser' not in overrides \
else overrides['parser']
self.entity = self.Defaults.create_entity(self) \
if 'entity' not in overrides \
else overrides['entity']
self.matcher = self.Defaults.create_matcher(self) \
if 'matcher' not in overrides \
else overrides['matcher']
if 'make_doc' in overrides:
self.make_doc = overrides['make_doc']
elif 'create_make_doc' in overrides:
self.make_doc = overrides['create_make_doc'](self)
elif not hasattr(self, 'make_doc'):
self.make_doc = lambda text: self.tokenizer(text)
if 'pipeline' in overrides:
self.pipeline = overrides['pipeline']
elif 'create_pipeline' in overrides:
self.pipeline = overrides['create_pipeline'](self)
else:
self.pipeline = [self.tagger, self.parser, self.matcher, self.entity]
def __call__(self, text, tag=True, parse=True, entity=True):
"""
Apply the pipeline to some text. The text can span multiple sentences,
        and can contain arbitrary whitespace. Alignment into the original string
is preserved.
        Arguments:
text (unicode): The text to be processed.
Returns:
doc (Doc): A container for accessing the annotations.
Example:
>>> from spacy.en import English
>>> nlp = English()
>>> tokens = nlp('An example sentence. Another example sentence.')
>>> tokens[0].orth_, tokens[0].head.tag_
('An', 'NN')
"""
doc = self.make_doc(text)
if self.entity and entity:
# Add any of the entity labels already set, in case we don't have them.
for token in doc:
if token.ent_type != 0:
self.entity.add_label(token.ent_type)
skip = {self.tagger: not tag, self.parser: not parse, self.entity: not entity}
for proc in self.pipeline:
if proc and not skip.get(proc):
proc(doc)
return doc
def pipe(self, texts, tag=True, parse=True, entity=True, n_threads=2, batch_size=1000):
"""
Process texts as a stream, and yield Doc objects in order.
Supports GIL-free multi-threading.
Arguments:
texts (iterator)
tag (bool)
parse (bool)
entity (bool)
"""
skip = {self.tagger: not tag, self.parser: not parse, self.entity: not entity}
stream = (self.make_doc(text) for text in texts)
for proc in self.pipeline:
if proc and not skip.get(proc):
if hasattr(proc, 'pipe'):
stream = proc.pipe(stream, n_threads=n_threads, batch_size=batch_size)
else:
stream = (proc(item) for item in stream)
for doc in stream:
yield doc
def save_to_directory(self, path):
"""
Save the Vocab, StringStore and pipeline to a directory.
Arguments:
path (string or pathlib path): Path to save the model.
"""
configs = {
'pos': self.tagger.cfg if self.tagger else {},
'deps': self.parser.cfg if self.parser else {},
'ner': self.entity.cfg if self.entity else {},
}
path = util.ensure_path(path)
if not path.exists():
path.mkdir()
self.setup_directory(path, **configs)
strings_loc = path / 'vocab' / 'strings.json'
with strings_loc.open('w', encoding='utf8') as file_:
self.vocab.strings.dump(file_)
self.vocab.dump(path / 'vocab' / 'lexemes.bin')
# TODO: Word vectors?
if self.tagger:
self.tagger.model.dump(str(path / 'pos' / 'model'))
if self.parser:
self.parser.model.dump(str(path / 'deps' / 'model'))
if self.entity:
self.entity.model.dump(str(path / 'ner' / 'model'))
def end_training(self, path=None):
if self.tagger:
self.tagger.model.end_training()
if self.parser:
self.parser.model.end_training()
if self.entity:
self.entity.model.end_training()
# NB: This is slightly different from before --- we no longer default
# to taking nlp.path
if path is not None:
self.save_to_directory(path)
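# Minimal usage sketch (not part of the original module); it assumes a concrete
# subclass such as spacy.en.English is installed, and follows the __call__ API
# documented above:
if __name__ == '__main__':
    from spacy.en import English
    nlp = English()
    doc = nlp(u'An example sentence. Another example sentence.')
    for token in doc:
        print(u'%s\t%s' % (token.orth_, token.tag_))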
|
mit
|
sysalexis/kbengine
|
kbe/res/scripts/common/Lib/site-packages/pip/commands/freeze.py
|
345
|
4664
|
import re
import sys
import pip
from pip.req import InstallRequirement
from pip.log import logger
from pip.basecommand import Command
from pip.util import get_installed_distributions
from pip._vendor import pkg_resources
class FreezeCommand(Command):
"""Output installed packages in requirements format."""
name = 'freeze'
usage = """
%prog [options]"""
summary = 'Output installed packages in requirements format.'
def __init__(self, *args, **kw):
super(FreezeCommand, self).__init__(*args, **kw)
self.cmd_opts.add_option(
'-r', '--requirement',
dest='requirement',
action='store',
default=None,
metavar='file',
help="Use the order in the given requirements file and it's comments when generating output.")
self.cmd_opts.add_option(
'-f', '--find-links',
dest='find_links',
action='append',
default=[],
metavar='URL',
help='URL for finding packages, which will be added to the output.')
self.cmd_opts.add_option(
'-l', '--local',
dest='local',
action='store_true',
default=False,
help='If in a virtualenv that has global access, do not output globally-installed packages.')
self.parser.insert_option_group(0, self.cmd_opts)
def setup_logging(self):
logger.move_stdout_to_stderr()
def run(self, options, args):
requirement = options.requirement
find_links = options.find_links or []
local_only = options.local
## FIXME: Obviously this should be settable:
find_tags = False
skip_match = None
skip_regex = options.skip_requirements_regex
if skip_regex:
skip_match = re.compile(skip_regex)
dependency_links = []
f = sys.stdout
for dist in pkg_resources.working_set:
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
for link in find_links:
if '#egg=' in link:
dependency_links.append(link)
for link in find_links:
f.write('-f %s\n' % link)
installations = {}
for dist in get_installed_distributions(local_only=local_only):
req = pip.FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
installations[req.name] = req
if requirement:
req_f = open(requirement)
for line in req_f:
if not line.strip() or line.strip().startswith('#'):
f.write(line)
continue
if skip_match and skip_match.search(line):
f.write(line)
continue
elif line.startswith('-e') or line.startswith('--editable'):
if line.startswith('-e'):
line = line[2:].strip()
else:
line = line[len('--editable'):].strip().lstrip('=')
line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs)
elif (line.startswith('-r') or line.startswith('--requirement')
or line.startswith('-Z') or line.startswith('--always-unzip')
or line.startswith('-f') or line.startswith('-i')
or line.startswith('--extra-index-url')
or line.startswith('--find-links')
or line.startswith('--index-url')):
f.write(line)
continue
else:
line_req = InstallRequirement.from_line(line)
if not line_req.name:
logger.notify("Skipping line because it's not clear what it would install: %s"
% line.strip())
logger.notify(" (add #egg=PackageName to the URL to avoid this warning)")
continue
if line_req.name not in installations:
logger.warn("Requirement file contains %s, but that package is not installed"
% line.strip())
continue
f.write(str(installations[line_req.name]))
del installations[line_req.name]
f.write('## The following requirements were added by pip --freeze:\n')
for installation in sorted(installations.values(), key=lambda x: x.name):
f.write(str(installation))
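# Example invocation of the command defined above (illustrative):
#
#   pip freeze -f https://example.com/wheels -r requirements.txt
#
# writes each '-f' link first, then the requirement lines in the file's own
# order, and finally any remaining installed packages under the banner
# '## The following requirements were added by pip --freeze:'.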
|
lgpl-3.0
|
SlimRoms/kernel_motorola_msm8960-common
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py
|
11088
|
3246
|
# Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <[email protected]>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
return defaultdict(autodict)
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
# nothing to do, really
pass
def define_symbolic_value(event_name, field_name, value, field_str):
symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
string = ""
if flag_fields[event_name][field_name]:
print_delim = 0
keys = flag_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string += flag_fields[event_name][field_name]['values'][idx]
break
if idx and (value & idx) == idx:
if print_delim and flag_fields[event_name][field_name]['delim']:
string += " " + flag_fields[event_name][field_name]['delim'] + " "
string += flag_fields[event_name][field_name]['values'][idx]
print_delim = 1
value &= ~idx
return string
def symbol_str(event_name, field_name, value):
string = ""
if symbolic_fields[event_name][field_name]:
keys = symbolic_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string = symbolic_fields[event_name][field_name]['values'][idx]
break
if (value == idx):
string = symbolic_fields[event_name][field_name]['values'][idx]
break
return string
trace_flags = { 0x00: "NONE",
                0x01: "IRQS_OFF",
                0x02: "IRQS_NOSUPPORT",
                0x04: "NEED_RESCHED",
                0x08: "HARDIRQ",
                0x10: "SOFTIRQ" }
def trace_flag_str(value):
string = ""
print_delim = 0
keys = trace_flags.keys()
for idx in keys:
if not value and not idx:
string += "NONE"
break
if idx and (value & idx) == idx:
if print_delim:
                string += " | "
string += trace_flags[idx]
print_delim = 1
value &= ~idx
return string
def taskState(state):
states = {
0 : "R",
1 : "S",
2 : "D",
64: "DEAD"
}
if state not in states:
return "Unknown"
return states[state]
class EventHeaders:
def __init__(self, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
self.cpu = common_cpu
self.secs = common_secs
self.nsecs = common_nsecs
self.pid = common_pid
self.comm = common_comm
def ts(self):
return (self.secs * (10 ** 9)) + self.nsecs
def ts_format(self):
return "%d.%d" % (self.secs, int(self.nsecs / 1000))
|
gpl-2.0
|
npe9/depot_tools
|
third_party/gsutil/gslib/commands/rm.py
|
50
|
8989
|
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import boto
from boto.exception import GSResponseError
from gslib.command import Command
from gslib.command import COMMAND_NAME
from gslib.command import COMMAND_NAME_ALIASES
from gslib.command import CONFIG_REQUIRED
from gslib.command import FILE_URIS_OK
from gslib.command import MAX_ARGS
from gslib.command import MIN_ARGS
from gslib.command import PROVIDER_URIS_OK
from gslib.command import SUPPORTED_SUB_ARGS
from gslib.command import URIS_START_ARG
from gslib.exception import CommandException
from gslib.help_provider import HELP_NAME
from gslib.help_provider import HELP_NAME_ALIASES
from gslib.help_provider import HELP_ONE_LINE_SUMMARY
from gslib.help_provider import HELP_TEXT
from gslib.help_provider import HelpType
from gslib.help_provider import HELP_TYPE
from gslib.name_expansion import NameExpansionIterator
from gslib.util import NO_MAX
_detailed_help_text = ("""
<B>SYNOPSIS</B>
gsutil rm [-f] [-R] uri...
<B>DESCRIPTION</B>
The gsutil rm command removes objects.
For example, the command:
gsutil rm gs://bucket/subdir/*
will remove all objects in gs://bucket/subdir, but not in any of its
sub-directories. In contrast:
gsutil rm gs://bucket/subdir/**
will remove all objects under gs://bucket/subdir or any of its
subdirectories.
You can also use the -R option to specify recursive object deletion. Thus, for
example, the following two commands will both remove all objects in a bucket:
gsutil rm gs://bucket/**
gsutil rm -R gs://bucket
  If you have a large number of objects to remove, you might want to use the
  gsutil -m option to perform parallel (multi-threaded/multi-processing)
  removal:
gsutil -m rm -R gs://my_bucket/subdir
Note that gsutil rm will refuse to remove files from the local
file system. For example this will fail:
gsutil rm *.txt
<B>OPTIONS</B>
-f Continues silently (without printing error messages) despite
errors when removing multiple objects.
-R, -r Causes bucket contents to be removed recursively (i.e., including
all objects and subdirectories). Will not delete the bucket
itself; you need to run the gsutil rb command separately to do
that.
-a Delete all versions of an object.
""")
class RmCommand(Command):
"""Implementation of gsutil rm command."""
# Command specification (processed by parent class).
command_spec = {
# Name of command.
COMMAND_NAME : 'rm',
# List of command name aliases.
COMMAND_NAME_ALIASES : ['del', 'delete', 'remove'],
# Min number of args required by this command.
MIN_ARGS : 1,
# Max number of args required by this command, or NO_MAX.
MAX_ARGS : NO_MAX,
# Getopt-style string specifying acceptable sub args.
SUPPORTED_SUB_ARGS : 'afrRv',
# True if file URIs acceptable for this command.
FILE_URIS_OK : False,
# True if provider-only URIs acceptable for this command.
PROVIDER_URIS_OK : False,
# Index in args of first URI arg.
URIS_START_ARG : 0,
# True if must configure gsutil before running command.
CONFIG_REQUIRED : True,
}
help_spec = {
# Name of command or auxiliary help info for which this help applies.
HELP_NAME : 'rm',
# List of help name aliases.
HELP_NAME_ALIASES : ['del', 'delete', 'remove'],
# Type of help:
HELP_TYPE : HelpType.COMMAND_HELP,
# One line summary of this help.
HELP_ONE_LINE_SUMMARY : 'Remove objects',
# The full help text.
HELP_TEXT : _detailed_help_text,
}
# Command entry point.
def RunCommand(self):
# self.recursion_requested initialized in command.py (so can be checked
# in parent class for all commands).
self.continue_on_error = False
self.all_versions = False
if self.sub_opts:
for o, unused_a in self.sub_opts:
if o == '-a':
self.all_versions = True
elif o == '-f':
self.continue_on_error = True
elif o == '-r' or o == '-R':
self.recursion_requested = True
elif o == '-v':
self.THREADED_LOGGER.info('WARNING: The %s -v option is no longer'
' needed, and will eventually be removed.\n'
% self.command_name)
# Used to track if any files failed to be removed.
self.everything_removed_okay = True
# Tracks if any URIs matched the given args.
remove_func = self._MkRemoveFunc()
exception_handler = self._MkRemoveExceptionHandler()
try:
# Expand wildcards, dirs, buckets, and bucket subdirs in URIs.
name_expansion_iterator = NameExpansionIterator(
self.command_name, self.proj_id_handler, self.headers, self.debug,
self.bucket_storage_uri_class, self.args, self.recursion_requested,
flat=self.recursion_requested, all_versions=self.all_versions)
# Perform remove requests in parallel (-m) mode, if requested, using
# configured number of parallel processes and threads. Otherwise,
# perform requests with sequential function calls in current process.
self.Apply(remove_func, name_expansion_iterator, exception_handler)
# Assuming the bucket has versioning enabled, uri's that don't map to
# objects should throw an error even with all_versions, since the prior
# round of deletes only sends objects to a history table.
# This assumption that rm -a is only called for versioned buckets should be
# corrected, but the fix is non-trivial.
except CommandException as e:
if not self.continue_on_error:
raise
    except GSResponseError as e:
if not self.continue_on_error:
raise
if not self.everything_removed_okay and not self.continue_on_error:
raise CommandException('Some files could not be removed.')
# If this was a gsutil rm -r command covering any bucket subdirs,
# remove any dir_$folder$ objects (which are created by various web UI
# tools to simulate folders).
if self.recursion_requested:
folder_object_wildcards = []
for uri_str in self.args:
uri = self.suri_builder.StorageUri(uri_str)
if uri.names_object:
folder_object_wildcards.append('%s**_$folder$' % uri)
      if folder_object_wildcards:
self.continue_on_error = True
try:
name_expansion_iterator = NameExpansionIterator(
self.command_name, self.proj_id_handler, self.headers, self.debug,
self.bucket_storage_uri_class, folder_object_wildcards,
self.recursion_requested, flat=True,
all_versions=self.all_versions)
self.Apply(remove_func, name_expansion_iterator, exception_handler)
except CommandException as e:
# Ignore exception from name expansion due to an absent folder file.
if not e.reason.startswith('No URIs matched:'):
raise
return 0
def _MkRemoveExceptionHandler(self):
def RemoveExceptionHandler(e):
"""Simple exception handler to allow post-completion status."""
self.THREADED_LOGGER.error(str(e))
self.everything_removed_okay = False
return RemoveExceptionHandler
def _MkRemoveFunc(self):
def RemoveFunc(name_expansion_result):
exp_src_uri = self.suri_builder.StorageUri(
name_expansion_result.GetExpandedUriStr(),
is_latest=name_expansion_result.is_latest)
if exp_src_uri.names_container():
if exp_src_uri.is_cloud_uri():
# Before offering advice about how to do rm + rb, ensure those
# commands won't fail because of bucket naming problems.
boto.s3.connection.check_lowercase_bucketname(exp_src_uri.bucket_name)
uri_str = exp_src_uri.object_name.rstrip('/')
raise CommandException('"rm" command will not remove buckets. To '
'delete this/these bucket(s) do:\n\tgsutil rm '
'%s/*\n\tgsutil rb %s' % (uri_str, uri_str))
# Perform delete.
self.THREADED_LOGGER.info('Removing %s...',
name_expansion_result.expanded_uri_str)
try:
exp_src_uri.delete_key(validate=False, headers=self.headers)
except:
if self.continue_on_error:
self.everything_removed_okay = False
else:
raise
return RemoveFunc
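# As the CommandException raised in RemoveFunc documents, rm never deletes
# buckets themselves; emptying and removing a bucket is a two-step sequence
# (illustrative bucket name):
#
#   gsutil rm gs://my_bucket/*
#   gsutil rb gs://my_bucket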
|
bsd-3-clause
|
marcelovilaca/DIRAC
|
ConfigurationSystem/Client/CSAPI.py
|
2
|
27658
|
""" CSAPI exposes update functionalities to the Configuration.
Most of these functions can only be done by administrators
"""
__RCSID__ = "$Id$"
import types
from DIRAC import gLogger, gConfig, S_OK, S_ERROR
from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.Core.Utilities import List
from DIRAC.Core.Security.X509Chain import X509Chain
from DIRAC.Core.Security import Locations
from DIRAC.ConfigurationSystem.private.Modificator import Modificator
from DIRAC.ConfigurationSystem.Client.Helpers import CSGlobals
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
class CSAPI( object ):
""" CSAPI objects need an initialization phase
"""
def __init__( self ):
"""
Initialization function
"""
self.__csModified = False
self.__baseSecurity = "/Registry"
self.__userDN = ''
self.__userGroup = ''
self.__rpcClient = None
self.__csMod = None
self.__initialized = S_ERROR( "Not initialized" )
self.initialize()
if not self.__initialized[ 'OK' ]:
gLogger.error( self.__initialized )
def __getProxyID( self ):
proxyLocation = Locations.getProxyLocation()
if not proxyLocation:
gLogger.error( "No proxy found!" )
return False
chain = X509Chain()
if not chain.loadProxyFromFile( proxyLocation ):
gLogger.error( "Can't read proxy!", proxyLocation )
return False
retVal = chain.getIssuerCert()
if not retVal[ 'OK' ]:
gLogger.error( "Can't parse proxy!", retVal[ 'Message' ] )
return False
idCert = retVal[ 'Value' ]
self.__userDN = idCert.getSubjectDN()[ 'Value' ]
self.__userGroup = chain.getDIRACGroup()[ 'Value' ]
return True
def __getCertificateID( self ):
certLocation = Locations.getHostCertificateAndKeyLocation()
if not certLocation:
gLogger.error( "No certificate found!" )
return False
chain = X509Chain()
retVal = chain.loadChainFromFile( certLocation[ 0 ] )
if not retVal[ 'OK' ]:
gLogger.error( "Can't parse certificate!", retVal[ 'Message' ] )
return False
idCert = chain.getIssuerCert()[ 'Value' ]
self.__userDN = idCert.getSubjectDN()[ 'Value' ]
self.__userGroup = 'host'
return True
def initialize( self ):
if self.__initialized[ 'OK' ]:
return self.__initialized
if not gConfig.useServerCertificate():
res = self.__getProxyID()
else:
res = self.__getCertificateID()
if not res:
self.__initialized = S_ERROR( "Cannot locate client credentials" )
return self.__initialized
retVal = gConfig.getOption( "/DIRAC/Configuration/MasterServer" )
if not retVal[ 'OK' ]:
self.__initialized = S_ERROR( "Master server is not known. Is everything initialized?" )
return self.__initialized
self.__rpcClient = RPCClient( gConfig.getValue( "/DIRAC/Configuration/MasterServer", "" ) )
self.__csMod = Modificator( self.__rpcClient, "%s - %s" % ( self.__userGroup, self.__userDN ) )
retVal = self.downloadCSData()
if not retVal[ 'OK' ]:
self.__initialized = S_ERROR( "Can not download the remote cfg. Is everything initialized?" )
return self.__initialized
self.__initialized = S_OK()
return self.__initialized
def downloadCSData( self ):
if not self.__csMod:
return S_ERROR( "CSAPI not yet initialized" )
result = self.__csMod.loadFromRemote()
if not result[ 'OK' ]:
return result
self.__csModified = False
self.__csMod.updateGConfigurationData()
return S_OK()
def listUsers( self , group = False ):
if not self.__initialized[ 'OK' ]:
return self.__initialized
if not group:
return S_OK( self.__csMod.getSections( "%s/Users" % self.__baseSecurity ) )
else:
users = self.__csMod.getValue( "%s/Groups/%s/Users" % ( self.__baseSecurity, group ) )
if not users:
return S_OK( [] )
else:
return S_OK( List.fromChar( users ) )
def listHosts( self ):
if not self.__initialized[ 'OK' ]:
return self.__initialized
return S_OK( self.__csMod.getSections( "%s/Hosts" % self.__baseSecurity ) )
def describeUsers( self, users = None ):
if users is None: users = []
if not self.__initialized[ 'OK' ]:
return self.__initialized
return S_OK( self.__describeEntity( users ) )
def describeHosts( self, hosts = None ):
if hosts is None: hosts = []
if not self.__initialized[ 'OK' ]:
return self.__initialized
return S_OK( self.__describeEntity( hosts, True ) )
def __describeEntity( self, mask, hosts = False ):
if hosts:
csSection = "%s/Hosts" % self.__baseSecurity
else:
csSection = "%s/Users" % self.__baseSecurity
if mask:
entities = [ entity for entity in self.__csMod.getSections( csSection ) if entity in mask ]
else:
entities = self.__csMod.getSections( csSection )
entitiesDict = {}
for entity in entities:
entitiesDict[ entity ] = {}
for option in self.__csMod.getOptions( "%s/%s" % ( csSection, entity ) ):
entitiesDict[ entity ][ option ] = self.__csMod.getValue( "%s/%s/%s" % ( csSection, entity, option ) )
if not hosts:
groupsDict = self.describeGroups()[ 'Value' ]
entitiesDict[ entity ][ 'Groups' ] = []
for group in groupsDict:
if 'Users' in groupsDict[ group ] and entity in groupsDict[ group ][ 'Users' ]:
entitiesDict[ entity ][ 'Groups' ].append( group )
entitiesDict[ entity ][ 'Groups' ].sort()
return entitiesDict
def listGroups( self ):
"""
List all groups
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
return S_OK( self.__csMod.getSections( "%s/Groups" % self.__baseSecurity ) )
def describeGroups( self, mask = None ):
"""
List all groups that are in the mask (or all if no mask) with their properties
"""
if mask is None: mask = []
if not self.__initialized[ 'OK' ]:
return self.__initialized
groups = [ group for group in self.__csMod.getSections( "%s/Groups" % self.__baseSecurity ) if not mask or ( mask and group in mask ) ]
groupsDict = {}
for group in groups:
groupsDict[ group ] = {}
for option in self.__csMod.getOptions( "%s/Groups/%s" % ( self.__baseSecurity, group ) ):
groupsDict[ group ][ option ] = self.__csMod.getValue( "%s/Groups/%s/%s" % ( self.__baseSecurity, group, option ) )
if option in ( "Users", "Properties" ):
groupsDict[ group ][ option ] = List.fromChar( groupsDict[ group ][ option ] )
return S_OK( groupsDict )
def deleteUsers( self, users ):
"""
    Delete one or more users. Accepts either a single username string or a list of usernames.
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
if type( users ) == types.StringType:
users = [ users ]
usersData = self.describeUsers( users )['Value']
for username in users:
if not username in usersData:
gLogger.warn( "User %s does not exist" )
continue
userGroups = usersData[ username ][ 'Groups' ]
for group in userGroups:
self.__removeUserFromGroup( group, username )
gLogger.info( "Deleted user %s from group %s" % ( username, group ) )
self.__csMod.removeSection( "%s/Users/%s" % ( self.__baseSecurity, username ) )
gLogger.info( "Deleted user %s" % username )
self.__csModified = True
return S_OK( True )
def __removeUserFromGroup( self, group, username ):
"""
Remove user from a group
"""
usersInGroup = self.__csMod.getValue( "%s/Groups/%s/Users" % ( self.__baseSecurity, group ) )
if usersInGroup is not None:
userList = List.fromChar( usersInGroup, "," )
userPos = userList.index( username )
userList.pop( userPos )
self.__csMod.setOptionValue( "%s/Groups/%s/Users" % ( self.__baseSecurity, group ), ",".join( userList ) )
def __addUserToGroup( self, group, username ):
"""
Add user to a group
"""
usersInGroup = self.__csMod.getValue( "%s/Groups/%s/Users" % ( self.__baseSecurity, group ) )
if usersInGroup is not None:
userList = List.fromChar( usersInGroup )
if username not in userList:
userList.append( username )
self.__csMod.setOptionValue( "%s/Groups/%s/Users" % ( self.__baseSecurity, group ), ",".join( userList ) )
else:
gLogger.warn( "User %s is already in group %s" % ( username, group ) )
def addUser( self, username, properties ):
"""
Add a user to the cs
    :param str username: user name
:param dict properties: dictionary describing user properties:
- DN
      - Groups
- <extra params>
:return True/False
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
for prop in ( "DN", "Groups" ):
if prop not in properties:
gLogger.error( "Missing property for user", "%s: %s" % ( prop, username ) )
return S_OK( False )
if username in self.listUsers()['Value']:
gLogger.error( "User is already registered", username )
return S_OK( False )
groups = self.listGroups()['Value']
for userGroup in properties[ 'Groups' ]:
if not userGroup in groups:
gLogger.error( "User group is not a valid group", "%s %s" % ( username, userGroup ) )
return S_OK( False )
self.__csMod.createSection( "%s/Users/%s" % ( self.__baseSecurity, username ) )
for prop in properties:
if prop == "Groups":
continue
self.__csMod.setOptionValue( "%s/Users/%s/%s" % ( self.__baseSecurity, username, prop ), properties[ prop ] )
for userGroup in properties[ 'Groups' ]:
gLogger.info( "Added user %s to group %s" % ( username, userGroup ) )
self.__addUserToGroup( userGroup, username )
gLogger.info( "Registered user %s" % username )
self.__csModified = True
return S_OK( True )
def modifyUser( self, username, properties, createIfNonExistant = False ):
"""
Modify a user
    :param str username: user name
:param dict properties: dictionary describing user properties:
- DN
- Groups
- <extra params>
:return True/False
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
modifiedUser = False
userData = self.describeUsers( [ username ] )['Value']
if username not in userData:
if createIfNonExistant:
gLogger.info( "Registering user %s" % username )
return self.addUser( username, properties )
gLogger.error( "User is not registered", username )
return S_OK( False )
for prop in properties:
if prop == "Groups":
continue
prevVal = self.__csMod.getValue( "%s/Users/%s/%s" % ( self.__baseSecurity, username, prop ) )
if not prevVal or prevVal != properties[ prop ]:
gLogger.info( "Setting %s property for user %s to %s" % ( prop, username, properties[ prop ] ) )
self.__csMod.setOptionValue( "%s/Users/%s/%s" % ( self.__baseSecurity, username, prop ), properties[ prop ] )
modifiedUser = True
if 'Groups' in properties:
groups = self.listGroups()['Value']
for userGroup in properties[ 'Groups' ]:
if not userGroup in groups:
gLogger.error( "User group is not a valid group", "%s %s" % ( username, userGroup ) )
return S_OK( False )
groupsToBeDeletedFrom = []
groupsToBeAddedTo = []
for prevGroup in userData[ username ][ 'Groups' ]:
if prevGroup not in properties[ 'Groups' ]:
groupsToBeDeletedFrom.append( prevGroup )
modifiedUser = True
for newGroup in properties[ 'Groups' ]:
if newGroup not in userData[ username ][ 'Groups' ]:
groupsToBeAddedTo.append( newGroup )
modifiedUser = True
for group in groupsToBeDeletedFrom:
self.__removeUserFromGroup( group, username )
gLogger.info( "Removed user %s from group %s" % ( username, group ) )
for group in groupsToBeAddedTo:
self.__addUserToGroup( group, username )
gLogger.info( "Added user %s to group %s" % ( username, group ) )
if modifiedUser:
gLogger.info( "Modified user %s" % username )
self.__csModified = True
else:
gLogger.info( "Nothing to modify for user %s" % username )
return S_OK( True )
def addGroup( self, groupname, properties ):
"""
Add a group to the cs
:param str groupname: group name
:param dict properties: dictionary describing group properties:
- Users
- Properties
- <extra params>
:return True/False
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
if groupname in self.listGroups()['Value']:
gLogger.error( "Group is already registered", groupname )
return S_OK( False )
self.__csMod.createSection( "%s/Groups/%s" % ( self.__baseSecurity, groupname ) )
for prop in properties:
self.__csMod.setOptionValue( "%s/Groups/%s/%s" % ( self.__baseSecurity, groupname, prop ), properties[ prop ] )
gLogger.info( "Registered group %s" % groupname )
self.__csModified = True
return S_OK( True )
def modifyGroup( self, groupname, properties, createIfNonExistant = False ):
"""
    Modify a group
:param str groupname: group name
:param dict properties: dictionary describing group properties:
- Users
- Properties
- <extra params>
:return True/False
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
modifiedGroup = False
groupData = self.describeGroups( [ groupname ] )['Value']
if groupname not in groupData:
if createIfNonExistant:
gLogger.info( "Registering group %s" % groupname )
return self.addGroup( groupname, properties )
gLogger.error( "Group is not registered", groupname )
return S_OK( False )
for prop in properties:
prevVal = self.__csMod.getValue( "%s/Groups/%s/%s" % ( self.__baseSecurity, groupname, prop ) )
if not prevVal or prevVal != properties[ prop ]:
gLogger.info( "Setting %s property for group %s to %s" % ( prop, groupname, properties[ prop ] ) )
self.__csMod.setOptionValue( "%s/Groups/%s/%s" % ( self.__baseSecurity, groupname, prop ), properties[ prop ] )
modifiedGroup = True
if modifiedGroup:
gLogger.info( "Modified group %s" % groupname )
self.__csModified = True
else:
gLogger.info( "Nothing to modify for group %s" % groupname )
return S_OK( True )
def addHost( self, hostname, properties ):
"""
Add a host to the cs
    :param str hostname: host name
:param dict properties: dictionary describing host properties:
- DN
- Properties
- <extra params>
:return True/False
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
for prop in ( "DN", ):
if prop not in properties:
gLogger.error( "Missing property for host", "%s %s" % ( prop, hostname ) )
return S_OK( False )
if hostname in self.listHosts()['Value']:
gLogger.error( "Host is already registered", hostname )
return S_OK( False )
self.__csMod.createSection( "%s/Hosts/%s" % ( self.__baseSecurity, hostname ) )
for prop in properties:
self.__csMod.setOptionValue( "%s/Hosts/%s/%s" % ( self.__baseSecurity, hostname, prop ), properties[ prop ] )
gLogger.info( "Registered host %s" % hostname )
self.__csModified = True
return S_OK( True )
def addShifter( self, shifters = None ):
"""
    Adds or modifies one or more shifters. Also adds the shifter section in case it is not present.
    Shifter identities are used in several places, mostly for running agents.
    Shifters should be in the form {'ShifterRole':{'User':'aUserName', 'Group':'aDIRACGroup'}}
:return: S_OK/S_ERROR
"""
def getOpsSection():
"""
Where is the shifters section?
"""
vo = CSGlobals.getVO()
setup = CSGlobals.getSetup()
if vo:
res = gConfig.getSections( '/Operations/%s/%s/Shifter' % (vo, setup) )
if res['OK']:
return S_OK( '/Operations/%s/%s/Shifter' % ( vo, setup ) )
res = gConfig.getSections( '/Operations/%s/Defaults/Shifter' % vo )
if res['OK']:
return S_OK( '/Operations/%s/Defaults/Shifter' % vo )
else:
res = gConfig.getSections( '/Operations/%s/Shifter' % setup )
if res['OK']:
return S_OK( '/Operations/%s/Shifter' % setup )
res = gConfig.getSections( '/Operations/Defaults/Shifter' )
if res['OK']:
return S_OK( '/Operations/Defaults/Shifter' )
return S_ERROR( "No shifter section" )
if shifters is None: shifters = {}
if not self.__initialized['OK']:
return self.__initialized
# get current shifters
opsH = Operations( )
currentShifterRoles = opsH.getSections( 'Shifter' )
if not currentShifterRoles['OK']:
# we assume the shifter section is not present
currentShifterRoles = []
else:
currentShifterRoles = currentShifterRoles['Value']
currentShiftersDict = {}
for currentShifterRole in currentShifterRoles:
currentShifter = opsH.getOptionsDict( 'Shifter/%s' % currentShifterRole )
if not currentShifter['OK']:
return currentShifter
currentShifter = currentShifter['Value']
currentShiftersDict[currentShifterRole] = currentShifter
# Removing from shifters what does not need to be changed
for sRole in shifters:
if sRole in currentShiftersDict:
if currentShiftersDict[sRole] == shifters[sRole]:
shifters.pop( sRole )
# get shifters section to modify
section = getOpsSection()
# Is this section present?
if not section['OK']:
if section['Message'] == "No shifter section":
gLogger.warn( section['Message'] )
gLogger.info( "Adding shifter section" )
vo = CSGlobals.getVO()
if vo:
section = '/Operations/%s/Defaults/Shifter' % vo
else:
section = '/Operations/Defaults/Shifter'
res = self.__csMod.createSection( section )
if not res:
gLogger.error( "Section %s not created" % section )
return S_ERROR( "Section %s not created" % section )
else:
gLogger.error( section['Message'] )
return section
else:
section = section['Value']
#add or modify shifters
for shifter in shifters:
self.__csMod.removeSection( section + '/' + shifter )
self.__csMod.createSection( section + '/' + shifter )
self.__csMod.createSection( section + '/' + shifter + '/' + 'User' )
self.__csMod.createSection( section + '/' + shifter + '/' + 'Group' )
self.__csMod.setOptionValue( section + '/' + shifter + '/' + 'User', shifters[shifter]['User'] )
self.__csMod.setOptionValue( section + '/' + shifter + '/' + 'Group', shifters[shifter]['Group'] )
self.__csModified = True
return S_OK( True )
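  # Illustrative addShifter() payload, in the format documented above (the
  # role, user and group names are assumptions):
  #   csAPI.addShifter( { 'DataManager' : { 'User' : 'jdoe',
  #                                         'Group' : 'dirac_admin' } } )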
def modifyHost( self, hostname, properties, createIfNonExistant = False ):
"""
Modify a host
    :param str hostname: host name
:param dict properties: dictionary describing host properties:
- DN
- Properties
- <extra params>
:return True/False
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
modifiedHost = False
hostData = self.describeHosts( [ hostname ] )['Value']
if hostname not in hostData:
if createIfNonExistant:
gLogger.info( "Registering host %s" % hostname )
return self.addHost( hostname, properties )
gLogger.error( "Host is not registered", hostname )
return S_OK( False )
for prop in properties:
prevVal = self.__csMod.getValue( "%s/Hosts/%s/%s" % ( self.__baseSecurity, hostname, prop ) )
if not prevVal or prevVal != properties[ prop ]:
gLogger.info( "Setting %s property for host %s to %s" % ( prop, hostname, properties[ prop ] ) )
self.__csMod.setOptionValue( "%s/Hosts/%s/%s" % ( self.__baseSecurity, hostname, prop ), properties[ prop ] )
modifiedHost = True
if modifiedHost:
gLogger.info( "Modified host %s" % hostname )
self.__csModified = True
else:
gLogger.info( "Nothing to modify for host %s" % hostname )
return S_OK( True )
def syncUsersWithCFG( self, usersCFG ):
"""
Sync users with the cfg contents. Usernames have to be sections containing
DN, Groups, and extra properties as parameters
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
done = True
for user in usersCFG.listSections():
properties = {}
propList = usersCFG[ user ].listOptions()
for prop in propList:
if prop == "Groups":
properties[ prop ] = List.fromChar( usersCFG[ user ][ prop ] )
else:
properties[ prop ] = usersCFG[ user ][ prop ]
if not self.modifyUser( user, properties, createIfNonExistant = True ):
done = False
return S_OK( done )
def sortUsersAndGroups( self ):
self.__csMod.sortAlphabetically( "%s/Users" % self.__baseSecurity )
self.__csMod.sortAlphabetically( "%s/Hosts" % self.__baseSecurity )
for group in self.__csMod.getSections( "%s/Groups" % self.__baseSecurity ):
usersOptionPath = "%s/Groups/%s/Users" % ( self.__baseSecurity, group )
users = self.__csMod.getValue( usersOptionPath )
usersList = List.fromChar( users )
usersList.sort()
sortedUsers = ", ".join( usersList )
if users != sortedUsers:
self.__csMod.setOptionValue( usersOptionPath, sortedUsers )
def checkForUnexistantUsersInGroups( self ):
allUsers = self.__csMod.getSections( "%s/Users" % self.__baseSecurity )
allGroups = self.__csMod.getSections( "%s/Groups" % self.__baseSecurity )
for group in allGroups:
usersInGroup = self.__csMod.getValue( "%s/Groups/%s/Users" % ( self.__baseSecurity, group ) )
if usersInGroup:
filteredUsers = []
usersInGroup = List.fromChar( usersInGroup )
for user in usersInGroup:
if user in allUsers:
filteredUsers.append( user )
self.__csMod.setOptionValue( "%s/Groups/%s/Users" % ( self.__baseSecurity, group ),
",".join( filteredUsers ) )
def commitChanges( self, sortUsers = True ):
if not self.__initialized[ 'OK' ]:
return self.__initialized
if self.__csModified:
self.checkForUnexistantUsersInGroups()
if sortUsers:
self.sortUsersAndGroups()
retVal = self.__csMod.commit()
if not retVal[ 'OK' ]:
gLogger.error( "Can't commit new configuration data", "%s" % retVal[ 'Message' ] )
return retVal
return self.downloadCSData()
return S_OK()
def commit( self ):
""" Commit the accumulated changes to the CS server
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
if self.__csModified:
retVal = self.__csMod.commit()
if not retVal[ 'OK' ]:
gLogger.error( "Can't commit new configuration data", "%s" % retVal[ 'Message' ] )
return retVal
return self.downloadCSData()
return S_OK()
def mergeFromCFG( self, cfg ):
""" Merge the internal CFG data with the input
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
self.__csMod.mergeFromCFG( cfg )
self.__csModified = True
return S_OK()
def modifyValue( self, optionPath, newValue ):
"""Modify an existing value at the specified options path.
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
prevVal = self.__csMod.getValue( optionPath )
if not prevVal:
return S_ERROR( 'Trying to set %s to %s but option does not exist' % ( optionPath, newValue ) )
gLogger.verbose( "Changing %s from \n%s \nto \n%s" % ( optionPath, prevVal, newValue ) )
self.__csMod.setOptionValue( optionPath, newValue )
self.__csModified = True
return S_OK( 'Modified %s' % optionPath )
def setOption( self, optionPath, optionValue ):
"""Create an option at the specified path.
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
self.__csMod.setOptionValue( optionPath, optionValue )
self.__csModified = True
return S_OK( 'Created new option %s = %s' % ( optionPath, optionValue ) )
def setOptionComment( self, optionPath, comment ):
"""Create an option at the specified path.
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
self.__csMod.setComment( optionPath, comment )
self.__csModified = True
return S_OK( 'Set option comment %s : %s' % ( optionPath, comment ) )
def delOption( self, optionPath ):
""" Delete an option
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
if not self.__csMod.removeOption( optionPath ):
return S_ERROR( "Couldn't delete option %s" % optionPath )
self.__csModified = True
return S_OK( 'Deleted option %s' % optionPath )
def createSection( self, sectionPath, comment = "" ):
""" Create a new section
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
self.__csMod.createSection( sectionPath )
self.__csModified = True
if comment:
self.__csMod.setComment( sectionPath, comment )
return S_OK()
def delSection( self, sectionPath ):
""" Delete a section
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
if not self.__csMod.removeSection( sectionPath ):
return S_ERROR( "Could not delete section %s " % sectionPath )
self.__csModified = True
return S_OK( )
def copySection( self, originalPath, targetPath ):
""" Copy a whole section to a new location
"""
if not self.__initialized['OK']:
return self.__initialized
cfg = self.__csMod.getCFG( )
sectionCfg = cfg[originalPath]
result = self.createSection( targetPath )
if not result['OK']:
return result
if not self.__csMod.mergeSectionFromCFG( targetPath, sectionCfg ):
return S_ERROR( "Could not merge cfg into section %s" % targetPath )
self.__csModified = True
return S_OK( )
def moveSection( self, originalPath, targetPath ):
""" Move a whole section to a new location
"""
result = self.copySection( originalPath, targetPath )
if not result['OK']:
return result
result = self.delSection( originalPath )
if not result['OK']:
return result
self.__csModified = True
return S_OK()
def mergeCFGUnderSection( self, sectionPath, cfg ):
""" Merge the given cfg under a certain section
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
result = self.createSection( sectionPath )
if not result[ 'OK' ]:
return result
if not self.__csMod.mergeSectionFromCFG( sectionPath, cfg ):
return S_ERROR( "Could not merge cfg into section %s" % sectionPath )
self.__csModified = True
return S_OK()
def mergeWithCFG( self, cfg ):
""" Merge the given cfg with the current config
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
self.__csMod.mergeFromCFG( cfg )
self.__csModified = True
return S_OK()
def getCurrentCFG( self ):
""" Get the current CFG as it is
"""
if not self.__initialized[ 'OK' ]:
return self.__initialized
return S_OK( self.__csMod.getCFG() )
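# Minimal usage sketch (not part of the original module); it assumes valid
# client credentials so that initialize() succeeds, and illustrative user data:
if __name__ == '__main__':
  csAPI = CSAPI()
  result = csAPI.addUser( 'jdoe', { 'DN' : '/DC=org/O=Example/CN=John Doe',
                                    'Groups' : [ 'dirac_user' ] } )
  if result[ 'OK' ] and result[ 'Value' ]:
    print csAPI.commitChanges()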
|
gpl-3.0
|
SoVictor/Lerna
|
core/urls.py
|
3
|
1511
|
from django.conf.urls import url
from .views import (
ContestIndexView, TrainingIndexView, TrainingView, ProblemView, AttemptsView,
AttemptDetailsView, SubmitView, StandingsView, UnfrozenStandingsView, XMLStandingsView,
UnfrozenXMLStandingsView, ClarificationsView,
)
def _url(regex, cls, name):
return url(regex, cls.as_view(), name=name)
urlpatterns = (
_url(r'^$', ContestIndexView, 'contests'),
_url(r'^trainings/?$', TrainingIndexView, 'trainings'),
_url(r'^training/(?P<contest_id>\d+)/?$', TrainingView, 'training'),
_url(r'^(?P<contest_id>\d+)/(?P<problem_number>\d+)/?$', ProblemView, 'problem'),
_url(r'^(?P<contest_id>\d+)/(?P<problem_number>\d+)/submit/?$', SubmitView, 'submit'),
_url(r'^(?P<contest_id>\d+)/submit/?$', SubmitView, 'submit'),
_url(r'^(?P<contest_id>\d+)/attempts/?$', AttemptsView, 'attempts'),
_url(r'^(?P<contest_id>\d+)/attempts/(?P<page>\d+)/?$', AttemptsView, 'attempts'),
_url(r'^attempt/(?P<attempt_id>\d+)/?$', AttemptDetailsView, 'attempt'),
_url(r'^(?P<contest_id>\d+)/standings/?$', StandingsView, 'standings'),
_url(r'^(?P<contest_id>\d+)/standings\.xml/?$', XMLStandingsView, 'standings-xml'),
_url(r'^(?P<contest_id>\d+)/unfrozen-standings/?$', UnfrozenStandingsView, 'standings-unfrozen'),
_url(r'^(?P<contest_id>\d+)/unfrozen-standings\.xml/?$', UnfrozenXMLStandingsView, 'standings-xml-unfrozen'),
_url(r'^(?P<contest_id>\d+)/clarifications/?$', ClarificationsView, 'clarifications'),
)
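# The named routes above can be resolved programmatically (illustrative; the
# import path matches Django 1.x, which this urlconf targets):
#   from django.core.urlresolvers import reverse
#   path = reverse('problem', kwargs={'contest_id': 1, 'problem_number': 2})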
|
gpl-2.0
|
razorpay/razorpay-python
|
tests/test_client_utility.py
|
1
|
1858
|
import responses
from .helpers import mock_file, ClientTestCase
from razorpay.errors import SignatureVerificationError
class TestClientValidator(ClientTestCase):
def setUp(self):
super(TestClientValidator, self).setUp()
@responses.activate
def test_verify_payment_signature(self):
sig = 'b2335e3b0801106b84a7faff035df56ecffde06918c9ddd1f0fafbb37a51cc89'
parameters = {}
parameters['razorpay_order_id'] = 'fake_order_id'
parameters['razorpay_payment_id'] = 'fake_payment_id'
parameters['razorpay_signature'] = sig
self.assertEqual(
self.client.utility.verify_payment_signature(parameters),
True)
@responses.activate
def test_verify_payment_signature_with_exception(self):
parameters = {}
parameters['razorpay_order_id'] = 'fake_order_id'
parameters['razorpay_payment_id'] = 'fake_payment_id'
parameters['razorpay_signature'] = 'test_signature'
self.assertRaises(
SignatureVerificationError,
self.client.utility.verify_payment_signature,
parameters)
@responses.activate
def test_verify_webhook_signature(self):
secret = self.client.auth[1]
sig = 'd60e67fd884556c045e9be7dad57903e33efc7172c17c6e3ef77db42d2b366e9'
body = mock_file('fake_payment_authorized_webhook')
self.assertEqual(
self.client.utility.verify_webhook_signature(body, sig, secret),
True)
@responses.activate
def test_verify_webhook_signature_with_exception(self):
secret = self.client.auth[1]
sig = 'test_signature'
body = ''
self.assertRaises(
SignatureVerificationError,
self.client.utility.verify_webhook_signature,
body,
sig,
secret)
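# Non-test usage of the utility exercised above (client construction follows
# the documented razorpay pattern; the key values are placeholders):
#   import razorpay
#   client = razorpay.Client(auth=('<key_id>', '<key_secret>'))
#   client.utility.verify_payment_signature({
#       'razorpay_order_id': order_id,
#       'razorpay_payment_id': payment_id,
#       'razorpay_signature': signature,
#   })  # raises SignatureVerificationError if the signature does not match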
|
mit
|
ridfrustum/lettuce
|
tests/integration/lib/Django-1.3/django/contrib/staticfiles/utils.py
|
160
|
1802
|
import os
import fnmatch
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def is_ignored(path, ignore_patterns=[]):
"""
Return True or False depending on whether the ``path`` should be
ignored (if it matches any pattern in ``ignore_patterns``).
"""
for pattern in ignore_patterns:
if fnmatch.fnmatchcase(path, pattern):
return True
return False
def get_files(storage, ignore_patterns=[], location=''):
"""
Recursively walk the storage directories yielding the paths
of all files that should be copied.
"""
directories, files = storage.listdir(location)
for fn in files:
if is_ignored(fn, ignore_patterns):
continue
if location:
fn = os.path.join(location, fn)
yield fn
for dir in directories:
if is_ignored(dir, ignore_patterns):
continue
if location:
dir = os.path.join(location, dir)
for fn in get_files(storage, ignore_patterns, dir):
yield fn
def check_settings():
"""
Checks if the staticfiles settings have sane values.
"""
if not settings.STATIC_URL:
raise ImproperlyConfigured(
"You're using the staticfiles app "
"without having set the required STATIC_URL setting.")
if settings.MEDIA_URL == settings.STATIC_URL:
raise ImproperlyConfigured("The MEDIA_URL and STATIC_URL "
"settings must have different values")
if ((settings.MEDIA_ROOT and settings.STATIC_ROOT) and
(settings.MEDIA_ROOT == settings.STATIC_ROOT)):
raise ImproperlyConfigured("The MEDIA_ROOT and STATIC_ROOT "
"settings must have different values")
|
gpl-3.0
|
hsaputra/tensorflow
|
tensorflow/contrib/tensor_forest/python/tensor_forest_test.py
|
32
|
5484
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.contrib.tensor_forest.ops.tensor_forest."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.tensor_forest.python import tensor_forest
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
class TensorForestTest(test_util.TensorFlowTestCase):
def testForestHParams(self):
hparams = tensor_forest.ForestHParams(
num_classes=2,
num_trees=100,
max_nodes=1000,
split_after_samples=25,
num_features=60).fill()
self.assertEquals(2, hparams.num_classes)
self.assertEquals(3, hparams.num_output_columns)
self.assertEquals(10, hparams.num_splits_to_consider)
# Default value of valid_leaf_threshold
self.assertEquals(1, hparams.valid_leaf_threshold)
self.assertEquals(0, hparams.base_random_seed)
def testForestHParamsBigTree(self):
hparams = tensor_forest.ForestHParams(
num_classes=2,
num_trees=100,
max_nodes=1000000,
split_after_samples=25,
num_features=1000).fill()
self.assertEquals(31, hparams.num_splits_to_consider)
def testForestHParamsStringParams(self):
hparams = tensor_forest.ForestHParams(
num_classes=2,
num_trees=100,
max_nodes=1000000,
split_after_samples="25",
num_splits_to_consider="1000000",
num_features=1000).fill()
self.assertEquals("1000000", hparams.num_splits_to_consider)
def testTrainingConstructionClassification(self):
input_data = [[-1., 0.], [-1., 2.], # node 1
[1., 0.], [1., -2.]] # node 2
input_labels = [0, 1, 2, 3]
params = tensor_forest.ForestHParams(
num_classes=4,
num_features=2,
num_trees=10,
max_nodes=1000,
split_after_samples=25).fill()
graph_builder = tensor_forest.RandomForestGraphs(params)
graph = graph_builder.training_graph(input_data, input_labels)
self.assertTrue(isinstance(graph, ops.Operation))
def testTrainingConstructionRegression(self):
input_data = [[-1., 0.], [-1., 2.], # node 1
[1., 0.], [1., -2.]] # node 2
input_labels = [0, 1, 2, 3]
params = tensor_forest.ForestHParams(
num_classes=4,
num_features=2,
num_trees=10,
max_nodes=1000,
split_after_samples=25,
regression=True).fill()
graph_builder = tensor_forest.RandomForestGraphs(params)
graph = graph_builder.training_graph(input_data, input_labels)
self.assertTrue(isinstance(graph, ops.Operation))
def testInferenceConstruction(self):
input_data = [[-1., 0.], [-1., 2.], # node 1
[1., 0.], [1., -2.]] # node 2
params = tensor_forest.ForestHParams(
num_classes=4,
num_features=2,
num_trees=10,
max_nodes=1000,
split_after_samples=25).fill()
graph_builder = tensor_forest.RandomForestGraphs(params)
probs, paths, var = graph_builder.inference_graph(input_data)
self.assertTrue(isinstance(probs, ops.Tensor))
self.assertTrue(isinstance(paths, ops.Tensor))
self.assertIsNone(var)
def testTrainingConstructionClassificationSparse(self):
input_data = sparse_tensor.SparseTensor(
indices=[[0, 0], [0, 3], [1, 0], [1, 7], [2, 1], [3, 9]],
values=[-1.0, 0.0, -1., 2., 1., -2.0],
dense_shape=[4, 10])
input_labels = [0, 1, 2, 3]
params = tensor_forest.ForestHParams(
num_classes=4,
num_features=10,
num_trees=10,
max_nodes=1000,
split_after_samples=25).fill()
graph_builder = tensor_forest.RandomForestGraphs(params)
graph = graph_builder.training_graph(input_data, input_labels)
self.assertTrue(isinstance(graph, ops.Operation))
def testInferenceConstructionSparse(self):
input_data = sparse_tensor.SparseTensor(
indices=[[0, 0], [0, 3],
[1, 0], [1, 7],
[2, 1],
[3, 9]],
values=[-1.0, 0.0,
-1., 2.,
1.,
-2.0],
dense_shape=[4, 10])
params = tensor_forest.ForestHParams(
num_classes=4,
num_features=10,
num_trees=10,
max_nodes=1000,
regression=True,
split_after_samples=25).fill()
graph_builder = tensor_forest.RandomForestGraphs(params)
probs, paths, var = graph_builder.inference_graph(input_data)
self.assertTrue(isinstance(probs, ops.Tensor))
self.assertTrue(isinstance(paths, ops.Tensor))
self.assertTrue(isinstance(var, ops.Tensor))
if __name__ == "__main__":
googletest.main()
|
apache-2.0
|
wkubiak/grpc
|
src/python/src/grpc/framework/alpha/interfaces.py
|
36
|
13412
|
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Interfaces of GRPC."""
import abc
import enum
# exceptions is referenced from specification in this module.
from grpc.framework.alpha import exceptions # pylint: disable=unused-import
from grpc.framework.foundation import activated
from grpc.framework.foundation import future
@enum.unique
class Cardinality(enum.Enum):
"""Constants for the four cardinalities of RPC."""
UNARY_UNARY = 'request-unary/response-unary'
UNARY_STREAM = 'request-unary/response-streaming'
STREAM_UNARY = 'request-streaming/response-unary'
STREAM_STREAM = 'request-streaming/response-streaming'
@enum.unique
class Abortion(enum.Enum):
"""Categories of RPC abortion."""
CANCELLED = 'cancelled'
EXPIRED = 'expired'
NETWORK_FAILURE = 'network failure'
SERVICED_FAILURE = 'serviced failure'
SERVICER_FAILURE = 'servicer failure'
class CancellableIterator(object):
"""Implements the Iterator protocol and affords a cancel method."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def __iter__(self):
"""Returns the self object in accordance with the Iterator protocol."""
raise NotImplementedError()
@abc.abstractmethod
def next(self):
"""Returns a value or raises StopIteration per the Iterator protocol."""
raise NotImplementedError()
@abc.abstractmethod
def cancel(self):
"""Requests cancellation of whatever computation underlies this iterator."""
raise NotImplementedError()
class RpcContext(object):
"""Provides RPC-related information and action."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def is_active(self):
"""Describes whether the RPC is active or has terminated."""
raise NotImplementedError()
@abc.abstractmethod
def time_remaining(self):
"""Describes the length of allowed time remaining for the RPC.
Returns:
A nonnegative float indicating the length of allowed time in seconds
remaining for the RPC to complete before it is considered to have timed
out.
"""
raise NotImplementedError()
@abc.abstractmethod
def add_abortion_callback(self, abortion_callback):
"""Registers a callback to be called if the RPC is aborted.
Args:
abortion_callback: A callable to be called and passed an Abortion value
in the event of RPC abortion.
"""
raise NotImplementedError()
class UnaryUnarySyncAsync(object):
"""Affords invoking a unary-unary RPC synchronously or asynchronously.
Values implementing this interface are directly callable and present an
"async" method. Both calls take a request value and a numeric timeout.
Direct invocation of a value of this type invokes its associated RPC and
blocks until the RPC's response is available. Calling the "async" method
of a value of this type invokes its associated RPC and immediately returns a
future.Future bound to the asynchronous execution of the RPC.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def __call__(self, request, timeout):
"""Synchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
Returns:
The response value for the RPC.
Raises:
exceptions.RpcError: Indicating that the RPC was aborted.
"""
raise NotImplementedError()
@abc.abstractmethod
def async(self, request, timeout):
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
Returns:
A future.Future representing the RPC. In the event of RPC completion, the
returned Future's result value will be the response value of the RPC.
In the event of RPC abortion, the returned Future's exception value
will be an exceptions.RpcError.
"""
raise NotImplementedError()
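# A minimal usage sketch for a UnaryUnarySyncAsync value (names here are
# hypothetical; concrete values are obtained from a Stub, described below):
#
# response = stub.Method(request, 5.0) # blocks until the response arrives
# response_future = stub.Method.async(request, 5.0) # returns immediately
# response = response_future.result() # a future.Future, per the docstring above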
class StreamUnarySyncAsync(object):
"""Affords invoking a stream-unary RPC synchronously or asynchronously.
Values implementing this interface are directly callable and present an
"async" method. Both calls take an iterator of request values and a numeric
timeout. Direct invocation of a value of this type invokes its associated RPC
and blocks until the RPC's response is available. Calling the "async" method
of a value of this type invokes its associated RPC and immediately returns a
future.Future bound to the asynchronous execution of the RPC.
"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def __call__(self, request_iterator, timeout):
"""Synchronously invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
Returns:
The response value for the RPC.
Raises:
exceptions.RpcError: Indicating that the RPC was aborted.
"""
raise NotImplementedError()
@abc.abstractmethod
def async(self, request_iterator, timeout):
"""Asynchronously invokes the underlying RPC.
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: A duration of time in seconds to allow for the RPC.
Returns:
A future.Future representing the RPC. In the event of RPC completion, the
returned Future's result value will be the response value of the RPC.
In the event of RPC abortion, the returned Future's exception value
will be an exceptions.RpcError.
"""
raise NotImplementedError()
class RpcMethodDescription(object):
"""A type for the common aspects of RPC method descriptions."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def cardinality(self):
"""Identifies the cardinality of this RpcMethodDescription.
Returns:
A Cardinality value identifying whether or not this
RpcMethodDescription is request-unary or request-streaming and
whether or not it is response-unary or response-streaming.
"""
raise NotImplementedError()
class RpcMethodInvocationDescription(RpcMethodDescription):
"""Invocation-side description of an RPC method."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def serialize_request(self, request):
"""Serializes a request value.
Args:
request: A request value appropriate for the RPC method described by this
RpcMethodInvocationDescription.
Returns:
The serialization of the given request value as a
bytestring.
"""
raise NotImplementedError()
@abc.abstractmethod
def deserialize_response(self, serialized_response):
"""Deserializes a response value.
Args:
serialized_response: A bytestring that is the serialization of a response
value appropriate for the RPC method described by this
RpcMethodInvocationDescription.
Returns:
A response value corresponding to the given bytestring.
"""
raise NotImplementedError()
class RpcMethodServiceDescription(RpcMethodDescription):
"""Service-side description of an RPC method."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def deserialize_request(self, serialized_request):
"""Deserializes a request value.
Args:
serialized_request: A bytestring that is the serialization of a request
value appropriate for the RPC method described by this
RpcMethodServiceDescription.
Returns:
A request value corresponding to the given bytestring.
"""
raise NotImplementedError()
@abc.abstractmethod
def serialize_response(self, response):
"""Serializes a response value.
Args:
response: A response value appropriate for the RPC method described by
this RpcMethodServiceDescription.
Returns:
The serialization of the given response value as a
bytestring.
"""
raise NotImplementedError()
@abc.abstractmethod
def service_unary_unary(self, request, context):
"""Carries out this RPC.
This method may only be called if the cardinality of this
RpcMethodServiceDescription is Cardinality.UNARY_UNARY.
Args:
request: A request value appropriate for the RPC method described by this
RpcMethodServiceDescription.
context: An RpcContext object for the RPC.
Returns:
A response value appropriate for the RPC method described by this
RpcMethodServiceDescription.
"""
raise NotImplementedError()
@abc.abstractmethod
def service_unary_stream(self, request, context):
"""Carries out this RPC.
This method may only be called if the cardinality of this
RpcMethodServiceDescription is Cardinality.UNARY_STREAM.
Args:
request: A request value appropriate for the RPC method described by this
RpcMethodServiceDescription.
context: An RpcContext object for the RPC.
Yields:
Zero or more response values appropriate for the RPC method described by
this RpcMethodServiceDescription.
"""
raise NotImplementedError()
@abc.abstractmethod
def service_stream_unary(self, request_iterator, context):
"""Carries out this RPC.
This method may only be called if the cardinality of this
RpcMethodServiceDescription is Cardinality.STREAM_UNARY.
Args:
request_iterator: An iterator of request values appropriate for the RPC
method described by this RpcMethodServiceDescription.
context: An RpcContext object for the RPC.
Returns:
A response value appropriate for the RPC method described by this
RpcMethodServiceDescription.
"""
raise NotImplementedError()
@abc.abstractmethod
def service_stream_stream(self, request_iterator, context):
"""Carries out this RPC.
This method may only be called if the cardinality of this
RpcMethodServiceDescription is Cardinality.STREAM_STREAM.
Args:
request_iterator: An iterator of request values appropriate for the RPC
method described by this RpcMethodServiceDescription.
context: An RpcContext object for the RPC.
Yields:
Zero or more response values appropriate for the RPC method described by
this RpcMethodServiceDescription.
"""
raise NotImplementedError()
class Stub(object):
"""A stub with callable RPC method names for attributes.
Instances of this type are context managers and only afford RPC invocation
when used in context.
Instances of this type, when used in context, respond to attribute access
as follows: if the requested attribute is the name of a unary-unary RPC
method, the value of the attribute will be a UnaryUnarySyncAsync with which
to invoke the RPC method. If the requested attribute is the name of a
unary-stream RPC method, the value of the attribute will be a callable taking
a request object and a timeout parameter and returning a CancellableIterator
that yields the response values of the RPC. If the requested attribute is the
name of a stream-unary RPC method, the value of the attribute will be a
StreamUnarySyncAsync with which to invoke the RPC method. If the requested
attribute is the name of a stream-stream RPC method, the value of the
attribute will be a callable taking an iterator of request objects and a
timeout and returning a CancellableIterator that yields the response values
of the RPC.
In all cases, abortion is indicated by the raising of exceptions.RpcError,
exceptions.CancellationError, or exceptions.ExpirationError.
"""
__metaclass__ = abc.ABCMeta
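# A minimal sketch of Stub usage (method names are hypothetical; a concrete
# stub comes from this package's construction helpers, which are not part of
# this module):
#
# with stub:
#     response = stub.UnaryUnaryMethod(request, timeout)
#     for response in stub.UnaryStreamMethod(request, timeout):
#         handle(response)  # a CancellableIterator yields the responses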
class Server(activated.Activated):
"""A GRPC Server."""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def port(self):
"""Reports the port on which the server is serving.
This method may only be called while the server is activated.
Returns:
The port on which the server is serving.
"""
raise NotImplementedError()
|
bsd-3-clause
|
nosuchtim/VizBench
|
src/PyLoopyCam/testit.py
|
1
|
5268
|
"""
Copyright (c) 2015, Tim Thompson
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of Tim Thompson, nosuch.com, nor the names of
any contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import sys
import time
import traceback
import thread
import threading
import copy
import asyncore
import asynchat
import socket
import re
import xml.dom.minidom as xmldom
import string
import pygame.pypm
import os.path
import os, pygame
import pickle
import random
from os.path import isdir, isfile, isabs, abspath
from urllib import quote, unquote
from threading import *
from ctypes import *
from time import sleep
from Queue import Queue, Empty
from xml.sax import saxutils
from xml.dom import Node
from traceback import format_exc
from dircache import listdir
from pygame.locals import *
from thread import *
from ffff import *
global debug
debug = False
global debugosc
debugosc = False
global debugosc2
debugosc2 = False
class NthEventServer(Thread):
"""
Provides an event stream that can serve multiple listeners, keeping
track of what fingers are currently down, smoothing drag motion, etc.
"""
oneServer = None
def __init__(self):
Thread.__init__(self)
self.setDaemon(True)
NthEventServer.oneServer = self
print "NthEventServer.oneServer = ", NthEventServer.oneServer
self.dispenser = PushedEventDispenser()
self.throttle = 0.005
self.throttle = 0.0
self.inputs = {}
self.outputs = {}
self.cv = threading.Condition()
self.events = {}
self.firstevent = 0
self.nextevent = 0
self.osc_recipients = {"music":[], "graphic":[]}
self.start()
self.too_old_seconds = 30.0
self.event_inputs = {}
self.forward_inputs = {}
self.forward_finger = None
self.tm0 = time.time()
self.osc_count = 0
def send_osc(self, o, apptype):
(msg_addr, msg_data) = o
if msg_addr == "":
print "No msg_addr value in send_osc?"
return
now = time.time()
self.osc_count += 1
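# Once per second, report the observed OSC message rate and reset the counter.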
if now - self.tm0 > 1.0:
print "OSC Per second = ", self.osc_count
self.osc_count = 0
self.tm0 = now
msg_addr = str(msg_addr)
b = createBinaryMsg(msg_addr, msg_data)
# print "createBinary msg_addr=",msg_addr," msg_data=",msg_data
print("SHOULD BE sending %s OSC=%s" % (apptype, o.__str__()))
# r.osc_socket.sendto(b, (r.osc_addr, r.osc_port))
def main():
debug = True
httpaddr = "127.0.0.1"
httpport = 7777
rootdir = None
print "SYS.ARGV len=", len(sys.argv)
argn = len(sys.argv)
if len(sys.argv) == 1:
print "NO arguments..."
else:
argn = 1
if sys.argv[argn] == "-d":
debug = True
print "Debug is True"
argn += 1
else:
debug = False
argn += 1
for i in range(argn, len(sys.argv)):
a = sys.argv[i]
print("a = ", a)
if a.startswith("rootdir:"):
rootdir = abspath(a[8:])
elif a.startswith("httpaddr:"):
httpaddr = a[9:]
elif a.startswith("httpport:"):
httpport = int(a[9:])
try:
import os
position = (-800, 0)
position = (600, 360)
os.environ['SDL_VIDEO_WINDOW_POS'] = str(position[0]) + "," + str(position[1])
pygame.init()
width = 250
height = 500
flags = pygame.SRCALPHA
# from panel import NthControlPanel
# ui = NthControlPanel(width, height, flags)
# time.sleep(1.0)
# pygame.event.set_grab(True)
try:
ffff = Ffff("localhost",80)
except:
print "EXCEPT caught in creating Ffff! Exception=", format_exc()
return # ffff is undefined at this point, so nothing below can run
plugin = ffff.get_ffgl("Twisted")
param = plugin.get_param("Twirl")
# ffff.set_all_params(plugin,1.0)
for nm in plugin.param:
p = plugin.param[nm]
val = random.random() % 1.0
ffff.change_plugin_param_val(plugin,p,val)
except KeyboardInterrupt:
print("KeyboardInterrupt received...\n");
# server.shutdown_quick()
except:
s = format_exc()
if not re.search(".*shutdown_quick.*", s):
print("Exception while running myserver?\n");
print(s)
# server.shutdown_quick()
if __name__ == '__main__':
main()
# import cProfile
# cProfile.run('main()')
|
mit
|
courtarro/gnuradio
|
grc/gui/Block.py
|
11
|
13166
|
"""
Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
import pygtk
pygtk.require('2.0')
import gtk
import pango
from . import Actions, Colors, Utils, Constants
from . Element import Element
from ..core.Param import num_to_str
from ..core.utils import odict
from ..core.utils.complexity import calculate_flowgraph_complexity
from ..core.Block import Block as _Block
BLOCK_MARKUP_TMPL="""\
#set $foreground = $block.is_valid() and 'black' or 'red'
<span foreground="$foreground" font_desc="$font"><b>$encode($block.get_name())</b></span>"""
# Includes the additional complexity markup if enabled
COMMENT_COMPLEXITY_MARKUP_TMPL="""\
#set $foreground = $block.get_enabled() and '#444' or '#888'
#if $complexity
<span foreground="#444" size="medium" font_desc="$font"><b>$encode($complexity)</b></span>#slurp
#end if
#if $complexity and $comment
<span></span>
#end if
#if $comment
<span foreground="$foreground" font_desc="$font">$encode($comment)</span>#slurp
#end if
"""
class Block(Element, _Block):
"""The graphical signal block."""
def __init__(self, flow_graph, n):
"""
Block constructor.
Add graphics related params to the block.
"""
_Block.__init__(self, flow_graph, n)
self.W = 0
self.H = 0
#add the position param
self.get_params().append(self.get_parent().get_parent().Param(
block=self,
n=odict({
'name': 'GUI Coordinate',
'key': '_coordinate',
'type': 'raw',
'value': '(0, 0)',
'hide': 'all',
})
))
self.get_params().append(self.get_parent().get_parent().Param(
block=self,
n=odict({
'name': 'GUI Rotation',
'key': '_rotation',
'type': 'raw',
'value': '0',
'hide': 'all',
})
))
Element.__init__(self)
self._comment_pixmap = None
self.has_busses = [False, False] # source, sink
def get_coordinate(self):
"""
Get the coordinate from the position param.
Returns:
the coordinate tuple (x, y) or (0, 0) if failure
"""
proximity = Constants.BORDER_PROXIMITY_SENSITIVITY
try: #should evaluate to tuple
x, y = Utils.scale(eval(self.get_param('_coordinate').get_value()))
fgW, fgH = self.get_parent().get_size()
if x <= 0:
x = 0
elif x >= fgW - proximity:
x = fgW - proximity
if y <= 0:
y = 0
elif y >= fgH - proximity:
y = fgH - proximity
return (x, y)
except:
self.set_coordinate((0, 0))
return (0, 0)
def set_coordinate(self, coor):
"""
Set the coordinate into the position param.
Args:
coor: the coordinate tuple (x, y)
"""
if Actions.TOGGLE_SNAP_TO_GRID.get_active():
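# Offset by half the block height along the short axis so that the block's
# centerline, rather than its corner, snaps to the grid.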
offset_x, offset_y = (0, self.H/2) if self.is_horizontal() else (self.H/2, 0)
coor = (
Utils.align_to_grid(coor[0] + offset_x) - offset_x,
Utils.align_to_grid(coor[1] + offset_y) - offset_y
)
self.get_param('_coordinate').set_value(str(Utils.scale(coor, reverse=True)))
def bound_move_delta(self, delta_coor):
"""
Limit potential moves from exceeding the bounds of the canvas
Args:
delta_coor: requested delta coordinate (dX, dY) to move
Returns:
The delta coordinate possible to move while keeping the block on the canvas
or the input (dX, dY) on failure
"""
dX, dY = delta_coor
try:
fgW, fgH = self.get_parent().get_size()
x, y = Utils.scale(eval(self.get_param('_coordinate').get_value()))
if self.is_horizontal():
sW, sH = self.W, self.H
else:
sW, sH = self.H, self.W
if x + dX < 0:
dX = -x
elif dX + x + sW >= fgW:
dX = fgW - x - sW
if y + dY < 0:
dY = -y
elif dY + y + sH >= fgH:
dY = fgH - y - sH
except:
pass
return ( dX, dY )
def get_rotation(self):
"""
Get the rotation from the position param.
Returns:
the rotation in degrees or 0 if failure
"""
try: #should evaluate to dict
rotation = eval(self.get_param('_rotation').get_value())
return int(rotation)
except:
self.set_rotation(Constants.POSSIBLE_ROTATIONS[0])
return Constants.POSSIBLE_ROTATIONS[0]
def set_rotation(self, rot):
"""
Set the rotation into the position param.
Args:
rot: the rotation in degrees
"""
self.get_param('_rotation').set_value(str(rot))
def create_shapes(self):
"""Update the block, parameters, and ports when a change occurs."""
Element.create_shapes(self)
if self.is_horizontal(): self.add_area((0, 0), (self.W, self.H))
elif self.is_vertical(): self.add_area((0, 0), (self.H, self.W))
def create_labels(self):
"""Create the labels for the signal block."""
Element.create_labels(self)
self._bg_color = self.is_dummy_block and Colors.MISSING_BLOCK_BACKGROUND_COLOR or \
self.get_bypassed() and Colors.BLOCK_BYPASSED_COLOR or \
self.get_enabled() and Colors.BLOCK_ENABLED_COLOR or Colors.BLOCK_DISABLED_COLOR
layouts = list()
#create the main layout
layout = gtk.DrawingArea().create_pango_layout('')
layouts.append(layout)
layout.set_markup(Utils.parse_template(BLOCK_MARKUP_TMPL, block=self, font=Constants.BLOCK_FONT))
self.label_width, self.label_height = layout.get_pixel_size()
#display the params
if self.is_dummy_block:
markups = [
'<span foreground="black" font_desc="{font}"><b>key: </b>{key}</span>'
''.format(font=Constants.PARAM_FONT, key=self._key)
]
else:
markups = [param.get_markup() for param in self.get_params() if param.get_hide() not in ('all', 'part')]
if markups:
layout = gtk.DrawingArea().create_pango_layout('')
layout.set_spacing(Constants.LABEL_SEPARATION * pango.SCALE)
layout.set_markup('\n'.join(markups))
layouts.append(layout)
w, h = layout.get_pixel_size()
self.label_width = max(w, self.label_width)
self.label_height += h + Constants.LABEL_SEPARATION
width = self.label_width
height = self.label_height
#setup the pixmap
pixmap = self.get_parent().new_pixmap(width, height)
gc = pixmap.new_gc()
gc.set_foreground(self._bg_color)
pixmap.draw_rectangle(gc, True, 0, 0, width, height)
#draw the layouts
h_off = 0
for i,layout in enumerate(layouts):
w,h = layout.get_pixel_size()
if i == 0: w_off = (width-w)/2
else: w_off = 0
pixmap.draw_layout(gc, w_off, h_off, layout)
h_off = h + h_off + Constants.LABEL_SEPARATION
#create vertical and horizontal pixmaps
self.horizontal_label = pixmap
if self.is_vertical():
self.vertical_label = self.get_parent().new_pixmap(height, width)
Utils.rotate_pixmap(gc, self.horizontal_label, self.vertical_label)
#calculate width and height needed
W = self.label_width + 2 * Constants.BLOCK_LABEL_PADDING
def get_min_height_for_ports():
visible_ports = filter(lambda p: not p.get_hide(), ports)
min_height = 2*Constants.PORT_BORDER_SEPARATION + len(visible_ports) * Constants.PORT_SEPARATION
if visible_ports:
min_height -= ports[0].H
return min_height
H = max(
[ # labels
self.label_height + 2 * Constants.BLOCK_LABEL_PADDING
] +
[ # ports
get_min_height_for_ports() for ports in (self.get_sources_gui(), self.get_sinks_gui())
] +
[ # bus ports only
2 * Constants.PORT_BORDER_SEPARATION +
sum([port.H + Constants.PORT_SPACING for port in ports if port.get_type() == 'bus']) - Constants.PORT_SPACING
for ports in (self.get_sources_gui(), self.get_sinks_gui())
]
)
self.W, self.H = Utils.align_to_grid((W, H))
self.has_busses = [
any(port.get_type() == 'bus' for port in ports)
for ports in (self.get_sources_gui(), self.get_sinks_gui())
]
self.create_comment_label()
def create_comment_label(self):
comment = self.get_comment() # Returns None if there are no comments
complexity = None
# Show the flowgraph complexity on the top block if enabled
if Actions.TOGGLE_SHOW_FLOWGRAPH_COMPLEXITY.get_active() and self.get_key() == "options":
complexity = calculate_flowgraph_complexity(self.get_parent())
complexity = "Complexity: {0}bal".format(num_to_str(complexity))
layout = gtk.DrawingArea().create_pango_layout('')
layout.set_markup(Utils.parse_template(COMMENT_COMPLEXITY_MARKUP_TMPL,
block=self,
comment=comment,
complexity=complexity,
font=Constants.BLOCK_FONT))
# Setup the pixel map. Make sure that layout not empty
width, height = layout.get_pixel_size()
if width and height:
padding = Constants.BLOCK_LABEL_PADDING
pixmap = self.get_parent().new_pixmap(width + 2 * padding,
height + 2 * padding)
gc = pixmap.new_gc()
gc.set_foreground(Colors.COMMENT_BACKGROUND_COLOR)
pixmap.draw_rectangle(
gc, True, 0, 0, width + 2 * padding, height + 2 * padding)
pixmap.draw_layout(gc, padding, padding, layout)
self._comment_pixmap = pixmap
else:
self._comment_pixmap = None
def draw(self, gc, window):
"""
Draw the signal block with label and inputs/outputs.
Args:
gc: the graphics context
window: the gtk window to draw on
"""
# draw ports
for port in self.get_ports_gui():
port.draw(gc, window)
# draw main block
x, y = self.get_coordinate()
Element.draw(
self, gc, window, bg_color=self._bg_color,
border_color=self.is_highlighted() and Colors.HIGHLIGHT_COLOR or
self.is_dummy_block and Colors.MISSING_BLOCK_BORDER_COLOR or Colors.BORDER_COLOR,
)
#draw label image
if self.is_horizontal():
window.draw_drawable(gc, self.horizontal_label, 0, 0, x+Constants.BLOCK_LABEL_PADDING, y+(self.H-self.label_height)/2, -1, -1)
elif self.is_vertical():
window.draw_drawable(gc, self.vertical_label, 0, 0, x+(self.H-self.label_height)/2, y+Constants.BLOCK_LABEL_PADDING, -1, -1)
def what_is_selected(self, coor, coor_m=None):
"""
Get the element that is selected.
Args:
coor: the (x,y) tuple
coor_m: the (x_m, y_m) tuple
Returns:
this block, a port, or None
"""
for port in self.get_ports_gui():
port_selected = port.what_is_selected(coor, coor_m)
if port_selected: return port_selected
return Element.what_is_selected(self, coor, coor_m)
def draw_comment(self, gc, window):
if not self._comment_pixmap:
return
x, y = self.get_coordinate()
if self.is_horizontal():
y += self.H + Constants.BLOCK_LABEL_PADDING
else:
x += self.H + Constants.BLOCK_LABEL_PADDING
window.draw_drawable(gc, self._comment_pixmap, 0, 0, x, y, -1, -1)
|
gpl-3.0
|
AutorestCI/azure-sdk-for-python
|
azure-mgmt-loganalytics/azure/mgmt/loganalytics/models/storage_account.py
|
2
|
1050
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class StorageAccount(Model):
"""Describes a storage account connection.
:param id: The Azure Resource Manager ID of the storage account resource.
:type id: str
:param key: The storage account key.
:type key: str
"""
_validation = {
'id': {'required': True},
'key': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'key': {'key': 'key', 'type': 'str'},
}
def __init__(self, id, key):
self.id = id
self.key = key
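# A minimal usage sketch (placeholder values; both arguments are required
# per the _validation map above):
#
# account = StorageAccount(
#     id='/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Storage/storageAccounts/<name>',
#     key='<storage-account-key>')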
|
mit
|
AnimeshSinha1309/WebsiteEdunet
|
WebsiteEdunet/env/Lib/site-packages/django/contrib/gis/gdal/base.py
|
654
|
1179
|
from ctypes import c_void_p
from django.contrib.gis.gdal.error import GDALException
from django.utils import six
class GDALBase(object):
"""
Base object for GDAL objects that has a pointer access property
that controls access to the underlying C pointer.
"""
# Initially the pointer is NULL.
_ptr = None
# Default allowed pointer type.
ptr_type = c_void_p
# Pointer access property.
def _get_ptr(self):
# Raise an exception if the pointer isn't valid; we don't
# want to be passing NULL pointers to routines --
# that's very bad.
if self._ptr:
return self._ptr
else:
raise GDALException('GDAL %s pointer no longer valid.' % self.__class__.__name__)
def _set_ptr(self, ptr):
# Only allow the pointer to be set with pointers of the
# compatible type or None (NULL).
if isinstance(ptr, six.integer_types):
self._ptr = self.ptr_type(ptr)
elif ptr is None or isinstance(ptr, self.ptr_type):
self._ptr = ptr
else:
raise TypeError('Incompatible pointer type')
ptr = property(_get_ptr, _set_ptr)
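# A minimal sketch of the intended subclass pattern (the subclass and the
# capi call shown are hypothetical):
#
# class Driver(GDALBase):
#     def __init__(self, ptr):
#         self.ptr = ptr  # type-checked by _set_ptr
#     def name(self):
#         return capi.get_name(self.ptr)  # _get_ptr guards against NULL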
|
mit
|
juntalis/aio-pika
|
docs/source/rabbitmq-tutorial/examples/3-publish-subscribe/receive_logs.py
|
1
|
1064
|
import asyncio
from aio_pika import connect, IncomingMessage, ExchangeType
loop = asyncio.get_event_loop()
def on_message(message: IncomingMessage):
with message.process():
print("[x] %r" % message.body)
async def main():
# Perform connection
connection = await connect("amqp://guest:guest@localhost/", loop=loop)
# Creating a channel
channel = await connection.channel()
await channel.set_qos(prefetch_count=1)
logs_exchange = await channel.declare_exchange(
'logs',
ExchangeType.FANOUT
)
# Declaring queue
queue = await channel.declare_queue(exclusive=True)
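# exclusive=True asks the broker for a fresh, randomly named queue that is
# deleted when this connection closes, so every consumer sees its own copy
# of the log stream.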
# Binding the queue to the exchange
await queue.bind(logs_exchange)
# Start consuming from the freshly declared queue
queue.consume(on_message)
if __name__ == "__main__":
loop.create_task(main())
# we enter a never-ending loop that waits for data and runs callbacks whenever necessary.
print(' [*] Waiting for logs. To exit press CTRL+C')
loop.run_forever()
|
apache-2.0
|
quantmind/pulsar-queue
|
pq/utils/version.py
|
7
|
1595
|
import datetime
import os
import subprocess
symbol = {'alpha': 'a', 'beta': 'b'}
def get_version(version, filename=None):
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
main = '.'.join(map(str, version[:3]))
sub = ''
if version[3] == 'alpha' and version[4] == 0:
git_changeset = get_git_changeset(filename)
if git_changeset:
sub = '.dev%s' % git_changeset
if version[3] != 'final' and not sub:
sub = '%s%s' % (symbol.get(version[3], version[3]), version[4])
return main + sub
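# A few worked examples (hypothetical version tuples):
# get_version((1, 2, 3, 'final', 0)) -> '1.2.3'
# get_version((1, 2, 3, 'beta', 1)) -> '1.2.3b1'
# get_version((1, 2, 3, 'alpha', 0)) -> '1.2.3.devYYYYMMDDHHMMSS' (when git metadata is available)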
def sh(command, cwd=None):
return subprocess.Popen(command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True,
cwd=cwd,
universal_newlines=True).communicate()[0]
def get_git_changeset(filename=None):
"""Returns a numeric identifier of the latest git changeset.
The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
This value isn't guaranteed to be unique, but collisions are very unlikely,
so it's sufficient for generating the development version numbers.
"""
dirname = os.path.dirname(filename or __file__)
git_show = sh('git show --pretty=format:%ct --quiet HEAD',
cwd=dirname)
timestamp = git_show.partition('\n')[0]
try:
timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
except ValueError:
return None
return timestamp.strftime('%Y%m%d%H%M%S')
|
bsd-3-clause
|
cogmission/nupic
|
src/nupic/frameworks/opf/predictionmetricsmanager.py
|
32
|
16625
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
# This script implements PredictionMetricsManager, a helper class that handles
# pooling of multiple record and field prediction metrics calculators
import logging
import copy
import pprint
from collections import (namedtuple,
deque)
from nupic.data.inference_shifter import InferenceShifter
from nupic.frameworks.opf import metrics
from opfutils import InferenceType, InferenceElement
# MetricValueElement class
#
# Represents an individual metric value element in a list returned by
# PredictionMetricsManager.getMetrics()
#
# spec: A MetricSpec value (a copy) that was used to construct
# the metric instance that generated the metric value
# value: The metric value
MetricValueElement = namedtuple("MetricValueElement", ["spec", "value"])
class MetricsManager(object):
""" This is a class to handle the computation of metrics properly. This class
takes in an inferenceType, and it assumes that it is associated with a single
model """
# Map from inference element to sensor input element. This helps us find the
# appropriate ground truth field for a given inference element
def __init__(self, metricSpecs, fieldInfo, inferenceType):
"""
Constructs a Metrics Manager
Parameters:
-----------------------------------------------------------------------
metricSpecs: A sequence of MetricSpecs that specify which metrics should
be calculated
inferenceType: An opfutils.inferenceType value that specifies the inference
type of the associated model. This affects how metrics are
calculated. For example, temporal models save the inference
from the previous timestep to match it to the ground truth
value in the current timestep
"""
self.__metricSpecs = []
self.__metrics = []
self.__metricLabels = []
# Maps field names to indices. Useful for looking up input/predictions by
# field name
self.__fieldNameIndexMap = dict( [(info.name, i) \
for i, info in enumerate(fieldInfo)] )
self.__constructMetricsModules(metricSpecs)
self.__currentGroundTruth = None
self.__currentInference = None
self.__currentResult = None
self.__isTemporal = InferenceType.isTemporal(inferenceType)
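# Temporal models emit inferences about the next timestep, so an
# InferenceShifter is used to line each inference up with the ground truth
# record it actually predicts.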
if self.__isTemporal:
self.__inferenceShifter = InferenceShifter()
def update(self, results):
"""
Compute the new metrics values, given the next inference/ground-truth values
Parameters:
-----------------------------------------------------------------------
results: An opfutils.ModelResult object that was computed during the last
iteration of the model
Returns: A dictionary where each key is the metric name and each value is
the metric's current scalar value.
"""
#print "\n\n---------------------------------------------------------------"
#print "Model results: \nrawInput:%s \ninferences:%s" % \
# (pprint.pformat(results.rawInput), pprint.pformat(results.inferences))
self._addResults(results)
if not self.__metricSpecs \
or self.__currentInference is None:
return {}
metricResults = {}
for metric, spec, label in zip(self.__metrics,
self.__metricSpecs,
self.__metricLabels):
inferenceElement = spec.inferenceElement
field = spec.field
groundTruth = self._getGroundTruth(inferenceElement)
inference = self._getInference(inferenceElement)
rawRecord = self._getRawGroundTruth()
result = self.__currentResult
if field:
if type(inference) in (list, tuple):
if field in self.__fieldNameIndexMap:
# NOTE: If the predicted field is not fed in at the bottom, we
# won't have it in our fieldNameIndexMap
fieldIndex = self.__fieldNameIndexMap[field]
inference = inference[fieldIndex]
else:
inference = None
if groundTruth is not None:
if type(groundTruth) in (list, tuple):
if field in self.__fieldNameIndexMap:
# NOTE: If the predicted field is not fed in at the bottom, we
# won't have it in our fieldNameIndexMap
fieldIndex = self.__fieldNameIndexMap[field]
groundTruth = groundTruth[fieldIndex]
else:
groundTruth = None
else:
# groundTruth could be a dict based off of field names
groundTruth = groundTruth[field]
metric.addInstance(groundTruth=groundTruth,
prediction=inference,
record=rawRecord,
result=result)
metricResults[label] = metric.getMetric()['value']
return metricResults
def getMetrics(self):
""" Gets the current metric values
Returns: A dictionary where each key is the metric name and each value is
the metric's current scalar value. Same as the output of update()
"""
result = {}
for metricObj, label in zip(self.__metrics, self.__metricLabels):
value = metricObj.getMetric()
result[label] = value['value']
return result
def getMetricDetails(self, metricLabel):
""" Gets detailed info about a given metric, in addition to its value. This
may including any statistics or auxilary data that are computed for a given
metric
Parameters:
-----------------------------------------------------------------------
metricLabel: The string label of the given metric (see metrics.MetricSpec)
Returns: A dictionary of metric information, as returned by
opf.metric.Metric.getMetric()
"""
try:
metricIndex = self.__metricLabels.index(metricLabel)
except ValueError: # list.index raises ValueError, not IndexError
return None
return self.__metrics[metricIndex].getMetric()
def getMetricLabels(self):
""" Return the list of labels for the metrics that are being calculated"""
return tuple(self.__metricLabels)
def _addResults(self, results):
"""
Stores the current model results in the manager's internal store
Parameters:
-----------------------------------------------------------------------
results: A ModelResults object that contains the current timestep's
input/inferences
"""
# -----------------------------------------------------------------------
# If the model potentially has temporal inferences.
if self.__isTemporal:
shiftedInferences = self.__inferenceShifter.shift(results).inferences
self.__currentResult = copy.deepcopy(results)
self.__currentResult.inferences = shiftedInferences
self.__currentInference = shiftedInferences
# -----------------------------------------------------------------------
# The current model has no temporal inferences.
else:
self.__currentResult = copy.deepcopy(results)
self.__currentInference = copy.deepcopy(results.inferences)
# -----------------------------------------------------------------------
# Save the current ground-truth results
self.__currentGroundTruth = copy.deepcopy(results)
def _getGroundTruth(self, inferenceElement):
"""
Get the actual value for this field
Parameters:
-----------------------------------------------------------------------
sensorInputElement: The inference element (part of the inference) that
is being used for this metric
"""
sensorInputElement = InferenceElement.getInputElement(inferenceElement)
if sensorInputElement is None:
return None
return getattr(self.__currentGroundTruth.sensorInput, sensorInputElement)
def _getInference(self, inferenceElement):
"""
Get what the inferred value for this field was
Parameters:
-----------------------------------------------------------------------
inferenceElement: The inference element (part of the inference) that
is being used for this metric
"""
if self.__currentInference is not None:
return self.__currentInference.get(inferenceElement, None)
return None
def _getRawGroundTruth(self):
"""
Get what the inferred value for this field was
Parameters:
-----------------------------------------------------------------------
inferenceElement: The inference element (part of the inference) that
is being used for this metric
"""
return self.__currentGroundTruth.rawInput
def __constructMetricsModules(self, metricSpecs):
"""
Creates the required metrics modules
Parameters:
-----------------------------------------------------------------------
metricSpecs:
A sequence of MetricSpec objects that specify which metric modules to
instantiate
"""
if not metricSpecs:
return
self.__metricSpecs = metricSpecs
for spec in metricSpecs:
if not InferenceElement.validate(spec.inferenceElement):
raise ValueError("Invalid inference element for metric spec: %r" %spec)
self.__metrics.append(metrics.getModule(spec))
self.__metricLabels.append(spec.getLabel())
def test():
_testMetricsMgr()
_testTemporalShift()
_testMetricLabels()
return
def _testMetricsMgr():
print "*Testing Metrics Managers*..."
from nupic.data.fieldmeta import (
FieldMetaInfo,
FieldMetaType,
FieldMetaSpecial)
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import ModelResult, SensorInput
onlineMetrics = (MetricSpec(metric="aae", inferenceElement='', \
field="consumption", params={}),)
print "TESTING METRICS MANAGER (BASIC PLUMBING TEST)..."
modelFieldMetaInfo = (
FieldMetaInfo(name='temperature',
type=FieldMetaType.float,
special=FieldMetaSpecial.none),
FieldMetaInfo(name='consumption',
type=FieldMetaType.float,
special=FieldMetaSpecial.none)
)
# -----------------------------------------------------------------------
# Test to make sure that invalid InferenceElements are caught
try:
MetricsManager(
metricSpecs=onlineMetrics,
fieldInfo=modelFieldMetaInfo,
inferenceType=InferenceType.TemporalNextStep)
except ValueError:
print "Caught bad inference element: PASS"
print
onlineMetrics = (MetricSpec(metric="aae",
inferenceElement=InferenceElement.prediction,
field="consumption", params={}),)
temporalMetrics = MetricsManager(
metricSpecs=onlineMetrics,
fieldInfo=modelFieldMetaInfo,
inferenceType=InferenceType.TemporalNextStep)
inputs = [
{
'groundTruthRow' : [9, 7],
'predictionsDict' : {
InferenceType.TemporalNextStep: [12, 17]
}
},
{
'groundTruthRow' : [12, 17],
'predictionsDict' : {
InferenceType.TemporalNextStep: [14, 19]
}
},
{
'groundTruthRow' : [14, 20],
'predictionsDict' : {
InferenceType.TemporalNextStep: [16, 21]
}
},
{
'groundTruthRow' : [9, 7],
'predictionsDict' : {
InferenceType.TemporalNextStep:None
}
},
]
for element in inputs:
groundTruthRow=element['groundTruthRow']
tPredictionRow=element['predictionsDict'][InferenceType.TemporalNextStep]
result = ModelResult(sensorInput=SensorInput(dataRow=groundTruthRow,
dataEncodings=None,
sequenceReset=0,
category=None),
inferences={'prediction':tPredictionRow})
temporalMetrics.update(result)
assert temporalMetrics.getMetrics().values()[0] == 15.0 / 3.0, \
"Expected %f, got %f" %(15.0/3.0,
temporalMetrics.getMetrics().values()[0])
print "ok"
return
def _testTemporalShift():
""" Test to see if the metrics manager correctly shifts records for multistep
prediction cases
"""
print "*Testing Multistep temporal shift*..."
from nupic.data.fieldmeta import (
FieldMetaInfo,
FieldMetaType,
FieldMetaSpecial)
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import ModelResult, SensorInput
onlineMetrics = ()
modelFieldMetaInfo = (
FieldMetaInfo(name='consumption',
type=FieldMetaType.float,
special=FieldMetaSpecial.none),)
mgr = MetricsManager(metricSpecs=onlineMetrics,
fieldInfo=modelFieldMetaInfo,
inferenceType=InferenceType.TemporalMultiStep)
groundTruths = [{'consumption':i} for i in range(10)]
oneStepInfs = reversed(range(10))
threeStepInfs = range(5, 15)
for iterNum, gt, os, ts in zip(xrange(10), groundTruths,
oneStepInfs, threeStepInfs):
inferences = {InferenceElement.multiStepPredictions:{1: os, 3: ts}}
sensorInput = SensorInput(dataDict = [gt])
result = ModelResult(sensorInput=sensorInput, inferences=inferences)
mgr.update(result)
assert mgr._getGroundTruth(InferenceElement.multiStepPredictions)[0] == gt
if iterNum < 1:
#assert mgr._getInference(InferenceElement.multiStepPredictions) is None
assert mgr._getInference(InferenceElement.multiStepPredictions)[1] is None
else:
prediction = mgr._getInference(InferenceElement.multiStepPredictions)[1]
assert prediction == 10 - iterNum
if iterNum < 3:
inference = mgr._getInference(InferenceElement.multiStepPredictions)
assert inference is None or inference[3] is None
else:
prediction = mgr._getInference(InferenceElement.multiStepPredictions)[3]
assert prediction == iterNum + 2
def _testMetricLabels():
print "\n*Testing Metric Label Generation*..."
from nupic.frameworks.opf.metrics import MetricSpec
testTuples = [
(MetricSpec('rmse', InferenceElement.prediction, 'consumption'),
"prediction:rmse:field=consumption"),
(MetricSpec('rmse', InferenceElement.classification),
"classification:rmse"),
(MetricSpec('rmse', InferenceElement.encodings, 'pounds',
params=dict(window=100)),
"encodings:rmse:window=100:field=pounds"),
(MetricSpec('aae', InferenceElement.prediction, 'pounds',
params=dict(window=100, paramA = 10.2, paramB = 20)),
"prediction:aae:paramA=10.2:paramB=20:window=100:field=pounds"),
(MetricSpec('aae', InferenceElement.prediction,'pounds',
params={'window':100, 'paramA':10.2, '1paramB':20}),
"prediction:aae:1paramB=20:paramA=10.2:window=100:field=pounds"),
(MetricSpec('aae', InferenceElement.prediction,'pounds',
params=dict(window=100, paramA = 10.2, paramB =-20)),
"prediction:aae:paramA=10.2:paramB=-20:window=100:field=pounds"),
(MetricSpec('aae', InferenceElement.prediction, 'pounds',
params=dict(window=100, paramA = 10.2, paramB ='square')),
"prediction:aae:paramA=10.2:paramB='square':window=100:field=pounds"),
]
for test in testTuples:
try:
assert test[0].getLabel() == test[1]
except:
print "Failed Creating label"
print "Expected %s \t Got %s" % (test[1], test[0].getLabel())
return
print "ok"
if __name__ == "__main__":
test()
|
agpl-3.0
|
saurabh3949/mxnet
|
tests/python/unittest/test_module.py
|
3
|
29730
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
import mxnet.ndarray as nd
from mxnet.test_utils import *
import numpy as np
from functools import reduce
from mxnet.module.executor_group import DataParallelExecutorGroup
from common import assertRaises
from collections import namedtuple
import numpy.random as rnd
def test_module_dtype():
dtype = np.float16
dshape = (3, 8, 7)
sym = mx.sym.Variable('data')
sym = mx.sym.Activation(data=sym, act_type='relu', __layout__='TNC')
mod = mx.mod.Module(sym, ('data',), None, context=[mx.cpu(0), mx.cpu(1)])
mod.bind(data_shapes=[mx.io.DataDesc('data', dshape, dtype, layout='TNC')])
mod.init_params()
mod.forward(mx.io.DataBatch(data=[mx.nd.ones(dshape, dtype=dtype)],
label=None))
mod.backward([mx.nd.ones(dshape, dtype=dtype)])
for x in mod.get_outputs():
assert x.dtype == dtype
def test_module_input_grads():
a = mx.sym.Variable('a', __layout__='NC')
b = mx.sym.Variable('b', __layout__='NC')
c = mx.sym.Variable('c', __layout__='NC')
c = a + 2 * b + 3 * c
net = mx.mod.Module(c, data_names=['b', 'c', 'a'], label_names=None,
context=[mx.cpu(0), mx.cpu(1)])
net.bind(data_shapes=[['b', (5, 5)], ['c', (5, 5)], ['a', (5, 5)]],
label_shapes=None, inputs_need_grad=True)
net.init_params()
net.forward(data_batch=mx.io.DataBatch(data=[nd.ones((5, 5)),
nd.ones((5, 5)),
nd.ones((5, 5))]))
net.backward(out_grads=[nd.ones((5, 5))])
input_grads = net.get_input_grads()
b_grad = input_grads[0].asnumpy()
c_grad = input_grads[1].asnumpy()
a_grad = input_grads[2].asnumpy()
assert np.all(a_grad == 1), a_grad
assert np.all(b_grad == 2), b_grad
assert np.all(c_grad == 3), c_grad
def test_module_layout():
sym = mx.sym.Variable('data')
sym = mx.sym.Activation(data=sym, act_type='relu', __layout__='TNC')
dshape = (3, 8, 7)
mod = mx.mod.Module(sym, ('data',), None, context=[mx.cpu(0), mx.cpu(1)])
mod.bind(data_shapes=[mx.io.DataDesc('data', dshape, layout='TNC')])
mod.init_params()
mod.forward(mx.io.DataBatch(data=[mx.nd.ones(dshape)],
label=None))
mod.backward([mx.nd.ones(dshape)])
assert mod.get_outputs()[0].shape == dshape
hdshape = (3, 4, 7)
for x in mod.get_outputs(merge_multi_context=False)[0]:
assert x.shape == hdshape
def test_save_load():
def dict_equ(a, b):
assert set(a) == set(b)
for k in a:
assert (a[k].asnumpy() == b[k].asnumpy()).all()
sym = mx.sym.Variable('data')
sym = mx.sym.FullyConnected(sym, num_hidden=100)
# single device
mod = mx.mod.Module(sym, ('data',))
mod.bind(data_shapes=[('data', (10, 10))])
mod.init_params()
mod.init_optimizer(optimizer_params={'learning_rate':0.1, 'momentum':0.9})
mod.update()
mod.save_checkpoint('test', 0, save_optimizer_states=True)
mod2 = mx.mod.Module.load('test', 0, load_optimizer_states=True, data_names=('data',))
mod2.bind(data_shapes=[('data', (10, 10))])
mod2.init_optimizer(optimizer_params={'learning_rate':0.1, 'momentum':0.9})
assert mod._symbol.tojson() == mod2._symbol.tojson()
dict_equ(mod.get_params()[0], mod2.get_params()[0])
dict_equ(mod._updater.states, mod2._updater.states)
# multi device
mod = mx.mod.Module(sym, ('data',), context=[mx.cpu(0), mx.cpu(1)])
mod.bind(data_shapes=[('data', (10, 10))])
mod.init_params()
mod.init_optimizer(optimizer_params={'learning_rate':0.1, 'momentum':0.9})
mod.update()
mod.save_checkpoint('test', 0, save_optimizer_states=True)
mod2 = mx.mod.Module.load('test', 0, load_optimizer_states=True, data_names=('data',))
mod2.bind(data_shapes=[('data', (10, 10))])
mod2.init_optimizer(optimizer_params={'learning_rate':0.1, 'momentum':0.9})
assert mod._symbol.tojson() == mod2._symbol.tojson()
dict_equ(mod.get_params()[0], mod2.get_params()[0])
dict_equ(mod._kvstore._updater.states, mod2._updater.states)
def test_module_reshape():
data = mx.sym.Variable('data')
sym = mx.sym.FullyConnected(data, num_hidden=20, name='fc')
dshape = (7, 20)
mod = mx.mod.Module(sym, ('data',), None, context=[mx.cpu(0), mx.cpu(1)])
mod.bind(data_shapes=[('data', dshape)])
mod.init_params()
mod.init_optimizer(optimizer_params={'learning_rate': 1})
mod.forward(mx.io.DataBatch(data=[mx.nd.ones(dshape)],
label=None))
mod.backward([mx.nd.ones(dshape)])
mod.update()
assert mod.get_outputs()[0].shape == dshape
assert (mod.get_params()[0]['fc_bias'].asnumpy() == -1).all()
dshape = (14, 20)
mod.reshape(data_shapes=[('data', dshape)])
mod.forward(mx.io.DataBatch(data=[mx.nd.ones(dshape)],
label=None))
mod.backward([mx.nd.ones(dshape)])
mod.update()
assert mod.get_outputs()[0].shape == dshape
assert (mod.get_params()[0]['fc_bias'].asnumpy() == -3).all()
def test_module_states():
stack = mx.rnn.SequentialRNNCell()
for i in range(2):
stack.add(mx.rnn.LSTMCell(num_hidden=20, prefix='lstm_l%d_'%i))
begin_state = stack.begin_state(func=mx.sym.Variable)
_, states = stack.unroll(10, begin_state=begin_state, inputs=mx.sym.Variable('data'))
state_names = [i.name for i in begin_state]
mod = mx.mod.Module(mx.sym.Group(states), context=[mx.cpu(0), mx.cpu(1)],
label_names=None, state_names=state_names)
mod.bind(data_shapes=[('data', (5, 10))], label_shapes=None, for_training=False)
mod.init_params()
batch = mx.io.DataBatch(data=[mx.nd.zeros((5, 10))], label=[])
mod.set_states(value=1)
mod.forward(batch)
out = mod.get_outputs(merge_multi_context=False)
out1 = mod.get_outputs(merge_multi_context=True)
mod.set_states(states=out)
mod.forward(batch)
out2 = mod.get_outputs(merge_multi_context=True)
for x1, x2 in zip(out1, out2):
assert not mx.test_utils.almost_equal(x1.asnumpy(), x2.asnumpy(), rtol=1e-3)
def test_module_switch_bucket():
vocab_dim = 5000
num_hidden = 100
num_embedding = 100
num_layer = 2
default_key = 10
test_key = 5
batch_size = 32
contexts = [mx.cpu(0)]
initializer = mx.init.Xavier(factor_type="in", magnitude=2.34)
#generate symbols for an LSTM network
def sym_gen(seq_len):
data = mx.sym.Variable('data')
label = mx.sym.Variable('softmax_label')
embed = mx.sym.Embedding(data=data, input_dim=vocab_dim,
output_dim=num_embedding, name='embed')
stack = mx.rnn.SequentialRNNCell()
for i in range(num_layer):
stack.add(mx.rnn.LSTMCell(num_hidden=num_hidden, prefix='lstm_l%d_'%i))
outputs, states = stack.unroll(seq_len, inputs=embed, merge_outputs=True)
pred = mx.sym.Reshape(outputs, shape=(-1, num_hidden))
pred = mx.sym.FullyConnected(data=pred, num_hidden=vocab_dim, name='pred')
label = mx.sym.Reshape(label, shape=(-1,))
pred = mx.sym.SoftmaxOutput(data=pred, label=label, name='softmax')
return pred, ('data',), ('softmax_label',)
def create_bucketing_module(key):
model = mx.mod.BucketingModule(
sym_gen = sym_gen,
default_bucket_key = key,
context = contexts)
model.bind([('data', (batch_size, key))],
[('softmax_label', (batch_size, key))], True, False)
model.init_params(initializer=initializer)
return model
#initialize the bucketing module with the default bucket key
bucketing_model = create_bucketing_module(default_key)
#switch to test_key
bucketing_model.switch_bucket(test_key, [('data', (batch_size, test_key))],
[('softmax_label', (batch_size, test_key))])
total_bytes_before = bucketing_model._buckets[default_key]._total_exec_bytes
#remove test_key and switch again
del bucketing_model._buckets[test_key]
bucketing_model.switch_bucket(test_key, [('data', (batch_size, test_key))],
[('softmax_label', (batch_size, test_key))])
total_bytes_after = bucketing_model._buckets[default_key]._total_exec_bytes
#the default bucket is expected to reuse the bytes allocated
assert total_bytes_after == total_bytes_before
def test_module_set_params():
# data iter
mx.random.seed(11)
data = mx.nd.array([[0.05, .10]]);
label = mx.nd.array([[.01, 0.99]]);
train_data = mx.io.NDArrayIter(data, label, batch_size=1)
# symbols
x = mx.symbol.Variable('data')
x = mx.symbol.FullyConnected(name='fc_0', data=x, num_hidden=2)
x = mx.symbol.Activation(name="act_0", data=x, act_type='sigmoid')
x = mx.symbol.FullyConnected(name='fc_1', data=x, num_hidden=2)
x = mx.symbol.Activation(name="act_1", data=x, act_type='sigmoid')
x = mx.symbol.LinearRegressionOutput(data=x, name='softmax', grad_scale=2)
# create module
mod = mx.mod.Module(x, context=[mx.cpu()]);
mod.bind(train_data.provide_data, label_shapes=train_data.provide_label,
for_training=True)
arg_params_correct = {'fc_0_weight': mx.nd.array([[.15, .20], [.25, .30]]),
'fc_0_bias' : mx.nd.array([.35, .35]),
'fc_1_weight': mx.nd.array([[.40, .45], [.50, .55]]),
'fc_1_bias' : mx.nd.array([.60, .60])}
arg_params_missing = {'fc_0_weight': mx.nd.array([[.15, .20], [.25, .30]]),
'fc_0_bias' : mx.nd.array([.35, .35]),
'fc_1_weight': mx.nd.array([[.40, .45], [.50, .55]])}
arg_params_extra = {'fc_0_weight': mx.nd.array([[.15, .20], [.25, .30]]),
'fc_0_bias' : mx.nd.array([.35, .35]),
'fc_1_weight': mx.nd.array([[.40, .45], [.50, .55]]),
'fc_1_bias' : mx.nd.array([.60, .60]),
'fc_2_weight': mx.nd.array([.60, .60])}
arg_params_missing_extra = {'fc_2_weight': mx.nd.array([.60, .60])}
# test regular set_params
mod.set_params(force_init=True, arg_params=arg_params_correct, aux_params={})
# test allow missing
mod.set_params(force_init=True, arg_params=arg_params_missing, aux_params={}, allow_missing=True)
assertRaises(RuntimeError, mod.set_params,
force_init=True, arg_params=arg_params_missing,
aux_params={}, allow_missing=False)
# test allow extra
mod.set_params(force_init=True, arg_params=arg_params_extra, aux_params={}, allow_missing=True, allow_extra=True)
assertRaises(ValueError, mod.set_params,
force_init=True, arg_params=arg_params_extra,
aux_params={}, allow_missing=True, allow_extra=False)
# test params both missing and extra, with allow_missing=False: raises
# RuntimeError for the missing params
assertRaises(RuntimeError, mod.set_params,
force_init=True, arg_params=arg_params_missing_extra,
aux_params={}, allow_missing=False, allow_extra=False)
# test params both missing and extra, with allow_missing=True but
# allow_extra=False: raises ValueError for the extra param
assertRaises(ValueError, mod.set_params,
force_init=True, arg_params=arg_params_missing_extra,
aux_params={}, allow_missing=True, allow_extra=False)
def test_monitor():
# data iter
mx.random.seed(11)
data = mx.nd.array([[0.05, .10]]);
label = mx.nd.array([[.01, 0.99]]);
train_data = mx.io.NDArrayIter(data, label, batch_size=1)
# symbols
x = mx.symbol.Variable('data')
x = mx.symbol.FullyConnected(name='fc_0', data=x, num_hidden=2)
x = mx.symbol.Activation(name="act_0", data=x, act_type='sigmoid')
x = mx.symbol.FullyConnected(name='fc_1', data=x, num_hidden=2)
x = mx.symbol.Activation(name="act_1", data=x, act_type='sigmoid')
x = mx.symbol.LinearRegressionOutput(data=x, name='softmax', grad_scale=2)
# create monitor
def mean_abs(x):
sum_abs = mx.ndarray.sum(mx.ndarray.abs(x))
return mx.ndarray.divide(sum_abs, reduce(lambda x, y: x * y, x.shape))
mon = mx.mon.Monitor(1, stat_func=mean_abs, pattern='.*', sort=True)
# create module
mod = mx.mod.Module(x, context=[mx.cpu()]);
mod.bind(train_data.provide_data, label_shapes=train_data.provide_label,
for_training=True)
mod.install_monitor(mon)
arg_params = {'fc_0_weight': mx.nd.array([[.15, .20], [.25, .30]]),
'fc_0_bias' : mx.nd.array([.35, .35]),
'fc_1_weight': mx.nd.array([[.40, .45], [.50, .55]]),
'fc_1_bias' : mx.nd.array([.60, .60])}
mod.init_params(arg_params=arg_params)
data_iter = iter(train_data)
data_batch = next(data_iter)
mon.tic()
mod.forward_backward(data_batch)
res = mon.toc()
keys = ['act_0', 'act_1', 'data', 'fc_0', 'fc_1', 'softmax']
mon_result_counts = [0, 0, 0, 0, 0, 0]
assert(len(res) == 21)
for n, k, v in res:
for idx, key in enumerate(keys):
if k.startswith(key):
mon_result_counts[idx] += 1
break
assert(mon_result_counts == [2, 2, 1, 6, 6, 4])
def test_executor_group():
def get_rnn_sym(num_layers, num_words, num_hidden, num_embed, seq_len):
stack = mx.rnn.SequentialRNNCell()
for i in range(num_layers):
stack.add(mx.rnn.LSTMCell(num_hidden=num_hidden, prefix='lstm_l%d_' % i))
data = mx.sym.Variable('data')
label = mx.sym.Variable('softmax_label')
embed = mx.sym.Embedding(data=data, input_dim=num_words,
output_dim=num_embed, name='embed')
stack.reset()
outputs, states = stack.unroll(seq_len, inputs=embed, merge_outputs=True)
pred = mx.sym.Reshape(outputs, shape=(-1, num_hidden))
pred = mx.sym.FullyConnected(data=pred, num_hidden=num_words, name='pred')
label = mx.sym.Reshape(label, shape=(-1,))
pred = mx.sym.SoftmaxOutput(data=pred, label=label, name='softmax')
return pred
def test_shared_exec_group(exec_grp_shared, exec_grp_created, shared_arg_names=None, extra_args=None):
# Test shared data arrays
for i in range(len(exec_grp_shared.execs)):
# test same shared_data_arrays for two exec groups
shared_data_array1 = exec_grp_shared.shared_data_arrays[i]
shared_data_array2 = exec_grp_created.shared_data_arrays[i]
if extra_args is not None:
assert len(shared_data_array1) == len(extra_args),\
"exec_grp_shared.shared_data_arrays[%d] should have same number of args as extra_args"
assert len(shared_data_array1) == len(shared_data_array2),\
"length of shared_data_array of the shared executor group not equal to the created executor group"
for k, v in shared_data_array1.items():
if extra_args is not None:
assert k in extra_args, "arg %s is not in extra_args" % k
assert k in shared_data_array2,\
"arg %s of the shared executor group not in the shared_data_array of the created executor group" % k
assert mx.test_utils.same_array(v, shared_data_array2[k])
for data_name, array in exec_grp_shared.shared_data_arrays[i].items():
assert data_name in exec_grp_created.shared_data_arrays[i], \
"Shared input data '%s' is not in " \
"shared_data_arrays of created executor group." % (data_name)
assert mx.test_utils.same_array(array, exec_grp_created.shared_data_arrays[i][data_name]), \
"Shared input data '%s' does not share memory." % (data_name)
# Test shared argument arrays and gradient arrays
exec_shared = exec_grp_shared.execs[i]
exec_created = exec_grp_created.execs[i]
if shared_arg_names is not None:
# test shared arguments
for arg_name in shared_arg_names:
assert arg_name in exec_created.arg_dict, \
"Shared argument '%s' is not in arg_dict of created executor group." % (arg_name)
assert mx.test_utils.same_array(exec_shared.arg_dict[arg_name], exec_created.arg_dict[arg_name]), \
"Shared argument '%s' does not share memory." % (arg_name)
# test shared argument gradients
for arg_name in shared_arg_names:
assert arg_name in exec_created.grad_dict, \
"Shared argument gradient '%s' is not in " \
"grad_dict of created executor group." % (arg_name)
assert mx.test_utils.same_array(exec_shared.grad_dict[arg_name], exec_created.grad_dict[arg_name]), \
"Shared argument gradient '%s' does not sharing memory." % (arg_name)
for arg_name, grad in exec_grp_shared.grad_req.items():
assert grad == exec_grp_created.grad_req[arg_name], \
"Gradient requirements for shared argument '%s' are inconsistent. " \
"Shared executor group requires '%s' while created executor group requires '%s'" \
%(arg_name, grad, exec_grp_created.grad_req[arg_name])
contexts = [mx.cpu(0), mx.cpu(1)]
workload = [1] * len(contexts)
batch_size = 32
max_bucket_size = 80
num_words = 1000
num_hidden = 100
num_embed = 200
data_shapes = [('data', (batch_size, max_bucket_size))]
label_shapes = [('softmax_label', (batch_size, max_bucket_size))]
    # generate an rnn sym with #layers=3
sym = get_rnn_sym(num_layers=3, num_words=num_words, num_hidden=num_hidden,
num_embed=num_embed, seq_len=max_bucket_size)
arg_names1 = sym.list_arguments()
input_names = [name[0] for name in data_shapes] + [name[0] for name in label_shapes]
shared_arg_names = [name for name in arg_names1 if name not in input_names]
exec_group1 = DataParallelExecutorGroup(symbol=sym, contexts=contexts,
workload=workload, data_shapes=data_shapes,
label_shapes=label_shapes, param_names=shared_arg_names,
for_training=True, inputs_need_grad=False)
# shared_data_arrays should only have input "data" and "softmax_label" arrays
for i in range(len(contexts)):
assert len(exec_group1.shared_data_arrays[i]) == len(input_names),\
"exec_group1.shared_data_arrays[%d] should have the same number of names as in input_names" % i
for name in input_names:
assert name in exec_group1.shared_data_arrays[i],\
"arg %s should be in exec_group1.shared_data_arrays[%d]" % (name, i)
# generate an rnn sym with #layers=5
sym = get_rnn_sym(num_layers=5, num_words=num_words, num_hidden=num_hidden,
num_embed=num_embed, seq_len=max_bucket_size)
arg_names2 = sym.list_arguments()
exec_group2 = DataParallelExecutorGroup(symbol=sym, contexts=contexts,
workload=workload, data_shapes=data_shapes,
label_shapes=label_shapes, param_names=shared_arg_names,
for_training=True, inputs_need_grad=False,
shared_group=exec_group1)
extra_args = [name for name in arg_names2 if name not in shared_arg_names]
test_shared_exec_group(exec_grp_shared=exec_group1, exec_grp_created=exec_group2,
shared_arg_names=shared_arg_names, extra_args=extra_args)
def test_factorization_machine_module():
""" Test factorization machine model with sparse operators """
mx.random.seed(11)
rnd.seed(11)
def fm(factor_size, feature_dim, init):
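        # Factorization machine score:
        #   y(x) = w1_bias + x.w1_weight + 0.5 * sum_f [(x.V_f)^2 - (x^2).(V_f^2)]
        # The pairwise-interaction identity turns the O(d^2) cross terms into O(d*k)
        # work: w2_squared carries (x.V)^2 and bd_sum carries (x^2).(V^2).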
x = mx.symbol.Variable("data", stype='csr')
v = mx.symbol.Variable("v", shape=(feature_dim, factor_size),
init=init, stype='row_sparse')
w1_weight = mx.symbol.var('w1_weight', shape=(feature_dim, 1),
init=init, stype='row_sparse')
w1_bias = mx.symbol.var('w1_bias', shape=(1))
w1 = mx.symbol.broadcast_add(mx.symbol.dot(x, w1_weight), w1_bias)
v_s = mx.symbol._internal._square_sum(data=v, axis=1, keepdims=True)
x_s = mx.symbol.square(data=x)
bd_sum = mx.sym.dot(x_s, v_s)
w2 = mx.symbol.dot(x, v)
w2_squared = 0.5 * mx.symbol.square(data=w2)
w_all = mx.symbol.Concat(w1, w2_squared, dim=1)
sum1 = mx.symbol.sum(data=w_all, axis=1, keepdims=True)
sum2 = 0.5 * mx.symbol.negative(bd_sum)
model = mx.sym.elemwise_add(sum1, sum2)
y = mx.symbol.Variable("label")
model = mx.symbol.LinearRegressionOutput(data=model, label=y)
return model
# model
ctx = default_context()
init = mx.initializer.Normal(sigma=0.01)
factor_size = 4
feature_dim = 10000
model = fm(factor_size, feature_dim, init)
# data iter
num_batches = 5
batch_size = 64
num_samples = batch_size * num_batches
# generate some random csr data
csr_nd = rand_ndarray((num_samples, feature_dim), 'csr', 0.1)
label = mx.nd.ones((num_samples,1))
# the alternative is to use LibSVMIter
train_iter = mx.io.NDArrayIter(data=csr_nd, label={'label':label},
batch_size=batch_size, last_batch_handle='discard')
# create module
mod = mx.mod.Module(symbol=model, data_names=['data'], label_names=['label'])
    # allocate memory given the input data and label shapes
mod.bind(data_shapes=train_iter.provide_data, label_shapes=train_iter.provide_label)
# initialize parameters by uniform random numbers
mod.init_params(initializer=init)
    # use Adam with learning rate 0.001 to train
adam = mx.optimizer.Adam(clip_gradient=5.0, learning_rate=0.001, rescale_grad=1.0/batch_size)
mod.init_optimizer(optimizer=adam)
    # use MSE as the metric
    metric = mx.metric.create('MSE')
    # train for 10 epochs
for epoch in range(10):
train_iter.reset()
metric.reset()
for batch in train_iter:
mod.forward(batch, is_train=True) # compute predictions
            mod.update_metric(metric, batch.label) # accumulate prediction error
mod.backward() # compute gradients
mod.update() # update parameters
# print('Epoch %d, Training %s' % (epoch, metric.get()))
assert(metric.get()[1] < 0.05), metric.get()[1]
def test_module_initializer():
def regression_model(m):
x = mx.symbol.var("data", stype='csr')
v = mx.symbol.var("v", shape=(m, 1), init=mx.init.Uniform(scale=.1),
stype='row_sparse')
model = mx.symbol.dot(lhs=x, rhs=v)
y = mx.symbol.Variable("label")
model = mx.symbol.LinearRegressionOutput(data=model, label=y, name="out")
return model
n, m = 128, 100
model = regression_model(m)
data = mx.nd.zeros(shape=(n, m), stype='csr')
label = mx.nd.zeros((n, 1))
iterator = mx.io.NDArrayIter(data=data, label={'label':label},
batch_size=n, last_batch_handle='discard')
# create module
mod = mx.mod.Module(symbol=model, data_names=['data'], label_names=['label'])
mod.bind(data_shapes=iterator.provide_data, label_shapes=iterator.provide_label)
mod.init_params()
v = mod._arg_params['v']
assert(v.stype == 'row_sparse')
assert(np.sum(v.asnumpy()) != 0)
def test_forward_reshape():
num_class=10
data1 = mx.sym.Variable('data1')
data2 = mx.sym.Variable('data2')
conv1 = mx.sym.Convolution(data=data1, kernel=(2, 2), num_filter=2, stride=(2, 2))
conv2 = mx.sym.Convolution(data=data2, kernel=(3, 3), num_filter=3, stride=(1, 1))
pooling1 = mx.sym.Pooling(data=conv1, kernel=(2, 2), stride=(1, 1), pool_type="avg")
pooling2 = mx.sym.Pooling(data=conv2, kernel=(2, 2), stride=(1, 1), pool_type="max")
flatten1 = mx.sym.flatten(data=pooling1)
flatten2 = mx.sym.flatten(data=pooling2)
sum = mx.sym.sum(data=flatten1, axis=1) + mx.sym.sum(data=flatten2, axis=1)
fc = mx.sym.FullyConnected(data=sum, num_hidden=num_class)
sym = mx.sym.SoftmaxOutput(data=fc, name='softmax')
dshape1 = (10, 3, 64, 64)
dshape2 = (10, 3, 32, 32)
lshape = (10,)
mod = mx.mod.Module(symbol=sym, data_names=['data1', 'data2'],
label_names=['softmax_label'])
mod.bind(data_shapes=[('data1', dshape1), ('data2', dshape2)],
label_shapes=[('softmax_label', lshape)])
mod.init_params()
mod.init_optimizer(optimizer_params={'learning_rate': 0.01})
# Train with original data shapes
data_batch = mx.io.DataBatch(data=[mx.nd.random.uniform(0, 9, dshape1),
mx.nd.random.uniform(5, 15, dshape2)],
label=[mx.nd.ones(lshape)])
mod.forward(data_batch)
assert mod.get_outputs()[0].shape == tuple([lshape[0], num_class])
mod.backward()
mod.update()
# Train with different batch size
dshape1 = (3, 3, 64, 64)
dshape2 = (3, 3, 32, 32)
lshape = (3,)
data_batch = mx.io.DataBatch(data=[mx.nd.random.uniform(0, 9, dshape1),
mx.nd.random.uniform(5, 15, dshape2)],
label=[mx.nd.ones(lshape)])
mod.forward(data_batch)
assert mod.get_outputs()[0].shape == tuple([lshape[0], num_class])
mod.backward()
mod.update()
dshape1 = (20, 3, 64, 64)
dshape2 = (20, 3, 32, 32)
lshape = (20,)
data_batch = mx.io.DataBatch(data=[mx.nd.random.uniform(3, 5, dshape1),
mx.nd.random.uniform(10, 25, dshape2)],
label=[mx.nd.ones(lshape)])
mod.forward(data_batch)
assert mod.get_outputs()[0].shape == tuple([lshape[0], num_class])
mod.backward()
mod.update()
#Train with both different batch size and data shapes
dshape1 = (20, 3, 120, 120)
dshape2 = (20, 3, 32, 64)
lshape = (20,)
data_batch = mx.io.DataBatch(data=[mx.nd.random.uniform(0, 9, dshape1),
mx.nd.random.uniform(5, 15, dshape2)],
label=[mx.nd.ones(lshape)])
mod.forward(data_batch)
assert mod.get_outputs()[0].shape == tuple([lshape[0], num_class])
mod.backward()
mod.update()
dshape1 = (5, 3, 28, 40)
dshape2 = (5, 3, 24, 16)
lshape = (5,)
data_batch = mx.io.DataBatch(data=[mx.nd.random.uniform(0, 9, dshape1),
mx.nd.random.uniform(15, 25, dshape2)],
label=[mx.nd.ones(lshape)])
mod.forward(data_batch)
assert mod.get_outputs()[0].shape == tuple([lshape[0], num_class])
mod.backward()
mod.update()
#Test score
dataset_shape1 = (30, 3, 30, 30)
dataset_shape2 = (30, 3, 20, 40)
labelset_shape = (30,)
eval_dataiter = mx.io.NDArrayIter(data=[mx.nd.random.uniform(0, 9, dataset_shape1),
mx.nd.random.uniform(15, 25, dataset_shape2)],
label=[mx.nd.ones(labelset_shape)],
batch_size=5)
assert len(mod.score(eval_data=eval_dataiter, eval_metric='acc')) == 1
#Test prediction
dshape1 = (1, 3, 30, 30)
dshape2 = (1, 3, 20, 40)
dataset_shape1 = (10, 3, 30, 30)
dataset_shape2 = (10, 3, 20, 40)
pred_dataiter = mx.io.NDArrayIter(data=[mx.nd.random.uniform(0, 9, dataset_shape1),
mx.nd.random.uniform(15, 25, dataset_shape2)])
mod.bind(data_shapes=[('data1', dshape1), ('data2', dshape2)],
for_training=False, force_rebind=True)
assert mod.predict(pred_dataiter).shape == tuple([10, num_class])
#Test forward with other data batch API
Batch = namedtuple('Batch', ['data'])
data = mx.sym.Variable('data')
out = data * 2
mod = mx.mod.Module(symbol=out, label_names=None)
mod.bind(data_shapes=[('data', (1, 10))])
mod.init_params()
data1 = [mx.nd.ones((1, 10))]
mod.forward(Batch(data1))
assert mod.get_outputs()[0].shape == (1, 10)
data2 = [mx.nd.ones((3, 5))]
mod.forward(Batch(data2))
assert mod.get_outputs()[0].shape == (3, 5)
if __name__ == '__main__':
import nose
nose.runmodule()
|
apache-2.0
|
salomon1184/bite-project
|
deps/gdata-python-client/samples/apps/marketplace_sample/gdata/tlslite/utils/Python_RSAKey.py
|
239
|
7707
|
"""Pure-Python RSA implementation."""
from cryptomath import *
import xmltools
from ASN1Parser import ASN1Parser
from RSAKey import *
class Python_RSAKey(RSAKey):
def __init__(self, n=0, e=0, d=0, p=0, q=0, dP=0, dQ=0, qInv=0):
if (n and not e) or (e and not n):
raise AssertionError()
self.n = n
self.e = e
self.d = d
self.p = p
self.q = q
self.dP = dP
self.dQ = dQ
self.qInv = qInv
self.blinder = 0
self.unblinder = 0
def hasPrivateKey(self):
return self.d != 0
def hash(self):
s = self.writeXMLPublicKey('\t\t')
return hashAndBase64(s.strip())
def _rawPrivateKeyOp(self, m):
#Create blinding values, on the first pass:
if not self.blinder:
self.unblinder = getRandomNumber(2, self.n)
self.blinder = powMod(invMod(self.unblinder, self.n), self.e,
self.n)
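        # With r = unblinder, blinder = (r^-1)^e mod n. The blinded input m*r^-e
        # raised to d yields m^d * r^-1, and multiplying by r recovers m^d, so the
        # private exponentiation never sees the raw input (a timing-attack defence).
        # Squaring both values below refreshes the pair cheaply on each call.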
#Blind the input
m = (m * self.blinder) % self.n
#Perform the RSA operation
c = self._rawPrivateKeyOpHelper(m)
#Unblind the output
c = (c * self.unblinder) % self.n
#Update blinding values
self.blinder = (self.blinder * self.blinder) % self.n
self.unblinder = (self.unblinder * self.unblinder) % self.n
#Return the output
return c
def _rawPrivateKeyOpHelper(self, m):
#Non-CRT version
#c = powMod(m, self.d, self.n)
#CRT version (~3x faster)
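        # Garner's recombination: s1 = m^dP mod p and s2 = m^dQ mod q use the
        # reduced exponents dP = d mod (p-1), dQ = d mod (q-1); the lift
        # h = qInv*(s1 - s2) mod p, c = s2 + q*h gives c = m^d mod n.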
s1 = powMod(m, self.dP, self.p)
s2 = powMod(m, self.dQ, self.q)
h = ((s1 - s2) * self.qInv) % self.p
c = s2 + self.q * h
return c
def _rawPublicKeyOp(self, c):
m = powMod(c, self.e, self.n)
return m
def acceptsPassword(self): return False
def write(self, indent=''):
if self.d:
s = indent+'<privateKey xmlns="http://trevp.net/rsa">\n'
else:
s = indent+'<publicKey xmlns="http://trevp.net/rsa">\n'
s += indent+'\t<n>%s</n>\n' % numberToBase64(self.n)
s += indent+'\t<e>%s</e>\n' % numberToBase64(self.e)
if self.d:
s += indent+'\t<d>%s</d>\n' % numberToBase64(self.d)
s += indent+'\t<p>%s</p>\n' % numberToBase64(self.p)
s += indent+'\t<q>%s</q>\n' % numberToBase64(self.q)
s += indent+'\t<dP>%s</dP>\n' % numberToBase64(self.dP)
s += indent+'\t<dQ>%s</dQ>\n' % numberToBase64(self.dQ)
s += indent+'\t<qInv>%s</qInv>\n' % numberToBase64(self.qInv)
s += indent+'</privateKey>'
else:
s += indent+'</publicKey>'
#Only add \n if part of a larger structure
if indent != '':
s += '\n'
return s
def writeXMLPublicKey(self, indent=''):
return Python_RSAKey(self.n, self.e).write(indent)
def generate(bits):
key = Python_RSAKey()
p = getRandomPrime(bits/2, False)
q = getRandomPrime(bits/2, False)
t = lcm(p-1, q-1)
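        # t = lcm(p-1, q-1) is the Carmichael function lambda(n); d is chosen as
        # the inverse of e modulo t so that e*d = 1 (mod t).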
key.n = p * q
key.e = 3L #Needed to be long, for Java
key.d = invMod(key.e, t)
key.p = p
key.q = q
key.dP = key.d % (p-1)
key.dQ = key.d % (q-1)
key.qInv = invMod(q, p)
return key
generate = staticmethod(generate)
def parsePEM(s, passwordCallback=None):
"""Parse a string containing a <privateKey> or <publicKey>, or
PEM-encoded key."""
start = s.find("-----BEGIN PRIVATE KEY-----")
if start != -1:
end = s.find("-----END PRIVATE KEY-----")
if end == -1:
raise SyntaxError("Missing PEM Postfix")
s = s[start+len("-----BEGIN PRIVATE KEY -----") : end]
bytes = base64ToBytes(s)
return Python_RSAKey._parsePKCS8(bytes)
else:
start = s.find("-----BEGIN RSA PRIVATE KEY-----")
if start != -1:
end = s.find("-----END RSA PRIVATE KEY-----")
if end == -1:
raise SyntaxError("Missing PEM Postfix")
s = s[start+len("-----BEGIN RSA PRIVATE KEY -----") : end]
bytes = base64ToBytes(s)
return Python_RSAKey._parseSSLeay(bytes)
raise SyntaxError("Missing PEM Prefix")
parsePEM = staticmethod(parsePEM)
def parseXML(s):
element = xmltools.parseAndStripWhitespace(s)
return Python_RSAKey._parseXML(element)
parseXML = staticmethod(parseXML)
def _parsePKCS8(bytes):
p = ASN1Parser(bytes)
version = p.getChild(0).value[0]
if version != 0:
raise SyntaxError("Unrecognized PKCS8 version")
rsaOID = p.getChild(1).value
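        # Expected DER bytes for AlgorithmIdentifier rsaEncryption:
        # OID 1.2.840.113549.1.1.1 (tag 0x06, length 9) followed by NULL (05 00)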
if list(rsaOID) != [6, 9, 42, 134, 72, 134, 247, 13, 1, 1, 1, 5, 0]:
raise SyntaxError("Unrecognized AlgorithmIdentifier")
#Get the privateKey
privateKeyP = p.getChild(2)
#Adjust for OCTET STRING encapsulation
privateKeyP = ASN1Parser(privateKeyP.value)
return Python_RSAKey._parseASN1PrivateKey(privateKeyP)
_parsePKCS8 = staticmethod(_parsePKCS8)
def _parseSSLeay(bytes):
privateKeyP = ASN1Parser(bytes)
return Python_RSAKey._parseASN1PrivateKey(privateKeyP)
_parseSSLeay = staticmethod(_parseSSLeay)
def _parseASN1PrivateKey(privateKeyP):
version = privateKeyP.getChild(0).value[0]
if version != 0:
raise SyntaxError("Unrecognized RSAPrivateKey version")
n = bytesToNumber(privateKeyP.getChild(1).value)
e = bytesToNumber(privateKeyP.getChild(2).value)
d = bytesToNumber(privateKeyP.getChild(3).value)
p = bytesToNumber(privateKeyP.getChild(4).value)
q = bytesToNumber(privateKeyP.getChild(5).value)
dP = bytesToNumber(privateKeyP.getChild(6).value)
dQ = bytesToNumber(privateKeyP.getChild(7).value)
qInv = bytesToNumber(privateKeyP.getChild(8).value)
return Python_RSAKey(n, e, d, p, q, dP, dQ, qInv)
_parseASN1PrivateKey = staticmethod(_parseASN1PrivateKey)
def _parseXML(element):
try:
xmltools.checkName(element, "privateKey")
except SyntaxError:
xmltools.checkName(element, "publicKey")
#Parse attributes
xmltools.getReqAttribute(element, "xmlns", "http://trevp.net/rsa\Z")
xmltools.checkNoMoreAttributes(element)
#Parse public values (<n> and <e>)
n = base64ToNumber(xmltools.getText(xmltools.getChild(element, 0, "n"), xmltools.base64RegEx))
e = base64ToNumber(xmltools.getText(xmltools.getChild(element, 1, "e"), xmltools.base64RegEx))
d = 0
p = 0
q = 0
dP = 0
dQ = 0
qInv = 0
#Parse private values, if present
if element.childNodes.length>=3:
d = base64ToNumber(xmltools.getText(xmltools.getChild(element, 2, "d"), xmltools.base64RegEx))
p = base64ToNumber(xmltools.getText(xmltools.getChild(element, 3, "p"), xmltools.base64RegEx))
q = base64ToNumber(xmltools.getText(xmltools.getChild(element, 4, "q"), xmltools.base64RegEx))
dP = base64ToNumber(xmltools.getText(xmltools.getChild(element, 5, "dP"), xmltools.base64RegEx))
dQ = base64ToNumber(xmltools.getText(xmltools.getChild(element, 6, "dQ"), xmltools.base64RegEx))
qInv = base64ToNumber(xmltools.getText(xmltools.getLastChild(element, 7, "qInv"), xmltools.base64RegEx))
return Python_RSAKey(n, e, d, p, q, dP, dQ, qInv)
_parseXML = staticmethod(_parseXML)
|
apache-2.0
|
solashirai/edx-platform
|
cms/djangoapps/contentstore/tests/test_contentstore.py
|
15
|
93797
|
# -*- coding: utf-8 -*-
import copy
import mock
from mock import patch
import shutil
import lxml.html
from lxml import etree
import ddt
from datetime import timedelta
from fs.osfs import OSFS
from json import loads
from path import Path as path
from textwrap import dedent
from uuid import uuid4
from functools import wraps
from unittest import SkipTest
from django.conf import settings
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.utils import override_settings
from openedx.core.lib.tempdir import mkdtemp_clean
from common.test.utils import XssTestMixin
from contentstore.tests.utils import parse_json, AjaxEnabledTestClient, CourseTestCase
from contentstore.views.component import ADVANCED_COMPONENT_TYPES
from edxval.api import create_video, get_videos_for_course
from xmodule.contentstore.django import contentstore
from xmodule.contentstore.utils import restore_asset_from_trashcan, empty_asset_trashcan
from xmodule.exceptions import InvalidVersionError
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.inheritance import own_metadata
from opaque_keys.edx.keys import UsageKey, CourseKey
from opaque_keys.edx.locations import AssetLocation, CourseLocator
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, LibraryFactory, check_mongo_calls
from xmodule.modulestore.xml_exporter import export_course_to_xml
from xmodule.modulestore.xml_importer import import_course_from_xml, perform_xlint
from xmodule.capa_module import CapaDescriptor
from xmodule.course_module import CourseDescriptor, Textbook
from xmodule.seq_module import SequenceDescriptor
from contentstore.utils import delete_course_and_groups, reverse_url, reverse_course_url
from django_comment_common.utils import are_permissions_roles_seeded
from student import auth
from student.models import CourseEnrollment
from student.roles import CourseCreatorRole, CourseInstructorRole
from opaque_keys import InvalidKeyError
from contentstore.tests.utils import get_url
from course_action_state.models import CourseRerunState, CourseRerunUIStateManager
from course_action_state.managers import CourseActionStateItemNotFoundError
from xmodule.contentstore.content import StaticContent
from xmodule.modulestore.django import modulestore
TEST_DATA_CONTENTSTORE = copy.deepcopy(settings.CONTENTSTORE)
TEST_DATA_CONTENTSTORE['DOC_STORE_CONFIG']['db'] = 'test_xcontent_%s' % uuid4().hex
TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT
def requires_pillow_jpeg(func):
"""
A decorator to indicate that the function requires JPEG support for Pillow,
otherwise it cannot be run
"""
@wraps(func)
def decorated_func(*args, **kwargs):
"""
Execute the function if we have JPEG support in Pillow.
"""
try:
from PIL import Image
except ImportError:
raise SkipTest("Pillow is not installed (or not found)")
if not getattr(Image.core, "jpeg_decoder", False):
raise SkipTest("Pillow cannot open JPEG files")
return func(*args, **kwargs)
return decorated_func
@override_settings(CONTENTSTORE=TEST_DATA_CONTENTSTORE)
class ContentStoreTestCase(CourseTestCase):
"""
Base class for Content Store Test Cases
"""
class ImportRequiredTestCases(ContentStoreTestCase):
"""
Tests which legitimately need to import a course
"""
def test_no_static_link_rewrites_on_import(self):
course_items = import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True
)
course = course_items[0]
handouts_usage_key = course.id.make_usage_key('course_info', 'handouts')
handouts = self.store.get_item(handouts_usage_key)
self.assertIn('/static/', handouts.data)
handouts_usage_key = course.id.make_usage_key('html', 'toyhtml')
handouts = self.store.get_item(handouts_usage_key)
self.assertIn('/static/', handouts.data)
def test_xlint_fails(self):
err_cnt = perform_xlint(TEST_DATA_DIR, ['toy'])
self.assertGreater(err_cnt, 0)
def test_invalid_asset_overwrite(self):
"""
        Tests that an asset with an invalid displayname can be overwritten if multiple assets share that displayname.
        It verifies that:
        During import, any ('/') or ('\\') in an asset's displayname is replaced with underscores '_'.
        Export does not fail when an asset has '/' in its displayname. If the converted display name matches
        that of any other asset, the asset is overwritten.
Asset name in XML: "/invalid\\displayname/subs-esLhHcdKGWvKs.srt"
"""
content_store = contentstore()
expected_displayname = '_invalid_displayname_subs-esLhHcdKGWvKs.srt'
import_course_from_xml(
self.store,
self.user.id,
TEST_DATA_DIR,
['import_draft_order'],
static_content_store=content_store,
verbose=True,
create_if_not_present=True
)
# Verify the course has imported successfully
course = self.store.get_course(self.store.make_course_key(
'test_org',
'import_draft_order',
'import_draft_order'
))
self.assertIsNotNone(course)
        # Add a new asset to the course, named so that it overwrites the one existing
# asset in the course. (i.e. _invalid_displayname_subs-esLhHcdKGWvKs.srt)
asset_key = course.id.make_asset_key('asset', 'sample_asset.srt')
content = StaticContent(
asset_key, expected_displayname, 'application/text', 'test',
)
content_store.save(content)
# Get & verify that course actually has two assets
assets, count = content_store.get_all_content_for_course(course.id)
self.assertEqual(count, 2)
        # Verify both assets have the same `displayname` after saving.
for asset in assets:
self.assertEquals(asset['displayname'], expected_displayname)
# Test course export does not fail
root_dir = path(mkdtemp_clean())
print 'Exporting to tempdir = {0}'.format(root_dir)
export_course_to_xml(self.store, content_store, course.id, root_dir, 'test_export')
filesystem = OSFS(root_dir / 'test_export/static')
exported_static_files = filesystem.listdir()
        # Verify that the asset has been overwritten during export.
self.assertEqual(len(exported_static_files), 1)
self.assertTrue(filesystem.exists(expected_displayname))
self.assertEqual(exported_static_files[0], expected_displayname)
# Remove exported course
shutil.rmtree(root_dir)
def test_about_overrides(self):
'''
This test case verifies that a course can use specialized override for about data,
e.g. /about/Fall_2012/effort.html
while there is a base definition in /about/effort.html
'''
course_items = import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True
)
course_key = course_items[0].id
effort = self.store.get_item(course_key.make_usage_key('about', 'effort'))
self.assertEqual(effort.data, '6 hours')
# this one should be in a non-override folder
effort = self.store.get_item(course_key.make_usage_key('about', 'end_date'))
self.assertEqual(effort.data, 'TBD')
@requires_pillow_jpeg
def test_asset_import(self):
'''
This test validates that an image asset is imported and a thumbnail was generated for a .gif
'''
content_store = contentstore()
import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'], static_content_store=content_store, verbose=True,
create_if_not_present=True
)
course = self.store.get_course(self.store.make_course_key('edX', 'toy', '2012_Fall'))
self.assertIsNotNone(course)
# make sure we have some assets in our contentstore
all_assets, __ = content_store.get_all_content_for_course(course.id)
self.assertGreater(len(all_assets), 0)
# make sure we have some thumbnails in our contentstore
all_thumbnails = content_store.get_all_content_thumbnails_for_course(course.id)
self.assertGreater(len(all_thumbnails), 0)
location = AssetLocation.from_deprecated_string('/c4x/edX/toy/asset/just_a_test.jpg')
content = content_store.find(location)
self.assertIsNotNone(content)
self.assertIsNotNone(content.thumbnail_location)
thumbnail = content_store.find(content.thumbnail_location)
self.assertIsNotNone(thumbnail)
def test_course_info_updates_import_export(self):
"""
Test that course info updates are imported and exported with all content fields ('data', 'items')
"""
content_store = contentstore()
data_dir = TEST_DATA_DIR
courses = import_course_from_xml(
self.store, self.user.id, data_dir, ['course_info_updates'],
static_content_store=content_store, verbose=True, create_if_not_present=True
)
course = courses[0]
self.assertIsNotNone(course)
course_updates = self.store.get_item(course.id.make_usage_key('course_info', 'updates'))
self.assertIsNotNone(course_updates)
        # check that the imported course has files 'updates.html' and 'updates.items.json'
filesystem = OSFS(data_dir + '/course_info_updates/info')
self.assertTrue(filesystem.exists('updates.html'))
self.assertTrue(filesystem.exists('updates.items.json'))
        # verify that the course info update module has the same data content as the data file it was imported from
# check 'data' field content
with filesystem.open('updates.html', 'r') as course_policy:
on_disk = course_policy.read()
self.assertEqual(course_updates.data, on_disk)
# check 'items' field content
with filesystem.open('updates.items.json', 'r') as course_policy:
on_disk = loads(course_policy.read())
self.assertEqual(course_updates.items, on_disk)
# now export the course to a tempdir and test that it contains files 'updates.html' and 'updates.items.json'
# with same content as in course 'info' directory
root_dir = path(mkdtemp_clean())
print 'Exporting to tempdir = {0}'.format(root_dir)
export_course_to_xml(self.store, content_store, course.id, root_dir, 'test_export')
# check that exported course has files 'updates.html' and 'updates.items.json'
filesystem = OSFS(root_dir / 'test_export/info')
self.assertTrue(filesystem.exists('updates.html'))
self.assertTrue(filesystem.exists('updates.items.json'))
# verify that exported course has same data content as in course_info_update module
with filesystem.open('updates.html', 'r') as grading_policy:
on_disk = grading_policy.read()
self.assertEqual(on_disk, course_updates.data)
with filesystem.open('updates.items.json', 'r') as grading_policy:
on_disk = loads(grading_policy.read())
self.assertEqual(on_disk, course_updates.items)
def test_rewrite_nonportable_links_on_import(self):
content_store = contentstore()
import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'],
static_content_store=content_store, create_if_not_present=True
)
# first check a static asset link
course_key = self.store.make_course_key('edX', 'toy', 'run')
html_module_location = course_key.make_usage_key('html', 'nonportable')
html_module = self.store.get_item(html_module_location)
self.assertIn('/static/foo.jpg', html_module.data)
        # then check an intra-courseware link
html_module_location = course_key.make_usage_key('html', 'nonportable_link')
html_module = self.store.get_item(html_module_location)
self.assertIn('/jump_to_id/nonportable_link', html_module.data)
def verify_content_existence(self, store, root_dir, course_id, dirname, category_name, filename_suffix=''):
filesystem = OSFS(root_dir / 'test_export')
self.assertTrue(filesystem.exists(dirname))
items = store.get_items(course_id, qualifiers={'category': category_name})
for item in items:
filesystem = OSFS(root_dir / ('test_export/' + dirname))
self.assertTrue(filesystem.exists(item.location.name + filename_suffix))
@mock.patch('xmodule.course_module.requests.get')
def test_export_course_roundtrip(self, mock_get):
mock_get.return_value.text = dedent("""
<?xml version="1.0"?><table_of_contents>
<entry page="5" page_label="ii" name="Table of Contents"/>
</table_of_contents>
""").strip()
content_store = contentstore()
course_id = self.import_and_populate_course()
root_dir = path(mkdtemp_clean())
print 'Exporting to tempdir = {0}'.format(root_dir)
# export out to a tempdir
export_course_to_xml(self.store, content_store, course_id, root_dir, 'test_export')
# check for static tabs
self.verify_content_existence(self.store, root_dir, course_id, 'tabs', 'static_tab', '.html')
# check for about content
self.verify_content_existence(self.store, root_dir, course_id, 'about', 'about', '.html')
        # assert that there are html and video directories in drafts:
draft_dir = OSFS(root_dir / 'test_export/drafts')
self.assertTrue(draft_dir.exists('html'))
self.assertTrue(draft_dir.exists('video'))
# and assert that they contain the created modules
self.assertIn(self.DRAFT_HTML + ".xml", draft_dir.listdir('html'))
self.assertIn(self.DRAFT_VIDEO + ".xml", draft_dir.listdir('video'))
# and assert the child of the orphaned draft wasn't exported
self.assertNotIn(self.ORPHAN_DRAFT_HTML + ".xml", draft_dir.listdir('html'))
# check for grading_policy.json
filesystem = OSFS(root_dir / 'test_export/policies/2012_Fall')
self.assertTrue(filesystem.exists('grading_policy.json'))
course = self.store.get_course(course_id)
# compare what's on disk compared to what we have in our course
with filesystem.open('grading_policy.json', 'r') as grading_policy:
on_disk = loads(grading_policy.read())
self.assertEqual(on_disk, course.grading_policy)
# check for policy.json
self.assertTrue(filesystem.exists('policy.json'))
# compare what's on disk to what we have in the course module
with filesystem.open('policy.json', 'r') as course_policy:
on_disk = loads(course_policy.read())
self.assertIn('course/2012_Fall', on_disk)
self.assertEqual(on_disk['course/2012_Fall'], own_metadata(course))
# remove old course
self.store.delete_course(course_id, self.user.id)
# reimport over old course
self.check_import(root_dir, content_store, course_id)
# import to different course id
new_course_id = self.store.make_course_key('anotherX', 'anotherToy', 'Someday')
self.check_import(root_dir, content_store, new_course_id)
self.assertCoursesEqual(course_id, new_course_id)
shutil.rmtree(root_dir)
def check_import(self, root_dir, content_store, course_id):
"""Imports the course in root_dir into the given course_id and verifies its content"""
# reimport
import_course_from_xml(
self.store,
self.user.id,
root_dir,
['test_export'],
static_content_store=content_store,
target_id=course_id,
)
# verify content of the course
self.check_populated_course(course_id)
# verify additional export attributes
def verify_export_attrs_removed(attributes):
"""Verifies all temporary attributes added during export are removed"""
self.assertNotIn('index_in_children_list', attributes)
self.assertNotIn('parent_sequential_url', attributes)
self.assertNotIn('parent_url', attributes)
vertical = self.store.get_item(course_id.make_usage_key('vertical', self.TEST_VERTICAL))
verify_export_attrs_removed(vertical.xml_attributes)
for child in vertical.get_children():
verify_export_attrs_removed(child.xml_attributes)
if hasattr(child, 'data'):
verify_export_attrs_removed(child.data)
def test_export_course_with_metadata_only_video(self):
content_store = contentstore()
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True)
course_id = self.store.make_course_key('edX', 'toy', '2012_Fall')
# create a new video module and add it as a child to a vertical
        # this re-creates a bug where export blew up because the video template
        # has nothing in its 'data' field
verticals = self.store.get_items(course_id, qualifiers={'category': 'vertical'})
self.assertGreater(len(verticals), 0)
parent = verticals[0]
ItemFactory.create(parent_location=parent.location, category="video", display_name="untitled")
root_dir = path(mkdtemp_clean())
print 'Exporting to tempdir = {0}'.format(root_dir)
# export out to a tempdir
export_course_to_xml(self.store, content_store, course_id, root_dir, 'test_export')
shutil.rmtree(root_dir)
def test_export_course_with_metadata_only_word_cloud(self):
"""
Similar to `test_export_course_with_metadata_only_video`.
"""
content_store = contentstore()
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['word_cloud'], create_if_not_present=True)
course_id = self.store.make_course_key('HarvardX', 'ER22x', '2013_Spring')
verticals = self.store.get_items(course_id, qualifiers={'category': 'vertical'})
self.assertGreater(len(verticals), 0)
parent = verticals[0]
ItemFactory.create(parent_location=parent.location, category="word_cloud", display_name="untitled")
root_dir = path(mkdtemp_clean())
print 'Exporting to tempdir = {0}'.format(root_dir)
# export out to a tempdir
export_course_to_xml(self.store, content_store, course_id, root_dir, 'test_export')
shutil.rmtree(root_dir)
def test_import_after_renaming_xml_data(self):
"""
Test that import works fine on split mongo after renaming the blocks url.
"""
split_store = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.split) # pylint: disable=W0212
import_course_from_xml(
split_store, self.user.id, TEST_DATA_DIR,
['course_before_rename'],
create_if_not_present=True
)
course_after_rename = import_course_from_xml(
split_store, self.user.id, TEST_DATA_DIR,
['course_after_rename'],
create_if_not_present=True
)
all_items = split_store.get_items(course_after_rename[0].id, qualifiers={'category': 'chapter'})
renamed_chapter = [item for item in all_items if item.location.block_id == 'renamed_chapter'][0]
self.assertIsNotNone(renamed_chapter.published_on)
self.assertIsNotNone(renamed_chapter.parent)
self.assertTrue(renamed_chapter.location in course_after_rename[0].children)
original_chapter = [item for item in all_items
if item.location.block_id == 'b9870b9af59841a49e6e02765d0e3bbf'][0]
self.assertIsNone(original_chapter.published_on)
self.assertIsNone(original_chapter.parent)
self.assertFalse(original_chapter.location in course_after_rename[0].children)
def test_empty_data_roundtrip(self):
"""
Test that an empty `data` field is preserved through
export/import.
"""
content_store = contentstore()
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True)
course_id = self.store.make_course_key('edX', 'toy', '2012_Fall')
verticals = self.store.get_items(course_id, qualifiers={'category': 'vertical'})
self.assertGreater(len(verticals), 0)
parent = verticals[0]
# Create a module, and ensure that its `data` field is empty
word_cloud = ItemFactory.create(parent_location=parent.location, category="word_cloud", display_name="untitled")
del word_cloud.data
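        # Deleting an XBlock field clears the stored value, so reads fall back to
        # the field default -- the empty string for `data`.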
self.assertEquals(word_cloud.data, '')
# Export the course
root_dir = path(mkdtemp_clean())
export_course_to_xml(self.store, content_store, course_id, root_dir, 'test_roundtrip')
        # Reimport and get the word cloud back
import_course_from_xml(self.store, self.user.id, root_dir)
imported_word_cloud = self.store.get_item(course_id.make_usage_key('word_cloud', 'untitled'))
# It should now contain empty data
self.assertEquals(imported_word_cloud.data, '')
def test_html_export_roundtrip(self):
"""
Test that a course which has HTML that has style formatting is preserved in export/import
"""
content_store = contentstore()
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True)
course_id = self.store.make_course_key('edX', 'toy', '2012_Fall')
# Export the course
root_dir = path(mkdtemp_clean())
export_course_to_xml(self.store, content_store, course_id, root_dir, 'test_roundtrip')
        # Reimport the course
import_course_from_xml(self.store, self.user.id, root_dir, create_if_not_present=True)
# get the sample HTML with styling information
html_module = self.store.get_item(course_id.make_usage_key('html', 'with_styling'))
self.assertIn('<p style="font:italic bold 72px/30px Georgia, serif; color: red; ">', html_module.data)
# get the sample HTML with just a simple <img> tag information
html_module = self.store.get_item(course_id.make_usage_key('html', 'just_img'))
self.assertIn('<img src="/static/foo_bar.jpg" />', html_module.data)
def test_export_course_without_content_store(self):
# Create toy course
course_items = import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True
)
course_id = course_items[0].id
root_dir = path(mkdtemp_clean())
print 'Exporting to tempdir = {0}'.format(root_dir)
export_course_to_xml(self.store, None, course_id, root_dir, 'test_export_no_content_store')
# Delete the course from module store and reimport it
self.store.delete_course(course_id, self.user.id)
import_course_from_xml(
self.store, self.user.id, root_dir, ['test_export_no_content_store'],
static_content_store=None,
target_id=course_id
)
# Verify reimported course
items = self.store.get_items(
course_id,
qualifiers={
'category': 'sequential',
'name': 'vertical_sequential',
}
)
self.assertEqual(len(items), 1)
def test_export_course_no_xml_attributes(self):
"""
Test that a module without an `xml_attributes` attr will still be
exported successfully
"""
content_store = contentstore()
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True)
course_id = self.store.make_course_key('edX', 'toy', '2012_Fall')
verticals = self.store.get_items(course_id, qualifiers={'category': 'vertical'})
vertical = verticals[0]
# create OpenAssessmentBlock:
open_assessment = ItemFactory.create(
parent_location=vertical.location,
category="openassessment",
display_name="untitled",
)
# convert it to draft
draft_open_assessment = self.store.convert_to_draft(
open_assessment.location, self.user.id
)
# note that it has no `xml_attributes` attribute
self.assertFalse(hasattr(draft_open_assessment, "xml_attributes"))
# export should still complete successfully
root_dir = path(mkdtemp_clean())
export_course_to_xml(
self.store,
content_store,
course_id,
root_dir,
'test_no_xml_attributes'
)
@ddt.ddt
class MiscCourseTests(ContentStoreTestCase):
"""
Tests that rely on the toy courses.
"""
def setUp(self):
super(MiscCourseTests, self).setUp()
# save locs not items b/c the items won't have the subsequently created children in them until refetched
self.chapter_loc = self.store.create_child(
self.user.id, self.course.location, 'chapter', 'test_chapter'
).location
self.seq_loc = self.store.create_child(
self.user.id, self.chapter_loc, 'sequential', 'test_seq'
).location
self.vert_loc = self.store.create_child(self.user.id, self.seq_loc, 'vertical', 'test_vert').location
# now create some things quasi like the toy course had
self.problem = self.store.create_child(
self.user.id, self.vert_loc, 'problem', 'test_problem', fields={
"data": "<problem>Test</problem>"
}
)
self.store.create_child(
self.user.id, self.vert_loc, 'video', fields={
"youtube_id_0_75": "JMD_ifUUfsU",
"youtube_id_1_0": "OEoXaMPEzfM",
"youtube_id_1_25": "AKqURZnYqpk",
"youtube_id_1_5": "DYpADpL7jAY",
"name": "sample_video",
}
)
self.store.create_child(
self.user.id, self.vert_loc, 'video', fields={
"youtube_id_0_75": "JMD_ifUUfsU",
"youtube_id_1_0": "OEoXaMPEzfM",
"youtube_id_1_25": "AKqURZnYqpk",
"youtube_id_1_5": "DYpADpL7jAY",
"name": "truncated_video",
"end_time": 10.0,
}
)
self.store.create_child(
self.user.id, self.vert_loc, 'poll_question', fields={
"name": "T1_changemind_poll_foo_2",
"display_name": "Change your answer",
"question": "Have you changed your mind?",
"answers": [{"id": "yes", "text": "Yes"}, {"id": "no", "text": "No"}],
}
)
self.course = self.store.publish(self.course.location, self.user.id)
def check_components_on_page(self, component_types, expected_types):
"""
Ensure that the right types end up on the page.
component_types is the list of advanced components.
expected_types is the list of elements that should appear on the page.
expected_types and component_types should be similar, but not
exactly the same -- for example, 'video' in
component_types should cause 'Video' to be present.
"""
self.course.advanced_modules = component_types
self.store.update_item(self.course, self.user.id)
# just pick one vertical
resp = self.client.get_html(get_url('container_handler', self.vert_loc))
self.assertEqual(resp.status_code, 200)
for expected in expected_types:
self.assertIn(expected, resp.content)
@ddt.data("<script>alert(1)</script>", "alert('hi')", "</script><script>alert(1)</script>")
def test_container_handler_xss_prevent(self, malicious_code):
"""
Test that XSS attack is prevented
"""
resp = self.client.get_html(get_url('container_handler', self.vert_loc) + '?action=' + malicious_code)
self.assertEqual(resp.status_code, 200)
# Test that malicious code does not appear in html
self.assertNotIn(malicious_code, resp.content)
@patch('django.conf.settings.DEPRECATED_ADVANCED_COMPONENT_TYPES', [])
def test_advanced_components_in_edit_unit(self):
# This could be made better, but for now let's just assert that we see the advanced modules mentioned in the page
# response HTML
self.check_components_on_page(
ADVANCED_COMPONENT_TYPES,
['Word cloud', 'Annotation', 'Text Annotation', 'Video Annotation', 'Image Annotation',
'split_test'],
)
@ddt.data('/Fake/asset/displayname', '\\Fake\\asset\\displayname')
def test_export_on_invalid_displayname(self, invalid_displayname):
""" Tests that assets with invalid 'displayname' does not cause export to fail """
content_store = contentstore()
exported_asset_name = '_Fake_asset_displayname'
        # Create an asset whose displayname contains slashes (`invalid_displayname`)
asset_key = self.course.id.make_asset_key('asset', "fake_asset.txt")
content = StaticContent(
asset_key, invalid_displayname, 'application/text', 'test',
)
content_store.save(content)
# Verify that the course has only one asset and it has been added with an invalid asset name.
assets, count = content_store.get_all_content_for_course(self.course.id)
self.assertEqual(count, 1)
display_name = assets[0]['displayname']
self.assertEqual(display_name, invalid_displayname)
# Now export the course to a tempdir and test that it contains assets. The export should pass
root_dir = path(mkdtemp_clean())
print 'Exporting to tempdir = {0}'.format(root_dir)
export_course_to_xml(self.store, content_store, self.course.id, root_dir, 'test_export')
filesystem = OSFS(root_dir / 'test_export/static')
exported_static_files = filesystem.listdir()
# Verify that only single asset has been exported with the expected asset name.
self.assertTrue(filesystem.exists(exported_asset_name))
self.assertEqual(len(exported_static_files), 1)
# Remove tempdir
shutil.rmtree(root_dir)
def test_assets_overwrite(self):
""" Tests that assets will similar 'displayname' will be overwritten during export """
content_store = contentstore()
asset_displayname = 'Fake_asset.txt'
        # Create two assets with the same 'displayname'
for i in range(2):
asset_path = 'sample_asset_{}.txt'.format(i)
asset_key = self.course.id.make_asset_key('asset', asset_path)
content = StaticContent(
asset_key, asset_displayname, 'application/text', 'test',
)
content_store.save(content)
# Fetch & verify course assets to be equal to 2.
assets, count = content_store.get_all_content_for_course(self.course.id)
self.assertEqual(count, 2)
        # Verify both assets have the same 'displayname' after saving.
for asset in assets:
self.assertEquals(asset['displayname'], asset_displayname)
# Now export the course to a tempdir and test that it contains assets.
root_dir = path(mkdtemp_clean())
print 'Exporting to tempdir = {0}'.format(root_dir)
export_course_to_xml(self.store, content_store, self.course.id, root_dir, 'test_export')
        # Verify that the asset has been overwritten during export.
filesystem = OSFS(root_dir / 'test_export/static')
exported_static_files = filesystem.listdir()
self.assertTrue(filesystem.exists(asset_displayname))
self.assertEqual(len(exported_static_files), 1)
# Remove tempdir
shutil.rmtree(root_dir)
def test_advanced_components_require_two_clicks(self):
self.check_components_on_page(['word_cloud'], ['Word cloud'])
def test_malformed_edit_unit_request(self):
# just pick one vertical
usage_key = self.course.id.make_usage_key('vertical', None)
resp = self.client.get_html(get_url('container_handler', usage_key))
self.assertEqual(resp.status_code, 400)
def test_edit_unit(self):
"""Verifies rendering the editor in all the verticals in the given test course"""
self._check_verticals([self.vert_loc])
def _get_draft_counts(self, item):
cnt = 1 if getattr(item, 'is_draft', False) else 0
for child in item.get_children():
cnt = cnt + self._get_draft_counts(child)
return cnt
def test_get_items(self):
'''
        This verifies a bug we had where the None setting in get_items() meant 'wildcard'.
Unfortunately, None = published for the revision field, so get_items() would return
both draft and non-draft copies.
'''
self.store.convert_to_draft(self.problem.location, self.user.id)
# Query get_items() and find the html item. This should just return back a single item (not 2).
direct_store_items = self.store.get_items(
self.course.id, revision=ModuleStoreEnum.RevisionOption.published_only
)
items_from_direct_store = [item for item in direct_store_items if item.location == self.problem.location]
self.assertEqual(len(items_from_direct_store), 1)
self.assertFalse(getattr(items_from_direct_store[0], 'is_draft', False))
# Fetch from the draft store.
draft_store_items = self.store.get_items(
self.course.id, revision=ModuleStoreEnum.RevisionOption.draft_only
)
items_from_draft_store = [item for item in draft_store_items if item.location == self.problem.location]
self.assertEqual(len(items_from_draft_store), 1)
# TODO the below won't work for split mongo
self.assertTrue(getattr(items_from_draft_store[0], 'is_draft', False))
def test_draft_metadata(self):
'''
This verifies a bug we had where inherited metadata was getting written to the
module as 'own-metadata' when publishing. Also verifies the metadata inheritance is
properly computed
'''
# refetch course so it has all the children correct
course = self.store.update_item(self.course, self.user.id)
course.graceperiod = timedelta(days=1, hours=5, minutes=59, seconds=59)
course = self.store.update_item(course, self.user.id)
problem = self.store.get_item(self.problem.location)
self.assertEqual(problem.graceperiod, course.graceperiod)
self.assertNotIn('graceperiod', own_metadata(problem))
self.store.convert_to_draft(problem.location, self.user.id)
# refetch to check metadata
problem = self.store.get_item(problem.location)
self.assertEqual(problem.graceperiod, course.graceperiod)
self.assertNotIn('graceperiod', own_metadata(problem))
# publish module
self.store.publish(problem.location, self.user.id)
# refetch to check metadata
problem = self.store.get_item(problem.location)
self.assertEqual(problem.graceperiod, course.graceperiod)
self.assertNotIn('graceperiod', own_metadata(problem))
# put back in draft and change metadata and see if it's now marked as 'own_metadata'
self.store.convert_to_draft(problem.location, self.user.id)
problem = self.store.get_item(problem.location)
new_graceperiod = timedelta(hours=1)
self.assertNotIn('graceperiod', own_metadata(problem))
problem.graceperiod = new_graceperiod
# Save the data that we've just changed to the underlying
# MongoKeyValueStore before we update the mongo datastore.
problem.save()
self.assertIn('graceperiod', own_metadata(problem))
self.assertEqual(problem.graceperiod, new_graceperiod)
self.store.update_item(problem, self.user.id)
# read back to make sure it reads as 'own-metadata'
problem = self.store.get_item(problem.location)
self.assertIn('graceperiod', own_metadata(problem))
self.assertEqual(problem.graceperiod, new_graceperiod)
# republish
self.store.publish(problem.location, self.user.id)
# and re-read and verify 'own-metadata'
self.store.convert_to_draft(problem.location, self.user.id)
problem = self.store.get_item(problem.location)
self.assertIn('graceperiod', own_metadata(problem))
self.assertEqual(problem.graceperiod, new_graceperiod)
def test_get_depth_with_drafts(self):
# make sure no draft items have been returned
num_drafts = self._get_draft_counts(self.course)
self.assertEqual(num_drafts, 0)
# put into draft
self.store.convert_to_draft(self.problem.location, self.user.id)
# make sure we can query that item and verify that it is a draft
draft_problem = self.store.get_item(self.problem.location)
self.assertTrue(getattr(draft_problem, 'is_draft', False))
# now requery with depth
course = self.store.get_course(self.course.id, depth=None)
        # make sure just one draft item has been returned
num_drafts = self._get_draft_counts(course)
self.assertEqual(num_drafts, 1)
@mock.patch('xmodule.course_module.requests.get')
def test_import_textbook_as_content_element(self, mock_get):
mock_get.return_value.text = dedent("""
<?xml version="1.0"?><table_of_contents>
<entry page="5" page_label="ii" name="Table of Contents"/>
</table_of_contents>
""").strip()
self.course.textbooks = [Textbook("Textbook", "https://s3.amazonaws.com/edx-textbooks/guttag_computation_v3/")]
course = self.store.update_item(self.course, self.user.id)
self.assertGreater(len(course.textbooks), 0)
def test_import_polls(self):
items = self.store.get_items(self.course.id, qualifiers={'category': 'poll_question'})
self.assertTrue(len(items) > 0)
# check that there's actually content in the 'question' field
self.assertGreater(len(items[0].question), 0)
def test_module_preview_in_whitelist(self):
"""
Tests the ajax callback to render an XModule
"""
with override_settings(COURSES_WITH_UNSAFE_CODE=[unicode(self.course.id)]):
# also try a custom response which will trigger the 'is this course in whitelist' logic
resp = self.client.get_json(
get_url('xblock_view_handler', self.vert_loc, kwargs={'view_name': 'container_preview'})
)
self.assertEqual(resp.status_code, 200)
vertical = self.store.get_item(self.vert_loc)
for child in vertical.children:
self.assertContains(resp, unicode(child))
def test_delete(self):
# make sure the parent points to the child object which is to be deleted
# need to refetch chapter b/c at the time it was assigned it had no children
chapter = self.store.get_item(self.chapter_loc)
self.assertIn(self.seq_loc, chapter.children)
self.client.delete(get_url('xblock_handler', self.seq_loc))
with self.assertRaises(ItemNotFoundError):
self.store.get_item(self.seq_loc)
chapter = self.store.get_item(self.chapter_loc)
# make sure the parent no longer points to the child object which was deleted
self.assertNotIn(self.seq_loc, chapter.children)
def test_asset_delete_and_restore(self):
'''
This test will exercise the soft delete/restore functionality of the assets
'''
asset_key = self._delete_asset_in_course()
# now try to find it in store, but they should not be there any longer
content = contentstore().find(asset_key, throw_on_not_found=False)
self.assertIsNone(content)
# now try to find it and the thumbnail in trashcan - should be in there
content = contentstore('trashcan').find(asset_key, throw_on_not_found=False)
self.assertIsNotNone(content)
# let's restore the asset
restore_asset_from_trashcan(unicode(asset_key))
# now try to find it in courseware store, and they should be back after restore
content = contentstore('trashcan').find(asset_key, throw_on_not_found=False)
self.assertIsNotNone(content)
def _delete_asset_in_course(self):
"""
Helper method for:
1) importing course from xml
2) finding asset in course (verifying non-empty)
3) computing thumbnail location of asset
4) deleting the asset from the course
"""
asset_key = self.course.id.make_asset_key('asset', 'sample_static.txt')
content = StaticContent(
asset_key, "Fake asset", "application/text", "test",
)
contentstore().save(content)
# go through the website to do the delete, since the soft-delete logic is in the view
url = reverse_course_url(
'assets_handler',
self.course.id,
kwargs={'asset_key_string': unicode(asset_key)}
)
resp = self.client.delete(url)
self.assertEqual(resp.status_code, 204)
return asset_key
def test_empty_trashcan(self):
'''
This test will exercise the emptying of the asset trashcan
'''
self._delete_asset_in_course()
# make sure there's something in the trashcan
all_assets, __ = contentstore('trashcan').get_all_content_for_course(self.course.id)
self.assertGreater(len(all_assets), 0)
# empty the trashcan
empty_asset_trashcan([self.course.id])
# make sure trashcan is empty
all_assets, count = contentstore('trashcan').get_all_content_for_course(self.course.id)
self.assertEqual(len(all_assets), 0)
self.assertEqual(count, 0)
def test_illegal_draft_crud_ops(self):
# this test presumes old mongo and split_draft not full split
with self.assertRaises(InvalidVersionError):
self.store.convert_to_draft(self.chapter_loc, self.user.id)
chapter = self.store.get_item(self.chapter_loc)
chapter.data = 'chapter data'
self.store.update_item(chapter, self.user.id)
newobject = self.store.get_item(self.chapter_loc)
self.assertFalse(getattr(newobject, 'is_draft', False))
with self.assertRaises(InvalidVersionError):
self.store.unpublish(self.chapter_loc, self.user.id)
def test_bad_contentstore_request(self):
"""
        Test that the user gets proper responses for requests with an invalid
        URL or asset/course key
"""
resp = self.client.get_html('/c4x/CDX/123123/asset/&invalid.png')
self.assertEqual(resp.status_code, 400)
resp = self.client.get_html('/c4x/CDX/123123/asset/invalid.png')
self.assertEqual(resp.status_code, 404)
# Now test that 404 response is returned when user tries to access
# asset of some invalid course from split ModuleStore
with self.store.default_store(ModuleStoreEnum.Type.split):
resp = self.client.get_html('/c4x/InvalidOrg/InvalidCourse/asset/invalid.png')
self.assertEqual(resp.status_code, 404)
def test_delete_course(self):
"""
        This test creates a course, makes a draft item, and deletes the course, asserting that the
        draft content is deleted as well
"""
# add an asset
asset_key = self.course.id.make_asset_key('asset', 'sample_static.txt')
content = StaticContent(
asset_key, "Fake asset", "application/text", "test",
)
contentstore().save(content)
assets, count = contentstore().get_all_content_for_course(self.course.id)
self.assertGreater(len(assets), 0)
self.assertGreater(count, 0)
self.store.convert_to_draft(self.vert_loc, self.user.id)
# delete the course
self.store.delete_course(self.course.id, self.user.id)
        # assert that there are absolutely no non-draft modules left in the course;
        # this should also include all draft items
items = self.store.get_items(self.course.id)
self.assertEqual(len(items), 0)
# assert that all content in the asset library is also deleted
assets, count = contentstore().get_all_content_for_course(self.course.id)
self.assertEqual(len(assets), 0)
self.assertEqual(count, 0)
def test_course_handouts_rewrites(self):
"""
Test that the xblock_handler rewrites static handout links
"""
handouts = self.store.create_item(
self.user.id, self.course.id, 'course_info', 'handouts', fields={
"data": "<a href='/static/handouts/sample_handout.txt'>Sample</a>",
}
)
# get module info (json)
resp = self.client.get(get_url('xblock_handler', handouts.location))
# make sure we got a successful response
self.assertEqual(resp.status_code, 200)
# check that /static/ has been converted to the full path
        # note, we know what the link should be because that's what's in the 'toy' course in the test data
asset_key = self.course.id.make_asset_key('asset', 'handouts_sample_handout.txt')
self.assertContains(resp, unicode(asset_key))
def test_prefetch_children(self):
# make sure we haven't done too many round trips to DB:
# 1) the course,
# 2 & 3) for the chapters and sequentials
# Because we're querying from the top of the tree, we cache information needed for inheritance,
# so we don't need to make an extra query to compute it.
# set the branch to 'publish' in order to prevent extra lookups of draft versions
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, self.course.id):
with check_mongo_calls(3):
course = self.store.get_course(self.course.id, depth=2)
# make sure we pre-fetched a known sequential which should be at depth=2
self.assertIn(self.seq_loc, course.system.module_data)
# make sure we don't have a specific vertical which should be at depth=3
self.assertNotIn(self.vert_loc, course.system.module_data)
# Now, test with the branch set to draft. No extra round trips b/c it doesn't go deep enough to get
# beyond direct only categories
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, self.course.id):
with check_mongo_calls(3):
self.store.get_course(self.course.id, depth=2)
def _check_verticals(self, locations):
""" Test getting the editing HTML for each vertical. """
# Assert is here to make sure that the course being tested actually has verticals (units) to check.
self.assertGreater(len(locations), 0)
for loc in locations:
resp = self.client.get_html(get_url('container_handler', loc))
self.assertEqual(resp.status_code, 200)
@ddt.ddt
class ContentStoreTest(ContentStoreTestCase, XssTestMixin):
"""
Tests for the CMS ContentStore application.
"""
def setUp(self):
super(ContentStoreTest, self).setUp()
self.course_data = {
'org': 'MITx',
'number': '111',
'display_name': 'Robot Super Course',
'run': '2013_Spring'
}
def assert_created_course(self, number_suffix=None):
"""
Checks that the course was created properly.
"""
test_course_data = {}
test_course_data.update(self.course_data)
if number_suffix:
test_course_data['number'] = '{0}_{1}'.format(test_course_data['number'], number_suffix)
course_key = _get_course_id(self.store, test_course_data)
_create_course(self, course_key, test_course_data)
# Verify that the creator is now registered in the course.
self.assertTrue(CourseEnrollment.is_enrolled(self.user, course_key))
return test_course_data
def assert_create_course_failed(self, error_message):
"""
Checks that the course was not created.
"""
resp = self.client.ajax_post('/course/', self.course_data)
self.assertEqual(resp.status_code, 400)
data = parse_json(resp)
self.assertEqual(data['error'], error_message)
def test_create_course(self):
"""Test new course creation - happy path"""
self.assert_created_course()
@override_settings(DEFAULT_COURSE_LANGUAGE='hr')
def test_create_course_default_language(self):
"""Test new course creation and verify default language"""
test_course_data = self.assert_created_course()
course_id = _get_course_id(self.store, test_course_data)
course_module = self.store.get_course(course_id)
self.assertEquals(course_module.language, 'hr')
def test_create_course_with_dots(self):
"""Test new course creation with dots in the name"""
self.course_data['org'] = 'org.foo.bar'
self.course_data['number'] = 'course.number'
self.course_data['run'] = 'run.name'
self.assert_created_course()
def test_create_course_check_forum_seeding(self):
"""Test new course creation and verify forum seeding """
test_course_data = self.assert_created_course(number_suffix=uuid4().hex)
self.assertTrue(are_permissions_roles_seeded(_get_course_id(self.store, test_course_data)))
def test_forum_unseeding_on_delete(self):
"""Test new course creation and verify forum unseeding """
test_course_data = self.assert_created_course(number_suffix=uuid4().hex)
course_id = _get_course_id(self.store, test_course_data)
self.assertTrue(are_permissions_roles_seeded(course_id))
delete_course_and_groups(course_id, self.user.id)
# should raise an exception for checking permissions on deleted course
with self.assertRaises(ItemNotFoundError):
are_permissions_roles_seeded(course_id)
def test_forum_unseeding_with_multiple_courses(self):
"""Test new course creation and verify forum unseeding when there are multiple courses"""
test_course_data = self.assert_created_course(number_suffix=uuid4().hex)
second_course_data = self.assert_created_course(number_suffix=uuid4().hex)
# unseed the forums for the first course
course_id = _get_course_id(self.store, test_course_data)
delete_course_and_groups(course_id, self.user.id)
# should raise an exception for checking permissions on deleted course
with self.assertRaises(ItemNotFoundError):
are_permissions_roles_seeded(course_id)
second_course_id = _get_course_id(self.store, second_course_data)
# permissions should still be there for the other course
self.assertTrue(are_permissions_roles_seeded(second_course_id))
def test_course_enrollments_and_roles_on_delete(self):
"""
Test that course deletion doesn't remove course enrollments or user's roles
"""
test_course_data = self.assert_created_course(number_suffix=uuid4().hex)
course_id = _get_course_id(self.store, test_course_data)
# test that a user gets their enrollment and the 'Student' role by default on creating a course
self.assertTrue(CourseEnrollment.is_enrolled(self.user, course_id))
self.assertTrue(self.user.roles.filter(name="Student", course_id=course_id))
delete_course_and_groups(course_id, self.user.id)
# check that user's enrollment for this course is not deleted
self.assertTrue(CourseEnrollment.is_enrolled(self.user, course_id))
# check that the user keeps the forum role "Student" for this course even after deleting it
self.assertTrue(self.user.roles.filter(name="Student", course_id=course_id))
def test_course_access_groups_on_delete(self):
"""
Test that course deletion removes users from the 'instructor' and 'staff' groups of this course
in all formats, e.g. 'instructor_edX/Course/Run', 'instructor_edX.Course.Run', 'instructor_Course'
"""
test_course_data = self.assert_created_course(number_suffix=uuid4().hex)
course_id = _get_course_id(self.store, test_course_data)
# Add the user to all possible groups and check that the user is in the instructor groups of this course
instructor_role = CourseInstructorRole(course_id)
auth.add_users(self.user, instructor_role, self.user)
self.assertTrue(len(instructor_role.users_with_role()) > 0)
# Now delete the course and check that the user is no longer in its instructor groups
delete_course_and_groups(course_id, self.user.id)
# Update our cached user since its roles have changed
self.user = User.objects.get_by_natural_key(self.user.natural_key()[0])
self.assertFalse(instructor_role.has_user(self.user))
self.assertEqual(len(instructor_role.users_with_role()), 0)
def test_create_course_after_delete(self):
"""
Test that course creation works after deleting a course with the same URL
"""
test_course_data = self.assert_created_course()
course_id = _get_course_id(self.store, test_course_data)
delete_course_and_groups(course_id, self.user.id)
self.assert_created_course()
def test_create_course_duplicate_course(self):
"""Test new course creation - error path"""
self.client.ajax_post('/course/', self.course_data)
self.assert_course_creation_failed('There is already a course defined with the same organization and course number. Please change either organization or course number to be unique.')
def assert_course_creation_failed(self, error_message):
"""
Checks that the course did not get created
"""
test_enrollment = False
try:
course_id = _get_course_id(self.store, self.course_data)
initially_enrolled = CourseEnrollment.is_enrolled(self.user, course_id)
test_enrollment = True
except InvalidKeyError:
# b/c the intent of the test with bad chars isn't to test auth but to test the handler, ignore
pass
resp = self.client.ajax_post('/course/', self.course_data)
self.assertEqual(resp.status_code, 200)
data = parse_json(resp)
self.assertRegexpMatches(data['ErrMsg'], error_message)
if test_enrollment:
# One test case involves trying to create the same course twice. Hence for that course,
# the user will be enrolled. In the other cases, initially_enrolled will be False.
self.assertEqual(initially_enrolled, CourseEnrollment.is_enrolled(self.user, course_id))
def test_create_course_duplicate_number(self):
"""Test new course creation - error path"""
self.client.ajax_post('/course/', self.course_data)
self.course_data['display_name'] = 'Robot Super Course Two'
self.course_data['run'] = '2013_Summer'
self.assert_course_creation_failed('There is already a course defined with the same organization and course number. Please change either organization or course number to be unique.')
def test_create_course_case_change(self):
"""Test new course creation - error path due to case insensitive name equality"""
self.course_data['number'] = 'capital'
self.client.ajax_post('/course/', self.course_data)
cache_current = self.course_data['org']
self.course_data['org'] = self.course_data['org'].lower()
self.assert_course_creation_failed('There is already a course defined with the same organization and course number. Please change either organization or course number to be unique.')
self.course_data['org'] = cache_current
self.client.ajax_post('/course/', self.course_data)
cache_current = self.course_data['number']
self.course_data['number'] = self.course_data['number'].upper()
self.assert_course_creation_failed('There is already a course defined with the same organization and course number. Please change either organization or course number to be unique.')
def test_course_substring(self):
"""
Test that a new course can be created whose name is a substring of an existing course
"""
self.client.ajax_post('/course/', self.course_data)
cache_current = self.course_data['number']
self.course_data['number'] = '{}a'.format(self.course_data['number'])
resp = self.client.ajax_post('/course/', self.course_data)
self.assertEqual(resp.status_code, 200)
self.course_data['number'] = cache_current
self.course_data['org'] = 'a{}'.format(self.course_data['org'])
resp = self.client.ajax_post('/course/', self.course_data)
self.assertEqual(resp.status_code, 200)
def test_create_course_with_bad_organization(self):
"""Test new course creation - error path for bad organization name"""
self.course_data['org'] = 'University of California, Berkeley'
self.assert_course_creation_failed(r"(?s)Unable to create course 'Robot Super Course'.*")
def test_create_course_with_course_creation_disabled_staff(self):
"""Test new course creation -- course creation disabled, but staff access."""
with mock.patch.dict('django.conf.settings.FEATURES', {'DISABLE_COURSE_CREATION': True}):
self.assert_created_course()
def test_create_course_with_course_creation_disabled_not_staff(self):
"""Test new course creation -- error path for course creation disabled, not staff access."""
with mock.patch.dict('django.conf.settings.FEATURES', {'DISABLE_COURSE_CREATION': True}):
self.user.is_staff = False
self.user.save()
self.assert_course_permission_denied()
def test_create_course_no_course_creators_staff(self):
"""Test new course creation -- course creation group enabled, staff, group is empty."""
with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_CREATOR_GROUP': True}):
self.assert_created_course()
def test_create_course_no_course_creators_not_staff(self):
"""Test new course creation -- error path for course creator group enabled, not staff, group is empty."""
with mock.patch.dict('django.conf.settings.FEATURES', {"ENABLE_CREATOR_GROUP": True}):
self.user.is_staff = False
self.user.save()
self.assert_course_permission_denied()
def test_create_course_with_course_creator(self):
"""Test new course creation -- use course creator group"""
with mock.patch.dict('django.conf.settings.FEATURES', {"ENABLE_CREATOR_GROUP": True}):
auth.add_users(self.user, CourseCreatorRole(), self.user)
self.assert_created_course()
def test_create_course_with_unicode_in_id_disabled(self):
"""
Test new course creation with feature setting: ALLOW_UNICODE_COURSE_ID disabled.
"""
with mock.patch.dict('django.conf.settings.FEATURES', {'ALLOW_UNICODE_COURSE_ID': False}):
error_message = "Special characters not allowed in organization, course number, and course run."
self.course_data['org'] = u'Юникода'
self.assert_create_course_failed(error_message)
self.course_data['number'] = u'échantillon'
self.assert_create_course_failed(error_message)
self.course_data['run'] = u'όνομα'
self.assert_create_course_failed(error_message)
def assert_course_permission_denied(self):
"""
Checks that the course did not get created due to a PermissionError.
"""
resp = self.client.ajax_post('/course/', self.course_data)
self.assertEqual(resp.status_code, 403)
def test_course_index_view_with_no_courses(self):
"""Test viewing the index page with no courses"""
resp = self.client.get_html('/home/')
self.assertContains(
resp,
'<h1 class="page-header">Studio Home</h1>',
status_code=200,
html=True
)
def test_course_factory(self):
"""Test that the course factory works correctly."""
course = CourseFactory.create()
self.assertIsInstance(course, CourseDescriptor)
def test_item_factory(self):
"""Test that the item factory works correctly."""
course = CourseFactory.create()
item = ItemFactory.create(parent_location=course.location)
self.assertIsInstance(item, SequenceDescriptor)
def test_course_index_view_with_course(self):
"""Test viewing the index page with an existing course"""
CourseFactory.create(display_name='Robot Super Educational Course')
resp = self.client.get_html('/home/')
self.assertContains(
resp,
'<h3 class="course-title">Robot Super Educational Course</h3>',
status_code=200,
html=True
)
def test_course_index_view_xss(self):
"""Test that the index page correctly escapes course names with script
tags."""
CourseFactory.create(
display_name='<script>alert("course XSS")</script>'
)
LibraryFactory.create(display_name='<script>alert("library XSS")</script>')
resp = self.client.get_html('/home/')
for xss in ('course', 'library'):
html = '<script>alert("{name} XSS")</script>'.format(
name=xss
)
self.assert_no_xss(resp, html)
def test_course_overview_view_with_course(self):
"""Test viewing the course overview page with an existing course"""
course = CourseFactory.create()
resp = self._show_course_overview(course.id)
self.assertContains(
resp,
'<article class="outline outline-complex outline-course" data-locator="{locator}" data-course-key="{course_key}">'.format(
locator=unicode(course.location),
course_key=unicode(course.id),
),
status_code=200,
html=True
)
def test_create_item(self):
"""Test creating a new xblock instance."""
course = CourseFactory.create()
section_data = {
'parent_locator': unicode(course.location),
'category': 'chapter',
'display_name': 'Section One',
}
resp = self.client.ajax_post(reverse_url('xblock_handler'), section_data)
self.assertEqual(resp.status_code, 200)
data = parse_json(resp)
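# the new block id is system-generated (a GUID-like hex string), so match it
# with a loose pattern rather than an exact locator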
retarget = unicode(course.id.make_usage_key('chapter', 'REPLACE')).replace('REPLACE', r'([0-9]|[a-f]){3,}')
self.assertRegexpMatches(data['locator'], retarget)
def test_capa_module(self):
"""Test that a problem treats markdown specially."""
course = CourseFactory.create()
problem_data = {
'parent_locator': unicode(course.location),
'category': 'problem'
}
resp = self.client.ajax_post(reverse_url('xblock_handler'), problem_data)
self.assertEqual(resp.status_code, 200)
payload = parse_json(resp)
problem_loc = UsageKey.from_string(payload['locator'])
problem = self.store.get_item(problem_loc)
# should be a CapaDescriptor
self.assertIsInstance(problem, CapaDescriptor, "New problem is not a CapaDescriptor")
context = problem.get_context()
self.assertIn('markdown', context, "markdown is missing from context")
self.assertNotIn('markdown', problem.editable_metadata_fields, "Markdown slipped into the editable metadata fields")
def test_cms_imported_course_walkthrough(self):
"""
Import a course and walk through some common URL endpoints. This only verifies that the
pages do not 500; it does not check any deeper correctness, so it is not a deep test
"""
def test_get_html(handler):
# Helper function for getting HTML for a page in Studio and
# checking that it does not error.
resp = self.client.get_html(
get_url(handler, course_key, 'course_key_string')
)
self.assertEqual(resp.status_code, 200)
course_items = import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['simple'], create_if_not_present=True
)
course_key = course_items[0].id
resp = self._show_course_overview(course_key)
self.assertEqual(resp.status_code, 200)
self.assertContains(resp, 'Chapter 2')
# go to various pages
test_get_html('import_handler')
test_get_html('export_handler')
test_get_html('course_team_handler')
test_get_html('course_info_handler')
test_get_html('assets_handler')
test_get_html('tabs_handler')
test_get_html('settings_handler')
test_get_html('grading_handler')
test_get_html('advanced_settings_handler')
test_get_html('textbooks_list_handler')
# go look at the Edit page
unit_key = course_key.make_usage_key('vertical', 'test_vertical')
resp = self.client.get_html(get_url('container_handler', unit_key))
self.assertEqual(resp.status_code, 200)
def delete_item(category, name):
""" Helper method for testing the deletion of an xblock item. """
item_key = course_key.make_usage_key(category, name)
resp = self.client.delete(get_url('xblock_handler', item_key))
self.assertEqual(resp.status_code, 204)
# delete a component
delete_item(category='html', name='test_html')
# delete a unit
delete_item(category='vertical', name='test_vertical')
# delete a subsection (sequential)
delete_item(category='sequential', name='test_sequence')
# delete a chapter
delete_item(category='chapter', name='chapter_2')
def test_import_into_new_course_id(self):
target_id = _get_course_id(self.store, self.course_data)
_create_course(self, target_id, self.course_data)
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], target_id=target_id)
modules = self.store.get_items(target_id)
# we should have a number of modules in there
# we can't specify an exact number since it'll always be changing
self.assertGreater(len(modules), 10)
#
# test various re-namespacing elements
#
# first check PDF textbooks, to make sure the url paths got updated
course_module = self.store.get_course(target_id)
self.assertEqual(len(course_module.pdf_textbooks), 1)
self.assertEqual(len(course_module.pdf_textbooks[0]["chapters"]), 2)
self.assertEqual(course_module.pdf_textbooks[0]["chapters"][0]["url"], '/static/Chapter1.pdf')
self.assertEqual(course_module.pdf_textbooks[0]["chapters"][1]["url"], '/static/Chapter2.pdf')
def test_import_into_new_course_id_wiki_slug_renamespacing(self):
# If reimporting into the same course do not change the wiki_slug.
target_id = self.store.make_course_key('edX', 'toy', '2012_Fall')
course_data = {
'org': target_id.org,
'number': target_id.course,
'display_name': 'Robot Super Course',
'run': target_id.run
}
_create_course(self, target_id, course_data)
course_module = self.store.get_course(target_id)
course_module.wiki_slug = 'toy'
course_module.save()
# Import a course with wiki_slug == location.course
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], target_id=target_id)
course_module = self.store.get_course(target_id)
self.assertEquals(course_module.wiki_slug, 'toy')
# But change the wiki_slug if it is a different course.
target_id = self.store.make_course_key('MITx', '111', '2013_Spring')
course_data = {
'org': target_id.org,
'number': target_id.course,
'display_name': 'Robot Super Course',
'run': target_id.run
}
_create_course(self, target_id, course_data)
# Import a course with wiki_slug == location.course
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], target_id=target_id)
course_module = self.store.get_course(target_id)
self.assertEquals(course_module.wiki_slug, 'MITx.111.2013_Spring')
# Now try importing a course with wiki_slug == '{0}.{1}.{2}'.format(location.org, location.course, location.run)
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['two_toys'], target_id=target_id)
course_module = self.store.get_course(target_id)
self.assertEquals(course_module.wiki_slug, 'MITx.111.2013_Spring')
def test_import_metadata_with_attempts_empty_string(self):
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['simple'], create_if_not_present=True)
did_load_item = False
try:
course_key = self.store.make_course_key('edX', 'simple', 'problem')
usage_key = course_key.make_usage_key('problem', 'ps01-simple')
self.store.get_item(usage_key)
did_load_item = True
except ItemNotFoundError:
pass
# make sure we found the item (i.e. it didn't error while loading)
self.assertTrue(did_load_item)
@ddt.data(ModuleStoreEnum.Type.split, ModuleStoreEnum.Type.mongo)
def test_forum_id_generation(self, default_store):
"""
Test that a discussion item, even if it doesn't set its discussion_id,
consistently generates the same one
"""
course = CourseFactory.create(default_store=default_store)
# create a discussion item
discussion_item = self.store.create_item(self.user.id, course.id, 'discussion', 'new_component')
# now fetch it from the modulestore to instantiate its descriptor
fetched = self.store.get_item(discussion_item.location)
# refetch it to be safe
refetched = self.store.get_item(discussion_item.location)
# and make sure the same discussion items have the same discussion ids
self.assertEqual(fetched.discussion_id, discussion_item.discussion_id)
self.assertEqual(fetched.discussion_id, refetched.discussion_id)
# and make sure that the id isn't the old "$$GUID$$"
self.assertNotEqual(discussion_item.discussion_id, '$$GUID$$')
def test_metadata_inheritance(self):
course_items = import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True
)
course = course_items[0]
verticals = self.store.get_items(course.id, qualifiers={'category': 'vertical'})
# let's assert on the metadata_inheritance on an existing vertical
for vertical in verticals:
self.assertEqual(course.xqa_key, vertical.xqa_key)
self.assertEqual(course.start, vertical.start)
self.assertGreater(len(verticals), 0)
# create a new module and add it as a child of a vertical
parent = verticals[0]
new_block = self.store.create_child(
self.user.id, parent.location, 'html', 'new_component'
)
# flush the cache
new_block = self.store.get_item(new_block.location)
# check for grace period definition which should be defined at the course level
self.assertEqual(parent.graceperiod, new_block.graceperiod)
self.assertEqual(parent.start, new_block.start)
self.assertEqual(course.start, new_block.start)
self.assertEqual(course.xqa_key, new_block.xqa_key)
#
# now let's define an override at the leaf node level
#
new_block.graceperiod = timedelta(1)
self.store.update_item(new_block, self.user.id)
# flush the cache and refetch
new_block = self.store.get_item(new_block.location)
self.assertEqual(timedelta(1), new_block.graceperiod)
def test_default_metadata_inheritance(self):
course = CourseFactory.create()
vertical = ItemFactory.create(parent_location=course.location)
course.children.append(vertical)
# in memory
self.assertIsNotNone(course.start)
self.assertEqual(course.start, vertical.start)
self.assertEqual(course.textbooks, [])
self.assertIn('GRADER', course.grading_policy)
self.assertIn('GRADE_CUTOFFS', course.grading_policy)
# by fetching
fetched_course = self.store.get_item(course.location)
fetched_item = self.store.get_item(vertical.location)
self.assertIsNotNone(fetched_course.start)
self.assertEqual(course.start, fetched_course.start)
self.assertEqual(fetched_course.start, fetched_item.start)
self.assertEqual(course.textbooks, fetched_course.textbooks)
def test_image_import(self):
"""Test backwards compatibilty of course image."""
content_store = contentstore()
# Use conditional_and_poll, as it's got an image already
courses = import_course_from_xml(
self.store,
self.user.id,
TEST_DATA_DIR,
['conditional_and_poll'],
static_content_store=content_store,
create_if_not_present=True
)
course = courses[0]
# Make sure the course image is set to the right place
self.assertEqual(course.course_image, 'images_course_image.jpg')
# Ensure that the imported course image is present -- this shouldn't raise an exception
asset_key = course.id.make_asset_key('asset', course.course_image)
content_store.find(asset_key)
def _show_course_overview(self, course_key):
"""
Show the course overview page.
"""
resp = self.client.get_html(get_url('course_handler', course_key, 'course_key_string'))
return resp
def test_wiki_slug(self):
"""When creating a course a unique wiki_slug should be set."""
course_key = _get_course_id(self.store, self.course_data)
_create_course(self, course_key, self.course_data)
course_module = self.store.get_course(course_key)
self.assertEquals(course_module.wiki_slug, 'MITx.111.2013_Spring')
def test_course_handler_with_invalid_course_key_string(self):
"""Test viewing the course overview page with invalid course id"""
response = self.client.get_html('/course/edX/test')
self.assertEquals(response.status_code, 404)
class MetadataSaveTestCase(ContentStoreTestCase):
"""Test that metadata is correctly cached and decached."""
def setUp(self):
super(MetadataSaveTestCase, self).setUp()
course = CourseFactory.create()
video_sample_xml = '''
<video display_name="Test Video"
youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8"
show_captions="false"
from="00:00:01"
to="00:01:00">
<source src="http://www.example.com/file.mp4"/>
<track src="http://www.example.com/track"/>
</video>
'''
self.video_descriptor = ItemFactory.create(
parent_location=course.location, category='video',
data={'data': video_sample_xml}
)
def test_metadata_not_persistence(self):
"""
Test that descriptors which set metadata fields in their
constructor are correctly deleted.
"""
self.assertIn('html5_sources', own_metadata(self.video_descriptor))
attrs_to_strip = {
'show_captions',
'youtube_id_1_0',
'youtube_id_0_75',
'youtube_id_1_25',
'youtube_id_1_5',
'start_time',
'end_time',
'source',
'html5_sources',
'track'
}
location = self.video_descriptor.location
for field_name in attrs_to_strip:
delattr(self.video_descriptor, field_name)
self.assertNotIn('html5_sources', own_metadata(self.video_descriptor))
self.store.update_item(self.video_descriptor, self.user.id)
module = self.store.get_item(location)
self.assertNotIn('html5_sources', own_metadata(module))
def test_metadata_persistence(self):
# TODO: create the same test as `test_metadata_not_persistence`,
# but check persistence for some other module.
pass
class RerunCourseTest(ContentStoreTestCase):
"""
Tests for Rerunning a course via the view handler
"""
def setUp(self):
super(RerunCourseTest, self).setUp()
self.destination_course_data = {
'org': 'MITx',
'number': '111',
'display_name': 'Robot Super Course',
'run': '2013_Spring'
}
def post_rerun_request(
self, source_course_key, destination_course_data=None, response_code=200, expect_error=False
):
"""Create and send an ajax post for the rerun request"""
# create data to post
rerun_course_data = {'source_course_key': unicode(source_course_key)}
if not destination_course_data:
destination_course_data = self.destination_course_data
rerun_course_data.update(destination_course_data)
destination_course_key = _get_course_id(self.store, destination_course_data)
# post the request
course_url = get_url('course_handler', destination_course_key, 'course_key_string')
response = self.client.ajax_post(course_url, rerun_course_data)
# verify response
self.assertEqual(response.status_code, response_code)
if not expect_error:
json_resp = parse_json(response)
self.assertNotIn('ErrMsg', json_resp)
destination_course_key = CourseKey.from_string(json_resp['destination_course_key'])
return destination_course_key
def get_course_listing_elements(self, html, course_key):
"""Returns the elements in the course listing section of html that have the given course_key"""
return html.cssselect('.course-item[data-course-key="{}"]'.format(unicode(course_key)))
def get_unsucceeded_course_action_elements(self, html, course_key):
"""Returns the elements in the unsucceeded course action section that have the given course_key"""
return html.cssselect('.courses-processing li[data-course-key="{}"]'.format(unicode(course_key)))
def assertInCourseListing(self, course_key):
"""
Asserts that the given course key is in the accessible course listing section of the html
and NOT in the unsucceeded course action section of the html.
"""
course_listing = lxml.html.fromstring(self.client.get_html('/home/').content)
self.assertEqual(len(self.get_course_listing_elements(course_listing, course_key)), 1)
self.assertEqual(len(self.get_unsucceeded_course_action_elements(course_listing, course_key)), 0)
def assertInUnsucceededCourseActions(self, course_key):
"""
Asserts that the given course key is in the unsucceeded course action section of the html
and NOT in the accessible course listing section of the html.
"""
course_listing = lxml.html.fromstring(self.client.get_html('/home/').content)
self.assertEqual(len(self.get_course_listing_elements(course_listing, course_key)), 0)
self.assertEqual(len(self.get_unsucceeded_course_action_elements(course_listing, course_key)), 1)
def verify_rerun_course(self, source_course_key, destination_course_key, destination_display_name):
"""
Verify the contents of the course rerun action
"""
rerun_state = CourseRerunState.objects.find_first(course_key=destination_course_key)
expected_states = {
'state': CourseRerunUIStateManager.State.SUCCEEDED,
'display_name': destination_display_name,
'source_course_key': source_course_key,
'course_key': destination_course_key,
'should_display': True,
}
for field_name, expected_value in expected_states.iteritems():
self.assertEquals(getattr(rerun_state, field_name), expected_value)
# Verify that the creator is now enrolled in the course.
self.assertTrue(CourseEnrollment.is_enrolled(self.user, destination_course_key))
# Verify both courses are in the course listing section
self.assertInCourseListing(source_course_key)
self.assertInCourseListing(destination_course_key)
def test_rerun_course_no_videos_in_val(self):
"""
Test that when rerunning a course with no videos, VAL copies nothing
"""
source_course = CourseFactory.create()
destination_course_key = self.post_rerun_request(source_course.id)
self.verify_rerun_course(source_course.id, destination_course_key, self.destination_course_data['display_name'])
videos = list(get_videos_for_course(destination_course_key))
self.assertEqual(0, len(videos))
self.assertInCourseListing(destination_course_key)
def test_rerun_course_success(self):
source_course = CourseFactory.create()
create_video(
dict(
edx_video_id="tree-hugger",
courses=[source_course.id],
status='test',
duration=2,
encoded_videos=[]
)
)
destination_course_key = self.post_rerun_request(source_course.id)
self.verify_rerun_course(source_course.id, destination_course_key, self.destination_course_data['display_name'])
# Verify that the VAL copies videos to the rerun
source_videos = list(get_videos_for_course(source_course.id))
target_videos = list(get_videos_for_course(destination_course_key))
self.assertEqual(1, len(source_videos))
self.assertEqual(source_videos, target_videos)
def test_rerun_course_resets_advertised_date(self):
source_course = CourseFactory.create(advertised_start="01-12-2015")
destination_course_key = self.post_rerun_request(source_course.id)
destination_course = self.store.get_course(destination_course_key)
self.assertEqual(None, destination_course.advertised_start)
def test_rerun_of_rerun(self):
source_course = CourseFactory.create()
rerun_course_key = self.post_rerun_request(source_course.id)
rerun_of_rerun_data = {
'org': rerun_course_key.org,
'number': rerun_course_key.course,
'display_name': 'rerun of rerun',
'run': 'rerun2'
}
rerun_of_rerun_course_key = self.post_rerun_request(rerun_course_key, rerun_of_rerun_data)
self.verify_rerun_course(rerun_course_key, rerun_of_rerun_course_key, rerun_of_rerun_data['display_name'])
def test_rerun_course_fail_no_source_course(self):
existent_course_key = CourseFactory.create().id
non_existent_course_key = CourseLocator("org", "non_existent_course", "non_existent_run")
destination_course_key = self.post_rerun_request(non_existent_course_key)
# Verify that the course rerun action is marked failed
rerun_state = CourseRerunState.objects.find_first(course_key=destination_course_key)
self.assertEquals(rerun_state.state, CourseRerunUIStateManager.State.FAILED)
self.assertIn("Cannot find a course at", rerun_state.message)
# Verify that the creator is not enrolled in the course.
self.assertFalse(CourseEnrollment.is_enrolled(self.user, non_existent_course_key))
# Verify that the existing course continues to be in the course listings
self.assertInCourseListing(existent_course_key)
# Verify that the failed course is NOT in the course listings
self.assertInUnsucceededCourseActions(destination_course_key)
def test_rerun_course_fail_duplicate_course(self):
existent_course_key = CourseFactory.create().id
destination_course_data = {
'org': existent_course_key.org,
'number': existent_course_key.course,
'display_name': 'existing course',
'run': existent_course_key.run
}
destination_course_key = self.post_rerun_request(
existent_course_key, destination_course_data, expect_error=True
)
# Verify that the course rerun action doesn't exist
with self.assertRaises(CourseActionStateItemNotFoundError):
CourseRerunState.objects.find_first(course_key=destination_course_key)
# Verify that the existing course continues to be in the course listing
self.assertInCourseListing(existent_course_key)
def test_rerun_with_permission_denied(self):
with mock.patch.dict('django.conf.settings.FEATURES', {"ENABLE_CREATOR_GROUP": True}):
source_course = CourseFactory.create()
auth.add_users(self.user, CourseCreatorRole(), self.user)
self.user.is_staff = False
self.user.save()
self.post_rerun_request(source_course.id, response_code=403, expect_error=True)
def test_rerun_error(self):
error_message = "Mock Error Message"
with mock.patch(
'xmodule.modulestore.mixed.MixedModuleStore.clone_course',
mock.Mock(side_effect=Exception(error_message))
):
source_course = CourseFactory.create()
destination_course_key = self.post_rerun_request(source_course.id)
rerun_state = CourseRerunState.objects.find_first(course_key=destination_course_key)
self.assertEquals(rerun_state.state, CourseRerunUIStateManager.State.FAILED)
self.assertIn(error_message, rerun_state.message)
def test_rerun_error_trunc_message(self):
"""
CourseActionUIState.message is sometimes populated with the contents
of Python tracebacks. This test ensures we don't crash when attempting
to insert a value exceeding its max_length (note that sqlite does not
complain if this happens, but MySQL throws an error).
"""
with mock.patch(
'xmodule.modulestore.mixed.MixedModuleStore.clone_course',
mock.Mock(side_effect=Exception()),
):
source_course = CourseFactory.create()
message_too_long = "traceback".rjust(CourseRerunState.MAX_MESSAGE_LENGTH * 2, '-')
with mock.patch('traceback.format_exc', return_value=message_too_long):
destination_course_key = self.post_rerun_request(source_course.id)
rerun_state = CourseRerunState.objects.find_first(course_key=destination_course_key)
self.assertEquals(rerun_state.state, CourseRerunUIStateManager.State.FAILED)
self.assertTrue(rerun_state.message.endswith("traceback"))
self.assertEqual(len(rerun_state.message), CourseRerunState.MAX_MESSAGE_LENGTH)
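# Aside (illustrative numbers): rjust pads on the left, so the oversized message
# ends in "traceback"; keeping the tail on truncation preserves the most recent
# part of a real traceback:
#
#   "traceback".rjust(14, '-')        # '-----traceback'
#   "traceback".rjust(14, '-')[-9:]   # 'traceback'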
def test_rerun_course_wiki_slug(self):
"""
Test that unique wiki_slug is assigned to rerun course.
"""
course_data = {
'org': 'edX',
'number': '123',
'display_name': 'Rerun Course',
'run': '2013'
}
source_wiki_slug = '{0}.{1}.{2}'.format(course_data['org'], course_data['number'], course_data['run'])
source_course_key = _get_course_id(self.store, course_data)
_create_course(self, source_course_key, course_data)
source_course = self.store.get_course(source_course_key)
# Verify created course's wiki_slug.
self.assertEquals(source_course.wiki_slug, source_wiki_slug)
destination_course_data = course_data
destination_course_data['run'] = '2013_Rerun'
destination_course_key = self.post_rerun_request(
source_course.id, destination_course_data=destination_course_data
)
self.verify_rerun_course(source_course.id, destination_course_key, destination_course_data['display_name'])
destination_course = self.store.get_course(destination_course_key)
destination_wiki_slug = '{0}.{1}.{2}'.format(
destination_course.id.org, destination_course.id.course, destination_course.id.run
)
# Verify rerun course's wiki_slug.
self.assertEquals(destination_course.wiki_slug, destination_wiki_slug)
class ContentLicenseTest(ContentStoreTestCase):
"""
Tests around content licenses
"""
def test_course_license_export(self):
content_store = contentstore()
root_dir = path(mkdtemp_clean())
self.course.license = "creative-commons: BY SA"
self.store.update_item(self.course, None)
export_course_to_xml(self.store, content_store, self.course.id, root_dir, 'test_license')
fname = "{block}.xml".format(block=self.course.scope_ids.usage_id.block_id)
run_file_path = root_dir / "test_license" / "course" / fname
run_xml = etree.parse(run_file_path.open())
self.assertEqual(run_xml.getroot().get("license"), "creative-commons: BY SA")
def test_video_license_export(self):
content_store = contentstore()
root_dir = path(mkdtemp_clean())
video_descriptor = ItemFactory.create(
parent_location=self.course.location, category='video',
license="all-rights-reserved"
)
export_course_to_xml(self.store, content_store, self.course.id, root_dir, 'test_license')
fname = "{block}.xml".format(block=video_descriptor.scope_ids.usage_id.block_id)
video_file_path = root_dir / "test_license" / "video" / fname
video_xml = etree.parse(video_file_path.open())
self.assertEqual(video_xml.getroot().get("license"), "all-rights-reserved")
def test_license_import(self):
course_items = import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True
)
course = course_items[0]
self.assertEqual(course.license, "creative-commons: BY")
videos = self.store.get_items(course.id, qualifiers={'category': 'video'})
self.assertEqual(videos[0].license, "all-rights-reserved")
class EntryPageTestCase(TestCase):
"""
Tests entry pages that aren't specific to a course.
"""
def setUp(self):
super(EntryPageTestCase, self).setUp()
self.client = AjaxEnabledTestClient()
def _test_page(self, page, status_code=200):
resp = self.client.get_html(page)
self.assertEqual(resp.status_code, status_code)
def test_how_it_works(self):
self._test_page("/howitworks")
def test_signup(self):
self._test_page("/signup")
def test_login(self):
self._test_page("/signin")
def test_logout(self):
# Logout redirects.
self._test_page("/logout", 302)
class SigninPageTestCase(TestCase):
"""
Tests that the CSRF token is directly included in the signin form. This is
important to make sure that the form can be submitted independently of any
other script (e.g. JavaScript that would otherwise inject the token).
"""
def test_csrf_token_is_present_in_form(self):
# Expected html:
# <form>
# ...
# <fieldset>
# ...
# <input name="csrfmiddlewaretoken" value="...">
# ...
# </fieldset>
# ...
# </form>
response = self.client.get("/signin")
csrf_token = response.cookies.get("csrftoken")
form = lxml.html.fromstring(response.content).get_element_by_id("login_form")
csrf_input_field = form.find(".//input[@name='csrfmiddlewaretoken']")
self.assertIsNotNone(csrf_token)
self.assertIsNotNone(csrf_token.value)
self.assertIsNotNone(csrf_input_field)
self.assertEqual(csrf_token.value, csrf_input_field.attrib["value"])
def _create_course(test, course_key, course_data):
"""
Creates a course via an AJAX request and verifies the URL returned in the response.
"""
course_url = get_url('course_handler', course_key, 'course_key_string')
response = test.client.ajax_post(course_url, course_data)
test.assertEqual(response.status_code, 200)
data = parse_json(response)
test.assertNotIn('ErrMsg', data)
test.assertEqual(data['url'], course_url)
def _get_course_id(store, course_data):
"""Returns the course ID."""
return store.make_course_key(course_data['org'], course_data['number'], course_data['run'])
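# Usage sketch for the two helpers above (illustrative values):
#
#   data = {'org': 'MITx', 'number': '111', 'run': '2013_Spring', 'display_name': 'Demo'}
#   course_key = _get_course_id(store, data)
#   _create_course(test_case, course_key, data)  # posts to /course/ and checks the returned URL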
|
agpl-3.0
|
marqueedev/django
|
tests/utils_tests/test_regex_helper.py
|
448
|
1784
|
from __future__ import unicode_literals
import unittest
from django.utils import regex_helper
class NormalizeTests(unittest.TestCase):
def test_empty(self):
pattern = r""
expected = [('', [])]
result = regex_helper.normalize(pattern)
self.assertEqual(result, expected)
def test_escape(self):
pattern = r"\\\^\$\.\|\?\*\+\(\)\["
expected = [('\\^$.|?*+()[', [])]
result = regex_helper.normalize(pattern)
self.assertEqual(result, expected)
def test_group_positional(self):
pattern = r"(.*)-(.+)"
expected = [('%(_0)s-%(_1)s', ['_0', '_1'])]
result = regex_helper.normalize(pattern)
self.assertEqual(result, expected)
def test_group_ignored(self):
pattern = r"(?i)(?L)(?m)(?s)(?u)(?#)"
expected = [('', [])]
result = regex_helper.normalize(pattern)
self.assertEqual(result, expected)
def test_group_noncapturing(self):
pattern = r"(?:non-capturing)"
expected = [('non-capturing', [])]
result = regex_helper.normalize(pattern)
self.assertEqual(result, expected)
def test_group_named(self):
pattern = r"(?P<first_group_name>.*)-(?P<second_group_name>.*)"
expected = [('%(first_group_name)s-%(second_group_name)s',
['first_group_name', 'second_group_name'])]
result = regex_helper.normalize(pattern)
self.assertEqual(result, expected)
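def test_reverse_substitution(self):
# A sketch of what normalize() enables -- this is how Django's reverse()
# rebuilds a URL: substitute the captured groups back into the template.
pattern = r"(?P<slug>[a-z]+)/page-(?P<num>[0-9]+)"
template, groups = regex_helper.normalize(pattern)[0]
self.assertEqual(template % {'slug': 'intro', 'num': '2'}, 'intro/page-2')
self.assertEqual(groups, ['slug', 'num'])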
def test_group_backreference(self):
pattern = r"(?P<first_group_name>.*)-(?P=first_group_name)"
expected = [('%(first_group_name)s-%(first_group_name)s',
['first_group_name'])]
result = regex_helper.normalize(pattern)
self.assertEqual(result, expected)
|
bsd-3-clause
|
scripni/rethinkdb
|
test/rql_test/connections/http_support/jinja2/testsuite/lexnparse.py
|
402
|
22314
|
# -*- coding: utf-8 -*-
"""
jinja2.testsuite.lexnparse
~~~~~~~~~~~~~~~~~~~~~~~~~~
All the unittests regarding lexing, parsing and syntax.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Environment, Template, TemplateSyntaxError, \
UndefinedError, nodes
from jinja2._compat import next, iteritems, text_type, PY2
from jinja2.lexer import Token, TokenStream, TOKEN_EOF, \
TOKEN_BLOCK_BEGIN, TOKEN_BLOCK_END
env = Environment()
# what does a string look like in jinja syntax?
if PY2:
def jinja_string_repr(string):
return repr(string)[1:]
else:
jinja_string_repr = repr
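# e.g. on Python 2, repr(u'foo') == "u'foo'", so [1:] strips the leading 'u'
# to yield a plain quoted literal usable inside a template expression.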
class TokenStreamTestCase(JinjaTestCase):
test_tokens = [Token(1, TOKEN_BLOCK_BEGIN, ''),
Token(2, TOKEN_BLOCK_END, ''),
]
def test_simple(self):
ts = TokenStream(self.test_tokens, "foo", "bar")
assert ts.current.type is TOKEN_BLOCK_BEGIN
assert bool(ts)
assert not bool(ts.eos)
next(ts)
assert ts.current.type is TOKEN_BLOCK_END
assert bool(ts)
assert not bool(ts.eos)
next(ts)
assert ts.current.type is TOKEN_EOF
assert not bool(ts)
assert bool(ts.eos)
def test_iter(self):
token_types = [t.type for t in TokenStream(self.test_tokens, "foo", "bar")]
assert token_types == ['block_begin', 'block_end', ]
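def test_expect(self):
# Sketch using TokenStream.expect() from jinja2.lexer: it returns the
# current token and advances, raising on a type mismatch.
ts = TokenStream(self.test_tokens, "foo", "bar")
assert ts.expect(TOKEN_BLOCK_BEGIN).type is TOKEN_BLOCK_BEGIN
assert ts.expect(TOKEN_BLOCK_END).type is TOKEN_BLOCK_END
assert ts.eos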
class LexerTestCase(JinjaTestCase):
def test_raw1(self):
tmpl = env.from_string('{% raw %}foo{% endraw %}|'
'{%raw%}{{ bar }}|{% baz %}{% endraw %}')
assert tmpl.render() == 'foo|{{ bar }}|{% baz %}'
def test_raw2(self):
tmpl = env.from_string('1 {%- raw -%} 2 {%- endraw -%} 3')
assert tmpl.render() == '123'
def test_balancing(self):
env = Environment('{%', '%}', '${', '}')
tmpl = env.from_string('''{% for item in seq
%}${{'foo': item}|upper}{% endfor %}''')
assert tmpl.render(seq=list(range(3))) == "{'FOO': 0}{'FOO': 1}{'FOO': 2}"
def test_comments(self):
env = Environment('<!--', '-->', '{', '}')
tmpl = env.from_string('''\
<ul>
<!--- for item in seq -->
<li>{item}</li>
<!--- endfor -->
</ul>''')
assert tmpl.render(seq=list(range(3))) == ("<ul>\n <li>0</li>\n "
"<li>1</li>\n <li>2</li>\n</ul>")
def test_string_escapes(self):
for char in u'\0', u'\u2668', u'\xe4', u'\t', u'\r', u'\n':
tmpl = env.from_string('{{ %s }}' % jinja_string_repr(char))
assert tmpl.render() == char
assert env.from_string('{{ "\N{HOT SPRINGS}" }}').render() == u'\u2668'
def test_bytefallback(self):
from pprint import pformat
tmpl = env.from_string(u'''{{ 'foo'|pprint }}|{{ 'bär'|pprint }}''')
assert tmpl.render() == pformat('foo') + '|' + pformat(u'bär')
def test_operators(self):
from jinja2.lexer import operators
for test, expect in iteritems(operators):
if test in '([{}])':
continue
stream = env.lexer.tokenize('{{ %s }}' % test)
next(stream)
assert stream.current.type == expect
def test_normalizing(self):
for seq in '\r', '\r\n', '\n':
env = Environment(newline_sequence=seq)
tmpl = env.from_string('1\n2\r\n3\n4\n')
result = tmpl.render()
assert result.replace(seq, 'X') == '1X2X3X4'
def test_trailing_newline(self):
for keep in [True, False]:
env = Environment(keep_trailing_newline=keep)
for template, expected in [
('', {}),
('no\nnewline', {}),
('with\nnewline\n', {False: 'with\nnewline'}),
('with\nseveral\n\n\n', {False: 'with\nseveral\n\n'}),
]:
tmpl = env.from_string(template)
expect = expected.get(keep, template)
result = tmpl.render()
assert result == expect, (keep, template, result, expect)
class ParserTestCase(JinjaTestCase):
def test_php_syntax(self):
env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->')
tmpl = env.from_string('''\
<!-- I'm a comment, I'm not interesting -->\
<? for item in seq -?>
<?= item ?>
<?- endfor ?>''')
assert tmpl.render(seq=list(range(5))) == '01234'
def test_erb_syntax(self):
env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>')
tmpl = env.from_string('''\
<%# I'm a comment, I'm not interesting %>\
<% for item in seq -%>
<%= item %>
<%- endfor %>''')
assert tmpl.render(seq=list(range(5))) == '01234'
def test_comment_syntax(self):
env = Environment('<!--', '-->', '${', '}', '<!--#', '-->')
tmpl = env.from_string('''\
<!--# I'm a comment, I'm not interesting -->\
<!-- for item in seq --->
${item}
<!--- endfor -->''')
assert tmpl.render(seq=list(range(5))) == '01234'
def test_balancing(self):
tmpl = env.from_string('''{{{'foo':'bar'}.foo}}''')
assert tmpl.render() == 'bar'
def test_start_comment(self):
tmpl = env.from_string('''{# foo comment
and bar comment #}
{% macro blub() %}foo{% endmacro %}
{{ blub() }}''')
assert tmpl.render().strip() == 'foo'
def test_line_syntax(self):
env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%')
tmpl = env.from_string('''\
<%# regular comment %>
% for item in seq:
${item}
% endfor''')
assert [int(x.strip()) for x in tmpl.render(seq=list(range(5))).split()] == \
list(range(5))
env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##')
tmpl = env.from_string('''\
<%# regular comment %>
% for item in seq:
${item} ## the rest of the stuff
% endfor''')
assert [int(x.strip()) for x in tmpl.render(seq=list(range(5))).split()] == \
list(range(5))
def test_line_syntax_priority(self):
# XXX: why is the whitespace there in front of the newline?
env = Environment('{%', '%}', '${', '}', '/*', '*/', '##', '#')
tmpl = env.from_string('''\
/* ignore me.
I'm a multiline comment */
## for item in seq:
* ${item} # this is just extra stuff
## endfor''')
assert tmpl.render(seq=[1, 2]).strip() == '* 1\n* 2'
env = Environment('{%', '%}', '${', '}', '/*', '*/', '#', '##')
tmpl = env.from_string('''\
/* ignore me.
I'm a multiline comment */
# for item in seq:
* ${item} ## this is just extra stuff
## extra stuff i just want to ignore
# endfor''')
assert tmpl.render(seq=[1, 2]).strip() == '* 1\n\n* 2'
def test_error_messages(self):
def assert_error(code, expected):
try:
Template(code)
except TemplateSyntaxError as e:
assert str(e) == expected, 'unexpected error message'
else:
assert False, 'that was supposed to be an error'
assert_error('{% for item in seq %}...{% endif %}',
"Encountered unknown tag 'endif'. Jinja was looking "
"for the following tags: 'endfor' or 'else'. The "
"innermost block that needs to be closed is 'for'.")
assert_error('{% if foo %}{% for item in seq %}...{% endfor %}{% endfor %}',
"Encountered unknown tag 'endfor'. Jinja was looking for "
"the following tags: 'elif' or 'else' or 'endif'. The "
"innermost block that needs to be closed is 'if'.")
assert_error('{% if foo %}',
"Unexpected end of template. Jinja was looking for the "
"following tags: 'elif' or 'else' or 'endif'. The "
"innermost block that needs to be closed is 'if'.")
assert_error('{% for item in seq %}',
"Unexpected end of template. Jinja was looking for the "
"following tags: 'endfor' or 'else'. The innermost block "
"that needs to be closed is 'for'.")
assert_error('{% block foo-bar-baz %}',
"Block names in Jinja have to be valid Python identifiers "
"and may not contain hyphens, use an underscore instead.")
assert_error('{% unknown_tag %}',
"Encountered unknown tag 'unknown_tag'.")
class SyntaxTestCase(JinjaTestCase):
def test_call(self):
env = Environment()
env.globals['foo'] = lambda a, b, c, e, g: a + b + c + e + g
tmpl = env.from_string("{{ foo('a', c='d', e='f', *['b'], **{'g': 'h'}) }}")
assert tmpl.render() == 'abdfh'
def test_slicing(self):
tmpl = env.from_string('{{ [1, 2, 3][:] }}|{{ [1, 2, 3][::-1] }}')
assert tmpl.render() == '[1, 2, 3]|[3, 2, 1]'
def test_attr(self):
tmpl = env.from_string("{{ foo.bar }}|{{ foo['bar'] }}")
assert tmpl.render(foo={'bar': 42}) == '42|42'
def test_subscript(self):
tmpl = env.from_string("{{ foo[0] }}|{{ foo[-1] }}")
assert tmpl.render(foo=[0, 1, 2]) == '0|2'
def test_tuple(self):
tmpl = env.from_string('{{ () }}|{{ (1,) }}|{{ (1, 2) }}')
assert tmpl.render() == '()|(1,)|(1, 2)'
def test_math(self):
tmpl = env.from_string('{{ (1 + 1 * 2) - 3 / 2 }}|{{ 2**3 }}')
assert tmpl.render() == '1.5|8'
def test_div(self):
tmpl = env.from_string('{{ 3 // 2 }}|{{ 3 / 2 }}|{{ 3 % 2 }}')
assert tmpl.render() == '1|1.5|1'
def test_unary(self):
tmpl = env.from_string('{{ +3 }}|{{ -3 }}')
assert tmpl.render() == '3|-3'
def test_concat(self):
tmpl = env.from_string("{{ [1, 2] ~ 'foo' }}")
assert tmpl.render() == '[1, 2]foo'
def test_compare(self):
tmpl = env.from_string('{{ 1 > 0 }}|{{ 1 >= 1 }}|{{ 2 < 3 }}|'
'{{ 2 == 2 }}|{{ 1 <= 1 }}')
assert tmpl.render() == 'True|True|True|True|True'
def test_inop(self):
tmpl = env.from_string('{{ 1 in [1, 2, 3] }}|{{ 1 not in [1, 2, 3] }}')
assert tmpl.render() == 'True|False'
def test_literals(self):
tmpl = env.from_string('{{ [] }}|{{ {} }}|{{ () }}')
assert tmpl.render().lower() == '[]|{}|()'
def test_bool(self):
tmpl = env.from_string('{{ true and false }}|{{ false '
'or true }}|{{ not false }}')
assert tmpl.render() == 'False|True|True'
def test_grouping(self):
tmpl = env.from_string('{{ (true and false) or (false and true) and not false }}')
assert tmpl.render() == 'False'
def test_django_attr(self):
tmpl = env.from_string('{{ [1, 2, 3].0 }}|{{ [[1]].0.0 }}')
assert tmpl.render() == '1|1'
def test_conditional_expression(self):
tmpl = env.from_string('''{{ 0 if true else 1 }}''')
assert tmpl.render() == '0'
def test_short_conditional_expression(self):
tmpl = env.from_string('<{{ 1 if false }}>')
assert tmpl.render() == '<>'
tmpl = env.from_string('<{{ (1 if false).bar }}>')
self.assert_raises(UndefinedError, tmpl.render)
def test_filter_priority(self):
tmpl = env.from_string('{{ "foo"|upper + "bar"|upper }}')
assert tmpl.render() == 'FOOBAR'
def test_function_calls(self):
tests = [
(True, '*foo, bar'),
(True, '*foo, *bar'),
(True, '*foo, bar=42'),
(True, '**foo, *bar'),
(True, '**foo, bar'),
(False, 'foo, bar'),
(False, 'foo, bar=42'),
(False, 'foo, bar=23, *args'),
(False, 'a, b=c, *d, **e'),
(False, '*foo, **bar')
]
for should_fail, sig in tests:
if should_fail:
self.assert_raises(TemplateSyntaxError,
env.from_string, '{{ foo(%s) }}' % sig)
else:
env.from_string('foo(%s)' % sig)
def test_tuple_expr(self):
for tmpl in [
'{{ () }}',
'{{ (1, 2) }}',
'{{ (1, 2,) }}',
'{{ 1, }}',
'{{ 1, 2 }}',
'{% for foo, bar in seq %}...{% endfor %}',
'{% for x in foo, bar %}...{% endfor %}',
'{% for x in foo, %}...{% endfor %}'
]:
assert env.from_string(tmpl)
def test_trailing_comma(self):
tmpl = env.from_string('{{ (1, 2,) }}|{{ [1, 2,] }}|{{ {1: 2,} }}')
assert tmpl.render().lower() == '(1, 2)|[1, 2]|{1: 2}'
def test_block_end_name(self):
env.from_string('{% block foo %}...{% endblock foo %}')
self.assert_raises(TemplateSyntaxError, env.from_string,
'{% block x %}{% endblock y %}')
def test_constant_casing(self):
for const in True, False, None:
tmpl = env.from_string('{{ %s }}|{{ %s }}|{{ %s }}' % (
str(const), str(const).lower(), str(const).upper()
))
assert tmpl.render() == '%s|%s|' % (const, const)
def test_test_chaining(self):
self.assert_raises(TemplateSyntaxError, env.from_string,
'{{ foo is string is sequence }}')
assert env.from_string('{{ 42 is string or 42 is number }}'
).render() == 'True'
def test_string_concatenation(self):
tmpl = env.from_string('{{ "foo" "bar" "baz" }}')
assert tmpl.render() == 'foobarbaz'
def test_notin(self):
bar = range(100)
tmpl = env.from_string('''{{ not 42 in bar }}''')
assert tmpl.render(bar=bar) == text_type(not 42 in bar)
def test_implicit_subscribed_tuple(self):
class Foo(object):
def __getitem__(self, x):
return x
t = env.from_string('{{ foo[1, 2] }}')
assert t.render(foo=Foo()) == u'(1, 2)'
def test_raw2(self):
tmpl = env.from_string('{% raw %}{{ FOO }} and {% BAR %}{% endraw %}')
assert tmpl.render() == '{{ FOO }} and {% BAR %}'
def test_const(self):
tmpl = env.from_string('{{ true }}|{{ false }}|{{ none }}|'
'{{ none is defined }}|{{ missing is defined }}')
assert tmpl.render() == 'True|False|None|True|False'
def test_neg_filter_priority(self):
node = env.parse('{{ -1|foo }}')
assert isinstance(node.body[0].nodes[0], nodes.Filter)
assert isinstance(node.body[0].nodes[0].node, nodes.Neg)
def test_const_assign(self):
constass1 = '''{% set true = 42 %}'''
constass2 = '''{% for none in seq %}{% endfor %}'''
for tmpl in constass1, constass2:
self.assert_raises(TemplateSyntaxError, env.from_string, tmpl)
def test_localset(self):
tmpl = env.from_string('''{% set foo = 0 %}\
{% for item in [1, 2] %}{% set foo = 1 %}{% endfor %}\
{{ foo }}''')
assert tmpl.render() == '0'
def test_parse_unary(self):
tmpl = env.from_string('{{ -foo["bar"] }}')
assert tmpl.render(foo={'bar': 42}) == '-42'
tmpl = env.from_string('{{ -foo["bar"]|abs }}')
assert tmpl.render(foo={'bar': 42}) == '42'
class LstripBlocksTestCase(JinjaTestCase):
def test_lstrip(self):
env = Environment(lstrip_blocks=True, trim_blocks=False)
tmpl = env.from_string(''' {% if True %}\n {% endif %}''')
assert tmpl.render() == "\n"
def test_lstrip_trim(self):
env = Environment(lstrip_blocks=True, trim_blocks=True)
tmpl = env.from_string(''' {% if True %}\n {% endif %}''')
assert tmpl.render() == ""
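# In short: lstrip_blocks strips the whitespace to the left of a block tag on
# its own line, while trim_blocks drops the newline after the tag; combined,
# the block contributes nothing to the output.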
def test_no_lstrip(self):
env = Environment(lstrip_blocks=True, trim_blocks=False)
tmpl = env.from_string(''' {%+ if True %}\n {%+ endif %}''')
assert tmpl.render() == " \n "
def test_lstrip_endline(self):
env = Environment(lstrip_blocks=True, trim_blocks=False)
tmpl = env.from_string(''' hello{% if True %}\n goodbye{% endif %}''')
assert tmpl.render() == " hello\n goodbye"
def test_lstrip_inline(self):
env = Environment(lstrip_blocks=True, trim_blocks=False)
tmpl = env.from_string(''' {% if True %}hello {% endif %}''')
assert tmpl.render() == 'hello '
def test_lstrip_nested(self):
env = Environment(lstrip_blocks=True, trim_blocks=False)
tmpl = env.from_string(''' {% if True %}a {% if True %}b {% endif %}c {% endif %}''')
assert tmpl.render() == 'a b c '
def test_lstrip_left_chars(self):
env = Environment(lstrip_blocks=True, trim_blocks=False)
tmpl = env.from_string(''' abc {% if True %}
hello{% endif %}''')
assert tmpl.render() == ' abc \n hello'
def test_lstrip_embedded_strings(self):
env = Environment(lstrip_blocks=True, trim_blocks=False)
tmpl = env.from_string(''' {% set x = " {% str %} " %}{{ x }}''')
assert tmpl.render() == ' {% str %} '
def test_lstrip_preserve_leading_newlines(self):
env = Environment(lstrip_blocks=True, trim_blocks=False)
tmpl = env.from_string('''\n\n\n{% set hello = 1 %}''')
assert tmpl.render() == '\n\n\n'
def test_lstrip_comment(self):
env = Environment(lstrip_blocks=True, trim_blocks=False)
tmpl = env.from_string(''' {# if True #}
hello
{#endif#}''')
assert tmpl.render() == '\nhello\n'
def test_lstrip_angle_bracket_simple(self):
env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##',
lstrip_blocks=True, trim_blocks=True)
tmpl = env.from_string(''' <% if True %>hello <% endif %>''')
assert tmpl.render() == 'hello '
def test_lstrip_angle_bracket_comment(self):
env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##',
lstrip_blocks=True, trim_blocks=True)
tmpl = env.from_string(''' <%# if True %>hello <%# endif %>''')
assert tmpl.render() == 'hello '
def test_lstrip_angle_bracket(self):
env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##',
lstrip_blocks=True, trim_blocks=True)
tmpl = env.from_string('''\
<%# regular comment %>
<% for item in seq %>
${item} ## the rest of the stuff
<% endfor %>''')
assert tmpl.render(seq=range(5)) == \
''.join('%s\n' % x for x in range(5))
def test_lstrip_angle_bracket_compact(self):
env = Environment('<%', '%>', '${', '}', '<%#', '%>', '%', '##',
lstrip_blocks=True, trim_blocks=True)
tmpl = env.from_string('''\
<%#regular comment%>
<%for item in seq%>
${item} ## the rest of the stuff
<%endfor%>''')
assert tmpl.render(seq=range(5)) == \
''.join('%s\n' % x for x in range(5))
def test_php_syntax_with_manual(self):
env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->',
lstrip_blocks=True, trim_blocks=True)
tmpl = env.from_string('''\
<!-- I'm a comment, I'm not interesting -->
<? for item in seq -?>
<?= item ?>
<?- endfor ?>''')
assert tmpl.render(seq=range(5)) == '01234'
def test_php_syntax(self):
env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->',
lstrip_blocks=True, trim_blocks=True)
tmpl = env.from_string('''\
<!-- I'm a comment, I'm not interesting -->
<? for item in seq ?>
<?= item ?>
<? endfor ?>''')
assert tmpl.render(seq=range(5)) == ''.join(' %s\n' % x for x in range(5))
def test_php_syntax_compact(self):
env = Environment('<?', '?>', '<?=', '?>', '<!--', '-->',
lstrip_blocks=True, trim_blocks=True)
tmpl = env.from_string('''\
<!-- I'm a comment, I'm not interesting -->
<?for item in seq?>
<?=item?>
<?endfor?>''')
assert tmpl.render(seq=range(5)) == ''.join(' %s\n' % x for x in range(5))
def test_erb_syntax(self):
env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>',
lstrip_blocks=True, trim_blocks=True)
tmpl = env.from_string('''\
<%# I'm a comment, I'm not interesting %>
<% for item in seq %>
<%= item %>
<% endfor %>
''')
assert tmpl.render(seq=range(5)) == ''.join(' %s\n' % x for x in range(5))
def test_erb_syntax_with_manual(self):
env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>',
lstrip_blocks=True, trim_blocks=True)
tmpl = env.from_string('''\
<%# I'm a comment, I'm not interesting %>
<% for item in seq -%>
<%= item %>
<%- endfor %>''')
assert tmpl.render(seq=range(5)) == '01234'
def test_erb_syntax_no_lstrip(self):
env = Environment('<%', '%>', '<%=', '%>', '<%#', '%>',
lstrip_blocks=True, trim_blocks=True)
tmpl = env.from_string('''\
<%# I'm a comment, I'm not interesting %>
<%+ for item in seq -%>
<%= item %>
<%- endfor %>''')
assert tmpl.render(seq=range(5)) == ' 01234'
def test_comment_syntax(self):
env = Environment('<!--', '-->', '${', '}', '<!--#', '-->',
lstrip_blocks=True, trim_blocks=True)
tmpl = env.from_string('''\
<!--# I'm a comment, I'm not interesting -->\
<!-- for item in seq --->
${item}
<!--- endfor -->''')
assert tmpl.render(seq=range(5)) == '01234'
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(TokenStreamTestCase))
suite.addTest(unittest.makeSuite(LexerTestCase))
suite.addTest(unittest.makeSuite(ParserTestCase))
suite.addTest(unittest.makeSuite(SyntaxTestCase))
suite.addTest(unittest.makeSuite(LstripBlocksTestCase))
return suite
|
agpl-3.0
|
valexandersaulys/airbnb_kaggle_contest
|
venv/lib/python3.4/site-packages/setuptools/msvc9_support.py
|
429
|
2187
|
try:
import distutils.msvc9compiler
except ImportError:
pass
unpatched = dict()
def patch_for_specialized_compiler():
"""
Patch functions in distutils.msvc9compiler to use the standalone compiler
build for Python (Windows only). Fall back to original behavior when the
standalone compiler is not available.
"""
if 'distutils' not in globals():
# The module isn't available to be patched
return
if unpatched:
# Already patched
return
unpatched.update(vars(distutils.msvc9compiler))
distutils.msvc9compiler.find_vcvarsall = find_vcvarsall
distutils.msvc9compiler.query_vcvarsall = query_vcvarsall
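# A minimal usage sketch (illustrative; the call site below is an assumption,
# not part of this module): patching swaps in the registry-aware lookups that
# follow before any extension build touches distutils.msvc9compiler.
#
#     import setuptools.msvc9_support as msvc9_support
#     msvc9_support.patch_for_specialized_compiler()
#     # find_vcvarsall()/query_vcvarsall() now try the standalone
#     # "VC for Python" registry keys first, falling back to stock behavior.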
def find_vcvarsall(version):
Reg = distutils.msvc9compiler.Reg
VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
key = VC_BASE % ('', version)
try:
# Per-user installs register the compiler path here
productdir = Reg.get_value(key, "installdir")
except KeyError:
try:
# All-user installs on a 64-bit system register here
key = VC_BASE % ('Wow6432Node\\', version)
productdir = Reg.get_value(key, "installdir")
except KeyError:
productdir = None
if productdir:
import os
vcvarsall = os.path.join(productdir, "vcvarsall.bat")
if os.path.isfile(vcvarsall):
return vcvarsall
return unpatched['find_vcvarsall'](version)
def query_vcvarsall(version, *args, **kwargs):
try:
return unpatched['query_vcvarsall'](version, *args, **kwargs)
except distutils.errors.DistutilsPlatformError as exc:
if exc and "vcvarsall.bat" in exc.args[0]:
message = 'Microsoft Visual C++ %0.1f is required (%s).' % (version, exc.args[0])
if int(version) == 9:
# This redirection link is maintained by Microsoft.
# Contact [email protected] if it needs updating.
raise distutils.errors.DistutilsPlatformError(
message + ' Get it from http://aka.ms/vcpython27'
)
raise distutils.errors.DistutilsPlatformError(message)
raise
|
gpl-2.0
|
Panos512/invenio
|
modules/miscutil/lib/sequtils_regression_tests.py
|
16
|
7833
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Test unit for the miscutil/sequtils module. """
from invenio.testutils import InvenioTestCase
try:
from mock import patch
HAS_MOCK = True
except ImportError:
HAS_MOCK = False
from invenio.testutils import make_test_suite, run_test_suite
from invenio.dbquery import run_sql
from invenio.sequtils import SequenceGenerator
from invenio.sequtils_texkey import TexkeySeq, TexkeyNoAuthorError, \
task_run_core, TexkeyNoYearError
def get_bibrecord_mock(_):
return {'001': [([], ' ', ' ', '1086086', 1)],
'111': [([('a',
'Mock conference'),
('d', '14-16 Sep 2011'),
('x', '2050-09-14'),
('c', 'xxxxx')],
' ',
' ',
'',
3)],
'270': [([('m', '[email protected]')], ' ', ' ', '', 5)],
'856': [([('u', 'http://dummy.com/')], '4', ' ', '', 6)],
'970': [([('a', 'CONF-XXXXXX')], ' ', ' ', '', 2)],
'980': [([('a', 'CONFERENCES')], ' ', ' ', '', 7)]}
def get_sample_texkey(num_test=0):
sample_records = []
xml = """
<record>
<datafield tag="100" ind1=" " ind2=" ">
<subfield code="a">Boyle, P.A.</subfield>
</datafield>
<datafield tag="700" ind1=" " ind2=" ">
<subfield code="a">Christ, N.H.</subfield>
</datafield>
<datafield tag="269" ind1=" " ind2=" ">
<subfield code="c">2012-12-06</subfield>
</datafield>
</record>
"""
sample_records.append(xml)
xml = """
<record>
<datafield tag="100" ind1=" " ind2=" ">
<subfield code="a">Broekhoven-Fiene, Hannah</subfield>
</datafield>
<datafield tag="700" ind1=" " ind2=" ">
<subfield code="a">Matthews, Brenda C.</subfield>
</datafield>
<datafield tag="269" ind1=" " ind2=" ">
<subfield code="c">2012-12-06</subfield>
</datafield>
</record>
"""
sample_records.append(xml)
xml = """
<record>
<datafield tag="269" ind1=" " ind2=" ">
<subfield code="b">CERN</subfield>
<subfield code="a">Geneva</subfield>
<subfield code="c">2012-11-06</subfield>
</datafield>
<datafield tag="300" ind1=" " ind2=" ">
<subfield code="a">16</subfield>
</datafield>
<datafield tag="710" ind1=" " ind2=" ">
<subfield code="g">ATLAS Collaboration</subfield>
</datafield>
</record>
"""
sample_records.append(xml)
xml = """
<record>
<datafield tag="269" ind1=" " ind2=" ">
<subfield code="b">CERN</subfield>
<subfield code="a">Geneva</subfield>
<subfield code="c">2012-11-06</subfield>
</datafield>
<datafield tag="300" ind1=" " ind2=" ">
<subfield code="a">16</subfield>
</datafield>
</record>
"""
sample_records.append(xml)
xml = """
<record>
<datafield tag="100" ind1=" " ind2=" ">
<subfield code="a">Broekhoven-Fiene, Hannah</subfield>
</datafield>
<datafield tag="700" ind1=" " ind2=" ">
<subfield code="a">Matthews, Brenda C.</subfield>
</datafield>
</record>
"""
sample_records.append(xml)
return sample_records[num_test]
class IntSeq(SequenceGenerator):
seq_name = 'test_int'
def _next_value(self, x):
return x + 1
class TestIntSequenceGeneratorClass(InvenioTestCase):
def test_sequence_next_int(self):
int_seq = IntSeq()
next_int = int_seq.next_value(1)
self.assertEqual(next_int, 2)
# Check if the value was stored in the DB
res = run_sql("""SELECT seq_value FROM seqSTORE
WHERE seq_value=%s AND seq_name=%s""",
(2, int_seq.seq_name))
self.assertEqual(int(res[0][0]), 2)
# Clean DB entries
run_sql(""" DELETE FROM seqSTORE WHERE seq_name="test_int" """)
class TestCnumSequenceGeneratorClass(InvenioTestCase):
if HAS_MOCK:
@patch('invenio.bibedit_utils.get_bibrecord',
get_bibrecord_mock)
def test_get_next_cnum(self):
from invenio.sequtils_cnum import CnumSeq
cnum_seq = CnumSeq()
res = cnum_seq.next_value('xx')
self.assertEqual(res, 'C50-09-14')
res = cnum_seq.next_value('xx')
self.assertEqual(res, 'C50-09-14.1')
# Clean DB entries
run_sql(""" DELETE FROM seqSTORE
WHERE seq_name="cnum"
AND seq_value IN ("C50-09-14", "C50-09-14.1") """)
class TestTexkeySequenceGeneratorClass(InvenioTestCase):
def setUp(self):
self.texkey1 = ""
self.texkey2 = ""
self.texkey3 = ""
def test_get_next_texkey1(self):
""" Generate the first texkey """
texkey_seq = TexkeySeq()
self.texkey1 = texkey_seq.next_value(xml_record=get_sample_texkey(0))
self.assertEqual(self.texkey1[:-3], 'Boyle:2012')
def test_get_next_texkey2(self):
""" Generate the second texkey """
texkey_seq = TexkeySeq()
self.texkey2 = texkey_seq.next_value(xml_record=get_sample_texkey(1))
self.assertEqual(self.texkey2[:-3], 'Broekhoven-Fiene:2012')
def test_get_next_texkey3(self):
""" Generate the third texkey """
texkey_seq = TexkeySeq()
self.texkey3 = texkey_seq.next_value(xml_record=get_sample_texkey(2))
self.assertEqual(self.texkey3[:-3], 'ATLAS:2012')
def test_get_next_texkey_no_author(self):
""" Generate an error while getting a texkey with no author """
texkey_seq = TexkeySeq()
self.assertRaises(TexkeyNoAuthorError,
texkey_seq.next_value, xml_record=get_sample_texkey(3))
def test_get_next_texkey_no_year(self):
""" Generate an error while getting a texkey with no year """
texkey_seq = TexkeySeq()
self.assertRaises(TexkeyNoYearError,
texkey_seq.next_value, xml_record=get_sample_texkey(4))
def tearDown(self):
# Clean DB entries
run_sql(""" DELETE FROM seqSTORE
WHERE seq_name="texkey"
AND seq_value IN ("%s", "%s", "%s") """ % (self.texkey1,
self.texkey2,
self.texkey3))
class TestTexkeydaemonClass(InvenioTestCase):
def test_task_run_core(self):
""" Basic task_run_core check """
task_run_core()
TEST_SUITE = make_test_suite(TestIntSequenceGeneratorClass,
TestCnumSequenceGeneratorClass,
TestTexkeySequenceGeneratorClass,
TestTexkeydaemonClass)
if __name__ == "__main__":
run_test_suite(TEST_SUITE, warn_user=True)
|
gpl-2.0
|
spotify/cobbler
|
cobbler/modules/authn_configfile.py
|
3
|
2386
|
"""
Authentication module that uses /etc/cobbler/auth.conf
Choice of authentication module is in /etc/cobbler/modules.conf
Copyright 2007-2009, Red Hat, Inc and Others
Michael DeHaan <michael.dehaan AT gmail>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import distutils.sysconfig
import ConfigParser
import sys
import os
from utils import _
from utils import md5
import traceback
plib = distutils.sysconfig.get_python_lib()
mod_path="%s/cobbler" % plib
sys.path.insert(0, mod_path)
import cexceptions
import utils
def register():
"""
The mandatory cobbler module registration hook.
"""
return "authn"
def __parse_storage():
if not os.path.exists("/etc/cobbler/users.digest"):
return []
fd = open("/etc/cobbler/users.digest")
data = fd.read()
fd.close()
results = []
lines = data.split("\n")
for line in lines:
try:
line = line.strip()
tokens = line.split(":")
results.append([tokens[0],tokens[1],tokens[2]])
except:
pass
return results
def authenticate(api_handle,username,password):
"""
Validate a username/password combo, returning True/False
Thanks to http://trac.edgewall.org/ticket/845 for supplying
the algorithm info.
"""
# debugging only (not safe to enable)
# api_handle.logger.debug("backend authenticate (%s,%s)" % (username,password))
userlist = __parse_storage()
for (user,realm,actual_blob) in userlist:
if user == username and realm == "Cobbler":
input = ":".join([user,realm,password])
input_blob = md5(input).hexdigest()
if input_blob.lower() == actual_blob.lower():
return True
return False
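# Illustrative digest-file format (an assumption based on the parsing above,
# not shipped with this module): each line of /etc/cobbler/users.digest is
# "user:realm:md5hex", i.e. the htdigest layout, e.g.
#
#     cobbler:Cobbler:<md5 hex of "cobbler:Cobbler:password">
#
# authenticate() recomputes that md5 from the supplied password and compares
# it case-insensitively against the stored blob.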
|
gpl-2.0
|
NuxiNL/linux
|
tools/perf/tests/attr.py
|
1266
|
9424
|
#! /usr/bin/python
import os
import sys
import glob
import optparse
import tempfile
import logging
import shutil
import ConfigParser
class Fail(Exception):
def __init__(self, test, msg):
self.msg = msg
self.test = test
def getMsg(self):
return '\'%s\' - %s' % (self.test.path, self.msg)
class Unsup(Exception):
def __init__(self, test):
self.test = test
def getMsg(self):
return '\'%s\'' % self.test.path
class Event(dict):
terms = [
'cpu',
'flags',
'type',
'size',
'config',
'sample_period',
'sample_type',
'read_format',
'disabled',
'inherit',
'pinned',
'exclusive',
'exclude_user',
'exclude_kernel',
'exclude_hv',
'exclude_idle',
'mmap',
'comm',
'freq',
'inherit_stat',
'enable_on_exec',
'task',
'watermark',
'precise_ip',
'mmap_data',
'sample_id_all',
'exclude_host',
'exclude_guest',
'exclude_callchain_kernel',
'exclude_callchain_user',
'wakeup_events',
'bp_type',
'config1',
'config2',
'branch_sample_type',
'sample_regs_user',
'sample_stack_user',
]
def add(self, data):
for key, val in data:
log.debug(" %s = %s" % (key, val))
self[key] = val
def __init__(self, name, data, base):
log.debug(" Event %s" % name);
self.name = name;
self.group = ''
self.add(base)
self.add(data)
def compare_data(self, a, b):
# Allow multiple values in assignment separated by '|'
a_list = a.split('|')
b_list = b.split('|')
for a_item in a_list:
for b_item in b_list:
if (a_item == b_item):
return True
elif (a_item == '*') or (b_item == '*'):
return True
return False
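    # Illustrative behaviour (not from the original file):
    #     compare_data('1|2', '2')  -> True   (any '|'-separated value matches)
    #     compare_data('*', '0x13') -> True   ('*' on either side matches all)
    #     compare_data('1', '2')    -> False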
def equal(self, other):
for t in Event.terms:
log.debug(" [%s] %s %s" % (t, self[t], other[t]));
if not self.has_key(t) or not other.has_key(t):
return False
if not self.compare_data(self[t], other[t]):
return False
return True
def diff(self, other):
for t in Event.terms:
if not self.has_key(t) or not other.has_key(t):
continue
if not self.compare_data(self[t], other[t]):
log.warning("expected %s=%s, got %s" % (t, self[t], other[t]))
# Test file description needs to have following sections:
# [config]
# - just single instance in file
# - needs to specify:
# 'command' - perf command name
# 'args' - special command arguments
# 'ret' - expected command return value (0 by default)
#
# [eventX:base]
# - one or multiple instances in file
# - expected values assignments
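#
# A hypothetical test file following this layout (illustration only; the
# section names and values here are made up, not taken from the perf suite):
#
#   [config]
#   command = record
#   args    = kill >/dev/null 2>&1
#   ret     = 1
#
#   [event:base-record]
#   sample_period = 100
#   freq          = 0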
class Test(object):
def __init__(self, path, options):
parser = ConfigParser.SafeConfigParser()
parser.read(path)
log.warning("running '%s'" % path)
self.path = path
self.test_dir = options.test_dir
self.perf = options.perf
self.command = parser.get('config', 'command')
self.args = parser.get('config', 'args')
try:
self.ret = parser.get('config', 'ret')
except:
self.ret = 0
self.expect = {}
self.result = {}
log.debug(" loading expected events");
self.load_events(path, self.expect)
def is_event(self, name):
if name.find("event") == -1:
return False
else:
return True
def load_events(self, path, events):
parser_event = ConfigParser.SafeConfigParser()
parser_event.read(path)
# The event record section header contains the word 'event',
# optionally followed by ':' to allow loading a 'parent
# event' first as a base
for section in filter(self.is_event, parser_event.sections()):
parser_items = parser_event.items(section);
base_items = {}
# Read parent event if there's any
if (':' in section):
base = section[section.index(':') + 1:]
parser_base = ConfigParser.SafeConfigParser()
parser_base.read(self.test_dir + '/' + base)
base_items = parser_base.items('event')
e = Event(section, parser_items, base_items)
events[section] = e
def run_cmd(self, tempdir):
cmd = "PERF_TEST_ATTR=%s %s %s -o %s/perf.data %s" % (tempdir,
self.perf, self.command, tempdir, self.args)
ret = os.WEXITSTATUS(os.system(cmd))
log.info(" '%s' ret %d " % (cmd, ret))
if ret != int(self.ret):
raise Unsup(self)
def compare(self, expect, result):
match = {}
log.debug(" compare");
# For each expected event find all matching
# events in result. Fail if there's not any.
for exp_name, exp_event in expect.items():
exp_list = []
log.debug(" matching [%s]" % exp_name)
for res_name, res_event in result.items():
log.debug(" to [%s]" % res_name)
if (exp_event.equal(res_event)):
exp_list.append(res_name)
log.debug(" ->OK")
else:
log.debug(" ->FAIL");
log.debug(" match: [%s] matches %s" % (exp_name, str(exp_list)))
# we did not find any matching event - fail
if (not exp_list):
exp_event.diff(res_event)
raise Fail(self, 'match failure');
match[exp_name] = exp_list
# For each defined group in the expected events
# check we match the same group in the result.
for exp_name, exp_event in expect.items():
group = exp_event.group
if (group == ''):
continue
for res_name in match[exp_name]:
res_group = result[res_name].group
if res_group not in match[group]:
raise Fail(self, 'group failure')
log.debug(" group: [%s] matches group leader %s" %
(exp_name, str(match[group])))
log.debug(" matched")
def resolve_groups(self, events):
for name, event in events.items():
group_fd = event['group_fd'];
if group_fd == '-1':
continue;
for iname, ievent in events.items():
if (ievent['fd'] == group_fd):
event.group = iname
log.debug('[%s] has group leader [%s]' % (name, iname))
break;
def run(self):
tempdir = tempfile.mkdtemp();
try:
# run the test script
self.run_cmd(tempdir);
# load events expectation for the test
log.debug(" loading result events");
for f in glob.glob(tempdir + '/event*'):
self.load_events(f, self.result);
# resolve group_fd to event names
self.resolve_groups(self.expect);
self.resolve_groups(self.result);
# do the expectation - results matching - both ways
self.compare(self.expect, self.result)
self.compare(self.result, self.expect)
finally:
# cleanup
shutil.rmtree(tempdir)
def run_tests(options):
for f in glob.glob(options.test_dir + '/' + options.test):
try:
Test(f, options).run()
except Unsup, obj:
log.warning("unsupp %s" % obj.getMsg())
def setup_log(verbose):
global log
level = logging.CRITICAL
if verbose == 1:
level = logging.WARNING
if verbose == 2:
level = logging.INFO
if verbose >= 3:
level = logging.DEBUG
log = logging.getLogger('test')
log.setLevel(level)
ch = logging.StreamHandler()
ch.setLevel(level)
formatter = logging.Formatter('%(message)s')
ch.setFormatter(formatter)
log.addHandler(ch)
USAGE = '''%s [OPTIONS]
-d dir # tests dir
-p path # perf binary
-t test # single test
-v # verbose level
''' % sys.argv[0]
def main():
parser = optparse.OptionParser(usage=USAGE)
parser.add_option("-t", "--test",
action="store", type="string", dest="test")
parser.add_option("-d", "--test-dir",
action="store", type="string", dest="test_dir")
parser.add_option("-p", "--perf",
action="store", type="string", dest="perf")
parser.add_option("-v", "--verbose",
action="count", dest="verbose")
options, args = parser.parse_args()
if args:
parser.error('FAILED wrong arguments %s' % ' '.join(args))
return -1
setup_log(options.verbose)
if not options.test_dir:
print 'FAILED no -d option specified'
sys.exit(-1)
if not options.test:
options.test = 'test*'
try:
run_tests(options)
except Fail, obj:
print "FAILED %s" % obj.getMsg();
sys.exit(-1)
sys.exit(0)
if __name__ == '__main__':
main()
|
gpl-2.0
|
aristeu/linux-2.6
|
tools/testing/selftests/drivers/net/mlxsw/sharedbuffer_configuration.py
|
119
|
12654
|
#!/usr/bin/python
# SPDX-License-Identifier: GPL-2.0
import subprocess
import json as j
import random
class SkipTest(Exception):
pass
class RandomValuePicker:
"""
Class for generating random shared buffer configuration values. Can
handle 3 different objects, pool, tcbind and portpool. Provides an
interface to get random values for a specific object type, as follows:
1. Pool:
- random size
2. TcBind:
- random pool number
- random threshold
3. PortPool:
- random threshold
"""
def __init__(self, pools):
self._pools = []
for pool in pools:
self._pools.append(pool)
def _cell_size(self):
return self._pools[0]["cell_size"]
def _get_static_size(self, th):
# For threshold of 16, this works out to be about 12MB on Spectrum-1,
# and about 17MB on Spectrum-2.
return th * 8000 * self._cell_size()
def _get_size(self):
return self._get_static_size(16)
def _get_thtype(self):
return "static"
def _get_th(self, pool):
# Threshold value could be any integer between 3 to 16
th = random.randint(3, 16)
if pool["thtype"] == "dynamic":
return th
else:
return self._get_static_size(th)
def _get_pool(self, direction):
ing_pools = []
egr_pools = []
for pool in self._pools:
if pool["type"] == "ingress":
ing_pools.append(pool)
else:
egr_pools.append(pool)
if direction == "ingress":
arr = ing_pools
else:
arr = egr_pools
return arr[random.randint(0, len(arr) - 1)]
def get_value(self, objid):
if isinstance(objid, Pool):
if objid["pool"] in [4, 8, 9, 10]:
# The threshold type of pools 4, 8, 9 and 10 cannot be changed
raise SkipTest()
else:
return (self._get_size(), self._get_thtype())
if isinstance(objid, TcBind):
if objid["tc"] >= 8:
# Multicast TCs cannot be changed
raise SkipTest()
else:
pool = self._get_pool(objid["type"])
th = self._get_th(pool)
pool_n = pool["pool"]
return (pool_n, th)
if isinstance(objid, PortPool):
pool_n = objid["pool"]
pool = self._pools[pool_n]
assert pool["pool"] == pool_n
th = self._get_th(pool)
return (th,)
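# Illustrative use of the picker above (the variable names are assumptions):
#
#     vp = RandomValuePicker(get_pools(dlname))
#     (size, thtype) = vp.get_value(pool)      # pool is a Pool item
#     (pool_n, th) = vp.get_value(tcbind)      # tcbind is a TcBind item
#     (th,) = vp.get_value(portpool)           # portpool is a PortPool item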
class RecordValuePickerException(Exception):
pass
class RecordValuePicker:
"""
Class for recording the current shared buffer configuration. Stores the
variable fields of any object list (pool, tcbind or portpool) and provides
an interface to get the stored values back per object.
"""
def __init__(self, objlist):
self._recs = []
for item in objlist:
self._recs.append({"objid": item, "value": item.var_tuple()})
def get_value(self, objid):
if isinstance(objid, Pool) and objid["pool"] in [4, 8, 9, 10]:
# The threshold type of pools 4, 8, 9 and 10 cannot be changed
raise SkipTest()
if isinstance(objid, TcBind) and objid["tc"] >= 8:
# Multicast TCs cannot be changed
raise SkipTest()
for rec in self._recs:
if rec["objid"].weak_eq(objid):
return rec["value"]
raise RecordValuePickerException()
def run_cmd(cmd, json=False):
out = subprocess.check_output(cmd, shell=True)
if json:
return j.loads(out)
return out
def run_json_cmd(cmd):
return run_cmd(cmd, json=True)
def log_test(test_name, err_msg=None):
if err_msg:
print("\t%s" % err_msg)
print("TEST: %-80s [FAIL]" % test_name)
else:
print("TEST: %-80s [ OK ]" % test_name)
class CommonItem(dict):
varitems = []
def var_tuple(self):
ret = []
self.varitems.sort()
for key in self.varitems:
ret.append(self[key])
return tuple(ret)
def weak_eq(self, other):
for key in self:
if key in self.varitems:
continue
if self[key] != other[key]:
return False
return True
class CommonList(list):
def get_by(self, by_obj):
for item in self:
if item.weak_eq(by_obj):
return item
return None
def del_by(self, by_obj):
for item in self:
if item.weak_eq(by_obj):
self.remove(item)
class Pool(CommonItem):
varitems = ["size", "thtype"]
def dl_set(self, dlname, size, thtype):
run_cmd("devlink sb pool set {} sb {} pool {} size {} thtype {}".format(dlname, self["sb"],
self["pool"],
size, thtype))
class PoolList(CommonList):
pass
def get_pools(dlname, direction=None):
d = run_json_cmd("devlink sb pool show -j")
pools = PoolList()
for pooldict in d["pool"][dlname]:
if not direction or direction == pooldict["type"]:
pools.append(Pool(pooldict))
return pools
def do_check_pools(dlname, pools, vp):
for pool in pools:
pre_pools = get_pools(dlname)
try:
(size, thtype) = vp.get_value(pool)
except SkipTest:
continue
pool.dl_set(dlname, size, thtype)
post_pools = get_pools(dlname)
pool = post_pools.get_by(pool)
err_msg = None
if pool["size"] != size:
err_msg = "Incorrect pool size (got {}, expected {})".format(pool["size"], size)
if pool["thtype"] != thtype:
err_msg = "Incorrect pool threshold type (got {}, expected {})".format(pool["thtype"], thtype)
pre_pools.del_by(pool)
post_pools.del_by(pool)
if pre_pools != post_pools:
err_msg = "Other pool setup changed as well"
log_test("pool {} of sb {} set verification".format(pool["pool"],
pool["sb"]), err_msg)
def check_pools(dlname, pools):
# Save defaults
record_vp = RecordValuePicker(pools)
# For each pool, set random size and static threshold type
do_check_pools(dlname, pools, RandomValuePicker(pools))
# Restore defaults
do_check_pools(dlname, pools, record_vp)
class TcBind(CommonItem):
varitems = ["pool", "threshold"]
def __init__(self, port, d):
super(TcBind, self).__init__(d)
self["dlportname"] = port.name
def dl_set(self, pool, th):
run_cmd("devlink sb tc bind set {} sb {} tc {} type {} pool {} th {}".format(self["dlportname"],
self["sb"],
self["tc"],
self["type"],
pool, th))
class TcBindList(CommonList):
pass
def get_tcbinds(ports, verify_existence=False):
d = run_json_cmd("devlink sb tc bind show -j -n")
tcbinds = TcBindList()
for port in ports:
err_msg = None
if port.name not in d["tc_bind"] or len(d["tc_bind"][port.name]) == 0:
err_msg = "No tc bind for port"
else:
for tcbinddict in d["tc_bind"][port.name]:
tcbinds.append(TcBind(port, tcbinddict))
if verify_existence:
log_test("tc bind existence for port {} verification".format(port.name), err_msg)
return tcbinds
def do_check_tcbind(ports, tcbinds, vp):
for tcbind in tcbinds:
pre_tcbinds = get_tcbinds(ports)
try:
(pool, th) = vp.get_value(tcbind)
except SkipTest:
continue
tcbind.dl_set(pool, th)
post_tcbinds = get_tcbinds(ports)
tcbind = post_tcbinds.get_by(tcbind)
err_msg = None
if tcbind["pool"] != pool:
err_msg = "Incorrect pool (got {}, expected {})".format(tcbind["pool"], pool)
if tcbind["threshold"] != th:
err_msg = "Incorrect threshold (got {}, expected {})".format(tcbind["threshold"], th)
pre_tcbinds.del_by(tcbind)
post_tcbinds.del_by(tcbind)
if pre_tcbinds != post_tcbinds:
err_msg = "Other tc bind setup changed as well"
log_test("tc bind {}-{} of sb {} set verification".format(tcbind["dlportname"],
tcbind["tc"],
tcbind["sb"]), err_msg)
def check_tcbind(dlname, ports, pools):
tcbinds = get_tcbinds(ports, verify_existence=True)
# Save defaults
record_vp = RecordValuePicker(tcbinds)
# Bind each port and unicast TC (TCs < 8) to a random pool and a random
# threshold
do_check_tcbind(ports, tcbinds, RandomValuePicker(pools))
# Restore defaults
do_check_tcbind(ports, tcbinds, record_vp)
class PortPool(CommonItem):
varitems = ["threshold"]
def __init__(self, port, d):
super(PortPool, self).__init__(d)
self["dlportname"] = port.name
def dl_set(self, th):
run_cmd("devlink sb port pool set {} sb {} pool {} th {}".format(self["dlportname"],
self["sb"],
self["pool"], th))
class PortPoolList(CommonList):
pass
def get_portpools(ports, verify_existence=False):
d = run_json_cmd("devlink sb port pool -j -n")
portpools = PortPoolList()
for port in ports:
err_msg = None
if port.name not in d["port_pool"] or len(d["port_pool"][port.name]) == 0:
err_msg = "No port pool for port"
else:
for portpooldict in d["port_pool"][port.name]:
portpools.append(PortPool(port, portpooldict))
if verify_existence:
log_test("port pool existence for port {} verification".format(port.name), err_msg)
return portpools
def do_check_portpool(ports, portpools, vp):
for portpool in portpools:
pre_portpools = get_portpools(ports)
(th,) = vp.get_value(portpool)
portpool.dl_set(th)
post_portpools = get_portpools(ports)
portpool = post_portpools.get_by(portpool)
err_msg = None
if portpool["threshold"] != th:
err_msg = "Incorrect threshold (got {}, expected {})".format(portpool["threshold"], th)
pre_portpools.del_by(portpool)
post_portpools.del_by(portpool)
if pre_portpools != post_portpools:
err_msg = "Other port pool setup changed as well"
log_test("port pool {}-{} of sb {} set verification".format(portpool["dlportname"],
portpool["pool"],
portpool["sb"]), err_msg)
def check_portpool(dlname, ports, pools):
portpools = get_portpools(ports, verify_existence=True)
# Save defaults
record_vp = RecordValuePicker(portpools)
# For each port pool, set a random threshold
do_check_portpool(ports, portpools, RandomValuePicker(pools))
# Restore defaults
do_check_portpool(ports, portpools, record_vp)
class Port:
def __init__(self, name):
self.name = name
class PortList(list):
pass
def get_ports(dlname):
d = run_json_cmd("devlink port show -j")
ports = PortList()
for name in d["port"]:
if name.find(dlname) == 0 and d["port"][name]["flavour"] == "physical":
ports.append(Port(name))
return ports
def get_device():
devices_info = run_json_cmd("devlink -j dev info")["info"]
for d in devices_info:
if "mlxsw_spectrum" in devices_info[d]["driver"]:
return d
return None
class UnavailableDevlinkNameException(Exception):
pass
def test_sb_configuration():
# Use static seed
random.seed(0)
dlname = get_device()
if not dlname:
raise UnavailableDevlinkNameException()
ports = get_ports(dlname)
pools = get_pools(dlname)
check_pools(dlname, pools)
check_tcbind(dlname, ports, pools)
check_portpool(dlname, ports, pools)
test_sb_configuration()
|
gpl-2.0
|
griddynamics/bunch
|
lettuce_bunch/dependencies.py
|
1
|
2875
|
# -*- coding: utf-8 -*-
# <Bunch - BDD test tool for Lettuce scenarios>
# Copyright (c) 2012 Grid Dynamics Consulting Services, Inc, All Rights Reserved
# http://www.griddynamics.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from exceptions import CyclicDependencySpecification
from topsort import topsort_levels,CycleError
from itertools import chain, tee, izip, product
def pairwise(iterable):
a, b = tee(iterable)
next(b)
return izip(a, b)
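# e.g. (illustration) pairwise([1, 2, 3]) yields (1, 2), (2, 3)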
def dependency_lists_to_pairs(dependency_lists):
return chain(*(pairwise(dep_list) for dep_list in dependency_lists))
def dependency_groups_to_pairs(groups):
return chain(*(product(a,b) for a,b in pairwise(groups)))
def split_solitaries(deps):
solitaries = []
linked = []
for dep in deps:
if len(dep) == 1 and len(dep[0]) > 0:
solitaries.append(dep[0])
else:
linked.append(dep)
return solitaries, linked
def filter_empties(deps):
return filter(None, deps)
def combine_fixture_deps(deps):
solitaries, linked = split_solitaries(filter_empties(deps))
try:
result = [sorted(group) for group in topsort_levels(chain(*map(dependency_groups_to_pairs, linked)))]
for solitary in solitaries:
if solitary not in result:
result.append(solitary)
except CycleError as cycle_details:
raise CyclicDependencySpecification(cycle_details)
return result
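# A minimal sketch of the intended use (the input below is an assumption,
# not a shipped fixture): two dependency chains of groups are merged into
# topologically sorted levels.
#
#     deps = [[("a",), ("b", "c")], [("b",), ("d",)]]
#     combine_fixture_deps(deps)  # -> [['a'], ['b', 'c'], ['d']]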
|
gpl-3.0
|
tonyshardlow/reg_sde
|
run_pf.py
|
1
|
1560
|
from __future__ import (absolute_import, division,
print_function, unicode_literals)
exec(open("ground.py").read())
# mine
import hamiltonian
import diffeo
import sde
from utility import *
#
#
# all data defined in utility (exp2,...)
#
def run(dict):
import os.path
if 'fname' in dict:
filename=dict['fname']
else:
print("No filename given")
exit(1)
print("filename: ",filename+dict['ext'])
#
G=hamiltonian.GaussGreen(dict['ell'],0)
no_steps=dict['no_steps']
#
SDE = sde.SDE(G)
SDE.set_no_steps(no_steps)
SDE.set_landmarks(dict['landmarks_n'])
SDE.set_lam_beta(dict['lam'],dict['beta'],True)
# plot a push-forward sample (with current shape)
plot_setup()
plt.axis('equal')
plt.axis('off')
Q0=dict['landmarks'][0,:,:]
D=SDE.sample_push_forward(Q0)
D.plot_qpath_01(0)
D.plot_warped_grid(10)
plt.savefig(filename+dict['ext']+'.pdf',bbox_inches='tight')
print("...finished.")
#
####################################################################
if __name__ == "__main__":
# do this
plt.ion()
noise_var=0.2
dict=exp1(noise_var)
#dict=exp2(noise_var)
#dict=exp4(noise_var)
dict['lam']=0.5
scale=1.0e1;betas=np.array([1., 2., 4.0, 8.])*scale
exts=['a_pf', 'b_pf', 'c_pf', 'd_pf']
for i in range(4):
print("=======")
dict['beta']=betas[i]
dict['ext']=exts[i]
run(dict)
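# For reference (inferred from run() above, not stated in the original
# script): the configuration dict returned by exp1()/exp2()/exp4() is
# expected to carry at least the keys
#     fname, ext, ell, no_steps, landmarks_n, landmarks, lam, beta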
|
mit
|
austinharris/gem5-riscv
|
ext/ply/test/yacc_error3.py
|
174
|
1485
|
# -----------------------------------------------------------------------------
# yacc_error3.py
#
# Bad p_error() function
# -----------------------------------------------------------------------------
import sys
if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
# Parsing rules
precedence = (
('left','PLUS','MINUS'),
('left','TIMES','DIVIDE'),
('right','UMINUS'),
)
# dictionary of names
names = { }
def p_statement_assign(t):
'statement : NAME EQUALS expression'
names[t[1]] = t[3]
def p_statement_expr(t):
'statement : expression'
print(t[1])
def p_expression_binop(t):
'''expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression'''
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
t[0] = -t[2]
def p_expression_group(t):
'expression : LPAREN expression RPAREN'
t[0] = t[2]
def p_expression_number(t):
'expression : NUMBER'
t[0] = t[1]
def p_expression_name(t):
'expression : NAME'
try:
t[0] = names[t[1]]
except LookupError:
print("Undefined name '%s'" % t[1])
t[0] = 0
p_error = "blah"
yacc.yacc()
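# Note (inferred from the header comment): defining p_error as a plain
# string is deliberate here -- building the parser should make ply report
# that p_error is not a function, which is what this test exercises.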
|
bsd-3-clause
|
usakhelo/FreeCAD
|
src/Tools/MakeAppTools.py
|
32
|
2611
|
import os, sys, re, string, FCFileTools
verbose = 0
dcount = fcount = 0
def replaceTemplate(dirName, oldName, newName):
"""
modify contents from dirName and below, replace oldName by newName
"""
for file in os.listdir(dirName):
pathName = os.path.join(dirName, file)
if not os.path.isdir(pathName):
try:
print pathName
origFile = open(pathName) # open file
lines = origFile.readlines() # read the file...
origFile.close() # ... and close it
output = open(pathName,"w") # open the file again
for line in lines:
if (string.find(line, oldName) != -1): # search for 'oldName' and replace it
line = string.replace(line, oldName, newName)
output.write(line) # write the modified line back
output.close # close the file
except:
print 'Error modifying', pathName, '--skipped'
print sys.exc_type, sys.exc_value
else:
try:
replaceTemplate(pathName, oldName, newName)
except:
print 'Error changing to directory', pathName, '--skipped'
print sys.exc_type, sys.exc_value
def copyTemplate(dirFrom, dirTo, oldName, newName, MatchFile, MatchDir):
"""
copy contents of dirFrom and below to dirTo
"""
global dcount, fcount
for file in os.listdir(dirFrom): # for files/dirs here
print file
pathFrom = os.path.join(dirFrom, file)
pathTo = os.path.join(dirTo, file) # extend both paths
if (string.find(pathTo, oldName) != -1):
pathTo = string.replace(pathTo, oldName, newName) # rename file if 'oldName' is found
if not os.path.isdir(pathFrom): # copy simple files
hit = 0
for matchpat in MatchFile:
if(re.match(matchpat,file)):
hit = 1
break
if hit:
print 'Ignore file '+file
continue
try:
if verbose > 1: print 'copying', pathFrom, 'to', pathTo
FCFileTools.cpfile(pathFrom, pathTo)
fcount = fcount+1
except:
print 'Error copying', pathFrom, 'to', pathTo, '--skipped'
print sys.exc_type, sys.exc_value
else:
hit = 0
for matchpat in MatchDir:
if(re.match(matchpat,file)):
hit = 1
break
if hit:
print 'Ignore directory '+file
continue
if verbose: print 'copying dir', pathFrom, 'to', pathTo
try:
os.mkdir(pathTo) # make new subdir
copyTemplate(pathFrom, pathTo, oldName, newName, MatchFile, MatchDir) # recur into subdirs
dcount = dcount+1
except:
print 'Error creating', pathTo, '--skipped'
print sys.exc_type, sys.exc_value
|
lgpl-2.1
|
fernandog/Medusa
|
ext/pbr/util.py
|
9
|
23178
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (C) 2013 Association of Universities for Research in Astronomy
# (AURA)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of AURA and its representatives may not be used to
# endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY AURA ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL AURA BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
"""The code in this module is mostly copy/pasted out of the distutils2 source
code, as recommended by Tarek Ziade. As such, it may be subject to some change
as distutils2 development continues, and will have to be kept up to date.
I didn't want to use it directly from distutils2 itself, since I do not want it
to be an installation dependency for our packages yet--it is still too unstable
(the latest version on PyPI doesn't even install).
"""
# These first two imports are not used, but are needed to get around an
# irritating Python bug that can crop up when using ./setup.py test.
# See: http://www.eby-sarna.com/pipermail/peak/2010-May/003355.html
try:
import multiprocessing # flake8: noqa
except ImportError:
pass
import logging # flake8: noqa
import os
import re
import sys
import traceback
from collections import defaultdict
import distutils.ccompiler
import pkg_resources
from distutils import log
from distutils import errors
from setuptools.command.egg_info import manifest_maker
from setuptools import dist as st_dist
from setuptools import extension
try:
import ConfigParser as configparser
except ImportError:
import configparser
from pbr import extra_files
import pbr.hooks
# A simplified RE for this; just checks that the line ends with version
# predicates in ()
_VERSION_SPEC_RE = re.compile(r'\s*(.*?)\s*\((.*)\)\s*$')
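# e.g. (illustration) 'pbr (>=1.8)' matches with groups ('pbr', '>=1.8');
# the r'\1\2' substitution applied later rewrites it to 'pbr>=1.8'.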
# Mappings from setup() keyword arguments to setup.cfg options;
# The values are (section, option) tuples, or simply (section,) tuples if
# the option has the same name as the setup() argument
D1_D2_SETUP_ARGS = {
"name": ("metadata",),
"version": ("metadata",),
"author": ("metadata",),
"author_email": ("metadata",),
"maintainer": ("metadata",),
"maintainer_email": ("metadata",),
"url": ("metadata", "home_page"),
"project_urls": ("metadata",),
"description": ("metadata", "summary"),
"keywords": ("metadata",),
"long_description": ("metadata", "description"),
"long_description_content_type": ("metadata", "description_content_type"),
"download_url": ("metadata",),
"classifiers": ("metadata", "classifier"),
"platforms": ("metadata", "platform"), # **
"license": ("metadata",),
# Use setuptools install_requires, not
# broken distutils requires
"install_requires": ("metadata", "requires_dist"),
"setup_requires": ("metadata", "setup_requires_dist"),
"provides": ("metadata", "provides_dist"), # **
"obsoletes": ("metadata", "obsoletes_dist"), # **
"package_dir": ("files", 'packages_root'),
"packages": ("files",),
"package_data": ("files",),
"namespace_packages": ("files",),
"data_files": ("files",),
"scripts": ("files",),
"py_modules": ("files", "modules"), # **
"cmdclass": ("global", "commands"),
# Not supported in distutils2, but provided for
# backwards compatibility with setuptools
"use_2to3": ("backwards_compat", "use_2to3"),
"zip_safe": ("backwards_compat", "zip_safe"),
"tests_require": ("backwards_compat", "tests_require"),
"dependency_links": ("backwards_compat",),
"include_package_data": ("backwards_compat",),
}
# setup() arguments that can have multiple values in setup.cfg
MULTI_FIELDS = ("classifiers",
"platforms",
"install_requires",
"provides",
"obsoletes",
"namespace_packages",
"packages",
"package_data",
"data_files",
"scripts",
"py_modules",
"dependency_links",
"setup_requires",
"tests_require",
"cmdclass")
# setup() arguments that can have mapping values in setup.cfg
MAP_FIELDS = ("project_urls",)
# setup() arguments that contain boolean values
BOOL_FIELDS = ("use_2to3", "zip_safe", "include_package_data")
CSV_FIELDS = ("keywords",)
def resolve_name(name):
"""Resolve a name like ``module.object`` to an object and return it.
Raise ImportError if the module or name is not found.
"""
parts = name.split('.')
cursor = len(parts) - 1
module_name = parts[:cursor]
attr_name = parts[-1]
while cursor > 0:
try:
ret = __import__('.'.join(module_name), fromlist=[attr_name])
break
except ImportError:
if cursor == 0:
raise
cursor -= 1
module_name = parts[:cursor]
attr_name = parts[cursor]
ret = ''
for part in parts[cursor:]:
try:
ret = getattr(ret, part)
except AttributeError:
raise ImportError(name)
return ret
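# Illustrative calls (not part of the original module):
#     resolve_name('logging.handlers.RotatingFileHandler')  # -> the class
#     resolve_name('logging.no_such_name')                  # -> ImportError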
def cfg_to_args(path='setup.cfg', script_args=()):
"""Distutils2 to distutils1 compatibility util.
This method uses an existing setup.cfg to generate a dictionary of
keywords that can be used by distutils.core.setup(kwargs**).
:param path:
The setup.cfg path.
:param script_args:
List of commands setup.py was called with.
:raises DistutilsFileError:
When the setup.cfg file is not found.
"""
# The method source code really starts here.
if sys.version_info >= (3, 2):
parser = configparser.ConfigParser()
else:
parser = configparser.SafeConfigParser()
if not os.path.exists(path):
raise errors.DistutilsFileError("file '%s' does not exist" %
os.path.abspath(path))
try:
parser.read(path, encoding='utf-8')
except TypeError:
# Python 2 doesn't accept the encoding kwarg
parser.read(path)
config = {}
for section in parser.sections():
config[section] = dict()
for k, value in parser.items(section):
config[section][k.replace('-', '_')] = value
# Run setup_hooks, if configured
setup_hooks = has_get_option(config, 'global', 'setup_hooks')
package_dir = has_get_option(config, 'files', 'packages_root')
# Add the source package directory to sys.path in case it contains
# additional hooks, and to make sure it's on the path before any existing
# installations of the package
if package_dir:
package_dir = os.path.abspath(package_dir)
sys.path.insert(0, package_dir)
try:
if setup_hooks:
setup_hooks = [
hook for hook in split_multiline(setup_hooks)
if hook != 'pbr.hooks.setup_hook']
for hook in setup_hooks:
hook_fn = resolve_name(hook)
try :
hook_fn(config)
except SystemExit:
log.error('setup hook %s terminated the installation' % hook)
except:
e = sys.exc_info()[1]
log.error('setup hook %s raised exception: %s\n' %
(hook, e))
log.error(traceback.format_exc())
sys.exit(1)
# Run the pbr hook
pbr.hooks.setup_hook(config)
kwargs = setup_cfg_to_setup_kwargs(config, script_args)
# Set default config overrides
kwargs['include_package_data'] = True
kwargs['zip_safe'] = False
register_custom_compilers(config)
ext_modules = get_extension_modules(config)
if ext_modules:
kwargs['ext_modules'] = ext_modules
entry_points = get_entry_points(config)
if entry_points:
kwargs['entry_points'] = entry_points
# Handle the [files]/extra_files option
files_extra_files = has_get_option(config, 'files', 'extra_files')
if files_extra_files:
extra_files.set_extra_files(split_multiline(files_extra_files))
finally:
# Perform cleanup if any paths were added to sys.path
if package_dir:
sys.path.pop(0)
return kwargs
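# A minimal usage sketch (assumed call site, mirroring how a pbr-style
# setup.py shim would drive this module):
#
#     from distutils.core import setup
#     setup(**cfg_to_args('setup.cfg', sys.argv[1:]))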
def setup_cfg_to_setup_kwargs(config, script_args=()):
"""Processes the setup.cfg options and converts them to arguments accepted
by setuptools' setup() function.
"""
kwargs = {}
# Temporarily holds install_requires and extra_requires while we
# parse env_markers.
all_requirements = {}
for arg in D1_D2_SETUP_ARGS:
if len(D1_D2_SETUP_ARGS[arg]) == 2:
# The distutils field name is different than distutils2's.
section, option = D1_D2_SETUP_ARGS[arg]
elif len(D1_D2_SETUP_ARGS[arg]) == 1:
# The distutils field name is the same as distutils2's.
section = D1_D2_SETUP_ARGS[arg][0]
option = arg
in_cfg_value = has_get_option(config, section, option)
if not in_cfg_value:
# There is no such option in the setup.cfg
if arg == "long_description":
in_cfg_value = has_get_option(config, section,
"description_file")
if in_cfg_value:
in_cfg_value = split_multiline(in_cfg_value)
value = ''
for filename in in_cfg_value:
description_file = open(filename)
try:
value += description_file.read().strip() + '\n\n'
finally:
description_file.close()
in_cfg_value = value
else:
continue
if arg in CSV_FIELDS:
in_cfg_value = split_csv(in_cfg_value)
if arg in MULTI_FIELDS:
in_cfg_value = split_multiline(in_cfg_value)
elif arg in MAP_FIELDS:
in_cfg_map = {}
for i in split_multiline(in_cfg_value):
k, v = i.split('=')
in_cfg_map[k.strip()] = v.strip()
in_cfg_value = in_cfg_map
elif arg in BOOL_FIELDS:
# Provide some flexibility here...
if in_cfg_value.lower() in ('true', 't', '1', 'yes', 'y'):
in_cfg_value = True
else:
in_cfg_value = False
if in_cfg_value:
if arg in ('install_requires', 'tests_require'):
# Replaces PEP345-style version specs with the sort expected by
# setuptools
in_cfg_value = [_VERSION_SPEC_RE.sub(r'\1\2', pred)
for pred in in_cfg_value]
if arg == 'install_requires':
# Split install_requires into package,env_marker tuples
# These will be re-assembled later
install_requires = []
requirement_pattern = r'(?P<package>[^;]*);?(?P<env_marker>[^#]*?)(?:\s*#.*)?$'
for requirement in in_cfg_value:
m = re.match(requirement_pattern, requirement)
requirement_package = m.group('package').strip()
env_marker = m.group('env_marker').strip()
install_requires.append((requirement_package,env_marker))
all_requirements[''] = install_requires
elif arg == 'package_dir':
in_cfg_value = {'': in_cfg_value}
elif arg in ('package_data', 'data_files'):
data_files = {}
firstline = True
prev = None
for line in in_cfg_value:
if '=' in line:
key, value = line.split('=', 1)
key, value = (key.strip(), value.strip())
if key in data_files:
# Multiple duplicates of the same package name;
# this is for backwards compatibility of the old
# format prior to d2to1 0.2.6.
prev = data_files[key]
prev.extend(value.split())
else:
prev = data_files[key.strip()] = value.split()
elif firstline:
raise errors.DistutilsOptionError(
'malformed package_data first line %r (misses '
'"=")' % line)
else:
prev.extend(line.strip().split())
firstline = False
if arg == 'data_files':
# the data_files value is a pointlessly different structure
# from the package_data value
data_files = data_files.items()
in_cfg_value = data_files
elif arg == 'cmdclass':
cmdclass = {}
dist = st_dist.Distribution()
for cls_name in in_cfg_value:
cls = resolve_name(cls_name)
cmd = cls(dist)
cmdclass[cmd.get_command_name()] = cls
in_cfg_value = cmdclass
kwargs[arg] = in_cfg_value
# Transform requirements with embedded environment markers to
# setuptools' supported marker-per-requirement format.
#
# install_requires are treated as a special case of extras, before
# being put back in the expected place
#
# fred =
# foo:marker
# bar
# -> {'fred': ['bar'], 'fred:marker':['foo']}
if 'extras' in config:
requirement_pattern = r'(?P<package>[^:]*):?(?P<env_marker>[^#]*?)(?:\s*#.*)?$'
extras = config['extras']
# Add contents of test-requirements, if any, into an extra named
# 'test' if one does not already exist.
if 'test' not in extras:
from pbr import packaging
extras['test'] = "\n".join(packaging.parse_requirements(
packaging.TEST_REQUIREMENTS_FILES)).replace(';', ':')
for extra in extras:
extra_requirements = []
requirements = split_multiline(extras[extra])
for requirement in requirements:
m = re.match(requirement_pattern, requirement)
extras_value = m.group('package').strip()
env_marker = m.group('env_marker')
extra_requirements.append((extras_value,env_marker))
all_requirements[extra] = extra_requirements
# Transform the full list of requirements into:
# - install_requires, for those that have no extra and no
# env_marker
# - named extras, for those with an extra name (which may include
# an env_marker)
# - and as a special case, install_requires with an env_marker are
# treated as named extras where the name is the empty string
extras_require = {}
for req_group in all_requirements:
for requirement, env_marker in all_requirements[req_group]:
if env_marker:
extras_key = '%s:(%s)' % (req_group, env_marker)
# We do not want to poison wheel creation with locally
# evaluated markers. sdists always re-create the egg_info
# and as such do not need guarded, and pip will never call
# multiple setup.py commands at once.
if 'bdist_wheel' not in script_args:
try:
if pkg_resources.evaluate_marker('(%s)' % env_marker):
extras_key = req_group
except SyntaxError:
log.error(
"Marker evaluation failed, see the following "
"error. For more information see: "
"http://docs.openstack.org/"
"developer/pbr/compatibility.html#evaluate-marker"
)
raise
else:
extras_key = req_group
extras_require.setdefault(extras_key, []).append(requirement)
kwargs['install_requires'] = extras_require.pop('', [])
kwargs['extras_require'] = extras_require
return kwargs
def register_custom_compilers(config):
"""Handle custom compilers; this has no real equivalent in distutils, where
additional compilers could only be added programmatically, so we have to
hack it in somehow.
"""
compilers = has_get_option(config, 'global', 'compilers')
if compilers:
compilers = split_multiline(compilers)
for compiler in compilers:
compiler = resolve_name(compiler)
# In distutils2 compilers these class attributes exist; for
# distutils1 we just have to make something up
if hasattr(compiler, 'name'):
name = compiler.name
else:
name = compiler.__name__
if hasattr(compiler, 'description'):
desc = compiler.description
else:
desc = 'custom compiler %s' % name
module_name = compiler.__module__
# Note; this *will* override built in compilers with the same name
# TODO: Maybe display a warning about this?
cc = distutils.ccompiler.compiler_class
cc[name] = (module_name, compiler.__name__, desc)
# HACK!!!! Distutils assumes all compiler modules are in the
# distutils package
sys.modules['distutils.' + module_name] = sys.modules[module_name]
def get_extension_modules(config):
"""Handle extension modules"""
EXTENSION_FIELDS = ("sources",
"include_dirs",
"define_macros",
"undef_macros",
"library_dirs",
"libraries",
"runtime_library_dirs",
"extra_objects",
"extra_compile_args",
"extra_link_args",
"export_symbols",
"swig_opts",
"depends")
ext_modules = []
for section in config:
if ':' in section:
labels = section.split(':', 1)
else:
# Backwards compatibility for old syntax; don't use this though
labels = section.split('=', 1)
labels = [l.strip() for l in labels]
if (len(labels) == 2) and (labels[0] == 'extension'):
ext_args = {}
for field in EXTENSION_FIELDS:
value = has_get_option(config, section, field)
# All extension module options besides name can have multiple
# values
if not value:
continue
value = split_multiline(value)
if field == 'define_macros':
macros = []
for macro in value:
macro = macro.split('=', 1)
if len(macro) == 1:
macro = (macro[0].strip(), None)
else:
macro = (macro[0].strip(), macro[1].strip())
macros.append(macro)
value = macros
ext_args[field] = value
if ext_args:
if 'name' not in ext_args:
ext_args['name'] = labels[1]
ext_modules.append(extension.Extension(ext_args.pop('name'),
**ext_args))
return ext_modules
def get_entry_points(config):
"""Process the [entry_points] section of setup.cfg to handle setuptools
entry points. This is, of course, not a standard feature of
distutils2/packaging, but as there is not currently a standard alternative
in packaging, we provide support for them.
"""
if not 'entry_points' in config:
return {}
return dict((option, split_multiline(value))
for option, value in config['entry_points'].items())
def has_get_option(config, section, option):
if section in config and option in config[section]:
return config[section][option]
else:
return False
def split_multiline(value):
"""Special behaviour when we have a multi line options"""
value = [element for element in
(line.strip() for line in value.split('\n'))
if element and not element.startswith('#')]
return value
def split_csv(value):
"""Special behaviour when we have a comma separated options"""
value = [element for element in
(chunk.strip() for chunk in value.split(','))
if element]
return value
# The following classes are used to hack Distribution.command_options a bit
class DefaultGetDict(defaultdict):
"""Like defaultdict, but the get() method also sets and returns the default
value.
"""
def get(self, key, default=None):
if default is None:
default = self.default_factory()
return super(DefaultGetDict, self).setdefault(key, default)
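# Illustrative behaviour (not from the original module):
#     d = DefaultGetDict(list)
#     d.get('cmd').append('build')   # stores the default first, then appends
#     # d == {'cmd': ['build']}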
|
gpl-3.0
|
ge0rgi/cinder
|
cinder/tests/unit/image/test_cache.py
|
1
|
12125
|
# Copyright (C) 2015 Pure Storage, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from datetime import timedelta
import ddt
import mock
from oslo_utils import timeutils
from cinder import context as ctxt
from cinder.db.sqlalchemy import models
from cinder.image import cache as image_cache
from cinder import objects
from cinder import test
from cinder.tests.unit import fake_constants as fake
@ddt.ddt
class ImageVolumeCacheTestCase(test.TestCase):
def setUp(self):
super(ImageVolumeCacheTestCase, self).setUp()
self.mock_db = mock.Mock()
self.mock_volume_api = mock.Mock()
self.context = ctxt.get_admin_context()
self.volume = models.Volume()
vol_params = {'id': fake.VOLUME_ID,
'host': 'foo@bar#whatever',
'cluster_name': 'cluster',
'size': 0}
self.volume.update(vol_params)
self.volume_ovo = objects.Volume(self.context, **vol_params)
def _build_cache(self, max_gb=0, max_count=0):
cache = image_cache.ImageVolumeCache(self.mock_db,
self.mock_volume_api,
max_gb,
max_count)
cache.notifier = self.notifier
return cache
def _build_entry(self, size=10):
entry = {
'id': 1,
'host': 'test@foo#bar',
'cluster_name': 'cluster@foo#bar',
'image_id': 'c7a8b8d4-e519-46c7-a0df-ddf1b9b9fff2',
'image_updated_at': timeutils.utcnow(with_timezone=True),
'volume_id': '70a599e0-31e7-49b7-b260-868f441e862b',
'size': size,
'last_used': timeutils.utcnow(with_timezone=True)
}
return entry
def test_get_by_image_volume(self):
cache = self._build_cache()
ret = {'id': 1}
volume_id = '70a599e0-31e7-49b7-b260-868f441e862b'
self.mock_db.image_volume_cache_get_by_volume_id.return_value = ret
entry = cache.get_by_image_volume(self.context, volume_id)
self.assertEqual(ret, entry)
self.mock_db.image_volume_cache_get_by_volume_id.return_value = None
entry = cache.get_by_image_volume(self.context, volume_id)
self.assertIsNone(entry)
def test_evict(self):
cache = self._build_cache()
entry = self._build_entry()
cache.evict(self.context, entry)
self.mock_db.image_volume_cache_delete.assert_called_once_with(
self.context,
entry['volume_id']
)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.evict', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(entry['host'], msg['payload']['host'])
self.assertEqual(entry['image_id'], msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
@ddt.data(True, False)
def test_get_entry(self, clustered):
cache = self._build_cache()
entry = self._build_entry()
image_meta = {
'is_public': True,
'owner': '70a599e0-31e7-49b7-b260-868f441e862b',
'properties': {
'virtual_size': '1.7'
},
'updated_at': entry['image_updated_at']
}
(self.mock_db.
image_volume_cache_get_and_update_last_used.return_value) = entry
if not clustered:
self.volume_ovo.cluster_name = None
expect = {'host': self.volume.host}
else:
expect = {'cluster_name': self.volume.cluster_name}
found_entry = cache.get_entry(self.context,
self.volume_ovo,
entry['image_id'],
image_meta)
self.assertDictEqual(entry, found_entry)
(self.mock_db.
image_volume_cache_get_and_update_last_used.assert_called_once_with)(
self.context,
entry['image_id'],
**expect
)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.hit', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(entry['host'], msg['payload']['host'])
self.assertEqual(entry['image_id'], msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
def test_get_entry_not_exists(self):
cache = self._build_cache()
image_meta = {
'is_public': True,
'owner': '70a599e0-31e7-49b7-b260-868f441e862b',
'properties': {
'virtual_size': '1.7'
},
'updated_at': timeutils.utcnow(with_timezone=True)
}
image_id = 'c7a8b8d4-e519-46c7-a0df-ddf1b9b9fff2'
(self.mock_db.
image_volume_cache_get_and_update_last_used.return_value) = None
found_entry = cache.get_entry(self.context,
self.volume_ovo,
image_id,
image_meta)
self.assertIsNone(found_entry)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.miss', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(self.volume.host, msg['payload']['host'])
self.assertEqual(image_id, msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
@mock.patch('cinder.objects.Volume.get_by_id')
def test_get_entry_needs_update(self, mock_volume_by_id):
cache = self._build_cache()
entry = self._build_entry()
image_meta = {
'is_public': True,
'owner': '70a599e0-31e7-49b7-b260-868f441e862b',
'properties': {
'virtual_size': '1.7'
},
'updated_at': entry['image_updated_at'] + timedelta(hours=2)
}
(self.mock_db.
image_volume_cache_get_and_update_last_used.return_value) = entry
mock_volume = mock.MagicMock()
mock_volume_by_id.return_value = mock_volume
found_entry = cache.get_entry(self.context,
self.volume_ovo,
entry['image_id'],
image_meta)
# Expect that the cache entry is not returned and the image-volume
# for it is deleted.
self.assertIsNone(found_entry)
self.mock_volume_api.delete.assert_called_with(self.context,
mock_volume)
msg = self.notifier.notifications[0]
self.assertEqual('image_volume_cache.miss', msg['event_type'])
self.assertEqual('INFO', msg['priority'])
self.assertEqual(self.volume.host, msg['payload']['host'])
self.assertEqual(entry['image_id'], msg['payload']['image_id'])
self.assertEqual(1, len(self.notifier.notifications))
def test_create_cache_entry(self):
cache = self._build_cache()
entry = self._build_entry()
image_meta = {
'updated_at': entry['image_updated_at']
}
self.mock_db.image_volume_cache_create.return_value = entry
created_entry = cache.create_cache_entry(self.context,
self.volume_ovo,
entry['image_id'],
image_meta)
self.assertEqual(entry, created_entry)
self.mock_db.image_volume_cache_create.assert_called_once_with(
self.context,
self.volume_ovo.host,
self.volume_ovo.cluster_name,
entry['image_id'],
entry['image_updated_at'].replace(tzinfo=None),
self.volume_ovo.id,
self.volume_ovo.size
)
def test_ensure_space_unlimited(self):
cache = self._build_cache(max_gb=0, max_count=0)
has_space = cache.ensure_space(self.context, self.volume)
self.assertTrue(has_space)
self.volume.size = 500
has_space = cache.ensure_space(self.context, self.volume)
self.assertTrue(has_space)
def test_ensure_space_no_entries(self):
cache = self._build_cache(max_gb=100, max_count=10)
self.mock_db.image_volume_cache_get_all.return_value = []
self.volume_ovo.size = 5
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.volume_ovo.size = 101
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertFalse(has_space)
def test_ensure_space_need_gb(self):
cache = self._build_cache(max_gb=30, max_count=10)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = []
entry1 = self._build_entry(size=12)
entries.append(entry1)
entry2 = self._build_entry(size=5)
entries.append(entry2)
entry3 = self._build_entry(size=10)
entries.append(entry3)
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 15
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.assertEqual(2, mock_delete.call_count)
mock_delete.assert_any_call(self.context, entry2)
mock_delete.assert_any_call(self.context, entry3)
def test_ensure_space_need_count(self):
cache = self._build_cache(max_gb=30, max_count=2)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = []
entry1 = self._build_entry(size=10)
entries.append(entry1)
entry2 = self._build_entry(size=5)
entries.append(entry2)
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 12
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.assertEqual(1, mock_delete.call_count)
mock_delete.assert_any_call(self.context, entry2)
def test_ensure_space_need_gb_and_count(self):
cache = self._build_cache(max_gb=30, max_count=3)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
entries = []
entry1 = self._build_entry(size=10)
entries.append(entry1)
entry2 = self._build_entry(size=5)
entries.append(entry2)
entry3 = self._build_entry(size=12)
entries.append(entry3)
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 16
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertTrue(has_space)
self.assertEqual(2, mock_delete.call_count)
mock_delete.assert_any_call(self.context, entry2)
mock_delete.assert_any_call(self.context, entry3)
def test_ensure_space_cant_free_enough_gb(self):
cache = self._build_cache(max_gb=30, max_count=10)
mock_delete = mock.patch.object(cache, '_delete_image_volume').start()
        entries = [self._build_entry(size=25)]
self.mock_db.image_volume_cache_get_all.return_value = entries
self.volume_ovo.size = 50
has_space = cache.ensure_space(self.context, self.volume_ovo)
self.assertFalse(has_space)
mock_delete.assert_not_called()
|
apache-2.0
|
Maximilian-Reuter/SickRage-1
|
lib/github/Notification.py
|
74
|
5593
|
# -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 AKFish <[email protected]> #
# Copyright 2013 Peter Golm <[email protected]> #
# Copyright 2013 Vincent Jacques <[email protected]> #
# Copyright 2013 martinqt <[email protected]> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
import github.Repository
import github.NotificationSubject
class Notification(github.GithubObject.CompletableGithubObject):
"""
This class represents Notifications. The reference can be found here http://developer.github.com/v3/activity/notifications/
"""
@property
def id(self):
"""
:type: string
"""
self._completeIfNotSet(self._id)
return self._id.value
@property
def last_read_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._last_read_at)
return self._last_read_at.value
@property
def repository(self):
"""
:type: :class:`github.Repository.Repository`
"""
self._completeIfNotSet(self._repository)
return self._repository.value
@property
def subject(self):
"""
:type: :class:`github.NotificationSubject.NotificationSubject`
"""
self._completeIfNotSet(self._subject)
return self._subject.value
@property
def reason(self):
"""
:type: string
"""
self._completeIfNotSet(self._reason)
return self._reason.value
@property
def subscription_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._subscription_url)
return self._subscription_url.value
@property
def unread(self):
"""
:type: bool
"""
self._completeIfNotSet(self._unread)
return self._unread.value
@property
def updated_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._updated_at)
return self._updated_at.value
@property
def url(self):
"""
:type: string
"""
self._completeIfNotSet(self._url)
return self._url.value
def _initAttributes(self):
self._id = github.GithubObject.NotSet
self._last_read_at = github.GithubObject.NotSet
        self._repository = github.GithubObject.NotSet
        self._subject = github.GithubObject.NotSet
self._reason = github.GithubObject.NotSet
self._subscription_url = github.GithubObject.NotSet
self._unread = github.GithubObject.NotSet
self._updated_at = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "id" in attributes: # pragma no branch
self._id = self._makeStringAttribute(attributes["id"])
if "last_read_at" in attributes: # pragma no branch
self._last_read_at = self._makeDatetimeAttribute(attributes["last_read_at"])
if "repository" in attributes: # pragma no branch
self._repository = self._makeClassAttribute(github.Repository.Repository, attributes["repository"])
if "subject" in attributes: # pragma no branch
self._subject = self._makeClassAttribute(github.NotificationSubject.NotificationSubject, attributes["subject"])
if "reason" in attributes: # pragma no branch
self._reason = self._makeStringAttribute(attributes["reason"])
if "subscription_url" in attributes: # pragma no branch
self._subscription_url = self._makeStringAttribute(attributes["subscription_url"])
if "unread" in attributes: # pragma no branch
self._unread = self._makeBoolAttribute(attributes["unread"])
if "updated_at" in attributes: # pragma no branch
self._updated_at = self._makeDatetimeAttribute(attributes["updated_at"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
|
gpl-3.0
|
badpass/ansible-modules-core
|
cloud/amazon/_ec2_ami_search.py
|
75
|
6407
|
#!/usr/bin/python
#
# (c) 2013, Nimbis Services
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_ami_search
short_description: Retrieve AWS AMI information for a given operating system.
deprecated: "in favor of the ec2_ami_find module"
version_added: "1.6"
description:
- Look up the most recent AMI on AWS for a given operating system.
- Returns C(ami), C(aki), C(ari), C(serial), C(tag)
- If there is no AKI or ARI associated with an image, these will be C(null).
- Only supports images from cloud-images.ubuntu.com
- 'Example output: C({"ami": "ami-69f5a900", "changed": false, "aki": "aki-88aa75e1", "tag": "release", "ari": null, "serial": "20131024"})'
version_added: "1.6"
options:
distro:
description: Linux distribution (e.g., C(ubuntu))
required: true
choices: ["ubuntu"]
release:
description: short name of the release (e.g., C(precise))
required: true
stream:
description: Type of release.
required: false
default: "server"
choices: ["server", "desktop"]
store:
description: Back-end store for instance
required: false
default: "ebs"
choices: ["ebs", "ebs-io1", "ebs-ssd", "instance-store"]
arch:
description: CPU architecture
required: false
default: "amd64"
choices: ["i386", "amd64"]
region:
description: EC2 region
required: false
default: us-east-1
choices: ["ap-northeast-1", "ap-southeast-1", "ap-southeast-2",
"eu-central-1", "eu-west-1", "sa-east-1", "us-east-1",
"us-west-1", "us-west-2", "us-gov-west-1"]
virt:
    description: virtualization type
required: false
default: paravirtual
choices: ["paravirtual", "hvm"]
author: Lorin Hochstein
'''
EXAMPLES = '''
- name: Launch an Ubuntu 12.04 (Precise Pangolin) EC2 instance
hosts: 127.0.0.1
connection: local
tasks:
- name: Get the Ubuntu precise AMI
ec2_ami_search: distro=ubuntu release=precise region=us-west-1 store=instance-store
register: ubuntu_image
- name: Start the EC2 instance
ec2: image={{ ubuntu_image.ami }} instance_type=m1.small key_name=mykey
'''
import csv
import json
import urlparse
SUPPORTED_DISTROS = ['ubuntu']
AWS_REGIONS = ['ap-northeast-1',
'ap-southeast-1',
'ap-southeast-2',
'eu-central-1',
'eu-west-1',
'sa-east-1',
'us-east-1',
'us-west-1',
'us-west-2',
"us-gov-west-1"]
def get_url(module, url):
""" Get url and return response """
r, info = fetch_url(module, url)
if info['status'] != 200:
# Backwards compat
info['status_code'] = info['status']
module.fail_json(**info)
return r
def ubuntu(module):
""" Get the ami for ubuntu """
release = module.params['release']
stream = module.params['stream']
store = module.params['store']
arch = module.params['arch']
region = module.params['region']
virt = module.params['virt']
url = get_ubuntu_url(release, stream)
req = get_url(module, url)
reader = csv.reader(req, delimiter='\t')
try:
ami, aki, ari, tag, serial = lookup_ubuntu_ami(reader, release, stream,
store, arch, region, virt)
module.exit_json(changed=False, ami=ami, aki=aki, ari=ari, tag=tag,
serial=serial)
except KeyError:
module.fail_json(msg="No matching AMI found")
def lookup_ubuntu_ami(table, release, stream, store, arch, region, virt):
""" Look up the Ubuntu AMI that matches query given a table of AMIs
table: an iterable that returns a row of
(release, stream, tag, serial, region, ami, aki, ari, virt)
release: ubuntu release name
stream: 'server' or 'desktop'
store: 'ebs', 'ebs-io1', 'ebs-ssd' or 'instance-store'
arch: 'i386' or 'amd64'
region: EC2 region
virt: 'paravirtual' or 'hvm'
Returns (ami, aki, ari, tag, serial)"""
expected = (release, stream, store, arch, region, virt)
for row in table:
(actual_release, actual_stream, tag, serial,
actual_store, actual_arch, actual_region, ami, aki, ari,
actual_virt) = row
actual = (actual_release, actual_stream, actual_store, actual_arch,
actual_region, actual_virt)
if actual == expected:
# aki and ari are sometimes blank
if aki == '':
aki = None
if ari == '':
ari = None
return (ami, aki, ari, tag, serial)
raise KeyError()
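# Illustrative sketch (made-up row values): given a table row
#   ('precise', 'server', 'release', '20131024', 'ebs', 'amd64',
#    'us-east-1', 'ami-69f5a900', 'aki-88aa75e1', '', 'paravirtual')
# and a matching query, lookup_ubuntu_ami returns
# ('ami-69f5a900', 'aki-88aa75e1', None, 'release', '20131024');
# the empty ARI string is normalised to None as described above.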
def get_ubuntu_url(release, stream):
url = "https://cloud-images.ubuntu.com/query/%s/%s/released.current.txt"
return url % (release, stream)
def main():
arg_spec = dict(
distro=dict(required=True, choices=SUPPORTED_DISTROS),
release=dict(required=True),
stream=dict(required=False, default='server',
choices=['desktop', 'server']),
store=dict(required=False, default='ebs',
choices=['ebs', 'ebs-io1', 'ebs-ssd', 'instance-store']),
arch=dict(required=False, default='amd64',
choices=['i386', 'amd64']),
region=dict(required=False, default='us-east-1', choices=AWS_REGIONS),
virt=dict(required=False, default='paravirtual',
choices=['paravirtual', 'hvm']),
)
module = AnsibleModule(argument_spec=arg_spec)
distro = module.params['distro']
if distro == 'ubuntu':
ubuntu(module)
else:
module.fail_json(msg="Unsupported distro: %s" % distro)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
SantosDevelopers/sborganicos
|
venv/lib/python3.5/site-packages/django/core/management/templates.py
|
57
|
14053
|
import cgi
import errno
import io
import mimetypes
import os
import posixpath
import re
import shutil
import stat
import sys
import tempfile
from os import path
import django
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.management.utils import handle_extensions
from django.template import Context, Engine
from django.utils import archive, six
from django.utils.six.moves.urllib.request import urlretrieve
from django.utils.version import get_docs_version
_drive_re = re.compile('^([a-z]):', re.I)
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
class TemplateCommand(BaseCommand):
"""
Copies either a Django application layout template or a Django project
layout template into the specified directory.
:param style: A color style object (see django.core.management.color).
:param app_or_project: The string 'app' or 'project'.
:param name: The name of the application or project.
:param directory: The directory to which the template should be copied.
:param options: The additional variables passed to project or app templates
"""
requires_system_checks = False
# The supported URL schemes
url_schemes = ['http', 'https', 'ftp']
# Can't perform any active locale changes during this command, because
    # settings might not be available at all.
leave_locale_alone = True
# Rewrite the following suffixes when determining the target filename.
rewrite_template_suffixes = (
# Allow shipping invalid .py files without byte-compilation.
('.py-tpl', '.py'),
)
def add_arguments(self, parser):
parser.add_argument('name', help='Name of the application or project.')
parser.add_argument('directory', nargs='?', help='Optional destination directory')
parser.add_argument('--template', help='The path or URL to load the template from.')
parser.add_argument(
'--extension', '-e', dest='extensions',
action='append', default=['py'],
help='The file extension(s) to render (default: "py"). '
'Separate multiple extensions with commas, or use '
'-e multiple times.'
)
parser.add_argument(
'--name', '-n', dest='files',
action='append', default=[],
            help='The file name(s) to render. Separate multiple file names '
'with commas, or use -n multiple times.'
)
def handle(self, app_or_project, name, target=None, **options):
self.app_or_project = app_or_project
self.paths_to_remove = []
self.verbosity = options['verbosity']
self.validate_name(name, app_or_project)
# if some directory is given, make sure it's nicely expanded
if target is None:
top_dir = path.join(os.getcwd(), name)
try:
os.makedirs(top_dir)
except OSError as e:
if e.errno == errno.EEXIST:
message = "'%s' already exists" % top_dir
else:
message = e
raise CommandError(message)
else:
top_dir = os.path.abspath(path.expanduser(target))
if not os.path.exists(top_dir):
raise CommandError("Destination directory '%s' does not "
"exist, please create it first." % top_dir)
extensions = tuple(handle_extensions(options['extensions']))
extra_files = []
for file in options['files']:
extra_files.extend(map(lambda x: x.strip(), file.split(',')))
if self.verbosity >= 2:
self.stdout.write("Rendering %s template files with "
"extensions: %s\n" %
(app_or_project, ', '.join(extensions)))
self.stdout.write("Rendering %s template files with "
"filenames: %s\n" %
(app_or_project, ', '.join(extra_files)))
base_name = '%s_name' % app_or_project
base_subdir = '%s_template' % app_or_project
base_directory = '%s_directory' % app_or_project
camel_case_name = 'camel_case_%s_name' % app_or_project
camel_case_value = ''.join(x for x in name.title() if x != '_')
context = Context(dict(options, **{
base_name: name,
base_directory: top_dir,
camel_case_name: camel_case_value,
'docs_version': get_docs_version(),
'django_version': django.__version__,
'unicode_literals': '' if six.PY3 else '# -*- coding: utf-8 -*-\n'
'from __future__ import unicode_literals\n\n',
}), autoescape=False)
# Setup a stub settings environment for template rendering
if not settings.configured:
settings.configure()
django.setup()
template_dir = self.handle_template(options['template'],
base_subdir)
prefix_length = len(template_dir) + 1
for root, dirs, files in os.walk(template_dir):
path_rest = root[prefix_length:]
relative_dir = path_rest.replace(base_name, name)
if relative_dir:
target_dir = path.join(top_dir, relative_dir)
if not path.exists(target_dir):
os.mkdir(target_dir)
for dirname in dirs[:]:
if dirname.startswith('.') or dirname == '__pycache__':
dirs.remove(dirname)
for filename in files:
if filename.endswith(('.pyo', '.pyc', '.py.class')):
# Ignore some files as they cause various breakages.
continue
old_path = path.join(root, filename)
new_path = path.join(top_dir, relative_dir,
filename.replace(base_name, name))
for old_suffix, new_suffix in self.rewrite_template_suffixes:
if new_path.endswith(old_suffix):
new_path = new_path[:-len(old_suffix)] + new_suffix
break # Only rewrite once
if path.exists(new_path):
raise CommandError("%s already exists, overlaying a "
"project or app into an existing "
"directory won't replace conflicting "
"files" % new_path)
# Only render the Python files, as we don't want to
# accidentally render Django templates files
if new_path.endswith(extensions) or filename in extra_files:
with io.open(old_path, 'r', encoding='utf-8') as template_file:
content = template_file.read()
template = Engine().from_string(content)
content = template.render(context)
with io.open(new_path, 'w', encoding='utf-8') as new_file:
new_file.write(content)
else:
shutil.copyfile(old_path, new_path)
if self.verbosity >= 2:
self.stdout.write("Creating %s\n" % new_path)
try:
shutil.copymode(old_path, new_path)
self.make_writeable(new_path)
except OSError:
self.stderr.write(
"Notice: Couldn't set permission bits on %s. You're "
"probably using an uncommon filesystem setup. No "
"problem." % new_path, self.style.NOTICE)
if self.paths_to_remove:
if self.verbosity >= 2:
self.stdout.write("Cleaning up temporary files.\n")
for path_to_remove in self.paths_to_remove:
if path.isfile(path_to_remove):
os.remove(path_to_remove)
else:
shutil.rmtree(path_to_remove)
def handle_template(self, template, subdir):
"""
Determines where the app or project templates are.
Use django.__path__[0] as the default because we don't
know into which directory Django has been installed.
"""
if template is None:
return path.join(django.__path__[0], 'conf', subdir)
else:
if template.startswith('file://'):
template = template[7:]
expanded_template = path.expanduser(template)
expanded_template = path.normpath(expanded_template)
if path.isdir(expanded_template):
return expanded_template
if self.is_url(template):
# downloads the file and returns the path
absolute_path = self.download(template)
else:
absolute_path = path.abspath(expanded_template)
if path.exists(absolute_path):
return self.extract(absolute_path)
raise CommandError("couldn't handle %s template %s." %
(self.app_or_project, template))
def validate_name(self, name, app_or_project):
if name is None:
raise CommandError("you must provide %s %s name" % (
"an" if app_or_project == "app" else "a", app_or_project))
# If it's not a valid directory name.
if six.PY2:
if not re.search(r'^[_a-zA-Z]\w*$', name):
# Provide a smart error message, depending on the error.
if not re.search(r'^[_a-zA-Z]', name):
message = 'make sure the name begins with a letter or underscore'
else:
message = 'use only numbers, letters and underscores'
raise CommandError("%r is not a valid %s name. Please %s." %
(name, app_or_project, message))
else:
if not name.isidentifier():
raise CommandError(
"%r is not a valid %s name. Please make sure the name is "
"a valid identifier." % (name, app_or_project)
)
def download(self, url):
"""
Downloads the given URL and returns the file name.
"""
def cleanup_url(url):
tmp = url.rstrip('/')
filename = tmp.split('/')[-1]
if url.endswith('/'):
display_url = tmp + '/'
else:
display_url = url
return filename, display_url
prefix = 'django_%s_template_' % self.app_or_project
tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_download')
self.paths_to_remove.append(tempdir)
filename, display_url = cleanup_url(url)
if self.verbosity >= 2:
self.stdout.write("Downloading %s\n" % display_url)
try:
the_path, info = urlretrieve(url, path.join(tempdir, filename))
except IOError as e:
raise CommandError("couldn't download URL %s to %s: %s" %
(url, filename, e))
used_name = the_path.split('/')[-1]
# Trying to get better name from response headers
content_disposition = info.get('content-disposition')
if content_disposition:
_, params = cgi.parse_header(content_disposition)
guessed_filename = params.get('filename') or used_name
else:
guessed_filename = used_name
# Falling back to content type guessing
ext = self.splitext(guessed_filename)[1]
content_type = info.get('content-type')
if not ext and content_type:
ext = mimetypes.guess_extension(content_type)
if ext:
guessed_filename += ext
# Move the temporary file to a filename that has better
# chances of being recognized by the archive utils
if used_name != guessed_filename:
guessed_path = path.join(tempdir, guessed_filename)
shutil.move(the_path, guessed_path)
return guessed_path
# Giving up
return the_path
def splitext(self, the_path):
"""
Like os.path.splitext, but takes off .tar, too
"""
base, ext = posixpath.splitext(the_path)
if base.lower().endswith('.tar'):
ext = base[-4:] + ext
base = base[:-4]
return base, ext
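    # For example (illustrative): splitext('archive.tar.gz') returns
    # ('archive', '.tar.gz'), where os.path.splitext alone would give
    # ('archive.tar', '.gz').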
def extract(self, filename):
"""
        Extracts the given file to a temporary directory and returns
        the path of the directory with the extracted content.
"""
prefix = 'django_%s_template_' % self.app_or_project
tempdir = tempfile.mkdtemp(prefix=prefix, suffix='_extract')
self.paths_to_remove.append(tempdir)
if self.verbosity >= 2:
self.stdout.write("Extracting %s\n" % filename)
try:
archive.extract(filename, tempdir)
return tempdir
except (archive.ArchiveException, IOError) as e:
raise CommandError("couldn't extract file %s to %s: %s" %
(filename, tempdir, e))
def is_url(self, template):
"""
Returns True if the name looks like a URL
"""
if ':' not in template:
return False
scheme = template.split(':', 1)[0].lower()
return scheme in self.url_schemes
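    # For example (illustrative): is_url('https://example.com/t.tar.gz') is
    # True, while is_url('C:/templates') is False because 'c' is not one of
    # the supported URL schemes listed above.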
def make_writeable(self, filename):
"""
Make sure that the file is writeable.
Useful if our source is read-only.
"""
if sys.platform.startswith('java'):
# On Jython there is no os.access()
return
if not os.access(filename, os.W_OK):
st = os.stat(filename)
new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
os.chmod(filename, new_permissions)
|
mit
|
xbed/Mixly_Arduino
|
mixly_arduino/mpBuild/ESP32_MixGo/lib/mixgo.py
|
1
|
5214
|
from machine import Pin
from machine import PWM
from machine import ADC
from machine import DAC
from machine import I2C
from machine import Timer
from machine import RTC
from machine import TouchPad
import time
from neopixel import NeoPixel
def get_brightness(pin = 39):
return ADCSensor(pin).read()
def get_soundlevel(pin = 35):
return ADCSensor(pin).read()
# Button
class Button:
def __init__(self, pin):
        self.pin = Pin(pin, Pin.IN)
def get_presses(self, delay = 1):
last_time, last_state, presses = time.time(), 0, 0
while time.time() < last_time + delay:
time.sleep_ms(50)
if last_state == 0 and self.pin.value() == 1:
last_state = 1
if last_state == 1 and self.pin.value() == 0:
last_state, presses = 0, presses + 1
return presses
def is_pressed(self, flag = 0):
return self.pin.value() == flag
def was_pressed(self, flag = 0):
last_state = self.pin.value()
if flag:
if not last_state:
return False
else:
while self.pin.value():
time.sleep_ms(10)
return True
else:
if last_state:
return False
else:
while not self.pin.value():
time.sleep_ms(10)
return True
def irq(self, handler, trigger):
self.pin.irq(handler = handler, trigger = trigger)
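# Illustrative usage sketch (pin 17 is button A on MixGo boards, as wired up
# at the bottom of this module):
#
#     btn = Button(17)
#     if btn.is_pressed():        # active-low: pressed when value() == 0
#         print("held down")
#     print(btn.get_presses(2))   # presses counted over a 2 second window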
# Pin
class MyPin(Pin):
def write_digital(self,val):
self.init(Pin.OUT)
self.value(val)
def read_digital(self):
self.init(Pin.IN)
return self.value()
    def write_analog(self, val):
        # Parse the pin number out of repr(self), e.g. "Pin(4)" -> 4; this is
        # fragile and breaks if the repr format ever changes.
        id = int(str(self)[4:-1])
        # Constructing the PWM object (re)configures the pin hardware; the
        # object is deliberately discarded (the original rebound ``self``,
        # which had no effect beyond this side effect).
        PWM(Pin(id), duty=val)
    def dac_write(self, val):
        id = int(str(self)[4:-1])  # fragile repr parsing, as above
        DAC(Pin(id)).write(val)
    def read_analog(self):
        id = int(str(self)[4:-1])  # fragile repr parsing, as above
        return ADC(Pin(id)).read()
    def set_frequency(self, val):
        id = int(str(self)[4:-1])  # fragile repr parsing, as above
        PWM(Pin(id), freq=val)
def is_touched(self):
id = int(str(self)[4:-1]) #unsafe!
if id in (0,2,4,12,13,14,15,27,32,33):
# print(TouchPad(Pin(id)).read())
return (TouchPad(Pin(id)).read() - 150 < 0)
else:
self.init(Pin.IN)
return self.value() == 1
class Infrared(MyPin):
def near(self):
id = int(str(self)[4:-1]) #unsafe!
pin15=Pin(15,Pin.OUT)
pin15.value(1)
adc=ADC(Pin(id))
adc.atten(ADC.ATTN_11DB)
approximate =adc.read()
pin15.value(0)
return approximate
# Servo
class Servo:
def __init__(self,pin):
self.pin=pin
def write_angle(self,angle):
id = int(str(self.pin)[4:-1])
PWM(Pin(id),freq=50,duty=int(40 + 75 * angle / 180))
# Sonar
class Sonar:
def __init__(self, trig, echo):
self.trig=Pin(trig, Pin.OUT)
self.echo=Pin(echo, Pin.IN)
def checkdist(self):
self.trig.value(0)
self.echo.value(0)
self.trig.value(1)
time.sleep_us(10)
self.trig.value(0)
while(self.echo.value()==0):
pass
t1 = time.ticks_us()
while(self.echo.value()==1):
pass
t2 = time.ticks_us()
return round(time.ticks_diff(t2, t1) / 10000 * 340 / 2, 2)
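# Illustrative usage sketch (hypothetical wiring): the return value is the
# round-trip echo time converted to centimetres.
#
#     sonar = Sonar(trig=13, echo=14)
#     print(sonar.checkdist())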
class led:
def __init__(self, pin, flag=1):
self.val = flag
self.pin = pin
self.flag = flag
def setbrightness(self,val):
self.val = val
if self.flag:
PWM(Pin(self.pin)).duty(self.val)
else:
PWM(Pin(self.pin)).duty(1023 - self.val)
def setonoff(self,val):
if(val == -1):
Pin(self.pin,Pin.OUT).value(1 - Pin(self.pin).value())
elif(val == 1):
Pin(self.pin,Pin.OUT).value(self.flag)
elif(val == 0):
Pin(self.pin,Pin.OUT).value(1 - self.flag)
def getonoff(self):
if self.flag:
return Pin(self.pin).value()
else:
return 1 - Pin(self.pin).value()
class ADCSensor:
def __init__(self,pin):
self.adc=ADC(Pin(pin))
self.adc.atten(ADC.ATTN_11DB)
def read(self):
return self.adc.read()
class RGB:
    def __init__(self, pin, num):
        # The original rebound ``self`` here, which silently discarded the
        # strip; keep it on an attribute instead so write() works.
        self.np = NeoPixel(Pin(pin), num)
    def write(self, n, r, g, b):
        self.np[n] = (r, g, b)
        self.np.write()
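# Illustrative usage sketch for the RGB wrapper above (pin 2 drives the two
# on-board pixels, as configured at the bottom of this module):
#
#     strip = RGB(pin=2, num=2)
#     strip.write(0, 255, 0, 0)   # first pixel red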
i2c = I2C(scl = Pin(22), sda = Pin(21), freq = 100000)
buf = bytearray(1)
rtc = RTC()
tim = Timer(-1)
try:
i2c.readfrom_mem_into(0x68, 0X75, buf)
except:
pass
else:
if buf[0] == 0x71:
from mpu9250 import *
mpu = MPU9250(i2c)
compass = Compass(mpu)
button_a = Button(17)
button_b = Button(16)
led1 = led(pin = 0, flag = 0)
led2 = led(pin = 5, flag = 0)
infrared_left = Infrared(34)
infrared_right = Infrared(36)
touch1 = MyPin(32)
touch2 = MyPin(33)
touch3 = MyPin(25)
touch4 = MyPin(26)
rgb = NeoPixel(Pin(2), 2)
|
apache-2.0
|
sudkannan/xen-hv
|
dist/install/usr/lib64/python2.6/site-packages/xen/xend/server/vfbif.py
|
43
|
3171
|
from xen.xend.server.DevController import DevController
from xen.xend.XendLogging import log
from xen.xend.XendError import VmError
import xen.xend
import os
CONFIG_ENTRIES = ['type', 'vncdisplay', 'vnclisten', 'vncpasswd', 'vncunused',
'display', 'xauthority', 'keymap', 'vnc', 'sdl', 'uuid',
'location', 'protocol', 'opengl']
class VfbifController(DevController):
"""Virtual frame buffer controller. Handles all vfb devices for a domain.
Note that we only support a single vfb per domain at the moment.
"""
def __init__(self, vm):
DevController.__init__(self, vm)
def getDeviceDetails(self, config):
"""@see DevController.getDeviceDetails"""
back = dict([(k, str(config[k])) for k in CONFIG_ENTRIES
                     if k in config])
devid = 0
return (devid, back, {})
def getDeviceConfiguration(self, devid, transaction = None):
result = DevController.getDeviceConfiguration(self, devid, transaction)
if transaction is None:
devinfo = self.readBackend(devid, *CONFIG_ENTRIES)
else:
devinfo = self.readBackendTxn(transaction, devid, *CONFIG_ENTRIES)
return dict([(CONFIG_ENTRIES[i], devinfo[i])
for i in range(len(CONFIG_ENTRIES))
if devinfo[i] is not None])
def waitForDevice(self, devid):
# is a qemu-dm managed device, don't wait for hotplug for these.
return
def reconfigureDevice(self, _, config):
""" Only allow appending location information of vnc port into
xenstore."""
if 'location' in config:
(devid, back, front) = self.getDeviceDetails(config)
self.writeBackend(devid, 'location', config['location'])
return back.get('uuid')
        # getDeviceDetails() always reports devid 0 for vfb devices, so use
        # the same id here instead of referencing an unbound name.
        raise VmError('Refusing to reconfigure device vfb:0')
def destroyDevice(self, devid, force):
# remove the backend xenstore entries no matter what
# because we kill qemu-dm with extreme prejudice
# not giving it a chance to remove them itself
DevController.destroyDevice(self, devid, True)
def migrate(self, deviceConfig, network, dst, step, domName):
# Handled by qemu-dm so no action needed
return 0
class VkbdifController(DevController):
"""Virtual keyboard controller. Handles all vkbd devices for a domain.
"""
def getDeviceDetails(self, config):
"""@see DevController.getDeviceDetails"""
devid = 0
back = {}
front = {}
return (devid, back, front)
def waitForDevice(self, config):
# is a qemu-dm managed device, don't wait for hotplug for these.
return
def destroyDevice(self, devid, force):
# remove the backend xenstore entries no matter what
# because we kill qemu-dm with extreme prejudice
# not giving it a chance to remove them itself
DevController.destroyDevice(self, devid, True)
def migrate(self, deviceConfig, network, dst, step, domName):
# Handled by qemu-dm so no action needed
return 0
|
gpl-2.0
|
tomasdubec/openstack-cinder
|
cinder/tests/api/test_xmlutil.py
|
4
|
25565
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
from cinder.api import xmlutil
from cinder import test
class SelectorTest(test.TestCase):
obj_for_test = {'test': {'name': 'test',
'values': [1, 2, 3],
'attrs': {'foo': 1,
'bar': 2,
'baz': 3, }, }, }
def test_empty_selector(self):
sel = xmlutil.Selector()
self.assertEqual(len(sel.chain), 0)
self.assertEqual(sel(self.obj_for_test), self.obj_for_test)
def test_dict_selector(self):
sel = xmlutil.Selector('test')
self.assertEqual(len(sel.chain), 1)
self.assertEqual(sel.chain[0], 'test')
self.assertEqual(sel(self.obj_for_test),
self.obj_for_test['test'])
def test_datum_selector(self):
sel = xmlutil.Selector('test', 'name')
self.assertEqual(len(sel.chain), 2)
self.assertEqual(sel.chain[0], 'test')
self.assertEqual(sel.chain[1], 'name')
self.assertEqual(sel(self.obj_for_test), 'test')
def test_list_selector(self):
sel = xmlutil.Selector('test', 'values', 0)
self.assertEqual(len(sel.chain), 3)
self.assertEqual(sel.chain[0], 'test')
self.assertEqual(sel.chain[1], 'values')
self.assertEqual(sel.chain[2], 0)
self.assertEqual(sel(self.obj_for_test), 1)
def test_items_selector(self):
sel = xmlutil.Selector('test', 'attrs', xmlutil.get_items)
self.assertEqual(len(sel.chain), 3)
self.assertEqual(sel.chain[2], xmlutil.get_items)
for key, val in sel(self.obj_for_test):
self.assertEqual(self.obj_for_test['test']['attrs'][key], val)
def test_missing_key_selector(self):
sel = xmlutil.Selector('test2', 'attrs')
self.assertEqual(sel(self.obj_for_test), None)
self.assertRaises(KeyError, sel, self.obj_for_test, True)
def test_constant_selector(self):
sel = xmlutil.ConstantSelector('Foobar')
self.assertEqual(sel.value, 'Foobar')
self.assertEqual(sel(self.obj_for_test), 'Foobar')
class TemplateElementTest(test.TestCase):
def test_element_initial_attributes(self):
# Create a template element with some attributes
elem = xmlutil.TemplateElement('test', attrib=dict(a=1, b=2, c=3),
c=4, d=5, e=6)
# Verify all the attributes are as expected
expected = dict(a=1, b=2, c=4, d=5, e=6)
for k, v in expected.items():
self.assertEqual(elem.attrib[k].chain[0], v)
def test_element_get_attributes(self):
expected = dict(a=1, b=2, c=3)
# Create a template element with some attributes
elem = xmlutil.TemplateElement('test', attrib=expected)
# Verify that get() retrieves the attributes
for k, v in expected.items():
self.assertEqual(elem.get(k).chain[0], v)
def test_element_set_attributes(self):
attrs = dict(a=None, b='foo', c=xmlutil.Selector('foo', 'bar'))
# Create a bare template element with no attributes
elem = xmlutil.TemplateElement('test')
# Set the attribute values
for k, v in attrs.items():
elem.set(k, v)
# Now verify what got set
self.assertEqual(len(elem.attrib['a'].chain), 1)
self.assertEqual(elem.attrib['a'].chain[0], 'a')
self.assertEqual(len(elem.attrib['b'].chain), 1)
self.assertEqual(elem.attrib['b'].chain[0], 'foo')
self.assertEqual(elem.attrib['c'], attrs['c'])
def test_element_attribute_keys(self):
attrs = dict(a=1, b=2, c=3, d=4)
expected = set(attrs.keys())
# Create a template element with some attributes
elem = xmlutil.TemplateElement('test', attrib=attrs)
# Now verify keys
self.assertEqual(set(elem.keys()), expected)
def test_element_attribute_items(self):
expected = dict(a=xmlutil.Selector(1),
b=xmlutil.Selector(2),
c=xmlutil.Selector(3))
keys = set(expected.keys())
# Create a template element with some attributes
elem = xmlutil.TemplateElement('test', attrib=expected)
# Now verify items
for k, v in elem.items():
self.assertEqual(expected[k], v)
keys.remove(k)
# Did we visit all keys?
self.assertEqual(len(keys), 0)
def test_element_selector_none(self):
# Create a template element with no selector
elem = xmlutil.TemplateElement('test')
self.assertEqual(len(elem.selector.chain), 0)
def test_element_selector_string(self):
# Create a template element with a string selector
elem = xmlutil.TemplateElement('test', selector='test')
self.assertEqual(len(elem.selector.chain), 1)
self.assertEqual(elem.selector.chain[0], 'test')
def test_element_selector(self):
sel = xmlutil.Selector('a', 'b')
# Create a template element with an explicit selector
elem = xmlutil.TemplateElement('test', selector=sel)
self.assertEqual(elem.selector, sel)
def test_element_subselector_none(self):
# Create a template element with no subselector
elem = xmlutil.TemplateElement('test')
self.assertEqual(elem.subselector, None)
def test_element_subselector_string(self):
# Create a template element with a string subselector
elem = xmlutil.TemplateElement('test', subselector='test')
self.assertEqual(len(elem.subselector.chain), 1)
self.assertEqual(elem.subselector.chain[0], 'test')
def test_element_subselector(self):
sel = xmlutil.Selector('a', 'b')
# Create a template element with an explicit subselector
elem = xmlutil.TemplateElement('test', subselector=sel)
self.assertEqual(elem.subselector, sel)
def test_element_append_child(self):
# Create an element
elem = xmlutil.TemplateElement('test')
# Make sure the element starts off empty
self.assertEqual(len(elem), 0)
# Create a child element
child = xmlutil.TemplateElement('child')
# Append the child to the parent
elem.append(child)
# Verify that the child was added
self.assertEqual(len(elem), 1)
self.assertEqual(elem[0], child)
self.assertEqual('child' in elem, True)
self.assertEqual(elem['child'], child)
# Ensure that multiple children of the same name are rejected
child2 = xmlutil.TemplateElement('child')
self.assertRaises(KeyError, elem.append, child2)
def test_element_extend_children(self):
# Create an element
elem = xmlutil.TemplateElement('test')
# Make sure the element starts off empty
self.assertEqual(len(elem), 0)
# Create a few children
children = [xmlutil.TemplateElement('child1'),
xmlutil.TemplateElement('child2'),
xmlutil.TemplateElement('child3'), ]
# Extend the parent by those children
elem.extend(children)
# Verify that the children were added
self.assertEqual(len(elem), 3)
for idx in range(len(elem)):
self.assertEqual(children[idx], elem[idx])
self.assertEqual(children[idx].tag in elem, True)
self.assertEqual(elem[children[idx].tag], children[idx])
# Ensure that multiple children of the same name are rejected
children2 = [xmlutil.TemplateElement('child4'),
xmlutil.TemplateElement('child1'), ]
self.assertRaises(KeyError, elem.extend, children2)
# Also ensure that child4 was not added
self.assertEqual(len(elem), 3)
self.assertEqual(elem[-1].tag, 'child3')
def test_element_insert_child(self):
# Create an element
elem = xmlutil.TemplateElement('test')
# Make sure the element starts off empty
self.assertEqual(len(elem), 0)
# Create a few children
children = [xmlutil.TemplateElement('child1'),
xmlutil.TemplateElement('child2'),
xmlutil.TemplateElement('child3'), ]
# Extend the parent by those children
elem.extend(children)
# Create a child to insert
child = xmlutil.TemplateElement('child4')
# Insert it
elem.insert(1, child)
# Ensure the child was inserted in the right place
self.assertEqual(len(elem), 4)
children.insert(1, child)
for idx in range(len(elem)):
self.assertEqual(children[idx], elem[idx])
self.assertEqual(children[idx].tag in elem, True)
self.assertEqual(elem[children[idx].tag], children[idx])
# Ensure that multiple children of the same name are rejected
child2 = xmlutil.TemplateElement('child2')
self.assertRaises(KeyError, elem.insert, 2, child2)
def test_element_remove_child(self):
# Create an element
elem = xmlutil.TemplateElement('test')
# Make sure the element starts off empty
self.assertEqual(len(elem), 0)
# Create a few children
children = [xmlutil.TemplateElement('child1'),
xmlutil.TemplateElement('child2'),
xmlutil.TemplateElement('child3'), ]
# Extend the parent by those children
elem.extend(children)
# Create a test child to remove
child = xmlutil.TemplateElement('child2')
# Try to remove it
self.assertRaises(ValueError, elem.remove, child)
# Ensure that no child was removed
self.assertEqual(len(elem), 3)
# Now remove a legitimate child
elem.remove(children[1])
# Ensure that the child was removed
self.assertEqual(len(elem), 2)
self.assertEqual(elem[0], children[0])
self.assertEqual(elem[1], children[2])
self.assertEqual('child2' in elem, False)
# Ensure the child cannot be retrieved by name
def get_key(elem, key):
return elem[key]
self.assertRaises(KeyError, get_key, elem, 'child2')
def test_element_text(self):
# Create an element
elem = xmlutil.TemplateElement('test')
# Ensure that it has no text
self.assertEqual(elem.text, None)
# Try setting it to a string and ensure it becomes a selector
elem.text = 'test'
self.assertEqual(hasattr(elem.text, 'chain'), True)
self.assertEqual(len(elem.text.chain), 1)
self.assertEqual(elem.text.chain[0], 'test')
# Try resetting the text to None
elem.text = None
self.assertEqual(elem.text, None)
# Now make up a selector and try setting the text to that
sel = xmlutil.Selector()
elem.text = sel
self.assertEqual(elem.text, sel)
# Finally, try deleting the text and see what happens
del elem.text
self.assertEqual(elem.text, None)
def test_apply_attrs(self):
# Create a template element
attrs = dict(attr1=xmlutil.ConstantSelector(1),
attr2=xmlutil.ConstantSelector(2))
tmpl_elem = xmlutil.TemplateElement('test', attrib=attrs)
# Create an etree element
elem = etree.Element('test')
# Apply the template to the element
tmpl_elem.apply(elem, None)
# Now, verify the correct attributes were set
for k, v in elem.items():
self.assertEqual(str(attrs[k].value), v)
def test_apply_text(self):
# Create a template element
tmpl_elem = xmlutil.TemplateElement('test')
tmpl_elem.text = xmlutil.ConstantSelector(1)
# Create an etree element
elem = etree.Element('test')
# Apply the template to the element
tmpl_elem.apply(elem, None)
# Now, verify the text was set
self.assertEqual(str(tmpl_elem.text.value), elem.text)
def test__render(self):
attrs = dict(attr1=xmlutil.ConstantSelector(1),
attr2=xmlutil.ConstantSelector(2),
attr3=xmlutil.ConstantSelector(3))
# Create a master template element
master_elem = xmlutil.TemplateElement('test', attr1=attrs['attr1'])
# Create a couple of slave template element
slave_elems = [xmlutil.TemplateElement('test', attr2=attrs['attr2']),
xmlutil.TemplateElement('test', attr3=attrs['attr3']), ]
# Try the render
elem = master_elem._render(None, None, slave_elems, None)
# Verify the particulars of the render
self.assertEqual(elem.tag, 'test')
self.assertEqual(len(elem.nsmap), 0)
for k, v in elem.items():
self.assertEqual(str(attrs[k].value), v)
# Create a parent for the element to be rendered
parent = etree.Element('parent')
# Try the render again...
elem = master_elem._render(parent, None, slave_elems, dict(a='foo'))
# Verify the particulars of the render
self.assertEqual(len(parent), 1)
self.assertEqual(parent[0], elem)
self.assertEqual(len(elem.nsmap), 1)
self.assertEqual(elem.nsmap['a'], 'foo')
def test_render(self):
# Create a template element
tmpl_elem = xmlutil.TemplateElement('test')
tmpl_elem.text = xmlutil.Selector()
# Create the object we're going to render
obj = ['elem1', 'elem2', 'elem3', 'elem4']
# Try a render with no object
elems = tmpl_elem.render(None, None)
self.assertEqual(len(elems), 0)
# Try a render with one object
elems = tmpl_elem.render(None, 'foo')
self.assertEqual(len(elems), 1)
self.assertEqual(elems[0][0].text, 'foo')
self.assertEqual(elems[0][1], 'foo')
# Now, try rendering an object with multiple entries
parent = etree.Element('parent')
elems = tmpl_elem.render(parent, obj)
self.assertEqual(len(elems), 4)
# Check the results
for idx in range(len(obj)):
self.assertEqual(elems[idx][0].text, obj[idx])
self.assertEqual(elems[idx][1], obj[idx])
def test_subelement(self):
# Try the SubTemplateElement constructor
parent = xmlutil.SubTemplateElement(None, 'parent')
self.assertEqual(parent.tag, 'parent')
self.assertEqual(len(parent), 0)
# Now try it with a parent element
child = xmlutil.SubTemplateElement(parent, 'child')
self.assertEqual(child.tag, 'child')
self.assertEqual(len(parent), 1)
self.assertEqual(parent[0], child)
def test_wrap(self):
# These are strange methods, but they make things easier
elem = xmlutil.TemplateElement('test')
self.assertEqual(elem.unwrap(), elem)
self.assertEqual(elem.wrap().root, elem)
def test_dyntag(self):
obj = ['a', 'b', 'c']
# Create a template element with a dynamic tag
tmpl_elem = xmlutil.TemplateElement(xmlutil.Selector())
# Try the render
parent = etree.Element('parent')
elems = tmpl_elem.render(parent, obj)
# Verify the particulars of the render
self.assertEqual(len(elems), len(obj))
for idx in range(len(obj)):
self.assertEqual(elems[idx][0].tag, obj[idx])
class TemplateTest(test.TestCase):
def test_wrap(self):
# These are strange methods, but they make things easier
elem = xmlutil.TemplateElement('test')
tmpl = xmlutil.Template(elem)
self.assertEqual(tmpl.unwrap(), elem)
self.assertEqual(tmpl.wrap(), tmpl)
def test__siblings(self):
# Set up a basic template
elem = xmlutil.TemplateElement('test')
tmpl = xmlutil.Template(elem)
# Check that we get the right siblings
siblings = tmpl._siblings()
self.assertEqual(len(siblings), 1)
self.assertEqual(siblings[0], elem)
def test__nsmap(self):
# Set up a basic template
elem = xmlutil.TemplateElement('test')
tmpl = xmlutil.Template(elem, nsmap=dict(a="foo"))
# Check out that we get the right namespace dictionary
nsmap = tmpl._nsmap()
self.assertNotEqual(id(nsmap), id(tmpl.nsmap))
self.assertEqual(len(nsmap), 1)
self.assertEqual(nsmap['a'], 'foo')
def test_master_attach(self):
# Set up a master template
elem = xmlutil.TemplateElement('test')
tmpl = xmlutil.MasterTemplate(elem, 1)
# Make sure it has a root but no slaves
self.assertEqual(tmpl.root, elem)
self.assertEqual(len(tmpl.slaves), 0)
# Try to attach an invalid slave
bad_elem = xmlutil.TemplateElement('test2')
self.assertRaises(ValueError, tmpl.attach, bad_elem)
self.assertEqual(len(tmpl.slaves), 0)
# Try to attach an invalid and a valid slave
good_elem = xmlutil.TemplateElement('test')
self.assertRaises(ValueError, tmpl.attach, good_elem, bad_elem)
self.assertEqual(len(tmpl.slaves), 0)
# Try to attach an inapplicable template
class InapplicableTemplate(xmlutil.Template):
def apply(self, master):
return False
inapp_tmpl = InapplicableTemplate(good_elem)
tmpl.attach(inapp_tmpl)
self.assertEqual(len(tmpl.slaves), 0)
# Now try attaching an applicable template
tmpl.attach(good_elem)
self.assertEqual(len(tmpl.slaves), 1)
self.assertEqual(tmpl.slaves[0].root, good_elem)
def test_master_copy(self):
# Construct a master template
elem = xmlutil.TemplateElement('test')
tmpl = xmlutil.MasterTemplate(elem, 1, nsmap=dict(a='foo'))
# Give it a slave
slave = xmlutil.TemplateElement('test')
tmpl.attach(slave)
# Construct a copy
copy = tmpl.copy()
# Check to see if we actually managed a copy
self.assertNotEqual(tmpl, copy)
self.assertEqual(tmpl.root, copy.root)
self.assertEqual(tmpl.version, copy.version)
self.assertEqual(id(tmpl.nsmap), id(copy.nsmap))
self.assertNotEqual(id(tmpl.slaves), id(copy.slaves))
self.assertEqual(len(tmpl.slaves), len(copy.slaves))
self.assertEqual(tmpl.slaves[0], copy.slaves[0])
def test_slave_apply(self):
# Construct a master template
elem = xmlutil.TemplateElement('test')
master = xmlutil.MasterTemplate(elem, 3)
# Construct a slave template with applicable minimum version
slave = xmlutil.SlaveTemplate(elem, 2)
self.assertEqual(slave.apply(master), True)
# Construct a slave template with equal minimum version
slave = xmlutil.SlaveTemplate(elem, 3)
self.assertEqual(slave.apply(master), True)
# Construct a slave template with inapplicable minimum version
slave = xmlutil.SlaveTemplate(elem, 4)
self.assertEqual(slave.apply(master), False)
# Construct a slave template with applicable version range
slave = xmlutil.SlaveTemplate(elem, 2, 4)
self.assertEqual(slave.apply(master), True)
# Construct a slave template with low version range
slave = xmlutil.SlaveTemplate(elem, 1, 2)
self.assertEqual(slave.apply(master), False)
# Construct a slave template with high version range
slave = xmlutil.SlaveTemplate(elem, 4, 5)
self.assertEqual(slave.apply(master), False)
# Construct a slave template with matching version range
slave = xmlutil.SlaveTemplate(elem, 3, 3)
self.assertEqual(slave.apply(master), True)
def test__serialize(self):
# Our test object to serialize
obj = {'test': {'name': 'foobar',
'values': [1, 2, 3, 4],
'attrs': {'a': 1,
'b': 2,
'c': 3,
'd': 4, },
'image': {'name': 'image_foobar', 'id': 42, }, }, }
# Set up our master template
root = xmlutil.TemplateElement('test', selector='test',
name='name')
value = xmlutil.SubTemplateElement(root, 'value', selector='values')
value.text = xmlutil.Selector()
attrs = xmlutil.SubTemplateElement(root, 'attrs', selector='attrs')
xmlutil.SubTemplateElement(attrs, 'attr', selector=xmlutil.get_items,
key=0, value=1)
master = xmlutil.MasterTemplate(root, 1, nsmap=dict(f='foo'))
# Set up our slave template
root_slave = xmlutil.TemplateElement('test', selector='test')
image = xmlutil.SubTemplateElement(root_slave, 'image',
selector='image', id='id')
image.text = xmlutil.Selector('name')
slave = xmlutil.SlaveTemplate(root_slave, 1, nsmap=dict(b='bar'))
# Attach the slave to the master...
master.attach(slave)
# Try serializing our object
siblings = master._siblings()
nsmap = master._nsmap()
result = master._serialize(None, obj, siblings, nsmap)
# Now we get to manually walk the element tree...
self.assertEqual(result.tag, 'test')
self.assertEqual(len(result.nsmap), 2)
self.assertEqual(result.nsmap['f'], 'foo')
self.assertEqual(result.nsmap['b'], 'bar')
self.assertEqual(result.get('name'), obj['test']['name'])
for idx, val in enumerate(obj['test']['values']):
self.assertEqual(result[idx].tag, 'value')
self.assertEqual(result[idx].text, str(val))
idx += 1
self.assertEqual(result[idx].tag, 'attrs')
for attr in result[idx]:
self.assertEqual(attr.tag, 'attr')
self.assertEqual(attr.get('value'),
str(obj['test']['attrs'][attr.get('key')]))
idx += 1
self.assertEqual(result[idx].tag, 'image')
self.assertEqual(result[idx].get('id'),
str(obj['test']['image']['id']))
self.assertEqual(result[idx].text, obj['test']['image']['name'])
class MasterTemplateBuilder(xmlutil.TemplateBuilder):
def construct(self):
elem = xmlutil.TemplateElement('test')
return xmlutil.MasterTemplate(elem, 1)
class SlaveTemplateBuilder(xmlutil.TemplateBuilder):
def construct(self):
elem = xmlutil.TemplateElement('test')
return xmlutil.SlaveTemplate(elem, 1)
class TemplateBuilderTest(test.TestCase):
def test_master_template_builder(self):
# Make sure the template hasn't been built yet
self.assertEqual(MasterTemplateBuilder._tmpl, None)
# Now, construct the template
tmpl1 = MasterTemplateBuilder()
# Make sure that there is a template cached...
self.assertNotEqual(MasterTemplateBuilder._tmpl, None)
# Make sure it wasn't what was returned...
self.assertNotEqual(MasterTemplateBuilder._tmpl, tmpl1)
# Make sure it doesn't get rebuilt
cached = MasterTemplateBuilder._tmpl
tmpl2 = MasterTemplateBuilder()
self.assertEqual(MasterTemplateBuilder._tmpl, cached)
# Make sure we're always getting fresh copies
self.assertNotEqual(tmpl1, tmpl2)
# Make sure we can override the copying behavior
tmpl3 = MasterTemplateBuilder(False)
self.assertEqual(MasterTemplateBuilder._tmpl, tmpl3)
def test_slave_template_builder(self):
# Make sure the template hasn't been built yet
self.assertEqual(SlaveTemplateBuilder._tmpl, None)
# Now, construct the template
tmpl1 = SlaveTemplateBuilder()
# Make sure there is a template cached...
self.assertNotEqual(SlaveTemplateBuilder._tmpl, None)
# Make sure it was what was returned...
self.assertEqual(SlaveTemplateBuilder._tmpl, tmpl1)
# Make sure it doesn't get rebuilt
tmpl2 = SlaveTemplateBuilder()
self.assertEqual(SlaveTemplateBuilder._tmpl, tmpl1)
# Make sure we're always getting the cached copy
self.assertEqual(tmpl1, tmpl2)
class MiscellaneousXMLUtilTests(test.TestCase):
def test_make_flat_dict(self):
expected_xml = ("<?xml version='1.0' encoding='UTF-8'?>\n"
'<wrapper><a>foo</a><b>bar</b></wrapper>')
root = xmlutil.make_flat_dict('wrapper')
tmpl = xmlutil.MasterTemplate(root, 1)
result = tmpl.serialize(dict(wrapper=dict(a='foo', b='bar')))
self.assertEqual(result, expected_xml)
|
apache-2.0
|
sbalde/edxplatform
|
lms/djangoapps/open_ended_grading/utils.py
|
63
|
7288
|
import logging
from urllib import urlencode
from xmodule.modulestore import search
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError, NoPathToItem
from xmodule.open_ended_grading_classes.controller_query_service import ControllerQueryService
from xmodule.open_ended_grading_classes.grading_service_module import GradingServiceError
from django.utils.translation import ugettext as _
from django.conf import settings
from edxmako.shortcuts import render_to_string
log = logging.getLogger(__name__)
GRADER_DISPLAY_NAMES = {
'ML': _("AI Assessment"),
'PE': _("Peer Assessment"),
'NA': _("Not yet available"),
'BC': _("Automatic Checker"),
'IN': _("Instructor Assessment"),
}
STUDENT_ERROR_MESSAGE = _("Error occurred while contacting the grading service. Please notify course staff.")
STAFF_ERROR_MESSAGE = _("Error occurred while contacting the grading service. Please notify your edX point of contact.")
def generate_problem_url(problem_url_parts, base_course_url):
"""
    From a list of problem url parts generated by search.path_to_location and a base course url, generates a url to a problem.
@param problem_url_parts: Output of search.path_to_location
@param base_course_url: Base url of a given course
@return: A path to the problem
"""
activate_block_id = problem_url_parts[-1]
problem_url_parts = problem_url_parts[0:-1]
problem_url = base_course_url + "/"
for i, part in enumerate(problem_url_parts):
if part is not None:
# This is the course_key. We need to turn it into its deprecated
# form.
if i == 0:
part = part.to_deprecated_string()
# This is placed between the course id and the rest of the url.
if i == 1:
problem_url += "courseware/"
problem_url += part + "/"
problem_url += '?{}'.format(urlencode({'activate_block_id': unicode(activate_block_id)}))
return problem_url
def does_location_exist(usage_key):
"""
Checks to see if a valid module exists at a given location (ie has not been deleted)
    usage_key - UsageKey of the module to check
"""
try:
search.path_to_location(modulestore(), usage_key)
return True
except ItemNotFoundError:
# If the problem cannot be found at the location received from the grading controller server,
# it has been deleted by the course author.
return False
except NoPathToItem:
# If the problem can be found, but there is no path to it, then we assume it is a draft.
# Log a warning in any case.
log.warn("Got an unexpected NoPathToItem error in staff grading with location %s. "
"This is ok if it is a draft; ensure that the location is valid.", usage_key)
return False
def create_controller_query_service():
"""
Return an instance of a service that can query edX ORA.
"""
return ControllerQueryService(settings.OPEN_ENDED_GRADING_INTERFACE, render_to_string)
class StudentProblemList(object):
"""
Get a list of problems that the student has attempted from ORA.
Add in metadata as needed.
"""
def __init__(self, course_id, user_id):
"""
@param course_id: The id of a course object. Get using course.id.
@param user_id: The anonymous id of the user, from the unique_id_for_user function.
"""
self.course_id = course_id
self.user_id = user_id
# We want to append this string to all of our error messages.
self.course_error_ending = _("for course {0} and student {1}.").format(self.course_id, user_id)
# This is our generic error message.
self.error_text = STUDENT_ERROR_MESSAGE
self.success = False
# Create a service to query edX ORA.
self.controller_qs = create_controller_query_service()
def fetch_from_grading_service(self):
"""
Fetch a list of problems that the student has answered from ORA.
Handle various error conditions.
@return: A boolean success indicator.
"""
# In the case of multiple calls, ensure that success is false initially.
self.success = False
try:
#Get list of all open ended problems that the grading server knows about
problem_list_dict = self.controller_qs.get_grading_status_list(self.course_id, self.user_id)
except GradingServiceError:
log.error("Problem contacting open ended grading service " + self.course_error_ending)
return self.success
except ValueError:
log.error("Problem with results from external grading service for open ended" + self.course_error_ending)
return self.success
success = problem_list_dict['success']
if 'error' in problem_list_dict:
self.error_text = problem_list_dict['error']
return success
if 'problem_list' not in problem_list_dict:
log.error("Did not receive a problem list in ORA response" + self.course_error_ending)
return success
self.problem_list = problem_list_dict['problem_list']
self.success = True
return self.success
def add_problem_data(self, base_course_url):
"""
Add metadata to problems.
@param base_course_url: the base url for any course. Can get with reverse('course')
@return: A list of valid problems in the course and their appended data.
"""
# Our list of valid problems.
valid_problems = []
if not self.success or not isinstance(self.problem_list, list):
log.error("Called add_problem_data without a valid problem list" + self.course_error_ending)
return valid_problems
# Iterate through all of our problems and add data.
for problem in self.problem_list:
try:
# Try to load the problem.
usage_key = self.course_id.make_usage_key_from_deprecated_string(problem['location'])
problem_url_parts = search.path_to_location(modulestore(), usage_key)
except (ItemNotFoundError, NoPathToItem):
# If the problem cannot be found at the location received from the grading controller server,
# it has been deleted by the course author. We should not display it.
error_message = "Could not find module for course {0} at location {1}".format(self.course_id,
problem['location'])
log.error(error_message)
continue
# Get the problem url in the courseware.
problem_url = generate_problem_url(problem_url_parts, base_course_url)
# Map the grader name from ORA to a human readable version.
grader_type_display_name = GRADER_DISPLAY_NAMES.get(problem['grader_type'], "edX Assessment")
problem['actual_url'] = problem_url
problem['grader_type_display_name'] = grader_type_display_name
valid_problems.append(problem)
return valid_problems
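# --- Illustrative usage (a sketch, not part of the original module; the
# view wiring below is hypothetical) ---
# A view typically composes the two phases: fetch the raw list from ORA,
# then decorate each entry with a courseware URL.
#
#   student_problems = StudentProblemList(course.id, unique_id_for_user(user))
#   if student_problems.fetch_from_grading_service():
#       base_course_url = reverse('course', args=[course.id.to_deprecated_string()])
#       problems = student_problems.add_problem_data(base_course_url)
#   else:
#       problems = []  # student_problems.error_text holds a user-facing message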
|
agpl-3.0
|
ZeYt/mitmproxy
|
libmproxy/console/searchable.py
|
29
|
2808
|
import urwid
from . import signals
class Highlight(urwid.AttrMap):
def __init__(self, t):
urwid.AttrMap.__init__(
self,
urwid.Text(t.text),
"focusfield",
)
self.backup = t
class Searchable(urwid.ListBox):
def __init__(self, state, contents):
self.walker = urwid.SimpleFocusListWalker(contents)
urwid.ListBox.__init__(self, self.walker)
self.state = state
self.search_offset = 0
self.current_highlight = None
self.search_term = None
def keypress(self, size, key):
if key == "/":
signals.status_prompt.send(
prompt = "Search for",
text = "",
callback = self.set_search
)
elif key == "n":
self.find_next(False)
elif key == "N":
self.find_next(True)
elif key == "g":
self.set_focus(0)
self.walker._modified()
elif key == "G":
self.set_focus(len(self.walker) - 1)
self.walker._modified()
else:
            return super(Searchable, self).keypress(size, key)
def set_search(self, text):
self.state.last_search = text
self.search_term = text or None
self.find_next(False)
def set_highlight(self, offset):
if self.current_highlight is not None:
old = self.body[self.current_highlight]
self.body[self.current_highlight] = old.backup
if offset is None:
self.current_highlight = None
else:
self.body[offset] = Highlight(self.body[offset])
self.current_highlight = offset
def get_text(self, w):
if isinstance(w, urwid.Text):
return w.text
elif isinstance(w, Highlight):
return w.backup.text
else:
return None
def find_next(self, backwards):
if not self.search_term:
if self.state.last_search:
self.search_term = self.state.last_search
else:
self.set_highlight(None)
return
# Start search at focus + 1
if backwards:
rng = xrange(len(self.body) - 1, -1, -1)
else:
rng = xrange(1, len(self.body) + 1)
for i in rng:
off = (self.focus_position + i) % len(self.body)
w = self.body[off]
txt = self.get_text(w)
if txt and self.search_term in txt:
self.set_highlight(off)
self.set_focus(off, coming_from="above")
self.body._modified()
return
else:
self.set_highlight(None)
signals.status_message.send(message="Search not found.", expire=1)
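# --- Illustrative usage (a sketch, not part of the original module) ---
# Searchable needs a list of widgets and a state object exposing a mutable
# last_search attribute; the _State stub below is hypothetical glue.
#
#   class _State(object):
#       last_search = None
#
#   contents = [urwid.Text("foo"), urwid.Text("bar"), urwid.Text("foobar")]
#   box = Searchable(_State(), contents)
#   box.set_search("foo")    # highlights and focuses the first match
#   box.find_next(False)     # like "n": jump to the next match, wrapping around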
|
mit
|
nopjmp/SickRage
|
lib/github/NotificationSubject.py
|
74
|
3304
|
# -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 AKFish <[email protected]> #
# Copyright 2013 Vincent Jacques <[email protected]> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
class NotificationSubject(github.GithubObject.NonCompletableGithubObject):
"""
This class represents Subjects of Notifications as returned for example by http://developer.github.com/v3/activity/notifications/#list-your-notifications
"""
@property
def title(self):
"""
:type: string
"""
return self._title.value
@property
def url(self):
"""
:type: string
"""
return self._url.value
@property
def latest_comment_url(self):
"""
:type: string
"""
return self._latest_comment_url.value
@property
def type(self):
"""
:type: string
"""
return self._type.value
def _initAttributes(self):
self._title = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
self._latest_comment_url = github.GithubObject.NotSet
self._type = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "title" in attributes: # pragma no branch
self._title = self._makeStringAttribute(attributes["title"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
if "latest_comment_url" in attributes: # pragma no branch
self._latest_comment_url = self._makeStringAttribute(attributes["latest_comment_url"])
if "type" in attributes: # pragma no branch
self._type = self._makeStringAttribute(attributes["type"])
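# --- Illustrative payload mapping (a sketch, not part of PyGithub) ---
# A "subject" object from GET /notifications looks roughly like the dict
# below; _useAttributes copies each key that is present onto the matching
# property, leaving absent keys as NotSet.
#
#   attributes = {
#       "title": "Fix crash on startup",
#       "url": "https://api.github.com/repos/octocat/Hello-World/issues/123",
#       "latest_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments/1",
#       "type": "Issue",
#   }
#   # after subject._useAttributes(attributes): subject.title == "Fix crash on startup"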
|
gpl-3.0
|
xxd3vin/spp-sdk
|
opt/Python27/Lib/test/test_userdict.py
|
118
|
10636
|
# Check every path through every method of UserDict
from test import test_support, mapping_tests
import UserDict
d0 = {}
d1 = {"one": 1}
d2 = {"one": 1, "two": 2}
d3 = {"one": 1, "two": 3, "three": 5}
d4 = {"one": None, "two": None}
d5 = {"one": 1, "two": 1}
class UserDictTest(mapping_tests.TestHashMappingProtocol):
type2test = UserDict.IterableUserDict
def test_all(self):
# Test constructors
u = UserDict.UserDict()
u0 = UserDict.UserDict(d0)
u1 = UserDict.UserDict(d1)
u2 = UserDict.IterableUserDict(d2)
uu = UserDict.UserDict(u)
uu0 = UserDict.UserDict(u0)
uu1 = UserDict.UserDict(u1)
uu2 = UserDict.UserDict(u2)
# keyword arg constructor
self.assertEqual(UserDict.UserDict(one=1, two=2), d2)
# item sequence constructor
self.assertEqual(UserDict.UserDict([('one',1), ('two',2)]), d2)
self.assertEqual(UserDict.UserDict(dict=[('one',1), ('two',2)]), d2)
# both together
self.assertEqual(UserDict.UserDict([('one',1), ('two',2)], two=3, three=5), d3)
# alternate constructor
self.assertEqual(UserDict.UserDict.fromkeys('one two'.split()), d4)
self.assertEqual(UserDict.UserDict().fromkeys('one two'.split()), d4)
self.assertEqual(UserDict.UserDict.fromkeys('one two'.split(), 1), d5)
self.assertEqual(UserDict.UserDict().fromkeys('one two'.split(), 1), d5)
self.assertTrue(u1.fromkeys('one two'.split()) is not u1)
self.assertIsInstance(u1.fromkeys('one two'.split()), UserDict.UserDict)
self.assertIsInstance(u2.fromkeys('one two'.split()), UserDict.IterableUserDict)
# Test __repr__
self.assertEqual(str(u0), str(d0))
self.assertEqual(repr(u1), repr(d1))
self.assertEqual(repr(u2), repr(d2))
# Test __cmp__ and __len__
all = [d0, d1, d2, u, u0, u1, u2, uu, uu0, uu1, uu2]
for a in all:
for b in all:
self.assertEqual(cmp(a, b), cmp(len(a), len(b)))
# Test __getitem__
self.assertEqual(u2["one"], 1)
self.assertRaises(KeyError, u1.__getitem__, "two")
# Test __setitem__
u3 = UserDict.UserDict(u2)
u3["two"] = 2
u3["three"] = 3
# Test __delitem__
del u3["three"]
self.assertRaises(KeyError, u3.__delitem__, "three")
# Test clear
u3.clear()
self.assertEqual(u3, {})
# Test copy()
u2a = u2.copy()
self.assertEqual(u2a, u2)
u2b = UserDict.UserDict(x=42, y=23)
u2c = u2b.copy() # making a copy of a UserDict is special cased
self.assertEqual(u2b, u2c)
class MyUserDict(UserDict.UserDict):
def display(self): print self
m2 = MyUserDict(u2)
m2a = m2.copy()
self.assertEqual(m2a, m2)
# SF bug #476616 -- copy() of UserDict subclass shared data
m2['foo'] = 'bar'
self.assertNotEqual(m2a, m2)
# Test keys, items, values
self.assertEqual(u2.keys(), d2.keys())
self.assertEqual(u2.items(), d2.items())
self.assertEqual(u2.values(), d2.values())
# Test has_key and "in".
for i in u2.keys():
self.assertIn(i, u2)
self.assertEqual(i in u1, i in d1)
self.assertEqual(i in u0, i in d0)
with test_support.check_py3k_warnings():
self.assertTrue(u2.has_key(i))
self.assertEqual(u1.has_key(i), d1.has_key(i))
self.assertEqual(u0.has_key(i), d0.has_key(i))
# Test update
t = UserDict.UserDict()
t.update(u2)
self.assertEqual(t, u2)
class Items:
def items(self):
return (("x", 42), ("y", 23))
t = UserDict.UserDict()
t.update(Items())
self.assertEqual(t, {"x": 42, "y": 23})
# Test get
for i in u2.keys():
self.assertEqual(u2.get(i), u2[i])
self.assertEqual(u1.get(i), d1.get(i))
self.assertEqual(u0.get(i), d0.get(i))
# Test "in" iteration.
for i in xrange(20):
u2[i] = str(i)
ikeys = []
for k in u2:
ikeys.append(k)
keys = u2.keys()
self.assertEqual(set(ikeys), set(keys))
# Test setdefault
t = UserDict.UserDict()
self.assertEqual(t.setdefault("x", 42), 42)
self.assertTrue(t.has_key("x"))
self.assertEqual(t.setdefault("x", 23), 42)
# Test pop
t = UserDict.UserDict(x=42)
self.assertEqual(t.pop("x"), 42)
self.assertRaises(KeyError, t.pop, "x")
self.assertEqual(t.pop("x", 1), 1)
t["x"] = 42
self.assertEqual(t.pop("x", 1), 42)
# Test popitem
t = UserDict.UserDict(x=42)
self.assertEqual(t.popitem(), ("x", 42))
self.assertRaises(KeyError, t.popitem)
def test_missing(self):
# Make sure UserDict doesn't have a __missing__ method
self.assertEqual(hasattr(UserDict, "__missing__"), False)
# Test several cases:
# (D) subclass defines __missing__ method returning a value
# (E) subclass defines __missing__ method raising RuntimeError
# (F) subclass sets __missing__ instance variable (no effect)
        # (G) subclass doesn't define __missing__ at all
class D(UserDict.UserDict):
def __missing__(self, key):
return 42
d = D({1: 2, 3: 4})
self.assertEqual(d[1], 2)
self.assertEqual(d[3], 4)
self.assertNotIn(2, d)
self.assertNotIn(2, d.keys())
self.assertEqual(d[2], 42)
class E(UserDict.UserDict):
def __missing__(self, key):
raise RuntimeError(key)
e = E()
try:
e[42]
except RuntimeError, err:
self.assertEqual(err.args, (42,))
else:
self.fail("e[42] didn't raise RuntimeError")
class F(UserDict.UserDict):
def __init__(self):
# An instance variable __missing__ should have no effect
self.__missing__ = lambda key: None
UserDict.UserDict.__init__(self)
f = F()
try:
f[42]
except KeyError, err:
self.assertEqual(err.args, (42,))
else:
self.fail("f[42] didn't raise KeyError")
class G(UserDict.UserDict):
pass
g = G()
try:
g[42]
except KeyError, err:
self.assertEqual(err.args, (42,))
else:
self.fail("g[42] didn't raise KeyError")
##########################
# Test Dict Mixin
class SeqDict(UserDict.DictMixin):
"""Dictionary lookalike implemented with lists.
Used to test and demonstrate DictMixin
"""
def __init__(self, other=None, **kwargs):
self.keylist = []
self.valuelist = []
if other is not None:
for (key, value) in other:
self[key] = value
for (key, value) in kwargs.iteritems():
self[key] = value
def __getitem__(self, key):
try:
i = self.keylist.index(key)
except ValueError:
raise KeyError
return self.valuelist[i]
def __setitem__(self, key, value):
try:
i = self.keylist.index(key)
self.valuelist[i] = value
except ValueError:
self.keylist.append(key)
self.valuelist.append(value)
def __delitem__(self, key):
try:
i = self.keylist.index(key)
except ValueError:
raise KeyError
self.keylist.pop(i)
self.valuelist.pop(i)
def keys(self):
return list(self.keylist)
def copy(self):
d = self.__class__()
for key, value in self.iteritems():
d[key] = value
return d
@classmethod
def fromkeys(cls, keys, value=None):
d = cls()
for key in keys:
d[key] = value
return d
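# Quick illustration (comments only; UserDictMixinTest below exercises this
# for real): DictMixin derives the full mapping API from the primitives
# defined above plus keys().
#
#   s = SeqDict()
#   s['a'] = 1; s['b'] = 2   # __setitem__ appends to the parallel lists
#   s.items()                # -> [('a', 1), ('b', 2)], supplied by DictMixin
#   s.pop('a')               # -> 1, built by DictMixin on __getitem__/__delitem__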
class UserDictMixinTest(mapping_tests.TestMappingProtocol):
type2test = SeqDict
def test_all(self):
## Setup test and verify working of the test class
# check init
s = SeqDict()
# exercise setitem
s[10] = 'ten'
s[20] = 'twenty'
s[30] = 'thirty'
# exercise delitem
del s[20]
# check getitem and setitem
self.assertEqual(s[10], 'ten')
# check keys() and delitem
self.assertEqual(s.keys(), [10, 30])
## Now, test the DictMixin methods one by one
# has_key
self.assertTrue(s.has_key(10))
self.assertTrue(not s.has_key(20))
# __contains__
self.assertIn(10, s)
self.assertNotIn(20, s)
# __iter__
self.assertEqual([k for k in s], [10, 30])
# __len__
self.assertEqual(len(s), 2)
# iteritems
self.assertEqual(list(s.iteritems()), [(10,'ten'), (30, 'thirty')])
# iterkeys
self.assertEqual(list(s.iterkeys()), [10, 30])
# itervalues
self.assertEqual(list(s.itervalues()), ['ten', 'thirty'])
# values
self.assertEqual(s.values(), ['ten', 'thirty'])
# items
self.assertEqual(s.items(), [(10,'ten'), (30, 'thirty')])
# get
self.assertEqual(s.get(10), 'ten')
self.assertEqual(s.get(15,'fifteen'), 'fifteen')
self.assertEqual(s.get(15), None)
# setdefault
self.assertEqual(s.setdefault(40, 'forty'), 'forty')
self.assertEqual(s.setdefault(10, 'null'), 'ten')
del s[40]
# pop
self.assertEqual(s.pop(10), 'ten')
self.assertNotIn(10, s)
s[10] = 'ten'
self.assertEqual(s.pop("x", 1), 1)
s["x"] = 42
self.assertEqual(s.pop("x", 1), 42)
# popitem
k, v = s.popitem()
self.assertNotIn(k, s)
s[k] = v
# clear
s.clear()
self.assertEqual(len(s), 0)
# empty popitem
self.assertRaises(KeyError, s.popitem)
# update
s.update({10: 'ten', 20:'twenty'})
self.assertEqual(s[10], 'ten')
self.assertEqual(s[20], 'twenty')
# cmp
self.assertEqual(s, {10: 'ten', 20:'twenty'})
t = SeqDict()
t[20] = 'twenty'
t[10] = 'ten'
self.assertEqual(s, t)
def test_main():
test_support.run_unittest(
UserDictTest,
UserDictMixinTest
)
if __name__ == "__main__":
test_main()
|
mit
|
sarnold/exaile
|
plugins/previewdevice/__init__.py
|
2
|
12544
|
# Copyright (C) 2012 Dustin Spicuzza
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
from gi.repository import Gdk
from gi.repository import Gtk
import os
from xl import (
event,
providers,
player,
settings,
xdg
)
from xl.nls import gettext as _
from xlgui import guiutil, main
from xlgui.widgets import menu, playback
import previewprefs
import logging
logger = logging.getLogger(__name__)
class SecondaryOutputPlugin(object):
'''Implements logic for plugin'''
def get_preferences_pane(self):
return previewprefs
def enable(self, exaile):
self.exaile = exaile
def on_gui_loaded(self):
self.hooked = False
self.resuming = False
#
# Initialize the player objects needed
#
self.player = player.player.ExailePlayer('preview_device')
self.queue = player.queue.PlayQueue(
self.player,
location=os.path.join(
xdg.get_data_dir(),
'preview_device_queue.state'
),
name='Preview Device Queue'
)
#
# Initialize the GUI stuff
#
self._init_gui()
# preserve state
if settings.get_option('plugin/previewdevice/shown', True):
self._init_gui_hooks()
def disable(self, exaile):
logger.debug('Disabling Preview Device')
event.log_event('preview_device_disabling', self, None)
self._destroy_gui_hooks()
self._destroy_gui()
self.player.destroy()
self.player = None
self.queue = None
logger.debug('Preview Device Disabled')
def _init_gui(self):
self.pane = Gtk.HPaned()
# stolen from main
self.info_area = main.MainWindowTrackInfoPane(self.player)
self.info_area.set_auto_update(True)
self.info_area.set_padding(3, 3, 3, 3)
self.info_area.hide()
self.info_area.set_no_show_all(True)
volume_control = playback.VolumeControl(self.player)
self.info_area.get_action_area().pack_start(volume_control, True, True, 0)
self.playpause_button = Gtk.Button()
self.playpause_button.set_relief(Gtk.ReliefStyle.NONE)
self._on_playback_end(None, None, None)
self.playpause_button.connect(
'button-press-event',
self._on_playpause_button_clicked
)
self.progress_bar = playback.SeekProgressBar(self.player, use_markers=False)
play_toolbar = Gtk.HBox()
play_toolbar.pack_start(self.playpause_button, False, False, 0)
play_toolbar.pack_start(self.progress_bar, True, True, 0)
# stick our player controls into this box
self.pane1_box = Gtk.VBox()
self.pane2_box = Gtk.VBox()
self.pane2_box.pack_start(self.info_area, False, False, 0)
self.pane2_box.pack_start(play_toolbar, False, False, 0)
self.pane.pack1(self.pane1_box, resize=True, shrink=True)
self.pane.pack2(self.pane2_box, resize=True, shrink=True)
# setup menus
# add menu item to 'view' to display our playlist
self.menu = menu.check_menu_item(
'preview_player',
'',
_('Preview Player'),
lambda *e: self.hooked,
self._on_view
)
providers.register('menubar-view-menu', self.menu)
self.preview_menuitem = menu.simple_menu_item('_preview', ['enqueue'],
_('Preview'), callback=self._on_preview,
condition_fn=lambda n, p, c: not c['selection-empty'])
# TODO: Setup on other context menus
self.preview_provides = [
'track-panel-menu',
'playlist-context-menu',
]
for provide in self.preview_provides:
providers.register(provide, self.preview_menuitem)
self._on_option_set('gui_option_set', settings, 'gui/show_info_area')
self._on_option_set('gui_option_set', settings, 'gui/show_info_area_covers')
event.add_ui_callback(self._on_option_set, 'option_set')
def _destroy_gui(self):
event.remove_callback(self._on_option_set, 'option_set')
for provide in self.preview_provides:
providers.unregister(provide, self.preview_menuitem)
providers.unregister('menubar-view-menu', self.menu)
self.info_area.destroy()
self.playpause_button.destroy()
self.pane2_box.destroy()
self.pane1_box.destroy()
self.pane.destroy()
def _setup_events(self, setup):
setup(self._on_playback_resume, 'playback_player_resume', self.player)
setup(self._on_playback_end, 'playback_player_end', self.player)
setup(self._on_playback_error, 'playback_error', self.player)
setup(self._on_playback_start, 'playback_track_start', self.player)
setup(self._on_toggle_pause, 'playback_toggle_pause', self.player)
def _init_gui_hooks(self):
'''
Initializes any hooks into the main Exaile GUI
Note that this is rather ugly, but currently exaile doesn't really
have a better way to do this, and there isn't a better place to
stick our gui objects.
'''
if self.hooked:
return
        # we sit where the main info_area was; the info_area is moved into
        # our pane and duplicated so there is one per side, and the same is
        # done for the play_toolbar
info_area = main.mainwindow().info_area
play_toolbar = main.mainwindow().builder.get_object('play_toolbar')
parent = play_toolbar.get_parent()
parent.remove(play_toolbar)
parent = info_area.get_parent()
parent.remove(info_area)
parent.pack_start(self.pane, False, False, 0)
parent.reorder_child(self.pane, 0)
# stick the main player controls into this box
self.pane1_box.pack_start(info_area, False, False, 0)
self.pane1_box.pack_start(play_toolbar, False, False, 0)
# and do it
self.pane.show_all()
# add player events
self._setup_events(event.add_ui_callback)
self.hooked = True
settings.set_option('plugin/previewdevice/shown', True)
logger.debug("Preview device gui hooked")
event.log_event('preview_device_enabled', self, None)
def _destroy_gui_hooks(self):
'''
Removes any hooks from the main Exaile GUI
'''
if not self.hooked:
return
info_area = main.mainwindow().info_area
play_toolbar = main.mainwindow().builder.get_object('play_toolbar')
# detach main GUI elements
parent = play_toolbar.get_parent()
parent.remove(play_toolbar)
parent = info_area.get_parent()
parent.remove(info_area)
# detach the element we added to hold them
parent = self.pane.get_parent()
parent.remove(self.pane)
# reattach
parent.pack_start(info_area, False, False, 0)
parent.reorder_child(info_area, 0)
parent.pack_start(play_toolbar, False, False, 0)
# remove player events
self._setup_events(event.remove_callback)
self.hooked = False
settings.set_option('plugin/previewdevice/shown', False)
logger.debug('Preview device unhooked')
#
# Menu events
#
def _on_view(self, menu, name, parent, context):
if self.hooked:
self._destroy_gui_hooks()
else:
self._init_gui_hooks()
def _on_preview(self, menu, display_name, playlist_view, context):
self._init_gui_hooks()
tracks = context['selected-tracks']
if len(tracks) > 0:
self.queue.play(tracks[0])
#
# Various player events
#
def _on_playpause_button_clicked(self, widget, event):
"""
Called when the play button is clicked
"""
if event.button == 1:
if event.type == Gdk.EventType.BUTTON_PRESS and \
(self.player.is_paused() or self.player.is_playing()):
self.player.toggle_pause()
elif event.type == Gdk.EventType._2BUTTON_PRESS:
self.player.stop()
def _on_option_set(self, name, object, option):
"""
Handles changes of settings
"""
if option == 'gui/show_info_area':
self.info_area.set_no_show_all(False)
if settings.get_option(option, True):
self.info_area.show_all()
else:
self.info_area.hide()
self.info_area.set_no_show_all(True)
elif option == 'gui/show_info_area_covers':
cover = self.info_area.cover
cover.set_no_show_all(False)
if settings.get_option(option, True):
cover.show_all()
else:
cover.hide()
cover.set_no_show_all(True)
def _on_playback_resume(self, type, player, data):
self.resuming = True
def _on_playback_start(self, type, player, object):
"""
Called when playback starts
Sets the currently playing track visible in the currently selected
playlist if the user has chosen this setting
"""
if self.resuming:
self.resuming = False
return
image = Gtk.Image.new_from_stock(Gtk.STOCK_MEDIA_PAUSE,
Gtk.IconSize.BUTTON)
self.playpause_button.set_image(image)
self.playpause_button.set_tooltip_text(
_('Pause Playback (double click to stop)'))
def _on_playback_end(self, type, player, object):
"""
Called when playback ends
"""
image = Gtk.Image.new_from_stock(Gtk.STOCK_MEDIA_PLAY,
Gtk.IconSize.BUTTON)
self.playpause_button.set_image(image)
self.playpause_button.set_tooltip_text(_('Start Playback'))
def _on_playback_error(self, type, player, message):
"""
Called when there has been a playback error
"""
main.mainwindow().message.show_error(
_('Playback error encountered!'), message)
def _on_toggle_pause(self, type, player, object):
"""
Called when the user clicks the play button after playback has
already begun
"""
if player.is_paused():
image = Gtk.Image.new_from_stock(Gtk.STOCK_MEDIA_PLAY,
Gtk.IconSize.BUTTON)
tooltip = _('Continue Playback')
else:
image = Gtk.Image.new_from_stock(Gtk.STOCK_MEDIA_PAUSE,
Gtk.IconSize.BUTTON)
tooltip = _('Pause Playback')
self.playpause_button.set_image(image)
self.playpause_button.set_tooltip_text(tooltip)
plugin_class = SecondaryOutputPlugin
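# Note (a sketch of the plugin contract as inferred from this file, not
# authoritative): Exaile discovers the plugin through the module-level
# plugin_class name above and drives it via
# enable(exaile) -> on_gui_loaded() -> ... -> disable(exaile).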
|
gpl-2.0
|
kawamon/hue
|
apps/sqoop/src/sqoop/api/autocomplete.py
|
35
|
2241
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from django.http import Http404
from desktop.lib.django_util import JsonResponse
from librdbms import conf as librdbms_conf
from librdbms.server import dbms
def get_query_server_config_from_request(request):
check_params = {
'server_name': request.GET.get('server_type')
}
if 'host' in request.GET:
check_params['server_host'] = request.GET.get('host')
if 'port' in request.GET:
check_params['server_port'] = request.GET.get('port')
if 'username' in request.GET:
check_params['username'] = request.GET.get('username')
for alias in librdbms_conf.DATABASES:
config = dbms.get_query_server_config(alias)
if all([check_params[param] == config[param] for param in check_params]):
return config
return None
def autocomplete(request, database=None, table=None):
response = {
'status': 0,
'errors': []
}
if 'server_type' not in request.GET:
raise Http404()
query_server = get_query_server_config_from_request(request)
    if not query_server:
        raise Http404()
    if database:
        query_server['name'] = database
db = dbms.get(request.user, query_server)
if database:
response['databases'] = [database]
if table:
response['tables'] = [table]
response['columns'] = db.get_columns(database, table)
else:
response['tables'] = db.get_tables(database)
else:
response['databases'] = db.get_databases()
return JsonResponse(response)
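# --- Illustrative response shape (a sketch, not part of the original; the
# URL below depends on the app's URLconf and is hypothetical) ---
# A request such as GET .../autocomplete/<db>/<table>?server_type=mysql
# would produce JSON along the lines of:
#
#   {
#     "status": 0,
#     "errors": [],
#     "databases": ["<db>"],
#     "tables": ["<table>"],
#     "columns": [...]
#   }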
|
apache-2.0
|
tea321000/django-project
|
musicsite/music/migrations/0002_auto_20170305_2121.py
|
1
|
1364
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-03-05 13:21
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('music', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='music',
name='singer',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='Musician_singer', to='music.Musician'),
),
migrations.AlterField(
model_name='musician',
name='birthday',
field=models.DateTimeField(verbose_name='\u51fa\u751f\u65e5\u671f'),
),
migrations.AlterField(
model_name='musician',
name='name',
field=models.CharField(max_length=40, verbose_name='\u539f\u540d'),
),
migrations.AlterField(
model_name='musician',
name='sex',
field=models.CharField(choices=[('M', '\u7537'), ('F', '\u5973')], max_length=1, verbose_name='\u6027\u522b'),
),
migrations.AlterField(
model_name='musician',
name='stagename',
field=models.CharField(blank=True, max_length=40, null=True, verbose_name='\u827a\u540d'),
),
]
|
mit
|
m1ck/bookadoptions
|
django/contrib/contenttypes/management.py
|
315
|
2458
|
from django.contrib.contenttypes.models import ContentType
from django.db.models import get_apps, get_models, signals
from django.utils.encoding import smart_unicode
def update_contenttypes(app, created_models, verbosity=2, **kwargs):
"""
Creates content types for models in the given app, removing any model
entries that no longer have a matching model class.
"""
ContentType.objects.clear_cache()
content_types = list(ContentType.objects.filter(app_label=app.__name__.split('.')[-2]))
app_models = get_models(app)
if not app_models:
return
for klass in app_models:
opts = klass._meta
try:
ct = ContentType.objects.get(app_label=opts.app_label,
model=opts.object_name.lower())
content_types.remove(ct)
except ContentType.DoesNotExist:
ct = ContentType(name=smart_unicode(opts.verbose_name_raw),
app_label=opts.app_label, model=opts.object_name.lower())
ct.save()
if verbosity >= 2:
print "Adding content type '%s | %s'" % (ct.app_label, ct.model)
# The presence of any remaining content types means the supplied app has an
# undefined model. Confirm that the content type is stale before deletion.
if content_types:
if kwargs.get('interactive', False):
content_type_display = '\n'.join([' %s | %s' % (ct.app_label, ct.model) for ct in content_types])
ok_to_delete = raw_input("""The following content types are stale and need to be deleted:
%s
Any objects related to these content types by a foreign key will also
be deleted. Are you sure you want to delete these content types?
If you're unsure, answer 'no'.
Type 'yes' to continue, or 'no' to cancel: """ % content_type_display)
else:
ok_to_delete = False
if ok_to_delete == 'yes':
for ct in content_types:
if verbosity >= 2:
print "Deleting stale content type '%s | %s'" % (ct.app_label, ct.model)
ct.delete()
else:
if verbosity >= 2:
print "Stale content types remain."
def update_all_contenttypes(verbosity=2, **kwargs):
for app in get_apps():
update_contenttypes(app, None, verbosity, **kwargs)
signals.post_syncdb.connect(update_contenttypes)
if __name__ == "__main__":
update_all_contenttypes()
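# Worked example (comments only; the app and models are hypothetical): for
# an app "polls" with models Question and Choice, the first syncdb creates
# ContentType rows ('polls', 'question') and ('polls', 'choice'). If Choice
# is later deleted from models.py, the next run leaves ('polls', 'choice')
# in content_types after the loop; it is deleted -- along with any rows that
# reference it by foreign key -- only when interactive=True and the user
# answers 'yes'.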
|
bsd-3-clause
|
errx/django
|
tests/distinct_on_fields/tests.py
|
17
|
5611
|
from __future__ import unicode_literals
from django.db.models import Max
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import str_prefix
from .models import Tag, Celebrity, Fan, Staff, StaffTag
@skipUnlessDBFeature('can_distinct_on_fields')
class DistinctOnTests(TestCase):
def setUp(self):
t1 = Tag.objects.create(name='t1')
Tag.objects.create(name='t2', parent=t1)
t3 = Tag.objects.create(name='t3', parent=t1)
Tag.objects.create(name='t4', parent=t3)
Tag.objects.create(name='t5', parent=t3)
self.p1_o1 = Staff.objects.create(id=1, name="p1", organisation="o1")
self.p2_o1 = Staff.objects.create(id=2, name="p2", organisation="o1")
self.p3_o1 = Staff.objects.create(id=3, name="p3", organisation="o1")
self.p1_o2 = Staff.objects.create(id=4, name="p1", organisation="o2")
self.p1_o1.coworkers.add(self.p2_o1, self.p3_o1)
StaffTag.objects.create(staff=self.p1_o1, tag=t1)
StaffTag.objects.create(staff=self.p1_o1, tag=t1)
celeb1 = Celebrity.objects.create(name="c1")
celeb2 = Celebrity.objects.create(name="c2")
self.fan1 = Fan.objects.create(fan_of=celeb1)
self.fan2 = Fan.objects.create(fan_of=celeb1)
self.fan3 = Fan.objects.create(fan_of=celeb2)
def test_basic_distinct_on(self):
"""QuerySet.distinct('field', ...) works"""
# (qset, expected) tuples
qsets = (
(
Staff.objects.distinct().order_by('name'),
['<Staff: p1>', '<Staff: p1>', '<Staff: p2>', '<Staff: p3>'],
),
(
Staff.objects.distinct('name').order_by('name'),
['<Staff: p1>', '<Staff: p2>', '<Staff: p3>'],
),
(
Staff.objects.distinct('organisation').order_by('organisation', 'name'),
['<Staff: p1>', '<Staff: p1>'],
),
(
Staff.objects.distinct('name', 'organisation').order_by('name', 'organisation'),
['<Staff: p1>', '<Staff: p1>', '<Staff: p2>', '<Staff: p3>'],
),
(
Celebrity.objects.filter(fan__in=[self.fan1, self.fan2, self.fan3]).distinct('name').order_by('name'),
['<Celebrity: c1>', '<Celebrity: c2>'],
),
# Does combining querysets work?
(
(Celebrity.objects.filter(fan__in=[self.fan1, self.fan2]).
distinct('name').order_by('name') |
Celebrity.objects.filter(fan__in=[self.fan3]).
distinct('name').order_by('name')),
['<Celebrity: c1>', '<Celebrity: c2>'],
),
(
StaffTag.objects.distinct('staff', 'tag'),
['<StaffTag: t1 -> p1>'],
),
(
Tag.objects.order_by('parent__pk', 'pk').distinct('parent'),
['<Tag: t2>', '<Tag: t4>', '<Tag: t1>'],
),
(
StaffTag.objects.select_related('staff').distinct('staff__name').order_by('staff__name'),
['<StaffTag: t1 -> p1>'],
),
# Fetch the alphabetically first coworker for each worker
(
(Staff.objects.distinct('id').order_by('id', 'coworkers__name').
values_list('id', 'coworkers__name')),
[str_prefix("(1, %(_)s'p2')"), str_prefix("(2, %(_)s'p1')"),
str_prefix("(3, %(_)s'p1')"), "(4, None)"]
),
)
for qset, expected in qsets:
self.assertQuerysetEqual(qset, expected)
self.assertEqual(qset.count(), len(expected))
# Combining queries with different distinct_fields is not allowed.
base_qs = Celebrity.objects.all()
self.assertRaisesMessage(
AssertionError,
"Cannot combine queries with different distinct fields.",
lambda: (base_qs.distinct('id') & base_qs.distinct('name'))
)
# Test join unreffing
c1 = Celebrity.objects.distinct('greatest_fan__id', 'greatest_fan__fan_of')
self.assertIn('OUTER JOIN', str(c1.query))
c2 = c1.distinct('pk')
self.assertNotIn('OUTER JOIN', str(c2.query))
def test_distinct_not_implemented_checks(self):
# distinct + annotate not allowed
with self.assertRaises(NotImplementedError):
Celebrity.objects.annotate(Max('id')).distinct('id')[0]
with self.assertRaises(NotImplementedError):
Celebrity.objects.distinct('id').annotate(Max('id'))[0]
# However this check is done only when the query executes, so you
# can use distinct() to remove the fields before execution.
Celebrity.objects.distinct('id').annotate(Max('id')).distinct()[0]
# distinct + aggregate not allowed
with self.assertRaises(NotImplementedError):
Celebrity.objects.distinct('id').aggregate(Max('id'))
def test_distinct_on_in_ordered_subquery(self):
qs = Staff.objects.distinct('name').order_by('name', 'id')
qs = Staff.objects.filter(pk__in=qs).order_by('name')
self.assertQuerysetEqual(
qs, [self.p1_o1, self.p2_o1, self.p3_o1],
lambda x: x
)
qs = Staff.objects.distinct('name').order_by('name', '-id')
qs = Staff.objects.filter(pk__in=qs).order_by('name')
self.assertQuerysetEqual(
qs, [self.p1_o2, self.p2_o1, self.p3_o1],
lambda x: x
)
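# For reference (a sketch; the exact SQL depends on backend and version): on
# PostgreSQL, Staff.objects.distinct('name').order_by('name') compiles to
# roughly
#
#   SELECT DISTINCT ON ("distinct_on_fields_staff"."name") ...
#   FROM "distinct_on_fields_staff"
#   ORDER BY "distinct_on_fields_staff"."name" ASC
#
# which is why the tests above always lead ORDER BY with the DISTINCT ON
# columns: PostgreSQL requires the leftmost ORDER BY expressions to match them.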
|
bsd-3-clause
|
acq4/acq4
|
acq4/modules/DataManager/FileInfoViewTemplate.py
|
3
|
2242
|
# -*- coding: utf-8 -*-
from __future__ import print_function
# Form implementation generated from reading ui file './acq4/modules/DataManager/FileInfoViewTemplate.ui'
#
# Created: Tue Dec 24 01:49:10 2013
# by: PyQt4 UI code generator 4.10
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(400, 300)
self.verticalLayout = QtGui.QVBoxLayout(Form)
self.verticalLayout.setSpacing(0)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.scrollArea = QtGui.QScrollArea(Form)
self.scrollArea.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName(_fromUtf8("scrollArea"))
self.scrollAreaWidgetContents = QtGui.QWidget()
self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 398, 298))
self.scrollAreaWidgetContents.setObjectName(_fromUtf8("scrollAreaWidgetContents"))
self.formLayout_2 = QtGui.QFormLayout(self.scrollAreaWidgetContents)
self.formLayout_2.setFieldGrowthPolicy(QtGui.QFormLayout.ExpandingFieldsGrow)
self.formLayout_2.setMargin(0)
self.formLayout_2.setHorizontalSpacing(10)
self.formLayout_2.setVerticalSpacing(0)
self.formLayout_2.setObjectName(_fromUtf8("formLayout_2"))
self.scrollArea.setWidget(self.scrollAreaWidgetContents)
self.verticalLayout.addWidget(self.scrollArea)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(_translate("Form", "Form", None))
|
mit
|
mustafat/odoo-1
|
addons/stock/procurement.py
|
227
|
22183
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT, float_compare, float_round
from openerp import SUPERUSER_ID
from dateutil.relativedelta import relativedelta
from datetime import datetime
from psycopg2 import OperationalError
import openerp
class procurement_group(osv.osv):
_inherit = 'procurement.group'
_columns = {
'partner_id': fields.many2one('res.partner', 'Partner')
}
class procurement_rule(osv.osv):
_inherit = 'procurement.rule'
def _get_action(self, cr, uid, context=None):
result = super(procurement_rule, self)._get_action(cr, uid, context=context)
return result + [('move', _('Move From Another Location'))]
def _get_rules(self, cr, uid, ids, context=None):
res = []
for route in self.browse(cr, uid, ids):
res += [x.id for x in route.pull_ids]
return res
_columns = {
'location_id': fields.many2one('stock.location', 'Procurement Location'),
'location_src_id': fields.many2one('stock.location', 'Source Location',
help="Source location is action=move"),
'route_id': fields.many2one('stock.location.route', 'Route',
help="If route_id is False, the rule is global"),
'procure_method': fields.selection([('make_to_stock', 'Take From Stock'), ('make_to_order', 'Create Procurement')], 'Move Supply Method', required=True,
help="""Determines the procurement method of the stock move that will be generated: whether it will need to 'take from the available stock' in its source location or needs to ignore its stock and create a procurement over there."""),
'route_sequence': fields.related('route_id', 'sequence', string='Route Sequence',
store={
'stock.location.route': (_get_rules, ['sequence'], 10),
'procurement.rule': (lambda self, cr, uid, ids, c={}: ids, ['route_id'], 10),
}),
'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type',
help="Picking Type determines the way the picking should be shown in the view, reports, ..."),
'delay': fields.integer('Number of Days'),
'partner_address_id': fields.many2one('res.partner', 'Partner Address'),
        'propagate': fields.boolean('Propagate cancel and split', help='If checked, when the previous move of this move (which was generated by a next procurement) is cancelled or split, the move generated by this rule will be cancelled or split as well'),
'warehouse_id': fields.many2one('stock.warehouse', 'Served Warehouse', help='The warehouse this rule is for'),
'propagate_warehouse_id': fields.many2one('stock.warehouse', 'Warehouse to Propagate', help="The warehouse to propagate on the created move/procurement, which can be different of the warehouse this rule is for (e.g for resupplying rules from another warehouse)"),
}
_defaults = {
'procure_method': 'make_to_stock',
'propagate': True,
'delay': 0,
}
class procurement_order(osv.osv):
_inherit = "procurement.order"
_columns = {
'location_id': fields.many2one('stock.location', 'Procurement Location'), # not required because task may create procurements that aren't linked to a location with sale_service
'partner_dest_id': fields.many2one('res.partner', 'Customer Address', help="In case of dropshipping, we need to know the destination address more precisely"),
'move_ids': fields.one2many('stock.move', 'procurement_id', 'Moves', help="Moves created by the procurement"),
'move_dest_id': fields.many2one('stock.move', 'Destination Move', help="Move which caused (created) the procurement"),
'route_ids': fields.many2many('stock.location.route', 'stock_location_route_procurement', 'procurement_id', 'route_id', 'Preferred Routes', help="Preferred route to be followed by the procurement order. Usually copied from the generating document (SO) but could be set up manually."),
'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', help="Warehouse to consider for the route selection"),
'orderpoint_id': fields.many2one('stock.warehouse.orderpoint', 'Minimum Stock Rule'),
}
def propagate_cancel(self, cr, uid, procurement, context=None):
if procurement.rule_id.action == 'move' and procurement.move_ids:
self.pool.get('stock.move').action_cancel(cr, uid, [m.id for m in procurement.move_ids], context=context)
def cancel(self, cr, uid, ids, context=None):
if context is None:
context = {}
to_cancel_ids = self.get_cancel_ids(cr, uid, ids, context=context)
ctx = context.copy()
        #set the context for the propagation of the procurement cancellation
ctx['cancel_procurement'] = True
for procurement in self.browse(cr, uid, to_cancel_ids, context=ctx):
self.propagate_cancel(cr, uid, procurement, context=ctx)
return super(procurement_order, self).cancel(cr, uid, to_cancel_ids, context=ctx)
def _find_parent_locations(self, cr, uid, procurement, context=None):
location = procurement.location_id
res = [location.id]
while location.location_id:
location = location.location_id
res.append(location.id)
return res
def change_warehouse_id(self, cr, uid, ids, warehouse_id, context=None):
if warehouse_id:
warehouse = self.pool.get('stock.warehouse').browse(cr, uid, warehouse_id, context=context)
return {'value': {'location_id': warehouse.lot_stock_id.id}}
return {}
def _search_suitable_rule(self, cr, uid, procurement, domain, context=None):
        '''Try first to find a rule among the routes set on the procurement order itself; if none is found, try the routes defined for the product (and its category), then the warehouse routes, and finally fall back to the global rules that have no route at all.'''
pull_obj = self.pool.get('procurement.rule')
warehouse_route_ids = []
if procurement.warehouse_id:
domain += ['|', ('warehouse_id', '=', procurement.warehouse_id.id), ('warehouse_id', '=', False)]
warehouse_route_ids = [x.id for x in procurement.warehouse_id.route_ids]
product_route_ids = [x.id for x in procurement.product_id.route_ids + procurement.product_id.categ_id.total_route_ids]
procurement_route_ids = [x.id for x in procurement.route_ids]
res = pull_obj.search(cr, uid, domain + [('route_id', 'in', procurement_route_ids)], order='route_sequence, sequence', context=context)
if not res:
res = pull_obj.search(cr, uid, domain + [('route_id', 'in', product_route_ids)], order='route_sequence, sequence', context=context)
if not res:
res = warehouse_route_ids and pull_obj.search(cr, uid, domain + [('route_id', 'in', warehouse_route_ids)], order='route_sequence, sequence', context=context) or []
if not res:
res = pull_obj.search(cr, uid, domain + [('route_id', '=', False)], order='sequence', context=context)
return res
def _find_suitable_rule(self, cr, uid, procurement, context=None):
rule_id = super(procurement_order, self)._find_suitable_rule(cr, uid, procurement, context=context)
if not rule_id:
#a rule defined on 'Stock' is suitable for a procurement in 'Stock\Bin A'
all_parent_location_ids = self._find_parent_locations(cr, uid, procurement, context=context)
rule_id = self._search_suitable_rule(cr, uid, procurement, [('location_id', 'in', all_parent_location_ids)], context=context)
rule_id = rule_id and rule_id[0] or False
return rule_id
def _run_move_create(self, cr, uid, procurement, context=None):
''' Returns a dictionary of values that will be used to create a stock move from a procurement.
This function assumes that the given procurement has a rule (action == 'move') set on it.
:param procurement: browse record
:rtype: dictionary
'''
newdate = (datetime.strptime(procurement.date_planned, '%Y-%m-%d %H:%M:%S') - relativedelta(days=procurement.rule_id.delay or 0)).strftime('%Y-%m-%d %H:%M:%S')
group_id = False
if procurement.rule_id.group_propagation_option == 'propagate':
group_id = procurement.group_id and procurement.group_id.id or False
elif procurement.rule_id.group_propagation_option == 'fixed':
group_id = procurement.rule_id.group_id and procurement.rule_id.group_id.id or False
#it is possible that we've already got some move done, so check for the done qty and create
#a new move with the correct qty
already_done_qty = 0
already_done_qty_uos = 0
for move in procurement.move_ids:
already_done_qty += move.product_uom_qty if move.state == 'done' else 0
already_done_qty_uos += move.product_uos_qty if move.state == 'done' else 0
qty_left = max(procurement.product_qty - already_done_qty, 0)
qty_uos_left = max(procurement.product_uos_qty - already_done_qty_uos, 0)
vals = {
'name': procurement.name,
'company_id': procurement.rule_id.company_id.id or procurement.rule_id.location_src_id.company_id.id or procurement.rule_id.location_id.company_id.id or procurement.company_id.id,
'product_id': procurement.product_id.id,
'product_uom': procurement.product_uom.id,
'product_uom_qty': qty_left,
'product_uos_qty': (procurement.product_uos and qty_uos_left) or qty_left,
'product_uos': (procurement.product_uos and procurement.product_uos.id) or procurement.product_uom.id,
'partner_id': procurement.rule_id.partner_address_id.id or (procurement.group_id and procurement.group_id.partner_id.id) or False,
'location_id': procurement.rule_id.location_src_id.id,
'location_dest_id': procurement.location_id.id,
'move_dest_id': procurement.move_dest_id and procurement.move_dest_id.id or False,
'procurement_id': procurement.id,
'rule_id': procurement.rule_id.id,
'procure_method': procurement.rule_id.procure_method,
'origin': procurement.origin,
'picking_type_id': procurement.rule_id.picking_type_id.id,
'group_id': group_id,
'route_ids': [(4, x.id) for x in procurement.route_ids],
'warehouse_id': procurement.rule_id.propagate_warehouse_id.id or procurement.rule_id.warehouse_id.id,
'date': newdate,
'date_expected': newdate,
'propagate': procurement.rule_id.propagate,
'priority': procurement.priority,
}
return vals
def _run(self, cr, uid, procurement, context=None):
if procurement.rule_id and procurement.rule_id.action == 'move':
if not procurement.rule_id.location_src_id:
self.message_post(cr, uid, [procurement.id], body=_('No source location defined!'), context=context)
return False
move_obj = self.pool.get('stock.move')
move_dict = self._run_move_create(cr, uid, procurement, context=context)
#create the move as SUPERUSER because the current user may not have the rights to do it (mto product launched by a sale for example)
move_obj.create(cr, SUPERUSER_ID, move_dict, context=context)
return True
return super(procurement_order, self)._run(cr, uid, procurement, context=context)
def run(self, cr, uid, ids, autocommit=False, context=None):
new_ids = [x.id for x in self.browse(cr, uid, ids, context=context) if x.state not in ('running', 'done', 'cancel')]
res = super(procurement_order, self).run(cr, uid, new_ids, autocommit=autocommit, context=context)
#after all the procurements are run, check if some created a draft stock move that needs to be confirmed
        #(we do that in batch because it speeds up the picking assignment and the picking state computation)
move_to_confirm_ids = []
for procurement in self.browse(cr, uid, new_ids, context=context):
if procurement.state == "running" and procurement.rule_id and procurement.rule_id.action == "move":
move_to_confirm_ids += [m.id for m in procurement.move_ids if m.state == 'draft']
if move_to_confirm_ids:
self.pool.get('stock.move').action_confirm(cr, uid, move_to_confirm_ids, context=context)
return res
def _check(self, cr, uid, procurement, context=None):
''' Implement the procurement checking for rules of type 'move'. The procurement will be satisfied only if all related
moves are done/cancel and if the requested quantity is moved.
'''
if procurement.rule_id and procurement.rule_id.action == 'move':
uom_obj = self.pool.get('product.uom')
# In case Phantom BoM splits only into procurements
if not procurement.move_ids:
return True
cancel_test_list = [x.state == 'cancel' for x in procurement.move_ids]
done_cancel_test_list = [x.state in ('done', 'cancel') for x in procurement.move_ids]
at_least_one_cancel = any(cancel_test_list)
all_done_or_cancel = all(done_cancel_test_list)
all_cancel = all(cancel_test_list)
if not all_done_or_cancel:
return False
elif all_done_or_cancel and not all_cancel:
return True
elif all_cancel:
self.message_post(cr, uid, [procurement.id], body=_('All stock moves have been cancelled for this procurement.'), context=context)
self.write(cr, uid, [procurement.id], {'state': 'cancel'}, context=context)
return False
return super(procurement_order, self)._check(cr, uid, procurement, context)
def do_view_pickings(self, cr, uid, ids, context=None):
'''
This function returns an action that display the pickings of the procurements belonging
to the same procurement group of given ids.
'''
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
result = mod_obj.get_object_reference(cr, uid, 'stock', 'do_view_pickings')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
group_ids = set([proc.group_id.id for proc in self.browse(cr, uid, ids, context=context) if proc.group_id])
result['domain'] = "[('group_id','in',[" + ','.join(map(str, list(group_ids))) + "])]"
return result
def run_scheduler(self, cr, uid, use_new_cursor=False, company_id=False, context=None):
'''
Call the scheduler in order to check the running procurements (super method), to check the minimum stock rules
and the availability of moves. This function is intended to be run for all the companies at the same time, so
we run functions as SUPERUSER to avoid intercompanies and access rights issues.
@param self: The object pointer
@param cr: The current row, from the database cursor,
@param uid: The current user ID for security checks
@param use_new_cursor: if set, use a dedicated cursor and auto-commit after processing each procurement.
This is appropriate for batch jobs only.
@param context: A standard dictionary for contextual values
@return: Dictionary of values
'''
super(procurement_order, self).run_scheduler(cr, uid, use_new_cursor=use_new_cursor, company_id=company_id, context=context)
if context is None:
context = {}
try:
if use_new_cursor:
cr = openerp.registry(cr.dbname).cursor()
move_obj = self.pool.get('stock.move')
#Minimum stock rules
self._procure_orderpoint_confirm(cr, SUPERUSER_ID, use_new_cursor=use_new_cursor, company_id=company_id, context=context)
#Search all confirmed stock_moves and try to assign them
confirmed_ids = move_obj.search(cr, uid, [('state', '=', 'confirmed')], limit=None, order='priority desc, date_expected asc', context=context)
for x in xrange(0, len(confirmed_ids), 100):
move_obj.action_assign(cr, uid, confirmed_ids[x:x + 100], context=context)
if use_new_cursor:
cr.commit()
if use_new_cursor:
cr.commit()
finally:
if use_new_cursor:
try:
cr.close()
except Exception:
pass
return {}
def _get_orderpoint_date_planned(self, cr, uid, orderpoint, start_date, context=None):
date_planned = start_date + relativedelta(days=orderpoint.product_id.seller_delay or 0.0)
return date_planned.strftime(DEFAULT_SERVER_DATE_FORMAT)
def _prepare_orderpoint_procurement(self, cr, uid, orderpoint, product_qty, context=None):
return {
'name': orderpoint.name,
'date_planned': self._get_orderpoint_date_planned(cr, uid, orderpoint, datetime.today(), context=context),
'product_id': orderpoint.product_id.id,
'product_qty': product_qty,
'company_id': orderpoint.company_id.id,
'product_uom': orderpoint.product_uom.id,
'location_id': orderpoint.location_id.id,
'origin': orderpoint.name,
'warehouse_id': orderpoint.warehouse_id.id,
'orderpoint_id': orderpoint.id,
'group_id': orderpoint.group_id.id,
}
def _product_virtual_get(self, cr, uid, order_point):
product_obj = self.pool.get('product.product')
return product_obj._product_available(cr, uid,
[order_point.product_id.id],
context={'location': order_point.location_id.id})[order_point.product_id.id]['virtual_available']
def _procure_orderpoint_confirm(self, cr, uid, use_new_cursor=False, company_id = False, context=None):
'''
Create procurement based on Orderpoint
:param bool use_new_cursor: if set, use a dedicated cursor and auto-commit after processing each procurement.
This is appropriate for batch jobs only.
'''
if context is None:
context = {}
if use_new_cursor:
cr = openerp.registry(cr.dbname).cursor()
orderpoint_obj = self.pool.get('stock.warehouse.orderpoint')
procurement_obj = self.pool.get('procurement.order')
dom = company_id and [('company_id', '=', company_id)] or []
orderpoint_ids = orderpoint_obj.search(cr, uid, dom)
prev_ids = []
while orderpoint_ids:
ids = orderpoint_ids[:100]
del orderpoint_ids[:100]
for op in orderpoint_obj.browse(cr, uid, ids, context=context):
try:
prods = self._product_virtual_get(cr, uid, op)
if prods is None:
continue
if float_compare(prods, op.product_min_qty, precision_rounding=op.product_uom.rounding) < 0:
qty = max(op.product_min_qty, op.product_max_qty) - prods
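# Round the shortfall up to a full multiple of qty_multiple:
# 'reste' is the remainder of qty modulo qty_multiple, so e.g. a shortfall
# of 7 with qty_multiple == 5 becomes 7 + (5 - 2) = 10.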
reste = op.qty_multiple > 0 and qty % op.qty_multiple or 0.0
if float_compare(reste, 0.0, precision_rounding=op.product_uom.rounding) > 0:
qty += op.qty_multiple - reste
if float_compare(qty, 0.0, precision_rounding=op.product_uom.rounding) <= 0:
continue
qty -= orderpoint_obj.subtract_procurements(cr, uid, op, context=context)
qty_rounded = float_round(qty, precision_rounding=op.product_uom.rounding)
if qty_rounded > 0:
proc_id = procurement_obj.create(cr, uid,
self._prepare_orderpoint_procurement(cr, uid, op, qty_rounded, context=context),
context=context)
self.check(cr, uid, [proc_id])
self.run(cr, uid, [proc_id])
if use_new_cursor:
cr.commit()
except OperationalError:
if use_new_cursor:
orderpoint_ids.append(op.id)
cr.rollback()
continue
else:
raise
if use_new_cursor:
cr.commit()
if prev_ids == ids:
break
else:
prev_ids = ids
if use_new_cursor:
cr.commit()
cr.close()
return {}
|
agpl-3.0
|
mdj2/django
|
django/conf/urls/__init__.py
|
107
|
2601
|
from django.core.urlresolvers import (RegexURLPattern,
RegexURLResolver, LocaleRegexURLResolver)
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from django.utils import six
__all__ = ['handler400', 'handler403', 'handler404', 'handler500', 'include', 'patterns', 'url']
handler400 = 'django.views.defaults.bad_request'
handler403 = 'django.views.defaults.permission_denied'
handler404 = 'django.views.defaults.page_not_found'
handler500 = 'django.views.defaults.server_error'
def include(arg, namespace=None, app_name=None):
if isinstance(arg, tuple):
# callable returning a namespace hint
if namespace:
raise ImproperlyConfigured('Cannot override the namespace for a dynamic module that provides a namespace')
urlconf_module, app_name, namespace = arg
else:
# No namespace hint - use manually provided namespace
urlconf_module = arg
if isinstance(urlconf_module, six.string_types):
urlconf_module = import_module(urlconf_module)
patterns = getattr(urlconf_module, 'urlpatterns', urlconf_module)
# Make sure we can iterate through the patterns (without this, some
# testcases will break).
if isinstance(patterns, (list, tuple)):
for url_pattern in patterns:
# Test if the LocaleRegexURLResolver is used within the include;
# this should throw an error since this is not allowed!
if isinstance(url_pattern, LocaleRegexURLResolver):
raise ImproperlyConfigured(
'Using i18n_patterns in an included URLconf is not allowed.')
return (urlconf_module, app_name, namespace)
def patterns(prefix, *args):
pattern_list = []
for t in args:
if isinstance(t, (list, tuple)):
t = url(prefix=prefix, *t)
elif isinstance(t, RegexURLPattern):
t.add_prefix(prefix)
pattern_list.append(t)
return pattern_list
def url(regex, view, kwargs=None, name=None, prefix=''):
if isinstance(view, (list, tuple)):
# For include(...) processing.
urlconf_module, app_name, namespace = view
return RegexURLResolver(regex, urlconf_module, kwargs, app_name=app_name, namespace=namespace)
else:
if isinstance(view, six.string_types):
if not view:
raise ImproperlyConfigured('Empty URL pattern view name not permitted (for pattern %r)' % regex)
if prefix:
view = prefix + '.' + view
return RegexURLPattern(regex, view, kwargs, name)
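# Illustrative URLconf built with these helpers ('myapp.views.index' and
# 'blog.urls' are placeholder dotted paths, not part of this module):
#
# urlpatterns = patterns('',
#     url(r'^$', 'myapp.views.index', name='index'),
#     url(r'^blog/', include('blog.urls', namespace='blog')),
# )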
|
bsd-3-clause
|
mRokita/DPLib
|
dplib/server.py
|
1
|
47676
|
# DPLib - Asynchronous bot framework for Digital Paint: Paintball 2 servers
# Copyright (C) 2017 Michał Rokita
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import select
from collections import OrderedDict
from enum import Enum
from subprocess import Popen
import asyncio
import os
from socket import socket, AF_INET, SOCK_DGRAM
from time import time
from dplib.parse import render_text, decode_ingame_text
class ServerEvent(Enum):
TIMEOUT = 0
CHAT = 1
ELIM = 2
RESPAWN = 3
MAPCHANGE = 4
DATE = 5
NAMECHANGE = 6
ENTRANCE = 7
FLAG_CAPTURED = 8
ELIM_TEAMS_FLAG = 9
ROUND_STARTED = 10
TEAM_SWITCHED = 11
DISCONNECT = 12
FLAG_GRAB = 13
FLAG_DROP = 14
ROUND_END = 15
GAMEMODE = 16
GAME_END = 17
class GameMode(Enum):
CTF = 'CTF'
ONE_FLAG = '1Flag'
ELIMINATION = 'Elim'
DEATHMATCH = 'DM'
SIEGE = 'Siege'
TDM = 'TDM'
KOTH = 'KOTH'
PONG = 'Pong'
class BadRconPasswordError(Exception):
pass
class SecurityCheckError(Exception):
pass
class MapNotFoundError(Exception):
pass
class ListenerType(Enum):
PERMANENT = 0
TRIGGER_ONCE = 1
REGEXPS = OrderedDict([
(re.compile('^\\[\d\d:\d\d:\d\d\\] (?:(?:\\[OBS\\] )|(?:\\[ELIM\\] ))?(.*?): (.*?)\r?\n'), ServerEvent.CHAT),
# [19:54:18] hTml: test
(re.compile(
'^\\[\d\d:\d\d:\d\d\\] \\*(.*?) (?:\\((.*?)\\) eliminated \\*(.*?) \\((.*?)\\)\\.\r?\n|'
'eliminated ((?:himself)|(?:herself)) with a paintgren\\.\r?\n)'), ServerEvent.ELIM),
# [18:54:24] *|ACEBot_1| (Spyder SE) eliminated *|herself| (Spyder SE).
# [12:25:44] *whoa eliminated herself with a paintgren.
# [12:26:09] *whoa eliminated himself with a paintgren.
(re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?)\\\'s (.*?) revived!\r?\n'), ServerEvent.RESPAWN),
# [19:03:57] *Red's ACEBot_6 revived!
(re.compile('^\\[\d\d:\d\d:\d\d\\] (.*?) entered the game \\((.*?)\\) \\[(.*?)\\]\r?\n'), ServerEvent.ENTRANCE),
# [19:03:57] mRokita entered the game (build 41)
(re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?)\\\'s (.*?) returned the(?: \\*(.*?))? flag!\r?\n'), ServerEvent.FLAG_CAPTURED),
# [18:54:24] *Red's hTml returned the *Blue flag!
(re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?)\\\'s (.*?) earned (\d+) points for possesion of eliminated teams flag!\r?\n'),
ServerEvent.ELIM_TEAMS_FLAG),
# [19:30:23] *Blue's mRokita earned 3 points for possesion of eliminated teams flag!
(re.compile('^\\[\d\d:\d\d:\d\d\\] Round started\\.\\.\\.\r?\n'), ServerEvent.ROUND_STARTED),
# [10:20:11] Round started...
(re.compile(
'(?:^\\[\d\d:\d\d:\d\d\\] (.*?) switched from \\*((?:Red)|(?:Purple)|(?:Blue)|(?:Yellow))'
' to \\*((?:Red)|(?:Purple)|(?:Blue)|(?:Yellow))\\.\r?\n)|'
'(?:^\\[\d\d:\d\d:\d\d\\] (.*?) joined the \\*((?:Red)|(?:Purple)|(?:Blue)|(?:Yellow)) team\\.\r?\n)|'
'(?:^\\[\d\d:\d\d:\d\d\\] (.*?) is now (observing)?\\.\r?\n)'), ServerEvent.TEAM_SWITCHED),
# [10:20:11] mRokita switched from Blue to Red.
# [10:20:11] mRokita is now observing.
# [10:20:11] mRokita is now observing.
(re.compile('^\\[\d\d:\d\d:\d\d\\] [\t|-]{2}GameEnd[\t-](.*?)\r?\n'), ServerEvent.GAME_END),
# [22:40:33] GameEnd 441.9 No winner
# [22:40:33] GameEnd 1032.6 Red:23,Blue:22
# [22:40:33] GameEnd 4.9 DPBot01 wins!
# [22:40:33] GameEnd 42.9 Yellow:5,Blue:0,Purple:0,Red:0
# [22:40:33] GameEnd 42.9 Yellow:5,Blue:12,Purple:7
(re.compile('^\\[\d\d:\d\d:\d\d\\] == Map Loaded: (.+) ==\r?\n'), ServerEvent.MAPCHANGE),
# [10:20:11] == Map Loaded: airtime ==
(re.compile('^\\[\d\d:\d\d:\d\d\\] (.*?) changed name to (.*?)\\.\r?\n'), ServerEvent.NAMECHANGE),
# [19:54:54] name1 changed name to name2.
(re.compile('^\\[\d\d:\d\d:\d\d\\] (.*?) disconnected\\.\r?\n'), ServerEvent.DISCONNECT),
# [19:03:57] whoa disconnected.
(re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?) got the(?: \\*(.*?))? flag\\!\r?\n'), ServerEvent.FLAG_GRAB),
# [19:03:57] *whoa got the *Red flag!
(re.compile('^\\[\d\d:\d\d:\d\d\\] \\*(.*?) dropped the flag\\!\r?\n'), ServerEvent.FLAG_DROP),
# [19:03:57] *whoa dropped the flag!
(re.compile('^\\[\d\d:\d\d:\d\d\\] (.*?) team wins the round\\!\r?\n'), ServerEvent.ROUND_END),
# [14:38:50] Blue team wins the round!
(re.compile('^\\[\d\d:\d\d:\d\d\\] === ((?:Deathmatch)|(?:Team Flag CTF)|(?:Single Flag CTF)|(?:Team Siege)|(?:Team Elim)|(?:Team Siege)|(?:Team Deathmatch)|(?:Team KOTH)|(?:Pong)) ===\r?\n'), ServerEvent.GAMEMODE),
# [09:58:11] === Team Flag CTF ===
# [13:16:19] === Team Siege ===
# [21:53:54] === Pong ===
# [12:21:05] === Deathmatch ===
])
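# Every log line is tested against each pattern above (see Server.__parse_line);
# the OrderedDict only keeps the match-checking order deterministic.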
class Player(object):
"""
Player info from sv players command
:Attributes:
* dplogin - dplogin.com account id, None when Player has no account
* nick - nickname
* build - game build
* id - ingame player id
* server - an instance of :class:`Server`
"""
def __init__(self, server, id, dplogin, nick, build):
self.server = server
self.id = id
self.dplogin = dplogin
self.nick = nick
self.build = build
class Server(object):
"""
Represents a DP:PB2 server
:param hostname: Server hostname, for example '127.0.0.1'
:type hostname: str
:param port: Server port, default 27910
:type port: int
:param logfile: Path to logfile
:param rcon_password: rcon password
:param pty_master: Master pty file descriptor of the dp2 process (useful only if you run the server from your Python script). Go to the getting started section for details.
:type pty_master: int
:param init_vars: Send some initial commands used for security
"""
def __init__(self, hostname, port=27910, logfile=None, rcon_password=None, pty_master=None, init_vars=True):
self.__rcon_password = rcon_password
self.__hostname = hostname
self.__init_vars = init_vars
self.__port = port
self.__log_file = None
self.__is_secure = False
self.__alive = False
self.__logfile_name = logfile if not pty_master else None
self.__pty_master = pty_master
self.handlers = {
ServerEvent.CHAT: 'on_chat',
ServerEvent.ELIM: 'on_elim',
ServerEvent.RESPAWN: 'on_respawn',
ServerEvent.ENTRANCE: 'on_entrance',
ServerEvent.FLAG_CAPTURED: 'on_flag_captured',
ServerEvent.ELIM_TEAMS_FLAG: 'on_elim_teams_flag',
ServerEvent.ROUND_STARTED: 'on_round_started',
ServerEvent.TEAM_SWITCHED: 'on_team_switched',
ServerEvent.GAME_END: 'on_game_end',
ServerEvent.MAPCHANGE: 'on_mapchange',
ServerEvent.NAMECHANGE: 'on_namechange',
ServerEvent.DISCONNECT: 'on_disconnect',
ServerEvent.FLAG_GRAB: 'on_flag_grab',
ServerEvent.FLAG_DROP: 'on_flag_drop',
ServerEvent.ROUND_END: 'on_round_end',
ServerEvent.GAMEMODE: 'gamemode',
}
self.__listeners = {
ServerEvent.CHAT: [],
ServerEvent.ELIM: [],
ServerEvent.RESPAWN: [],
ServerEvent.ENTRANCE: [],
ServerEvent.FLAG_CAPTURED: [],
ServerEvent.ELIM_TEAMS_FLAG: [],
ServerEvent.ROUND_STARTED: [],
ServerEvent.TEAM_SWITCHED: [],
ServerEvent.GAME_END: [],
ServerEvent.MAPCHANGE: [],
ServerEvent.NAMECHANGE: [],
ServerEvent.DISCONNECT: [],
ServerEvent.FLAG_GRAB: [],
ServerEvent.FLAG_DROP: [],
ServerEvent.ROUND_END: [],
ServerEvent.GAMEMODE: [],
}
self.loop = asyncio.get_event_loop()
def is_listening(self):
"""
Check if the main loop is running.
:rtype: bool
"""
return self.__alive
@asyncio.coroutine
def on_chat(self, nick, message):
"""
On chat, can be overridden using the :func:`.Server.event` decorator.
:param nick: Player's nick.
:type nick: str
:param message: Message.
:type message: str
"""
pass
@asyncio.coroutine
def on_flag_captured(self, team, nick, flag):
"""
On flag captured, can be overridden using the :func:`.Server.event` decorator.
:param team: Player's team.
:type team: str
:param nick: Player's nick.
:type nick: str
:param flag: Captured flag (Blue|Red|Yellow|Purple|White)
:type flag: str
"""
pass
@asyncio.coroutine
def on_team_switched(self, nick, old_team, new_team):
"""
On team switched, can be overridden using the :func:`.Server.event` decorator.
:param nick: Player's nick
:type nick: str
:param old_team: Old team (Blue|Red|Yellow|Purple|Observer)
:type old_team: str
:param new_team: New team (Blue|Red|Yellow|Purple|Observer)
:type new_team: str
"""
pass
@asyncio.coroutine
def on_round_started(self):
"""
On round started, can be overridden using the :func:`.Server.event` decorator.
"""
pass
@asyncio.coroutine
def on_elim_teams_flag(self, team, nick, points):
"""
On scored points for possession of an eliminated team's flag, can be overridden using the :func:`.Server.event` decorator.
:param team: Player's team.
:type team: str
:param nick: Player's nick.
:type nick: str
:param points: Points earned.
:type points: int
"""
pass
@asyncio.coroutine
def on_entrance(self, nick, build, addr):
"""
On entrance, can be overridden using the :func:`.Server.event` decorator.
:param nick: Player's nick
:type nick: str
:param build: Player's game version ('build 41', for example)
:type build: str
:param addr: Player's address, IP:PORT ('127.0.0.1:23414' for example)
:type addr: str
"""
pass
@asyncio.coroutine
def on_game_end(self, score_blue, score_red, score_yellow, score_purple):
"""
On game end, can be overridden using the :func:`.Server.event` decorator.
:param score_blue: Blue's score - None if there was no Blue team.
:param score_red: Red's score - None if there was no Red team.
:param score_yellow: Yellow's score - None if there was no Yellow team.
:param score_purple: Purple's score - None if there was no Purple team.
"""
pass
@asyncio.coroutine
def on_elim(self, killer_nick, killer_weapon, victim_nick, victim_weapon, suicide):
"""
On elim can be overridden using the :func:`.Server.event` decorator.
:param killer_nick: Killer's nick
:type killer_nick: str
:param killer_weapon: Killer's weapon
:type killer_weapon: str
:param victim_nick: Victim's nick
:type victim_nick: str
:param victim_weapon: Victim's weapon
:type victim_weapon: str
:param suicide: Set when the victim eliminated themselves, empty otherwise.
"""
pass
@asyncio.coroutine
def on_respawn(self, team, nick):
"""
On respawn, can be overridden using the :func:`.Server.event` decorator.
:param team: Player's team (Blue|Red|Yellow|Purple)
:type team: str
:param nick: Player's nick
:type nick: str
"""
pass
@asyncio.coroutine
def on_mapchange(self, mapname):
"""
On mapchange, can be overridden using the :func:`.Server.event` decorator.
:param mapname: Mapname
:type mapname: str
"""
pass
@asyncio.coroutine
def on_namechange(self, old_nick, new_nick):
"""
On name change, can be overridden using the :func:`.Server.event` decorator.
:param old_nick: Old nick
:type old_nick: str
:param new_nick: New nick
:type new_nick: str
"""
pass
@asyncio.coroutine
def on_disconnect(self, nick):
"""
On disconnect, can be overridden using the :func:`.Server.event` decorator.
:param nick: Disconnected player's nick
:type nick: str
"""
pass
@asyncio.coroutine
def on_flag_grab(self, nick, flag):
"""
On flag grab, can be overridden using the :func:`.Server.event` decorator.
:param nick: Player's nick
:type nick: str
:param flag: Flag color (Blue|Red|Yellow|Purple)
:type flag: str
"""
pass
@asyncio.coroutine
def on_flag_drop(self, nick):
"""
On flag drop, can be overridden using the :func:`.Server.event` decorator.
:param nick: Player's nick
:type nick: str
"""
pass
@asyncio.coroutine
def on_round_end(self):
"""
On round end, can be overridden using the :func:`.Server.event` decorator.
"""
pass
@asyncio.coroutine
def gamemode(self, gamemode):
"""
On gamemode announcement, can be overridden using the :func:`.Server.event` decorator.
:param gamemode: map's gamemode
:type gamemode: str
"""
pass
def event(self, func):
"""
Decorator, used for event registration.
:param func: function to register
:rtype: builtin_function_or_method
:example:
.. code-block:: python
:linenos:
>>> from dplib.server import Server
>>> s = Server(hostname='127.0.0.1', port=27910, logfile=r'qconsole27910.log', rcon_password='hello')
>>> @s.event
... def on_chat(nick, message):
... print((nick, message))
...
>>> s.run()
('mRokita', 'Hi')
"""
if func.__name__ in self.handlers.values():
setattr(self, func.__name__, asyncio.coroutine(func))
return func
else:
raise Exception('Event \'%s\' doesn\'t exist' % func.__name__)
def stop_listening(self):
"""
Stop the main loop
"""
self.__alive = False
def __perform_listeners(self, event_type, args, kwargs):
"""
Notifies all pending listeners whose predicate matches the event.
:param event_type: Event type, one of members :class:`ServerEvent`
:param args: Event info
:type args: tuple
:param kwargs: Event info
:type kwargs: dict
"""
to_remove = list()
for i, (check, future) in enumerate(self.__listeners[event_type]):
if not future.cancelled() and not future.done():
if check(*args):
future.set_result(kwargs)
else:
to_remove.append(i)
for i in reversed(to_remove):
self.__listeners[event_type].pop(i)
def nicks_valid(self, *nicks):
nicks_ingame = [p.nick for p in self.get_players()]
for nick in nicks:
if nick not in nicks_ingame:
return False
return True
@asyncio.coroutine
def __handle_event(self, event_type, args):
"""
Handles an event.
:param event_type: Event type, one of members :class:`ServerEvent`
:param args: Event info (re.findall() results)
"""
kwargs = dict()
if event_type == ServerEvent.CHAT:
if args[0] not in [p.nick for p in self.get_players()]:
return
kwargs = {
'nick': args[0],
'message': args[1],
}
self.__perform_listeners(ServerEvent.CHAT, args, kwargs)
elif event_type == ServerEvent.ELIM:
kwargs = {
'killer_nick': args[0],
'killer_weapon': args[1],
'victim_nick': args[2],
'victim_weapon': args[3],
'suicide': args[4],
}
self.__perform_listeners(ServerEvent.ELIM, args, kwargs)
elif event_type == ServerEvent.RESPAWN:
kwargs = {
'team': args[0],
'nick': args[1],
}
self.__perform_listeners(ServerEvent.RESPAWN, args, kwargs)
elif event_type == ServerEvent.ENTRANCE:
kwargs = {
'nick': args[0],
'build': args[1],
'addr': args[2],
}
self.__perform_listeners(ServerEvent.ENTRANCE, args, kwargs)
elif event_type == ServerEvent.FLAG_CAPTURED:
kwargs = {
'team': args[0],
'nick': args[1],
'flag': args[2],
}
self.__perform_listeners(ServerEvent.FLAG_CAPTURED, args, kwargs)
elif event_type == ServerEvent.ELIM_TEAMS_FLAG:
kwargs = {
'team': args[0],
'nick': args[1],
'points': int(args[2]),
}
self.__perform_listeners(ServerEvent.ELIM_TEAMS_FLAG, args, kwargs)
elif event_type == ServerEvent.ROUND_STARTED:
kwargs = dict()
self.__perform_listeners(ServerEvent.ROUND_STARTED, args, kwargs)
elif event_type == ServerEvent.TEAM_SWITCHED:
new_args = tuple([arg for arg in args if arg])
kwargs = {
'nick': new_args[0],
'old_team': new_args[1] if len(new_args) > 2 else 'Observer',
'new_team': new_args[2] if len(new_args) > 2 else new_args[1]
}
if kwargs['new_team'] == 'observing':
kwargs['new_team'] = 'Observer'
kwargs['old_team'] = None
self.__perform_listeners(ServerEvent.TEAM_SWITCHED, new_args, kwargs)
elif event_type == ServerEvent.GAME_END:
kwargs = {
'score_blue': None,
'score_red': None,
'score_purple': None,
'score_yellow': None,
}
teams = args.split(',')
for t in teams:
data = t.split(':')
if data[0] == 'Blue':
kwargs['score_blue'] = data[1]
elif data[0] == 'Red':
kwargs['score_red'] = data[1]
elif data[0] == 'Yellow':
kwargs['score_yellow'] = data[1]
elif data[0] == 'Purple':
kwargs['score_purple'] = data[1]
self.__perform_listeners(ServerEvent.GAME_END,
(kwargs['score_blue'],
kwargs['score_red'],
kwargs['score_yellow'],
kwargs['score_purple']), kwargs)
elif event_type == ServerEvent.MAPCHANGE:
kwargs = {
'mapname': args
}
self.__perform_listeners(ServerEvent.MAPCHANGE, (kwargs['mapname'],), kwargs)
elif event_type == ServerEvent.NAMECHANGE:
kwargs = {
'old_nick': args[0],
'new_nick': args[1]
}
self.__perform_listeners(ServerEvent.NAMECHANGE, (kwargs['old_nick'], kwargs['new_nick']), kwargs)
elif event_type == ServerEvent.DISCONNECT:
kwargs = {
'nick': args
}
self.__perform_listeners(ServerEvent.DISCONNECT, (kwargs['nick'],), kwargs)
elif event_type == ServerEvent.FLAG_GRAB:
kwargs = {
'nick': args[0],
'flag': args[1],
}
self.__perform_listeners(ServerEvent.FLAG_GRAB, (kwargs['nick'], kwargs['flag']), kwargs)
elif event_type == ServerEvent.FLAG_DROP:
kwargs = {
'nick': args
}
self.__perform_listeners(ServerEvent.FLAG_DROP, (kwargs['nick'],), kwargs)
elif event_type == ServerEvent.ROUND_END:
kwargs = dict()
self.__perform_listeners(ServerEvent.ROUND_END, args, kwargs)
elif event_type == ServerEvent.GAMEMODE:
kwargs = {
'gamemode': args
}
self.__perform_listeners(ServerEvent.GAMEMODE, args, kwargs)
asyncio.ensure_future(self.get_event_handler(event_type)(**kwargs))
def get_event_handler(self, event_type):
return getattr(self, self.handlers[event_type])
@asyncio.coroutine
def __parse_line(self, line):
"""
Tries to match line with all event regexps.
:param line: Line from logs
"""
for r in REGEXPS:
results = r.findall(line)
e = REGEXPS[r]
for res in results:
if e == ServerEvent.CHAT: # For security reasons
if self.nicks_valid(res[0]):
yield from self.__handle_event(event_type=e, args=res)
return
else:
continue
yield from self.__handle_event(event_type=e, args=res)
def rcon(self, command, socket_timeout=3):
"""
Execute a console command using RCON.
:param command: Command
:param socket_timeout: Timeout for the UDP socket.
:return: Response from server
:rtype: str
:example:
.. code-block:: python
:linenos:
>>> from dplib.server import Server
>>> s = Server(hostname='127.0.0.1', port=27910, logfile=r'qconsole27910.log', rcon_password='hello')
>>> s.rcon('sv listuserip')
'ÿÿÿÿprint\\n mRokita [127.0.0.1:9419]\\nadmin is listing IP for mRokita [127.0.0.1:9419]\\n'
"""
sock = socket(AF_INET, SOCK_DGRAM)
sock.connect((self.__hostname, self.__port))
sock.settimeout(socket_timeout)
sock.send(bytes('\xFF\xFF\xFF\xFFrcon {} {}\n'.format(self.__rcon_password, command).encode('latin-1')))
ret = sock.recv(2048).decode('latin-1')
return ret
def status(self):
"""
Execute status query.
:return: Status string
:rtype: str
"""
sock = socket(AF_INET, SOCK_DGRAM)
sock.connect((self.__hostname, self.__port))
sock.settimeout(3)
sock.send(b'\xFF\xFF\xFF\xFFstatus\n')
return sock.recv(2048).decode('latin-1')
def new_map(self, map_name, gamemode=None):
"""
Changes the map using sv newmap <mapname> <gamemode>
:param map_name: map name, without .bsp
:param gamemode: Game mode
:type gamemode: GameMode
:return: Rcon response
:raises MapNotFoundError: When map is not found on the server
:rtype: str
"""
command = 'sv newmap {map}'
if gamemode:
command += ' {gamemode}'
res = self.rcon(command.format(map=map_name, gamemode=gamemode))
if 'Cannot find mapfile' in res or 'usage' in res:
raise MapNotFoundError
return res
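# Illustrative call (map and mode are placeholders; 'airtime' appears in the
# sample log lines above): server.new_map('airtime', gamemode=GameMode.CTF.value).
# Passing GameMode.<mode>.value yields the short string the command expects.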
def permaban(self, ip=None):
"""
Bans an IP address or a range of addresses and saves the ban list to disk.
:param ip: IP address to ban
:return: Rcon response
:rtype: str
"""
if ip:
resp = self.rcon('addip %s' % ip)
resp += '\n' + self.rcon('writeban')
return resp
else:
raise TypeError('IP address is required.')
def remove_permaban(self, ip=None):
"""
Removes ban on IP address and saves ban list to disk.
:param ip: IP address to unban
:return: Rcon response
:rtype: str
"""
if ip:
resp = self.rcon('removeip %s' % ip)
resp += '\n' + self.rcon('writeban')
return resp
else:
raise TypeError('IP address is required.')
def tempoban(self, id=None, nick=None, duration=3):
"""
Temporarily bans a player with the specified id using rcon.
:param id: Player's id
:param nick: Player's nick
:param duration: Ban duration in minutes (defaults to 3)
:return: Rcon response
:rtype: str
"""
if type(duration) != int:
raise TypeError('Ban duration should be an integer, not a ' + str(type(duration)))
if nick:
id = self.get_ingame_info(nick).id
if id:
return self.rcon('tban %s %s' % (id, str(duration)))
else:
raise TypeError('Player id or nick is required.')
def remove_tempobans(self):
"""
Removes all temporary bans
:return: Rcon response
:rtype: str
"""
return self.rcon("removetbans")
def kick(self, id=None, nick=None):
"""
Kicks a player with id using rcon.
:param id: Player's id
:param nick: Player's nick
:return: Rcon response
:rtype: str
"""
if nick:
id = self.get_ingame_info(nick).id
if id:
return self.rcon('kick %s' % id)
else:
raise TypeError('Player id or nick is required.')
def say(self, message):
"""
Say a message
:param message: Text, can contain {C} - color char, {U} - underline char, {I} - italic char.
Remember to escape user input using :func:`dplib.parse.escape_braces`.
:rtype: str
:return: Rcon response
:example:
.. code-block:: python
:linenos:
>>> from dplib.server import Server
>>> s = Server(hostname='127.0.0.1', port=27910, logfile=r'qconsole27910.log', rcon_password='hello')
>>> s.say('{C}ARed text')
>>> s.say('{U}Underline{U}')
>>> s.say('{I}Italic{I}')
:ingame result:
.. image:: ..\..\doc\images\say_test.png
"""
return self.rcon('say "%s"' % render_text(message))
def cprint(self, message):
"""
Cprints a message.
:param message: Text, can contain {C} - color char, {U} - underline char, {I} - italic char.
Remember to escape user input using :func:`dplib.parse.escape_braces`.
:return: Rcon response
:rtype: str
"""
return self.rcon('sv cprint "%s"' % render_text(message))
def set_cvar(self, var, value):
"""
Set a server cvar
:param var: cvar name
:param value: value to set
:return: Rcon response
:rtype: str
"""
return self.rcon('set %s "%s"' % (var, value))
def get_cvar(self, var):
"""
Gets cvar value
:param var: Variable name
:type var: str
:return: Cvar value
:rtype: str
"""
res = self.rcon('"%s"' % var)
if re.match('^....print\\\nUnknown command \\"%s"\\.\\\n' % re.escape(var), res):
raise NameError('Cvar "%s" does not exist' % var)
return re.findall('^....print\\\n\\"%s\\" is \\"(.*?)\\"\\\n' % re.escape(var), res)[0]
@staticmethod
def __get_predicate(margs, check):
"""
Returns a comparator.
:param margs: Args to check
:param check: Check function
:return: A function that combines equality checks against margs with the optional check function
"""
def predicate(*args):
if len(args) != len(margs):
raise TypeError('predicate() takes %d positional arguments but %d were given' % (len(margs), len(args)))
result = True
for i, a in enumerate(margs):
if a:
result = result and a == args[i]
if callable(check):
result = result and check(*args)
return result
return predicate
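# Example: __get_predicate(('mRokita', None), None) returns a predicate that
# accepts any two-argument event whose first argument equals 'mRokita';
# None entries match anything, and an optional check callable is ANDed in.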
@asyncio.coroutine
def wait_for_entrance(self, timeout=None, nick=None, build=None, addr=None, check=None):
"""
Waits for entrance.
:param timeout: Time to wait for entrance event, if exceeded, returns None.
:param nick: Player's nick.
:param build: Player's build.
:param addr: Player's address (IP:PORT)
:param check: Check function, ignored if None.
:return: Returns entrance info dict keys: ('nick', 'build', 'addr').
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (nick, build, addr)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.ENTRANCE].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_respawn(self, timeout=None, team=None, nick=None, check=None):
"""
Waits for respawn event.
:param timeout: Time to wait for respawn event, if exceeded, returns None.
:param team: Player's team.
:param nick: Player's nick.
:param check: Check function, ignored if none.
:return: Returns message info dict keys: ('team', 'nick').
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (team, nick)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.RESPAWN].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_elim_teams_flag(self, timeout=None, team=None, nick=None, points=None, check=None):
"""
Waits for elim teams flag event.
:param timeout: Time to wait for event, if exceeded, returns None.
:param team: Player's team.
:param nick: Player's nick.
:param points: Points scored.
:type points: int
:param check: Check function, ignored if none.
:return: Returns message info dict keys: ('team', 'nick', 'points').
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (team, nick, points)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.ELIM_TEAMS_FLAG].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_team_switched(self, timeout=None, nick=None, old_team=None, new_team=None, check=None):
"""
Waits for team switch event.
:param timeout: Time to wait for event, if exceeded, returns None.
:param old_team: Player's old team.
:param new_team: Player's new team.
:param nick: Player's nick.
:param check: Check function, ignored if none.
:return: Returns message info dict keys: ('nick', 'old_team', 'new_team').
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (nick, old_team, new_team)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.TEAM_SWITCHED].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_round_started(self, timeout=None, check=None):
"""
Waits for round start.
:param timeout: Time to wait for event, if exceeded, returns None.
:param check: Check function, ignored if none.
:return: Returns an empty dict.
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = tuple()
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.ROUND_STARTED].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_flag_captured(self, timeout=None, team=None, nick=None, flag=None, check=None):
"""
Waits for flag capture.
:param timeout: Time to wait for event, if exceeded, returns None.
:param team: Player's team.
:param nick: Player's nick.
:param flag: Captured flag.
:param check: Check function, ignored if none.
:return: Returns flag capture info dict keys: ('team', 'nick', 'flag').
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (team, nick, flag)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.FLAG_CAPTURED].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_game_end(self, timeout=None, score_blue=None, score_red=None, score_yellow=None, score_purple=None, check=None):
"""
Waits for game end.
:param timeout: Time to wait for event, if exceeded, returns None.
:param score_blue: Blue score
:param score_red: Red score.
:param score_yellow: Yellow score.
:param score_purple: Purple score.
:param check: Check function, ignored if none.
:return: Returns game end info dict keys: ('score_blue', 'score_red', 'score_yellow', 'score_purple').
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (score_blue, score_red, score_yellow, score_purple)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.GAME_END].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
@asyncio.coroutine
def wait_for_elim(self, timeout=None, killer_nick=None, killer_weapon=None, victim_nick=None, victim_weapon=None,
check=None):
"""
Waits for elimination event.
:param timeout: Time to wait for elimination event, if exceeded, returns None.
:param killer_nick: Killer's nick to match, ignored if None.
:param killer_weapon: Killer's weapon to match, ignored if None.
:param victim_nick: Victim's nick to match, ignored if None.
:param victim_weapon: Victim's weapon to match, ignored if None.
:param check: Check function, ignored if None.
:return: Returns message info dict keys: ('killer_nick', 'killer_weapon', 'victim_nick', 'victim_weapon')
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (killer_nick, killer_weapon, victim_nick, victim_weapon)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.ELIM].append((predicate, future))
try:
elim_info = yield from asyncio.wait_for(future, timeout, loop=self.loop)
except asyncio.TimeoutError:
elim_info = None
return elim_info
@asyncio.coroutine
def wait_for_mapchange(self, timeout=None, mapname=None, check=None):
"""
Waits for mapchange.
:param timeout: Time to wait for mapchange event, if exceeded, returns None.
:param mapname: Map name to match, ignored if None.
:param check: Check function, ignored if None.
:return: Returns mapchange info dict keys: ('mapname',)
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (mapname,)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.MAPCHANGE].append((predicate, future))
try:
mapchange_info = yield from asyncio.wait_for(future, timeout, loop=self.loop)
except asyncio.TimeoutError:
mapchange_info = None
return mapchange_info
@asyncio.coroutine
def wait_for_namechange(self, timeout=None, old_nick=None, new_nick=None, check=None):
"""
Waits for a name change.
:param timeout: Time to wait for namechange event, if exceeded, returns None.
:param old_nick: Old nick to match, ignored if None.
:param new_nick: New nick to match, ignored if None.
:param check: Check function, ignored if None.
:return: Returns namechange info dict keys: ('old_nick', 'new_nick')
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (old_nick, new_nick)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.NAMECHANGE].append((predicate, future))
try:
mapchange_info = yield from asyncio.wait_for(future, timeout, loop=self.loop)
except asyncio.TimeoutError:
mapchange_info = None
return mapchange_info
@asyncio.coroutine
def wait_for_message(self, timeout=None, nick=None, message=None, check=None):
"""
Waits for a message.
:param timeout: Time to wait for message, if exceeded, returns None.
:param nick: Player's nick to match, ignored if None
:type nick: str
:param message: Message text to match, ignored if None
:type message: str
:param check: Check function, ignored if None
:return: Returns message info dict keys: ('nick', 'message')
:rtype: dict
:example:
.. code-block:: python
:linenos:
@s.event
def on_chat(nick, message):
if message == '!start' and not elim_active:
msg = yield from s.wait_for_message(check=lambda n, m: m.startswith('!hi '))
s.say('Hi ' + msg['message'].split('!hi ')[1] + '!')
"""
future = asyncio.Future(loop=self.loop)
margs = (nick, message)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.CHAT].append((predicate, future))
try:
message = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
message = None
return message
@asyncio.coroutine
def wait_for_flag_drop(self, timeout=None, nick=None, check=None):
"""
Waits for flag drop.
:param timeout: Time to wait for event, if exceeded, returns None.
:param nick: Player's nick.
:param check: Check function, ignored if none.
:return: Returns flag drop info dict keys: ('nick',).
:rtype: dict
"""
future = asyncio.Future(loop=self.loop)
margs = (nick,)
predicate = self.__get_predicate(margs, check)
self.__listeners[ServerEvent.FLAG_DROP].append((predicate, future))
try:
data = yield from asyncio.wait_for(future, timeout,
loop=self.loop)
except asyncio.TimeoutError:
data = None
return data
def start(self, scan_old=False, realtime=True, debug=False):
"""
Main loop.
:param scan_old: Scan present logfile data
:type scan_old: bool
:param realtime: Wait for incoming logfile data
:type realtime: bool
:param debug: Print every parsed log line
:type debug: bool
"""
if not (self.__logfile_name or self.__pty_master):
raise AttributeError("Logfile name or a Popen process is required.")
self.__alive = True
if self.__logfile_name:
self.__log_file = open(self.__logfile_name, 'rb')
if self.__log_file and scan_old:
self.__log_file.readlines()
buf = ''
if realtime:
while self.__alive:
try:
buf += self._read_log()
lines = buf.splitlines(True)
line = ''
for line in lines:
if debug:
print("[DPLib] %s" % line.strip())
yield from self.__parse_line(line)
if not line or line[-1] != '\n':
buf = line
else:
buf = ''
yield from asyncio.sleep(0.05)
except OSError as e:
raise e
if self.__log_file:
self.__log_file.close()
if self.__pty_master:
os.close(self.__pty_master)
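# start() is a coroutine; it is normally driven by Server.run() below, but it
# can be scheduled manually, e.g. s.loop.run_until_complete(s.start()).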
def _read_log(self):
if self.__log_file:
return self.__log_file.readline().decode('latin-1')
elif self.__pty_master:
r, w, x = select.select([self.__pty_master], [], [], 0.01)
if r:
return os.read(self.__pty_master, 1024).decode('latin-1')
else:
return ''
def get_players(self):
"""
Gets playerlist.
:return: List of :class:`.Player` instances
:rtype: list
"""
response = self.rcon('sv players')
response = re.findall('(\d+) \\(?(.*?)\\)?\\] \\* (?:OP \d+, )?(.+) \\((b\d+)\\)', response)
players = list()
for p_data in response:
player = Player(nick=p_data[2],
id=p_data[0],
dplogin=p_data[1],
build=p_data[3],
server=self)
players.append(player)
return players
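# Illustrative use (assumes a reachable server and a valid rcon_password):
# for p in server.get_players():
#     print('%s %s (%s)' % (p.id, p.nick, p.build))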
def get_simple_playerlist(self):
"""
Get a list of player names
:return: List of nicks
:rtype: list
"""
status = self.get_status()
players = status['players']
playerlist = []
for p in players:
playerlist.append(p['name'])
return playerlist
def get_status(self):
"""
Gets server status
:example:
.. code-block:: python
:linenos:
>>> s = Server(hostname='127.0.0.1', port=27910, logfile=r'C:\Games\Paintball2\pball\qconsole27910.log', rcon_password='hello')
>>> s.get_status()
{'players': [{'score': '0', 'ping': '13', 'name': 'mRokita'}], 'sv_certificated': '1', 'mapname': 'beta/wobluda_fix', 'TimeLeft': '20:00', '_scores': 'Red:0 Blue:0 ', 'gamename': 'Digital Paint Paintball 2 v1.930(186)', 'gameversion': 'DPPB2 v1.930(186)', 'sv_login': '1', 'needpass': '0', 'gamedate': 'Aug 10 2015', 'protocol': '34', 'version': '2.00 x86 Aug 10 2015 Win32 RELEASE (41)', 'hostname': 'asdfgh', 'elim': 'airtime', 'fraglimit': '50', 'timelimit': '20', 'gamedir': 'pball', 'game': 'pball', 'maxclients': '8'}
:return: status dict
:rtype: dict
"""
dictionary = {}
players = []
response = self.status().split('\n')[1:]
variables = response[0]
players_str = (response[1:])
for i in players_str:
if not i:
continue
temp_dict = {}
cleaned_name = decode_ingame_text(i)
separated = cleaned_name.split(' ')
temp_dict['score'] = separated[0]
temp_dict['ping'] = separated[1]
temp_dict['name'] = cleaned_name.split("%s %s " % (separated[0], separated[1]))[1][1:-1]
players.append(temp_dict)
dictionary['players'] = players
variables = variables.split('\\')[1:]
for i in range(0, len(variables), 2):
dictionary[variables[i]] = variables[i + 1]
return dictionary
def get_ingame_info(self, nick):
"""
Get ingame info about a player with nickname
:param nick: Nick
:return: An instance of :class:`.Player`
"""
players = self.get_players()
for p in players:
if p.nick == nick:
return p
return None
def make_secure(self, timeout=10):
"""
This function fixes some compatibility and security issues on DP server side
- Adds "mapchange" to sv_blockednames
- Sets sl_logging to 1
All variables are set using the rcon protocol, use this function if you want to wait for the server to start.
:param timeout: Timeout in seconds
"""
sl_logging_set = False
sv_blockednames_set = False
self.__is_secure = False
start_time = time()
while not (sl_logging_set and sv_blockednames_set) and time() - start_time < timeout:
try:
if not sl_logging_set:
sl_logging = self.get_cvar('sl_logging')
if sl_logging != '1':
self.set_cvar('sl_logging', '1')
else:
sl_logging_set = True
if not sv_blockednames_set:
blockednames = self.get_cvar('sv_blockednames')
if 'maploaded' not in blockednames:
self.set_cvar('sv_blockednames', ','.join([blockednames, 'maploaded']))
else:
sv_blockednames_set = True
except OSError: # ConnectionError and socket.timeout are both OSError subclasses
pass
if not (sl_logging_set and sv_blockednames_set):
raise SecurityCheckError(
"Configuring the DP server failed,"
" check if the server is running "
"and the rcon_password is correct.")
else:
self.__is_secure = True
def run(self, scan_old=False, realtime=True, debug=False, make_secure=True):
"""
Runs the main loop using asyncio.
:param scan_old: Scan present logfile data
:type scan_old: bool
:param realtime: Wait for incoming logfile data
:type realtime: bool
:param debug: Print every parsed log line
:type debug: bool
:param make_secure: Run :func:`make_secure` before starting; requires rcon_password
:type make_secure: bool
"""
if make_secure and not self.__rcon_password:
raise AttributeError(
"Setting the rcon_password is required to secure DPLib."
" You have to either set a rcon_password or add set"
" \"sl_logging 1; set sv_blockednames mapname\" "
"to your DP server config and use Server.run with"
" make_secure=False")
if make_secure:
self.make_secure()
self.loop.run_until_complete(self.start(scan_old, realtime, debug))
|
agpl-3.0
|
4shadoww/hakkuframework
|
core/lib/requests/packages/urllib3/packages/backports/makefile.py
|
339
|
1461
|
# -*- coding: utf-8 -*-
"""
backports.makefile
~~~~~~~~~~~~~~~~~~
Backports the Python 3 ``socket.makefile`` method for use with anything that
wants to create a "fake" socket object.
"""
import io
from socket import SocketIO
def backport_makefile(self, mode="r", buffering=None, encoding=None,
errors=None, newline=None):
"""
Backport of ``socket.makefile`` from Python 3.5.
"""
if not set(mode) <= set(["r", "w", "b"]):
raise ValueError(
"invalid mode %r (only r, w, b allowed)" % (mode,)
)
writing = "w" in mode
reading = "r" in mode or not writing
assert reading or writing
binary = "b" in mode
rawmode = ""
if reading:
rawmode += "r"
if writing:
rawmode += "w"
raw = SocketIO(self, rawmode)
self._makefile_refs += 1
if buffering is None:
buffering = -1
if buffering < 0:
buffering = io.DEFAULT_BUFFER_SIZE
if buffering == 0:
if not binary:
raise ValueError("unbuffered streams must be binary")
return raw
if reading and writing:
buffer = io.BufferedRWPair(raw, raw, buffering)
elif reading:
buffer = io.BufferedReader(raw, buffering)
else:
assert writing
buffer = io.BufferedWriter(raw, buffering)
if binary:
return buffer
text = io.TextIOWrapper(buffer, encoding, errors, newline)
text.mode = mode
return text
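# Typical use is to graft this method onto a socket-like class that maintains
# a _makefile_refs counter (class name is a placeholder):
# WrappedSocket.makefile = backport_makefile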
|
mit
|
0k/odoo
|
addons/event/wizard/__init__.py
|
435
|
1067
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import event_confirm
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
FlukeNetworks/snackers-kernel
|
tools/perf/scripts/python/futex-contention.py
|
11261
|
1486
|
# futex contention
# (c) 2010, Arnaldo Carvalho de Melo <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
process_names = {} # long-lived pid-to-execname mapping
thread_thislock = {}
thread_blocktime = {}
lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
nr, uaddr, op, val, utime, uaddr2, val3):
cmd = op & FUTEX_CMD_MASK
if cmd != FUTEX_WAIT:
return # we don't care about originators of WAKE events
process_names[tid] = comm
thread_thislock[tid] = uaddr
thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
nr, ret):
if tid in thread_blocktime:
elapsed = nsecs(s, ns) - thread_blocktime[tid]
add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
del thread_blocktime[tid]
del thread_thislock[tid]
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
for (tid, lock) in lock_waits:
min, max, avg, count = lock_waits[tid, lock]
print "%s[%d] lock %x contended %d times, %d avg ns" % \
(process_names[tid], tid, lock, count, avg)
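# Typical workflow (wrapper names can vary between perf versions):
#   perf script record futex-contention   # capture sys_enter/exit_futex events
#   perf script report futex-contention   # replay them through this script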
|
gpl-2.0
|
dbckz/ansible
|
lib/ansible/utils/module_docs_fragments/dellos6.py
|
303
|
2451
|
#
# (c) 2015, Peter Sprygada <[email protected]>
#
# Copyright (c) 2016 Dell Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = """
options:
provider:
description:
- A dict object containing connection details.
default: null
suboptions:
host:
description:
- Specifies the DNS host name or address for connecting to the remote
device over the specified transport. The value of host is used as
the destination address for the transport.
required: true
port:
description:
- Specifies the port to use when building the connection to the remote
device.
default: 22
username:
description:
- User to authenticate the SSH session to the remote device. If the
value is not specified in the task, the value of environment variable
C(ANSIBLE_NET_USERNAME) will be used instead.
password:
description:
- Password to authenticate the SSH session to the remote device. If the
value is not specified in the task, the value of environment variable
C(ANSIBLE_NET_PASSWORD) will be used instead.
default: null
ssh_keyfile:
description:
- Path to an ssh key used to authenticate the SSH session to the remote
device. If the value is not specified in the task, the value of
environment variable C(ANSIBLE_NET_SSH_KEYFILE) will be used instead.
timeout:
description:
- Specifies idle timeout (in seconds) for the connection. Useful if the
console freezes before continuing. For example when saving
configurations.
default: 10
"""
|
gpl-3.0
|
django-notifications/django-notifications
|
notifications/tests/urls.py
|
2
|
1821
|
''' Django notification urls for tests '''
# -*- coding: utf-8 -*-
from distutils.version import StrictVersion # pylint: disable=no-name-in-module,import-error
from django import get_version
from django.contrib import admin
from notifications.tests.views import (live_tester, # pylint: disable=no-name-in-module,import-error
make_notification)
if StrictVersion(get_version()) >= StrictVersion('2.1'):
from django.urls import include, path # noqa
from django.contrib.auth.views import LoginView
urlpatterns = [
path('test_make/', make_notification),
path('test/', live_tester),
path('login/', LoginView.as_view(), name='login'), # reverse for django login is not working
path('admin/', admin.site.urls),
path('', include('notifications.urls', namespace='notifications')),
]
elif StrictVersion(get_version()) >= StrictVersion('2.0') and StrictVersion(get_version()) < StrictVersion('2.1'):
from django.urls import include, path # noqa
from django.contrib.auth.views import login
urlpatterns = [
path('test_make/', make_notification),
path('test/', live_tester),
path('login/', login, name='login'), # reverse for django login is not working
path('admin/', admin.site.urls),
path('', include('notifications.urls', namespace='notifications')),
]
else:
from django.conf.urls import include, url
from django.contrib.auth.views import login
urlpatterns = [
url(r'^login/$', login, name='login'), # reverse for django login is not working
url(r'^test_make/', make_notification),
url(r'^test/', live_tester),
url(r'^', include('notifications.urls', namespace='notifications')),
url(r'^admin/', admin.site.urls),
]
|
bsd-3-clause
|
mgeorgehansen/FIFE_Technomage
|
engine/python/fife/extensions/fife_settings.py
|
1
|
15915
|
# -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2010 by the FIFE team
# http://www.fifengine.net
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
"""
Settings
==================================
This module provides a nice framework for loading and saving game settings.
It is by no means complete but it does provide a good starting point.
"""
import shutil
import os
from StringIO import StringIO
from fife.extensions import pychan
from fife.extensions.fife_utils import getUserDataDirectory
from fife.extensions.serializers.simplexml import SimpleXMLSerializer
SETTINGS_GUI_XML="""\
<Window name="Settings" title="Settings">
<Label text="Settings menu!" />
<HBox>
<VBox>
<Label text="Resolution:" />
<Label text="Renderer:" />
<Label text="Light Model:" />
</VBox>
<VBox min_size="120,60">
<DropDown name="screen_resolution" min_size="120,0" />
<DropDown name="render_backend" min_size="120,0" />
<DropDown name="lighting_model" min_size="120,0" />
</VBox>
</HBox>
<CheckBox name="enable_fullscreen" text="Use the full screen mode" />
<CheckBox name="enable_sound" text="Enable sound" />
<HBox>
<Spacer />
<Button name="cancelButton" text="Cancel" />
<Button name="okButton" text="Ok" />
<Button name="defaultButton" text="Defaults" />
</HBox>
</Window>
"""
CHANGES_REQUIRE_RESTART="""\
<Window title="Changes require restart">
<Label text="Some of your changes require you to restart." />
<HBox>
<Spacer />
<Button name="closeButton" text="Ok" />
</HBox>
</Window>
"""
FIFE_MODULE = "FIFE"
class Setting(object):
"""
This class manages loading and saving of game settings.
Usage::
from fife.extensions.fife_settings import Setting
settings = Setting(app_name="myapp")
screen_width = settings.get("FIFE", "ScreenWidth", 1024)
screen_height = settings.get("FIFE", "ScreenHeight", 768)
"""
def __init__(self, app_name="", settings_file="", default_settings_file= "settings-dist.xml", settings_gui_xml="", changes_gui_xml="", copy_dist=True, serializer=None):
"""
Initializes the Setting object.
@param app_name: The applications name. If this parameter is provided
alone it will try to read the settings file from the users home directory.
In windows this will be something like: C:\Documents and Settings\user\Application Data\fife
@type app_name: C{string}
@param settings_file: The name of the settings file. If this parameter is
provided it will look for the setting file as you specify it, first looking
in the working directory. It will NOT look in the users home directory.
@type settings_file: C{string}
@param default_settings_file: The name of the default settings file. If the settings_file
does not exist this file will be copied into the place of the settings_file. This file
must exist in the root directory of your project!
@type default_settings_file: C{string}
@param settings_gui_xml: If you specify this parameter you can customize the look
of the settings dialog box.
@param changes_gui_xml: If you specify this parameter you can customize the look
of the "changes require restart" dialog box.
@param copy_dist: Copies the default settings file to the settings_file location. If
this is False it will create a new empty setting file.
@param serializer: Overrides the default XML serializer
@type serializer: C{SimpleSerializer}
"""
self._app_name = app_name
self._settings_file = settings_file
self._default_settings_file = default_settings_file
self._settings_gui_xml = settings_gui_xml
self._changes_gui_xml = changes_gui_xml
self.OptionsDlg = None
# Holds SettingEntries
self._entries = {}
if self._settings_file == "":
self._settings_file = "settings.xml"
self._appdata = getUserDataDirectory("fife", self._app_name)
else:
self._appdata = os.path.dirname(self._settings_file)
self._settings_file = os.path.basename(self._settings_file)
if self._settings_gui_xml == "":
self._settings_gui_xml = SETTINGS_GUI_XML
if self._changes_gui_xml == "":
self._changes_gui_xml = CHANGES_REQUIRE_RESTART
if not os.path.exists(os.path.join(self._appdata, self._settings_file)):
if os.path.exists(self._default_settings_file) and copy_dist:
shutil.copyfile(self._default_settings_file, os.path.join(self._appdata, self._settings_file))
#default settings
self._resolutions = ['640x480', '800x600', '1024x768', '1280x800', '1440x900']
self._renderbackends = ['OpenGL', 'SDL']
self._lightingmodels = [0, 1, 2]
#Used to stylize the options gui
self._gui_style = "default"
#Initialize the serializer
if serializer:
self._serializer = serializer
else:
self._serializer = SimpleXMLSerializer()
self.initSerializer()
self._initDefaultSettingEntries()
def initSerializer(self):
self._serializer.load(os.path.join(self._appdata, self._settings_file))
def _initDefaultSettingEntries(self):
"""Initializes the default fife setting entries. Not to be called from
outside this class."""
self.createAndAddEntry(FIFE_MODULE, "PlaySounds", "enable_sound",
requiresrestart=True)
self.createAndAddEntry(FIFE_MODULE, "FullScreen", "enable_fullscreen",
requiresrestart=True)
self.createAndAddEntry(FIFE_MODULE, "ScreenResolution", "screen_resolution", initialdata = self._resolutions,
requiresrestart=True)
self.createAndAddEntry(FIFE_MODULE, "RenderBackend", "render_backend", initialdata = self._renderbackends,
requiresrestart=True)
self.createAndAddEntry(FIFE_MODULE, "Lighting", "lighting_model", initialdata = self._lightingmodels,
requiresrestart=True)
def createAndAddEntry(self, module, name, widgetname, applyfunction=None, initialdata=None, requiresrestart=False):
""""
@param module: The Setting module this Entry belongs to
@type module: C{String}
@param name: The Setting's name
@type name: C{String}
@param widgetname: The name of the widget that is used to change this
setting
@type widgetname: C{String}
@param applyfunction: function that makes the changes when the Setting is
saved
@type applyfunction: C{function}
@param initialdata: If the widget supports the setInitialData() function
this can be used to set the initial data
@type initialdata: C{String} or C{Boolean}
@param requiresrestart: Whether or not the changing of this setting
requires a restart
@type requiresrestart: C{Boolean}
"""
entry = SettingEntry(module, name, widgetname, applyfunction, initialdata, requiresrestart)
self.addEntry(entry)
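# Illustrative call ("player_name_field" is a placeholder widget name that
# must exist in the settings GUI XML):
# self.createAndAddEntry("myapp", "PlayerName", "player_name_field")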
def addEntry(self, entry):
"""Adds a new C{SettingEntry} to the Settting
@param entry: A new SettingEntry that is to be added
@type entry: C{SettingEntry}
"""
if entry.module not in self._entries:
self._entries[entry.module] = {}
self._entries[entry.module][entry.name] = entry
# Make sure the new entry is available
if self.get(entry.module, entry.name) is None:
print "Updating", self._settings_file, "to the default, it is missing the entry:"\
, entry.name ,"for module", entry.module
self.setDefaults()
if self.get(entry.module, entry.name) is None:
print "WARNING:", entry.module, ":", entry.name, "still not found!"
def saveSettings(self, filename=""):
""" Writes the settings to the settings file
@param filename: Specifies the file to save the settings to. If it is not specified
the original settings file is used.
@type filename: C{string}
"""
if self._serializer:
if filename == "":
self._serializer.save(os.path.join(self._appdata, self._settings_file))
else:
self._serializer.save(filename)
def get(self, module, name, defaultValue=None):
""" Gets the value of a specified setting
@param module: Name of the module to get the setting from
@param name: Setting name
@param defaultValue: Specifies the default value to return if the setting is not found
@type defaultValue: C{str} or C{unicode} or C{int} or C{float} or C{bool} or C{list} or C{dict}
"""
if self._serializer:
return self._serializer.get(module, name, defaultValue)
else:
return None
def set(self, module, name, value, extra_attrs={}):
"""
Sets a setting to specified value.
@param module: Module where the setting should be set
@param name: Name of setting
@param value: Value to assign to setting
@type value: C{str} or C{unicode} or C{int} or C{float} or C{bool} or C{list} or C{dict}
@param extra_attrs: Extra attributes to be stored in the XML-file
@type extra_attrs: C{dict}
"""
if self._serializer:
self._serializer.set(module, name, value, extra_attrs)
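	# Round-trip sketch (illustrative; assumes a Setting instance named
	# `settings`):
	#
	#   settings.set(FIFE_MODULE, "PlaySounds", False)
	#   settings.get(FIFE_MODULE, "PlaySounds", True)  # -> False
	#   settings.saveSettings()                        # persist to disk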
def setGuiStyle(self, style):
""" Set a custom gui style used for the option dialog.
@param style: Pychan style to be used
@type style: C{string}
"""
self._gui_style = style
def onOptionsPress(self):
"""
Opens the options dialog box. Usually you would bind this to a button.
"""
self.changesRequireRestart = False
self.isSetToDefault = False
if not self.OptionsDlg:
self.loadSettingsDialog()
self.fillWidgets()
self.OptionsDlg.show()
def loadSettingsDialog(self):
"""
Load up the settings xml and return the widget.
"""
self.OptionsDlg = self._loadWidget(self._settings_gui_xml)
self.OptionsDlg.stylize(self._gui_style)
self.OptionsDlg.mapEvents({
'okButton' : self.applySettings,
'cancelButton' : self.OptionsDlg.hide,
'defaultButton' : self.setDefaults
})
return self.OptionsDlg
def _loadWidget(self, dialog):
"""Loads a widget. Can load both files and pure xml strings"""
		if os.path.isfile(dialog):
return pychan.loadXML(dialog)
else:
return pychan.loadXML(StringIO(dialog))
def fillWidgets(self):
for module in self._entries.itervalues():
for entry in module.itervalues():
widget = self.OptionsDlg.findChildByName(entry.settingwidgetname)
value = self.get(entry.module, entry.name)
if type(entry.initialdata) is list:
try:
value = entry.initialdata.index(value)
except ValueError:
raise ValueError("\"" + value + "\" is not a valid value for " + entry.name + ". Valid options: " + str(entry.initialdata))
entry.initializeWidget(widget, value)
def applySettings(self):
"""
Writes the settings file. If a change requires a restart of the engine
it notifies you with a small dialog box.
"""
for module in self._entries.itervalues():
for entry in module.itervalues():
widget = self.OptionsDlg.findChildByName(entry.settingwidgetname)
data = widget.getData()
# If the data is a list we need to get the correct selected data
# from the list. This is needed for e.g. dropdowns or listboxs
if type(entry.initialdata) is list:
data = entry.initialdata[data]
# only take action if something really changed
if data != self.get(entry.module, entry.name):
self.set(entry.module, entry.name, data)
entry.onApply(data)
if entry.requiresrestart:
self.changesRequireRestart = True
self.saveSettings()
self.OptionsDlg.hide()
if self.changesRequireRestart:
self._showChangeRequireRestartDialog()
def _showChangeRequireRestartDialog(self):
"""Shows a dialog that informes the user that a restart is required
to perform the changes."""
RestartDlg = self._loadWidget(self._changes_gui_xml)
RestartDlg.stylize(self._gui_style)
RestartDlg.mapEvents({ 'closeButton' : RestartDlg.hide })
RestartDlg.show()
def setAvailableScreenResolutions(self, reslist):
"""
		Sets the list of screen resolutions that can be selected in the
		options dialog. This should be called once, right after you
		instantiate Settings.
		Valid screen resolutions must be strings of the form WIDTHxHEIGHT.
Example:
settings.setAvailableScreenResolutions(["800x600", "1024x768"])
"""
self._resolutions = reslist
def setDefaults(self):
"""
Overwrites the setting file with the default settings file.
"""
shutil.copyfile(self._default_settings_file, os.path.join(self._appdata, self._settings_file))
self.changesRequireRestart = True
self.initSerializer()
#update all widgets with the new data
self.fillWidgets()
def _getEntries(self):
return self._entries
def _setEntries(self, entries):
self._entries = entries
def _getSerializer(self):
return self._serializer
entries = property(_getEntries, _setEntries)
serializer = property(_getSerializer)
class SettingEntry(object):
def __init__(self, module, name, widgetname, applyfunction=None, initialdata=None, requiresrestart=False):
"""
@param module: The Setting module this Entry belongs to
@type module: C{String}
@param name: The Setting's name
@type name: C{String}
@param widgetname: The name of the widget that is used to change this
setting
@type widgetname: C{String}
@param applyfunction: function that makes the changes when the Setting is
saved
@type applyfunction: C{function}
@param initialdata: If the widget supports the setInitialData() function
this can be used to set the initial data
@type initialdata: C{String} or C{Boolean}
@param requiresrestart: Whether or not the changing of this setting
requires a restart
@type requiresrestart: C{Boolean}
"""
self._module = module
self._name = name
self._settingwidgetname = widgetname
self._requiresrestart = requiresrestart
self._initialdata = initialdata
self._applyfunction = applyfunction
def initializeWidget(self, widget, currentValue):
"""Initialize the widget with needed data"""
if self._initialdata is not None:
widget.setInitialData(self._initialdata)
widget.setData(currentValue)
def onApply(self, data):
"""Implement actions that need to be taken when the setting is changed
here.
"""
if self._applyfunction is not None:
self._applyfunction(data)
def _getModule(self):
return self._module
def _setModule(self, module):
self._module = module
def _getName(self):
return self._name
def _setName(self, name):
self._name = name
def _getSettingWidgetName(self):
return self._settingwidgetname
def _setSettingWidgetName(self, settingwidgetname):
self._settingwidgetname = settingwidgetname
def _getRequiresRestart(self):
return self._requiresrestart
def _setRequiresRestart(self, requiresrestart):
self._requiresrestart = requiresrestart
def _getInitialData(self):
return self._initialdata
def _setInitialData(self, initialdata):
self._initialdata = initialdata
def _getApplyFunction(self):
return self._applyfunction
def _setApplyFunction(self, applyfunction):
self._applyfunction = applyfunction
module = property(_getModule, _setModule)
name = property(_getName, _setName)
settingwidgetname = property(_getSettingWidgetName, _setSettingWidgetName)
requiresrestart = property(_getRequiresRestart, _setRequiresRestart)
initialdata = property(_getInitialData, _setInitialData)
applyfunction = property(_getApplyFunction, _setApplyFunction)
def __str__(self):
return "SettingEntry: " + self.name + " Module: " + self.module + " Widget: " + \
self.settingwidgetname + " requiresrestart: " + str(self.requiresrestart) + \
" initialdata: " + str(self.initialdata)
|
lgpl-2.1
|
lmazuel/ansible
|
lib/ansible/utils/encrypt.py
|
37
|
6565
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import multiprocessing
import os
import stat
import tempfile
import time
import warnings
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_text, to_bytes
PASSLIB_AVAILABLE = False
try:
import passlib.hash
PASSLIB_AVAILABLE = True
except ImportError:
pass
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
KEYCZAR_AVAILABLE = False
try:
try:
# some versions of pycrypto may not have this?
from Crypto.pct_warnings import PowmInsecureWarning
except ImportError:
PowmInsecureWarning = RuntimeWarning
with warnings.catch_warnings(record=True) as warning_handler:
warnings.simplefilter("error", PowmInsecureWarning)
try:
import keyczar.errors as key_errors
from keyczar.keys import AesKey
except PowmInsecureWarning:
display.system_warning(
"The version of gmp you have installed has a known issue regarding "
"timing vulnerabilities when used with pycrypto. "
"If possible, you should update it (i.e. yum update gmp)."
)
warnings.resetwarnings()
warnings.simplefilter("ignore")
import keyczar.errors as key_errors
from keyczar.keys import AesKey
KEYCZAR_AVAILABLE = True
except ImportError:
pass
__all__ = ['do_encrypt']
_LOCK = multiprocessing.Lock()
def do_encrypt(result, encrypt, salt_size=None, salt=None):
if PASSLIB_AVAILABLE:
try:
crypt = getattr(passlib.hash, encrypt)
        except AttributeError:
raise AnsibleError("passlib does not support '%s' algorithm" % encrypt)
if salt_size:
result = crypt.encrypt(result, salt_size=salt_size)
elif salt:
if crypt._salt_is_bytes:
salt = to_bytes(salt, encoding='ascii', errors='strict')
else:
salt = to_text(salt, encoding='ascii', errors='strict')
result = crypt.encrypt(result, salt=salt)
else:
result = crypt.encrypt(result)
else:
raise AnsibleError("passlib must be installed to encrypt vars_prompt values")
# Hashes from passlib.hash should be represented as ascii strings of hex
# digits so this should not traceback. If it's not representable as such
# we need to traceback and then blacklist such algorithms because it may
# impact calling code.
return to_text(result, errors='strict')
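# Usage sketch (illustrative; requires passlib, and 'sha512_crypt' is one of
# the passlib.hash algorithm names):
#
#   hashed = do_encrypt('secret', 'sha512_crypt', salt_size=8)
#
# The returned text is a crypt-style hash such as '$6$<salt>$<digest>'.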
def key_for_hostname(hostname):
    # fireball mode fires up zeromq via SSH so that no persistent daemons
    # or key management are needed on the remote host
if not KEYCZAR_AVAILABLE:
raise AnsibleError("python-keyczar must be installed on the control machine to use accelerated modes")
key_path = os.path.expanduser(C.ACCELERATE_KEYS_DIR)
if not os.path.exists(key_path):
# avoid race with multiple forks trying to create paths on host
# but limit when locking is needed to creation only
with(_LOCK):
if not os.path.exists(key_path):
# use a temp directory and rename to ensure the directory
# searched for only appears after permissions applied.
tmp_dir = tempfile.mkdtemp(dir=os.path.dirname(key_path))
os.chmod(tmp_dir, int(C.ACCELERATE_KEYS_DIR_PERMS, 8))
os.rename(tmp_dir, key_path)
elif not os.path.isdir(key_path):
raise AnsibleError('ACCELERATE_KEYS_DIR is not a directory.')
if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_DIR_PERMS, 8):
raise AnsibleError('Incorrect permissions on the private key directory. Use `chmod 0%o %s` to correct this issue, and make sure any of the keys files '
'contained within that directory are set to 0%o' % (int(C.ACCELERATE_KEYS_DIR_PERMS, 8), C.ACCELERATE_KEYS_DIR,
int(C.ACCELERATE_KEYS_FILE_PERMS, 8)))
key_path = os.path.join(key_path, hostname)
# use new AES keys every 2 hours, which means fireball must not allow running for longer either
if not os.path.exists(key_path) or (time.time() - os.path.getmtime(key_path) > 60 * 60 * 2):
# avoid race with multiple forks trying to create key
# but limit when locking is needed to creation only
with(_LOCK):
if not os.path.exists(key_path) or (time.time() - os.path.getmtime(key_path) > 60 * 60 * 2):
key = AesKey.Generate()
# use temp file to ensure file only appears once it has
# desired contents and permissions
with tempfile.NamedTemporaryFile(mode='w', dir=os.path.dirname(key_path), delete=False) as fh:
tmp_key_path = fh.name
fh.write(str(key))
os.chmod(tmp_key_path, int(C.ACCELERATE_KEYS_FILE_PERMS, 8))
os.rename(tmp_key_path, key_path)
return key
if stat.S_IMODE(os.stat(key_path).st_mode) != int(C.ACCELERATE_KEYS_FILE_PERMS, 8):
raise AnsibleError('Incorrect permissions on the key file for this host. Use `chmod 0%o %s` to '
'correct this issue.' % (int(C.ACCELERATE_KEYS_FILE_PERMS, 8), key_path))
fh = open(key_path)
key = AesKey.Read(fh.read())
fh.close()
return key
def keyczar_encrypt(key, msg):
return key.Encrypt(msg.encode('utf-8'))
def keyczar_decrypt(key, msg):
try:
return key.Decrypt(msg)
except key_errors.InvalidSignatureError:
raise AnsibleError("decryption failed")
|
gpl-3.0
|
tiagofrepereira2012/tensorflow
|
tensorflow/contrib/tpu/python/tpu/tpu_optimizer.py
|
13
|
3848
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Optimizer that implements cross-shard gradient reduction for TPU."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.tpu.python.ops import tpu_ops
from tensorflow.python.training import optimizer
class CrossShardOptimizer(optimizer.Optimizer):
"""A optimizer sums gradients across TPU shards."""
def __init__(self, opt, name="CrossShardOptimizer"):
super(CrossShardOptimizer, self).__init__(False, name)
self._opt = opt
def compute_gradients(self, *args, **kwargs):
"""Compute gradients of "loss" for the variables in "var_list".
This simply wraps the compute_gradients() from the real optimizer. The
gradients will be aggregated in the apply_gradients() so that user can
modify the gradients like clipping with per replica global norm if needed.
The global norm with aggregated gradients can be bad as one replica's huge
gradients can hurt the gradients from other replicas.
Args:
*args: Arguments for compute_gradients().
**kwargs: Keyword arguments for compute_gradients().
Returns:
A list of (gradient, variable) pairs.
"""
return self._opt.compute_gradients(*args, **kwargs)
def apply_gradients(self, grads_and_vars, global_step=None, name=None):
"""Apply gradients to variables.
Calls tpu_ops.cross_replica_sum() to sum gradient contributions across
replicas, and then applies the real optimizer.
Args:
grads_and_vars: List of (gradient, variable) pairs as returned by
compute_gradients().
global_step: Optional Variable to increment by one after the
variables have been updated.
name: Optional name for the returned operation. Default to the
name passed to the Optimizer constructor.
Returns:
An `Operation` that applies the gradients. If `global_step` was not None,
that operation also increments `global_step`.
Raises:
ValueError: If the grads_and_vars is malformed.
"""
summed_grads_and_vars = []
for (grad, var) in grads_and_vars:
if grad is None:
summed_grads_and_vars.append((grad, var))
else:
summed_grads_and_vars.append((tpu_ops.cross_replica_sum(grad), var))
return self._opt.apply_gradients(summed_grads_and_vars, global_step, name)
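  # Usage sketch (illustrative; `loss` and the learning rate are
  # placeholders, and a TPU replica context is assumed):
  #
  #   opt = CrossShardOptimizer(tf.train.GradientDescentOptimizer(0.01))
  #   train_op = opt.minimize(loss)
  #
  # minimize() calls compute_gradients() and then apply_gradients(), so each
  # replica's gradients are summed with cross_replica_sum() before the
  # wrapped optimizer applies them.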
def get_slot(self, *args, **kwargs):
"""Return a slot named "name" created for "var" by the Optimizer.
This simply wraps the get_slot() from the actual optimizer.
Args:
*args: Arguments for get_slot().
**kwargs: Keyword arguments for get_slot().
Returns:
The `Variable` for the slot if it was created, `None` otherwise.
"""
return self._opt.get_slot(*args, **kwargs)
def get_slot_names(self, *args, **kwargs):
"""Return a list of the names of slots created by the `Optimizer`.
This simply wraps the get_slot_names() from the actual optimizer.
Args:
*args: Arguments for get_slot().
**kwargs: Keyword arguments for get_slot().
Returns:
A list of strings.
"""
return self._opt.get_slot_names(*args, **kwargs)
|
apache-2.0
|
rrrene/django
|
tests/utils_tests/test_ipv6.py
|
332
|
2831
|
from __future__ import unicode_literals
import unittest
from django.utils.ipv6 import clean_ipv6_address, is_valid_ipv6_address
class TestUtilsIPv6(unittest.TestCase):
def test_validates_correct_plain_address(self):
self.assertTrue(is_valid_ipv6_address('fe80::223:6cff:fe8a:2e8a'))
self.assertTrue(is_valid_ipv6_address('2a02::223:6cff:fe8a:2e8a'))
self.assertTrue(is_valid_ipv6_address('1::2:3:4:5:6:7'))
self.assertTrue(is_valid_ipv6_address('::'))
self.assertTrue(is_valid_ipv6_address('::a'))
self.assertTrue(is_valid_ipv6_address('2::'))
def test_validates_correct_with_v4mapping(self):
self.assertTrue(is_valid_ipv6_address('::ffff:254.42.16.14'))
self.assertTrue(is_valid_ipv6_address('::ffff:0a0a:0a0a'))
def test_validates_incorrect_plain_address(self):
self.assertFalse(is_valid_ipv6_address('foo'))
self.assertFalse(is_valid_ipv6_address('127.0.0.1'))
self.assertFalse(is_valid_ipv6_address('12345::'))
self.assertFalse(is_valid_ipv6_address('1::2:3::4'))
self.assertFalse(is_valid_ipv6_address('1::zzz'))
self.assertFalse(is_valid_ipv6_address('1::2:3:4:5:6:7:8'))
self.assertFalse(is_valid_ipv6_address('1:2'))
self.assertFalse(is_valid_ipv6_address('1:::2'))
def test_validates_incorrect_with_v4mapping(self):
self.assertFalse(is_valid_ipv6_address('::ffff:999.42.16.14'))
self.assertFalse(is_valid_ipv6_address('::ffff:zzzz:0a0a'))
# The ::1.2.3.4 format used to be valid but was deprecated
# in rfc4291 section 2.5.5.1
self.assertTrue(is_valid_ipv6_address('::254.42.16.14'))
self.assertTrue(is_valid_ipv6_address('::0a0a:0a0a'))
self.assertFalse(is_valid_ipv6_address('::999.42.16.14'))
self.assertFalse(is_valid_ipv6_address('::zzzz:0a0a'))
    def test_cleans_plain_address(self):
self.assertEqual(clean_ipv6_address('DEAD::0:BEEF'), 'dead::beef')
self.assertEqual(clean_ipv6_address('2001:000:a:0000:0:fe:fe:beef'), '2001:0:a::fe:fe:beef')
self.assertEqual(clean_ipv6_address('2001::a:0000:0:fe:fe:beef'), '2001:0:a::fe:fe:beef')
    def test_cleans_with_v4_mapping(self):
self.assertEqual(clean_ipv6_address('::ffff:0a0a:0a0a'), '::ffff:10.10.10.10')
self.assertEqual(clean_ipv6_address('::ffff:1234:1234'), '::ffff:18.52.18.52')
self.assertEqual(clean_ipv6_address('::ffff:18.52.18.52'), '::ffff:18.52.18.52')
def test_unpacks_ipv4(self):
self.assertEqual(clean_ipv6_address('::ffff:0a0a:0a0a', unpack_ipv4=True), '10.10.10.10')
self.assertEqual(clean_ipv6_address('::ffff:1234:1234', unpack_ipv4=True), '18.52.18.52')
self.assertEqual(clean_ipv6_address('::ffff:18.52.18.52', unpack_ipv4=True), '18.52.18.52')
|
bsd-3-clause
|
simplyguru-dot/ansible
|
lib/ansible/plugins/lookup/pipe.py
|
184
|
1866
|
# (c) 2012, Daniel Hokka Zakrisson <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import subprocess
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
ret = []
for term in terms:
'''
http://docs.python.org/2/library/subprocess.html#popen-constructor
The shell argument (which defaults to False) specifies whether to use the
shell as the program to execute. If shell is True, it is recommended to pass
args as a string rather than as a sequence
https://github.com/ansible/ansible/issues/6550
'''
term = str(term)
p = subprocess.Popen(term, cwd=self._loader.get_basedir(), shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode == 0:
ret.append(stdout.decode("utf-8").rstrip())
else:
raise AnsibleError("lookup_plugin.pipe(%s) returned %d" % (term, p.returncode))
return ret
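# Usage sketch (illustrative playbook snippet):
#
#   - debug: msg="{{ lookup('pipe', 'date') }}"
#
# Each term is run through the shell and its stripped stdout is returned;
# a non-zero exit code raises AnsibleError.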
|
gpl-3.0
|
wkritzinger/asuswrt-merlin
|
release/src/router/samba3/source/stf/strings.py
|
55
|
5889
|
#! /usr/bin/python
# Comfychair test cases for Samba string functions.
# Copyright (C) 2003 by Martin Pool <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
# XXX: All this code assumes that the Unix character set is UTF-8,
# which is the most common setting. I guess it would be better to
# force it to that value while running the tests. I'm not sure of the
# best way to do that yet.
#
# Note that this is NOT the case in C code until the loadparm table is
# initialized -- the default seems to be ASCII, which rather lets Samba
# off the hook. :-) The best way seems to be to put this in the test
# harnesses:
#
# lp_load("/dev/null", True, False, False);
#
# -- mbp
import sys, re, comfychair
from unicodenames import *
def signum(a):
if a < 0:
return -1
elif a > 0:
return +1
else:
return 0
class PushUCS2_Tests(comfychair.TestCase):
"""Conversion to/from UCS2"""
def runtest(self):
OE = LATIN_CAPITAL_LETTER_O_WITH_DIARESIS
        oe = LATIN_SMALL_LETTER_O_WITH_DIARESIS
cases = ['hello',
'hello world',
'g' + OE + OE + 'gomobile',
'g' + OE + oe + 'gomobile',
u'foo\u0100',
KATAKANA_LETTER_A * 20,
]
for u8str in cases:
out, err = self.runcmd("t_push_ucs2 \"%s\"" % u8str.encode('utf-8'))
self.assert_equal(out, "0\n")
class StrCaseCmp(comfychair.TestCase):
"""String comparisons in simple ASCII"""
def run_strcmp(self, a, b, expect):
out, err = self.runcmd('t_strcmp \"%s\" \"%s\"' % (a.encode('utf-8'), b.encode('utf-8')))
if signum(int(out)) != expect:
self.fail("comparison failed:\n"
" a=%s\n"
" b=%s\n"
" expected=%s\n"
" result=%s\n" % (`a`, `b`, `expect`, `out`))
def runtest(self):
# A, B, strcasecmp(A, B)
cases = [('hello', 'hello', 0),
('hello', 'goodbye', +1),
('goodbye', 'hello', -1),
('hell', 'hello', -1),
('', '', 0),
('a', '', +1),
('', 'a', -1),
('a', 'A', 0),
('aa', 'aA', 0),
('Aa', 'aa', 0),
('longstring ' * 100, 'longstring ' * 100, 0),
('longstring ' * 100, 'longstring ' * 100 + 'a', -1),
('longstring ' * 100 + 'a', 'longstring ' * 100, +1),
(KATAKANA_LETTER_A, KATAKANA_LETTER_A, 0),
(KATAKANA_LETTER_A, 'a', 1),
]
for a, b, expect in cases:
self.run_strcmp(a, b, expect)
class strstr_m(comfychair.TestCase):
"""String comparisons in simple ASCII"""
def run_strstr(self, a, b, expect):
out, err = self.runcmd('t_strstr \"%s\" \"%s\"' % (a.encode('utf-8'), b.encode('utf-8')))
if (out != (expect + '\n').encode('utf-8')):
self.fail("comparison failed:\n"
" a=%s\n"
" b=%s\n"
" expected=%s\n"
" result=%s\n" % (`a`, `b`, `expect+'\n'`, `out`))
def runtest(self):
# A, B, strstr_m(A, B)
cases = [('hello', 'hello', 'hello'),
('hello', 'goodbye', '(null)'),
('goodbye', 'hello', '(null)'),
('hell', 'hello', '(null)'),
('hello', 'hell', 'hello'),
('', '', ''),
('a', '', 'a'),
('', 'a', '(null)'),
('a', 'A', '(null)'),
('aa', 'aA', '(null)'),
('Aa', 'aa', '(null)'),
('%v foo', '%v', '%v foo'),
('foo %v foo', '%v', '%v foo'),
('foo %v', '%v', '%v'),
('longstring ' * 100, 'longstring ' * 99, 'longstring ' * 100),
('longstring ' * 99, 'longstring ' * 100, '(null)'),
('longstring a' * 99, 'longstring ' * 100 + 'a', '(null)'),
('longstring ' * 100 + 'a', 'longstring ' * 100, 'longstring ' * 100 + 'a'),
(KATAKANA_LETTER_A, KATAKANA_LETTER_A + 'bcd', '(null)'),
(KATAKANA_LETTER_A + 'bcde', KATAKANA_LETTER_A + 'bcd', KATAKANA_LETTER_A + 'bcde'),
('d'+KATAKANA_LETTER_A + 'bcd', KATAKANA_LETTER_A + 'bcd', KATAKANA_LETTER_A + 'bcd'),
('d'+KATAKANA_LETTER_A + 'bd', KATAKANA_LETTER_A + 'bcd', '(null)'),
('e'+KATAKANA_LETTER_A + 'bcdf', KATAKANA_LETTER_A + 'bcd', KATAKANA_LETTER_A + 'bcdf'),
(KATAKANA_LETTER_A, KATAKANA_LETTER_A + 'bcd', '(null)'),
(KATAKANA_LETTER_A*3, 'a', '(null)'),
]
for a, b, expect in cases:
self.run_strstr(a, b, expect)
# Define the tests exported by this module
tests = [StrCaseCmp,
strstr_m,
PushUCS2_Tests]
# Handle execution of this file as a main program
if __name__ == '__main__':
comfychair.main(tests)
# Local variables:
# coding: utf-8
# End:
|
gpl-2.0
|
quait/madcow
|
madcow/include/simplejson/decoder.py
|
296
|
15152
|
"""Implementation of JSONDecoder
"""
import re
import sys
import struct
from simplejson.scanner import make_scanner
def _import_c_scanstring():
try:
from simplejson._speedups import scanstring
return scanstring
except ImportError:
return None
c_scanstring = _import_c_scanstring()
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
# The struct module in Python 2.4 would get frexp() out of range here
# when an endian is specified in the format string. Fixed in Python 2.5+
if sys.byteorder != 'big':
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
nan, inf = struct.unpack('dd', _BYTES)
return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
class JSONDecodeError(ValueError):
"""Subclass of ValueError with the following additional properties:
msg: The unformatted error message
doc: The JSON document being parsed
pos: The start index of doc where parsing failed
end: The end index of doc where parsing failed (may be None)
lineno: The line corresponding to pos
colno: The column corresponding to pos
endlineno: The line corresponding to end (may be None)
endcolno: The column corresponding to end (may be None)
"""
def __init__(self, msg, doc, pos, end=None):
ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
self.msg = msg
self.doc = doc
self.pos = pos
self.end = end
self.lineno, self.colno = linecol(doc, pos)
if end is not None:
            self.endlineno, self.endcolno = linecol(doc, end)
else:
self.endlineno, self.endcolno = None, None
def linecol(doc, pos):
lineno = doc.count('\n', 0, pos) + 1
if lineno == 1:
colno = pos
else:
colno = pos - doc.rindex('\n', 0, pos)
return lineno, colno
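# Worked example (illustrative): for doc = '{\n "a": 1' and pos = 3 (the
# quote that opens the key), lineno is 2 and colno is 3 - 1 = 2. Note the
# asymmetry: on the first line colno is the 0-based offset, on later lines
# it is 1-based relative to the preceding newline.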
def errmsg(msg, doc, pos, end=None):
# Note that this function is called from _speedups
lineno, colno = linecol(doc, pos)
if end is None:
#fmt = '{0}: line {1} column {2} (char {3})'
#return fmt.format(msg, lineno, colno, pos)
fmt = '%s: line %d column %d (char %d)'
return fmt % (msg, lineno, colno, pos)
endlineno, endcolno = linecol(doc, end)
#fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
#return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
_CONSTANTS = {
'-Infinity': NegInf,
'Infinity': PosInf,
'NaN': NaN,
}
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
BACKSLASH = {
'"': u'"', '\\': u'\\', '/': u'/',
'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True,
_b=BACKSLASH, _m=STRINGCHUNK.match):
"""Scan the string s for a JSON string. End is the index of the
character in s after the quote that started the JSON string.
Unescapes all valid JSON string escape sequences and raises ValueError
on attempt to decode an invalid string. If strict is False then literal
control characters are allowed in the string.
Returns a tuple of the decoded string and the index of the character in s
after the end quote."""
if encoding is None:
encoding = DEFAULT_ENCODING
chunks = []
_append = chunks.append
begin = end - 1
while 1:
chunk = _m(s, end)
if chunk is None:
raise JSONDecodeError(
"Unterminated string starting at", s, begin)
end = chunk.end()
content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
if content:
if not isinstance(content, unicode):
content = unicode(content, encoding)
_append(content)
# Terminator is the end of string, a literal control character,
# or a backslash denoting that an escape sequence follows
if terminator == '"':
break
elif terminator != '\\':
if strict:
msg = "Invalid control character %r at" % (terminator,)
#msg = "Invalid control character {0!r} at".format(terminator)
raise JSONDecodeError(msg, s, end)
else:
_append(terminator)
continue
try:
esc = s[end]
except IndexError:
raise JSONDecodeError(
"Unterminated string starting at", s, begin)
# If not a unicode escape sequence, must be in the lookup table
if esc != 'u':
try:
char = _b[esc]
except KeyError:
msg = "Invalid \\escape: " + repr(esc)
raise JSONDecodeError(msg, s, end)
end += 1
else:
# Unicode escape sequence
esc = s[end + 1:end + 5]
next_end = end + 5
if len(esc) != 4:
msg = "Invalid \\uXXXX escape"
raise JSONDecodeError(msg, s, end)
uni = int(esc, 16)
# Check for surrogate pair on UCS-4 systems
if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
if not s[end + 5:end + 7] == '\\u':
raise JSONDecodeError(msg, s, end)
esc2 = s[end + 7:end + 11]
if len(esc2) != 4:
raise JSONDecodeError(msg, s, end)
uni2 = int(esc2, 16)
uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
next_end += 6
char = unichr(uni)
end = next_end
# Append the unescaped character
_append(char)
return u''.join(chunks), end
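# Worked example (illustrative): py_scanstring(r'"a\u00e9"', 1) decodes the
# escape and returns (u'a\xe9', 9), where 9 is the index one past the
# closing quote. Literal control characters raise JSONDecodeError unless
# strict is False.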
# Use speedup if available
scanstring = c_scanstring or py_scanstring
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook,
object_pairs_hook, memo=None,
_w=WHITESPACE.match, _ws=WHITESPACE_STR):
# Backwards compatibility
if memo is None:
memo = {}
memo_get = memo.setdefault
pairs = []
# Use a slice to prevent IndexError from being raised, the following
# check will raise a more specific ValueError if the string is empty
nextchar = s[end:end + 1]
# Normally we expect nextchar == '"'
if nextchar != '"':
if nextchar in _ws:
end = _w(s, end).end()
nextchar = s[end:end + 1]
# Trivial empty object
if nextchar == '}':
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end
pairs = {}
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end + 1
elif nextchar != '"':
raise JSONDecodeError("Expecting property name", s, end)
end += 1
while True:
key, end = scanstring(s, end, encoding, strict)
key = memo_get(key, key)
# To skip some function call overhead we optimize the fast paths where
# the JSON key separator is ": " or just ":".
if s[end:end + 1] != ':':
end = _w(s, end).end()
if s[end:end + 1] != ':':
raise JSONDecodeError("Expecting : delimiter", s, end)
end += 1
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
try:
value, end = scan_once(s, end)
except StopIteration:
raise JSONDecodeError("Expecting object", s, end)
pairs.append((key, value))
try:
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar == '}':
break
elif nextchar != ',':
raise JSONDecodeError("Expecting , delimiter", s, end - 1)
try:
nextchar = s[end]
if nextchar in _ws:
end += 1
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar != '"':
raise JSONDecodeError("Expecting property name", s, end - 1)
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end
pairs = dict(pairs)
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
values = []
nextchar = s[end:end + 1]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end:end + 1]
# Look-ahead for trivial empty array
if nextchar == ']':
return values, end + 1
_append = values.append
while True:
try:
value, end = scan_once(s, end)
except StopIteration:
raise JSONDecodeError("Expecting object", s, end)
_append(value)
nextchar = s[end:end + 1]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end:end + 1]
end += 1
if nextchar == ']':
break
elif nextchar != ',':
raise JSONDecodeError("Expecting , delimiter", s, end)
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
return values, end
class JSONDecoder(object):
"""Simple JSON <http://json.org> decoder
Performs the following translations in decoding by default:
+---------------+-------------------+
| JSON | Python |
+===============+===================+
| object | dict |
+---------------+-------------------+
| array | list |
+---------------+-------------------+
| string | unicode |
+---------------+-------------------+
| number (int) | int, long |
+---------------+-------------------+
| number (real) | float |
+---------------+-------------------+
| true | True |
+---------------+-------------------+
| false | False |
+---------------+-------------------+
| null | None |
+---------------+-------------------+
It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
their corresponding ``float`` values, which is outside the JSON spec.
"""
def __init__(self, encoding=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, strict=True,
object_pairs_hook=None):
"""
*encoding* determines the encoding used to interpret any
:class:`str` objects decoded by this instance (``'utf-8'`` by
default). It has no effect when decoding :class:`unicode` objects.
Note that currently only encodings that are a superset of ASCII work,
strings of other encodings should be passed in as :class:`unicode`.
*object_hook*, if specified, will be called with the result of every
JSON object decoded and its return value will be used in place of the
given :class:`dict`. This can be used to provide custom
deserializations (e.g. to support JSON-RPC class hinting).
*object_pairs_hook* is an optional function that will be called with
the result of any object literal decode with an ordered list of pairs.
The return value of *object_pairs_hook* will be used instead of the
:class:`dict`. This feature can be used to implement custom decoders
that rely on the order that the key and value pairs are decoded (for
example, :func:`collections.OrderedDict` will remember the order of
insertion). If *object_hook* is also defined, the *object_pairs_hook*
takes priority.
*parse_float*, if specified, will be called with the string of every
JSON float to be decoded. By default, this is equivalent to
``float(num_str)``. This can be used to use another datatype or parser
for JSON floats (e.g. :class:`decimal.Decimal`).
*parse_int*, if specified, will be called with the string of every
JSON int to be decoded. By default, this is equivalent to
``int(num_str)``. This can be used to use another datatype or parser
for JSON integers (e.g. :class:`float`).
*parse_constant*, if specified, will be called with one of the
following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
can be used to raise an exception if invalid JSON numbers are
encountered.
*strict* controls the parser's behavior when it encounters an
invalid control character in a string. The default setting of
``True`` means that unescaped control characters are parse errors, if
``False`` then control characters will be allowed in strings.
"""
self.encoding = encoding
self.object_hook = object_hook
self.object_pairs_hook = object_pairs_hook
self.parse_float = parse_float or float
self.parse_int = parse_int or int
self.parse_constant = parse_constant or _CONSTANTS.__getitem__
self.strict = strict
self.parse_object = JSONObject
self.parse_array = JSONArray
self.parse_string = scanstring
self.memo = {}
self.scan_once = make_scanner(self)
def decode(self, s, _w=WHITESPACE.match):
"""Return the Python representation of ``s`` (a ``str`` or ``unicode``
instance containing a JSON document)
"""
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
end = _w(s, end).end()
if end != len(s):
raise JSONDecodeError("Extra data", s, end, len(s))
return obj
def raw_decode(self, s, idx=0):
"""Decode a JSON document from ``s`` (a ``str`` or ``unicode``
beginning with a JSON document) and return a 2-tuple of the Python
representation and the index in ``s`` where the document ended.
This can be used to decode a JSON document from a string that may
have extraneous data at the end.
"""
try:
obj, end = self.scan_once(s, idx)
except StopIteration:
raise JSONDecodeError("No JSON object could be decoded", s, idx)
return obj, end
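# Usage sketch (illustrative):
#
#   JSONDecoder().decode('{"a": [1, 2.5, null]}')  # {u'a': [1, 2.5, None]}
#   JSONDecoder().raw_decode('[1, 2] tail')        # ([1, 2], 6)
#
# decode() rejects trailing data with an "Extra data" error, while
# raw_decode() reports where the document ended instead.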
|
gpl-3.0
|
takeshineshiro/heat
|
heat/common/messaging.py
|
13
|
4273
|
# -*- coding: utf-8 -*-
# Copyright 2013 eNovance <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import eventlet
from oslo_config import cfg
import oslo_messaging
from oslo_serialization import jsonutils
from osprofiler import profiler
from heat.common import context
TRANSPORT = None
NOTIFIER = None
_ALIASES = {
'heat.openstack.common.rpc.impl_kombu': 'rabbit',
'heat.openstack.common.rpc.impl_qpid': 'qpid',
'heat.openstack.common.rpc.impl_zmq': 'zmq',
}
class RequestContextSerializer(oslo_messaging.Serializer):
def __init__(self, base):
self._base = base
def serialize_entity(self, ctxt, entity):
if not self._base:
return entity
return self._base.serialize_entity(ctxt, entity)
def deserialize_entity(self, ctxt, entity):
if not self._base:
return entity
return self._base.deserialize_entity(ctxt, entity)
@staticmethod
def serialize_context(ctxt):
_context = ctxt.to_dict()
prof = profiler.get()
if prof:
trace_info = {
"hmac_key": prof.hmac_key,
"base_id": prof.get_base_id(),
"parent_id": prof.get_id()
}
_context.update({"trace_info": trace_info})
return _context
@staticmethod
def deserialize_context(ctxt):
trace_info = ctxt.pop("trace_info", None)
if trace_info:
profiler.init(**trace_info)
return context.RequestContext.from_dict(ctxt)
class JsonPayloadSerializer(oslo_messaging.NoOpSerializer):
@classmethod
def serialize_entity(cls, context, entity):
return jsonutils.to_primitive(entity, convert_instances=True)
def setup(url=None, optional=False):
"""Initialise the oslo_messaging layer."""
global TRANSPORT, NOTIFIER
if url and url.startswith("fake://"):
# NOTE(sileht): oslo_messaging fake driver uses time.sleep
# for task switch, so we need to monkey_patch it
eventlet.monkey_patch(time=True)
if not TRANSPORT:
oslo_messaging.set_transport_defaults('heat')
exmods = ['heat.common.exception']
try:
TRANSPORT = oslo_messaging.get_transport(
cfg.CONF, url, allowed_remote_exmods=exmods, aliases=_ALIASES)
except oslo_messaging.InvalidTransportURL as e:
TRANSPORT = None
if not optional or e.url:
# NOTE(sileht): oslo_messaging is configured but unloadable
# so reraise the exception
raise
if not NOTIFIER and TRANSPORT:
serializer = RequestContextSerializer(JsonPayloadSerializer())
NOTIFIER = oslo_messaging.Notifier(TRANSPORT, serializer=serializer)
def cleanup():
"""Cleanup the oslo_messaging layer."""
global TRANSPORT, NOTIFIER
if TRANSPORT:
TRANSPORT.cleanup()
TRANSPORT = NOTIFIER = None
def get_rpc_server(target, endpoint):
"""Return a configured oslo_messaging rpc server."""
serializer = RequestContextSerializer(JsonPayloadSerializer())
return oslo_messaging.get_rpc_server(TRANSPORT, target, [endpoint],
executor='eventlet',
serializer=serializer)
def get_rpc_client(**kwargs):
"""Return a configured oslo_messaging RPCClient."""
target = oslo_messaging.Target(**kwargs)
serializer = RequestContextSerializer(JsonPayloadSerializer())
return oslo_messaging.RPCClient(TRANSPORT, target,
serializer=serializer)
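# Usage sketch (illustrative; the topic, version, method and context are
# assumptions, not part of this module):
#
#   setup()
#   client = get_rpc_client(topic='engine', version='1.0')
#   result = client.call(ctxt, 'list_stacks')
#
# setup() must have initialised TRANSPORT before any client, server or
# notifier is built.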
def get_notifier(publisher_id):
"""Return a configured oslo_messaging notifier."""
return NOTIFIER.prepare(publisher_id=publisher_id)
|
apache-2.0
|
georgewhewell/CouchPotatoServer
|
couchpotato/core/notifications/pushbullet/main.py
|
4
|
2483
|
from couchpotato.core.helpers.encoding import toUnicode
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
import base64
import json
log = CPLog(__name__)
class Pushbullet(Notification):
url = 'https://api.pushbullet.com/api/%s'
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
devices = self.getDevices()
if devices is None:
return False
# Get all the device IDs linked to this user
if not len(devices):
response = self.request('devices')
if not response:
return False
devices += [device.get('id') for device in response['devices']]
successful = 0
for device in devices:
response = self.request(
'pushes',
cache = False,
device_id = device,
type = 'note',
title = self.default_title,
body = toUnicode(message)
)
if response:
successful += 1
else:
log.error('Unable to push notification to Pushbullet device with ID %s' % device)
return successful == len(devices)
def getDevices(self):
devices = [d.strip() for d in self.conf('devices').split(',')]
# Remove empty items
devices = [d for d in devices if len(d)]
# Break on any ids that aren't integers
valid_devices = []
for device_id in devices:
d = tryInt(device_id, None)
if not d:
log.error('Device ID "%s" is not valid', device_id)
return None
valid_devices.append(d)
return valid_devices
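    # Behaviour sketch (illustrative): with conf('devices') == '12, 34' this
    # returns [12, 34]; with '12, abc' the non-integer id is logged as an
    # error and None is returned, which makes notify() bail out early.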
def request(self, method, cache = True, **kwargs):
try:
base64string = base64.encodestring('%s:' % self.conf('api_key'))[:-1]
headers = {
"Authorization": "Basic %s" % base64string
}
if cache:
return self.getJsonData(self.url % method, headers = headers, data = kwargs)
else:
data = self.urlopen(self.url % method, headers = headers, data = kwargs)
return json.loads(data)
except Exception, ex:
log.error('Pushbullet request failed')
log.debug(ex)
return None
|
gpl-3.0
|
EDUlib/edx-ora2
|
test/acceptance/auto_auth.py
|
7
|
2426
|
"""
Auto-auth page (used to automatically log in during testing).
"""
import re
import urllib
from bok_choy.page_object import PageObject
import os
BASE_URL = os.environ.get('BASE_URL')
class AutoAuthPage(PageObject):
"""
The automatic authorization page.
When allowed via the django settings file, visiting
this url will create a user and log them in.
"""
def __init__(self, browser, username=None, email=None, password=None, staff=None, course_id=None, roles=None):
"""
Auto-auth is an end-point for HTTP GET requests.
By default, it will create accounts with random user credentials,
but you can also specify credentials using querystring parameters.
`username`, `email`, and `password` are the user's credentials (strings)
`staff` is a boolean indicating whether the user is global staff.
`course_id` is the ID of the course to enroll the student in.
Currently, this has the form "org/number/run"
Note that "global staff" is NOT the same as course staff.
"""
super(AutoAuthPage, self).__init__(browser)
# Create query string parameters if provided
self._params = {}
if username is not None:
self._params['username'] = username
if email is not None:
self._params['email'] = email
if password is not None:
self._params['password'] = password
if staff is not None:
self._params['staff'] = "true" if staff else "false"
if course_id is not None:
self._params['course_id'] = course_id
if roles is not None:
self._params['roles'] = roles
@property
def url(self):
"""
Construct the URL.
"""
url = BASE_URL + "/auto_auth"
query_str = urllib.urlencode(self._params)
if query_str:
url += "?" + query_str
return url
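    # Example (illustrative; querystring order may vary because the params
    # are kept in a plain dict):
    #
    #   AutoAuthPage(browser, username='staff', staff=True).url
    #   # -> BASE_URL + '/auto_auth?username=staff&staff=true'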
def is_browser_on_page(self):
message = self.q(css='BODY').text[0]
match = re.search(r'Logged in user ([^$]+) with password ([^$]+) and user_id ([^$]+)$', message)
        return bool(match)
def get_user_id(self):
"""
Finds and returns the user_id
"""
message = self.q(css='BODY').text[0].strip()
match = re.search(r' user_id ([^$]+)$', message)
return match.groups()[0] if match else None
|
agpl-3.0
|
kevinlondon/sentry
|
src/sentry/migrations/0009_auto__add_field_message_message_id.py
|
36
|
4502
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Message.message_id'
db.add_column('sentry_message', 'message_id', self.gf('django.db.models.fields.CharField')(max_length=32, unique=True, null=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'Message.message_id'
db.delete_column('sentry_message', 'message_id')
models = {
'sentry.filtervalue': {
'Meta': {'unique_together': "(('key', 'value'),)", 'object_name': 'FilterValue'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.groupedmessage': {
'Meta': {'unique_together': "(('logger', 'view', 'checksum'),)", 'object_name': 'GroupedMessage'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'class_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'view': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'sentry.message': {
'Meta': {'object_name': 'Message'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'class_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'message_set'", 'null': 'True', 'to': "orm['sentry.GroupedMessage']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'message_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'traceback': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'view': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['sentry']
|
bsd-3-clause
|
hojel/calibre
|
src/calibre/gui2/metadata/single_download.py
|
11
|
42340
|
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2011, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
DEBUG_DIALOG = False
# Imports {{{
import os, time
from threading import Thread, Event
from operator import attrgetter
from Queue import Queue, Empty
from io import BytesIO
from PyQt5.Qt import (
QStyledItemDelegate, QTextDocument, QRectF, QIcon, Qt, QApplication,
QDialog, QVBoxLayout, QLabel, QDialogButtonBox, QStyle, QStackedWidget,
QWidget, QTableView, QGridLayout, QFontInfo, QPalette, QTimer, pyqtSignal,
QAbstractTableModel, QSize, QListView, QPixmap, QModelIndex, QUrl,
QAbstractListModel, QColor, QRect, QTextBrowser, QStringListModel, QMenu,
QCursor, QHBoxLayout, QPushButton, QSizePolicy)
from PyQt5.QtWebKitWidgets import QWebView
from calibre.customize.ui import metadata_plugins
from calibre.ebooks.metadata import authors_to_string
from calibre.utils.logging import GUILog as Log
from calibre.ebooks.metadata.sources.identify import urls_from_identifiers
from calibre.ebooks.metadata.book.base import Metadata
from calibre.ebooks.metadata.opf2 import OPF
from calibre.gui2 import error_dialog, rating_font, gprefs
from calibre.utils.date import (utcnow, fromordinal, format_date,
UNDEFINED_DATE, as_utc)
from calibre.library.comments import comments_to_html
from calibre import force_unicode
from calibre.utils.config import tweaks
from calibre.utils.ipc.simple_worker import fork_job, WorkerError
from calibre.ptempfile import TemporaryDirectory
# }}}
class RichTextDelegate(QStyledItemDelegate): # {{{
def __init__(self, parent=None, max_width=160):
QStyledItemDelegate.__init__(self, parent)
self.max_width = max_width
self.dummy_model = QStringListModel([' '], self)
self.dummy_index = self.dummy_model.index(0)
def to_doc(self, index, option=None):
doc = QTextDocument()
if option is not None and option.state & QStyle.State_Selected:
p = option.palette
group = (p.Active if option.state & QStyle.State_Active else
p.Inactive)
c = p.color(group, p.HighlightedText)
c = 'rgb(%d, %d, %d)'%c.getRgb()[:3]
doc.setDefaultStyleSheet(' * { color: %s }'%c)
doc.setHtml(index.data() or '')
return doc
def sizeHint(self, option, index):
doc = self.to_doc(index, option=option)
ans = doc.size().toSize()
if ans.width() > self.max_width - 10:
ans.setWidth(self.max_width)
ans.setHeight(ans.height()+10)
return ans
def paint(self, painter, option, index):
QStyledItemDelegate.paint(self, painter, option, self.dummy_index)
painter.save()
painter.setClipRect(QRectF(option.rect))
painter.translate(option.rect.topLeft())
self.to_doc(index, option).drawContents(painter)
painter.restore()
# }}}
class CoverDelegate(QStyledItemDelegate): # {{{
needs_redraw = pyqtSignal()
def __init__(self, parent):
QStyledItemDelegate.__init__(self, parent)
self.angle = 0
self.timer = QTimer(self)
self.timer.timeout.connect(self.frame_changed)
self.color = parent.palette().color(QPalette.WindowText)
self.spinner_width = 64
def frame_changed(self, *args):
self.angle = (self.angle+30)%360
self.needs_redraw.emit()
def start_animation(self):
self.angle = 0
self.timer.start(200)
def stop_animation(self):
self.timer.stop()
def draw_spinner(self, painter, rect):
width = rect.width()
outer_radius = (width-1)*0.5
inner_radius = (width-1)*0.5*0.38
capsule_height = outer_radius - inner_radius
capsule_width = int(capsule_height * (0.23 if width > 32 else 0.35))
capsule_radius = capsule_width//2
painter.save()
painter.setRenderHint(painter.Antialiasing)
for i in xrange(12):
color = QColor(self.color)
color.setAlphaF(1.0 - (i/12.0))
painter.setPen(Qt.NoPen)
painter.setBrush(color)
painter.save()
painter.translate(rect.center())
painter.rotate(self.angle - i*30.0)
painter.drawRoundedRect(-capsule_width*0.5,
-(inner_radius+capsule_height), capsule_width,
capsule_height, capsule_radius, capsule_radius)
painter.restore()
painter.restore()
def paint(self, painter, option, index):
QStyledItemDelegate.paint(self, painter, option, index)
style = QApplication.style()
waiting = self.timer.isActive() and bool(index.data(Qt.UserRole))
if waiting:
rect = QRect(0, 0, self.spinner_width, self.spinner_width)
rect.moveCenter(option.rect.center())
self.draw_spinner(painter, rect)
else:
# Ensure the cover is rendered over any selection rect
style.drawItemPixmap(painter, option.rect, Qt.AlignTop|Qt.AlignHCenter,
QPixmap(index.data(Qt.DecorationRole)))
# }}}
class ResultsModel(QAbstractTableModel): # {{{
COLUMNS = (
'#', _('Title'), _('Published'), _('Has cover'), _('Has summary')
)
HTML_COLS = (1, 2)
ICON_COLS = (3, 4)
def __init__(self, results, parent=None):
QAbstractTableModel.__init__(self, parent)
self.results = results
self.yes_icon = (QIcon(I('ok.png')))
def rowCount(self, parent=None):
return len(self.results)
def columnCount(self, parent=None):
return len(self.COLUMNS)
def headerData(self, section, orientation, role):
if orientation == Qt.Horizontal and role == Qt.DisplayRole:
            try:
                return self.COLUMNS[section]
            except IndexError:
                return None
return None
def data_as_text(self, book, col):
if col == 0:
return unicode(book.gui_rank+1)
if col == 1:
t = book.title if book.title else _('Unknown')
a = authors_to_string(book.authors) if book.authors else ''
return '<b>%s</b><br><i>%s</i>' % (t, a)
if col == 2:
d = format_date(book.pubdate, 'yyyy') if book.pubdate else _('Unknown')
p = book.publisher if book.publisher else ''
return '<b>%s</b><br><i>%s</i>' % (d, p)
def data(self, index, role):
row, col = index.row(), index.column()
try:
book = self.results[row]
        except IndexError:
return None
if role == Qt.DisplayRole and col not in self.ICON_COLS:
res = self.data_as_text(book, col)
if res:
return (res)
return None
elif role == Qt.DecorationRole and col in self.ICON_COLS:
if col == 3 and getattr(book, 'has_cached_cover_url', False):
return self.yes_icon
if col == 4 and book.comments:
return self.yes_icon
elif role == Qt.UserRole:
return book
elif role == Qt.ToolTipRole and col == 3:
return (
_('The has cover indication is not fully\n'
'reliable. Sometimes results marked as not\n'
'having a cover will find a cover in the download\n'
'cover stage, and vice versa.'))
return None
def sort(self, col, order=Qt.AscendingOrder):
key = lambda x: x
if col == 0:
key = attrgetter('gui_rank')
elif col == 1:
key = attrgetter('title')
elif col == 2:
def dategetter(x):
x = getattr(x, 'pubdate', None)
if x is None:
x = UNDEFINED_DATE
return as_utc(x)
key = dategetter
elif col == 3:
key = attrgetter('has_cached_cover_url')
        elif col == 4:
key = lambda x: bool(x.comments)
self.beginResetModel()
self.results.sort(key=key, reverse=order==Qt.AscendingOrder)
self.endResetModel()
# }}}
class ResultsView(QTableView): # {{{
show_details_signal = pyqtSignal(object)
book_selected = pyqtSignal(object)
def __init__(self, parent=None):
QTableView.__init__(self, parent)
self.rt_delegate = RichTextDelegate(self)
self.setSelectionMode(self.SingleSelection)
self.setAlternatingRowColors(True)
self.setSelectionBehavior(self.SelectRows)
self.setIconSize(QSize(24, 24))
self.clicked.connect(self.show_details)
self.doubleClicked.connect(self.select_index)
self.setSortingEnabled(True)
def show_results(self, results):
self._model = ResultsModel(results, self)
self.setModel(self._model)
for i in self._model.HTML_COLS:
self.setItemDelegateForColumn(i, self.rt_delegate)
self.resizeRowsToContents()
self.resizeColumnsToContents()
self.setFocus(Qt.OtherFocusReason)
idx = self.model().index(0, 0)
if idx.isValid() and self.model().rowCount() > 0:
self.show_details(idx)
sm = self.selectionModel()
sm.select(idx, sm.ClearAndSelect|sm.Rows)
def resize_delegate(self):
self.rt_delegate.max_width = int(self.width()/2.1)
self.resizeColumnsToContents()
def resizeEvent(self, ev):
ret = super(ResultsView, self).resizeEvent(ev)
self.resize_delegate()
return ret
def currentChanged(self, current, previous):
ret = QTableView.currentChanged(self, current, previous)
self.show_details(current)
return ret
def show_details(self, index):
f = rating_font()
book = self.model().data(index, Qt.UserRole)
parts = [
'<center>',
'<h2>%s</h2>'%book.title,
'<div><i>%s</i></div>'%authors_to_string(book.authors),
]
if not book.is_null('series'):
series = book.format_field('series')
if series[1]:
parts.append('<div>%s: %s</div>'%series)
if not book.is_null('rating'):
style = 'style=\'font-family:"%s"\''%f
parts.append('<div %s>%s</div>'%(style, u'\u2605'*int(book.rating)))
parts.append('</center>')
if book.identifiers:
urls = urls_from_identifiers(book.identifiers)
ids = ['<a href="%s">%s</a>'%(url, name) for name, ign, ign, url in urls]
if ids:
parts.append('<div><b>%s:</b> %s</div><br>'%(_('See at'), ', '.join(ids)))
if book.tags:
parts.append('<div>%s</div><div>%s</div>'%(', '.join(book.tags), u'\u00a0'))
if book.comments:
parts.append(comments_to_html(book.comments))
self.show_details_signal.emit(''.join(parts))
def select_index(self, index):
if self.model() is None:
return
if not index.isValid():
index = self.model().index(0, 0)
book = self.model().data(index, Qt.UserRole)
self.book_selected.emit(book)
def get_result(self):
self.select_index(self.currentIndex())
def keyPressEvent(self, ev):
if ev.key() in (Qt.Key_Left, Qt.Key_Right):
ac = self.MoveDown if ev.key() == Qt.Key_Right else self.MoveUp
index = self.moveCursor(ac, ev.modifiers())
if index.isValid() and index != self.currentIndex():
m = self.selectionModel()
m.select(index, m.Select|m.Current|m.Rows)
self.setCurrentIndex(index)
ev.accept()
return
return QTableView.keyPressEvent(self, ev)
# }}}
class Comments(QWebView): # {{{
def __init__(self, parent=None):
QWebView.__init__(self, parent)
self.setAcceptDrops(False)
self.setMaximumWidth(300)
self.setMinimumWidth(300)
palette = self.palette()
palette.setBrush(QPalette.Base, Qt.transparent)
self.page().setPalette(palette)
self.setAttribute(Qt.WA_OpaquePaintEvent, False)
self.page().setLinkDelegationPolicy(self.page().DelegateAllLinks)
self.linkClicked.connect(self.link_clicked)
def link_clicked(self, url):
from calibre.gui2 import open_url
if unicode(url.toString()).startswith('http://'):
open_url(url)
def turnoff_scrollbar(self, *args):
self.page().mainFrame().setScrollBarPolicy(Qt.Horizontal, Qt.ScrollBarAlwaysOff)
def show_data(self, html):
def color_to_string(col):
ans = '#000000'
if col.isValid():
col = col.toRgb()
if col.isValid():
ans = unicode(col.name())
return ans
fi = QFontInfo(QApplication.font(self.parent()))
f = fi.pixelSize()+1+int(tweaks['change_book_details_font_size_by'])
fam = unicode(fi.family()).strip().replace('"', '')
if not fam:
fam = 'sans-serif'
c = color_to_string(QApplication.palette().color(QPalette.Normal,
QPalette.WindowText))
templ = '''\
<html>
<head>
<style type="text/css">
body, td {background-color: transparent; font-family: "%s"; font-size: %dpx; color: %s }
a { text-decoration: none; color: blue }
div.description { margin-top: 0; padding-top: 0; text-indent: 0 }
table { margin-bottom: 0; padding-bottom: 0; }
</style>
</head>
<body>
<div class="description">
%%s
</div>
</body>
</html>
'''%(fam, f, c)
self.setHtml(templ%html)
def sizeHint(self):
# This is needed because on windows the dialog cannot be resized so
# that this widget's height becomes less than sizeHint().height(). Qt
# sets the sizeHint to (800, 600), which makes the dialog unusable on
# smaller screens.
return QSize(800, 300)
# }}}
class IdentifyWorker(Thread): # {{{
def __init__(self, log, abort, title, authors, identifiers, caches):
Thread.__init__(self)
self.daemon = True
self.log, self.abort = log, abort
self.title, self.authors, self.identifiers = (title, authors,
identifiers)
self.results = []
self.error = None
self.caches = caches
def sample_results(self):
m1 = Metadata('The Great Gatsby', ['Francis Scott Fitzgerald'])
m2 = Metadata('The Great Gatsby - An extra long title to test resizing', ['F. Scott Fitzgerald'])
m1.has_cached_cover_url = True
m2.has_cached_cover_url = False
m1.comments = 'Some comments '*10
m1.tags = ['tag%d'%i for i in range(20)]
m1.rating = 4.4
m1.language = 'en'
m2.language = 'fr'
m1.pubdate = utcnow()
m2.pubdate = fromordinal(1000000)
m1.publisher = 'Publisher 1'
m2.publisher = 'Publisher 2'
return [m1, m2]
def run(self):
try:
if DEBUG_DIALOG:
self.results = self.sample_results()
else:
res = fork_job(
'calibre.ebooks.metadata.sources.worker',
'single_identify', (self.title, self.authors,
self.identifiers), no_output=True, abort=self.abort)
self.results, covers, caches, log_dump = res['result']
self.results = [OPF(BytesIO(r), basedir=os.getcwdu(),
populate_spine=False).to_book_metadata() for r in self.results]
for r, cov in zip(self.results, covers):
r.has_cached_cover_url = cov
self.caches.update(caches)
self.log.load(log_dump)
for i, result in enumerate(self.results):
result.gui_rank = i
except WorkerError as e:
self.error = force_unicode(e.orig_tb)
except:
import traceback
self.error = force_unicode(traceback.format_exc())
# }}}
class IdentifyWidget(QWidget): # {{{
rejected = pyqtSignal()
results_found = pyqtSignal()
book_selected = pyqtSignal(object, object)
def __init__(self, log, parent=None):
QWidget.__init__(self, parent)
self.log = log
self.abort = Event()
self.caches = {}
self.l = l = QGridLayout()
self.setLayout(l)
names = ['<b>'+p.name+'</b>' for p in metadata_plugins(['identify']) if
p.is_configured()]
self.top = QLabel('<p>'+_('calibre is downloading metadata from: ') +
', '.join(names))
self.top.setWordWrap(True)
l.addWidget(self.top, 0, 0)
self.results_view = ResultsView(self)
self.results_view.book_selected.connect(self.emit_book_selected)
self.get_result = self.results_view.get_result
l.addWidget(self.results_view, 1, 0)
self.comments_view = Comments(self)
l.addWidget(self.comments_view, 1, 1)
self.results_view.show_details_signal.connect(self.comments_view.show_data)
self.query = QLabel('download starting...')
self.query.setWordWrap(True)
l.addWidget(self.query, 2, 0, 1, 2)
self.comments_view.show_data('<h2>'+_('Please wait')+
'<br><span id="dots">.</span></h2>'+
'''
<script type="text/javascript">
window.onload=function(){
var dotspan = document.getElementById('dots');
window.setInterval(function(){
if(dotspan.textContent == '............'){
dotspan.textContent = '.';
}
else{
dotspan.textContent += '.';
}
}, 400);
}
</script>
''')
def emit_book_selected(self, book):
self.book_selected.emit(book, self.caches)
def start(self, title=None, authors=None, identifiers={}):
self.log.clear()
self.log('Starting download')
parts, simple_desc = [], ''
if title:
parts.append('title:'+title)
simple_desc += _('Title: %s ') % title
if authors:
parts.append('authors:'+authors_to_string(authors))
simple_desc += _('Authors: %s ') % authors_to_string(authors)
if identifiers:
x = ', '.join('%s:%s'%(k, v) for k, v in identifiers.iteritems())
parts.append(x)
if 'isbn' in identifiers:
simple_desc += ' ISBN: %s' % identifiers['isbn']
self.query.setText(simple_desc)
self.log(unicode(self.query.text()))
self.worker = IdentifyWorker(self.log, self.abort, title,
authors, identifiers, self.caches)
self.worker.start()
QTimer.singleShot(50, self.update)
def update(self):
if self.worker.is_alive():
QTimer.singleShot(50, self.update)
else:
self.process_results()
def process_results(self):
if self.worker.error is not None:
error_dialog(self, _('Download failed'),
_('Failed to download metadata. Click '
'Show Details to see details'),
show=True, det_msg=self.worker.error)
self.rejected.emit()
return
if not self.worker.results:
log = ''.join(self.log.plain_text)
error_dialog(self, _('No matches found'), '<p>' +
_('Failed to find any books that '
'match your search. Try making the search <b>less '
'specific</b>. For example, use only the author\'s '
'last name and a single distinctive word from '
'the title.<p>To see the full log, click Show Details.'),
show=True, det_msg=log)
self.rejected.emit()
return
self.results_view.show_results(self.worker.results)
self.results_found.emit()
def cancel(self):
self.abort.set()
# }}}
class CoverWorker(Thread): # {{{
def __init__(self, log, abort, title, authors, identifiers, caches):
Thread.__init__(self)
self.daemon = True
self.log, self.abort = log, abort
self.title, self.authors, self.identifiers = (title, authors,
identifiers)
self.caches = caches
self.rq = Queue()
self.error = None
def fake_run(self):
images = ['donate.png', 'config.png', 'column.png', 'eject.png', ]
time.sleep(2)
for pl, im in zip(metadata_plugins(['cover']), images):
self.rq.put((pl.name, 1, 1, 'png', I(im, data=True)))
def run(self):
try:
if DEBUG_DIALOG:
self.fake_run()
else:
self.run_fork()
except WorkerError as e:
self.error = force_unicode(e.orig_tb)
except:
import traceback
self.error = force_unicode(traceback.format_exc())
def run_fork(self):
with TemporaryDirectory('_single_metadata_download') as tdir:
self.keep_going = True
t = Thread(target=self.monitor_tdir, args=(tdir,))
t.daemon = True
t.start()
try:
res = fork_job('calibre.ebooks.metadata.sources.worker',
'single_covers',
(self.title, self.authors, self.identifiers, self.caches,
tdir),
no_output=True, abort=self.abort)
self.log.append_dump(res['result'])
finally:
self.keep_going = False
t.join()
def scan_once(self, tdir, seen):
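# The forked download process writes each cover to a file named
# '<plugin>,,<width>,,<height>,,<fmt>.cover' and then creates a matching
# '<name>.cover.done' marker; only covers whose marker exists are consumed.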
for x in list(os.listdir(tdir)):
if x in seen:
continue
if x.endswith('.cover') and os.path.exists(os.path.join(tdir,
x+'.done')):
name = x.rpartition('.')[0]
try:
plugin_name, width, height, fmt = name.split(',,')
width, height = int(width), int(height)
with open(os.path.join(tdir, x), 'rb') as f:
data = f.read()
except:
import traceback
traceback.print_exc()
else:
seen.add(x)
self.rq.put((plugin_name, width, height, fmt, data))
def monitor_tdir(self, tdir):
seen = set()
while self.keep_going:
time.sleep(1)
self.scan_once(tdir, seen)
# One last scan after the download process has ended
self.scan_once(tdir, seen)
# }}}
class CoversModel(QAbstractListModel): # {{{
def __init__(self, current_cover, parent=None):
QAbstractListModel.__init__(self, parent)
if current_cover is None:
current_cover = QPixmap(I('default_cover.png'))
self.blank = QPixmap(I('blank.png')).scaled(150, 200)
self.cc = current_cover
self.reset_covers(do_reset=False)
def reset_covers(self, do_reset=True):
self.covers = [self.get_item(_('Current cover'), self.cc)]
self.plugin_map = {}
for i, plugin in enumerate(metadata_plugins(['cover'])):
self.covers.append((plugin.name+'\n'+_('Searching...'),
self.blank, None, True))
self.plugin_map[plugin] = [i+1]
if do_reset:
self.beginResetModel(), self.endResetModel()
def get_item(self, src, pmap, waiting=False):
sz = '%dx%d'%(pmap.width(), pmap.height())
text = (src + '\n' + sz)
scaled = pmap.scaled(150, 200, Qt.IgnoreAspectRatio,
Qt.SmoothTransformation)
return (text, scaled, pmap, waiting)
def rowCount(self, parent=None):
return len(self.covers)
def data(self, index, role):
try:
text, pmap, cover, waiting = self.covers[index.row()]
except:
return None
if role == Qt.DecorationRole:
return pmap
if role == Qt.DisplayRole:
return text
if role == Qt.UserRole:
return waiting
return None
def plugin_for_index(self, index):
row = index.row() if hasattr(index, 'row') else index
for k, v in self.plugin_map.iteritems():
if row in v:
return k
def clear_failed(self):
# Remove entries that are still waiting
good = []
pmap = {}
def keygen(x):
pmap = x[2]
if pmap is None:
return 1
return pmap.width()*pmap.height()
dcovers = sorted(self.covers[1:], key=keygen, reverse=True)
cmap = {i:self.plugin_for_index(i) for i in xrange(len(self.covers))}
for i, x in enumerate(self.covers[0:1] + dcovers):
if not x[-1]:
good.append(x)
plugin = cmap[i]
if plugin is not None:
try:
pmap[plugin].append(len(good) - 1)
except KeyError:
pmap[plugin] = [len(good)-1]
self.covers = good
self.plugin_map = pmap
self.beginResetModel(), self.endResetModel()
def pointer_from_index(self, index):
row = index.row() if hasattr(index, 'row') else index
try:
return self.covers[row][2]
except IndexError:
pass
def index_from_pointer(self, pointer):
for r, (text, scaled, pmap, waiting) in enumerate(self.covers):
if pointer == pmap:
return self.index(r)
return self.index(0)
def update_result(self, plugin_name, width, height, data):
if plugin_name.endswith('}'):
# multi cover plugin
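# (Multi-cover sources report each result as 'PluginName{N}'; the
# suffix is stripped below to recover the plugin name.)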
plugin_name = plugin_name.partition('{')[0]
plugin = [plugin for plugin in self.plugin_map if plugin.name == plugin_name]
if not plugin:
return
plugin = plugin[0]
last_row = max(self.plugin_map[plugin])
pmap = QPixmap()
pmap.loadFromData(data)
if pmap.isNull():
return
self.beginInsertRows(QModelIndex(), last_row, last_row)
for rows in self.plugin_map.itervalues():
for i in xrange(len(rows)):
if rows[i] >= last_row:
rows[i] += 1
self.plugin_map[plugin].insert(-1, last_row)
self.covers.insert(last_row, self.get_item(plugin_name, pmap, waiting=False))
self.endInsertRows()
else:
# single cover plugin
idx = None
for plugin, rows in self.plugin_map.iteritems():
if plugin.name == plugin_name:
idx = rows[0]
break
if idx is None:
return
pmap = QPixmap()
pmap.loadFromData(data)
if pmap.isNull():
return
self.covers[idx] = self.get_item(plugin_name, pmap, waiting=False)
self.dataChanged.emit(self.index(idx), self.index(idx))
def cover_pixmap(self, index):
row = index.row()
if row > 0 and row < len(self.covers):
pmap = self.covers[row][2]
if pmap is not None and not pmap.isNull():
return pmap
# }}}
class CoversView(QListView): # {{{
chosen = pyqtSignal()
def __init__(self, current_cover, parent=None):
QListView.__init__(self, parent)
self.m = CoversModel(current_cover, self)
self.setModel(self.m)
self.setFlow(self.LeftToRight)
self.setWrapping(True)
self.setResizeMode(self.Adjust)
self.setGridSize(QSize(190, 260))
self.setIconSize(QSize(150, 200))
self.setSelectionMode(self.SingleSelection)
self.setViewMode(self.IconMode)
self.delegate = CoverDelegate(self)
self.setItemDelegate(self.delegate)
self.delegate.needs_redraw.connect(self.viewport().update,
type=Qt.QueuedConnection)
self.doubleClicked.connect(self.chosen, type=Qt.QueuedConnection)
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.show_context_menu)
def select(self, num):
current = self.model().index(num)
sm = self.selectionModel()
sm.select(current, sm.SelectCurrent)
def start(self):
self.select(0)
self.delegate.start_animation()
def reset_covers(self):
self.m.reset_covers()
def clear_failed(self):
pointer = self.m.pointer_from_index(self.currentIndex())
self.m.clear_failed()
if pointer is None:
self.select(0)
else:
self.select(self.m.index_from_pointer(pointer).row())
def show_context_menu(self, point):
idx = self.currentIndex()
if idx and idx.isValid() and not idx.data(Qt.UserRole):
m = QMenu()
m.addAction(QIcon(I('view.png')), _('View this cover at full size'), self.show_cover)
m.addAction(QIcon(I('edit-copy.png')), _('Copy this cover to clipboard'), self.copy_cover)
m.exec_(QCursor.pos())
def show_cover(self):
idx = self.currentIndex()
pmap = self.model().cover_pixmap(idx)
if pmap is None and idx.row() == 0:
pmap = self.model().cc
if pmap is not None:
from calibre.gui2.viewer.image_popup import ImageView
d = ImageView(self, pmap, unicode(idx.data(Qt.DisplayRole) or ''), geom_name='metadata_download_cover_popup_geom')
d(use_exec=True)
def copy_cover(self):
idx = self.currentIndex()
pmap = self.model().cover_pixmap(idx)
if pmap is None and idx.row() == 0:
pmap = self.model().cc
if pmap is not None:
QApplication.clipboard().setPixmap(pmap)
def keyPressEvent(self, ev):
if ev.key() in (Qt.Key_Enter, Qt.Key_Return):
self.chosen.emit()
ev.accept()
return
return QListView.keyPressEvent(self, ev)
# }}}
class CoversWidget(QWidget): # {{{
chosen = pyqtSignal()
finished = pyqtSignal()
def __init__(self, log, current_cover, parent=None):
QWidget.__init__(self, parent)
self.log = log
self.abort = Event()
self.l = l = QGridLayout()
self.setLayout(l)
self.msg = QLabel()
self.msg.setWordWrap(True)
l.addWidget(self.msg, 0, 0)
self.covers_view = CoversView(current_cover, self)
self.covers_view.chosen.connect(self.chosen)
l.addWidget(self.covers_view, 1, 0)
self.continue_processing = True
def reset_covers(self):
self.covers_view.reset_covers()
def start(self, book, current_cover, title, authors, caches):
self.continue_processing = True
self.abort.clear()
self.book, self.current_cover = book, current_cover
self.title, self.authors = title, authors
self.log('Starting cover download for:', book.title)
self.log('Query:', title, authors, self.book.identifiers)
self.msg.setText('<p>'+
_('Downloading covers for <b>%s</b>, please wait...')%book.title)
self.covers_view.start()
self.worker = CoverWorker(self.log, self.abort, self.title,
self.authors, book.identifiers, caches)
self.worker.start()
QTimer.singleShot(50, self.check)
self.covers_view.setFocus(Qt.OtherFocusReason)
def check(self):
if self.worker.is_alive() and not self.abort.is_set():
QTimer.singleShot(50, self.check)
try:
self.process_result(self.worker.rq.get_nowait())
except Empty:
pass
else:
self.process_results()
def process_results(self):
while self.continue_processing:
try:
self.process_result(self.worker.rq.get_nowait())
except Empty:
break
if self.continue_processing:
self.covers_view.clear_failed()
if self.worker.error is not None:
error_dialog(self, _('Download failed'),
_('Failed to download any covers, click'
' "Show details" for details.'),
det_msg=self.worker.error, show=True)
num = self.covers_view.model().rowCount()
if num < 2:
txt = _('Could not find any covers for <b>%s</b>')%self.book.title
else:
txt = _('Found <b>%(num)d</b> possible covers for %(title)s. '
'When the download completes, the covers will be sorted by size.')%dict(num=num-1,
title=self.title)
self.msg.setText(txt)
self.msg.setWordWrap(True)
self.finished.emit()
def process_result(self, result):
if not self.continue_processing:
return
plugin_name, width, height, fmt, data = result
self.covers_view.model().update_result(plugin_name, width, height, data)
def cleanup(self):
self.covers_view.delegate.stop_animation()
self.continue_processing = False
def cancel(self):
self.cleanup()
self.abort.set()
def cover_pixmap(self):
idx = None
for i in self.covers_view.selectionModel().selectedIndexes():
if i.isValid():
idx = i
break
if idx is None:
idx = self.covers_view.currentIndex()
return self.covers_view.model().cover_pixmap(idx)
# }}}
class LogViewer(QDialog): # {{{
def __init__(self, log, parent=None):
QDialog.__init__(self, parent)
self.log = log
self.l = l = QVBoxLayout()
self.setLayout(l)
self.tb = QTextBrowser(self)
l.addWidget(self.tb)
self.bb = QDialogButtonBox(QDialogButtonBox.Close)
l.addWidget(self.bb)
self.copy_button = self.bb.addButton(_('Copy to clipboard'),
self.bb.ActionRole)
self.copy_button.clicked.connect(self.copy_to_clipboard)
self.copy_button.setIcon(QIcon(I('edit-copy.png')))
self.bb.rejected.connect(self.reject)
self.bb.accepted.connect(self.accept)
self.setWindowTitle(_('Download log'))
self.setWindowIcon(QIcon(I('debug.png')))
self.resize(QSize(800, 400))
self.keep_updating = True
self.last_html = None
self.finished.connect(self.stop)
QTimer.singleShot(100, self.update_log)
self.show()
def copy_to_clipboard(self):
QApplication.clipboard().setText(''.join(self.log.plain_text))
def stop(self, *args):
self.keep_updating = False
def update_log(self):
if not self.keep_updating:
return
html = self.log.html
if html != self.last_html:
self.last_html = html
self.tb.setHtml('<pre style="font-family:monospace">%s</pre>'%html)
QTimer.singleShot(1000, self.update_log)
# }}}
class FullFetch(QDialog): # {{{
def __init__(self, current_cover=None, parent=None):
QDialog.__init__(self, parent)
self.current_cover = current_cover
self.log = Log()
self.book = self.cover_pixmap = None
self.setWindowTitle(_('Downloading metadata...'))
self.setWindowIcon(QIcon(I('download-metadata.png')))
self.stack = QStackedWidget()
self.l = l = QVBoxLayout()
self.setLayout(l)
l.addWidget(self.stack)
self.bb = QDialogButtonBox(QDialogButtonBox.Cancel|QDialogButtonBox.Ok)
self.h = h = QHBoxLayout()
l.addLayout(h)
self.bb.rejected.connect(self.reject)
self.bb.accepted.connect(self.accept)
self.ok_button = self.bb.button(self.bb.Ok)
self.ok_button.setEnabled(False)
self.ok_button.clicked.connect(self.ok_clicked)
self.prev_button = pb = QPushButton(QIcon(I('back.png')), _('&Back'), self)
pb.clicked.connect(self.back_clicked)
pb.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
self.log_button = self.bb.addButton(_('View log'), self.bb.ActionRole)
self.log_button.clicked.connect(self.view_log)
self.log_button.setIcon(QIcon(I('debug.png')))
self.prev_button.setVisible(False)
h.addWidget(self.prev_button), h.addWidget(self.bb)
self.identify_widget = IdentifyWidget(self.log, self)
self.identify_widget.rejected.connect(self.reject)
self.identify_widget.results_found.connect(self.identify_results_found)
self.identify_widget.book_selected.connect(self.book_selected)
self.stack.addWidget(self.identify_widget)
self.covers_widget = CoversWidget(self.log, self.current_cover, parent=self)
self.covers_widget.chosen.connect(self.ok_clicked)
self.stack.addWidget(self.covers_widget)
self.resize(850, 600)
geom = gprefs.get('metadata_single_gui_geom', None)
if geom is not None and geom:
self.restoreGeometry(geom)
self.finished.connect(self.cleanup)
def view_log(self):
self._lv = LogViewer(self.log, self)
def book_selected(self, book, caches):
self.prev_button.setVisible(True)
self.book = book
self.stack.setCurrentIndex(1)
self.log('\n\n')
self.covers_widget.start(book, self.current_cover,
self.title, self.authors, caches)
self.ok_button.setFocus()
def back_clicked(self):
self.prev_button.setVisible(False)
self.stack.setCurrentIndex(0)
self.covers_widget.cancel()
self.covers_widget.reset_covers()
def accept(self):
# Prevent the usual dialog accept mechanisms from working
gprefs['metadata_single_gui_geom'] = bytearray(self.saveGeometry())
if DEBUG_DIALOG:
if self.stack.currentIndex() == 2:
return QDialog.accept(self)
else:
if self.stack.currentIndex() == 1:
return QDialog.accept(self)
def reject(self):
gprefs['metadata_single_gui_geom'] = bytearray(self.saveGeometry())
self.identify_widget.cancel()
self.covers_widget.cancel()
return QDialog.reject(self)
def cleanup(self):
self.covers_widget.cleanup()
def identify_results_found(self):
self.ok_button.setEnabled(True)
def next_clicked(self, *args):
gprefs['metadata_single_gui_geom'] = bytearray(self.saveGeometry())
self.identify_widget.get_result()
def ok_clicked(self, *args):
self.cover_pixmap = self.covers_widget.cover_pixmap()
if self.stack.currentIndex() == 0:
self.next_clicked()
return
if DEBUG_DIALOG:
if self.cover_pixmap is not None:
self.w = QLabel()
self.w.setPixmap(self.cover_pixmap)
self.stack.addWidget(self.w)
self.stack.setCurrentIndex(2)
else:
QDialog.accept(self)
def start(self, title=None, authors=None, identifiers={}):
self.title, self.authors = title, authors
self.identify_widget.start(title=title, authors=authors,
identifiers=identifiers)
return self.exec_()
# }}}
class CoverFetch(QDialog): # {{{
def __init__(self, current_cover=None, parent=None):
QDialog.__init__(self, parent)
self.current_cover = current_cover
self.log = Log()
self.cover_pixmap = None
self.setWindowTitle(_('Downloading cover...'))
self.setWindowIcon(QIcon(I('book.png')))
self.l = l = QVBoxLayout()
self.setLayout(l)
self.covers_widget = CoversWidget(self.log, self.current_cover, parent=self)
self.covers_widget.chosen.connect(self.accept)
l.addWidget(self.covers_widget)
self.resize(850, 600)
self.finished.connect(self.cleanup)
self.bb = QDialogButtonBox(QDialogButtonBox.Cancel|QDialogButtonBox.Ok)
l.addWidget(self.bb)
self.log_button = self.bb.addButton(_('View log'), self.bb.ActionRole)
self.log_button.clicked.connect(self.view_log)
self.log_button.setIcon(QIcon(I('debug.png')))
self.bb.rejected.connect(self.reject)
self.bb.accepted.connect(self.accept)
geom = gprefs.get('single-cover-fetch-dialog-geometry', None)
if geom is not None:
self.restoreGeometry(geom)
def cleanup(self):
self.covers_widget.cleanup()
def reject(self):
gprefs.set('single-cover-fetch-dialog-geometry', bytearray(self.saveGeometry()))
self.covers_widget.cancel()
return QDialog.reject(self)
def accept(self, *args):
gprefs.set('single-cover-fetch-dialog-geometry', bytearray(self.saveGeometry()))
self.cover_pixmap = self.covers_widget.cover_pixmap()
QDialog.accept(self)
def start(self, title, authors, identifiers):
book = Metadata(title, authors)
book.identifiers = identifiers
self.covers_widget.start(book, self.current_cover,
title, authors, {})
return self.exec_()
def view_log(self):
self._lv = LogViewer(self.log, self)
# }}}
if __name__ == '__main__':
DEBUG_DIALOG = True
app = QApplication([])
d = FullFetch()
d.start(title='great gatsby', authors=['fitzgerald'])
|
gpl-3.0
|
aenon/OnlineJudge
|
leetcode/5.BitManipulation/477.TotalHammingDistance.py
|
1
|
1100
|
# 477. Total Hamming Distance
# The Hamming distance between two integers is the number of positions at which the corresponding bits are different.
# Now your job is to find the total Hamming distance between all pairs of the given numbers.
# Example:
# Input: 4, 14, 2
# Output: 6
# Explanation: In binary representation, the 4 is 0100, 14 is 1110, and 2 is 0010 (just
# showing the four bits relevant in this case). So the answer will be:
# HammingDistance(4, 14) + HammingDistance(4, 2) + HammingDistance(14, 2) = 2 + 2 + 2 = 6.
# Note:
# Elements of the given array are in the range of 0 to 10^9
# Length of the array will not exceed 10^4.
class Solution(object):
def totalHammingDistance(self, nums):
"""
:type nums: List[int]
:rtype: int
Loop over the 32 bit positions: at each position, count how many
numbers have a 0 and how many have a 1 there. Each (0, 1) pair
differs at that position, so it contributes count0 * count1 to the
total.
"""
result = 0
for i in xrange(32):
counts = [0] * 2 # the number of 0's and 1's in the ith digit
for number in nums:
counts[number>>i & 1] += 1
result += counts[0] * counts[1]
return result
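# Minimal sanity check (not part of the original solution); mirrors the
# worked example above and should print 6.
if __name__ == '__main__':
    print(Solution().totalHammingDistance([4, 14, 2]))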
|
mit
|
M4sse/chromium.src
|
tools/memory_inspector/memory_inspector/data/file_storage_unittest.py
|
89
|
5331
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This unittest covers both file_storage and serialization modules."""
import os
import tempfile
import time
import unittest
from memory_inspector.core import memory_map
from memory_inspector.core import native_heap
from memory_inspector.core import stacktrace
from memory_inspector.core import symbol
from memory_inspector.data import file_storage
class FileStorageTest(unittest.TestCase):
def setUp(self):
self._storage_path = tempfile.mkdtemp()
self._storage = file_storage.Storage(self._storage_path)
def tearDown(self):
os.removedirs(self._storage_path)
def testSettings(self):
settings_1 = { 'foo' : 1, 'bar' : 2 }
settings_2 = { 'foo' : 1, 'bar' : 2 }
self._storage.StoreSettings('one', settings_1)
self._storage.StoreSettings('two', settings_2)
self._DeepCompare(settings_1, self._storage.LoadSettings('one'))
self._DeepCompare(settings_2, self._storage.LoadSettings('two'))
self._storage.StoreSettings('one', {})
self._storage.StoreSettings('two', {})
def testArchives(self):
self._storage.OpenArchive('foo', create=True)
self._storage.OpenArchive('bar', create=True)
self._storage.OpenArchive('baz', create=True)
self._storage.DeleteArchive('bar')
self.assertTrue('foo' in self._storage.ListArchives())
self.assertFalse('bar' in self._storage.ListArchives())
self.assertTrue('baz' in self._storage.ListArchives())
self._storage.DeleteArchive('foo')
self._storage.DeleteArchive('baz')
def testSnapshots(self):
archive = self._storage.OpenArchive('snapshots', create=True)
t1 = archive.StartNewSnapshot()
archive.StoreMemMaps(memory_map.Map())
time.sleep(0.01) # Max snapshot resolution is in the order of usecs.
t2 = archive.StartNewSnapshot()
archive.StoreMemMaps(memory_map.Map())
archive.StoreNativeHeap(native_heap.NativeHeap())
self.assertIn(t1, archive.ListSnapshots())
self.assertIn(t2, archive.ListSnapshots())
self.assertTrue(archive.HasMemMaps(t1))
self.assertFalse(archive.HasNativeHeap(t1))
self.assertTrue(archive.HasMemMaps(t2))
self.assertTrue(archive.HasNativeHeap(t2))
self._storage.DeleteArchive('snapshots')
def testMmap(self):
archive = self._storage.OpenArchive('mmap', create=True)
timestamp = archive.StartNewSnapshot()
mmap = memory_map.Map()
map_entry1 = memory_map.MapEntry(4096, 8191, 'rw--', '/foo', 0)
map_entry2 = memory_map.MapEntry(65536, 81919, 'rw--', '/bar', 4096)
map_entry2.resident_pages = [5]
mmap.Add(map_entry1)
mmap.Add(map_entry2)
archive.StoreMemMaps(mmap)
mmap_deser = archive.LoadMemMaps(timestamp)
self._DeepCompare(mmap, mmap_deser)
self._storage.DeleteArchive('mmap')
def testNativeHeap(self):
archive = self._storage.OpenArchive('nheap', create=True)
timestamp = archive.StartNewSnapshot()
nh = native_heap.NativeHeap()
for i in xrange(1, 4):
stack_trace = stacktrace.Stacktrace()
frame = nh.GetStackFrame(i * 10 + 1)
frame.SetExecFileInfo('foo.so', 1)
stack_trace.Add(frame)
frame = nh.GetStackFrame(i * 10 + 2)
frame.SetExecFileInfo('bar.so', 2)
stack_trace.Add(frame)
nh.Add(native_heap.Allocation(size=i * 10,
stack_trace=stack_trace,
start=i * 20,
flags=i * 30))
archive.StoreNativeHeap(nh)
nh_deser = archive.LoadNativeHeap(timestamp)
self._DeepCompare(nh, nh_deser)
self._storage.DeleteArchive('nheap')
def testSymbols(self):
archive = self._storage.OpenArchive('symbols', create=True)
symbols = symbol.Symbols()
# Symbol db is global per archive, no need to StartNewSnapshot.
symbols.Add('foo.so', 1, symbol.Symbol('sym1', 'file1.c', 11))
symbols.Add('bar.so', 2, symbol.Symbol('sym2', 'file2.c', 12))
sym3 = symbol.Symbol('sym3', 'file2.c', 13)
sym3.AddSourceLineInfo('outer_file.c', 23)
symbols.Add('baz.so', 3, sym3)
archive.StoreSymbols(symbols)
symbols_deser = archive.LoadSymbols()
self._DeepCompare(symbols, symbols_deser)
self._storage.DeleteArchive('symbols')
def _DeepCompare(self, a, b, prefix=''):
"""Recursively compares two objects (original and deserialized)."""
self.assertEqual(a is None, b is None)
if a is None:
return
_BASICTYPES = (long, int, basestring, float)
if isinstance(a, _BASICTYPES) and isinstance(b, _BASICTYPES):
return self.assertEqual(a, b, prefix)
self.assertEqual(type(a), type(b), prefix + ' type (%s vs %s)' % (
type(a), type(b)))
if isinstance(a, list):
self.assertEqual(len(a), len(b), prefix + ' len (%d vs %d)' % (
len(a), len(b)))
for i in range(len(a)):
self._DeepCompare(a[i], b[i], prefix + '[%d]' % i)
return
if isinstance(a, dict):
self.assertEqual(a.keys(), b.keys(), prefix + ' keys (%s vs %s)' % (
str(a.keys()), str(b.keys())))
for k in a.iterkeys():
self._DeepCompare(a[k], b[k], prefix + '.' + str(k))
return
return self._DeepCompare(a.__dict__, b.__dict__, prefix)
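# Standalone entry point (a convenience addition; the original file
# relies on an external test runner):
if __name__ == '__main__':
    unittest.main()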
|
bsd-3-clause
|
yongshengwang/builthue
|
desktop/core/ext-py/tablib-develop/tablib/packages/xlwt3/Worksheet.py
|
46
|
43958
|
'''
BOF
UNCALCED
INDEX
Calculation Settings Block
PRINTHEADERS
PRINTGRIDLINES
GRIDSET
GUTS
DEFAULTROWHEIGHT
WSBOOL
Page Settings Block
Worksheet Protection Block
DEFCOLWIDTH
COLINFO
SORT
DIMENSIONS
Row Blocks
WINDOW2
SCL
PANE
SELECTION
STANDARDWIDTH
MERGEDCELLS
LABELRANGES
PHONETIC
Conditional Formatting Table
Hyperlink Table
Data Validity Table
SHEETLAYOUT (BIFF8X only)
SHEETPROTECTION (BIFF8X only)
RANGEPROTECTION (BIFF8X only)
EOF
'''
from . import BIFFRecords
from . import Bitmap
from . import Formatting
from . import Style
import tempfile
class Worksheet(object):
from .Workbook import Workbook
#################################################################
## Constructor
#################################################################
def __init__(self, sheetname, parent_book, cell_overwrite_ok=False):
from . import Row
self.Row = Row #(to_py3): Row.Row -> Row
from . import Column
self.Column = Column #(to_py3): Column.Column -> Column
self.__name = sheetname
self.__parent = parent_book
self._cell_overwrite_ok = cell_overwrite_ok
self.__rows = {}
self.__cols = {}
self.__merged_ranges = []
self.__bmp_rec = b''
self.__show_formulas = 0
self.__show_grid = 1
self.__show_headers = 1
self.__panes_frozen = 0
### self.__show_empty_as_zero = 1 ### deprecated with extreme prejudice 2009-05-19
self.show_zero_values = 1
self.__auto_colour_grid = 1
self.__cols_right_to_left = 0
self.__show_outline = 1
self.__remove_splits = 0
self.__selected = 0
# RED HERRING ALERT: "sheet_visible" is a clone of the "selected" attribute.
# Typically a workbook created by the Excel UI will have one sheet
# (the sheet that was selected when the user saved it)
# with both bits set to 1, and all other sheets will have both
# bits set to 0. The true visibility of the sheet is found in the "visibility"
# attribute obtained from the BOUNDSHEET record.
self.__sheet_visible = 0
self.__page_preview = 0
self.__first_visible_row = 0
self.__first_visible_col = 0
self.__grid_colour = 0x40
self.__preview_magn = 60 # percent
self.__normal_magn = 100 # percent
self.visibility = 0 # from/to BOUNDSHEET record.
self.__vert_split_pos = None
self.__horz_split_pos = None
self.__vert_split_first_visible = None
self.__horz_split_first_visible = None
self.__split_active_pane = None
self.__row_gut_width = 0
self.__col_gut_height = 0
self.__show_auto_page_breaks = 1
self.__dialogue_sheet = 0
self.__auto_style_outline = 0
self.__outline_below = 0
self.__outline_right = 0
self.__fit_num_pages = 0
self.__show_row_outline = 1
self.__show_col_outline = 1
self.__alt_expr_eval = 0
self.__alt_formula_entries = 0
self.__row_default_height = 0x00FF
self.row_default_height_mismatch = 0
self.row_default_hidden = 0
self.row_default_space_above = 0
self.row_default_space_below = 0
self.__col_default_width = 0x0008
self.__calc_mode = 1
self.__calc_count = 0x0064
self.__RC_ref_mode = 1
self.__iterations_on = 0
self.__delta = 0.001
self.__save_recalc = 0
self.__print_headers = 0
self.__print_grid = 0
self.__grid_set = 1
self.__vert_page_breaks = []
self.__horz_page_breaks = []
self.__header_str = '&P'
self.__footer_str = '&F'
self.__print_centered_vert = 0
self.__print_centered_horz = 1
self.__left_margin = 0.3 #0.5
self.__right_margin = 0.3 #0.5
self.__top_margin = 0.61 #1.0
self.__bottom_margin = 0.37 #1.0
self.__paper_size_code = 9 # A4
self.__print_scaling = 100
self.__start_page_number = 1
self.__fit_width_to_pages = 1
self.__fit_height_to_pages = 1
self.__print_in_rows = 1
self.__portrait = 1
self.__print_not_colour = 0
self.__print_draft = 0
self.__print_notes = 0
self.__print_notes_at_end = 0
self.__print_omit_errors = 0
self.__print_hres = 0x012C # 300 dpi
self.__print_vres = 0x012C # 300 dpi
self.__header_margin = 0.1
self.__footer_margin = 0.1
self.__copies_num = 1
self.__wnd_protect = 0
self.__obj_protect = 0
self.__protect = 0
self.__scen_protect = 0
self.__password = b''
self.last_used_row = 0
self.first_used_row = 65535
self.last_used_col = 0
self.first_used_col = 255
self.row_tempfile = None
self.__flushed_rows = {}
self.__row_visible_levels = 0
#################################################################
## Properties, "getters", "setters"
#################################################################
def set_name(self, value):
self.__name = value
def get_name(self):
return self.__name
name = property(get_name, set_name)
#################################################################
def get_parent(self):
return self.__parent
parent = property(get_parent)
#################################################################
def get_rows(self):
return self.__rows
rows = property(get_rows)
#################################################################
def get_cols(self):
return self.__cols
cols = property(get_cols)
#################################################################
def get_merged_ranges(self):
return self.__merged_ranges
merged_ranges = property(get_merged_ranges)
#################################################################
def get_bmp_rec(self):
return self.__bmp_rec
bmp_rec = property(get_bmp_rec)
#################################################################
def set_show_formulas(self, value):
self.__show_formulas = int(value)
def get_show_formulas(self):
return bool(self.__show_formulas)
show_formulas = property(get_show_formulas, set_show_formulas)
#################################################################
def set_show_grid(self, value):
self.__show_grid = int(value)
def get_show_grid(self):
return bool(self.__show_grid)
show_grid = property(get_show_grid, set_show_grid)
#################################################################
def set_show_headers(self, value):
self.__show_headers = int(value)
def get_show_headers(self):
return bool(self.__show_headers)
show_headers = property(get_show_headers, set_show_headers)
#################################################################
def set_panes_frozen(self, value):
self.__panes_frozen = int(value)
def get_panes_frozen(self):
return bool(self.__panes_frozen)
panes_frozen = property(get_panes_frozen, set_panes_frozen)
#################################################################
### def set_show_empty_as_zero(self, value):
### self.__show_empty_as_zero = int(value)
### def get_show_empty_as_zero(self):
### return bool(self.__show_empty_as_zero)
### show_empty_as_zero = property(get_show_empty_as_zero, set_show_empty_as_zero)
#################################################################
def set_auto_colour_grid(self, value):
self.__auto_colour_grid = int(value)
def get_auto_colour_grid(self):
return bool(self.__auto_colour_grid)
auto_colour_grid = property(get_auto_colour_grid, set_auto_colour_grid)
#################################################################
def set_cols_right_to_left(self, value):
self.__cols_right_to_left = int(value)
def get_cols_right_to_left(self):
return bool(self.__cols_right_to_left)
cols_right_to_left = property(get_cols_right_to_left, set_cols_right_to_left)
#################################################################
def set_show_outline(self, value):
self.__show_outline = int(value)
def get_show_outline(self):
return bool(self.__show_outline)
show_outline = property(get_show_outline, set_show_outline)
#################################################################
def set_remove_splits(self, value):
self.__remove_splits = int(value)
def get_remove_splits(self):
return bool(self.__remove_splits)
remove_splits = property(get_remove_splits, set_remove_splits)
#################################################################
def set_selected(self, value):
self.__selected = int(value)
def get_selected(self):
return bool(self.__selected)
selected = property(get_selected, set_selected)
#################################################################
def set_sheet_visible(self, value):
self.__sheet_visible = int(value)
def get_sheet_visible(self):
return bool(self.__sheet_visible)
sheet_visible = property(get_sheet_visible, set_sheet_visible)
#################################################################
def set_page_preview(self, value):
self.__page_preview = int(value)
def get_page_preview(self):
return bool(self.__page_preview)
page_preview = property(get_page_preview, set_page_preview)
#################################################################
def set_first_visible_row(self, value):
self.__first_visible_row = value
def get_first_visible_row(self):
return self.__first_visible_row
first_visible_row = property(get_first_visible_row, set_first_visible_row)
#################################################################
def set_first_visible_col(self, value):
self.__first_visible_col = value
def get_first_visible_col(self):
return self.__first_visible_col
first_visible_col = property(get_first_visible_col, set_first_visible_col)
#################################################################
def set_grid_colour(self, value):
self.__grid_colour = value
def get_grid_colour(self):
return self.__grid_colour
grid_colour = property(get_grid_colour, set_grid_colour)
#################################################################
def set_preview_magn(self, value):
self.__preview_magn = value
def get_preview_magn(self):
return self.__preview_magn
preview_magn = property(get_preview_magn, set_preview_magn)
#################################################################
def set_normal_magn(self, value):
self.__normal_magn = value
def get_normal_magn(self):
return self.__normal_magn
normal_magn = property(get_normal_magn, set_normal_magn)
#################################################################
def set_vert_split_pos(self, value):
self.__vert_split_pos = abs(value)
def get_vert_split_pos(self):
return self.__vert_split_pos
vert_split_pos = property(get_vert_split_pos, set_vert_split_pos)
#################################################################
def set_horz_split_pos(self, value):
self.__horz_split_pos = abs(value)
def get_horz_split_pos(self):
return self.__horz_split_pos
horz_split_pos = property(get_horz_split_pos, set_horz_split_pos)
#################################################################
def set_vert_split_first_visible(self, value):
self.__vert_split_first_visible = abs(value)
def get_vert_split_first_visible(self):
return self.__vert_split_first_visible
vert_split_first_visible = property(get_vert_split_first_visible, set_vert_split_first_visible)
#################################################################
def set_horz_split_first_visible(self, value):
self.__horz_split_first_visible = abs(value)
def get_horz_split_first_visible(self):
return self.__horz_split_first_visible
horz_split_first_visible = property(get_horz_split_first_visible, set_horz_split_first_visible)
#################################################################
#def set_split_active_pane(self, value):
# self.__split_active_pane = abs(value) & 0x03
#
#def get_split_active_pane(self):
# return self.__split_active_pane
#
#split_active_pane = property(get_split_active_pane, set_split_active_pane)
#################################################################
#def set_row_gut_width(self, value):
# self.__row_gut_width = value
#
#def get_row_gut_width(self):
# return self.__row_gut_width
#
#row_gut_width = property(get_row_gut_width, set_row_gut_width)
#
#################################################################
#
#def set_col_gut_height(self, value):
# self.__col_gut_height = value
#
#def get_col_gut_height(self):
# return self.__col_gut_height
#
#col_gut_height = property(get_col_gut_height, set_col_gut_height)
#
#################################################################
def set_show_auto_page_breaks(self, value):
self.__show_auto_page_breaks = int(value)
def get_show_auto_page_breaks(self):
return bool(self.__show_auto_page_breaks)
show_auto_page_breaks = property(get_show_auto_page_breaks, set_show_auto_page_breaks)
#################################################################
def set_dialogue_sheet(self, value):
self.__dialogue_sheet = int(value)
def get_dialogue_sheet(self):
return bool(self.__dialogue_sheet)
dialogue_sheet = property(get_dialogue_sheet, set_dialogue_sheet)
#################################################################
def set_auto_style_outline(self, value):
self.__auto_style_outline = int(value)
def get_auto_style_outline(self):
return bool(self.__auto_style_outline)
auto_style_outline = property(get_auto_style_outline, set_auto_style_outline)
#################################################################
def set_outline_below(self, value):
self.__outline_below = int(value)
def get_outline_below(self):
return bool(self.__outline_below)
outline_below = property(get_outline_below, set_outline_below)
#################################################################
def set_outline_right(self, value):
self.__outline_right = int(value)
def get_outline_right(self):
return bool(self.__outline_right)
outline_right = property(get_outline_right, set_outline_right)
#################################################################
def set_fit_num_pages(self, value):
self.__fit_num_pages = value
def get_fit_num_pages(self):
return self.__fit_num_pages
fit_num_pages = property(get_fit_num_pages, set_fit_num_pages)
#################################################################
def set_show_row_outline(self, value):
self.__show_row_outline = int(value)
def get_show_row_outline(self):
return bool(self.__show_row_outline)
show_row_outline = property(get_show_row_outline, set_show_row_outline)
#################################################################
def set_show_col_outline(self, value):
self.__show_col_outline = int(value)
def get_show_col_outline(self):
return bool(self.__show_col_outline)
show_col_outline = property(get_show_col_outline, set_show_col_outline)
#################################################################
def set_alt_expr_eval(self, value):
self.__alt_expr_eval = int(value)
def get_alt_expr_eval(self):
return bool(self.__alt_expr_eval)
alt_expr_eval = property(get_alt_expr_eval, set_alt_expr_eval)
#################################################################
def set_alt_formula_entries(self, value):
self.__alt_formula_entries = int(value)
def get_alt_formula_entries(self):
return bool(self.__alt_formula_entries)
alt_formula_entries = property(get_alt_formula_entries, set_alt_formula_entries)
#################################################################
def set_row_default_height(self, value):
self.__row_default_height = value
def get_row_default_height(self):
return self.__row_default_height
row_default_height = property(get_row_default_height, set_row_default_height)
#################################################################
def set_col_default_width(self, value):
self.__col_default_width = value
def get_col_default_width(self):
return self.__col_default_width
col_default_width = property(get_col_default_width, set_col_default_width)
#################################################################
def set_calc_mode(self, value):
self.__calc_mode = value & 0x03
def get_calc_mode(self):
return self.__calc_mode
calc_mode = property(get_calc_mode, set_calc_mode)
#################################################################
def set_calc_count(self, value):
self.__calc_count = value
def get_calc_count(self):
return self.__calc_count
calc_count = property(get_calc_count, set_calc_count)
#################################################################
def set_RC_ref_mode(self, value):
self.__RC_ref_mode = int(value)
def get_RC_ref_mode(self):
return bool(self.__RC_ref_mode)
RC_ref_mode = property(get_RC_ref_mode, set_RC_ref_mode)
#################################################################
def set_iterations_on(self, value):
self.__iterations_on = int(value)
def get_iterations_on(self):
return bool(self.__iterations_on)
iterations_on = property(get_iterations_on, set_iterations_on)
#################################################################
def set_delta(self, value):
self.__delta = value
def get_delta(self):
return self.__delta
delta = property(get_delta, set_delta)
#################################################################
def set_save_recalc(self, value):
self.__save_recalc = int(value)
def get_save_recalc(self):
return bool(self.__save_recalc)
save_recalc = property(get_save_recalc, set_save_recalc)
#################################################################
def set_print_headers(self, value):
self.__print_headers = int(value)
def get_print_headers(self):
return bool(self.__print_headers)
print_headers = property(get_print_headers, set_print_headers)
#################################################################
def set_print_grid(self, value):
self.__print_grid = int(value)
def get_print_grid(self):
return bool(self.__print_grid)
print_grid = property(get_print_grid, set_print_grid)
#################################################################
#
#def set_grid_set(self, value):
# self.__grid_set = int(value)
#
#def get_grid_set(self):
# return bool(self.__grid_set)
#
#grid_set = property(get_grid_set, set_grid_set)
#
#################################################################
def set_vert_page_breaks(self, value):
self.__vert_page_breaks = value
def get_vert_page_breaks(self):
return self.__vert_page_breaks
vert_page_breaks = property(get_vert_page_breaks, set_vert_page_breaks)
#################################################################
def set_horz_page_breaks(self, value):
self.__horz_page_breaks = value
def get_horz_page_breaks(self):
return self.__horz_page_breaks
horz_page_breaks = property(get_horz_page_breaks, set_horz_page_breaks)
#################################################################
def set_header_str(self, value):
if isinstance(value, bytes):
value = str(value, self.__parent.encoding)
self.__header_str = value
def get_header_str(self):
return self.__header_str
header_str = property(get_header_str, set_header_str)
#################################################################
def set_footer_str(self, value):
if isinstance(value, bytes):
value = str(value, self.__parent.encoding)
self.__footer_str = value
def get_footer_str(self):
return self.__footer_str
footer_str = property(get_footer_str, set_footer_str)
#################################################################
def set_print_centered_vert(self, value):
self.__print_centered_vert = int(value)
def get_print_centered_vert(self):
return bool(self.__print_centered_vert)
print_centered_vert = property(get_print_centered_vert, set_print_centered_vert)
#################################################################
def set_print_centered_horz(self, value):
self.__print_centered_horz = int(value)
def get_print_centered_horz(self):
return bool(self.__print_centered_horz)
print_centered_horz = property(get_print_centered_horz, set_print_centered_horz)
#################################################################
def set_left_margin(self, value):
self.__left_margin = value
def get_left_margin(self):
return self.__left_margin
left_margin = property(get_left_margin, set_left_margin)
#################################################################
def set_right_margin(self, value):
self.__right_margin = value
def get_right_margin(self):
return self.__right_margin
right_margin = property(get_right_margin, set_right_margin)
#################################################################
def set_top_margin(self, value):
self.__top_margin = value
def get_top_margin(self):
return self.__top_margin
top_margin = property(get_top_margin, set_top_margin)
#################################################################
def set_bottom_margin(self, value):
self.__bottom_margin = value
def get_bottom_margin(self):
return self.__bottom_margin
bottom_margin = property(get_bottom_margin, set_bottom_margin)
#################################################################
def set_paper_size_code(self, value):
self.__paper_size_code = value
def get_paper_size_code(self):
return self.__paper_size_code
paper_size_code = property(get_paper_size_code, set_paper_size_code)
#################################################################
def set_print_scaling(self, value):
self.__print_scaling = value
def get_print_scaling(self):
return self.__print_scaling
print_scaling = property(get_print_scaling, set_print_scaling)
#################################################################
def set_start_page_number(self, value):
self.__start_page_number = value
def get_start_page_number(self):
return self.__start_page_number
start_page_number = property(get_start_page_number, set_start_page_number)
#################################################################
def set_fit_width_to_pages(self, value):
self.__fit_width_to_pages = value
def get_fit_width_to_pages(self):
return self.__fit_width_to_pages
fit_width_to_pages = property(get_fit_width_to_pages, set_fit_width_to_pages)
#################################################################
def set_fit_height_to_pages(self, value):
self.__fit_height_to_pages = value
def get_fit_height_to_pages(self):
return self.__fit_height_to_pages
fit_height_to_pages = property(get_fit_height_to_pages, set_fit_height_to_pages)
#################################################################
def set_print_in_rows(self, value):
self.__print_in_rows = int(value)
def get_print_in_rows(self):
return bool(self.__print_in_rows)
print_in_rows = property(get_print_in_rows, set_print_in_rows)
#################################################################
def set_portrait(self, value):
self.__portrait = int(value)
def get_portrait(self):
return bool(self.__portrait)
portrait = property(get_portrait, set_portrait)
#################################################################
def set_print_colour(self, value):
self.__print_not_colour = int(not value)
def get_print_colour(self):
return not bool(self.__print_not_colour)
print_colour = property(get_print_colour, set_print_colour)
#################################################################
def set_print_draft(self, value):
self.__print_draft = int(value)
def get_print_draft(self):
return bool(self.__print_draft)
print_draft = property(get_print_draft, set_print_draft)
#################################################################
def set_print_notes(self, value):
self.__print_notes = int(value)
def get_print_notes(self):
return bool(self.__print_notes)
print_notes = property(get_print_notes, set_print_notes)
#################################################################
def set_print_notes_at_end(self, value):
self.__print_notes_at_end = int(value)
def get_print_notes_at_end(self):
return bool(self.__print_notes_at_end)
print_notes_at_end = property(get_print_notes_at_end, set_print_notes_at_end)
#################################################################
def set_print_omit_errors(self, value):
self.__print_omit_errors = int(value)
def get_print_omit_errors(self):
return bool(self.__print_omit_errors)
print_omit_errors = property(get_print_omit_errors, set_print_omit_errors)
#################################################################
def set_print_hres(self, value):
self.__print_hres = value
def get_print_hres(self):
return self.__print_hres
print_hres = property(get_print_hres, set_print_hres)
#################################################################
def set_print_vres(self, value):
self.__print_vres = value
def get_print_vres(self):
return self.__print_vres
print_vres = property(get_print_vres, set_print_vres)
#################################################################
def set_header_margin(self, value):
self.__header_margin = value
def get_header_margin(self):
return self.__header_margin
header_margin = property(get_header_margin, set_header_margin)
#################################################################
def set_footer_margin(self, value):
self.__footer_margin = value
def get_footer_margin(self):
return self.__footer_margin
footer_margin = property(get_footer_margin, set_footer_margin)
#################################################################
def set_copies_num(self, value):
self.__copies_num = value
def get_copies_num(self):
return self.__copies_num
copies_num = property(get_copies_num, set_copies_num)
##################################################################
def set_wnd_protect(self, value):
self.__wnd_protect = int(value)
def get_wnd_protect(self):
return bool(self.__wnd_protect)
wnd_protect = property(get_wnd_protect, set_wnd_protect)
#################################################################
def set_obj_protect(self, value):
self.__obj_protect = int(value)
def get_obj_protect(self):
return bool(self.__obj_protect)
obj_protect = property(get_obj_protect, set_obj_protect)
#################################################################
def set_protect(self, value):
self.__protect = int(value)
def get_protect(self):
return bool(self.__protect)
protect = property(get_protect, set_protect)
#################################################################
def set_scen_protect(self, value):
self.__scen_protect = int(value)
def get_scen_protect(self):
return bool(self.__scen_protect)
scen_protect = property(get_scen_protect, set_scen_protect)
#################################################################
def set_password(self, value):
self.__password = value
def get_password(self):
return self.__password
password = property(get_password, set_password)
##################################################################
## Methods
##################################################################
def get_parent(self):
return self.__parent
def write(self, r, c, label=b"", style=Style.default_style):
self.row(r).write(c, label, style)
def merge(self, r1, r2, c1, c2, style=Style.default_style):
# Stand-alone merge of previously written cells.
# Problems: (1) style to be used should be existing style of
# the top-left cell, not an arg.
# (2) should ensure that any previous data value in
# non-top-left cells is nobbled.
# Note: if a cell is set by a data record then later
# is referenced by a [MUL]BLANK record, Excel will blank
# out the cell on the screen, but OOo & Gnu will not
# blank it out. Need to do something better than writing
# multiple records. In the meantime, avoid this method and use
# write_merge() instead.
if c2 > c1:
self.row(r1).write_blanks(c1 + 1, c2, style)
for r in range(r1+1, r2+1):
self.row(r).write_blanks(c1, c2, style)
self.__merged_ranges.append((r1, r2, c1, c2))
def write_merge(self, r1, r2, c1, c2, label=b"", style=Style.default_style):
assert 0 <= c1 <= c2 <= 255
assert 0 <= r1 <= r2 <= 65535
self.write(r1, c1, label, style)
if c2 > c1:
self.row(r1).write_blanks(c1 + 1, c2, style) # skip (r1, c1)
for r in range(r1+1, r2+1):
self.row(r).write_blanks(c1, c2, style)
self.__merged_ranges.append((r1, r2, c1, c2))
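    # Illustrative use (hypothetical sheet `ws` and style `heading_style`):
    # write_merge() both writes the label and records the merged range in one
    # call, which is why the comments above recommend it over merge():
    #     ws.write_merge(0, 0, 0, 3, 'Quarterly totals', heading_style)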
def insert_bitmap(self, filename, row, col, x = 0, y = 0, scale_x = 1, scale_y = 1):
bmp = Bitmap.ImDataBmpRecord(filename)
obj = Bitmap.ObjBmpRecord(row, col, self, bmp, x, y, scale_x, scale_y)
self.__bmp_rec += obj.get() + bmp.get()
def col(self, indx):
if indx not in self.__cols:
self.__cols[indx] = self.Column(indx, self)
return self.__cols[indx]
def row(self, indx):
if indx not in self.__rows:
if indx in self.__flushed_rows:
raise Exception("Attempt to reuse row index %d of sheet %r after flushing" % (indx, self.__name))
self.__rows[indx] = self.Row(indx, self)
if indx > self.last_used_row:
self.last_used_row = indx
if indx < self.first_used_row:
self.first_used_row = indx
return self.__rows[indx]
def row_height(self, row): # in pixels
if row in self.__rows:
return self.__rows[row].get_height_in_pixels()
else:
return 17
def col_width(self, col): # in pixels
if col in self.__cols:
return self.__cols[col].width_in_pixels()
else:
return 64
##################################################################
## BIFF records generation
##################################################################
def __bof_rec(self):
return BIFFRecords.Biff8BOFRecord(BIFFRecords.Biff8BOFRecord.WORKSHEET).get()
def __update_row_visible_levels(self):
if self.__rows:
temp = max([self.__rows[r].level for r in self.__rows]) + 1
self.__row_visible_levels = max(temp, self.__row_visible_levels)
def __guts_rec(self):
self.__update_row_visible_levels()
col_visible_levels = 0
if len(self.__cols) != 0:
col_visible_levels = max([self.__cols[c].level for c in self.__cols]) + 1
return BIFFRecords.GutsRecord(
self.__row_gut_width, self.__col_gut_height, self.__row_visible_levels, col_visible_levels).get()
def __defaultrowheight_rec(self):
options = 0x0000
options |= (self.row_default_height_mismatch & 1) << 0
options |= (self.row_default_hidden & 1) << 1
options |= (self.row_default_space_above & 1) << 2
options |= (self.row_default_space_below & 1) << 3
defht = self.__row_default_height
return BIFFRecords.DefaultRowHeightRecord(options, defht).get()
def __wsbool_rec(self):
options = 0x00
options |= (self.__show_auto_page_breaks & 0x01) << 0
options |= (self.__dialogue_sheet & 0x01) << 4
options |= (self.__auto_style_outline & 0x01) << 5
options |= (self.__outline_below & 0x01) << 6
options |= (self.__outline_right & 0x01) << 7
options |= (self.__fit_num_pages & 0x01) << 8
options |= (self.__show_row_outline & 0x01) << 10
options |= (self.__show_col_outline & 0x01) << 11
options |= (self.__alt_expr_eval & 0x01) << 14
options |= (self.__alt_formula_entries & 0x01) << 15
return BIFFRecords.WSBoolRecord(options).get()
def __eof_rec(self):
return BIFFRecords.EOFRecord().get()
def __colinfo_rec(self):
result = b''
for col in self.__cols:
result += self.__cols[col].get_biff_record()
return result
def __dimensions_rec(self):
return BIFFRecords.DimensionsRecord(
self.first_used_row, self.last_used_row,
self.first_used_col, self.last_used_col
).get()
def __window2_rec(self):
# Appends SCL record.
options = 0
options |= (self.__show_formulas & 0x01) << 0
options |= (self.__show_grid & 0x01) << 1
options |= (self.__show_headers & 0x01) << 2
options |= (self.__panes_frozen & 0x01) << 3
options |= (self.show_zero_values & 0x01) << 4
options |= (self.__auto_colour_grid & 0x01) << 5
options |= (self.__cols_right_to_left & 0x01) << 6
options |= (self.__show_outline & 0x01) << 7
options |= (self.__remove_splits & 0x01) << 8
options |= (self.__selected & 0x01) << 9
options |= (self.__sheet_visible & 0x01) << 10
options |= (self.__page_preview & 0x01) << 11
if self.__page_preview:
scl_magn = self.__preview_magn
else:
scl_magn = self.__normal_magn
return BIFFRecords.Window2Record(
options, self.__first_visible_row, self.__first_visible_col,
self.__grid_colour,
self.__preview_magn, self.__normal_magn, scl_magn).get()
def __panes_rec(self):
if self.__vert_split_pos is None and self.__horz_split_pos is None:
return b""
if self.__vert_split_pos is None:
self.__vert_split_pos = 0
if self.__horz_split_pos is None:
self.__horz_split_pos = 0
if self.__panes_frozen:
if self.__vert_split_first_visible is None:
self.__vert_split_first_visible = self.__vert_split_pos
if self.__horz_split_first_visible is None:
self.__horz_split_first_visible = self.__horz_split_pos
else:
if self.__vert_split_first_visible is None:
self.__vert_split_first_visible = 0
if self.__horz_split_first_visible is None:
self.__horz_split_first_visible = 0
# inspired by pyXLWriter
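        # The raw split positions (row/column counts) are converted here into
        # the twip-based units the BIFF PANE record expects; the constants
        # are empirical values taken from pyXLWriter.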
self.__horz_split_pos = 20*self.__horz_split_pos + 255
self.__vert_split_pos = int(113.879*self.__vert_split_pos + 390)
if self.__vert_split_pos > 0 and self.__horz_split_pos > 0:
self.__split_active_pane = 0
elif self.__vert_split_pos > 0 and self.__horz_split_pos == 0:
self.__split_active_pane = 1
elif self.__vert_split_pos == 0 and self.__horz_split_pos > 0:
self.__split_active_pane = 2
else:
self.__split_active_pane = 3
result = BIFFRecords.PanesRecord(self.__vert_split_pos,
self.__horz_split_pos,
self.__horz_split_first_visible,
self.__vert_split_first_visible,
self.__split_active_pane).get()
return result
def __row_blocks_rec(self):
result = []
for row in self.__rows.values():
result.append(row.get_row_biff_data())
result.append(row.get_cells_biff_data())
return b''.join(result)
def __merged_rec(self):
return BIFFRecords.MergedCellsRecord(self.__merged_ranges).get()
def __bitmaps_rec(self):
return self.__bmp_rec
def __calc_settings_rec(self):
result = b''
result += BIFFRecords.CalcModeRecord(self.__calc_mode & 0x01).get()
result += BIFFRecords.CalcCountRecord(self.__calc_count & 0xFFFF).get()
result += BIFFRecords.RefModeRecord(self.__RC_ref_mode & 0x01).get()
result += BIFFRecords.IterationRecord(self.__iterations_on & 0x01).get()
result += BIFFRecords.DeltaRecord(self.__delta).get()
result += BIFFRecords.SaveRecalcRecord(self.__save_recalc & 0x01).get()
return result
def __print_settings_rec(self):
result = b''
result += BIFFRecords.PrintHeadersRecord(self.__print_headers).get()
result += BIFFRecords.PrintGridLinesRecord(self.__print_grid).get()
result += BIFFRecords.GridSetRecord(self.__grid_set).get()
result += BIFFRecords.HorizontalPageBreaksRecord(self.__horz_page_breaks).get()
result += BIFFRecords.VerticalPageBreaksRecord(self.__vert_page_breaks).get()
result += BIFFRecords.HeaderRecord(self.__header_str).get()
result += BIFFRecords.FooterRecord(self.__footer_str).get()
result += BIFFRecords.HCenterRecord(self.__print_centered_horz).get()
result += BIFFRecords.VCenterRecord(self.__print_centered_vert).get()
result += BIFFRecords.LeftMarginRecord(self.__left_margin).get()
result += BIFFRecords.RightMarginRecord(self.__right_margin).get()
result += BIFFRecords.TopMarginRecord(self.__top_margin).get()
result += BIFFRecords.BottomMarginRecord(self.__bottom_margin).get()
setup_page_options = (self.__print_in_rows & 0x01) << 0
setup_page_options |= (self.__portrait & 0x01) << 1
setup_page_options |= (0x00 & 0x01) << 2
setup_page_options |= (self.__print_not_colour & 0x01) << 3
setup_page_options |= (self.__print_draft & 0x01) << 4
setup_page_options |= (self.__print_notes & 0x01) << 5
setup_page_options |= (0x00 & 0x01) << 6
setup_page_options |= (0x01 & 0x01) << 7
setup_page_options |= (self.__print_notes_at_end & 0x01) << 9
setup_page_options |= (self.__print_omit_errors & 0x03) << 10
result += BIFFRecords.SetupPageRecord(self.__paper_size_code,
self.__print_scaling,
self.__start_page_number,
self.__fit_width_to_pages,
self.__fit_height_to_pages,
setup_page_options,
self.__print_hres,
self.__print_vres,
self.__header_margin,
self.__footer_margin,
self.__copies_num).get()
return result
def __protection_rec(self):
result = b''
result += BIFFRecords.ProtectRecord(self.__protect).get()
result += BIFFRecords.ScenProtectRecord(self.__scen_protect).get()
result += BIFFRecords.WindowProtectRecord(self.__wnd_protect).get()
result += BIFFRecords.ObjectProtectRecord(self.__obj_protect).get()
result += BIFFRecords.PasswordRecord(self.__password).get()
return result
def get_biff_data(self):
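        # Assemble the worksheet substream in record order: BOF, calculation
        # and layout settings, column info and dimensions, print and
        # protection records, any row data already flushed to the temp file,
        # the in-memory row blocks, merged ranges, bitmaps, window/pane
        # records, and finally EOF.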
result = [
self.__bof_rec(),
self.__calc_settings_rec(),
self.__guts_rec(),
self.__defaultrowheight_rec(),
self.__wsbool_rec(),
self.__colinfo_rec(),
self.__dimensions_rec(),
self.__print_settings_rec(),
self.__protection_rec(),
]
if self.row_tempfile:
self.row_tempfile.flush()
self.row_tempfile.seek(0)
result.append(self.row_tempfile.read())
result.extend([
self.__row_blocks_rec(),
self.__merged_rec(),
self.__bitmaps_rec(),
self.__window2_rec(),
self.__panes_rec(),
self.__eof_rec(),
])
return b''.join(result)
def flush_row_data(self):
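        # Spill the accumulated row records to a temporary file so that very
        # large sheets need not keep every Row object in memory; flushed row
        # indexes can no longer be reused (row() raises if they are).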
if self.row_tempfile is None:
self.row_tempfile = tempfile.TemporaryFile()
self.row_tempfile.write(self.__row_blocks_rec())
for rowx in self.__rows:
self.__flushed_rows[rowx] = 1
self.__update_row_visible_levels()
self.__rows = {}
|
apache-2.0
|
f-prettyland/angr
|
angr/engines/vex/statements/loadg.py
|
1
|
2392
|
from .... import sim_options as o
from ....state_plugins.sim_action_object import SimActionObject
from ....state_plugins.sim_action import SimActionData
from . import SimIRStmt, SimStatementError
class SimIRStmt_LoadG(SimIRStmt):
def _execute(self):
addr = self._translate_expr(self.stmt.addr)
alt = self._translate_expr(self.stmt.alt)
guard = self._translate_expr(self.stmt.guard)
read_type, converted_type = self.stmt.cvt_types
read_size = self.size_bytes(read_type)
converted_size = self.size_bytes(converted_type)
read_expr = self.state.memory.load(addr.expr, read_size, endness=self.stmt.end)
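        # The IRLoadGOp conversion widens the loaded value to the converted
        # type: an "S" op (e.g. ILGop_16Sto32) sign-extends, a "U" op
        # (e.g. ILGop_16Uto32) zero-extends.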
if read_size == converted_size:
converted_expr = read_expr
elif "S" in self.stmt.cvt:
converted_expr = read_expr.sign_extend(converted_size*self.state.arch.byte_width -
read_size*self.state.arch.byte_width)
elif "U" in self.stmt.cvt:
converted_expr = read_expr.zero_extend(converted_size*self.state.arch.byte_width -
read_size*self.state.arch.byte_width)
else:
raise SimStatementError("Unrecognized IRLoadGOp %s!" % self.stmt.cvt)
read_expr = self.state.se.If(guard.expr != 0, converted_expr, alt.expr)
if o.ACTION_DEPS in self.state.options:
reg_deps = addr.reg_deps() | alt.reg_deps() | guard.reg_deps()
tmp_deps = addr.tmp_deps() | alt.tmp_deps() | guard.tmp_deps()
else:
reg_deps = None
tmp_deps = None
self.state.scratch.store_tmp(self.stmt.dst, read_expr, reg_deps, tmp_deps)
if o.TRACK_MEMORY_ACTIONS in self.state.options:
data_ao = SimActionObject(converted_expr)
alt_ao = SimActionObject(alt.expr, reg_deps=alt.reg_deps(), tmp_deps=alt.tmp_deps())
addr_ao = SimActionObject(addr.expr, reg_deps=addr.reg_deps(), tmp_deps=addr.tmp_deps())
guard_ao = SimActionObject(guard.expr, reg_deps=guard.reg_deps(), tmp_deps=guard.tmp_deps())
size_ao = SimActionObject(self.size_bits(converted_type))
r = SimActionData(self.state, self.state.memory.id, SimActionData.READ, addr=addr_ao, data=data_ao, condition=guard_ao, size=size_ao, fallback=alt_ao)
self.actions.append(r)
|
bsd-2-clause
|
camradal/ansible-modules-extras
|
packaging/language/bower.py
|
75
|
7251
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Michael Warkentin <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: bower
short_description: Manage bower packages with bower
description:
- Manage bower packages with bower
version_added: 1.9
author: "Michael Warkentin (@mwarkentin)"
options:
name:
description:
- The name of a bower package to install
required: false
offline:
description:
- Install packages from local cache, if the packages were installed before
required: false
default: no
choices: [ "yes", "no" ]
production:
description:
- Install with --production flag
required: false
default: no
choices: [ "yes", "no" ]
version_added: "2.0"
path:
description:
- The base path where to install the bower packages
required: true
relative_execpath:
description:
- Relative path to bower executable from install path
default: null
required: false
version_added: "2.1"
state:
description:
- The state of the bower package
required: false
default: present
choices: [ "present", "absent", "latest" ]
version:
description:
- The version to be installed
required: false
'''
EXAMPLES = '''
description: Install "bootstrap" bower package.
- bower: name=bootstrap
description: Install "bootstrap" bower package on version 3.1.1.
- bower: name=bootstrap version=3.1.1
description: Remove the "bootstrap" bower package.
- bower: name=bootstrap state=absent
description: Install packages based on bower.json.
- bower: path=/app/location
description: Update packages based on bower.json to their latest version.
- bower: path=/app/location state=latest
description: install bower locally and run from there
- npm: path=/app/location name=bower global=no
- bower: path=/app/location relative_execpath=node_modules/.bin
'''
# json and os are used below; make the dependencies explicit instead of
# relying on them leaking in through the wildcard import of
# ansible.module_utils.basic at the bottom of the file.
import json
import os
class Bower(object):
def __init__(self, module, **kwargs):
self.module = module
self.name = kwargs['name']
self.offline = kwargs['offline']
self.production = kwargs['production']
self.path = kwargs['path']
self.relative_execpath = kwargs['relative_execpath']
self.version = kwargs['version']
if kwargs['version']:
self.name_version = self.name + '#' + self.version
else:
self.name_version = self.name
def _exec(self, args, run_in_check_mode=False, check_rc=True):
if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):
cmd = []
if self.relative_execpath:
cmd.append(os.path.join(self.path, self.relative_execpath, "bower"))
if not os.path.isfile(cmd[-1]):
self.module.fail_json(msg="bower not found at relative path %s" % self.relative_execpath)
else:
cmd.append("bower")
cmd.extend(args)
cmd.extend(['--config.interactive=false', '--allow-root'])
if self.name:
cmd.append(self.name_version)
if self.offline:
cmd.append('--offline')
if self.production:
cmd.append('--production')
# If path is specified, cd into that path and run the command.
cwd = None
if self.path:
if not os.path.exists(self.path):
os.makedirs(self.path)
if not os.path.isdir(self.path):
self.module.fail_json(msg="path %s is not a directory" % self.path)
cwd = self.path
rc, out, err = self.module.run_command(cmd, check_rc=check_rc, cwd=cwd)
return out
return ''
def list(self):
cmd = ['list', '--json']
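        # `bower list --json` emits a dependency tree; the keys used below
        # assume a shape roughly like (illustrative, not exhaustive):
        #   {"dependencies": {"bootstrap": {"pkgMeta": {"version": "3.1.1"},
        #                                   "update": {"latest": "3.3.7"}}}}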
installed = list()
missing = list()
outdated = list()
data = json.loads(self._exec(cmd, True, False))
if 'dependencies' in data:
for dep in data['dependencies']:
dep_data = data['dependencies'][dep]
if dep_data.get('missing', False):
missing.append(dep)
elif ('version' in dep_data['pkgMeta'] and
'update' in dep_data and
dep_data['pkgMeta']['version'] != dep_data['update']['latest']):
outdated.append(dep)
elif dep_data.get('incompatible', False):
outdated.append(dep)
else:
installed.append(dep)
# Named dependency not installed
else:
missing.append(self.name)
return installed, missing, outdated
def install(self):
return self._exec(['install'])
def update(self):
return self._exec(['update'])
def uninstall(self):
return self._exec(['uninstall'])
def main():
arg_spec = dict(
name=dict(default=None),
offline=dict(default='no', type='bool'),
production=dict(default='no', type='bool'),
path=dict(required=True, type='path'),
relative_execpath=dict(default=None, required=False, type='path'),
state=dict(default='present', choices=['present', 'absent', 'latest', ]),
version=dict(default=None),
)
module = AnsibleModule(
argument_spec=arg_spec
)
name = module.params['name']
offline = module.params['offline']
production = module.params['production']
path = os.path.expanduser(module.params['path'])
relative_execpath = module.params['relative_execpath']
state = module.params['state']
version = module.params['version']
if state == 'absent' and not name:
module.fail_json(msg='uninstalling a package is only available for named packages')
bower = Bower(module, name=name, offline=offline, production=production, path=path, relative_execpath=relative_execpath, version=version)
changed = False
if state == 'present':
installed, missing, outdated = bower.list()
if len(missing):
changed = True
bower.install()
elif state == 'latest':
installed, missing, outdated = bower.list()
if len(missing) or len(outdated):
changed = True
bower.update()
else: # Absent
installed, missing, outdated = bower.list()
if name in installed:
changed = True
bower.uninstall()
module.exit_json(changed=changed)
# Import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
fcheslack/libZotero
|
lib/py/test/test_library.py
|
2
|
2406
|
import sys
sys.path.append('../')
import unittest
from libZotero import zotero
class TestApiUrlBuilding(unittest.TestCase):
def test_apiurls(self):
config = {'target': 'collections', 'libraryType': 'user', 'libraryID': 1, 'content': 'json', 'limit': '100'}
self.assertEqual(zotero.apiRequestUrl(config), "https://api.zotero.org/users/1/collections")
self.assertEqual(zotero.apiQueryString(config), "?content=json&limit=100")
config = {'target': 'items', 'libraryType': 'group', 'libraryID': 1, 'format': 'atom', 'content': 'json', 'limit': '100'}
self.assertEqual(zotero.apiRequestUrl(config), "https://api.zotero.org/groups/1/items")
self.assertEqual(zotero.apiQueryString(config), "?content=json&format=atom&limit=100")
config = {'target': 'items', 'libraryType': 'user', 'libraryID': 1, 'content': 'json,coins', 'limit': '25'}
self.assertEqual(zotero.apiRequestUrl(config), "https://api.zotero.org/users/1/items")
self.assertEqual(zotero.apiQueryString(config), "?content=json%2Ccoins&limit=25")
config = {'target': 'item', 'libraryType': 'user', 'libraryID': 1, 'content': 'json', 'itemKey': 'ASDF1234'}
self.assertEqual(zotero.apiRequestUrl(config), "https://api.zotero.org/users/1/items/ASDF1234")
self.assertEqual(zotero.apiQueryString(config), "?content=json")
config = {'target': 'items', 'libraryType': 'user', 'libraryID': 1, 'content': 'bibtex', 'limit': '100', 'itemKey': 'ASDF1234,FDSA4321'}
self.assertEqual(zotero.apiRequestUrl(config), "https://api.zotero.org/users/1/items")
self.assertEqual(zotero.apiQueryString(config), "?content=bibtex&itemKey=ASDF1234%2CFDSA4321&limit=100")
config = {'target': 'deleted', 'libraryType': 'user', 'libraryID': 1, 'content': 'json', 'limit': '100'}
self.assertEqual(zotero.apiRequestUrl(config), "https://api.zotero.org/users/1/deleted")
self.assertEqual(zotero.apiQueryString(config), "?content=json&limit=100")
config = {'target': 'children', 'libraryType': 'user', 'libraryID': 1, 'itemKey': 'ASDF1234', 'content': 'json', 'limit': '100'}
self.assertEqual(zotero.apiRequestUrl(config), "https://api.zotero.org/users/1/items/ASDF1234/children")
self.assertEqual(zotero.apiQueryString(config), "?content=json&limit=100")
if __name__ == '__main__':
unittest.main()
|
agpl-3.0
|
gnowxilef/youtube-dl
|
youtube_dl/extractor/playvid.py
|
64
|
3299
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse_unquote,
compat_urllib_parse_unquote_plus,
)
from ..utils import (
clean_html,
ExtractorError,
)
class PlayvidIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?playvid\.com/watch(\?v=|/)(?P<id>.+?)(?:#|$)'
_TESTS = [{
'url': 'http://www.playvid.com/watch/RnmBNgtrrJu',
'md5': 'ffa2f6b2119af359f544388d8c01eb6c',
'info_dict': {
'id': 'RnmBNgtrrJu',
'ext': 'mp4',
'title': 'md5:9256d01c6317e3f703848b5906880dc8',
'duration': 82,
'age_limit': 18,
},
'skip': 'Video removed due to ToS',
}, {
'url': 'http://www.playvid.com/watch/hwb0GpNkzgH',
'md5': '39d49df503ad7b8f23a4432cbf046477',
'info_dict': {
'id': 'hwb0GpNkzgH',
'ext': 'mp4',
'title': 'Ellen Euro Cutie Blond Takes a Sexy Survey Get Facial in The Park',
'age_limit': 18,
'thumbnail': r're:^https?://.*\.jpg$',
},
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
m_error = re.search(
r'<div class="block-error">\s*<div class="heading">\s*<div>(?P<msg>.+?)</div>\s*</div>', webpage)
if m_error:
raise ExtractorError(clean_html(m_error.group('msg')), expected=True)
video_title = None
duration = None
video_thumbnail = None
formats = []
# most of the information is stored in the flashvars
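        # (Judging from the regexes below, the decoded flashvars are assumed
        # to look roughly like:
        #   video_vars[title]=...&video_vars[duration]=82&
        #   video_vars[video_urls][360p]=http%3A%2F%2F...
        # -- this shape is an inference, not documented by the site.)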
flashvars = self._html_search_regex(
r'flashvars="(.+?)"', webpage, 'flashvars')
infos = compat_urllib_parse_unquote(flashvars).split(r'&')
for info in infos:
videovars_match = re.match(r'^video_vars\[(.+?)\]=(.+?)$', info)
if videovars_match:
key = videovars_match.group(1)
val = videovars_match.group(2)
if key == 'title':
video_title = compat_urllib_parse_unquote_plus(val)
if key == 'duration':
try:
duration = int(val)
except ValueError:
pass
if key == 'big_thumb':
video_thumbnail = val
videourl_match = re.match(
r'^video_urls\]\[(?P<resolution>[0-9]+)p', key)
if videourl_match:
height = int(videourl_match.group('resolution'))
formats.append({
'height': height,
'url': val,
})
self._sort_formats(formats)
# Extract title - should be in the flashvars; if not, look elsewhere
if video_title is None:
video_title = self._html_search_regex(
r'<title>(.*?)</title', webpage, 'title')
return {
'id': video_id,
'formats': formats,
'title': video_title,
'thumbnail': video_thumbnail,
'duration': duration,
'description': None,
'age_limit': 18
}
|
unlicense
|
cvium/Flexget
|
tests/test_delay.py
|
22
|
1040
|
from __future__ import unicode_literals, division, absolute_import
from datetime import timedelta
from tests import FlexGetBase
from flexget.manager import Session
from flexget.plugins.filter.delay import DelayedEntry
class TestDelay(FlexGetBase):
__yaml__ = """
tasks:
test:
mock:
- title: entry 1
delay: 1 hours
"""
def test_delay(self):
self.execute_task('test')
assert not self.task.entries, 'No entries should have passed delay'
# Age the entry in the db
session = Session()
delayed_entries = session.query(DelayedEntry).all()
for entry in delayed_entries:
entry.expire = entry.expire - timedelta(hours=1)
session.commit()
self.execute_task('test')
assert self.task.entries, 'Entry should have passed delay and been inserted'
# Make sure entry is only injected once
self.execute_task('test')
        assert not self.task.entries, 'Entry should only be injected once'
|
mit
|
karek314/bitcoin
|
contrib/testgen/base58.py
|
2139
|
2818
|
'''
Bitcoin base58 encoding and decoding.
Based on https://bitcointalk.org/index.php?topic=1026.0 (public domain)
'''
import hashlib
# for compatibility with following code...
class SHA256:
new = hashlib.sha256
if str != bytes:
# Python 3.x
def ord(c):
return c
def chr(n):
return bytes( (n,) )
__b58chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
__b58base = len(__b58chars)
b58chars = __b58chars
def b58encode(v):
""" encode v, which is a string of bytes, to base58.
"""
long_value = 0
for (i, c) in enumerate(v[::-1]):
long_value += (256**i) * ord(c)
result = ''
while long_value >= __b58base:
div, mod = divmod(long_value, __b58base)
result = __b58chars[mod] + result
long_value = div
result = __b58chars[long_value] + result
# Bitcoin does a little leading-zero-compression:
# leading 0-bytes in the input become leading-1s
nPad = 0
for c in v:
if c == '\0': nPad += 1
else: break
return (__b58chars[0]*nPad) + result
def b58decode(v, length = None):
""" decode v into a string of len bytes
"""
long_value = 0
for (i, c) in enumerate(v[::-1]):
long_value += __b58chars.find(c) * (__b58base**i)
result = bytes()
while long_value >= 256:
div, mod = divmod(long_value, 256)
result = chr(mod) + result
long_value = div
result = chr(long_value) + result
nPad = 0
for c in v:
if c == __b58chars[0]: nPad += 1
else: break
result = chr(0)*nPad + result
if length is not None and len(result) != length:
return None
return result
def checksum(v):
"""Return 32-bit checksum based on SHA256"""
return SHA256.new(SHA256.new(v).digest()).digest()[0:4]
def b58encode_chk(v):
"""b58encode a string, with 32-bit checksum"""
return b58encode(v + checksum(v))
def b58decode_chk(v):
"""decode a base58 string, check and remove checksum"""
result = b58decode(v)
if result is None:
return None
    h3 = checksum(result[:-4])
    if result[-4:] == h3:
        return result[:-4]
    else:
        return None
def get_bcaddress_version(strAddress):
""" Returns None if strAddress is invalid. Otherwise returns integer version of address. """
addr = b58decode_chk(strAddress)
if addr is None or len(addr)!=21: return None
version = addr[0]
return ord(version)
if __name__ == '__main__':
# Test case (from http://gitorious.org/bitcoin/python-base58.git)
    assert get_bcaddress_version('15VjRaDX9zpbA8LVnbrCAFzrVzN7ixHNsC') == 0
_ohai = 'o hai'.encode('ascii')
_tmp = b58encode(_ohai)
assert _tmp == 'DYB3oMS'
assert b58decode(_tmp, 5) == _ohai
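    # Round-trip sanity check (added illustration): encoding with a checksum
    # and decoding again should return the original payload.
    assert b58decode_chk(b58encode_chk(_ohai)) == _ohai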
print("Tests passed")
|
mit
|
PriceChild/ansible
|
lib/ansible/modules/web_infrastructure/ansible_tower/tower_credential.py
|
33
|
9476
|
#!/usr/bin/python
#coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <[email protected]>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_credential
author: "Wayne Witzel III (@wwitzel3)"
version_added: "2.3"
short_description: create, update, or destroy Ansible Tower credential.
description:
- Create, update, or destroy Ansible Tower credentials. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- The name to use for the credential.
required: True
description:
description:
- The description to use for the credential.
user:
description:
- User that should own this credential.
required: False
default: null
team:
description:
- Team that should own this credential.
required: False
default: null
project:
description:
        - Project to use for this credential.
required: False
default: null
organization:
description:
- Organization that should own the credential.
required: False
default: null
kind:
description:
- Type of credential being added.
required: True
choices: ["ssh", "net", "scm", "aws", "rax", "vmware", "satellite6", "cloudforms", "gce", "azure", "azure_rm", "openstack"]
host:
description:
- Host for this credential.
required: False
default: null
username:
description:
- Username for this credential. access_key for AWS.
required: False
default: null
password:
description:
- Password for this credential. Use ASK for prompting. secret_key for AWS. api_key for RAX.
required: False
default: null
ssh_key_data:
description:
- Path to SSH private key.
required: False
default: null
ssh_key_unlock:
description:
- Unlock password for ssh_key. Use ASK for prompting.
authorize:
description:
        - Should use authorize for net type.
required: False
default: False
authorize_password:
description:
        - Password for net credentials that require authorize.
required: False
default: null
client:
description:
- Client or application ID for azure_rm type.
required: False
default: null
secret:
description:
- Secret token for azure_rm type.
required: False
default: null
subscription:
description:
- Subscription ID for azure_rm type.
required: False
default: null
tenant:
description:
- Tenant ID for azure_rm type.
required: False
default: null
domain:
description:
- Domain for openstack type.
required: False
default: null
become_method:
description:
        - Become method to use for privilege escalation.
required: False
choices: ["None", "sudo", "su", "pbrun", "pfexec"]
default: "None"
become_username:
description:
- Become username. Use ASK for prompting.
required: False
default: null
become_password:
description:
- Become password. Use ASK for prompting.
required: False
default: null
vault_password:
description:
        - Vault password. Use ASK for prompting.
state:
description:
- Desired state of the resource.
required: False
default: "present"
choices: ["present", "absent"]
tower_host:
description:
- URL to your Tower instance.
required: False
default: null
tower_username:
description:
- Username for your Tower instance.
required: False
default: null
tower_password:
description:
- Password for your Tower instance.
required: False
default: null
tower_verify_ssl:
description:
- Dis/allow insecure connections to Tower. If C(no), SSL certificates will not be validated.
This should only be used on personally controlled sites using self-signed certificates.
required: False
default: True
tower_config_file:
description:
- Path to the Tower config file. See notes.
required: False
default: null
requirements:
- "python >= 2.6"
- "ansible-tower-cli >= 3.0.2"
notes:
- If no I(config_file) is provided we will attempt to use the tower-cli library
defaults to find your Tower host information.
- I(config_file) should contain Tower configuration in the following format
host=hostname
username=username
password=password
'''
EXAMPLES = '''
- name: Add tower credential
tower_credential:
name: Team Name
description: Team Description
organization: test-org
state: present
tower_config_file: "~/tower_cli.cfg"
'''
try:
import os
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
from ansible.module_utils.ansible_tower import tower_auth_config, tower_check_mode
HAS_TOWER_CLI = True
except ImportError:
HAS_TOWER_CLI = False
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
user = dict(),
team = dict(),
kind = dict(required=True,
choices=["ssh", "net", "scm", "aws", "rax", "vmware", "satellite6",
"cloudforms", "gce", "azure", "azure_rm", "openstack"]),
host = dict(),
username = dict(),
password = dict(no_log=True),
ssh_key_data = dict(no_log=True),
ssh_key_unlock = dict(no_log=True),
authorize = dict(type='bool', default=False),
authorize_password = dict(no_log=True),
client = dict(),
secret = dict(),
tenant = dict(),
subscription = dict(),
domain = dict(),
become_method = dict(),
become_username = dict(),
become_password = dict(no_log=True),
vault_password = dict(no_log=True),
description = dict(),
organization = dict(required=True),
project = dict(),
tower_host = dict(),
tower_username = dict(),
tower_password = dict(no_log=True),
tower_verify_ssl = dict(type='bool', default=True),
tower_config_file = dict(type='path'),
state = dict(choices=['present', 'absent'], default='present'),
),
supports_check_mode=True
)
if not HAS_TOWER_CLI:
module.fail_json(msg='ansible-tower-cli required for this module')
name = module.params.get('name')
organization = module.params.get('organization')
state = module.params.get('state')
json_output = {'credential': name, 'state': state}
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
credential = tower_cli.get_resource('credential')
try:
params = module.params.copy()
params['create_on_missing'] = True
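            # create_on_missing tells tower-cli's modify() to create the
            # credential if it does not already exist (upsert semantics).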
if organization:
org_res = tower_cli.get_resource('organization')
org = org_res.get(name=organization)
params['organization'] = org['id']
if params['ssh_key_data']:
filename = params['ssh_key_data']
filename = os.path.expanduser(filename)
if not os.path.exists(filename):
module.fail_json(msg='file not found: %s' % filename)
if os.path.isdir(filename):
module.fail_json(msg='attempted to read contents of directory: %s' % filename)
with open(filename, 'rb') as f:
params['ssh_key_data'] = f.read()
if state == 'present':
result = credential.modify(**params)
json_output['id'] = result['id']
elif state == 'absent':
result = credential.delete(**params)
except (exc.NotFound) as excinfo:
module.fail_json(msg='Failed to update credential, organization not found: {0}'.format(excinfo), changed=False)
        except (exc.ConnectionError, exc.BadRequest) as excinfo:
module.fail_json(msg='Failed to update credential: {0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
|
gpl-3.0
|
tkurnosova/selenium
|
py/selenium/webdriver/remote/command.py
|
64
|
5715
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
class Command(object):
"""
Defines constants for the standard WebDriver commands.
While these constants have no meaning in and of themselves, they are
used to marshal commands through a service that implements WebDriver's
remote wire protocol:
https://github.com/SeleniumHQ/selenium/wiki/JsonWireProtocol
"""
# Keep in sync with org.openqa.selenium.remote.DriverCommand
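    # These constants are the keys RemoteWebDriver hands to its command
    # executor, e.g. (illustrative):
    #     driver.execute(Command.GET, {'url': 'http://example.com'})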
STATUS = "status"
NEW_SESSION = "newSession"
GET_ALL_SESSIONS = "getAllSessions"
DELETE_SESSION = "deleteSession"
CLOSE = "close"
QUIT = "quit"
GET = "get"
GO_BACK = "goBack"
GO_FORWARD = "goForward"
REFRESH = "refresh"
ADD_COOKIE = "addCookie"
GET_COOKIE = "getCookie"
GET_ALL_COOKIES = "getCookies"
DELETE_COOKIE = "deleteCookie"
DELETE_ALL_COOKIES = "deleteAllCookies"
FIND_ELEMENT = "findElement"
FIND_ELEMENTS = "findElements"
FIND_CHILD_ELEMENT = "findChildElement"
FIND_CHILD_ELEMENTS = "findChildElements"
CLEAR_ELEMENT = "clearElement"
CLICK_ELEMENT = "clickElement"
SEND_KEYS_TO_ELEMENT = "sendKeysToElement"
SEND_KEYS_TO_ACTIVE_ELEMENT = "sendKeysToActiveElement"
SUBMIT_ELEMENT = "submitElement"
UPLOAD_FILE = "uploadFile"
GET_CURRENT_WINDOW_HANDLE = "getCurrentWindowHandle"
GET_WINDOW_HANDLES = "getWindowHandles"
GET_WINDOW_SIZE = "getWindowSize"
W3C_GET_WINDOW_SIZE = "w3cGetWindowSize"
GET_WINDOW_POSITION = "getWindowPosition"
SET_WINDOW_SIZE = "setWindowSize"
W3C_SET_WINDOW_SIZE = "w3cSetWindowSize"
SET_WINDOW_POSITION = "setWindowPosition"
SWITCH_TO_WINDOW = "switchToWindow"
SWITCH_TO_FRAME = "switchToFrame"
SWITCH_TO_PARENT_FRAME = "switchToParentFrame"
GET_ACTIVE_ELEMENT = "getActiveElement"
GET_CURRENT_URL = "getCurrentUrl"
GET_PAGE_SOURCE = "getPageSource"
GET_TITLE = "getTitle"
EXECUTE_SCRIPT = "executeScript"
GET_ELEMENT_TEXT = "getElementText"
GET_ELEMENT_VALUE = "getElementValue"
GET_ELEMENT_TAG_NAME = "getElementTagName"
SET_ELEMENT_SELECTED = "setElementSelected"
IS_ELEMENT_SELECTED = "isElementSelected"
IS_ELEMENT_ENABLED = "isElementEnabled"
IS_ELEMENT_DISPLAYED = "isElementDisplayed"
GET_ELEMENT_LOCATION = "getElementLocation"
GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW = "getElementLocationOnceScrolledIntoView"
GET_ELEMENT_SIZE = "getElementSize"
GET_ELEMENT_RECT = "getElementRect"
GET_ELEMENT_ATTRIBUTE = "getElementAttribute"
GET_ELEMENT_VALUE_OF_CSS_PROPERTY = "getElementValueOfCssProperty"
ELEMENT_EQUALS = "elementEquals"
SCREENSHOT = "screenshot"
ELEMENT_SCREENSHOT = "elementScreenshot"
IMPLICIT_WAIT = "implicitlyWait"
EXECUTE_ASYNC_SCRIPT = "executeAsyncScript"
SET_SCRIPT_TIMEOUT = "setScriptTimeout"
SET_TIMEOUTS = "setTimeouts"
MAXIMIZE_WINDOW = "windowMaximize"
W3C_MAXIMIZE_WINDOW = "w3cMaximizeWindow"
GET_LOG = "getLog"
GET_AVAILABLE_LOG_TYPES = "getAvailableLogTypes"
    # Alerts
DISMISS_ALERT = "dismissAlert"
ACCEPT_ALERT = "acceptAlert"
SET_ALERT_VALUE = "setAlertValue"
GET_ALERT_TEXT = "getAlertText"
SET_ALERT_CREDENTIALS = "setAlertCredentials"
# Advanced user interactions
CLICK = "mouseClick"
DOUBLE_CLICK = "mouseDoubleClick"
MOUSE_DOWN = "mouseButtonDown"
MOUSE_UP = "mouseButtonUp"
MOVE_TO = "mouseMoveTo"
# Screen Orientation
SET_SCREEN_ORIENTATION = "setScreenOrientation"
GET_SCREEN_ORIENTATION = "getScreenOrientation"
# Touch Actions
SINGLE_TAP = "touchSingleTap"
TOUCH_DOWN = "touchDown"
TOUCH_UP = "touchUp"
TOUCH_MOVE = "touchMove"
TOUCH_SCROLL = "touchScroll"
DOUBLE_TAP = "touchDoubleTap"
LONG_PRESS = "touchLongPress"
FLICK = "touchFlick"
    # HTML 5
EXECUTE_SQL = "executeSql"
GET_LOCATION = "getLocation"
SET_LOCATION = "setLocation"
GET_APP_CACHE = "getAppCache"
GET_APP_CACHE_STATUS = "getAppCacheStatus"
CLEAR_APP_CACHE = "clearAppCache"
GET_LOCAL_STORAGE_ITEM = "getLocalStorageItem"
REMOVE_LOCAL_STORAGE_ITEM = "removeLocalStorageItem"
GET_LOCAL_STORAGE_KEYS = "getLocalStorageKeys"
SET_LOCAL_STORAGE_ITEM = "setLocalStorageItem"
CLEAR_LOCAL_STORAGE = "clearLocalStorage"
GET_LOCAL_STORAGE_SIZE = "getLocalStorageSize"
GET_SESSION_STORAGE_ITEM = "getSessionStorageItem"
REMOVE_SESSION_STORAGE_ITEM = "removeSessionStorageItem"
GET_SESSION_STORAGE_KEYS = "getSessionStorageKeys"
SET_SESSION_STORAGE_ITEM = "setSessionStorageItem"
CLEAR_SESSION_STORAGE = "clearSessionStorage"
GET_SESSION_STORAGE_SIZE = "getSessionStorageSize"
# Mobile
GET_NETWORK_CONNECTION = "getNetworkConnection"
SET_NETWORK_CONNECTION = "setNetworkConnection"
CURRENT_CONTEXT_HANDLE = "getCurrentContextHandle"
CONTEXT_HANDLES = "getContextHandles"
SWITCH_TO_CONTEXT = "switchToContext"
|
apache-2.0
|
mtlchun/edx
|
cms/djangoapps/contentstore/management/commands/tests/test_create_course.py
|
137
|
2495
|
"""
Unit tests for creating a course in a chosen modulestore
"""
import unittest
import ddt
from django.core.management import CommandError, call_command
from contentstore.management.commands.create_course import Command
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.django import modulestore
class TestArgParsing(unittest.TestCase):
"""
Tests for parsing arguments for the `create_course` management command
"""
def setUp(self):
super(TestArgParsing, self).setUp()
self.command = Command()
def test_no_args(self):
errstring = "create_course requires 5 arguments"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle('create_course')
def test_invalid_store(self):
with self.assertRaises(CommandError):
self.command.handle("foo", "[email protected]", "org", "course", "run")
def test_xml_store(self):
with self.assertRaises(CommandError):
self.command.handle(ModuleStoreEnum.Type.xml, "[email protected]", "org", "course", "run")
def test_nonexistent_user_id(self):
errstring = "No user 99 found"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("split", "99", "org", "course", "run")
def test_nonexistent_user_email(self):
errstring = "No user [email protected] found"
with self.assertRaisesRegexp(CommandError, errstring):
self.command.handle("mongo", "[email protected]", "org", "course", "run")
@ddt.ddt
class TestCreateCourse(ModuleStoreTestCase):
"""
Unit tests for creating a course in either old mongo or split mongo via command line
"""
def setUp(self):
super(TestCreateCourse, self).setUp(create_user=True)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_all_stores_user_email(self, store):
call_command(
"create_course",
store,
str(self.user.email),
"org", "course", "run"
)
new_key = modulestore().make_course_key("org", "course", "run")
self.assertTrue(
modulestore().has_course(new_key),
"Could not find course in {}".format(store)
)
# pylint: disable=protected-access
self.assertEqual(store, modulestore()._get_modulestore_for_courselike(new_key).get_modulestore_type())
|
agpl-3.0
|
Dhivyap/ansible
|
lib/ansible/modules/storage/purestorage/purefa_pg.py
|
21
|
17987
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Simon Dodsley ([email protected])
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: purefa_pg
version_added: '2.4'
short_description: Manage protection groups on Pure Storage FlashArrays
description:
- Create, delete or modify protection groups on Pure Storage FlashArrays.
- If a protection group exists and you try to add non-valid types, e.g. a host
  to a volume protection group, the module will ignore the invalid types.
- Protection Groups on Offload targets are supported.
author:
- Pure Storage Ansible Team (@sdodsley) <[email protected]>
options:
pgroup:
description:
- The name of the protection group.
type: str
required: true
state:
description:
- Define whether the protection group should exist or not.
type: str
default: present
choices: [ absent, present ]
volume:
description:
- List of existing volumes to add to protection group.
type: list
host:
description:
- List of existing hosts to add to protection group.
type: list
hostgroup:
description:
- List of existing hostgroups to add to protection group.
type: list
eradicate:
description:
    - Define whether to eradicate the protection group on delete, rather than leaving it in the trash.
    type: bool
default: 'no'
enabled:
description:
    - Define whether to enable snapshots for the protection group.
    type: bool
default: 'yes'
target:
description:
- List of remote arrays or offload target for replication protection group
to connect to.
- Note that all replicated protection groups are asynchronous.
- Target arrays or offload targets must already be connected to the source array.
- Maximum number of targets per Protection Group is 4, assuming your
configuration supports this.
type: list
version_added: '2.8'
extends_documentation_fragment:
- purestorage.fa
'''
EXAMPLES = r'''
- name: Create new local protection group
purefa_pg:
pgroup: foo
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: Create new replicated protection group
purefa_pg:
pgroup: foo
target:
- arrayb
- arrayc
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: Create new replicated protection group to offload target and remote array
purefa_pg:
pgroup: foo
target:
- offload
- arrayc
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: Create new protection group with snapshots disabled
purefa_pg:
pgroup: foo
enabled: false
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: Delete protection group
purefa_pg:
pgroup: foo
eradicate: true
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: absent
- name: Eradicate protection group foo on offload target where source array is arrayA
purefa_pg:
pgroup: "arrayA:foo"
target: offload
eradicate: true
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
state: absent
- name: Create protection group for hostgroups
purefa_pg:
pgroup: bar
hostgroup:
- hg1
- hg2
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: Create protection group for hosts
purefa_pg:
pgroup: bar
host:
- host1
- host2
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
- name: Create replicated protection group for volumes
purefa_pg:
pgroup: bar
volume:
- vol1
- vol2
target: arrayb
fa_url: 10.10.10.2
api_token: e31060a7-21fc-e277-6240-25983c6c4592
'''
RETURN = r'''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pure import get_system, purefa_argument_spec
OFFLOAD_API_VERSION = '1.16'
def get_targets(array):
"""Get Offload Targets"""
targets = []
try:
target_details = array.list_offload()
except Exception:
return None
for targetcnt in range(0, len(target_details)):
if target_details[targetcnt]['status'] == "connected":
targets.append(target_details[targetcnt]['name'])
return targets
def get_arrays(array):
""" Get Connected Arrays"""
arrays = []
array_details = array.list_array_connections()
for arraycnt in range(0, len(array_details)):
if array_details[arraycnt]['connected']:
arrays.append(array_details[arraycnt]['array_name'])
return arrays
def get_pending_pgroup(module, array):
""" Get Protection Group"""
pgroup = None
if ":" in module.params['pgroup']:
for pgrp in array.list_pgroups(pending=True, on="*"):
if pgrp["name"] == module.params['pgroup'] and pgrp['time_remaining']:
pgroup = pgrp
break
else:
for pgrp in array.list_pgroups(pending=True):
if pgrp["name"] == module.params['pgroup'] and pgrp['time_remaining']:
pgroup = pgrp
break
return pgroup
def get_pgroup(module, array):
""" Get Protection Group"""
pgroup = None
if ":" in module.params['pgroup']:
for pgrp in array.list_pgroups(on="*"):
if pgrp["name"] == module.params['pgroup']:
pgroup = pgrp
break
else:
for pgrp in array.list_pgroups():
if pgrp["name"] == module.params['pgroup']:
pgroup = pgrp
break
return pgroup
def get_pgroup_sched(module, array):
""" Get Protection Group Schedule"""
pgroup = None
for pgrp in array.list_pgroups(schedule=True):
if pgrp["name"] == module.params['pgroup']:
pgroup = pgrp
break
return pgroup
def check_pg_on_offload(module, array):
""" Check if PG already exists on offload target """
array_name = array.get()['array_name']
remote_pg = array_name + ":" + module.params['pgroup']
targets = get_targets(array)
for target in targets:
remote_pgs = array.list_pgroups(pending=True, on=target)
for rpg in range(0, len(remote_pgs)):
if remote_pg == remote_pgs[rpg]['name']:
return target
return None
def make_pgroup(module, array):
""" Create Protection Group"""
changed = False
if module.params['target']:
api_version = array._list_available_rest_versions()
connected_targets = []
connected_arrays = get_arrays(array)
if OFFLOAD_API_VERSION in api_version:
connected_targets = get_targets(array)
offload_name = check_pg_on_offload(module, array)
if offload_name and offload_name in module.params['target'][0:4]:
module.fail_json(msg='Protection Group {0} already exists on offload target {1}.'.format(module.params['pgroup'], offload_name))
connected_arrays = connected_arrays + connected_targets
if connected_arrays == []:
module.fail_json(msg='No connected targets on source array.')
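        # Only the first four requested targets are used: a protection group
        # supports at most 4 targets (see the documentation above).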
if set(module.params['target'][0:4]).issubset(connected_arrays):
try:
array.create_pgroup(module.params['pgroup'], targetlist=module.params['target'][0:4])
except Exception:
module.fail_json(msg='Creation of replicated pgroup {0} failed. {1}'.format(module.params['pgroup'], module.params['target'][0:4]))
else:
module.fail_json(msg='Check all selected targets are connected to the source array.')
else:
try:
array.create_pgroup(module.params['pgroup'])
except Exception:
module.fail_json(msg='Creation of pgroup {0} failed.'.format(module.params['pgroup']))
try:
if module.params['target']:
array.set_pgroup(module.params['pgroup'], replicate_enabled=module.params['enabled'])
else:
array.set_pgroup(module.params['pgroup'], snap_enabled=module.params['enabled'])
except Exception:
module.fail_json(msg='Enabling pgroup {0} failed.'.format(module.params['pgroup']))
if module.params['volume']:
try:
array.set_pgroup(module.params['pgroup'], vollist=module.params['volume'])
except Exception:
module.fail_json(msg='Adding volumes to pgroup {0} failed.'.format(module.params['pgroup']))
if module.params['host']:
try:
array.set_pgroup(module.params['pgroup'], hostlist=module.params['host'])
except Exception:
module.fail_json(msg='Adding hosts to pgroup {0} failed.'.format(module.params['pgroup']))
if module.params['hostgroup']:
try:
array.set_pgroup(module.params['pgroup'], hgrouplist=module.params['hostgroup'])
except Exception:
module.fail_json(msg='Adding hostgroups to pgroup {0} failed.'.format(module.params['pgroup']))
changed = True
module.exit_json(changed=changed)
def update_pgroup(module, array):
""" Update Protection Group"""
changed = False
if module.params['target']:
api_version = array._list_available_rest_versions()
connected_targets = []
connected_arrays = get_arrays(array)
if OFFLOAD_API_VERSION in api_version:
connected_targets = get_targets(array)
offload_name = check_pg_on_offload(module, array)
if offload_name and offload_name in module.params['target'][0:4]:
module.fail_json(msg='Protection Group {0} already exists on offload target {1}.'.format(module.params['pgroup'], offload_name))
connected_arrays = connected_arrays + connected_targets
if connected_arrays == []:
module.fail_json(msg='No targets connected to source array.')
current_connects = array.get_pgroup(module.params['pgroup'])['targets']
current_targets = []
if current_connects:
for targetcnt in range(0, len(current_connects)):
current_targets.append(current_connects[targetcnt]['name'])
if set(module.params['target'][0:4]) != set(current_targets):
if not set(module.params['target'][0:4]).issubset(connected_arrays):
module.fail_json(msg='Check all selected targets are connected to the source array.')
try:
array.set_pgroup(module.params['pgroup'], targetlist=module.params['target'][0:4])
changed = True
except Exception:
module.fail_json(msg='Changing targets for pgroup {0} failed.'.format(module.params['pgroup']))
if module.params['target'] and module.params['enabled'] != get_pgroup_sched(module, array)['replicate_enabled']:
try:
array.set_pgroup(module.params['pgroup'], replicate_enabled=module.params['enabled'])
changed = True
except Exception:
module.fail_json(msg='Changing enabled status of pgroup {0} failed.'.format(module.params['pgroup']))
elif not module.params['target'] and module.params['enabled'] != get_pgroup_sched(module, array)['snap_enabled']:
try:
array.set_pgroup(module.params['pgroup'], snap_enabled=module.params['enabled'])
changed = True
except Exception:
module.fail_json(msg='Changing enabled status of pgroup {0} failed.'.format(module.params['pgroup']))
if module.params['volume'] and get_pgroup(module, array)['hosts'] is None and get_pgroup(module, array)['hgroups'] is None:
if get_pgroup(module, array)['volumes'] is None:
try:
array.set_pgroup(module.params['pgroup'], vollist=module.params['volume'])
changed = True
except Exception:
module.fail_json(msg='Adding volumes to pgroup {0} failed.'.format(module.params['pgroup']))
else:
if not all(x in get_pgroup(module, array)['volumes'] for x in module.params['volume']):
try:
array.set_pgroup(module.params['pgroup'], vollist=module.params['volume'])
changed = True
except Exception:
module.fail_json(msg='Changing volumes in pgroup {0} failed.'.format(module.params['pgroup']))
if module.params['host'] and get_pgroup(module, array)['volumes'] is None and get_pgroup(module, array)['hgroups'] is None:
if not get_pgroup(module, array)['hosts'] is None:
try:
array.set_pgroup(module.params['pgroup'], hostlist=module.params['host'])
changed = True
except Exception:
module.fail_json(msg='Adding hosts to pgroup {0} failed.'.format(module.params['pgroup']))
else:
if not all(x in get_pgroup(module, array)['hosts'] for x in module.params['host']):
try:
array.set_pgroup(module.params['pgroup'], hostlist=module.params['host'])
changed = True
except Exception:
module.fail_json(msg='Changing hosts in pgroup {0} failed.'.format(module.params['pgroup']))
if module.params['hostgroup'] and get_pgroup(module, array)['hosts'] is None and get_pgroup(module, array)['volumes'] is None:
if not get_pgroup(module, array)['hgroups'] is None:
try:
array.set_pgroup(module.params['pgroup'], hgrouplist=module.params['hostgroup'])
changed = True
except Exception:
module.fail_json(msg='Adding hostgroups to pgroup {0} failed.'.format(module.params['pgroup']))
else:
if not all(x in get_pgroup(module, array)['hgroups'] for x in module.params['hostgroup']):
try:
array.set_pgroup(module.params['pgroup'], hgrouplist=module.params['hostgroup'])
changed = True
except Exception:
module.fail_json(msg='Changing hostgroups in pgroup {0} failed.'.format(module.params['pgroup']))
module.exit_json(changed=changed)
def eradicate_pgroup(module, array):
""" Eradicate Protection Group"""
changed = False
if ":" in module.params['pgroup']:
try:
target = ''.join(module.params['target'])
array.destroy_pgroup(module.params['pgroup'], on=target, eradicate=True)
changed = True
except Exception:
module.fail_json(msg='Eradicating pgroup {0} failed.'.format(module.params['pgroup']))
else:
try:
array.destroy_pgroup(module.params['pgroup'], eradicate=True)
changed = True
except Exception:
module.fail_json(msg='Eradicating pgroup {0} failed.'.format(module.params['pgroup']))
module.exit_json(changed=changed)
def delete_pgroup(module, array):
""" Delete Protection Group"""
changed = False
if ":" in module.params['pgroup']:
try:
target = ''.join(module.params['target'])
array.destroy_pgroup(module.params['pgroup'], on=target)
changed = True
except Exception:
module.fail_json(msg='Deleting pgroup {0} failed.'.format(module.params['pgroup']))
else:
try:
array.destroy_pgroup(module.params['pgroup'])
changed = True
except Exception:
module.fail_json(msg='Deleting pgroup {0} failed.'.format(module.params['pgroup']))
if module.params['eradicate']:
eradicate_pgroup(module, array)
module.exit_json(changed=changed)
def main():
argument_spec = purefa_argument_spec()
argument_spec.update(dict(
pgroup=dict(type='str', required=True),
state=dict(type='str', default='present', choices=['absent', 'present']),
volume=dict(type='list'),
host=dict(type='list'),
hostgroup=dict(type='list'),
target=dict(type='list'),
eradicate=dict(type='bool', default=False),
enabled=dict(type='bool', default=True),
))
mutually_exclusive = [['volume', 'host', 'hostgroup']]
module = AnsibleModule(argument_spec,
mutually_exclusive=mutually_exclusive,
supports_check_mode=False)
state = module.params['state']
array = get_system(module)
api_version = array._list_available_rest_versions()
if ":" in module.params['pgroup'] and OFFLOAD_API_VERSION not in api_version:
module.fail_json(msg='API version does not support offload protection groups.')
pgroup = get_pgroup(module, array)
xpgroup = get_pending_pgroup(module, array)
if module.params['host']:
try:
for hst in module.params['host']:
array.get_host(hst)
except Exception:
module.fail_json(msg='Host {0} not found'.format(hst))
if module.params['hostgroup']:
try:
for hstg in module.params['hostgroup']:
array.get_hgroup(hstg)
except Exception:
module.fail_json(msg='Hostgroup {0} not found'.format(hstg))
if pgroup and state == 'present':
update_pgroup(module, array)
elif pgroup and state == 'absent':
delete_pgroup(module, array)
elif xpgroup and state == 'absent' and module.params['eradicate']:
eradicate_pgroup(module, array)
elif not pgroup and not xpgroup and state == 'present':
make_pgroup(module, array)
elif pgroup is None and state == 'absent':
module.exit_json(changed=False)
if __name__ == '__main__':
main()
|
gpl-3.0
|
Pennebaker/wagtail
|
wagtail/wagtailadmin/views/userbar.py
|
19
|
2436
|
import warnings
from django.shortcuts import render
from django.contrib.auth.decorators import permission_required
from wagtail.wagtailadmin.userbar import EditPageItem, AddPageItem, ApproveModerationEditPageItem, RejectModerationEditPageItem
from wagtail.wagtailcore import hooks
from wagtail.wagtailcore.models import Page, PageRevision
from wagtail.utils.deprecation import RemovedInWagtail12Warning
@permission_required('wagtailadmin.access_admin', raise_exception=True)
def for_frontend(request, page_id):
items = [
EditPageItem(Page.objects.get(id=page_id)),
AddPageItem(Page.objects.get(id=page_id)),
]
# TODO: Remove in 1.2 release
run_deprecated_edit_bird_hook(request, items)
for fn in hooks.get_hooks('construct_wagtail_userbar'):
fn(request, items)
# Render the items
rendered_items = [item.render(request) for item in items]
# Remove any unrendered items
rendered_items = [item for item in rendered_items if item]
# Render the edit bird
return render(request, 'wagtailadmin/userbar/base.html', {
'items': rendered_items,
})
@permission_required('wagtailadmin.access_admin', raise_exception=True)
def for_moderation(request, revision_id):
items = [
EditPageItem(PageRevision.objects.get(id=revision_id).page),
AddPageItem(PageRevision.objects.get(id=revision_id).page),
ApproveModerationEditPageItem(PageRevision.objects.get(id=revision_id)),
RejectModerationEditPageItem(PageRevision.objects.get(id=revision_id)),
]
# TODO: Remove in 1.2 release
run_deprecated_edit_bird_hook(request, items)
for fn in hooks.get_hooks('construct_wagtail_userbar'):
fn(request, items)
# Render the items
rendered_items = [item.render(request) for item in items]
# Remove any unrendered items
rendered_items = [item for item in rendered_items if item]
# Render the edit bird
return render(request, 'wagtailadmin/userbar/base.html', {
'items': rendered_items,
})
def run_deprecated_edit_bird_hook(request, items):
for fn in hooks.get_hooks('construct_wagtail_edit_bird'):
fn(request, items)
warnings.warn(
"The 'construct_wagtail_edit_bird' hook has been renamed to 'construct_wagtail_userbar'."
"Please update function '%s' in '%s'." % (fn.__name__, fn.__module__), RemovedInWagtail12Warning
)
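# Illustrative sketch of registering the renamed hook (standard wagtail hook
# registration; the item class is hypothetical):
#
#   @hooks.register('construct_wagtail_userbar')
#   def add_my_item(request, items):
#       items.append(MyUserbarItem())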
|
bsd-3-clause
|
tellesnobrega/sahara
|
sahara/service/validation.py
|
3
|
6298
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
from oslo_utils import reflection
from sahara import exceptions as ex
from sahara.i18n import _
from sahara.utils import api as u
from sahara.utils import api_validator
from sahara.utils import types
def _get_path(path):
if path:
path_string = path[0]
for x in path[1:]:
path_string += '[%s]' % str(x)
return path_string + ': '
return ''
def _generate_error(errors):
message = [_get_path(list(e.path)) + e.message for e in errors]
if message:
return ex.SaharaException('\n'.join(message), "VALIDATION_ERROR")
def validate_pagination_limit():
request_args = u.get_request_args()
if 'limit' in request_args:
if types.is_int(request_args['limit']):
if not int(request_args['limit']) > 0:
raise ex.SaharaException(
_("'limit' must be positive integer"), 400)
else:
raise ex.SaharaException(
_("'limit' must be positive integer"), 400)
def get_sorting_field():
request_args = u.get_request_args()
if 'sort_by' in request_args:
sort_by = request_args['sort_by']
if sort_by:
sort_by = sort_by[1:] if sort_by[0] == '-' else sort_by
return sort_by
return None
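# Illustrative: a request arg of sort_by=-name yields 'name' here; the leading
# '-' (descending marker) is stripped before the field is validated below.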
def validate_sorting_clusters():
field = get_sorting_field()
if field is None:
return
if field not in ['id', 'name', 'plugin_name', 'hadoop_version',
'status']:
raise ex.SaharaException(
_("Unknown field for sorting %s") % field, 400)
def validate_sorting_cluster_templates():
field = get_sorting_field()
if field is None:
return
if field not in ['id', 'name', 'plugin_name', 'hadoop_version',
'created_at', 'updated_at']:
raise ex.SaharaException(
_("Unknown field for sorting %s") % field, 400)
def validate_sorting_node_group_templates():
field = get_sorting_field()
if field is None:
return
if field not in ['id', 'name', 'plugin_name', 'hadoop_version',
'created_at', 'updated_at']:
raise ex.SaharaException(
_("Unknown field for sorting %s") % field, 400)
def validate_sorting_job_binaries():
field = get_sorting_field()
if field is None:
return
if field not in ['id', 'name', 'created_at', 'updated_at']:
raise ex.SaharaException(
_("Unknown field for sorting %s") % field, 400)
def validate_sorting_job_binary_internals():
field = get_sorting_field()
if field is None:
return
if field not in ['id', 'name', 'created_at', 'updated_at']:
raise ex.SaharaException(
_("Unknown field for sorting %s") % field, 400)
def validate_sorting_data_sources():
field = get_sorting_field()
if field is None:
return
if field not in ['id', 'name', 'type', 'created_at', 'updated_at']:
raise ex.SaharaException(
_("Unknown field for sorting %s") % field, 400)
def validate_sorting_jobs():
field = get_sorting_field()
if field is None:
return
if field not in ['id', 'name', 'type', 'created_at', 'updated_at']:
raise ex.SaharaException(
_("Unknown field for sorting %s") % field, 400)
def validate_sorting_job_executions():
field = get_sorting_field()
if field is None:
return
if field not in ['id', 'job_template', 'cluster', 'status']:
raise ex.SaharaException(
_("Unknown field for sorting %s") % field, 400)
def validate(schema, *validators):
def decorator(func):
@functools.wraps(func)
def handler(*args, **kwargs):
request_data = u.request_data()
try:
if schema:
validator = api_validator.ApiValidator(schema)
errors = validator.iter_errors(request_data)
error = _generate_error(errors)
if error:
return u.bad_request(error)
if validators:
for validator in validators:
validator(**kwargs)
except ex.SaharaException as e:
return u.bad_request(e)
except Exception as e:
return u.internal_error(
500, "Error occurred during validation", e)
return func(*args, **kwargs)
return handler
return decorator
def check_exists(get_func, *id_prop, **get_args):
def decorator(func):
@functools.wraps(func)
def handler(*args, **kwargs):
if id_prop and not get_args:
get_args['id'] = id_prop[0]
if 'marker' in id_prop:
if 'marker' not in u.get_request_args():
return func(*args, **kwargs)
kwargs['marker'] = u.get_request_args()['marker']
get_kwargs = {}
for get_arg in get_args:
get_kwargs[get_arg] = kwargs[get_args[get_arg]]
obj = None
try:
obj = get_func(**get_kwargs)
except Exception as e:
cls_name = reflection.get_class_name(e, fully_qualified=False)
if 'notfound' not in cls_name.lower():
raise e
if obj is None:
e = ex.NotFoundException(get_kwargs,
_('Object with %s not found'))
return u.not_found(e)
if 'marker' in kwargs:
del(kwargs['marker'])
return func(*args, **kwargs)
return handler
return decorator
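# Illustrative sketch of stacking these decorators on an API handler (the
# handler, schema, and get function names are hypothetical):
#
#   @validate(cluster_schema, validate_sorting_clusters)
#   @check_exists(api.get_cluster, 'cluster_id')
#   def clusters_get(cluster_id):
#       ...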
|
apache-2.0
|
moozilla/dvcticker
|
dvcticker/main.py
|
1
|
12328
|
#todo: raise exceptions, then catch them to generate error images
import webapp2
from google.appengine.api import urlfetch
import json
from PIL import Image, ImageDraw, ImageFont
from google.appengine.api import memcache
import StringIO
import jinja2
import os
from decimal import Decimal, ROUND_DOWN  # fixed-point math for better accuracy
from google.appengine import runtime # for catching DeadlineExceededError
from google.appengine.api import urlfetch_errors # "
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
#imgFont = ImageFont.load('static/font/ncenB12.pil') # for testing locally, can't get truetype to work locally
imgFont = ImageFont.truetype('static/font/tahoma_bold.ttf', 14, encoding='unic')
def urlfetch_cache(url,exchange):
# fetches a url, but using memcache to not hammer the exchanges server
data = memcache.get(url)
if data is not None:
return process_json(data, exchange)
else:
try:
result = urlfetch.fetch(url,deadline=30) #timeout after 30 sec
if result.status_code == 200:
value = process_json(result.content, exchange)
memcache.add(url, result.content, 30) #cache for 30 sec
memcache.add('longcache'+url, result.content, 3000) #also cache for 5min in case of timeouts
return value
else:
                return 'Error: '+exchange+' status code '+str(result.status_code)
        except (runtime.DeadlineExceededError,          # overall request timed out
                runtime.apiproxy_errors.DeadlineExceededError,  # an RPC exceeded its deadline
                urlfetch_errors.DeadlineExceededError,  # the URLFetch itself timed out
                urlfetch.Error):                        # catches DownloadError
            # fall back to the longer-lived cache entry
            data = memcache.get('longcache'+url)
            if data is not None: return process_json(data, exchange)
            else: return 'Error: '+exchange+' timeout'
def process_json(txt, exchange):
    # TODO: add error handling in case bad JSON is passed
if exchange == 'vircurex':
if txt == '"Unknown currency"': return 'Error: bad Vircurex API result'
obj = json.loads(txt)
return obj['value']
elif exchange == 'mtgox_bid':
obj = json.loads(txt)
if obj['result'] == 'success':
return obj['return']['buy']['value']
else:
return 'Error: bad MTGox API result'
elif exchange == 'mtgox_ask':
obj = json.loads(txt)
if obj['result'] == 'success':
return obj['return']['sell']['value']
else:
return 'Error: bad MTGox API result'
elif exchange == 'btce_bid':
obj = json.loads(txt)
if not any('error' in s for s in obj):
return str(obj['ticker']['buy'])
else:
return 'Error: bad BTC-E API result'
elif exchange == 'btce_ask':
obj = json.loads(txt)
if not any('error' in s for s in obj):
return str(obj['ticker']['sell'])
else:
return 'Error: bad BTC-E API result'
elif exchange == 'campbx_bid':
obj = json.loads(txt)
# need to check for error
return obj['Best Bid']
elif exchange == 'campbx_ask':
obj = json.loads(txt)
# need to check for error
return obj['Best Ask']
else:
return 'Error: invalid exchange'
def get_campbx_value(base,alt,amount):
url = 'http://campbx.com/api/xticker.php'
reverse = False
if base == 'btc':
if alt != 'usd': return 'Error: only BTC/USD valid on CampBX'
exch = 'campbx_bid'
elif base == 'usd':
if alt != 'btc': return 'Error: only BTC/USD valid on CampBX'
exch = 'campbx_ask'
reverse = True
else:
return 'Error: only BTC/USD valid on CampBX'
value = urlfetch_cache(url,exch)
if value.startswith('Error'): return value
    if reverse: return str((Decimal(amount) / Decimal(value)).quantize(Decimal('.00000001'), rounding=ROUND_DOWN)) # round down to 8 decimal places
else: return str(Decimal(amount) * Decimal(value))
def get_mtgox_value(base,alt,amount):
cur = ['usd', 'aud', 'cad', 'chf', 'cny', 'dkk',
'eur', 'gbp', 'hkd', 'jpy', 'nzd', 'pln', 'rub', 'sek', 'sgd', 'thb']
reverse = False # true if going from cur-> btc
if base == 'btc':
if not any(alt in s for s in cur):
return 'Error: invalid destination currency'
url = 'http://data.mtgox.com/api/1/btc'+alt+'/ticker'
exch = 'mtgox_bid'
elif any(base in s for s in cur):
if alt != 'btc':
return 'Error: destination currency must be BTC'
url = 'http://data.mtgox.com/api/1/btc'+base+'/ticker' #mtgox api always has btc first
exch = 'mtgox_ask'
reverse = True
else:
return 'Error: invalid base currency'
value = urlfetch_cache(url,exch)
if value.startswith('Error'): return value
    if reverse: return str((Decimal(amount) / Decimal(value)).quantize(Decimal('.00000001'), rounding=ROUND_DOWN)) # round down to 8 decimal places
else: return str(Decimal(amount) * Decimal(value))
def get_btce_value(base,alt,amount):
# in BTC-e currencies must be traded in pairs, we also support going in reverse (buying)
cur_fwd = {'btc':['usd','rur','eur'], 'ltc':['btc','usd','rur'], 'nmc':['btc'], 'usd':['rur'], 'eur':['usd'], 'nvc':['btc'], 'trc':['btc'], 'ppc':['btc'], 'ftc':['btc'], 'cnc':['btc']}
cur_rev = {'btc':['ltc','nmc','nvc','trc','ppc','ftc','cnc'], 'usd':['btc','ltc'], 'rur':['btc','usd'], 'eur':['btc']}
reverse = False # if going from cur-> btc
if any(base in s for s in cur_fwd) and any(alt in s for s in cur_fwd[base]):
#if not any(alt in s for s in cur_fwd[base]):
#return 'Error: invalid destination currency' # can't return here because some can be base or alt
url = 'https://btc-e.com/api/2/'+base+'_'+alt+'/ticker' #https://btc-e.com/api/2/nmc_btc/ticker
exch = 'btce_bid'
else:
if any(base in s for s in cur_rev):
if not any(alt in s for s in cur_rev[base]):
return 'Error: invalid currency pair'
url = 'https://btc-e.com/api/2/'+alt+'_'+base+'/ticker'
exch = 'btce_ask'
reverse = True
else:
return 'Error: invalid currency pair'
value = urlfetch_cache(url,exch)
if value.startswith('Error'): return value
    if reverse: return str((Decimal(amount) / Decimal(value)).quantize(Decimal('.00000001'), rounding=ROUND_DOWN)) # round down to 8 decimal places
else: return str(Decimal(amount) * Decimal(value))
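# Illustrative pair resolution using the tables above: base='ltc', alt='btc'
# matches cur_fwd -> .../ltc_btc/ticker with the bid price (amount * value);
# base='btc', alt='ltc' matches cur_rev -> the same ltc_btc ticker with the
# ask price and amount / value (the reverse, i.e. buying, direction).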
def get_vircurex_value(type, base, alt, amount):
# gets json from vircurex about bid/ask prices
# eg. https://vircurex.com/api/get_highest_bid.json?base=BTC&alt=NMC
if type == 'bid':
url = 'https://vircurex.com/api/get_highest_bid.json'
elif type == 'ask':
url = 'https://vircurex.com/api/get_lowest_ask.json'
else:
return 'Error: Type must be either "bid" or "ask"'
cur = ['btc', 'dvc', 'ixc', 'ltc', 'nmc', 'ppc', 'trc', 'usd', 'eur', 'ftc', 'frc', 'cnc']
if not any(base in s for s in cur): return 'Error: invalid currency'
if not any(alt in s for s in cur): return 'Error: invalid currency'
url += '?base=' + base + '&alt=' + alt
value = urlfetch_cache(url,'vircurex')
if value.startswith('Error'): return value
return str(Decimal(amount)*Decimal(value)) # return amount * value
def get_bid(exchange, amount, base, alt):
if exchange == 'vircurex':
return get_vircurex_value('bid',base,alt,amount)
elif exchange == 'mtgox':
return get_mtgox_value(base,alt,amount)
elif exchange == 'btc-e':
return get_btce_value(base,alt,amount)
elif exchange == 'campbx':
return get_campbx_value(base,alt,amount)
else:
return 'Error: bad exchange'
def get_text_width(text):
img = Image.new("RGBA", (1,1)) # just used to calculate the text size, size doesn't matter
draw = ImageDraw.Draw(img)
    w, h = draw.textsize(text, imgFont) # calculate width font will take up
return w
# returns text, with optional coin icon, in string encoded form so it can be written to HTTP response
def make_img(text, text_pos, coinimg=None):
img = Image.new("RGBA", (get_text_width(str) + text_pos, 20))
draw = ImageDraw.Draw(img) # set draw to new image
    if coinimg is not None:
img.paste(coinimg, (0,2)) #paste the coin image into the generated image
    draw.text((text_pos,1), text, font=imgFont, fill='#555555')
output = StringIO.StringIO()
img.save(output, format='png')
img_to_serve = output.getvalue()
output.close()
return img_to_serve
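# Illustrative usage sketch (values are hypothetical): render '1.23' with a
# 16x16 coin icon on the left and the text offset 19px to clear it.
#
#   coin = Image.open('static/img/btc.png')
#   png_bytes = make_img('1.23', 19, coin)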
class MainHandler(webapp2.RequestHandler):
def get(self):
#base = self.request.get('base','dvc')
#alt = self.request.get('alt','btc')
#value = get_vircurex_value('bid',base,alt)
#template_values = {
# 'value': value
#}
template = JINJA_ENVIRONMENT.get_template('index.html')
self.response.write(template.render())#template_values))
class ImageHandler(webapp2.RequestHandler):
def get(self,exchange,amount,base,alt):
if amount == '': amount = '1' # default amount is 1
exchange = exchange.lower() # make sure everything is lowercase
base = base.lower()
        if alt is None:
if base == 'btc': alt = 'usd' # btc.png just shows btc value in usd
else: alt = 'btc' # if no alt specified, default to BTC
alt = alt.lower()
value = get_bid(exchange,amount,base,alt)
#if bid.startswith('Error'): value = bid
#else: value = str(Decimal(amount)*Decimal(bid))
text_pos = 19 # 3 px after coin image (all are 16x16)
if value.startswith('Error'):
text_pos = 0
elif alt == 'usd':
# round down to 2 decimal places
value = '$ '+str(Decimal(value).quantize(Decimal('.01'), rounding=ROUND_DOWN))
text_pos = 2
elif alt == 'eur':
# euro symbol in unicode (only works with truetype fonts)
value = u'\u20AC '+str(Decimal(value).quantize(Decimal('.01'), rounding=ROUND_DOWN))
text_pos = 2 # have to position euro symbol so it doesn't cut off
elif any(alt in s for s in ['aud', 'cad', 'chf', 'cny', 'dkk',
'gbp', 'hkd', 'jpy', 'nzd', 'pln', 'rub', 'sek', 'sgd', 'thb', 'rur', 'nvc']):
value = alt.upper() + ' ' + value
text_pos = 2
#text_pos 0 = error
if text_pos!=0 and any(alt in s for s in ['btc', 'dvc', 'ixc', 'ltc', 'nmc', 'ppc', 'trc', 'ftc', 'frc', 'cnc']):
coinimg = Image.open('static/img/'+alt+'.png')
else: coinimg = None
img_to_serve = make_img(value, text_pos, coinimg)
self.response.headers['Content-Type'] = 'image/png'
self.response.out.write(img_to_serve)
class ErrorHandler(webapp2.RequestHandler):
def get(self):
img_to_serve = make_img('Error: Malformed URL', 0)
self.response.headers['Content-Type'] = 'image/png'
self.response.out.write(img_to_serve)
app = webapp2.WSGIApplication([
('/', MainHandler),
('/([^/]+)/(\d*\.?\d*)([A-Za-z]+)(?:/([A-Za-z]+))?(?:\.png)?', ImageHandler),
('/.*', ErrorHandler)
], debug=True)
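# Illustrative requests matched by the image route above (hypothetical values):
#   /vircurex/100dvc/btc.png -> exchange=vircurex, amount=100, base=dvc, alt=btc
#   /mtgox/btc.png           -> exchange=mtgox, amount defaults to 1, alt defaults to usd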
|
mit
|
m45t3r/i3pystatus
|
i3pystatus/scores/nhl.py
|
4
|
14628
|
from i3pystatus.core.util import internet, require
from i3pystatus.scores import ScoresBackend
import copy
import json
import pytz
import re
import time
from datetime import datetime
from urllib.request import urlopen
LIVE_URL = 'https://www.nhl.com/gamecenter/%s'
SCOREBOARD_URL = 'https://www.nhl.com/scores'
API_URL = 'https://statsapi.web.nhl.com/api/v1/schedule?startDate=%04d-%02d-%02d&endDate=%04d-%02d-%02d&expand=schedule.teams,schedule.linescore,schedule.broadcasts.all&site=en_nhl&teamId='
class NHL(ScoresBackend):
'''
Backend to retrieve NHL scores. For usage examples, see :py:mod:`here
<.scores>`.
.. rubric:: Available formatters
* `{home_name}` — Name of home team
* `{home_city}` — Name of home team's city
* `{home_abbrev}` — 3-letter abbreviation for home team's city
* `{home_score}` — Home team's current score
* `{home_wins}` — Home team's number of wins
* `{home_losses}` — Home team's number of losses
* `{home_otl}` — Home team's number of overtime losses
* `{home_favorite}` — Displays the value for the :py:mod:`.scores` module's
``favorite`` attribute, if the home team is one of the teams being
followed. Otherwise, this formatter will be blank.
* `{home_empty_net}` — Shows the value from the ``empty_net`` parameter
when the home team's net is empty.
* `{away_name}` — Name of away team
* `{away_city}` — Name of away team's city
    * `{away_abbrev}` — 3-letter abbreviation for away team's city
* `{away_score}` — Away team's current score
* `{away_wins}` — Away team's number of wins
* `{away_losses}` — Away team's number of losses
* `{away_otl}` — Away team's number of overtime losses
* `{away_favorite}` — Displays the value for the :py:mod:`.scores` module's
``favorite`` attribute, if the away team is one of the teams being
followed. Otherwise, this formatter will be blank.
* `{away_empty_net}` — Shows the value from the ``empty_net`` parameter
when the away team's net is empty.
* `{period}` — Current period
* `{venue}` — Name of arena where game is being played
* `{start_time}` — Start time of game in system's localtime (supports
strftime formatting, e.g. `{start_time:%I:%M %p}`)
* `{overtime}` — If the game ended in overtime or a shootout, this
      formatter will show ``OT`` or ``SO``. If the game ended in regulation,
or has not yet completed, this formatter will be blank.
.. rubric:: Playoffs
In the playoffs, losses are not important (as the losses will be equal to
the other team's wins). Therefore, it is a good idea during the playoffs to
manually set format strings to exclude information on team losses. For
example:
.. code-block:: python
from i3pystatus import Status
from i3pystatus.scores import nhl
status = Status()
status.register(
'scores',
hints={'markup': 'pango'},
colorize_teams=True,
favorite_icon='<span size="small" color="#F5FF00">★</span>',
backends=[
nhl.NHL(
favorite_teams=['CHI'],
format_pregame = '[{scroll} ]NHL: [{away_favorite} ]{away_abbrev} ({away_wins}) at [{home_favorite} ]{home_abbrev} ({home_wins}) {start_time:%H:%M %Z}',
format_final = '[{scroll} ]NHL: [{away_favorite} ]{away_abbrev} {away_score} ({away_wins}) at [{home_favorite} ]{home_abbrev} {home_score} ({home_wins}) (Final[/{overtime}])',
),
],
)
.. rubric:: Team abbreviations
* **ANA** — Anaheim Ducks
* **ARI** — Arizona Coyotes
* **BOS** — Boston Bruins
* **BUF** — Buffalo Sabres
* **CAR** — Carolina Hurricanes
* **CBJ** — Columbus Blue Jackets
* **CGY** — Calgary Flames
* **CHI** — Chicago Blackhawks
* **COL** — Colorado Avalanche
* **DAL** — Dallas Stars
* **DET** — Detroit Red Wings
* **EDM** — Edmonton Oilers
* **FLA** — Florida Panthers
* **LAK** — Los Angeles Kings
* **MIN** — Minnesota Wild
* **MTL** — Montreal Canadiens
* **NJD** — New Jersey Devils
* **NSH** — Nashville Predators
* **NYI** — New York Islanders
* **NYR** — New York Rangers
* **OTT** — Ottawa Senators
* **PHI** — Philadelphia Flyers
* **PIT** — Pittsburgh Penguins
* **SJS** — San Jose Sharks
* **STL** — St. Louis Blues
* **TBL** — Tampa Bay Lightning
* **TOR** — Toronto Maple Leafs
* **VAN** — Vancouver Canucks
* **WPG** — Winnipeg Jets
* **WSH** — Washington Capitals
'''
interval = 300
settings = (
('favorite_teams', 'List of abbreviations of favorite teams. Games '
'for these teams will appear first in the scroll '
'list. A detailed description of how games are '
'ordered can be found '
':ref:`here <scores-game-order>`.'),
('all_games', 'If set to ``True``, all games will be present in '
'the scroll list. If set to ``False``, then only '
'games from **favorite_teams** will be present in '
'the scroll list.'),
('display_order', 'When **all_games** is set to ``True``, this '
'option will dictate the order in which games from '
'teams not in **favorite_teams** are displayed'),
('format_no_games', 'Format used when no tracked games are scheduled '
'for the current day (does not support formatter '
'placeholders)'),
('format_pregame', 'Format used when the game has not yet started'),
('format_in_progress', 'Format used when the game is in progress'),
('format_final', 'Format used when the game is complete'),
('empty_net', 'Value for the ``{away_empty_net}`` or '
'``{home_empty_net}`` formatter when the net is empty. '
'When the net is not empty, these formatters will be '
'empty strings.'),
('team_colors', 'Dictionary mapping team abbreviations to hex color '
'codes. If overridden, the passed values will be '
'merged with the defaults, so it is not necessary to '
'define all teams if specifying this value.'),
('date', 'Date for which to display game scores, in **YYYY-MM-DD** '
'format. If unspecified, the current day\'s games will be '
'displayed starting at 10am Eastern time, with last '
'evening\'s scores being shown before then. This option '
'exists primarily for troubleshooting purposes.'),
('live_url', 'URL string to launch NHL GameCenter. This value should '
'not need to be changed.'),
('scoreboard_url', 'Link to the NHL.com scoreboard page. Like '
'**live_url**, this value should not need to be '
'changed.'),
('api_url', 'Alternate URL string from which to retrieve score data. '
'Like **live_url**, this value should not need to be '
'changed.'),
)
required = ()
_default_colors = {
'ANA': '#B4A277',
'ARI': '#AC313A',
'BOS': '#F6BD27',
'BUF': '#1568C5',
'CAR': '#FA272E',
'CBJ': '#1568C5',
'CGY': '#D23429',
'CHI': '#CD0E24',
'COL': '#9F415B',
'DAL': '#058158',
'DET': '#E51937',
'EDM': '#2F6093',
'FLA': '#E51837',
'LAK': '#DADADA',
'MIN': '#176B49',
'MTL': '#C8011D',
'NJD': '#CC0000',
'NSH': '#FDB71A',
'NYI': '#F8630D',
'NYR': '#1576CA',
'OTT': '#C50B2F',
'PHI': '#FF690B',
'PIT': '#FFB81C',
'SJS': '#007888',
'STL': '#1764AD',
'TBL': '#296AD5',
'TOR': '#296AD5',
'VAN': '#0454FA',
'WPG': '#1568C5',
'WSH': '#E51937',
}
    _valid_teams = list(_default_colors)
_valid_display_order = ['in_progress', 'final', 'pregame']
display_order = _valid_display_order
format_no_games = 'NHL: No games'
format_pregame = '[{scroll} ]NHL: [{away_favorite} ]{away_abbrev} ({away_wins}-{away_losses}-{away_otl}) at [{home_favorite} ]{home_abbrev} ({home_wins}-{home_losses}-{home_otl}) {start_time:%H:%M %Z}'
format_in_progress = '[{scroll} ]NHL: [{away_favorite} ]{away_abbrev} {away_score}[ ({away_power_play})][ ({away_empty_net})], [{home_favorite} ]{home_abbrev} {home_score}[ ({home_power_play})][ ({home_empty_net})] ({time_remaining} {period})'
format_final = '[{scroll} ]NHL: [{away_favorite} ]{away_abbrev} {away_score} ({away_wins}-{away_losses}-{away_otl}) at [{home_favorite} ]{home_abbrev} {home_score} ({home_wins}-{home_losses}-{home_otl}) (Final[/{overtime}])'
empty_net = 'EN'
team_colors = _default_colors
live_url = LIVE_URL
scoreboard_url = SCOREBOARD_URL
api_url = API_URL
@require(internet)
def check_scores(self):
self.get_api_date()
url = self.api_url % (self.date.year, self.date.month, self.date.day,
self.date.year, self.date.month, self.date.day)
game_list = self.get_nested(self.api_request(url),
'dates:0:games',
default=[])
# Convert list of games to dictionary for easy reference later on
data = {}
team_game_map = {}
for game in game_list:
try:
id_ = game['gamePk']
except KeyError:
continue
try:
for key in ('home', 'away'):
team = game['teams'][key]['team']['abbreviation'].upper()
if team in self.favorite_teams:
team_game_map.setdefault(team, []).append(id_)
except KeyError:
continue
data[id_] = game
self.interpret_api_return(data, team_game_map)
def process_game(self, game):
ret = {}
def _update(ret_key, game_key=None, callback=None, default='?'):
ret[ret_key] = self.get_nested(game,
game_key or ret_key,
callback=callback,
default=default)
self.logger.debug('Processing %s game data: %s',
self.__class__.__name__, game)
_update('id', 'gamePk')
ret['live_url'] = self.live_url % ret['id']
_update('period', 'linescore:currentPeriodOrdinal', default='')
_update('time_remaining',
'linescore:currentPeriodTimeRemaining',
lambda x: x.capitalize(),
default='')
_update('venue', 'venue:name')
pp_strength = self.get_nested(game,
'linescore:powerPlayStrength',
default='')
for team in ('home', 'away'):
_update('%s_score' % team,
'teams:%s:score' % team,
callback=self.force_int,
default=0)
_update('%s_wins' % team,
'teams:%s:leagueRecord:wins' % team,
callback=self.force_int,
default=0)
_update('%s_losses' % team,
'teams:%s:leagueRecord:losses' % team,
callback=self.force_int,
default=0)
_update('%s_otl' % team,
'teams:%s:leagueRecord:ot' % team,
callback=self.force_int,
default=0)
_update('%s_city' % team, 'teams:%s:team:shortName' % team)
_update('%s_name' % team, 'teams:%s:team:teamName' % team)
_update('%s_abbrev' % team, 'teams:%s:team:abbreviation' % team)
_update('%s_power_play' % team,
'linescore:teams:%s:powerPlay' % team,
lambda x: pp_strength if x and pp_strength != 'Even' else '')
_update('%s_empty_net' % team,
'linescore:teams:%s:goaliePulled' % team,
lambda x: self.empty_net if x else '')
if game.get('gameType') == 'P':
for team in ('home', 'away'):
# Series wins are the remainder of dividing wins by 4
ret['_'.join((team, 'wins'))] %= 4
# Series losses are the other team's wins
ret['home_losses'] = ret['away_wins']
ret['away_losses'] = ret['home_wins']
_update('status',
'status:abstractGameState',
lambda x: x.lower().replace(' ', '_'))
if ret['status'] == 'live':
ret['status'] = 'in_progress'
elif ret['status'] == 'final':
_update('overtime',
'linescore:currentPeriodOrdinal',
lambda x: x if 'OT' in x or x == 'SO' else '')
elif ret['status'] != 'in_progress':
ret['status'] = 'pregame'
# Game time is in UTC, ISO format, thank the FSM
# Ex. 2016-04-02T17:00:00Z
game_time_str = game.get('gameDate', '')
try:
game_time = datetime.strptime(game_time_str, '%Y-%m-%dT%H:%M:%SZ')
except ValueError as exc:
# Log when the date retrieved from the API return doesn't match the
# expected format (to help troubleshoot API changes), and set an
# actual datetime so format strings work as expected. The times
# will all be wrong, but the logging here will help us make the
# necessary changes to adapt to any API changes.
self.logger.error(
'Error encountered determining %s game time for game %s:',
self.__class__.__name__,
                ret['id'],
exc_info=True
)
            game_time = datetime(1970, 1, 1)
ret['start_time'] = pytz.utc.localize(game_time).astimezone()
self.logger.debug('Returned %s formatter data: %s',
self.__class__.__name__, ret)
return ret
|
mit
|
seanjtaylor/out-for-justice
|
scripts/test_optimize.py
|
1
|
1921
|
import random
import pickle
import numpy as np
import networkx as nx
from app.optim import slow_compute_loss, step
def main(input_file, num_police, num_steps, prob_step):
"""
Parameters
----------
num_police : the number of police to use
num_steps : the number of steps to take
prob_step : the probability of taking a step if it doesn't improve loss
"""
with open(input_file) as f:
graph = pickle.load(f)
graph = nx.convert_node_labels_to_integers(graph)
N = graph.number_of_nodes()
# compute random starting places
starting_positions = np.zeros(N)
places = random.sample(xrange(N), num_police)
starting_positions[places] = 1
# one outcome that is uniformly distributed
risks = np.ones(N).reshape((-1, 1))
import time
start = time.time()
# initialize the optimization
positions = [starting_positions]
losses = [slow_compute_loss(graph, positions[-1], risks)]
current = positions[-1]
tried = set()
for i in range(num_steps):
new_position = step(graph, current)
pos_id = tuple(new_position.nonzero()[0])
if pos_id in tried:
continue
tried.add(pos_id)
positions.append(new_position)
losses.append(slow_compute_loss(graph, new_position, risks))
if (losses[-1] < losses[-2]) or (random.random() < prob_step):
current = new_position
print time.time() - start
print sorted(losses)[:10]
if __name__ == '__main__':
from argparse import ArgumentParser
parser = ArgumentParser()
parser.add_argument('input_file')
parser.add_argument('--num_police', type=int, default=1)
parser.add_argument('--num_steps', type=int, default=100)
parser.add_argument('--prob_step', type=float, default=0.25)
args = parser.parse_args()
main(args.input_file, args.num_police, args.num_steps, args.prob_step)
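    # Illustrative invocation (input file name is hypothetical):
    #   python test_optimize.py graph.pkl --num_police 2 --num_steps 500 --prob_step 0.1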
|
mit
|
dothiko/mypaint
|
lib/layer/test.py
|
1
|
1433
|
# This file is part of MyPaint.
# Copyright (C) 2011-2015 by Andrew Chadwick <[email protected]>
# Copyright (C) 2007-2012 by Martin Renold <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
def make_test_stack():
"""Makes a simple test RootLayerStack (2 branches of 3 leaves each)
:return: The root stack, and a list of its leaves.
:rtype: tuple
"""
import lib.layer.group
import lib.layer.data
import lib.layer.tree
root = lib.layer.tree.RootLayerStack(doc=None)
layer0 = lib.layer.group.LayerStack(name='0')
root.append(layer0)
layer00 = lib.layer.data.PaintingLayer(name='00')
layer0.append(layer00)
layer01 = lib.layer.data.PaintingLayer(name='01')
layer0.append(layer01)
layer02 = lib.layer.data.PaintingLayer(name='02')
layer0.append(layer02)
layer1 = lib.layer.group.LayerStack(name='1')
root.append(layer1)
layer10 = lib.layer.data.PaintingLayer(name='10')
layer1.append(layer10)
layer11 = lib.layer.data.PaintingLayer(name='11')
layer1.append(layer11)
layer12 = lib.layer.data.PaintingLayer(name='12')
layer1.append(layer12)
return (root, [layer00, layer01, layer02, layer10, layer11, layer12])
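# Illustrative usage sketch: build the stack and inspect its leaves.
#
#   root, leaves = make_test_stack()
#   [leaf.name for leaf in leaves]   # ['00', '01', '02', '10', '11', '12']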
|
gpl-2.0
|
gamedev2/cocos2d-x-2.2
|
tools/bindings-generator/generator.py
|
3
|
34140
|
#!/usr/bin/env python
# generator.py
# simple C++ generator, originally targeted at Spidermonkey bindings
#
# Copyright (c) 2011 - Zynga Inc.
from clang import cindex
import sys
import pdb
import ConfigParser
import yaml
import re
import os
import inspect
from Cheetah.Template import Template
type_map = {
cindex.TypeKind.VOID : "void",
cindex.TypeKind.BOOL : "bool",
cindex.TypeKind.CHAR_U : "unsigned char",
cindex.TypeKind.UCHAR : "unsigned char",
cindex.TypeKind.CHAR16 : "char",
cindex.TypeKind.CHAR32 : "char",
cindex.TypeKind.USHORT : "unsigned short",
cindex.TypeKind.UINT : "unsigned int",
cindex.TypeKind.ULONG : "unsigned long",
cindex.TypeKind.ULONGLONG : "unsigned long long",
cindex.TypeKind.CHAR_S : "char",
cindex.TypeKind.SCHAR : "char",
cindex.TypeKind.WCHAR : "wchar_t",
cindex.TypeKind.SHORT : "short",
cindex.TypeKind.INT : "int",
cindex.TypeKind.LONG : "long",
cindex.TypeKind.LONGLONG : "long long",
cindex.TypeKind.FLOAT : "float",
cindex.TypeKind.DOUBLE : "double",
cindex.TypeKind.LONGDOUBLE : "long double",
cindex.TypeKind.NULLPTR : "NULL",
cindex.TypeKind.OBJCID : "id",
cindex.TypeKind.OBJCCLASS : "class",
cindex.TypeKind.OBJCSEL : "SEL",
# cindex.TypeKind.ENUM : "int"
}
INVALID_NATIVE_TYPE = "??"
default_arg_type_arr = [
# An integer literal.
cindex.CursorKind.INTEGER_LITERAL,
# A floating point number literal.
cindex.CursorKind.FLOATING_LITERAL,
# An imaginary number literal.
cindex.CursorKind.IMAGINARY_LITERAL,
# A string literal.
cindex.CursorKind.STRING_LITERAL,
# A character literal.
cindex.CursorKind.CHARACTER_LITERAL,
# [C++ 2.13.5] C++ Boolean Literal.
cindex.CursorKind.CXX_BOOL_LITERAL_EXPR,
# [C++0x 2.14.7] C++ Pointer Literal.
cindex.CursorKind.CXX_NULL_PTR_LITERAL_EXPR
]
def native_name_from_type(ntype, underlying=False):
kind = ntype.get_canonical().kind
const = "const " if ntype.is_const_qualified() else ""
if not underlying and kind == cindex.TypeKind.ENUM:
decl = ntype.get_declaration()
return namespaced_name(decl)
elif kind in type_map:
return const + type_map[kind]
elif kind == cindex.TypeKind.RECORD:
# might be an std::string
decl = ntype.get_declaration()
parent = decl.semantic_parent
if decl.spelling == "string" and parent and parent.spelling == "std":
return "std::string"
else:
# print >> sys.stderr, "probably a function pointer: " + str(decl.spelling)
return const + decl.spelling
else:
# name = ntype.get_declaration().spelling
# print >> sys.stderr, "Unknown type: " + str(kind) + " " + str(name)
return INVALID_NATIVE_TYPE
# pdb.set_trace()
def build_namespace(cursor, namespaces=None):
    '''
    build the full namespace for a specific cursor
    '''
    if namespaces is None:
        namespaces = []
    if cursor:
parent = cursor.semantic_parent
if parent:
if parent.kind == cindex.CursorKind.NAMESPACE or parent.kind == cindex.CursorKind.CLASS_DECL:
namespaces.append(parent.displayname)
build_namespace(parent, namespaces)
return namespaces
def namespaced_name(declaration_cursor):
ns_list = build_namespace(declaration_cursor, [])
ns_list.reverse()
ns = "::".join(ns_list)
if len(ns) > 0:
return ns + "::" + declaration_cursor.displayname
return declaration_cursor.displayname
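# Illustrative example (class name is hypothetical): for a declaration cursor
# of cocos2d::CCNode, build_namespace collects ['cocos2d'], so namespaced_name
# returns "cocos2d::CCNode".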
class NativeType(object):
def __init__(self, ntype):
self.type = ntype
self.is_pointer = False
self.is_object = False
self.not_supported = False
self.namespaced_name = ""
self.name = ""
if ntype.kind == cindex.TypeKind.POINTER:
pointee = ntype.get_pointee()
self.is_pointer = True
if pointee.kind == cindex.TypeKind.RECORD:
decl = pointee.get_declaration()
self.is_object = True
self.name = decl.displayname
self.namespaced_name = namespaced_name(decl)
else:
self.name = native_name_from_type(pointee)
self.namespaced_name = self.name
self.name += "*"
self.namespaced_name += "*"
elif ntype.kind == cindex.TypeKind.LVALUEREFERENCE:
pointee = ntype.get_pointee()
decl = pointee.get_declaration()
self.namespaced_name = namespaced_name(decl)
if pointee.kind == cindex.TypeKind.RECORD:
self.name = decl.displayname
self.is_object = True
else:
self.name = native_name_from_type(pointee)
else:
if ntype.kind == cindex.TypeKind.RECORD:
decl = ntype.get_declaration()
self.is_object = True
self.name = decl.displayname
self.namespaced_name = namespaced_name(decl)
else:
self.name = native_name_from_type(ntype)
self.namespaced_name = self.name
# mark argument as not supported
if self.name == INVALID_NATIVE_TYPE:
self.not_supported = True
def from_native(self, convert_opts):
assert(convert_opts.has_key('generator'))
generator = convert_opts['generator']
name = self.name
if self.is_object:
if self.is_pointer and not name in generator.config['conversions']['from_native']:
name = "object"
elif not generator.config['conversions']['from_native'].has_key(name):
name = "object"
elif self.type.get_canonical().kind == cindex.TypeKind.ENUM:
name = "int"
if generator.config['conversions']['from_native'].has_key(name):
tpl = generator.config['conversions']['from_native'][name]
tpl = Template(tpl, searchList=[convert_opts])
return str(tpl).rstrip()
return "#pragma warning NO CONVERSION FROM NATIVE FOR " + name
def to_native(self, convert_opts):
assert('generator' in convert_opts)
generator = convert_opts['generator']
name = self.name
if self.is_object:
if self.is_pointer and not name in generator.config['conversions']['to_native']:
name = "object"
elif not name in generator.config['conversions']['to_native']:
name = "object"
elif self.type.get_canonical().kind == cindex.TypeKind.ENUM:
name = "int"
if generator.config['conversions']['to_native'].has_key(name):
tpl = generator.config['conversions']['to_native'][name]
tpl = Template(tpl, searchList=[convert_opts])
return str(tpl).rstrip()
return "#pragma warning NO CONVERSION TO NATIVE FOR " + name
def to_string(self, generator):
conversions = generator.config['conversions']
if conversions.has_key('native_types') and conversions['native_types'].has_key(self.namespaced_name):
return conversions['native_types'][self.namespaced_name]
return self.namespaced_name
def __str__(self):
return self.namespaced_name
class NativeField(object):
def __init__(self, cursor):
cursor = cursor.canonical
self.cursor = cursor
self.name = cursor.displayname
self.kind = cursor.type.kind
self.location = cursor.location
member_field_re = re.compile('m_(\w+)')
match = member_field_re.match(self.name)
if match:
self.pretty_name = match.group(1)
else:
self.pretty_name = self.name
# return True if found default argument.
def iterate_param_node(param_node):
for node in param_node.get_children():
if (node.kind in default_arg_type_arr):
# print("------ "+str(node.kind))
return True
if (iterate_param_node(node)):
return True
return False
class NativeFunction(object):
def __init__(self, cursor):
self.cursor = cursor
self.func_name = cursor.spelling
self.signature_name = self.func_name
self.arguments = []
self.static = cursor.kind == cindex.CursorKind.CXX_METHOD and cursor.is_method_static()
self.implementations = []
self.is_constructor = False
self.not_supported = False
result = cursor.result_type
# get the result
if result.kind == cindex.TypeKind.LVALUEREFERENCE:
result = result.get_pointee()
self.ret_type = NativeType(cursor.result_type)
# parse the arguments
# if self.func_name == "spriteWithFile":
# pdb.set_trace()
for arg in cursor.type.argument_types():
nt = NativeType(arg)
self.arguments.append(nt)
# mark the function as not supported if at least one argument is not supported
if nt.not_supported:
self.not_supported = True
found_default_arg = False
index = -1
for arg_node in self.cursor.get_children():
if arg_node.kind == cindex.CursorKind.PARM_DECL:
index+=1
if (iterate_param_node(arg_node)):
found_default_arg = True
break
self.min_args = index if found_default_arg else len(self.arguments)
def generate_code(self, current_class=None, generator=None):
gen = current_class.generator if current_class else generator
config = gen.config
tpl = Template(file=os.path.join(gen.target, "templates", "function.h"),
searchList=[current_class, self])
gen.head_file.write(str(tpl))
if self.static:
if config['definitions'].has_key('sfunction'):
tpl = Template(config['definitions']['sfunction'],
searchList=[current_class, self])
self.signature_name = str(tpl)
tpl = Template(file=os.path.join(gen.target, "templates", "sfunction.c"),
searchList=[current_class, self])
else:
if not self.is_constructor:
if config['definitions'].has_key('ifunction'):
tpl = Template(config['definitions']['ifunction'],
searchList=[current_class, self])
self.signature_name = str(tpl)
else:
if config['definitions'].has_key('constructor'):
tpl = Template(config['definitions']['constructor'],
searchList=[current_class, self])
self.signature_name = str(tpl)
tpl = Template(file=os.path.join(gen.target, "templates", "ifunction.c"),
searchList=[current_class, self])
gen.impl_file.write(str(tpl))
apidoc_function_js = Template(file=os.path.join(gen.target,
"templates",
"apidoc_function.js"),
searchList=[current_class, self])
gen.doc_file.write(str(apidoc_function_js))
class NativeOverloadedFunction(object):
def __init__(self, func_array):
self.implementations = func_array
self.func_name = func_array[0].func_name
self.signature_name = self.func_name
self.min_args = 100
self.is_constructor = False
for m in func_array:
self.min_args = min(self.min_args, m.min_args)
def append(self, func):
self.min_args = min(self.min_args, func.min_args)
self.implementations.append(func)
def generate_code(self, current_class=None):
gen = current_class.generator
config = gen.config
static = self.implementations[0].static
tpl = Template(file=os.path.join(gen.target, "templates", "function.h"),
searchList=[current_class, self])
gen.head_file.write(str(tpl))
if static:
if config['definitions'].has_key('sfunction'):
tpl = Template(config['definitions']['sfunction'],
searchList=[current_class, self])
self.signature_name = str(tpl)
tpl = Template(file=os.path.join(gen.target, "templates", "sfunction_overloaded.c"),
searchList=[current_class, self])
else:
if not self.is_constructor:
if config['definitions'].has_key('ifunction'):
tpl = Template(config['definitions']['ifunction'],
searchList=[current_class, self])
self.signature_name = str(tpl)
else:
if config['definitions'].has_key('constructor'):
tpl = Template(config['definitions']['constructor'],
searchList=[current_class, self])
self.signature_name = str(tpl)
tpl = Template(file=os.path.join(gen.target, "templates", "ifunction_overloaded.c"),
searchList=[current_class, self])
gen.impl_file.write(str(tpl))
class NativeClass(object):
def __init__(self, cursor, generator):
# the cursor to the implementation
self.cursor = cursor
self.class_name = cursor.displayname
self.namespaced_class_name = self.class_name
self.parents = []
self.fields = []
self.methods = {}
self.static_methods = {}
self.generator = generator
self.is_abstract = self.class_name in generator.abstract_classes
self._current_visibility = cindex.AccessSpecifierKind.PRIVATE
registration_name = generator.get_class_or_rename_class(self.class_name)
if generator.remove_prefix:
self.target_class_name = re.sub('^'+generator.remove_prefix, '', registration_name)
else:
self.target_class_name = registration_name
self.namespaced_class_name = namespaced_name(cursor)
self.parse()
def parse(self):
'''
        parse the current cursor, getting all the necessary information
'''
self._deep_iterate(self.cursor)
def methods_clean(self):
'''
clean list of methods (without the ones that should be skipped)
'''
ret = []
for name, impl in self.methods.iteritems():
should_skip = False
if name == 'constructor':
should_skip = True
else:
if self.generator.should_skip(self.class_name, name):
should_skip = True
if not should_skip:
ret.append({"name": name, "impl": impl})
return ret
def static_methods_clean(self):
'''
clean list of static methods (without the ones that should be skipped)
'''
ret = []
for name, impl in self.static_methods.iteritems():
should_skip = self.generator.should_skip(self.class_name, name)
if not should_skip:
ret.append({"name": name, "impl": impl})
return ret
def generate_code(self):
'''
actually generate the code. it uses the current target templates/rules in order to
generate the right code
'''
config = self.generator.config
prelude_h = Template(file=os.path.join(self.generator.target, "templates", "prelude.h"),
searchList=[{"current_class": self}])
prelude_c = Template(file=os.path.join(self.generator.target, "templates", "prelude.c"),
searchList=[{"current_class": self}])
apidoc_classhead_js = Template(file=os.path.join(self.generator.target,
"templates",
"apidoc_classhead.js"),
searchList=[{"current_class": self}])
self.generator.head_file.write(str(prelude_h))
self.generator.impl_file.write(str(prelude_c))
self.generator.doc_file.write(str(apidoc_classhead_js))
for m in self.methods_clean():
m['impl'].generate_code(self)
for m in self.static_methods_clean():
m['impl'].generate_code(self)
# generate register section
register = Template(file=os.path.join(self.generator.target, "templates", "register.c"),
searchList=[{"current_class": self}])
apidoc_classfoot_js = Template(file=os.path.join(self.generator.target,
"templates",
"apidoc_classfoot.js"),
searchList=[{"current_class": self}])
self.generator.impl_file.write(str(register))
self.generator.doc_file.write(str(apidoc_classfoot_js))
def _deep_iterate(self, cursor=None):
for node in cursor.get_children():
if self._process_node(node):
self._deep_iterate(node)
def _process_node(self, cursor):
'''
        process the node, depending on the type. If this returns True, a deep
        iteration is performed on its children. Otherwise processing continues with its siblings (if any)
@param: cursor the cursor to analyze
'''
if cursor.kind == cindex.CursorKind.CXX_BASE_SPECIFIER and not self.class_name in self.generator.classes_have_no_parents:
parent = cursor.get_definition()
if parent.displayname not in self.generator.base_classes_to_skip:
#if parent and self.generator.in_listed_classes(parent.displayname):
if not self.generator.generated_classes.has_key(parent.displayname):
parent = NativeClass(parent, self.generator)
self.generator.generated_classes[parent.class_name] = parent
else:
parent = self.generator.generated_classes[parent.displayname]
self.parents.append(parent)
elif cursor.kind == cindex.CursorKind.FIELD_DECL:
self.fields.append(NativeField(cursor))
elif cursor.kind == cindex.CursorKind.CXX_ACCESS_SPEC_DECL:
self._current_visibility = cursor.get_access_specifier()
elif cursor.kind == cindex.CursorKind.CXX_METHOD:
# skip if variadic
if self._current_visibility == cindex.AccessSpecifierKind.PUBLIC and not cursor.type.is_function_variadic():
m = NativeFunction(cursor)
registration_name = self.generator.should_rename_function(self.class_name, m.func_name) or m.func_name
# bail if the function is not supported (at least one arg not supported)
if m.not_supported:
return
if m.static:
if not self.static_methods.has_key(registration_name):
self.static_methods[registration_name] = m
else:
previous_m = self.static_methods[registration_name]
if isinstance(previous_m, NativeOverloadedFunction):
previous_m.append(m)
else:
self.static_methods[registration_name] = NativeOverloadedFunction([m, previous_m])
else:
if not self.methods.has_key(registration_name):
self.methods[registration_name] = m
else:
previous_m = self.methods[registration_name]
if isinstance(previous_m, NativeOverloadedFunction):
previous_m.append(m)
else:
self.methods[registration_name] = NativeOverloadedFunction([m, previous_m])
return True
elif self._current_visibility == cindex.AccessSpecifierKind.PUBLIC and cursor.kind == cindex.CursorKind.CONSTRUCTOR and not self.is_abstract:
m = NativeFunction(cursor)
m.is_constructor = True
if not self.methods.has_key('constructor'):
self.methods['constructor'] = m
else:
previous_m = self.methods['constructor']
if isinstance(previous_m, NativeOverloadedFunction):
previous_m.append(m)
else:
m = NativeOverloadedFunction([m, previous_m])
m.is_constructor = True
self.methods['constructor'] = m
return True
# else:
# print >> sys.stderr, "unknown cursor: %s - %s" % (cursor.kind, cursor.displayname)
return False
class Generator(object):
def __init__(self, opts):
self.index = cindex.Index.create()
self.outdir = opts['outdir']
self.prefix = opts['prefix']
self.headers = opts['headers'].split(' ')
self.classes = opts['classes']
self.classes_have_no_parents = opts['classes_have_no_parents'].split(' ')
self.base_classes_to_skip = opts['base_classes_to_skip'].split(' ')
self.abstract_classes = opts['abstract_classes'].split(' ')
self.clang_args = opts['clang_args']
self.target = opts['target']
self.remove_prefix = opts['remove_prefix']
self.target_ns = opts['target_ns']
self.impl_file = None
self.head_file = None
self.skip_classes = {}
self.generated_classes = {}
self.rename_functions = {}
self.rename_classes = {}
self.out_file = opts['out_file']
self.script_control_cpp = opts['script_control_cpp'] == "yes"
if opts['skip']:
list_of_skips = re.split(",\n?", opts['skip'])
for skip in list_of_skips:
class_name, methods = skip.split("::")
self.skip_classes[class_name] = []
match = re.match("\[([^]]+)\]", methods)
if match:
self.skip_classes[class_name] = match.group(1).split(" ")
else:
raise Exception("invalid list of skip methods")
if opts['rename_functions']:
list_of_function_renames = re.split(",\n?", opts['rename_functions'])
for rename in list_of_function_renames:
class_name, methods = rename.split("::")
self.rename_functions[class_name] = {}
match = re.match("\[([^]]+)\]", methods)
if match:
list_of_methods = match.group(1).split(" ")
for pair in list_of_methods:
k, v = pair.split("=")
self.rename_functions[class_name][k] = v
else:
raise Exception("invalid list of rename methods")
if opts['rename_classes']:
list_of_class_renames = re.split(",\n?", opts['rename_classes'])
for rename in list_of_class_renames:
class_name, renamed_class_name = rename.split("::")
self.rename_classes[class_name] = renamed_class_name
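        # Illustrative option formats parsed above (names are hypothetical):
        #   skip = CCNode::[setUserData getUserData],
        #          CCDirector::[*]
        #   rename_functions = CCNode::[addChild=add]
        #   rename_classes = CCNode::Node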
def should_rename_function(self, class_name, method_name):
if self.rename_functions.has_key(class_name) and self.rename_functions[class_name].has_key(method_name):
# print >> sys.stderr, "will rename %s to %s" % (method_name, self.rename_functions[class_name][method_name])
return self.rename_functions[class_name][method_name]
return None
def get_class_or_rename_class(self, class_name):
if self.rename_classes.has_key(class_name):
# print >> sys.stderr, "will rename %s to %s" % (method_name, self.rename_functions[class_name][method_name])
return self.rename_classes[class_name]
return class_name
def should_skip(self, class_name, method_name, verbose=False):
if class_name == "*" and self.skip_classes.has_key("*"):
for func in self.skip_classes["*"]:
if re.match(func, method_name):
return True
else:
for key in self.skip_classes.iterkeys():
if key == "*" or re.match("^" + key + "$", class_name):
if verbose:
print "%s in skip_classes" % (class_name)
if len(self.skip_classes[key]) == 1 and self.skip_classes[key][0] == "*":
if verbose:
print "%s will be skipped completely" % (class_name)
return True
if method_name != None:
for func in self.skip_classes[key]:
if re.match(func, method_name):
if verbose:
print "%s will skip method %s" % (class_name, method_name)
return True
if verbose:
print "%s will be accepted (%s, %s)" % (class_name, key, self.skip_classes[key])
return False
def in_listed_classes(self, class_name):
"""
returns True if the class is in the list of required classes and it's not in the skip list
"""
for key in self.classes:
md = re.match("^" + key + "$", class_name)
if md and not self.should_skip(class_name, None):
return True
return False
def sorted_classes(self):
'''
sorted classes in order of inheritance
'''
sorted_list = []
for class_name in self.generated_classes.iterkeys():
nclass = self.generated_classes[class_name]
sorted_list += self._sorted_parents(nclass)
        # remove dupes from the list, preserving order
        no_dupes = []
        for i in sorted_list:
            if i not in no_dupes:
                no_dupes.append(i)
return no_dupes
def _sorted_parents(self, nclass):
'''
returns the sorted list of parents for a native class
'''
sorted_parents = []
for p in nclass.parents:
if p.class_name in self.generated_classes.keys():
sorted_parents += self._sorted_parents(p)
if nclass.class_name in self.generated_classes.keys():
sorted_parents.append(nclass.class_name)
return sorted_parents
def generate_code(self):
# must read the yaml file first
stream = file(os.path.join(self.target, "conversions.yaml"), "r")
data = yaml.load(stream)
self.config = data
implfilepath = os.path.join(self.outdir, self.out_file + ".cpp")
headfilepath = os.path.join(self.outdir, self.out_file + ".hpp")
docfilepath = os.path.join(self.outdir, self.out_file + "_api.js")
self.impl_file = open(implfilepath, "w+")
self.head_file = open(headfilepath, "w+")
self.doc_file = open(docfilepath, "w+")
layout_h = Template(file=os.path.join(self.target, "templates", "layout_head.h"),
searchList=[self])
layout_c = Template(file=os.path.join(self.target, "templates", "layout_head.c"),
searchList=[self])
apidoc_ns_js = Template(file=os.path.join(self.target, "templates", "apidoc_ns.js"),
searchList=[self])
self.head_file.write(str(layout_h))
self.impl_file.write(str(layout_c))
self.doc_file.write(str(apidoc_ns_js))
self._parse_headers()
layout_h = Template(file=os.path.join(self.target, "templates", "layout_foot.h"),
searchList=[self])
layout_c = Template(file=os.path.join(self.target, "templates", "layout_foot.c"),
searchList=[self])
self.head_file.write(str(layout_h))
self.impl_file.write(str(layout_c))
self.impl_file.close()
self.head_file.close()
self.doc_file.close()
def _pretty_print(self, diagnostics):
print("====\nErrors in parsing headers:")
severities=['Ignored', 'Note', 'Warning', 'Error', 'Fatal']
for idx, d in enumerate(diagnostics):
print "%s. <severity = %s,\n location = %r,\n details = %r>" % (
idx+1, severities[d.severity], d.location, d.spelling)
print("====\n")
def _parse_headers(self):
for header in self.headers:
tu = self.index.parse(header, self.clang_args)
if len(tu.diagnostics) > 0:
self._pretty_print(tu.diagnostics)
is_fatal = False
for d in tu.diagnostics:
if d.severity >= cindex.Diagnostic.Error:
is_fatal = True
if is_fatal:
print("*** Found errors - can not continue")
raise Exception("Fatal error in parsing headers")
self._deep_iterate(tu.cursor)
def _deep_iterate(self, cursor, depth=0):
# get the canonical type
if cursor.kind == cindex.CursorKind.CLASS_DECL:
if cursor == cursor.type.get_declaration() and self.in_listed_classes(cursor.displayname):
if not self.generated_classes.has_key(cursor.displayname):
nclass = NativeClass(cursor, self)
nclass.generate_code()
self.generated_classes[cursor.displayname] = nclass
return
for node in cursor.get_children():
# print("%s %s - %s" % (">" * depth, node.displayname, node.kind))
self._deep_iterate(node, depth + 1)
def main():
from optparse import OptionParser
parser = OptionParser("usage: %prog [options] {configfile}")
parser.add_option("-s", action="store", type="string", dest="section",
help="sets a specific section to be converted")
parser.add_option("-t", action="store", type="string", dest="target",
help="specifies the target vm. Will search for TARGET.yaml")
parser.add_option("-o", action="store", type="string", dest="outdir",
help="specifies the output directory for generated C++ code")
parser.add_option("-n", action="store", type="string", dest="out_file",
help="specifcies the name of the output file, defaults to the prefix in the .ini file")
(opts, args) = parser.parse_args()
# script directory
workingdir = os.path.dirname(inspect.getfile(inspect.currentframe()))
if len(args) == 0:
parser.error('invalid number of arguments')
userconfig = ConfigParser.SafeConfigParser()
userconfig.read('userconf.ini')
print 'Using userconfig \n ', userconfig.items('DEFAULT')
config = ConfigParser.SafeConfigParser()
config.read(args[0])
if (0 == len(config.sections())):
raise Exception("No sections defined in config file")
sections = []
if opts.section:
if (opts.section in config.sections()):
sections = []
sections.append(opts.section)
else:
raise Exception("Section not found in config file")
else:
print("processing all sections")
sections = config.sections()
# find available targets
targetdir = os.path.join(workingdir, "targets")
targets = []
if (os.path.isdir(targetdir)):
targets = [entry for entry in os.listdir(targetdir)
if (os.path.isdir(os.path.join(targetdir, entry)))]
if 0 == len(targets):
raise Exception("No targets defined")
if opts.target:
if (opts.target in targets):
targets = []
targets.append(opts.target)
if opts.outdir:
outdir = opts.outdir
else:
outdir = os.path.join(workingdir, "gen")
if not os.path.exists(outdir):
os.makedirs(outdir)
for t in targets:
        # Skip hidden '.svn', '.cvs' and '.git' etc. folders - these must be ignored, otherwise they will be interpreted as targets.
if t == ".svn" or t == ".cvs" or t == ".git" or t == ".gitignore":
continue
print "\n.... Generating bindings for target", t
for s in sections:
print "\n.... .... Processing section", s, "\n"
gen_opts = {
'prefix': config.get(s, 'prefix'),
'headers': (config.get(s, 'headers' , 0, dict(userconfig.items('DEFAULT')))),
'classes': config.get(s, 'classes').split(' '),
'clang_args': (config.get(s, 'extra_arguments', 0, dict(userconfig.items('DEFAULT'))) or "").split(" "),
'target': os.path.join(workingdir, "targets", t),
'outdir': outdir,
'remove_prefix': config.get(s, 'remove_prefix'),
'target_ns': config.get(s, 'target_namespace'),
'classes_have_no_parents': config.get(s, 'classes_have_no_parents'),
'base_classes_to_skip': config.get(s, 'base_classes_to_skip'),
'abstract_classes': config.get(s, 'abstract_classes'),
'skip': config.get(s, 'skip'),
'rename_functions': config.get(s, 'rename_functions'),
'rename_classes': config.get(s, 'rename_classes'),
'out_file': opts.out_file or config.get(s, 'prefix'),
'script_control_cpp': config.get(s, 'script_control_cpp') if config.has_option(s, 'script_control_cpp') else 'no'
}
generator = Generator(gen_opts)
generator.generate_code()
if __name__ == '__main__':
try:
main()
except Exception as e:
print e
sys.exit(1)
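# ---------------------------------------------------------------------------
# A minimal usage sketch (illustrative only; the section name, values and file
# names below are hypothetical, not part of this script). The positional
# argument is an .ini file whose sections supply the options read above
# (prefix, headers, classes, extra_arguments, remove_prefix, ...), e.g.:
#
#   [mylib]
#   prefix = mylib
#   headers = %(sdkdir)s/include/mylib.h
#   classes = Sprite Node
#   remove_prefix = My
#   target_namespace = mylib
#   ...
#
# and a typical invocation, assuming a "spidermonkey" folder exists under
# targets/ and a userconf.ini with a DEFAULT section sits in the working dir:
#
#   python generator.py -t spidermonkey -o gen mylib.ini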
|
mit
|
probablytom/tomwallis.net
|
venv/lib/python2.7/site-packages/pip/_vendor/html5lib/trie/datrie.py
|
1301
|
1178
|
from __future__ import absolute_import, division, unicode_literals
from datrie import Trie as DATrie
from pip._vendor.six import text_type
from ._base import Trie as ABCTrie
class Trie(ABCTrie):
def __init__(self, data):
chars = set()
for key in data.keys():
if not isinstance(key, text_type):
raise TypeError("All keys must be strings")
for char in key:
chars.add(char)
self._data = DATrie("".join(chars))
for key, value in data.items():
self._data[key] = value
def __contains__(self, key):
return key in self._data
def __len__(self):
return len(self._data)
def __iter__(self):
raise NotImplementedError()
def __getitem__(self, key):
return self._data[key]
def keys(self, prefix=None):
return self._data.keys(prefix)
def has_keys_with_prefix(self, prefix):
return self._data.has_keys_with_prefix(prefix)
def longest_prefix(self, prefix):
return self._data.longest_prefix(prefix)
def longest_prefix_item(self, prefix):
return self._data.longest_prefix_item(prefix)
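# A brief usage sketch (illustrative; assumes the optional `datrie` C
# extension that this adapter wraps is installed):
#
#   t = Trie({"foo": 1, "foobar": 2, "baz": 3})
#   "foo" in t                          # True
#   len(t)                              # 3
#   t.keys("foo")                       # ["foo", "foobar"]
#   t.has_keys_with_prefix("ba")        # True
#   t.longest_prefix("foobarbaz")       # "foobar"
#   t.longest_prefix_item("foobarbaz")  # ("foobar", 2)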
|
artistic-2.0
|
delimitry/ascii_clock
|
asciicanvas.py
|
1
|
6119
|
#-*- coding: utf-8 -*-
#-----------------------------------------------------------------------
# Author: delimitry
#-----------------------------------------------------------------------
class AsciiCanvas(object):
"""
ASCII canvas for drawing in console using ASCII chars
"""
def __init__(self, cols, lines, fill_char=' '):
"""
Initialize ASCII canvas
"""
if cols < 1 or cols > 1000 or lines < 1 or lines > 1000:
            raise ValueError('Canvas cols/lines must be in range [1..1000]')
self.cols = cols
self.lines = lines
if not fill_char:
fill_char = ' '
elif len(fill_char) > 1:
fill_char = fill_char[0]
self.fill_char = fill_char
self.canvas = [[fill_char] * (cols) for _ in range(lines)]
def clear(self):
"""
Fill canvas with empty chars
"""
self.canvas = [[self.fill_char] * (self.cols) for _ in range(self.lines)]
def print_out(self):
"""
Print out canvas to console
"""
print(self.get_canvas_as_str())
def add_line(self, x0, y0, x1, y1, fill_char='o'):
"""
Add ASCII line (x0, y0 -> x1, y1) to the canvas, fill line with `fill_char`
"""
if not fill_char:
fill_char = 'o'
elif len(fill_char) > 1:
fill_char = fill_char[0]
if x0 > x1:
# swap A and B
x1, x0 = x0, x1
y1, y0 = y0, y1
# get delta x, y
dx = x1 - x0
dy = y1 - y0
# if a length of line is zero just add point
if dx == 0 and dy == 0:
if self.check_coord_in_range(x0, y0):
self.canvas[y0][x0] = fill_char
return
# when dx >= dy use fill by x-axis, and use fill by y-axis otherwise
if abs(dx) >= abs(dy):
for x in range(x0, x1 + 1):
                y = y0 if dx == 0 else y0 + int(round((x - x0) * dy / float(dx)))
if self.check_coord_in_range(x, y):
self.canvas[y][x] = fill_char
else:
if y0 < y1:
for y in range(y0, y1 + 1):
                    x = x0 if dy == 0 else x0 + int(round((y - y0) * dx / float(dy)))
if self.check_coord_in_range(x, y):
self.canvas[y][x] = fill_char
else:
for y in range(y1, y0 + 1):
                    x = x0 if dy == 0 else x1 + int(round((y - y1) * dx / float(dy)))
if self.check_coord_in_range(x, y):
self.canvas[y][x] = fill_char
def add_text(self, x, y, text):
"""
Add text to canvas at position (x, y)
"""
for i, c in enumerate(text):
if self.check_coord_in_range(x + i, y):
self.canvas[y][x + i] = c
def add_rect(self, x, y, w, h, fill_char=' ', outline_char='o'):
"""
Add rectangle filled with `fill_char` and outline with `outline_char`
"""
if not fill_char:
fill_char = ' '
elif len(fill_char) > 1:
fill_char = fill_char[0]
if not outline_char:
outline_char = 'o'
elif len(outline_char) > 1:
outline_char = outline_char[0]
for px in range(x, x + w):
for py in range(y, y + h):
if self.check_coord_in_range(px, py):
if px == x or px == x + w - 1 or py == y or py == y + h - 1:
self.canvas[py][px] = outline_char
else:
self.canvas[py][px] = fill_char
def add_nine_patch_rect(self, x, y, w, h, outline_3x3_chars=None):
"""
Add nine-patch rectangle
"""
default_outline_3x3_chars = (
'.', '-', '.',
'|', ' ', '|',
'`', '-', "'"
)
if not outline_3x3_chars:
outline_3x3_chars = default_outline_3x3_chars
# filter chars
filtered_outline_3x3_chars = []
for index, char in enumerate(outline_3x3_chars[0:9]):
if not char:
char = default_outline_3x3_chars[index]
elif len(char) > 1:
char = char[0]
filtered_outline_3x3_chars.append(char)
for px in range(x, x + w):
for py in range(y, y + h):
if self.check_coord_in_range(px, py):
if px == x and py == y:
self.canvas[py][px] = filtered_outline_3x3_chars[0]
elif px == x and y < py < y + h - 1:
self.canvas[py][px] = filtered_outline_3x3_chars[3]
elif px == x and py == y + h - 1:
self.canvas[py][px] = filtered_outline_3x3_chars[6]
elif x < px < x + w - 1 and py == y:
self.canvas[py][px] = filtered_outline_3x3_chars[1]
elif x < px < x + w - 1 and py == y + h - 1:
self.canvas[py][px] = filtered_outline_3x3_chars[7]
elif px == x + w - 1 and py == y:
self.canvas[py][px] = filtered_outline_3x3_chars[2]
elif px == x + w - 1 and y < py < y + h - 1:
self.canvas[py][px] = filtered_outline_3x3_chars[5]
elif px == x + w - 1 and py == y + h - 1:
self.canvas[py][px] = filtered_outline_3x3_chars[8]
else:
self.canvas[py][px] = filtered_outline_3x3_chars[4]
def check_coord_in_range(self, x, y):
"""
Check that coordinate (x, y) is in range, to prevent out of range error
"""
return 0 <= x < self.cols and 0 <= y < self.lines
def get_canvas_as_str(self):
"""
Return canvas as a string
"""
return '\n'.join([''.join(col) for col in self.canvas])
def __str__(self):
"""
Return canvas as a string
"""
return self.get_canvas_as_str()
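if __name__ == '__main__':
    # A short demo sketch (not part of the original module; the coordinates
    # and characters are arbitrary): draw a framed box with a diagonal line
    # and a label, then print the result to the console.
    canvas = AsciiCanvas(20, 10)
    canvas.add_rect(0, 0, 20, 10, fill_char=' ', outline_char='#')
    canvas.add_line(2, 2, 17, 7, fill_char='*')
    canvas.add_text(3, 8, 'hello')
    canvas.print_out()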
|
mit
|
dhhjx880713/GPy
|
GPy/plotting/matplot_dep/variational_plots.py
|
6
|
4094
|
import numpy as np
from matplotlib import pyplot as pb
def plot(parameterized, fignum=None, ax=None, colors=None, figsize=(12, 6)):
"""
Plot latent space X in 1D:
- if fig is given, create input_dim subplots in fig and plot in these
- if ax is given plot input_dim 1D latent space plots of X into each `axis`
- if neither fig nor ax is given create a figure with fignum and plot in there
colors:
colors of different latent space dimensions input_dim
"""
if ax is None:
fig = pb.figure(num=fignum, figsize=figsize)
if colors is None:
from ..Tango import mediumList
from itertools import cycle
colors = cycle(mediumList)
pb.clf()
else:
colors = iter(colors)
lines = []
fills = []
bg_lines = []
means, variances = parameterized.mean.values, parameterized.variance.values
x = np.arange(means.shape[0])
for i in range(means.shape[1]):
if ax is None:
a = fig.add_subplot(means.shape[1], 1, i + 1)
elif isinstance(ax, (tuple, list)):
a = ax[i]
else:
raise ValueError("Need one ax per latent dimension input_dim")
bg_lines.append(a.plot(means, c='k', alpha=.3))
lines.extend(a.plot(x, means.T[i], c=next(colors), label=r"$\mathbf{{X_{{{}}}}}$".format(i)))
fills.append(a.fill_between(x,
means.T[i] - 2 * np.sqrt(variances.T[i]),
means.T[i] + 2 * np.sqrt(variances.T[i]),
facecolor=lines[-1].get_color(),
alpha=.3))
a.legend(borderaxespad=0.)
a.set_xlim(x.min(), x.max())
if i < means.shape[1] - 1:
a.set_xticklabels('')
pb.draw()
a.figure.tight_layout(h_pad=.01) # , rect=(0, 0, 1, .95))
return dict(lines=lines, fills=fills, bg_lines=bg_lines)
def plot_SpikeSlab(parameterized, fignum=None, ax=None, colors=None, side_by_side=True):
"""
Plot latent space X in 1D:
- if fig is given, create input_dim subplots in fig and plot in these
- if ax is given plot input_dim 1D latent space plots of X into each `axis`
- if neither fig nor ax is given create a figure with fignum and plot in there
colors:
colors of different latent space dimensions input_dim
"""
if ax is None:
if side_by_side:
fig = pb.figure(num=fignum, figsize=(16, min(12, (2 * parameterized.mean.shape[1]))))
else:
fig = pb.figure(num=fignum, figsize=(8, min(12, (2 * parameterized.mean.shape[1]))))
if colors is None:
from ..Tango import mediumList
from itertools import cycle
colors = cycle(mediumList)
pb.clf()
else:
colors = iter(colors)
plots = []
means, variances, gamma = parameterized.mean, parameterized.variance, parameterized.binary_prob
x = np.arange(means.shape[0])
for i in range(means.shape[1]):
if side_by_side:
            sub1 = (means.shape[1], 2, 2 * i + 1)
            sub2 = (means.shape[1], 2, 2 * i + 2)
        else:
            sub1 = (means.shape[1] * 2, 1, 2 * i + 1)
            sub2 = (means.shape[1] * 2, 1, 2 * i + 2)
# mean and variance plot
a = fig.add_subplot(*sub1)
a.plot(means, c='k', alpha=.3)
plots.extend(a.plot(x, means.T[i], c=next(colors), label=r"$\mathbf{{X_{{{}}}}}$".format(i)))
a.fill_between(x,
means.T[i] - 2 * np.sqrt(variances.T[i]),
means.T[i] + 2 * np.sqrt(variances.T[i]),
facecolor=plots[-1].get_color(),
alpha=.3)
a.legend(borderaxespad=0.)
a.set_xlim(x.min(), x.max())
if i < means.shape[1] - 1:
a.set_xticklabels('')
# binary prob plot
a = fig.add_subplot(*sub2)
        a.bar(x, gamma[:, i], bottom=0., linewidth=1., width=1.0, align='center')
a.set_xlim(x.min(), x.max())
a.set_ylim([0.,1.])
pb.draw()
fig.tight_layout(h_pad=.01) # , rect=(0, 0, 1, .95))
return fig
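# A minimal usage sketch (hypothetical: assumes `q` is a GPy variational
# posterior, e.g. the latent X of a Bayesian GPLVM, exposing `mean`,
# `variance` and -- for the spike-and-slab variant `q_ss` -- `binary_prob`):
#
#   artists = plot(q, figsize=(12, 6))  # dict with 'lines', 'fills', 'bg_lines'
#   fig = plot_SpikeSlab(q_ss, side_by_side=True)
#   pb.show()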
|
bsd-3-clause
|